@@ -14,18 +14,29 @@
 
 @pytest.mark.flaky(retries=3, delay=1)
 @pytest.mark.parametrize(
-    "test_file",
-    ["local_sparkR"],
+    "test_file,expected_warnings",
+    [
+        ("local_sparkR", ["WARNING: Using incubator modules: jdk.incubator.vector"]),
+        ("local_sparklyr", []),
+    ],
 )
 @pytest.mark.parametrize("output_format", ["pdf", "html", "markdown"])
 def test_spark_r_nbconvert(
-    container: TrackedContainer, test_file: str, output_format: str
+    container: TrackedContainer,
+    test_file: str,
+    output_format: str,
+    expected_warnings: list[str],
 ) -> None:
     host_data_file = THIS_DIR / "data" / f"{test_file}.ipynb"
     logs = check_nbconvert(
-        container, host_data_file, "markdown", execute=True, no_warnings=False
+        container,
+        host_data_file,
+        output_format,
+        execute=True,
+        no_warnings=(not expected_warnings),
     )
 
-    warnings = TrackedContainer.get_warnings(logs)
-    assert len(warnings) == 1
-    assert "Using incubator modules: jdk.incubator.vector" in warnings[0]
+    if expected_warnings:
+        warnings = TrackedContainer.get_warnings(logs)
+        assert len(warnings) == len(expected_warnings)
+        assert expected_warnings[0] == warnings[0]
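The refactor pairs each notebook with its expected warnings in a single parametrize entry, threads the previously hard-coded "markdown" format through as output_format, and derives no_warnings from whether any warnings are expected. A minimal standalone sketch of that parametrization pattern (the test name and final assertion here are hypothetical illustrations, not the repository's code):

import pytest

@pytest.mark.parametrize(
    "test_file,expected_warnings",
    [
        ("local_sparkR", ["WARNING: Using incubator modules: jdk.incubator.vector"]),
        ("local_sparklyr", []),
    ],
)
@pytest.mark.parametrize("output_format", ["pdf", "html", "markdown"])
def test_parametrize_pairs(
    test_file: str, expected_warnings: list[str], output_format: str
) -> None:
    # An empty expected_warnings list is falsy, so `not expected_warnings`
    # enables strict warning checking only for notebooks expected to be clean.
    no_warnings = not expected_warnings
    assert no_warnings == (test_file == "local_sparklyr")

Stacked parametrize marks multiply, so the two file cases times three output formats collect as six test runs.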