diff --git a/tests/functional/adapter/test_python_model.py b/tests/functional/adapter/test_python_model.py
index 85807ccfd..70a74e0be 100644
--- a/tests/functional/adapter/test_python_model.py
+++ b/tests/functional/adapter/test_python_model.py
@@ -5,7 +5,12 @@
     BasePythonModelTests,
     BasePythonIncrementalTests,
 )
-from dbt.tests.adapter.python_model.test_spark import BasePySparkTests
+from dbt.tests.adapter.python_model.test_spark import (
+    BasePySparkTests,
+    PANDAS_MODEL,
+    PANDAS_ON_SPARK_MODEL,
+    PYSPARK_MODEL,
+)
 
 
 @pytest.mark.skip_profile("apache_spark", "spark_session", "databricks_sql_endpoint")
@@ -15,7 +20,13 @@ class TestPythonModelSpark(BasePythonModelTests):
 
 @pytest.mark.skip_profile("apache_spark", "spark_session", "databricks_sql_endpoint")
 class TestPySpark(BasePySparkTests):
-    pass
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "pandas_df.py": PANDAS_MODEL,
+            "pyspark_df.py": PYSPARK_MODEL,
+            "pandas_on_spark_df.py": PANDAS_ON_SPARK_MODEL,
+        }
 
 
 @pytest.mark.skip_profile("apache_spark", "spark_session", "databricks_sql_endpoint")
@@ -53,7 +64,7 @@ def model(dbt, spark):
                 "ResourceClass": "SingleNode"
             }
         },
-        packages=['spacy', 'torch', 'pydantic<1.10.3', 'numpy<1.20']
+        packages=['spacy', 'torch', 'pydantic<1.10.3']
     )
     data = [[1,2]] * 10
     return spark.createDataFrame(data, schema=['test', 'test2'])