Skip to content

Commit

Permalink
Restrict Dask and Fix Serialization Tests (#2694)
Browse files Browse the repository at this point in the history
* restrict dask in pyproject.toml

* update release notes

* fix indent

* fix tests

* update release notes

* lint

* lint

* try to remove dask

* undo
  • Loading branch information
thehomebrewnerd authored Mar 19, 2024
1 parent 493feb7 commit d6b3243
Show file tree
Hide file tree
Showing 9 changed files with 46 additions and 39 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ repos:
- id: add-trailing-comma
name: Add trailing comma
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: 'v0.1.6'
rev: 'v0.3.3'
hooks:
- id: ruff
types_or: [ python, pyi, jupyter ]
Expand Down
13 changes: 8 additions & 5 deletions docs/source/release_notes.rst
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,18 @@
Release Notes
-------------

.. Future Release
==============
Future Release
==============
* Enhancements
* Fixes
* Changes
* Temporarily restrict Dask version :pr:`2694`
* Documentation Changes
* Testing Changes
* Fix serialization test to work with pytest 8.1.1 :pr:`2694`

.. Thanks to the following people for contributing to this release:
Thanks to the following people for contributing to this release:
:user:`thehomebrewnerd`

v1.30.0 Feb 26, 2024
====================
Expand All @@ -21,8 +24,8 @@ v1.30.0 Feb 26, 2024
* Testing Changes
* Update ``make_ecommerce_entityset`` to work without Dask (:pr:`2677`)

Thanks to the following people for contributing to this release:
:user:`tamargrey`, :user:`thehomebrewnerd`
Thanks to the following people for contributing to this release:
:user:`tamargrey`, :user:`thehomebrewnerd`

v1.29.0 Feb 16, 2024
====================
Expand Down
6 changes: 3 additions & 3 deletions featuretools/entityset/entityset.py
Original file line number Diff line number Diff line change
Expand Up @@ -1115,9 +1115,9 @@ def add_last_time_indexes(self, updated_dataframes=None):
child_cols = defaultdict(dict)
for r in self.relationships:
children[r._parent_dataframe_name].append(r.child_dataframe)
child_cols[r._parent_dataframe_name][
r._child_dataframe_name
] = r.child_column
child_cols[r._parent_dataframe_name][r._child_dataframe_name] = (
r.child_column
)

updated_dataframes = updated_dataframes or []
if updated_dataframes:
Expand Down
1 change: 1 addition & 0 deletions featuretools/feature_base/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
Custom caching class, currently used for FeatureBase
"""

# needed for defaultdict annotation if < python 3.9
from __future__ import annotations

Expand Down
10 changes: 5 additions & 5 deletions featuretools/feature_base/features_serializer.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,12 +123,12 @@ def _feature_definitions(self):
# being converted to strings, but integer dict values are not.
primitives_dict_key = str(primitive_number)
primitive_id_to_key[primitive_id] = primitives_dict_key
-                self._primitives_dict[
-                    primitives_dict_key
-                ] = serialize_primitive(primitive)
-                self._features_dict[name]["arguments"][
-                    "primitive"
-                ] = primitives_dict_key
+                self._primitives_dict[primitives_dict_key] = (
+                    serialize_primitive(primitive)
+                )
+                self._features_dict[name]["arguments"]["primitive"] = (
+                    primitives_dict_key
+                )
primitive_number += 1
else:
# Primitive we have seen already - use existing primitive_id key
Expand Down
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import warnings
from datetime import datetime

import numpy as np
Expand Down Expand Up @@ -388,10 +389,10 @@ def test_week_no_deprecation_message():
datetime(2019, 6, 17, 11, 10, 50),
datetime(2019, 11, 30, 19, 45, 15),
]
with pytest.warns(None) as record:
with warnings.catch_warnings():
warnings.simplefilter("error")
week = Week()
week(dates).tolist()
assert not record


def test_url_to_domain_urls():
Expand Down
1 change: 1 addition & 0 deletions featuretools/tests/profiling/dfs_profile.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
- max_depth > 2 is very slow (currently)
- stats output can be viewed online with https://nejc.saje.info/pstats-viewer.html
"""

import cProfile
from pathlib import Path

Expand Down
43 changes: 22 additions & 21 deletions featuretools/tests/synthesis/test_dfs_method.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import warnings
from unittest.mock import patch

import composeml as cp
Expand Down Expand Up @@ -87,10 +88,13 @@ def test_dfs_empty_features():
features_only=True,
)
assert features == []
with pytest.raises(AssertionError, match=error_text), patch.object(
DeepFeatureSynthesis,
"build_features",
return_value=[],
with (
pytest.raises(AssertionError, match=error_text),
patch.object(
DeepFeatureSynthesis,
"build_features",
return_value=[],
),
):
dfs(
dataframes,
Expand Down Expand Up @@ -447,7 +451,8 @@ def test_warns_with_unused_primitives(es):
assert record[0].message.args[0] == warning_text

# Should not raise a warning
with pytest.warns(None) as record:
with warnings.catch_warnings():
warnings.simplefilter("error")
dfs(
entityset=es,
target_dataframe_name="customers",
Expand All @@ -457,13 +462,12 @@ def test_warns_with_unused_primitives(es):
features_only=True,
)

assert not record


def test_no_warns_with_camel_and_title_case(es):
for trans_primitive in ["isNull", "IsNull"]:
# Should not raise a UnusedPrimitiveWarning warning
with pytest.warns(None) as record:
with warnings.catch_warnings():
warnings.simplefilter("error")
dfs(
entityset=es,
target_dataframe_name="customers",
Expand All @@ -472,11 +476,10 @@ def test_no_warns_with_camel_and_title_case(es):
features_only=True,
)

assert not record

for agg_primitive in ["numUnique", "NumUnique"]:
# Should not raise a UnusedPrimitiveWarning warning
with pytest.warns(None) as record:
with warnings.catch_warnings():
warnings.simplefilter("error")
dfs(
entityset=es,
target_dataframe_name="customers",
Expand All @@ -485,11 +488,10 @@ def test_no_warns_with_camel_and_title_case(es):
features_only=True,
)

assert not record


def test_does_not_warn_with_stacking_feature(pd_es):
with pytest.warns(None) as record:
with warnings.catch_warnings():
warnings.simplefilter("error")
dfs(
entityset=pd_es,
target_dataframe_name="régions",
Expand All @@ -501,8 +503,6 @@ def test_does_not_warn_with_stacking_feature(pd_es):
features_only=True,
)

assert not record


def test_warns_with_unused_where_primitives(es):
if es.dataframe_type == Library.SPARK:
Expand Down Expand Up @@ -549,7 +549,8 @@ def test_warns_with_unused_groupby_primitives(pd_es):
assert record[0].message.args[0] == warning_text

# Should not raise a warning
with pytest.warns(None) as record:
with warnings.catch_warnings():
warnings.simplefilter("error")
dfs(
entityset=pd_es,
target_dataframe_name="customers",
Expand All @@ -558,8 +559,6 @@ def test_warns_with_unused_groupby_primitives(pd_es):
features_only=True,
)

assert not record


def test_warns_with_unused_custom_primitives(pd_es):
class AboveTen(TransformPrimitive):
Expand Down Expand Up @@ -589,7 +588,8 @@ class AboveTen(TransformPrimitive):
assert record[0].message.args[0] == warning_text

# Should not raise a warning
with pytest.warns(None) as record:
with warnings.catch_warnings():
warnings.simplefilter("error")
dfs(
entityset=pd_es,
target_dataframe_name="customers",
Expand Down Expand Up @@ -625,7 +625,8 @@ class MaxAboveTen(AggregationPrimitive):
assert record[0].message.args[0] == warning_text

# Should not raise a warning
with pytest.warns(None) as record:
with warnings.catch_warnings():
warnings.simplefilter("error")
dfs(
entityset=pd_es,
target_dataframe_name="sessions",
Expand Down
4 changes: 2 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -65,8 +65,8 @@ test = [
"pytest-timeout >= 2.1.0"
]
dask = [
"dask[dataframe] >= 2023.2.0",
"distributed >= 2023.2.0",
"dask[dataframe] >= 2023.2.0, <2024.3.0",
"distributed >= 2023.2.0, <2024.3.0",
"woodwork[dask] >= 0.28.0",
]
spark = [
Expand Down

0 comments on commit d6b3243

Please sign in to comment.