这是indexloc提供的服务,不要输入任何密码
Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 1 addition & 2 deletions CHANGELOG.rst
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,7 @@ Unreleased

Changed
~~~~~~~
- Dropped support for pyspark <3.5.0 after discovering that it does not work on Macs (this may not work for older versions as well).

- Dropped support for pyspark <3.5.0 on Macs after discovering that these configurations frequently crash. Older versions of the library may also be affected.

.. _v0.20.2:

Expand Down
99 changes: 88 additions & 11 deletions noxfile.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
API reference.
"""

import sys
from pathlib import Path

import nox
Expand All @@ -28,17 +29,93 @@
"""For test suites where we track coverage (i.e. the fast tests and the full
test suite), fail if test coverage falls below this percentage."""


def is_mac():
    """Return True when the interpreter is running on macOS.

    CPython reports macOS as the platform string "darwin".
    """
    platform_name = sys.platform
    return platform_name == "darwin"


# Dependency test matrix: one "oldest" and one "newest" pinned configuration
# per supported Python minor version.
#
# NOTE: on macOS ("darwin") pyspark <3.5.0 frequently crashes, so the
# "oldest" configurations floor pyspark at 3.5.0 there while other platforms
# keep the true oldest supported pin (3.3.1 for py3.9/3.10, 3.4.0 for py3.11).
# This mirrors the environment markers declared in pyproject.toml.
DEPENDENCY_MATRIX = [
    DependencyConfiguration(
        id="3.9-oldest",
        python="3.9",
        packages={
            "pyspark[sql]": "==3.3.1" if not is_mac() else "==3.5.0",
            "sympy": "==1.8",
            "pandas": "==1.4.0",
            "tmlt.core": "==0.18.0",
        },
    ),
    DependencyConfiguration(
        id="3.9-newest",
        python="3.9",
        packages={
            "pyspark[sql]": "==3.5.6",
            "sympy": "==1.9",
            "pandas": "==1.5.3",
            "tmlt.core": ">=0.18.0",
        },
    ),
    DependencyConfiguration(
        id="3.10-oldest",
        python="3.10",
        packages={
            "pyspark[sql]": "==3.3.1" if not is_mac() else "==3.5.0",
            "sympy": "==1.8",
            "pandas": "==1.4.0",
            "tmlt.core": "==0.18.0",
        },
    ),
    DependencyConfiguration(
        id="3.10-newest",
        python="3.10",
        packages={
            "pyspark[sql]": "==3.5.6",
            "sympy": "==1.9",
            "pandas": "==1.5.3",
            "tmlt.core": ">=0.18.0",
        },
    ),
    DependencyConfiguration(
        id="3.11-oldest",
        python="3.11",
        packages={
            "pyspark[sql]": "==3.4.0" if not is_mac() else "==3.5.0",
            "sympy": "==1.8",
            "pandas": "==1.5.0",
            "tmlt.core": "==0.18.0",
        },
    ),
    DependencyConfiguration(
        id="3.11-newest",
        python="3.11",
        packages={
            "pyspark[sql]": "==3.5.6",
            "sympy": "==1.9",
            "pandas": "==1.5.3",
            "tmlt.core": ">=0.18.0",
        },
    ),
    DependencyConfiguration(
        id="3.12-oldest",
        python="3.12",
        packages={
            # py3.12 already requires pyspark >=3.5.0 everywhere, so no
            # platform-conditional pin is needed here.
            "pyspark[sql]": "==3.5.0",
            "sympy": "==1.8",
            "pandas": "==2.2.0",
            "tmlt.core": "==0.18.0",
        },
    ),
    DependencyConfiguration(
        id="3.12-newest",
        python="3.12",
        packages={
            "pyspark[sql]": "==3.5.6",
            "sympy": "==1.9",
            "pandas": "==2.2.3",
            "tmlt.core": ">=0.18.0",
        },
    ),
]

AUDIT_VERSIONS = ["3.9", "3.10", "3.11", "3.12"]
Expand Down Expand Up @@ -101,7 +178,7 @@
sm.docs()

for benchmark_name, timeout in BENCHMARK_TO_TIMEOUT.items():
sm.benchmark(CWD / benchmark_name, timeout*60)
sm.benchmark(CWD / benchmark_name, timeout * 60)

sm.audit()

Expand Down
5 changes: 4 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,10 @@ dependencies = [
"pandas >=1.4.0,<2 ; python_version < '3.11'",
"pandas >=1.5.0,<2 ; python_version == '3.11'",
"pandas >=2.2.0,<3 ; python_version >= '3.12'",
"pyspark[sql] >=3.5.0,<3.6",
"pyspark[sql] >=3.3.1,<3.6 ; python_version < '3.11' and sys_platform != 'darwin'",
"pyspark[sql] >=3.4.0,<3.6 ; python_version == '3.11' and sys_platform != 'darwin'",
"pyspark[sql] >=3.5.0,<3.6 ; python_version >= '3.12' and sys_platform != 'darwin'",
"pyspark[sql] >=3.5.0,<3.6 ; sys_platform == 'darwin'",
"sympy >=1.8,<1.13",
"typeguard >=4.0.0,<5",
"typing-extensions >=4.1.0,<5",
Expand Down
101 changes: 68 additions & 33 deletions uv.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.