Merged
8 files renamed without changes.
354 changes: 42 additions & 312 deletions noxfile.py
@@ -10,26 +10,18 @@
indicate this in their docstrings.
"""

import os
import subprocess
from pathlib import Path

import nox
from nox import session as nox_session
from nox_poetry import session as poetry_session
from tmlt.nox_utils import SessionBuilder
from tmlt.nox_utils.dependencies import install, show_installed
from tmlt.nox_utils.environment import with_clean_workdir
from nox import session as session
from tmlt.nox_utils import SessionManager, install_group

nox.options.default_venv_backend = "uv|virtualenv"
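# ("uv|virtualenv" is nox's backend-fallback syntax: prefer uv for creating
# session virtualenvs, and fall back to the stdlib virtualenv backend when uv
# is not installed.)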

CWD = Path(".").resolve()

PACKAGE_NAME = "tmlt.core"
"""Name of the package."""
PACKAGE_VERSION = (
subprocess.run(["poetry", "version", "-s"], capture_output=True, check=True)
.stdout.decode("utf-8")
.strip()
)
PACKAGE_SOURCE_DIR = "src/tmlt/core"
"""Relative path from the project root to its source code."""
# TODO(#2177): Once we have a better way to self-test our code, use it here in
# place of this import check.
SMOKETEST_SCRIPT = """
@@ -110,47 +102,7 @@
]
}

LICENSE_IGNORE_GLOBS = [
r".*\.ci.*",
r".*\.gitlab.*",
r".*\.ico",
r".*\.ipynb",
r".*\.json",
r".*\.png",
r".*\.svg",
r"ext\/.*",
]

LICENSE_IGNORE_FILES = [
r".gitignore",
r".gitlab-ci.yml",
r".pipeline_handlers",
r"CHANGELOG.rst",
r"CONTRIBUTING.md",
r"LICENSE.docs",
r"Makefile",
r"changelog.rst",
r"class.rst",
r"module.rst",
r"noxfile.py",
r"poetry.lock",
r"py.typed",
r"pyproject.toml",
r"test_requirements.txt",
]

LICENSE_YEAR_ONLY_FILES = [
r"LICENSE",
r"NOTICE",
]

LICENSE_KEYWORDS = ["Apache-2.0", "CC-BY-SA-4.0"]

ILLEGAL_WORDS_IGNORE_GLOBS = LICENSE_IGNORE_GLOBS
ILLEGAL_WORDS_IGNORE_FILES = LICENSE_IGNORE_FILES
ILLEGAL_WORDS = []

AUDIT_VERSIONS = ["3.8", "3.9", "3.10", "3.11", "3.12"]
AUDIT_VERSIONS = ["3.9", "3.10", "3.11", "3.12"]
AUDIT_SUPPRESSIONS = [
"PYSEC-2023-228",
# Affects: pip<23.3
@@ -160,85 +112,19 @@
# have their own pip installations -- it is not a dependency of Core.
]

CWD = Path(".").resolve()

_builder = SessionBuilder(
PACKAGE_NAME,
Path(PACKAGE_SOURCE_DIR).resolve(),
options={
"code_dirs": [Path(PACKAGE_SOURCE_DIR).resolve(), Path("test").resolve()],
"default_python_version": "3.9",
"smoketest_script": SMOKETEST_SCRIPT,
"dependency_matrix": DEPENDENCY_MATRIX,
"license_exclude_globs": LICENSE_IGNORE_GLOBS,
"license_exclude_files": LICENSE_IGNORE_FILES,
"license_year_only_files": LICENSE_YEAR_ONLY_FILES,
"license_keyword_patterns": LICENSE_KEYWORDS,
"check_copyright": True,
"illegal_words_exclude_globs": ILLEGAL_WORDS_IGNORE_GLOBS,
"illegal_words_exclude_files": ILLEGAL_WORDS_IGNORE_FILES,
"illegal_words": ILLEGAL_WORDS,
"audit_versions": AUDIT_VERSIONS,
"audit_suppressions": AUDIT_SUPPRESSIONS,
"minimum_coverage": MIN_COVERAGE,
"coverage_module": "tmlt.core",
"parallel_tests": False,
},
)

BENCHMARK_VALUES = [
("private_join", 35),
("count_sum", 25),
("quantile", 84),
("noise_mechanism", 7),
("sparkmap", 28),
("sparkflatmap", 12),
("public_join", 14),
BENCHMARKS = [
("private_join", 35 * 60),
("count_sum", 25 * 60),
("quantile", 84 * 60),
("noise_mechanism", 7 * 60),
("sparkmap", 28 * 60),
("sparkflatmap", 12 * 60),
("public_join", 14 * 60),
]
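# Note that BENCHMARKS expresses each timeout in seconds (hence the `* 60`),
# whereas the removed BENCHMARK_VALUES list passed minutes to `timeout {n}m`.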

_builder.black()
_builder.isort()
_builder.mypy()
_builder.pylint()
_builder.pydocstyle()
_builder.license_check()
_builder.illegal_words_check()
_builder.audit()

_builder.test()
_builder.test_doctest()
_builder.test_demos()
_builder.test_smoketest()
_builder.test_fast()
_builder.test_slow()
_builder.test_dependency_matrix()

_builder.docs_linkcheck()
_builder.docs_doctest()
_builder.docs()

_builder.release_test()
_builder.release_smoketest()

_builder.prepare_release()
_builder.post_release()

ids = []
dependency_pythons = []
dependency_packages = []
for config_id, config in DEPENDENCY_MATRIX.items():
ids.append(config_id)
try:
dependency_pythons.append(config.pop("python"))
except KeyError as e:
raise RuntimeError(
"Dependency matrix configurations must specify a Python minor version"
) from e
dependency_packages.append(config)


@poetry_session()
@install("cibuildwheel")
@session
@install_group("build")
def build(session):
"""Build packages for distribution.

Expand All @@ -249,190 +135,34 @@ def build(session):
session.run("cibuildwheel", "--output-dir", "dist/", *session.posargs)


@nox_session
@with_clean_workdir
@nox.parametrize(
"python,packages", zip(dependency_pythons, dependency_packages), ids=ids
sm = SessionManager(
PACKAGE_NAME, CWD,
custom_build=build,
smoketest_script=SMOKETEST_SCRIPT,
parallel_tests=False,
min_coverage=MIN_COVERAGE,
audit_versions=AUDIT_VERSIONS,
audit_suppressions=AUDIT_SUPPRESSIONS,
)
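# SessionManager replaces the old SessionBuilder and its options dict; each
# sm.* call below registers the corresponding nox session. tmlt.nox_utils is
# an internal helper package, so the constructor signature is as shown here.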
def benchmark_multi_deps(session, packages):
"""Run tests using various dependencies."""
session.log(f"Session name: {session.name}")
session.install(
f"{PACKAGE_NAME}=={PACKAGE_VERSION}",
"--find-links",
f"{CWD}/dist/",
"--only-binary",
PACKAGE_NAME,
)
session.install(*[pkg + version for pkg, version in packages.items()])
session.run("pip", "freeze")

(CWD / "benchmark_output").mkdir(exist_ok=True)
session.log("Exit code 124 indicates a timeout, others are script errors")
# If we want to run benchmarks on non-Linux platforms this will probably
# have to be reworked, but it's fine for now.
for script, timeout in BENCHMARK_VALUES:
session.run(
"timeout",
f"{timeout}m",
"python",
f"{CWD}/benchmark/benchmark_{script}.py",
external=True,
)


@poetry_session()
def get_mac_wheels(session):
"""Gets the build wheels for this commit.

Checks s3 first. If the wheels aren't yet in s3, get them from circleci and
uploads them.

Uses the AWS command line instead of boto3 because boto3 does not work well
with poetry.
"""
commit_hash = (
subprocess.run(["git", "rev-parse", "HEAD"], capture_output=True, check=True)
.stdout.decode("ascii")
.strip()
)
session.log(f"Fetching wheels for commit {commit_hash}...")

# If there is not yet anything for this commit, this will error and print
# nothing.
buckets = session.run(
"aws", "s3", "ls", f"s3://tumult.core-wheel-cache/{commit_hash}",
silent=True, success_codes=[0, 1]
)
sm.black()
sm.isort()
sm.mypy()
sm.pylint()
sm.pydocstyle()

sm.smoketest()
sm.release_smoketest()
sm.test()
sm.test_fast()
sm.test_slow()
sm.test_doctest()

if buckets == "":
session.log("Nothing in s3, fetching wheels from circleci.")
wheel_dir = Path(session.create_tmp())
try:
get_wheels_from_circleci(commit_hash, wheel_dir)
except RuntimeError as e:
session.error(str(e))
session.run(
"aws", "s3", "cp", "--recursive", f"{wheel_dir}",
f"s3://tumult.core-wheel-cache/{commit_hash}"
)

Path("dist").mkdir(exist_ok=True)

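# Pull the cached wheels from S3 into the working tree; the artifacts keep
# their dist/ prefix, so this repopulates the local dist/ directory.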
session.run(
"aws", "s3", "cp", "--recursive",
f"s3://tumult.core-wheel-cache/{commit_hash}",
".",
)


def get_wheels_from_circleci(commit_hash: str, wheel_dir: Path):
"""Get Core wheels for macOS x86 from CircleCI and save them to a directory.

This helper finds the CircleCI pipeline associated with the commit's SHA and
downloads the wheels from that pipeline's jobs.
"""
import polling2 # pylint: disable=import-outside-toplevel
import requests # pylint: disable=import-outside-toplevel

CIRCLECI_TOKEN = os.environ.get("CIRCLECI_API_TOKEN")
if not CIRCLECI_TOKEN:
raise RuntimeError("CIRCLECI_API_TOKEN not set, unable to get wheels from CircleCI")
headers = {
"Accept": "application/json",
"Circle-Token": CIRCLECI_TOKEN,
"Content-Type": "application/json",
}
PROJECT_SLUG = "circleci/GmqTygdwMo6PcdZd3KHo6P/Dw3pczSBYDhEDb4rML7i7i"
circle_org_slug = requests.get(
f"https://circleci.com/api/v2/project/{PROJECT_SLUG}",
headers=headers,
timeout=10,
).json()["organization_slug"]
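# Page through the organization's pipelines until we find a non-errored one
# whose GitLab trigger matches this commit's SHA; a next_page_token of None
# means the pages are exhausted.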
next_page_token = None
while True:
pipelines = requests.get(
"https://circleci.com/api/v2/pipeline",
params={"org-slug": circle_org_slug, "page-token": next_page_token},
headers=headers,
timeout=10,
).json()
commit_pipelines = [
p
for p in pipelines["items"]
if p["state"] != "errored"
and p.get("trigger_parameters", {}).get("gitlab", {}).get("commit_sha")
== commit_hash
]
if len(commit_pipelines) > 0:
break
next_page_token = pipelines["next_page_token"]
if next_page_token is None:
raise RuntimeError(
f"Unable to find a CircleCI pipeline for commit {commit_hash}; "
"cannot get wheels from CircleCI"
)

pipeline_id = commit_pipelines[0]["id"]
workflows = requests.get(
f"https://circleci.com/api/v2/pipeline/{pipeline_id}/workflow",
headers=headers,
timeout=10,
).json()
if "items" not in workflows or len(workflows["items"]) == 0:
raise RuntimeError(f"Unable to find CircleCI workflow for commit {commit_hash}")
workflow_id = workflows["items"][0]["id"]
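# Wait for the workflow to finish: poll every 10 seconds, for up to an hour,
# until its status is "success" (polling2 raises a TimeoutException if the
# timeout elapses first).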
polling2.poll(
lambda: requests.get( # pylint: disable=missing-timeout
f"https://circleci.com/api/v2/workflow/{workflow_id}",
headers=headers,
),
step=10,
timeout=60 * 60,
check_success=lambda response: response.json()["status"] == "success",
)
jobs = requests.get(
f"https://circleci.com/api/v2/workflow/{workflow_id}/job",
headers=headers,
timeout=10,
).json()
if "items" not in jobs or len(jobs["items"]) == 0:
raise ValueError(f"Unable to find CircleCI job for commit {commit_hash}")
for job in jobs["items"]:
job_no = job["job_number"]
artifacts = requests.get(
f"https://circleci.com/api/v2/project/{PROJECT_SLUG}/{job_no}/artifacts",
headers=headers,
timeout=10,
).json()
Path(wheel_dir / "dist").mkdir(exist_ok=True)
if "items" not in artifacts or len(artifacts["items"]) == 0:
raise RuntimeError(f"Unable to find wheels for commit {commit_hash} in job "
f"{job_no}. Have they expired?")
for artifact in artifacts["items"]:
with open(wheel_dir / artifact["path"], "wb") as f:
f.write(
requests.get(artifact["url"], headers=headers, timeout=10).content
)
sm.docs_linkcheck()
sm.docs_doctest()
sm.docs()

sm.audit()

@poetry_session(tags=["benchmark"])
@nox.parametrize("script,timeout", BENCHMARK_VALUES)
@_builder.install_package
@install("pytest")
@show_installed
def benchmark(session, script: str, timeout: int):
"""Run all benchmarks."""
(CWD / "benchmark_output").mkdir(exist_ok=True)
session.log("Exit code 124 indicates a timeout, others are script errors")
# If we want to run benchmarks on non-Linux platforms this will probably
# have to be reworked, but it's fine for now.
session.run(
"timeout",
f"{timeout}m",
"bash",
"-c",
f"time python {CWD}/benchmark/benchmark_{script}.py",
external=True,
)
for name, timeout in BENCHMARKS:
sm.benchmark(Path('benchmark') / f"{name}.py", timeout)
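# Each sm.benchmark(...) call presumably registers a per-script session that
# wraps the same `timeout`-based invocation the removed `benchmark` session
# performed. The scripts are now expected at benchmark/<name>.py rather than
# benchmark/benchmark_<name>.py, consistent with the renamed files above.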