From b51966745ccaee7935f01b000b46047c483bb016 Mon Sep 17 00:00:00 2001 From: beckermr Date: Mon, 15 Dec 2025 16:38:05 -0600 Subject: [PATCH 01/12] fix: try this approach for better syncs --- .github/workflows/bot-bot.yml | 2 +- conda_forge_tick/auto_tick.py | 8 +++++++- conda_forge_tick/deploy.py | 15 +++++++-------- conda_forge_tick/git_utils.py | 8 ++++++++ conda_forge_tick/update_upstream_versions.py | 3 +++ tests_integration/test_integration.py | 2 +- 6 files changed, 27 insertions(+), 11 deletions(-) diff --git a/.github/workflows/bot-bot.yml b/.github/workflows/bot-bot.yml index 029726d0c..ee9449970 100644 --- a/.github/workflows/bot-bot.yml +++ b/.github/workflows/bot-bot.yml @@ -79,7 +79,7 @@ jobs: pushd cf-graph export RUN_URL="https://github.com/regro/cf-scripts/actions/runs/${RUN_ID}" - conda-forge-tick deploy-to-github + conda-forge-tick deploy-to-github --git-only env: BOT_TOKEN: ${{ secrets.AUTOTICK_BOT_TOKEN }} RUN_ID: ${{ github.run_id }} diff --git a/conda_forge_tick/auto_tick.py b/conda_forge_tick/auto_tick.py index 54b8e06ff..ea9091c88 100644 --- a/conda_forge_tick/auto_tick.py +++ b/conda_forge_tick/auto_tick.py @@ -36,6 +36,7 @@ RepositoryNotFoundError, github_backend, is_github_api_limit_reached, + reset_and_restore_file, ) from conda_forge_tick.lazy_json_backends import ( LazyJson, @@ -532,7 +533,12 @@ def _make_and_sync_pr_lazy_json(pr_data) -> LazyJson | Literal[False]: __edit_pr_lazy_json.update(**pr_data.model_dump(mode="json")) if "id" in pr_lazy_json: - sync_lazy_json_object(pr_lazy_json, "file", ["github_api"]) + try: + sync_lazy_json_object(pr_lazy_json, "file", ["github_api"]) + except Exception: + pass + else: + reset_and_restore_file(pr_lazy_json.sharded_path) else: pr_lazy_json = False diff --git a/conda_forge_tick/deploy.py b/conda_forge_tick/deploy.py index 5d17c593f..3b11833b0 100644 --- a/conda_forge_tick/deploy.py +++ b/conda_forge_tick/deploy.py @@ -6,7 +6,12 @@ import time from .cli_context import 
CliContext -from .git_utils import delete_file_via_gh_api, get_bot_token, push_file_via_gh_api +from .git_utils import ( + delete_file_via_gh_api, + get_bot_token, + push_file_via_gh_api, + reset_and_restore_file, +) from .lazy_json_backends import ( CF_TICK_GRAPH_DATA_HASHMAPS, get_lazy_json_backends, @@ -213,12 +218,6 @@ def _get_pth_commit_message(pth): return msg -def _reset_and_restore_file(pth): - subprocess.run(["git", "reset", "--", pth], capture_output=True, text=True) - subprocess.run(["git", "restore", "--", pth], capture_output=True, text=True) - subprocess.run(["git", "clean", "-f", "--", pth], capture_output=True, text=True) - - def _deploy_via_api( do_git_ops: bool, files_to_add: set[str], @@ -274,7 +273,7 @@ def _deploy_via_api( do_git_ops = True for pth in files_done: - _reset_and_restore_file(pth) + reset_and_restore_file(pth) return do_git_ops, files_to_add, files_done, files_to_try_again diff --git a/conda_forge_tick/git_utils.py b/conda_forge_tick/git_utils.py index 646ace796..fb92187b7 100644 --- a/conda_forge_tick/git_utils.py +++ b/conda_forge_tick/git_utils.py @@ -1962,3 +1962,11 @@ def delete_file_via_gh_api(pth: str, repo_full_name: str, msg: str) -> None: interval = base**tr interval = rfrac * interval + (rfrac * RNG.uniform(0, 1) * interval) time.sleep(interval) + + +@lock_git_operation() +def reset_and_restore_file(pth: str): + """Reset the status of a file tracked by git to its version at the current commit.""" + subprocess.run(["git", "reset", "--", pth], capture_output=True, text=True) + subprocess.run(["git", "restore", "--", pth], capture_output=True, text=True) + subprocess.run(["git", "clean", "-f", "--", pth], capture_output=True, text=True) diff --git a/conda_forge_tick/update_upstream_versions.py b/conda_forge_tick/update_upstream_versions.py index 7cb1a4217..773734fcd 100644 --- a/conda_forge_tick/update_upstream_versions.py +++ b/conda_forge_tick/update_upstream_versions.py @@ -29,6 +29,7 @@ from 
conda_forge_tick.cli_context import CliContext from conda_forge_tick.executors import executor +from conda_forge_tick.git_utils import reset_and_restore_file from conda_forge_tick.lazy_json_backends import LazyJson, dumps, sync_lazy_json_object from conda_forge_tick.settings import ( ENV_CONDA_FORGE_ORG, @@ -469,6 +470,8 @@ def _update_upstream_versions_process_pool( except Exception: # will sync in deploy later if this fails pass + else: + reset_and_restore_file(version_attrs.sharded_path) @functools.lru_cache(maxsize=1) diff --git a/tests_integration/test_integration.py b/tests_integration/test_integration.py index c167ae9fc..0987483f3 100644 --- a/tests_integration/test_integration.py +++ b/tests_integration/test_integration.py @@ -306,7 +306,7 @@ def test_scenario( with in_fresh_cf_graph(): with mitmproxy_env(): invoke_bot_command(["--debug", "auto-tick"]) - invoke_bot_command(["--debug", "deploy-to-github"]) + invoke_bot_command(["--debug", "deploy-to-github", "--git-only"]) with in_fresh_cf_graph(): # because of an implementation detail in the bot, we need to run make-migrators twice From 228879f5fffd716608f7560ab78125836e7dc80e Mon Sep 17 00:00:00 2001 From: beckermr Date: Mon, 15 Dec 2025 16:44:59 -0600 Subject: [PATCH 02/12] feat: use last_fetched to detect updating PRs --- conda_forge_tick/events/pr_events.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/conda_forge_tick/events/pr_events.py b/conda_forge_tick/events/pr_events.py index ed92da607..2d6fdbc39 100644 --- a/conda_forge_tick/events/pr_events.py +++ b/conda_forge_tick/events/pr_events.py @@ -41,9 +41,9 @@ def _react_to_pr(uid: str, dry_run: bool = False) -> None: pr_data = refresh_pr(copy.deepcopy(pr_json.data), dry_run=dry_run) if pr_data is not None: if ( - "Last-Modified" in pr_json - and "Last-Modified" in pr_data - and pr_json["Last-Modified"] != pr_data["Last-Modified"] + "last_fetched" in pr_json + and "last_fetched" in pr_data + and 
pr_json["last_fetched"] != pr_data["last_fetched"] ): print("refreshed PR data", flush=True) pr_json.update(pr_data) @@ -52,9 +52,9 @@ def _react_to_pr(uid: str, dry_run: bool = False) -> None: pr_data = close_out_labels(copy.deepcopy(pr_json.data), dry_run=dry_run) if pr_data is not None: if ( - "Last-Modified" in pr_json - and "Last-Modified" in pr_data - and pr_json["Last-Modified"] != pr_data["Last-Modified"] + "last_fetched" in pr_json + and "last_fetched" in pr_data + and pr_json["last_fetched"] != pr_data["last_fetched"] ): print("closed PR due to bot-rerun label", flush=True) pr_json.update(pr_data) @@ -64,9 +64,9 @@ def _react_to_pr(uid: str, dry_run: bool = False) -> None: pr_data = refresh_pr(copy.deepcopy(pr_json.data), dry_run=dry_run) if pr_data is not None: if ( - "Last-Modified" in pr_json - and "Last-Modified" in pr_data - and pr_json["Last-Modified"] != pr_data["Last-Modified"] + "last_fetched" in pr_json + and "last_fetched" in pr_data + and pr_json["last_fetched"] != pr_data["last_fetched"] ): print("refreshed PR data", flush=True) pr_json.update(pr_data) @@ -77,9 +77,9 @@ def _react_to_pr(uid: str, dry_run: bool = False) -> None: ) if pr_data is not None: if ( - "Last-Modified" in pr_json - and "Last-Modified" in pr_data - and pr_json["Last-Modified"] != pr_data["Last-Modified"] + "last_fetched" in pr_json + and "last_fetched" in pr_data + and pr_json["last_fetched"] != pr_data["last_fetched"] ): print("closed PR due to merge conflicts", flush=True) pr_json.update(pr_data) From 4c9c7f4b1c64a1705d77737918c039ec1fa484d0 Mon Sep 17 00:00:00 2001 From: beckermr Date: Mon, 15 Dec 2025 16:51:10 -0600 Subject: [PATCH 03/12] fix: only use git for mid-bot deploy too --- conda_forge_tick/auto_tick.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/conda_forge_tick/auto_tick.py b/conda_forge_tick/auto_tick.py index ea9091c88..2e37fbbcb 100644 --- a/conda_forge_tick/auto_tick.py +++ b/conda_forge_tick/auto_tick.py @@ -1367,7 +1367,9 @@ 
def main(ctx: CliContext) -> None: with fold_log_lines("updating graph with PR info"): _update_graph_with_pr_info() - deploy(ctx, dirs_to_deploy=["version_pr_info", "pr_json", "pr_info"]) + deploy( + ctx, dirs_to_deploy=["version_pr_info", "pr_json", "pr_info"], git_only=True + ) # record tmp dir so we can be sure to clean it later temp = glob.glob("/tmp/*") From d1af25a4abf6c15bc1927144c6129af8109fcae4 Mon Sep 17 00:00:00 2001 From: beckermr Date: Mon, 15 Dec 2025 17:51:53 -0600 Subject: [PATCH 04/12] doc: comments and run one test --- conda_forge_tick/auto_tick.py | 4 ++++ conda_forge_tick/update_upstream_versions.py | 3 +++ tests_integration/lib/_definitions/__init__.py | 8 ++++---- 3 files changed, 11 insertions(+), 4 deletions(-) diff --git a/conda_forge_tick/auto_tick.py b/conda_forge_tick/auto_tick.py index 2e37fbbcb..adbb1b2ad 100644 --- a/conda_forge_tick/auto_tick.py +++ b/conda_forge_tick/auto_tick.py @@ -536,8 +536,12 @@ def _make_and_sync_pr_lazy_json(pr_data) -> LazyJson | Literal[False]: try: sync_lazy_json_object(pr_lazy_json, "file", ["github_api"]) except Exception: + # we will deploy via git later if this fails pass else: + # this function removes the local copy of the pr_json on disk + # when the deploy via git happens, the bot will ignore this + # bit of pr_json completely and prefer the copy already pushed reset_and_restore_file(pr_lazy_json.sharded_path) else: diff --git a/conda_forge_tick/update_upstream_versions.py b/conda_forge_tick/update_upstream_versions.py index 773734fcd..9590b6aa7 100644 --- a/conda_forge_tick/update_upstream_versions.py +++ b/conda_forge_tick/update_upstream_versions.py @@ -471,6 +471,9 @@ def _update_upstream_versions_process_pool( # will sync in deploy later if this fails pass else: + # this function removes the local copy of the pr_json on disk + # when the deploy via git happens, the bot will ignore this + # bit of pr_json completely and prefer the copy already pushed 
reset_and_restore_file(version_attrs.sharded_path) diff --git a/tests_integration/lib/_definitions/__init__.py b/tests_integration/lib/_definitions/__init__.py index e28255398..3155372e2 100644 --- a/tests_integration/lib/_definitions/__init__.py +++ b/tests_integration/lib/_definitions/__init__.py @@ -2,11 +2,11 @@ from .base_classes import AbstractIntegrationTestHelper, GitHubAccount, TestCase TEST_CASE_MAPPING: dict[str, list[TestCase]] = { - "conda-forge-pinning": conda_forge_pinning.ALL_TEST_CASES, + # "conda-forge-pinning": conda_forge_pinning.ALL_TEST_CASES, "fastapi": fastapi.ALL_TEST_CASES, - "polars": polars.ALL_TEST_CASES, - "pydantic": pydantic.ALL_TEST_CASES, - "zizmor": zizmor.ALL_TEST_CASES, + # "polars": polars.ALL_TEST_CASES, + # "pydantic": pydantic.ALL_TEST_CASES, + # "zizmor": zizmor.ALL_TEST_CASES, } """ Maps from feedstock name to a list of all test cases for that feedstock. From c7cb24547455c65b4d378aad12482e080937aa3c Mon Sep 17 00:00:00 2001 From: beckermr Date: Mon, 15 Dec 2025 18:12:08 -0600 Subject: [PATCH 05/12] fix: try not deploying --- .github/workflows/bot-bot.yml | 2 +- conda_forge_tick/auto_tick.py | 13 +------------ conda_forge_tick/cli.py | 10 ++++++++-- conda_forge_tick/deploy.py | 13 ++++++++++++- conda_forge_tick/update_upstream_versions.py | 6 ------ tests_integration/lib/_definitions/__init__.py | 1 + tests_integration/test_integration.py | 2 +- 7 files changed, 24 insertions(+), 23 deletions(-) diff --git a/.github/workflows/bot-bot.yml b/.github/workflows/bot-bot.yml index ee9449970..5f0b85c77 100644 --- a/.github/workflows/bot-bot.yml +++ b/.github/workflows/bot-bot.yml @@ -79,7 +79,7 @@ jobs: pushd cf-graph export RUN_URL="https://github.com/regro/cf-scripts/actions/runs/${RUN_ID}" - conda-forge-tick deploy-to-github --git-only + conda-forge-tick deploy-to-github --git-only --dirs-to-ignore="pr_json" env: BOT_TOKEN: ${{ secrets.AUTOTICK_BOT_TOKEN }} RUN_ID: ${{ github.run_id }} diff --git 
a/conda_forge_tick/auto_tick.py b/conda_forge_tick/auto_tick.py index adbb1b2ad..578916991 100644 --- a/conda_forge_tick/auto_tick.py +++ b/conda_forge_tick/auto_tick.py @@ -36,7 +36,6 @@ RepositoryNotFoundError, github_backend, is_github_api_limit_reached, - reset_and_restore_file, ) from conda_forge_tick.lazy_json_backends import ( LazyJson, @@ -533,17 +532,7 @@ def _make_and_sync_pr_lazy_json(pr_data) -> LazyJson | Literal[False]: __edit_pr_lazy_json.update(**pr_data.model_dump(mode="json")) if "id" in pr_lazy_json: - try: - sync_lazy_json_object(pr_lazy_json, "file", ["github_api"]) - except Exception: - # we will deploy via git later if this fails - pass - else: - # this function removes the local copy of the pr_json on disk - # when the deploy via git happens, the bot will ignore this - # bit of pr_json completely and prefer the copy already pushed - reset_and_restore_file(pr_lazy_json.sharded_path) - + sync_lazy_json_object(pr_lazy_json, "file", ["github_api"]) else: pr_lazy_json = False diff --git a/conda_forge_tick/cli.py b/conda_forge_tick/cli.py index 284b20886..24be90348 100644 --- a/conda_forge_tick/cli.py +++ b/conda_forge_tick/cli.py @@ -210,11 +210,17 @@ def make_mappings() -> None: is_flag=True, help="If given, only deploy graph data to GitHub via the git command line.", ) +@click.option( + "--dirs-to-ignore", + default=None, + help=("Comma-separated list of directories to ignore. If given, directories will " + "not be deployed.") +) @pass_context -def deploy_to_github(ctx: CliContext, git_only: bool) -> None: +def deploy_to_github(ctx: CliContext, git_only: bool, dirs_to_ignore: str) -> None: from . 
import deploy - deploy.deploy(ctx, git_only=git_only) + deploy.deploy(ctx, git_only=git_only, dirs_to_ignore=[] if dirs_to_ignore is None else dirs_to_ignore.split(",")) @main.command(name="backup-lazy-json") diff --git a/conda_forge_tick/deploy.py b/conda_forge_tick/deploy.py index 3b11833b0..5d8f599f3 100644 --- a/conda_forge_tick/deploy.py +++ b/conda_forge_tick/deploy.py @@ -279,7 +279,10 @@ def _deploy_via_api( def deploy( - ctx: CliContext, dirs_to_deploy: list[str] | None = None, git_only: bool = False + ctx: CliContext, + dirs_to_deploy: list[str] | None = None, + git_only: bool = False, + dirs_to_ignore: list[str] | None = None, ): """Deploy the graph to GitHub.""" if ctx.dry_run: @@ -303,8 +306,16 @@ def deploy( drs_to_deploy += CF_TICK_GRAPH_DATA_HASHMAPS drs_to_deploy += ["graph.json"] else: + if dirs_to_ignore is not None: + raise RuntimeError( + "You cannot specify both `dirs_to_deploy` " + "and `dirs_to_ignore` when deploying the graph!" + ) drs_to_deploy = dirs_to_deploy + if dirs_to_ignore is not None: + drs_to_deploy = [dr for dr in drs_to_deploy if dr not in dirs_to_ignore] + for dr in drs_to_deploy: if not os.path.exists(dr): continue diff --git a/conda_forge_tick/update_upstream_versions.py b/conda_forge_tick/update_upstream_versions.py index 9590b6aa7..7cb1a4217 100644 --- a/conda_forge_tick/update_upstream_versions.py +++ b/conda_forge_tick/update_upstream_versions.py @@ -29,7 +29,6 @@ from conda_forge_tick.cli_context import CliContext from conda_forge_tick.executors import executor -from conda_forge_tick.git_utils import reset_and_restore_file from conda_forge_tick.lazy_json_backends import LazyJson, dumps, sync_lazy_json_object from conda_forge_tick.settings import ( ENV_CONDA_FORGE_ORG, @@ -470,11 +469,6 @@ def _update_upstream_versions_process_pool( except Exception: # will sync in deploy later if this fails pass - else: - # this function removes the local copy of the pr_json on disk - # when the deploy via git happens, the bot will 
ignore this - # bit of pr_json completely and prefer the copy already pushed - reset_and_restore_file(version_attrs.sharded_path) @functools.lru_cache(maxsize=1) diff --git a/tests_integration/lib/_definitions/__init__.py b/tests_integration/lib/_definitions/__init__.py index 3155372e2..e24a5a613 100644 --- a/tests_integration/lib/_definitions/__init__.py +++ b/tests_integration/lib/_definitions/__init__.py @@ -3,6 +3,7 @@ TEST_CASE_MAPPING: dict[str, list[TestCase]] = { # "conda-forge-pinning": conda_forge_pinning.ALL_TEST_CASES, + # FIXME "fastapi": fastapi.ALL_TEST_CASES, # "polars": polars.ALL_TEST_CASES, # "pydantic": pydantic.ALL_TEST_CASES, diff --git a/tests_integration/test_integration.py b/tests_integration/test_integration.py index 0987483f3..6b0295485 100644 --- a/tests_integration/test_integration.py +++ b/tests_integration/test_integration.py @@ -244,7 +244,7 @@ def invoke_bot_command(args: list[str]): cli.main(args, standalone_mode=False) -@pytest.mark.parametrize("use_containers", [False, True]) +@pytest.mark.parametrize("use_containers", [False]) # FIXME - put this back, True]) def test_scenario( use_containers: bool, scenario: tuple[int, dict[str, TestCase]], From 39e27d1739e93ed5fe9acb543e7d7f53a15e630c Mon Sep 17 00:00:00 2001 From: beckermr Date: Tue, 16 Dec 2025 06:43:39 -0600 Subject: [PATCH 06/12] fix: reset ignored files --- conda_forge_tick/deploy.py | 49 ++++++++++++++++----------- tests_integration/test_integration.py | 4 +-- 2 files changed, 31 insertions(+), 22 deletions(-) diff --git a/conda_forge_tick/deploy.py b/conda_forge_tick/deploy.py index 5d8f599f3..cf13e49d1 100644 --- a/conda_forge_tick/deploy.py +++ b/conda_forge_tick/deploy.py @@ -136,20 +136,9 @@ def _deploy_batch( except Exception as e: print(e, flush=True) - # make sure the graph can load, if not we will error - try: - gx = load_existing_graph() - # TODO: be more selective about which json to check - for node, attrs in gx.nodes.items(): - with attrs["payload"]: - pass - 
graph_ok = True - except Exception: - graph_ok = False - status = 1 num_try = 0 - while status != 0 and num_try < 20 and graph_ok: + while status != 0 and num_try < 20: with fold_log_lines(">>>>>>>>>>>> git pull+push try %d" % num_try): try: print(">>>>>>>>>>>> git pull", flush=True) @@ -180,7 +169,7 @@ def _deploy_batch( time.sleep(interval) num_try += 1 - if status != 0 or not graph_ok: + if status != 0: # we did try to push to a branch but it never worked so we'll just stop raise RuntimeError("bot did not push its data! stopping!") @@ -284,11 +273,17 @@ def deploy( git_only: bool = False, dirs_to_ignore: list[str] | None = None, ): - """Deploy the graph to GitHub.""" if ctx.dry_run: - print("(dry run) deploying") + print("(dry run) deploying", flush=True) return + # make sure the graph can load, if not it will error + gx = load_existing_graph() + # TODO: be more selective about which json to check + for node, attrs in gx.nodes.items(): + with attrs["payload"]: + pass + with fold_log_lines("cleaning up disk space for deploy"): clean_disk_space() @@ -313,9 +308,6 @@ def deploy( ) drs_to_deploy = dirs_to_deploy - if dirs_to_ignore is not None: - drs_to_deploy = [dr for dr in drs_to_deploy if dr not in dirs_to_ignore] - for dr in drs_to_deploy: if not os.path.exists(dr): continue @@ -347,9 +339,26 @@ def deploy( ).stdout.splitlines(), ) - print("found %d files to add" % len(files_to_add), flush=True) - files_to_delete = _get_files_to_delete() + + if dirs_to_ignore is not None: + new_files_to_add = set() + for fn in files_to_add: + if any(fn.startswith(f"{dr}/") for dr in dirs_to_ignore): + reset_and_restore_file(fn) + else: + new_files_to_add.add(fn) + files_to_add = new_files_to_add + + new_files_to_delete = set() + for fn in files_to_delete: + if any(fn.startswith(f"{dr}/") for dr in dirs_to_ignore): + reset_and_restore_file(fn) + else: + new_files_to_delete.add(fn) + files_to_delete = new_files_to_delete + + print("found %d files to add" % len(files_to_add), 
flush=True) print("found %d files to delete" % len(files_to_delete), flush=True) do_git_ops = False diff --git a/tests_integration/test_integration.py b/tests_integration/test_integration.py index 6b0295485..8615f26ff 100644 --- a/tests_integration/test_integration.py +++ b/tests_integration/test_integration.py @@ -306,7 +306,7 @@ def test_scenario( with in_fresh_cf_graph(): with mitmproxy_env(): invoke_bot_command(["--debug", "auto-tick"]) - invoke_bot_command(["--debug", "deploy-to-github", "--git-only"]) + invoke_bot_command(["--debug", "deploy-to-github", "--git-only", "--dirs-to-ignore='pr_json'"]) with in_fresh_cf_graph(): # because of an implementation detail in the bot, we need to run make-migrators twice @@ -320,6 +320,6 @@ def test_scenario( # for changes to be picked up with mitmproxy_env(): invoke_bot_command(["--debug", "auto-tick"]) - invoke_bot_command(["--debug", "deploy-to-github", "--git-only"]) + invoke_bot_command(["--debug", "deploy-to-github", "--git-only", "--dirs-to-ignore='pr_json'"]) run_all_validate_functions(scenario) From b0b33c8740908b5c70dff3b43c34e0affdb4ba30 Mon Sep 17 00:00:00 2001 From: beckermr Date: Tue, 16 Dec 2025 06:57:22 -0600 Subject: [PATCH 07/12] debug: what is happening --- conda_forge_tick/deploy.py | 3 +++ tests_integration/test_integration.py | 4 ++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/conda_forge_tick/deploy.py b/conda_forge_tick/deploy.py index cf13e49d1..9c219522b 100644 --- a/conda_forge_tick/deploy.py +++ b/conda_forge_tick/deploy.py @@ -342,10 +342,12 @@ def deploy( files_to_delete = _get_files_to_delete() if dirs_to_ignore is not None: + print("ignoring dirs:", dirs_to_ignore, flush=True) new_files_to_add = set() for fn in files_to_add: if any(fn.startswith(f"{dr}/") for dr in dirs_to_ignore): reset_and_restore_file(fn) + print("ignoring file to add:", fn, flush=True) else: new_files_to_add.add(fn) files_to_add = new_files_to_add @@ -354,6 +356,7 @@ def deploy( for fn in 
files_to_delete: if any(fn.startswith(f"{dr}/") for dr in dirs_to_ignore): reset_and_restore_file(fn) + print("ignoring file to delete:", fn, flush=True) else: new_files_to_delete.add(fn) files_to_delete = new_files_to_delete diff --git a/tests_integration/test_integration.py b/tests_integration/test_integration.py index 8615f26ff..0eec70320 100644 --- a/tests_integration/test_integration.py +++ b/tests_integration/test_integration.py @@ -306,7 +306,7 @@ def test_scenario( with in_fresh_cf_graph(): with mitmproxy_env(): invoke_bot_command(["--debug", "auto-tick"]) - invoke_bot_command(["--debug", "deploy-to-github", "--git-only", "--dirs-to-ignore='pr_json'"]) + invoke_bot_command(["--debug", "deploy-to-github", "--git-only", "--dirs-to-ignore=pr_json"]) with in_fresh_cf_graph(): # because of an implementation detail in the bot, we need to run make-migrators twice @@ -320,6 +320,6 @@ def test_scenario( # for changes to be picked up with mitmproxy_env(): invoke_bot_command(["--debug", "auto-tick"]) - invoke_bot_command(["--debug", "deploy-to-github", "--git-only", "--dirs-to-ignore='pr_json'"]) + invoke_bot_command(["--debug", "deploy-to-github", "--git-only", "--dirs-to-ignore=pr_json"]) run_all_validate_functions(scenario) From d7f1d1fceb7f9ecb9451b39cada5e99ce18ea4e1 Mon Sep 17 00:00:00 2001 From: beckermr Date: Tue, 16 Dec 2025 07:06:52 -0600 Subject: [PATCH 08/12] test: put stuff back --- conda_forge_tick/cli.py | 12 +++++++++--- tests_integration/lib/_definitions/__init__.py | 9 ++++----- tests_integration/test_integration.py | 10 +++++++--- 3 files changed, 20 insertions(+), 11 deletions(-) diff --git a/conda_forge_tick/cli.py b/conda_forge_tick/cli.py index 24be90348..3d0c76125 100644 --- a/conda_forge_tick/cli.py +++ b/conda_forge_tick/cli.py @@ -213,14 +213,20 @@ def make_mappings() -> None: @click.option( "--dirs-to-ignore", default=None, - help=("Comma-separated list of directories to ignore. 
If given, directories will " - "not be deployed.") + help=( + "Comma-separated list of directories to ignore. If given, directories will " + "not be deployed." + ), ) @pass_context def deploy_to_github(ctx: CliContext, git_only: bool, dirs_to_ignore: str) -> None: from . import deploy - deploy.deploy(ctx, git_only=git_only, dirs_to_ignore=[] if dirs_to_ignore is None else dirs_to_ignore.split(",")) + deploy.deploy( + ctx, + git_only=git_only, + dirs_to_ignore=[] if dirs_to_ignore is None else dirs_to_ignore.split(","), + ) @main.command(name="backup-lazy-json") diff --git a/tests_integration/lib/_definitions/__init__.py b/tests_integration/lib/_definitions/__init__.py index e24a5a613..e28255398 100644 --- a/tests_integration/lib/_definitions/__init__.py +++ b/tests_integration/lib/_definitions/__init__.py @@ -2,12 +2,11 @@ from .base_classes import AbstractIntegrationTestHelper, GitHubAccount, TestCase TEST_CASE_MAPPING: dict[str, list[TestCase]] = { - # "conda-forge-pinning": conda_forge_pinning.ALL_TEST_CASES, - # FIXME + "conda-forge-pinning": conda_forge_pinning.ALL_TEST_CASES, "fastapi": fastapi.ALL_TEST_CASES, - # "polars": polars.ALL_TEST_CASES, - # "pydantic": pydantic.ALL_TEST_CASES, - # "zizmor": zizmor.ALL_TEST_CASES, + "polars": polars.ALL_TEST_CASES, + "pydantic": pydantic.ALL_TEST_CASES, + "zizmor": zizmor.ALL_TEST_CASES, } """ Maps from feedstock name to a list of all test cases for that feedstock. 
diff --git a/tests_integration/test_integration.py b/tests_integration/test_integration.py index 0eec70320..0fb474d49 100644 --- a/tests_integration/test_integration.py +++ b/tests_integration/test_integration.py @@ -244,7 +244,7 @@ def invoke_bot_command(args: list[str]): cli.main(args, standalone_mode=False) -@pytest.mark.parametrize("use_containers", [False]) # FIXME - put this back, True]) +@pytest.mark.parametrize("use_containers", [False, True]) def test_scenario( use_containers: bool, scenario: tuple[int, dict[str, TestCase]], @@ -306,7 +306,9 @@ def test_scenario( with in_fresh_cf_graph(): with mitmproxy_env(): invoke_bot_command(["--debug", "auto-tick"]) - invoke_bot_command(["--debug", "deploy-to-github", "--git-only", "--dirs-to-ignore=pr_json"]) + invoke_bot_command( + ["--debug", "deploy-to-github", "--git-only", "--dirs-to-ignore=pr_json"] + ) with in_fresh_cf_graph(): # because of an implementation detail in the bot, we need to run make-migrators twice @@ -320,6 +322,8 @@ def test_scenario( # for changes to be picked up with mitmproxy_env(): invoke_bot_command(["--debug", "auto-tick"]) - invoke_bot_command(["--debug", "deploy-to-github", "--git-only", "--dirs-to-ignore=pr_json"]) + invoke_bot_command( + ["--debug", "deploy-to-github", "--git-only", "--dirs-to-ignore=pr_json"] + ) run_all_validate_functions(scenario) From 8c2b2fc072454da193566a28cc38793a5f4f863e Mon Sep 17 00:00:00 2001 From: beckermr Date: Tue, 16 Dec 2025 07:08:03 -0600 Subject: [PATCH 09/12] fix: no quotes --- .github/workflows/bot-bot.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/bot-bot.yml b/.github/workflows/bot-bot.yml index 5f0b85c77..bcdf55642 100644 --- a/.github/workflows/bot-bot.yml +++ b/.github/workflows/bot-bot.yml @@ -79,7 +79,7 @@ jobs: pushd cf-graph export RUN_URL="https://github.com/regro/cf-scripts/actions/runs/${RUN_ID}" - conda-forge-tick deploy-to-github --git-only --dirs-to-ignore="pr_json" + 
conda-forge-tick deploy-to-github --git-only --dirs-to-ignore=pr_json env: BOT_TOKEN: ${{ secrets.AUTOTICK_BOT_TOKEN }} RUN_ID: ${{ github.run_id }} From 769fa5cd6150b3c31fb60e78f9046d8137d58e8b Mon Sep 17 00:00:00 2001 From: "Matthew R. Becker" Date: Tue, 16 Dec 2025 07:08:26 -0600 Subject: [PATCH 10/12] Apply suggestion from @beckermr --- conda_forge_tick/auto_tick.py | 1 + 1 file changed, 1 insertion(+) diff --git a/conda_forge_tick/auto_tick.py b/conda_forge_tick/auto_tick.py index 578916991..21919db8f 100644 --- a/conda_forge_tick/auto_tick.py +++ b/conda_forge_tick/auto_tick.py @@ -533,6 +533,7 @@ def _make_and_sync_pr_lazy_json(pr_data) -> LazyJson | Literal[False]: if "id" in pr_lazy_json: sync_lazy_json_object(pr_lazy_json, "file", ["github_api"]) + else: pr_lazy_json = False From bfbe3d5c5aab1b517825d16e1b853253a8fabdfb Mon Sep 17 00:00:00 2001 From: beckermr Date: Tue, 16 Dec 2025 07:14:04 -0600 Subject: [PATCH 11/12] test: add test for new cli option --- tests/test_cli.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/tests/test_cli.py b/tests/test_cli.py index 6aa8c8a10..ddd8bfae6 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -172,6 +172,28 @@ def test_cli_mock_deploy_to_github_git_only( cmd_mock.assert_called_once_with(mock.ANY, git_only=git_only) +@pytest.mark.parametrize("dirs_to_ignore", [None, "pr_json", "pr_json,pr_info"]) +@mock.patch("conda_forge_tick.deploy.deploy") +def test_cli_mock_deploy_to_github_dirs_to_ignore( + cmd_mock: MagicMock, + dirs_to_ignore, +): + runner = CliRunner() + result = runner.invoke( + main, + ["deploy-to-github"] + + ([f"--dirs-to-ignore={dirs_to_ignore}"] if dirs_to_ignore else []), + ) + + if dirs_to_ignore: + kws = {"dirs_to_ignore": dirs_to_ignore.split(",")} + else: + kws = {"dirs_to_ignore": []} + + assert result.exit_code == 0 + cmd_mock.assert_called_once_with(mock.ANY, git_only=False, **kws) + + @pytest.mark.parametrize( "job, n_jobs, feedstock", [(1, 5, None), (3, 7, 
None), (4, 4, "foo")] ) From ba931cd2e58fef1f3360c5c525154042d392a0c7 Mon Sep 17 00:00:00 2001 From: beckermr Date: Tue, 16 Dec 2025 08:19:32 -0600 Subject: [PATCH 12/12] test: update test for new cli option --- tests/test_cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_cli.py b/tests/test_cli.py index ddd8bfae6..244390587 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -169,7 +169,7 @@ def test_cli_mock_deploy_to_github_git_only( ) assert result.exit_code == 0 - cmd_mock.assert_called_once_with(mock.ANY, git_only=git_only) + cmd_mock.assert_called_once_with(mock.ANY, git_only=git_only, dirs_to_ignore=[]) @pytest.mark.parametrize("dirs_to_ignore", [None, "pr_json", "pr_json,pr_info"])