Skip to content
Merged
Show file tree
Hide file tree
Changes from 8 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/bot-bot.yml
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,7 @@ jobs:
pushd cf-graph

export RUN_URL="https://github.com/regro/cf-scripts/actions/runs/${RUN_ID}"
conda-forge-tick deploy-to-github
conda-forge-tick deploy-to-github --git-only --dirs-to-ignore="pr_json"
env:
BOT_TOKEN: ${{ secrets.AUTOTICK_BOT_TOKEN }}
RUN_ID: ${{ github.run_id }}
Expand Down
5 changes: 3 additions & 2 deletions conda_forge_tick/auto_tick.py
Original file line number Diff line number Diff line change
Expand Up @@ -533,7 +533,6 @@ def _make_and_sync_pr_lazy_json(pr_data) -> LazyJson | Literal[False]:

if "id" in pr_lazy_json:
sync_lazy_json_object(pr_lazy_json, "file", ["github_api"])

else:
pr_lazy_json = False

Expand Down Expand Up @@ -1361,7 +1360,9 @@ def main(ctx: CliContext) -> None:

with fold_log_lines("updating graph with PR info"):
_update_graph_with_pr_info()
deploy(ctx, dirs_to_deploy=["version_pr_info", "pr_json", "pr_info"])
deploy(
ctx, dirs_to_deploy=["version_pr_info", "pr_json", "pr_info"], git_only=True
)

# record tmp dir so we can be sure to clean it later
temp = glob.glob("/tmp/*")
Expand Down
16 changes: 14 additions & 2 deletions conda_forge_tick/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -210,11 +210,23 @@ def make_mappings() -> None:
is_flag=True,
help="If given, only deploy graph data to GitHub via the git command line.",
)
@click.option(
"--dirs-to-ignore",
default=None,
help=(
"Comma-separated list of directories to ignore. If given, directories will "
"not be deployed."
),
)
@pass_context
def deploy_to_github(ctx: CliContext, git_only: bool) -> None:
def deploy_to_github(ctx: CliContext, git_only: bool, dirs_to_ignore: str) -> None:
from . import deploy

deploy.deploy(ctx, git_only=git_only)
deploy.deploy(
ctx,
git_only=git_only,
dirs_to_ignore=[] if dirs_to_ignore is None else dirs_to_ignore.split(","),
)


@main.command(name="backup-lazy-json")
Expand Down
74 changes: 48 additions & 26 deletions conda_forge_tick/deploy.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,12 @@
import time

from .cli_context import CliContext
from .git_utils import delete_file_via_gh_api, get_bot_token, push_file_via_gh_api
from .git_utils import (
delete_file_via_gh_api,
get_bot_token,
push_file_via_gh_api,
reset_and_restore_file,
)
from .lazy_json_backends import (
CF_TICK_GRAPH_DATA_HASHMAPS,
get_lazy_json_backends,
Expand Down Expand Up @@ -131,20 +136,9 @@ def _deploy_batch(
except Exception as e:
print(e, flush=True)

# make sure the graph can load, if not we will error
try:
gx = load_existing_graph()
# TODO: be more selective about which json to check
for node, attrs in gx.nodes.items():
with attrs["payload"]:
pass
graph_ok = True
except Exception:
graph_ok = False

status = 1
num_try = 0
while status != 0 and num_try < 20 and graph_ok:
while status != 0 and num_try < 20:
with fold_log_lines(">>>>>>>>>>>> git pull+push try %d" % num_try):
try:
print(">>>>>>>>>>>> git pull", flush=True)
Expand Down Expand Up @@ -175,7 +169,7 @@ def _deploy_batch(
time.sleep(interval)
num_try += 1

if status != 0 or not graph_ok:
if status != 0:
# we did try to push to a branch but it never worked so we'll just stop
raise RuntimeError("bot did not push its data! stopping!")

Expand Down Expand Up @@ -213,12 +207,6 @@ def _get_pth_commit_message(pth):
return msg


def _reset_and_restore_file(pth):
    """Revert *pth* to its state at the current commit.

    Unstages (``git reset``), restores the working-tree copy
    (``git restore``), and removes it if untracked (``git clean -f``).
    Output is captured and return codes are not checked, so git
    failures are silently ignored.
    """
    for git_args in (["reset"], ["restore"], ["clean", "-f"]):
        subprocess.run(
            ["git", *git_args, "--", pth], capture_output=True, text=True
        )


def _deploy_via_api(
do_git_ops: bool,
files_to_add: set[str],
Expand Down Expand Up @@ -274,19 +262,28 @@ def _deploy_via_api(
do_git_ops = True

for pth in files_done:
_reset_and_restore_file(pth)
reset_and_restore_file(pth)

return do_git_ops, files_to_add, files_done, files_to_try_again


def deploy(
ctx: CliContext, dirs_to_deploy: list[str] | None = None, git_only: bool = False
ctx: CliContext,
dirs_to_deploy: list[str] | None = None,
git_only: bool = False,
dirs_to_ignore: list[str] | None = None,
):
"""Deploy the graph to GitHub."""
if ctx.dry_run:
print("(dry run) deploying")
print("(dry run) deploying", flush=True)
return

# make sure the graph can load, if not it will error
gx = load_existing_graph()
# TODO: be more selective about which json to check
for node, attrs in gx.nodes.items():
with attrs["payload"]:
pass

with fold_log_lines("cleaning up disk space for deploy"):
clean_disk_space()

Expand All @@ -304,6 +301,11 @@ def deploy(
drs_to_deploy += CF_TICK_GRAPH_DATA_HASHMAPS
drs_to_deploy += ["graph.json"]
else:
if dirs_to_ignore is not None:
raise RuntimeError(
"You cannot specify both `dirs_to_deploy` "
"and `dirs_to_ignore` when deploying the graph!"
)
drs_to_deploy = dirs_to_deploy

for dr in drs_to_deploy:
Expand Down Expand Up @@ -337,9 +339,29 @@ def deploy(
).stdout.splitlines(),
)

print("found %d files to add" % len(files_to_add), flush=True)

files_to_delete = _get_files_to_delete()

if dirs_to_ignore is not None:
print("ignoring dirs:", dirs_to_ignore, flush=True)
new_files_to_add = set()
for fn in files_to_add:
if any(fn.startswith(f"{dr}/") for dr in dirs_to_ignore):
reset_and_restore_file(fn)
print("ignoring file to add:", fn, flush=True)
else:
new_files_to_add.add(fn)
files_to_add = new_files_to_add

new_files_to_delete = set()
for fn in files_to_delete:
if any(fn.startswith(f"{dr}/") for dr in dirs_to_ignore):
reset_and_restore_file(fn)
print("ignoring file to delete:", fn, flush=True)
else:
new_files_to_delete.add(fn)
files_to_delete = new_files_to_delete

print("found %d files to add" % len(files_to_add), flush=True)
print("found %d files to delete" % len(files_to_delete), flush=True)

do_git_ops = False
Expand Down
24 changes: 12 additions & 12 deletions conda_forge_tick/events/pr_events.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,9 +41,9 @@ def _react_to_pr(uid: str, dry_run: bool = False) -> None:
pr_data = refresh_pr(copy.deepcopy(pr_json.data), dry_run=dry_run)
if pr_data is not None:
if (
"Last-Modified" in pr_json
and "Last-Modified" in pr_data
and pr_json["Last-Modified"] != pr_data["Last-Modified"]
"last_fetched" in pr_json
and "last_fetched" in pr_data
and pr_json["last_fetched"] != pr_data["last_fetched"]
):
print("refreshed PR data", flush=True)
pr_json.update(pr_data)
Expand All @@ -52,9 +52,9 @@ def _react_to_pr(uid: str, dry_run: bool = False) -> None:
pr_data = close_out_labels(copy.deepcopy(pr_json.data), dry_run=dry_run)
if pr_data is not None:
if (
"Last-Modified" in pr_json
and "Last-Modified" in pr_data
and pr_json["Last-Modified"] != pr_data["Last-Modified"]
"last_fetched" in pr_json
and "last_fetched" in pr_data
and pr_json["last_fetched"] != pr_data["last_fetched"]
):
print("closed PR due to bot-rerun label", flush=True)
pr_json.update(pr_data)
Expand All @@ -64,9 +64,9 @@ def _react_to_pr(uid: str, dry_run: bool = False) -> None:
pr_data = refresh_pr(copy.deepcopy(pr_json.data), dry_run=dry_run)
if pr_data is not None:
if (
"Last-Modified" in pr_json
and "Last-Modified" in pr_data
and pr_json["Last-Modified"] != pr_data["Last-Modified"]
"last_fetched" in pr_json
and "last_fetched" in pr_data
and pr_json["last_fetched"] != pr_data["last_fetched"]
):
print("refreshed PR data", flush=True)
pr_json.update(pr_data)
Expand All @@ -77,9 +77,9 @@ def _react_to_pr(uid: str, dry_run: bool = False) -> None:
)
if pr_data is not None:
if (
"Last-Modified" in pr_json
and "Last-Modified" in pr_data
and pr_json["Last-Modified"] != pr_data["Last-Modified"]
"last_fetched" in pr_json
and "last_fetched" in pr_data
and pr_json["last_fetched"] != pr_data["last_fetched"]
):
print("closed PR due to merge conflicts", flush=True)
pr_json.update(pr_data)
Expand Down
8 changes: 8 additions & 0 deletions conda_forge_tick/git_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -1962,3 +1962,11 @@ def delete_file_via_gh_api(pth: str, repo_full_name: str, msg: str) -> None:
interval = base**tr
interval = rfrac * interval + (rfrac * RNG.uniform(0, 1) * interval)
time.sleep(interval)


@lock_git_operation()
def reset_and_restore_file(pth: str):
    """Reset the status of a file tracked by git to its version at the current commit.

    Runs, in order: ``git reset`` (unstage), ``git restore`` (discard
    working-tree edits), and ``git clean -f`` (remove if untracked).
    Each command's output is captured and its exit status is ignored.
    """
    commands = (
        ["git", "reset", "--", pth],
        ["git", "restore", "--", pth],
        ["git", "clean", "-f", "--", pth],
    )
    for cmd in commands:
        subprocess.run(cmd, capture_output=True, text=True)
8 changes: 6 additions & 2 deletions tests_integration/test_integration.py
Original file line number Diff line number Diff line change
Expand Up @@ -306,7 +306,9 @@ def test_scenario(
with in_fresh_cf_graph():
with mitmproxy_env():
invoke_bot_command(["--debug", "auto-tick"])
invoke_bot_command(["--debug", "deploy-to-github"])
invoke_bot_command(
["--debug", "deploy-to-github", "--git-only", "--dirs-to-ignore=pr_json"]
)

with in_fresh_cf_graph():
# because of an implementation detail in the bot, we need to run make-migrators twice
Expand All @@ -320,6 +322,8 @@ def test_scenario(
# for changes to be picked up
with mitmproxy_env():
invoke_bot_command(["--debug", "auto-tick"])
invoke_bot_command(["--debug", "deploy-to-github", "--git-only"])
invoke_bot_command(
["--debug", "deploy-to-github", "--git-only", "--dirs-to-ignore=pr_json"]
)

run_all_validate_functions(scenario)
Loading