Skip to content

Commit 5f69e19

Browse files
xuanyang15 and copybara-github
authored and committed
fix: Enhance ADK release analyzer with resume functionality and better session management
Co-authored-by: Xuan Yang <xygoogle@google.com> PiperOrigin-RevId: 890127075
1 parent abe4bbe commit 5f69e19

File tree

4 files changed

+190
-24
lines changed

4 files changed

+190
-24
lines changed

.github/workflows/analyze-releases-for-adk-docs-updates.yml

Lines changed: 33 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,20 @@ on:
66
types: [published]
77
# Manual trigger for testing and retrying.
88
workflow_dispatch:
9+
inputs:
10+
resume:
11+
description: 'Resume from the last failed/interrupted run'
12+
required: false
13+
type: boolean
14+
default: false
15+
start_tag:
16+
description: 'Older release tag (base), e.g. v1.26.0'
17+
required: false
18+
type: string
19+
end_tag:
20+
description: 'Newer release tag (head), e.g. v1.27.0'
21+
required: false
22+
type: string
923

1024
jobs:
1125
analyze-new-release-for-adk-docs-updates:
@@ -33,6 +47,13 @@ jobs:
3347
python -m pip install --upgrade pip
3448
pip install requests google-adk
3549
50+
- name: Restore session DB from cache
51+
if: ${{ github.event.inputs.resume == 'true' }}
52+
uses: actions/cache/restore@v4
53+
with:
54+
path: contributing/samples/adk_documentation/adk_release_analyzer/sessions.db
55+
key: analyzer-session-db
56+
3657
- name: Run Analyzing Script
3758
env:
3859
GITHUB_TOKEN: ${{ secrets.ADK_TRIAGE_AGENT }}
@@ -44,4 +65,15 @@ jobs:
4465
CODE_REPO: 'adk-python'
4566
INTERACTIVE: 0
4667
PYTHONPATH: contributing/samples/adk_documentation
47-
run: python -m adk_release_analyzer.main
68+
run: >-
69+
python -m adk_release_analyzer.main
70+
${{ github.event.inputs.resume == 'true' && '--resume' || '' }}
71+
${{ github.event.inputs.start_tag && format('--start-tag {0}', github.event.inputs.start_tag) || '' }}
72+
${{ github.event.inputs.end_tag && format('--end-tag {0}', github.event.inputs.end_tag) || '' }}
73+
74+
- name: Save session DB to cache
75+
if: always()
76+
uses: actions/cache/save@v4
77+
with:
78+
path: contributing/samples/adk_documentation/adk_release_analyzer/sessions.db
79+
key: analyzer-session-db

contributing/samples/adk_documentation/adk_release_analyzer/agent.py

Lines changed: 52 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,7 @@
2929
- recommendations: Accumulated recommendations from all groups
3030
"""
3131

32+
import copy
3233
import os
3334
import sys
3435
from typing import Any
@@ -114,6 +115,7 @@ def get_next_file_group(tool_context: ToolContext) -> dict[str, Any]:
114115
current_index = tool_context.state.get("current_group_index", 0)
115116

116117
if current_index >= len(file_groups):
118+
print(f"[Progress] All {len(file_groups)} groups processed.")
117119
return {
118120
"status": "complete",
119121
"message": "All file groups have been processed.",
@@ -122,7 +124,11 @@ def get_next_file_group(tool_context: ToolContext) -> dict[str, Any]:
122124
}
123125

124126
current_group = file_groups[current_index]
125-
tool_context.state["current_group_index"] = current_index + 1
127+
file_paths = [f.get("relative_path", "?") for f in current_group]
128+
print(
129+
f"[Progress] Starting group {current_index + 1}/{len(file_groups)}:"
130+
f" {file_paths}"
131+
)
126132

127133
return {
128134
"status": "success",
@@ -157,6 +163,16 @@ def save_group_recommendations(
157163
all_recommendations = tool_context.state.get("recommendations", [])
158164
all_recommendations.extend(recommendations)
159165
tool_context.state["recommendations"] = all_recommendations
166+
# Advance index only after recommendations are saved, so interrupted
167+
# groups get retried on resume instead of being skipped.
168+
tool_context.state["current_group_index"] = group_index + 1
169+
170+
total_groups = len(tool_context.state.get("file_groups", []))
171+
print(
172+
f"[Progress] Group {group_index + 1}/{total_groups} done."
173+
f" +{len(recommendations)} recommendations"
174+
f" ({len(all_recommendations)} total)"
175+
)
160176

161177
return {
162178
"status": "success",
@@ -180,6 +196,11 @@ def get_all_recommendations(tool_context: ToolContext) -> dict[str, Any]:
180196
end_tag = tool_context.state.get("end_tag", "unknown")
181197
compare_url = tool_context.state.get("compare_url", "")
182198

199+
print(
200+
f"[Summary] Retrieving recommendations: {len(recommendations)} total,"
201+
f" release {start_tag} → {end_tag}"
202+
)
203+
183204
return {
184205
"status": "success",
185206
"start_tag": start_tag,
@@ -226,6 +247,12 @@ def save_release_info(
226247
tool_context.state["release_summary"] = release_summary
227248
tool_context.state["all_changed_files"] = all_changed_files
228249

250+
total_files = sum(len(group) for group in file_groups)
251+
print(
252+
f"[Planning] Release {start_tag} → {end_tag}:"
253+
f" {total_files} files in {len(file_groups)} groups"
254+
)
255+
229256
return {
230257
"status": "success",
231258
"start_tag": start_tag,
@@ -404,10 +431,13 @@ def file_analyzer_instruction(readonly_context: ReadonlyContext) -> str:
404431
4. For EACH significant change, call `search_local_git_repo` to find related docs
405432
in {LOCAL_REPOS_DIR_PATH}/{DOC_REPO}/docs/
406433
- Search for the feature name, class name, or related keywords
434+
- **ALWAYS** pass `ignored_dirs=["api-reference"]` to skip auto-generated API
435+
reference docs (they are updated automatically by code, not manually)
407436
- If no docs found, recommend creating new documentation
408437
409438
5. Call `read_local_git_repo_file_content` to read the relevant doc files
410439
and check if they need updating.
440+
- **SKIP** any files under `docs/api-reference/` — these are auto-generated.
411441
412442
6. For each documentation update needed, create a recommendation with:
413443
- summary: Brief summary of what needs to change
@@ -446,6 +476,7 @@ def file_analyzer_instruction(readonly_context: ReadonlyContext) -> str:
446476
"Analyzes a group of changed files and generates recommendations."
447477
),
448478
instruction=file_analyzer_instruction,
479+
include_contents="none",
449480
tools=[
450481
get_next_file_group,
451482
get_release_context, # Get global context to avoid duplicates
@@ -554,6 +585,26 @@ def summary_instruction(readonly_context: ReadonlyContext) -> str:
554585
)
555586

556587

588+
# Resume pipeline: skips planner, continues from where loop left off.
589+
# Deep copy agents since ADK agents can only have one parent.
590+
_resume_loop = copy.deepcopy(file_analysis_loop)
591+
_resume_loop.parent_agent = None
592+
_resume_summary = copy.deepcopy(summary_agent)
593+
_resume_summary.parent_agent = None
594+
595+
resume_pipeline = SequentialAgent(
596+
name="resume_pipeline",
597+
description=(
598+
"Resumes the release analysis pipeline from the file analysis loop,"
599+
" skipping the planning phase."
600+
),
601+
sub_agents=[
602+
_resume_loop,
603+
_resume_summary,
604+
],
605+
)
606+
607+
557608
# =============================================================================
558609
# Root Agent: Entry point that understands user requests
559610
# =============================================================================

contributing/samples/adk_documentation/adk_release_analyzer/main.py

Lines changed: 91 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -12,8 +12,10 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15+
import argparse
1516
import asyncio
1617
import logging
18+
import os
1719
import time
1820

1921
from adk_documentation.adk_release_analyzer import agent
@@ -23,30 +25,105 @@
2325
from adk_documentation.settings import DOC_REPO
2426
from adk_documentation.utils import call_agent_async
2527
from google.adk.cli.utils import logs
26-
from google.adk.runners import InMemoryRunner
28+
from google.adk.runners import Runner
29+
from google.adk.sessions import DatabaseSessionService
2730

2831
APP_NAME = "adk_release_analyzer"
2932
USER_ID = "adk_release_analyzer_user"
33+
DB_PATH = os.path.join(os.path.dirname(__file__), "sessions.db")
34+
DB_URL = f"sqlite+aiosqlite:///{DB_PATH}"
3035

31-
logs.setup_adk_logger(level=logging.DEBUG)
36+
logs.setup_adk_logger(level=logging.INFO)
3237

3338

3439
async def main():
35-
runner = InMemoryRunner(
36-
agent=agent.root_agent,
37-
app_name=APP_NAME,
40+
parser = argparse.ArgumentParser(description="ADK Release Analyzer")
41+
parser.add_argument(
42+
"--resume",
43+
action="store_true",
44+
help="Resume from the last session instead of starting fresh.",
3845
)
39-
session = await runner.session_service.create_session(
40-
app_name=APP_NAME,
41-
user_id=USER_ID,
46+
parser.add_argument(
47+
"--start-tag",
48+
type=str,
49+
default=None,
50+
help="The older release tag (base) for comparison, e.g. v1.26.0.",
4251
)
43-
44-
response = await call_agent_async(
45-
runner,
46-
USER_ID,
47-
session.id,
48-
"Please analyze the most recent two releases of ADK Python!",
52+
parser.add_argument(
53+
"--end-tag",
54+
type=str,
55+
default=None,
56+
help="The newer release tag (head) for comparison, e.g. v1.27.0.",
4957
)
58+
args = parser.parse_args()
59+
60+
session_service = DatabaseSessionService(db_url=DB_URL)
61+
62+
if args.resume:
63+
# Find the most recent session to resume
64+
sessions_response = await session_service.list_sessions(
65+
app_name=APP_NAME, user_id=USER_ID
66+
)
67+
if not sessions_response.sessions:
68+
print("No previous session found. Starting fresh.")
69+
args.resume = False
70+
71+
if args.resume:
72+
# Resume: use existing session with resume_pipeline (skip planner)
73+
last_session = sessions_response.sessions[-1]
74+
session_id = last_session.id
75+
session = await session_service.get_session(
76+
app_name=APP_NAME, user_id=USER_ID, session_id=session_id
77+
)
78+
state = session.state
79+
group_index = state.get("current_group_index", 0)
80+
total_groups = len(state.get("file_groups", []))
81+
num_recs = len(state.get("recommendations", []))
82+
print(f"Resuming session {session_id}")
83+
print(
84+
f" Progress: group {group_index + 1}/{total_groups},"
85+
f" {num_recs} recommendations so far"
86+
)
87+
print(
88+
f" Release: {state.get('start_tag', '?')} →"
89+
f" {state.get('end_tag', '?')}"
90+
)
91+
92+
runner = Runner(
93+
agent=agent.resume_pipeline,
94+
app_name=APP_NAME,
95+
session_service=session_service,
96+
)
97+
prompt = "Resume analyzing the remaining file groups."
98+
else:
99+
# Fresh run
100+
runner = Runner(
101+
agent=agent.root_agent,
102+
app_name=APP_NAME,
103+
session_service=session_service,
104+
)
105+
session = await session_service.create_session(
106+
app_name=APP_NAME,
107+
user_id=USER_ID,
108+
)
109+
session_id = session.id
110+
if args.start_tag and args.end_tag:
111+
prompt = (
112+
f"Please analyze ADK Python releases from {args.start_tag} to"
113+
f" {args.end_tag}!"
114+
)
115+
elif args.end_tag:
116+
prompt = (
117+
f"Please analyze the ADK Python release {args.end_tag} against its"
118+
" previous release!"
119+
)
120+
else:
121+
prompt = "Please analyze the most recent two releases of ADK Python!"
122+
123+
print(f"Session ID: {session_id}")
124+
print("-" * 80)
125+
126+
response = await call_agent_async(runner, USER_ID, session_id, prompt)
50127
print(f"<<<< Agent Final Output: {response}\n")
51128

52129

contributing/samples/adk_documentation/tools.py

Lines changed: 14 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -183,18 +183,24 @@ def read_local_git_repo_file_content(file_path: str) -> Dict[str, Any]:
183183
commit hash.
184184
"""
185185
print(f"Attempting to read file from path: {file_path}")
186-
dir_path = os.path.dirname(file_path)
187-
head_commit_sha = _find_head_commit_sha(dir_path)
186+
if not os.path.isabs(file_path):
187+
return error_response(
188+
f"file_path must be an absolute path, got: {file_path}"
189+
)
190+
191+
try:
192+
dir_path = os.path.dirname(file_path)
193+
head_commit_sha = _find_head_commit_sha(dir_path)
194+
except (FileNotFoundError, subprocess.CalledProcessError):
195+
head_commit_sha = "unknown"
188196

189197
try:
190-
# Open and read the file content
191198
with open(file_path, "r", encoding="utf-8") as f:
192199
content = f.read()
193200

194-
# Add line numbers to the content
195-
lines = content.splitlines()
196-
numbered_lines = [f"{i + 1}: {line}" for i, line in enumerate(lines)]
197-
numbered_content = "\n".join(numbered_lines)
201+
lines = content.splitlines()
202+
numbered_lines = [f"{i + 1}: {line}" for i, line in enumerate(lines)]
203+
numbered_content = "\n".join(numbered_lines)
198204

199205
return {
200206
"status": "success",
@@ -204,7 +210,7 @@ def read_local_git_repo_file_content(file_path: str) -> Dict[str, Any]:
204210
}
205211
except FileNotFoundError:
206212
return error_response(f"Error: File not found at {file_path}")
207-
except IOError as e:
213+
except (IOError, OSError) as e:
208214
return error_response(f"An unexpected error occurred: {e}")
209215

210216

0 commit comments

Comments
 (0)