
Commit 7d9bde7

Merge pull request #49 from redhat-community-ai-tools/GENIE-1131/STORY/Support-miltiple-mcp-providers
Genie 1131/story/support multiple mcp providers
2 parents 32bfcc5 + 8718dd9 commit 7d9bde7

19 files changed (+569 −209 lines)

multi-agent/api/flask/endpoints/resources.py

Lines changed: 48 additions & 0 deletions
@@ -157,6 +157,54 @@ def validate_resource(resource_id, timeout_seconds):
         return jsonify({"error": str(e)}), 500


+@resources_bp.route("/resources.validate", methods=["POST"])
+@from_body({
+    "resource_ids": fields.List(fields.Str(), data_key="resourceIds", required=True),
+    "timeout_seconds": fields.Float(data_key="timeoutSeconds", load_default=10.0),
+    "max_workers": fields.Int(data_key="maxWorkers", load_default=10),
+})
+def validate_resources(resource_ids, timeout_seconds, max_workers):
+    """
+    Validate multiple resources in parallel.
+
+    Request:
+    {
+        "resourceIds": ["rid1", "rid2", "rid3"],
+        "timeoutSeconds": 10.0,
+        "maxWorkers": 10
+    }
+
+    Response:
+    [
+        { "element_rid": "rid1", "is_valid": true, ... },
+        { "element_rid": "rid2", "is_valid": false, ... },
+        { "element_rid": "rid3", "is_valid": true, ... }
+    ]
+
+    Results are returned in the same order as the input resourceIds.
+    """
+    svc = current_app.container.resources_service
+
+    # Validate input
+    if not resource_ids:
+        return jsonify([]), 200
+
+    # Cap max_workers to prevent resource exhaustion
+    max_workers = min(max_workers, 20)
+
+    try:
+        results = svc.validate_resources(
+            rids=resource_ids,
+            timeout_seconds=timeout_seconds,
+            max_workers=max_workers,
+        )
+        return jsonify([r.to_dict() for r in results]), 200
+    except RuntimeError as e:
+        return jsonify({"error": str(e)}), 500
+    except Exception as e:
+        return jsonify({"error": str(e)}), 500
+
+
 @resources_bp.route("/config.validate", methods=["POST"])
 @from_body({
     "category": fields.Str(required=True),

multi-agent/core/ref/models.py

Lines changed: 27 additions & 20 deletions
@@ -1,26 +1,34 @@
-from typing import ClassVar, Literal, Annotated, Optional
-from pydantic import RootModel, model_serializer, SerializerFunctionWrapHandler, SerializationInfo, Field
+from typing import ClassVar
+from pydantic import RootModel, model_serializer, SerializerFunctionWrapHandler, SerializationInfo
 from core.enums import ResourceCategory


 class Ref(RootModel[str]):
     """
+    Base class for resource references.
+
     A wrapper around a single string that may be:
     - a bare ID, e.g. "abcd-1234"
     - an external ref, e.g. "$ref:abcd-1234"

-    Exposes:
-    - .root the original string
-    - .ref the ID without prefix
-    - .is_external_ref()
-    - .is_inline()
-    - .get_category() NEW: get resource category
+    All subclasses MUST define _category to specify which resource
+    category they reference. This is enforced at class definition time.
     """
     REF_PREFIX: ClassVar[str] = "$ref:"
-    _category: ClassVar[Optional[ResourceCategory]] = None
+    _category: ClassVar[ResourceCategory]
+
+    def __init_subclass__(cls, **kwargs):
+        """Enforce that all Ref subclasses define _category."""
+        super().__init_subclass__(**kwargs)
+        if not hasattr(cls, '_category') or cls._category is None:
+            raise TypeError(
+                f"Ref subclass '{cls.__name__}' must define '_category'. "
+                f"Example: _category: ClassVar[ResourceCategory] = ResourceCategory.YOUR_CATEGORY"
+            )

     @property
     def ref(self) -> str:
+        """The ID without the $ref: prefix."""
         raw = self.root
         if raw.startswith(self.REF_PREFIX):
             return raw[len(self.REF_PREFIX):]
@@ -31,10 +39,10 @@ def is_external_ref(self) -> bool:

     def is_inline(self) -> bool:
         return not self.is_external_ref()
-
-    def get_category(self) -> Optional[ResourceCategory]:
+
+    def get_category(self) -> ResourceCategory:
         """Get the resource category this ref points to."""
-        return getattr(self.__class__, '_category', None)
+        return self._category

     @model_serializer(mode="wrap")
     def _serialize(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo) -> str:
@@ -47,11 +55,10 @@ def _serialize(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo) -> str:
         return self.ref


-# Specific Ref classes with category information + JSON schema
 class LLMRef(Ref):
     """Reference to an LLM resource."""
     _category: ClassVar[ResourceCategory] = ResourceCategory.LLM
-
+
     model_config = {
         "json_schema_extra": {
             "category": ResourceCategory.LLM.value,
@@ -64,11 +71,11 @@ class LLMRef(Ref):

 class NodeRef(Ref):
     """Reference to a Node resource."""
     _category: ClassVar[ResourceCategory] = ResourceCategory.NODE
-
+
     model_config = {
         "json_schema_extra": {
             "category": ResourceCategory.NODE.value,
-            "description": "Reference to a Node resource",
+            "description": "Reference to a Node resource",
             "examples": ["$ref:custom-agent-1", "data-processor"]
         }
     }
@@ -77,7 +84,7 @@ class NodeRef(Ref):

 class RetrieverRef(Ref):
     """Reference to a Retriever resource."""
     _category: ClassVar[ResourceCategory] = ResourceCategory.RETRIEVER
-
+
     model_config = {
         "json_schema_extra": {
             "category": ResourceCategory.RETRIEVER.value,
@@ -90,7 +97,7 @@ class RetrieverRef(Ref):

 class ToolRef(Ref):
     """Reference to a Tool resource."""
     _category: ClassVar[ResourceCategory] = ResourceCategory.TOOL
-
+
     model_config = {
         "json_schema_extra": {
             "category": ResourceCategory.TOOL.value,
@@ -103,7 +110,7 @@ class ToolRef(Ref):

 class ProviderRef(Ref):
     """Reference to a Provider resource."""
     _category: ClassVar[ResourceCategory] = ResourceCategory.PROVIDER
-
+
     model_config = {
         "json_schema_extra": {
             "category": ResourceCategory.PROVIDER.value,
@@ -116,7 +123,7 @@ class ProviderRef(Ref):

 class ConditionRef(Ref):
     """Reference to a Condition resource."""
     _category: ClassVar[ResourceCategory] = ResourceCategory.CONDITION
-
+
     model_config = {
         "json_schema_extra": {
             "category": ResourceCategory.CONDITION.value,

multi-agent/elements/common/validator.py

Lines changed: 21 additions & 0 deletions
@@ -109,6 +109,27 @@ def to_dict(self) -> Dict:
             },
         }

+    @classmethod
+    def create_error(cls, rid: str, error: str) -> "ElementValidationResult":
+        """
+        Factory for creating an error result.
+
+        Useful when validation fails due to exceptions (resource not found, etc.)
+        rather than actual validation logic.
+        """
+        return cls(
+            is_valid=False,
+            element_rid=rid,
+            element_type="unknown",
+            name=None,
+            messages=[ValidationMessage(
+                severity=ValidationSeverity.ERROR,
+                code="VALIDATION_ERROR",
+                message=error,
+            )],
+            dependency_results={},
+        )
+

 @dataclass(frozen=True)
 class ValidationContext:
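The factory is presumably what lets a batch validator turn per-resource exceptions into ordinary results instead of failing the whole request. A rough sketch of that pattern follows; the service-level implementation is not part of this excerpt, and svc.validate_resource plus the import path are assumptions, while ElementValidationResult.create_error comes from the diff above.

from concurrent.futures import ThreadPoolExecutor
from typing import List

from elements.common.validator import ElementValidationResult  # import path assumed


def validate_resources(svc, rids: List[str], timeout_seconds: float = 10.0,
                       max_workers: int = 10) -> List[ElementValidationResult]:
    """Validate many resources in parallel, preserving the input order."""

    def _validate_one(rid: str) -> ElementValidationResult:
        try:
            # svc.validate_resource is an assumed single-resource entry point.
            return svc.validate_resource(rid, timeout_seconds=timeout_seconds)
        except Exception as exc:
            # Exceptions become error results instead of aborting the whole batch.
            return ElementValidationResult.create_error(rid, str(exc))

    with ThreadPoolExecutor(max_workers=max_workers) as pool:
        # Executor.map yields results in the same order as the input rids.
        return list(pool.map(_validate_one, rids))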

multi-agent/elements/llms/google_genai/config.py

Lines changed: 1 addition & 1 deletion
@@ -31,7 +31,7 @@ class GoogleGenAIConfig(BaseModel):
     )

     max_tokens: Optional[int] = Field(
-        default=None,
+        default=8192,
         description="Maximum number of tokens to generate (None for model default)"
     )

multi-agent/elements/llms/google_genai/google_genai.py

Lines changed: 8 additions & 28 deletions
@@ -6,7 +6,7 @@
 from ..common.chat.converter import LangChainConverter
 from ..common.chat.message import ChatMessage
 from ...tools.common.base_tool import BaseTool
-from ...tools.common.converter import LangChainToolsConverter
+from .tools_converter import GoogleGenAIToolsConverter


 def _extract_text_content(content: Any) -> str:
@@ -20,7 +20,6 @@ def _extract_text_content(content: Any) -> str:
     if isinstance(content, str):
         return content
     if isinstance(content, list):
-        # Extract text from content blocks
         texts = []
         for block in content:
             if isinstance(block, dict) and block.get("type") == "text":
@@ -57,7 +56,6 @@ def __init__(
         """
         self._name = "google-genai"

-        # Build kwargs, only including non-None values
         client_kwargs: Dict[str, Any] = {
             "model": model_name,
             "google_api_key": api_key,
@@ -99,15 +97,12 @@ def chat(
             call_params["max_output_tokens"] = max_tokens
         call_params.update(kwargs)

-        # Convert to LangChain message objects
         lc_messages = LangChainConverter.to_lc(messages)
-
         response = self.client.invoke(lc_messages, **call_params)
-
-        # Google GenAI returns content as list of blocks, normalize it to string
+
         if hasattr(response, 'content') and isinstance(response.content, list):
             response.content = _extract_text_content(response.content)
-
+
         return LangChainConverter.from_lc_message(response)

     def stream(
@@ -123,49 +118,34 @@
         and, at the very end, yields **one** `ChatMessage` representing
         the full assistant reply with `tool_calls=[…]`.
         """
-        # Translate our domain history → LangChain
         lc_history = LangChainConverter.to_lc(messages)
-
-        aggregated: Any | None = None  # will hold the growing AIMessage
+        aggregated: Any | None = None

         for chunk in self.client.stream(lc_history, **call_params):
-            # Tool-call partials -------------------------------------------------
             if getattr(chunk, "tool_call_chunks", None):
                 aggregated = chunk if aggregated is None else aggregated + chunk
-                # we do NOT yield yet — wait until provider is done
                 continue

-            # Plain token path --------------------------------------------------
-            # Google GenAI returns content as list of blocks, not a simple string
             token = _extract_text_content(chunk.content)
             if token:
                 yield token

-        # Provider finished ------------------------------------------------------
         if aggregated:
-            # LangChain "+" produced a final AIMessage with complete tool_calls,
-            # Normalize content format before converting
             if hasattr(aggregated, 'content') and isinstance(aggregated.content, list):
                 aggregated.content = _extract_text_content(aggregated.content)
-            # Convert once to our ChatMessage model and yield it.
             yield LangChainConverter.from_lc_message(aggregated)

     def bind_tools(self, tools: List[BaseTool]) -> "GoogleGenAILLM":
         """
-        Return a new GoogleGenAILLM instance with tools bound, avoiding cross-contamination.
+        Return a new GoogleGenAILLM instance with tools bound.

-        This creates a copy of the current LLM with tools bound to the client,
-        ensuring the original LLM instance remains unchanged.
+        Uses GoogleGenAIToolsConverter which sanitizes schemas to meet
+        Google GenAI's strict validation requirements.
         """
-        # Create a shallow copy of the current instance
         new_llm = copy.copy(self)
-
-        # Create a new client with tools bound (LangChain's bind_tools returns a copy)
-        new_llm.client = self.client.bind_tools(LangChainToolsConverter.to_lc(tools))
-
+        new_llm.client = self.client.bind_tools(GoogleGenAIToolsConverter.to_lc(tools))
         return new_llm

     @property
     def name(self) -> str:
         return self._name
-
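To make the streaming contract concrete, here is a hedged usage sketch. The constructor arguments, ChatMessage fields, and import paths are assumptions; the str-then-final-ChatMessage behaviour of stream() and the copy semantics of bind_tools() come from the code above.

from elements.llms.google_genai.google_genai import GoogleGenAILLM   # import path assumed
from elements.llms.common.chat.message import ChatMessage            # import path and fields assumed

llm = GoogleGenAILLM(model_name="gemini-2.0-flash", api_key="...")   # other constructor kwargs may exist

messages = [ChatMessage(role="user", content="Summarise the release notes.")]

final = None
for item in llm.stream(messages):          # signature assumed to mirror chat()
    if isinstance(item, str):
        print(item, end="", flush=True)    # plain token: forward it as it arrives
    else:
        final = item                       # the single aggregated ChatMessage, tool_calls included

# bind_tools returns a *new* LLM; the original stays untouched, so both can be used side by side.
# tool_llm = llm.bind_tools(my_tools)      # my_tools: List[BaseTool], construction not shown here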
