async def execute_mcp_tool(self, request, tool_to_call):
    """Execute an MCP tool call inside an OpenInference TOOL span.

    Args:
        request: JSON-encoded string of the tool-call payload. It is parsed,
            and ``self.metadata`` is injected under ``input.metadata`` before
            dispatch (assumes the payload has an ``input`` dict — TODO confirm
            against callers).
        tool_to_call: Tool descriptor exposing ``.name`` and ``.description``.

    Returns:
        The ``.text`` of the first content item of the tool response (or its
        ``str()`` when no ``text`` attribute exists), or ``None`` when the
        call failed or returned no content.
    """
    with tracer.start_as_current_span(
        name=f"Tool - {tool_to_call.name}",
        attributes={
            SpanAttributes.OPENINFERENCE_SPAN_KIND: OpenInferenceSpanKindValues.TOOL.value,
            SpanAttributes.TOOL_NAME: tool_to_call.name,
            SpanAttributes.TOOL_DESCRIPTION: tool_to_call.description,
            # FIX: the original referenced an undefined name `tool_call`
            # (NameError on every invocation). The raw JSON `request` string
            # is the arguments payload actually in scope here.
            SpanAttributes.TOOL_PARAMETERS: request,
            ToolAttributes.TOOL_JSON_SCHEMA: ''
        },
    ) as tool_span:
        response = None
        try:
            async with self.mcp.client as client:
                payload = json.loads(request)
                payload['input']['metadata'] = self.metadata
                response = await client.call_tool(tool_to_call.name, payload)
        except Exception as e:
            # Best-effort execution: log and fall through with response=None.
            # FIX: the original printed `{Exception}` (the class object)
            # instead of the caught instance.
            print(f'Exception occurred while executing tool:{tool_to_call.name}, {e}')

        content = None
        # FIX: guard against response being None after a failed call — the
        # original dereferenced response.content unconditionally, turning any
        # handled exception into an AttributeError right after the handler.
        if response is not None and response.content:
            first = response.content[0]
            content = first.text if hasattr(first, 'text') else str(first)

        tool_span.set_attributes({
            SpanAttributes.OUTPUT_VALUE: content,
            SpanAttributes.OUTPUT_MIME_TYPE: "application/json"
        })
        return content