Hi RunLLM, how do I get the Phoenix trace link for the code below?
# managing tracer objects in cache
# NOTE(review): launches the local Phoenix UI at import time; use_temp_dir=False
# presumably makes the session data persist outside a temp dir — confirm this
# module is imported exactly once per process so only one app is launched.
session = px.launch_app(use_temp_dir=False)
# Cache mapping thread_id -> tracer, so each conversation thread reuses one tracer.
tracers = {}
@extend_schema(
    tags=['Main'],
    operation_id='query',
    summary='Chained Query',
    description='API for Changed Queries',
    parameters=[
        OpenApiParameter(name='app_id', description='App ID', required=True, type=OpenApiTypes.UUID,
                         location=OpenApiParameter.PATH),
        OpenApiParameter(name='thread_id', description='Thread ID', required=True, type=OpenApiTypes.UUID,
                         location=OpenApiParameter.PATH),
    ],
    responses={200: OpenApiResponse(description='Success')}
)
@api_view(['POST'])
def query(request, app_id, thread_id):
    """Answer a conversational query for one app/thread.

    Reuses a per-thread tracer from the module-level ``tracers`` cache
    (creating one via ``tracing.setup_tracing`` on first use), then runs the
    payload's query through the agent or chain pipeline inside an
    OpenTelemetry span and returns the generated answer.

    Expected POST payload keys: ``query``, ``type`` ('Agent' or 'Chain'),
    and optionally ``language`` / ``voice`` as the strings 'true'/'false'.
    """
    # One tracer per thread_id, cached at module level.
    if thread_id not in tracers:
        tracer = tracing.setup_tracing(thread_id)
        tracers[thread_id] = tracer
    else:
        tracer = tracers[thread_id]
    logging.info("New conversation started for app id : %s holding the thread id : %s", app_id, thread_id)
    if request.method == 'POST':
        # receiving json payload
        payload = request.data
        user_query = payload['query']
        query_type = payload['type']  # 'Agent' or 'Chain'
        image_file_path = ""
        # handle language flag (client sends the strings 'true'/'false'); defaults to True
        language = True
        if 'language' in payload:
            language_str = payload['language']
            if language_str == 'true':
                language = True
            elif language_str == 'false':
                language = False
        # handle voice flag — parse the string explicitly instead of calling
        # eval() on untrusted request data (security fix; behavior is the same
        # for the expected 'true'/'false' inputs)
        voice = False
        if 'voice' in payload:
            voice = str(payload['voice']).strip().lower() == 'true'
        memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True,
                                          input_key="question", output_key="answer")
        # Answer Generation
        with tracer.start_as_current_span(name="completions") as span:
            try:
                span.set_attribute("input.value", f"Query : {user_query}, Language : {str(language)}")
                if query_type == 'Agent':
                    span.set_attribute("openinference.span.kind", "AGENT")
                    answer, current_memory = conversational_agent.get_result(
                        user_query, language, voice, memory, app_id, thread_id, tracer)
                elif query_type == 'Chain':
                    span.set_attribute("openinference.span.kind", "CHAIN")
                    answer, current_memory = conversational_chain.get_result(
                        user_query, language, voice, image_file_path, memory, app_id, thread_id, tracer)
                else:
                    # Previously an unknown type crashed with an unbound-name
                    # NameError on `answer`; fail explicitly instead (still
                    # handled by the except below, same response to the client).
                    raise ValueError(f"Unsupported query type: {query_type}")
                span.set_status(Status(StatusCode.OK))
                span.set_attribute("output.value", answer["answer"])
                return Response(answer, status=status.HTTP_200_OK)
            except Exception as e:
                logging.info("Exception occured while trying to call get result method : %s", e)
                answer = {
                    "answer": "Query overload in LLM. Please try again shortly",
                    "type": "doc",
                    "documents": ['']
                }
                span.set_status(Status(StatusCode.ERROR))
                return Response(answer, status=status.HTTP_200_OK)

To get the Phoenix trace link for your code, you need to ensure that the trace ID and project ID are correctly retrieved and used to construct the link. Here's how you can achieve this:

1. Set Up Tracing: Ensure that your tracing setup is correctly configured to send traces to Phoenix.
2. Retrieve Trace ID: Use the get_current_span method from OpenTelemetry to get the current span and extract the trace ID.
3. Retrieve Project ID: Use a GraphQL query to get the project ID based on the project name.
4. Construct Trace Link: Combine the project ID and trace ID to form the trace link.

Here is the complete code snippet:
from typing import Optional
from httpx import Client
from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import SimpleSpanProcessor
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
import phoenix as px

# Set up tracing
tracer_provider = TracerProvider()
trace.set_tracer_provider(tracer_provider)
span_exporter = OTLPSpanExporter(endpoint="http://localhost:6006/v1/traces")
span_processor = SimpleSpanProcessor(span_exporter)
tracer_provider.add_span_processor(span_processor)
tracer = trace.get_tracer(__name__)

# Launch Phoenix app
session = px.launch_app(use_temp_dir=False)
tracers = {}

# GraphQL query to get project ID
client = Client(base_url="http://localhost:6006")
query = """
query ($after: String = null) {
  projects(after: $after) {
    edges {
      project: node {
        id
        name
      }
    }
    pageInfo {
      hasNextPage
      endCursor
    }
  }
}
"""LBKSdef get_id_for_project_name(name: str) -> Optional[str]:
has_next_page = True
after = None
while has_next_page:
response = [client.post](http://client.post)("/graphql", json={"query": query, "variables": {"after": after}})
if response.status_code != 200:
raise Exception(f"Failed with status code: {response.status_code}")
data = response.json()["data"]
edges = data["projects"]["edges"]
projects = map(lambda edge: edge["project"], edges)
for project in projects:
if project["name"] == name:
return project["id"]
page_info = data["projects"]["pageInfo"]
has_next_page = page_info["hasNextPage"]
after = page_info["endCursor"]
return NoneLBKSdef get_current_project_name() -> str:
project_name = os.getenv("CURRENT_PROJECT_NAME")
if not project_name:
raise Exception("Current project name is not set")
return project_nameLBKS# Set the current project name as an environment variable
os.environ["CURRENT_PROJECT_NAME"] = "your_current_project_name"LBKS# Get the current project name
current_project_name = get_current_project_name()LBKS# Get project ID dynamically for the current project
current_project_id = get_id_for_project_name(current_project_name)
if current_project_id is None:
raise Exception(f"Project ID for '{current_project_name}' not found")LBKS# Get the current span and trace ID
current_span = trace.get_current_span()
trace_id = current_span.get_span_context().trace_id
trace_id_hex = format(trace_id, '032x')LBKS# Construct the trace link
trace_link = f"<http://localhost:{os.getenv>('PORT', '6006')}/projects/{current_project_id}/traces/{trace_id_hex}"LBKSprint(f"Trace Link: {trace_link}")LBKS# Your existing code
@extend_schema(
tags=['Main'],
operation_id='query',
summary='Chained Query',
description='API for Changed Queries',
parameters=[LBKS```
OpenApiParameter(name='app_id', description='App ID', required=True, type=OpenApiTypes.UUID,
location=OpenApiParameter.PATH),
OpenApiParameter(name='thread_id', description='Thread ID', required=True, type=OpenApiTypes.UUID,
location=OpenApiParameter.PATH),
],
responses={200: OpenApiResponse(description='Success')}
)
@api_view(['POST'])
def query(request, app_id, thread_id):
if thread_id not in tracers.keys():
tracer = tracing.setup_tracing(thread_id)
tracers[thread_id] = tracer
else:
tracer = tracers[thread_id]LBKS [logging.info](http://logging.info)("New conversation started for app id : %s holding the thread id : %s", app_id, thread_id)LBKS if request.method == 'POST':
payload = request.data
query = payload['query']
type = payload['type']
image_file_path = ""LBKS language = True
if 'language' in payload:
language_str = payload['language']
if language_str == 'true':
language = True
elif language_str == 'false':
language = FalseLBKS voice = False
if 'voice' in payload:
voice = eval(payload['voice'].capitalize())LBKS memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True,
input_key="question", output_key="answer")LBKS with tracer.start_as_current_span(name="completions") as span:
try:
span.set_attribute("input.value", f"Query : {query}, Language : {str(language)}")
if type == 'Agent':
span.set_attribute("openinference.span.kind", "AGENT")
answer, current_memory = conversational_agent.get_result(
query, language, voice, memory, app_id, thread_id, tracer)
elif type == 'Chain':
span.set_attribute("openinference.span.kind", "CHAIN")
answer, current_memory = conversational_chain.get_result(
query, language, voice, image_file_path, memory, app_id, thread_id, tracer)
span.set_status(Status(StatusCode.OK))
span.set_attribute("output.value", answer["answer"])
return Response(answer, status=status.HTTP_200_OK)
except Exception as e:
[logging.info](http://logging.info)("Exception occured while trying to call get result method : %s", e)
answer = {
"answer": "Query overload in LLM. Please try again shortly",
"type": "doc",
"documents": ['']
}
span.set_status(Status(StatusCode.ERROR))
return Response(answer, status=status.HTTP_200_OK)LBKS```
This code will dynamically fetch the project ID and trace ID, and construct the trace link for your Phoenix setup.

Would you like more details on any specific part?

Sources:
- Slack Conversation
- GitHub Issue #3775
💡 Hint: Mention RunLLM in the thread for follow-ups.
INFO 2024-08-17 15:03:30,868 views New conversation started for app id : eb909347-8e7c-45b7-b767-5446f628840b holding the thread id : 86329f13-5019-47cd-bf19-e21a7b6303e5
INFO 2024-08-17 15:03:32,962 _client HTTP Request: POST http://localhost:6006/graphql "HTTP/1.1 200 OK"
INFO 2024-08-17 15:03:32,963 views Exception occurred while trying to call get result method : Project ID for '86329f13-5019-47cd-bf19-e21a7b6303e5' not found

RunLLM, when a new thread_id is created it is not recognized as a project and this error is raised. Give me the code to solve this issue.
The error occurs because the thread_id is not being recognized as a project ID. To solve this, you need to ensure that a new project is created dynamically whenever a new thread_id is encountered. Here's how you can modify your code to create a new project if it doesn't already exist:

1. Create Project Dynamically: Add a function to create a new project using the GraphQL API if the project ID is not found.
2. Update the query Function: Modify the query function to create a new project if the thread_id is not found.

Here is the updated code:
from typing import Optional
from httpx import Client
from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import SimpleSpanProcessor
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
import phoenix as pxLBKS# Set up tracing
tracer_provider = TracerProvider()
trace.set_tracer_provider(tracer_provider)
span_exporter = OTLPSpanExporter(endpoint="[http://localhost:6006/v1/traces](http://localhost:6006/v1/traces)")
span_processor = SimpleSpanProcessor(span_exporter)
tracer_provider.add_span_processor(span_processor)
tracer = trace.get_tracer(__name__)LBKS# Launch Phoenix app
session = px.launch_app(use_temp_dir=False)
tracers = {}LBKS# GraphQL query to get project ID
client = Client(base_url="[http://localhost:6006](http://localhost:6006)")
query = """
query ($after: String = null) {
projects(after: $after) {
edges {
project: node {
id
name
}
}
pageInfo {
hasNextPage
endCursor
}
}
}
"""LBKSdef get_id_for_project_name(name: str) -> Optional[str]:
has_next_page = True
after = None
while has_next_page:
response = [client.post](http://client.post)("/graphql", json={"query": query, "variables": {"after": after}})
if response.status_code != 200:
raise Exception(f"Failed with status code: {response.status_code}")
data = response.json()["data"]
edges = data["projects"]["edges"]
projects = map(lambda edge: edge["project"], edges)
for project in projects:
if project["name"] == name:
return project["id"]
page_info = data["projects"]["pageInfo"]
has_next_page = page_info["hasNextPage"]
after = page_info["endCursor"]
    return None

def create_project(name: str) -> str:
    mutation = """
    mutation ($name: String!) {
      createProject(input: {name: $name}) {
        project {
          id
        }
      }
    }
    """
    response = client.post("/graphql", json={"query": mutation, "variables": {"name": name}})
    if response.status_code != 200:
        raise Exception(f"Failed to create project with status code: {response.status_code}")
    return response.json()["data"]["createProject"]["project"]["id"]
project_name = os.getenv("CURRENT_PROJECT_NAME")
if not project_name:
raise Exception("Current project name is not set")
return project_nameLBKS# Set the current project name as an environment variable
os.environ["CURRENT_PROJECT_NAME"] = "your_current_project_name"LBKS# Get the current project name
current_project_name = get_current_project_name()LBKS# Get project ID dynamically for the current project
current_project_id = get_id_for_project_name(current_project_name)
if current_project_id is None:
current_project_id = create_project(current_project_name)LBKS# Get the current span and trace ID
current_span = trace.get_current_span()LBKS```
trace_id = current_span.get_span_context().trace_id
trace_id_hex = format(trace_id, '032x')LBKS# Construct the trace link
trace_link = f"<http://localhost:{os.getenv>('PORT', '6006')}/projects/{current_project_id}/traces/{trace_id_hex}"LBKSprint(f"Trace Link: {trace_link}")LBKS# Your existing code
@extend_schema(
tags=['Main'],
operation_id='query',
summary='Chained Query',
description='API for Changed Queries',
parameters=[
OpenApiParameter(name='app_id', description='App ID', required=True, type=OpenApiTypes.UUID,
location=OpenApiParameter.PATH),
OpenApiParameter(name='thread_id', description='Thread ID', required=True, type=OpenApiTypes.UUID,
location=OpenApiParameter.PATH),
],
responses={200: OpenApiResponse(description='Success')}
)
@api_view(['POST'])
def query(request, app_id, thread_id):
if thread_id not in tracers.keys():
tracer = tracing.setup_tracing(thread_id)
tracers[thread_id] = tracer
else:
tracer = tracers[thread_id]LBKS [logging.info](http://logging.info)("New conversation started for app id : %s holding the thread id : %s", app_id, thread_id)LBKS if request.method == 'POST':
payload = request.data
query = payload['query']
type = payload['type']
image_file_path = ""LBKS language = True
if 'language' in payload:
language_str = payload['language']
if language_str == 'true':
language = True
elif language_str == 'false':
language = FalseLBKS voice = False
if 'voice' in payload:
voice = eval(payload['voice'].capitalize())LBKS memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True,
input_key="question", output_key="answer")LBKS with tracer.start_as_current_span(name="completions") as span:
try:
span.set_attribute("input.value", f"Query : {query}, Language : {str(language)}")
if type == 'Agent':
span.set_attribute("openinference.span.kind", "AGENT")
answer, current_memory = conversational_agent.get_result(
query, language, voice, memory, app_id, thread_id, tracer)
elif type == 'Chain':
span.set_attribute("openinference.span.kind", "CHAIN")
answer, current_memory = conversational_chain.get_result(
query, language, voice, image_file_path, memory, app_id, thread_id, tracer)
span.set_status(Status(StatusCode.OK))
span.set_attribute("output.value", answer["answer"])
return Response(answer, status=status.HTTP_200_OK)
except Exception as e:
[logging.info](http://logging.info)("Exception occurred while trying to call get result method : %s", e)
answer = {LBKS```
"answer": "Query overload in LLM. Please try again shortly",
"type": "doc",
"documents": ['']
}
span.set_status(Status(StatusCode.ERROR))
return Response(answer, status=status.HTTP_200_OK)LBKS```
This code will dynamically create a new project if the thread_id is not found, ensuring that the trace link can be constructed correctly.

Would you like more details on any specific part?

Sources:
- Slack Conversation
💡 Hint: Mention RunLLM in the thread for follow-ups.
Yes, you can maintain a list of project names and add the thread_id to this list as soon as the thread is created. This way, you can check if the thread_id exists in the list and avoid creating a new project each time. Here's how you can modify your code to achieve this:

1. Maintain a List of Project Names: Use a list to store all project names.
2. Check and Add thread_id to the List: When a new thread_id is encountered, add it to the list if it doesn't already exist.

Here is the updated code:
from typing import Optional
from httpx import Client
from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import SimpleSpanProcessor
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
import phoenix as pxLBKS# Set up tracing
tracer_provider = TracerProvider()
trace.set_tracer_provider(tracer_provider)
span_exporter = OTLPSpanExporter(endpoint="[http://localhost:6006/v1/traces](http://localhost:6006/v1/traces)")
span_processor = SimpleSpanProcessor(span_exporter)
tracer_provider.add_span_processor(span_processor)
tracer = trace.get_tracer(__name__)LBKS# Launch Phoenix app
session = px.launch_app(use_temp_dir=False)
tracers = {}LBKS# GraphQL query to get project ID
client = Client(base_url="[http://localhost:6006](http://localhost:6006)")
query = """
query ($after: String = null) {
projects(after: $after) {
edges {
project: node {
id
name
}
}
pageInfo {
hasNextPage
endCursor
}
}
}
"""LBKSdef get_id_for_project_name(name: str) -> Optional[str]:
has_next_page = True
after = None
while has_next_page:
response = [client.post](http://client.post)("/graphql", json={"query": query, "variables": {"after": after}})
if response.status_code != 200:
raise Exception(f"Failed with status code: {response.status_code}")
data = response.json()["data"]
edges = data["projects"]["edges"]
projects = map(lambda edge: edge["project"], edges)
for project in projects:
if project["name"] == name:
return project["id"]
page_info = data["projects"]["pageInfo"]
has_next_page = page_info["hasNextPage"]
after = page_info["endCursor"]
return NoneLBKSdef get_current_project_name() -> str:
project_name = os.getenv("CURRENT_PROJECT_NAME")
if not project_name:
raise Exception("Current project name is not set")
return project_nameLBKS# Set the current project name as an environment variable
os.environ["CURRENT_PROJECT_NAME"] = "your_current_project_name"LBKS# Get the current project name
current_project_name = get_current_project_name()LBKS# Get project ID dynamically for the current project
current_project_id = get_id_for_project_name(current_project_name)
if current_project_id is None:
raise Exception(f"Project ID for '{current_project_name}' not found")LBKS# List to store project names
project_names = []LBKS# Get the current span and trace ID
current_span = trace.get_current_span()
trace_id = current_span.get_span_context().trace_id
trace_id_hex = format(trace_id, '032x')LBKS# Construct the trace link
trace_link = f"<http://localhost:{os.getenv>('PORT', '6006')}/projects/{current_project_id}/traces/{trace_id_hex}"LBKSprint(f"Trace Link: {trace_link}")LBKS# Your existing code
@extend_schema(
tags=['Main'],
operation_id='query',
summary='Chained Query',
description='API for Changed Queries',
parameters=[LBKS```
OpenApiParameter(name='app_id', description='App ID', required=True, type=OpenApiTypes.UUID,
location=OpenApiParameter.PATH),
OpenApiParameter(name='thread_id', description='Thread ID', required=True, type=OpenApiTypes.UUID,
location=OpenApiParameter.PATH),
],
responses={200: OpenApiResponse(description='Success')}
)
@api_view(['POST'])
def query(request, app_id, thread_id):
if thread_id not in tracers.keys():
tracer = tracing.setup_tracing(thread_id)
tracers[thread_id] = tracer
else:
tracer = tracers[thread_id]LBKS [logging.info](http://logging.info)("New conversation started for app id : %s holding the thread id : %s", app_id, thread_id)LBKS if request.method == 'POST':
payload = request.data
query = payload['query']
type = payload['type']
image_file_path = ""LBKS language = True
if 'language' in payload:
language_str = payload['language']
if language_str == 'true':
language = True
elif language_str == 'false':
language = FalseLBKS voice = False
if 'voice' in payload:
voice = eval(payload['voice'].capitalize())LBKS memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True,
input_key="question", output_key="answer")LBKS # Check if thread_id is in project_names list
if thread_id not in project_names:
project_names.append(thread_id)LBKS with tracer.start_as_current_span(name="completions") as span:
try:
span.set_attribute("input.value", f"Query : {query}, Language : {str(language)}")
if type == 'Agent':
span.set_attribute("openinference.span.kind", "AGENT")
answer, current_memory = conversational_agent.get_result(
query, language, voice, memory, app_id, thread_id, tracer)
elif type == 'Chain':
span.set_attribute("openinference.span.kind", "CHAIN")
answer, current_memory = conversational_chain.get_result(
query, language, voice, image_file_path, memory, app_id, thread_id, tracer)
span.set_status(Status(StatusCode.OK))
span.set_attribute("output.value", answer["answer"])
return Response(answer, status=status.HTTP_200_OK)
except Exception as e:
[logging.info](http://logging.info)("Exception occurred while trying to call get result method : %s", e)
answer = {
"answer": "Query overload in LLM. Please try again shortly",
"type": "doc",
"documents": ['']
}
span.set_status(Status(StatusCode.ERROR))
return Response(answer, status=status.HTTP_200_OK)LBKS```
This code will add the thread_id to the project_names list as soon as it is encountered, ensuring that it is recognized as a project name.

Would you like more details on any specific part?

Sources:
- Slack Conversation
💡 Hint: Mention RunLLM in the thread for follow-ups.
