I've copy-pasted the Arize space ID directly from your portal, but still nothing happens when running this:
import os
from langchain_openai import ChatOpenAI
from arize.otel import register, Transport
from openinference.instrumentation.langchain import LangChainInstrumentor
os.environ["LANGCHAIN_TRACING_V2"] = "false"
os.environ["LANGSMITH_TRACING"] = "false"
os.environ["LANGCHAIN_API_KEY"] = "false"
# Sanity-check creds exist
assert os.getenv("ARIZE_SPACE_ID") and os.getenv("ARIZE_API_KEY")
tracer_provider = register(
    space_id=os.getenv("ARIZE_SPACE_ID"),
    api_key=os.getenv("ARIZE_API_KEY"),
    project_name="litmus",
    # endpoint="https://otlp.arize.com/v1/traces",
    # transport=Transport.HTTP,
    log_to_console=True,
)
LangChainInstrumentor().instrument(tracer_provider=tracer_provider)
llm = ChatOpenAI(model="gpt-4.1")
response = llm.invoke("This is a test")
Instead, the only thing that shows up in the logs is: Failed to export traces to otlp.arize.com, error code: StatusCode.INTERNAL
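For what it's worth, the next thing I was going to try is switching from the default gRPC exporter to HTTP, using the endpoint/transport options that are commented out above (I'm assuming register accepts them the way they're shown there), in case the INTERNAL error is specific to the gRPC path:

# Same setup as above, but forcing the HTTP OTLP endpoint instead of gRPC
tracer_provider = register(
    space_id=os.getenv("ARIZE_SPACE_ID"),
    api_key=os.getenv("ARIZE_API_KEY"),
    project_name="litmus",
    endpoint="https://otlp.arize.com/v1/traces",
    transport=Transport.HTTP,
    log_to_console=True,
)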
Let me know if anyone can jump on a quick call to debug this.