hey, ya'll - currently working through the quickstart for getting our nodejs backend all set up for traces. OpenAI was 馃嵃 , but setting Anthropic up is proving to be much more finnicky - any guidance on how to structure the calls and objects would be very much appreciated. ATM, the only metadata coming across is Name, Start Time, Status
Hey Aman! the anthropic instrumentation I'm attempting is manual - prior to adding this in, nothing was making it to the arize LLM UI when I was testing out with anthropic calls locally
import Anthropic from "@anthropic-ai/sdk";
import { context, trace, SpanKind, SpanStatusCode } from "@opentelemetry/api";
import LLMConfigService, { ModelConfig } from "../services/llmConfig";
...
class AnthropicClient {
...
// arize instrumentation
const tracer = trace.getTracer("aarden-be");
return await context.with(
trace.setSpan(
context.active(),
tracer.startSpan("anthropic.messages.create", { kind: SpanKind.CLIENT })
),
async () => {
const span = trace.getSpan(context.active());
try {
span?.setAttributes({
// OpenTelemetry Generative AI semantic conventions
"gen_ai.system": "anthropic",
"gen_ai.request.model": modelConfig.id,
"gen_ai.request.max_output_tokens": modelConfig.maxTokensOutput,
"gen_ai.request.temperature": DEFAULT_TEMPERATURE,
});
const response = await this.anthropic.messages.create({
model: modelConfig.id,
max_tokens: modelConfig.maxTokensOutput,
temperature: DEFAULT_TEMPERATURE,
stream: false,
system: prompt,
messages: conversation,
});
const { input_tokens, output_tokens } = response.usage;
const totalTokens = input_tokens + output_tokens;
const estimatedCostDollars =
(input_tokens * modelConfig.costPer1MInputTokens) / 1000000 +
(output_tokens * modelConfig.costPer1MOutputTokens) / 1000000;
const inputPreview = query.slice(0, 2000);
const outputPreview = response.content
.map((c) => ("text" in c ? c.text : ""))
.join("")
.slice(0, 4000);
span?.setAttributes({
"gen_ai.usage.input_tokens": input_tokens,
"gen_ai.usage.output_tokens": output_tokens,
"gen_ai.usage.total_tokens": totalTokens,
"gen_ai.cost.usd": Number(estimatedCostDollars.toFixed(6)),
});
span?.setAttributes({
"openinference.span.kind": "llm",
"openinference.model_name": modelConfig.id,
"openinference.model_provider": "anthropic",
"openinference.input": inputPreview,
"openinference.output": outputPreview,
"openinference.usage.input_tokens": input_tokens,
"openinference.usage.output_tokens": output_tokens,
"openinference.usage.prompt_tokens": input_tokens,
"openinference.usage.completion_tokens": output_tokens,
"openinference.usage.total_tokens": totalTokens,
});
span?.setAttributes({
"llm.input": inputPreview,
"llm.output": outputPreview,
"llm.model_name": modelConfig.id,
"llm.model_provider": "anthropic",
"llm.usage.prompt_tokens": input_tokens,
"llm.usage.completion_tokens": output_tokens,
"llm.usage.total_tokens": totalTokens,
});
span?.setAttributes({
"usage.input_tokens": input_tokens,
"usage.output_tokens": output_tokens,
"usage.total_tokens": totalTokens,
});
span?.addEvent("openinference.input", {
"openinference.input.value": inputPreview,
});
span?.addEvent("openinference.output", {
"openinference.output.value": outputPreview,
});
span?.setStatus({ code: SpanStatusCode.OK });
...
} catch (err: any) {
span?.recordException(err);
span?.setStatus({
code: SpanStatusCode.ERROR,
message: err?.message || String(err),
});
} finally {
span?.end();
}
}
);
}
}
export default new AnthropicClient();I'm just trying to dump anything at all to the span - these fields are all populated locally
amazing, thank you! I'll give this a try.
