Commit

Updates all GenAI insights to filter to those created by automatic LLM instrumentation
elijahbenizzy committed Aug 28, 2024
1 parent b9af8b0 commit 32a2347
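
For context (not part of the commit itself): each GenAI insight in InsightsView.tsx previously matched any attribute whose key ended with prompt_tokens or completion_tokens; after this change it also requires the gen_ai key prefix, so only attributes emitted by automatic LLM instrumentation (keys shaped like gen_ai.usage.prompt_tokens) are counted. Below is a minimal TypeScript sketch of that predicate; the helper name and the reduced AttributeModel shape are illustrative assumptions, not code from this commit.

// Illustrative sketch only; it mirrors the predicate this commit repeats inline.
// The helper name is hypothetical, and AttributeModel is reduced to the fields used here.
type AttributeModel = { key: string; value?: unknown; span_id?: string };

const isAutoInstrumentedTokenCount = (attribute: AttributeModel, suffix: string): boolean =>
  attribute.key.endsWith(suffix) && attribute.key.startsWith('gen_ai');

// Example: totaling prompt tokens the same way the updated RenderInsightValue does.
const totalPromptTokens = (attributes: AttributeModel[]): number =>
  attributes
    .filter((attribute) => isAutoInstrumentedTokenCount(attribute, 'prompt_tokens'))
    .reduce((sum, attribute) => sum + (attribute.value as number), 0);
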
Showing 1 changed file with 20 additions and 8 deletions.
28 changes: 20 additions & 8 deletions telemetry/ui/src/components/routes/app/InsightsView.tsx
@@ -54,20 +54,24 @@ const REGISTERED_INSIGHTS: Insight[] = [
{
category: 'llm',
hasInsight: (allAttributes) => {
-return allAttributes.some((attribute) => attribute.key.endsWith('prompt_tokens'));
+return allAttributes.some(
+  (attribute) => attribute.key.endsWith('prompt_tokens') && attribute.key.startsWith('gen_ai')
+);
},
insightName: 'Total Prompt Tokens',
RenderInsightValue: (props) => {
let totalPromptTokens = 0;
props.attributes.forEach((attribute) => {
-if (attribute.key.endsWith('prompt_tokens')) {
+if (attribute.key.endsWith('prompt_tokens') && attribute.key.startsWith('gen_ai')) {
totalPromptTokens += attribute.value as number;
}
});
return <p>{totalPromptTokens}</p>;
},
captureIndividualValues: (allAttributes) => {
-return allAttributes.filter((attribute) => attribute.key.endsWith('prompt_tokens'));
+return allAttributes.filter(
+  (attribute) => attribute.key.endsWith('prompt_tokens') && attribute.key.startsWith('gen_ai')
+);
},
RenderIndividualValue: (props: { attribute: AttributeModel }) => {
return <p>{props.attribute.value?.toString()}</p>;
@@ -76,7 +80,10 @@ const REGISTERED_INSIGHTS: Insight[] = [
{
category: 'llm',
hasInsight: (allAttributes) => {
-return allAttributes.some((attribute) => attribute.key.endsWith('completion_tokens'));
+return allAttributes.some(
+  (attribute) =>
+    attribute.key.endsWith('completion_tokens') && attribute.key.startsWith('gen_ai')
+);
},
insightName: 'Total Completion Tokens',
RenderInsightValue: (props) => {
@@ -89,7 +96,10 @@ const REGISTERED_INSIGHTS: Insight[] = [
return <p>{totalCompletionTokens}</p>;
},
captureIndividualValues: (allAttributes) => {
-return allAttributes.filter((attribute) => attribute.key.endsWith('completion_tokens'));
+return allAttributes.filter(
+  (attribute) =>
+    attribute.key.endsWith('completion_tokens') && attribute.key.startsWith('gen_ai')
+);
},
RenderIndividualValue: (props: { attribute: AttributeModel }) => {
return <p>{props.attribute.value?.toString()}</p>;
@@ -98,13 +108,15 @@ const REGISTERED_INSIGHTS: Insight[] = [
{
category: 'llm',
hasInsight: (allAttributes) => {
-return allAttributes.some((attribute) => attribute.key.endsWith('prompt_tokens'));
+return allAttributes.some(
+  (attribute) => attribute.key.endsWith('prompt_tokens') && attribute.key.startsWith('gen_ai')
+);
},
insightName: 'Total LLM Calls',
RenderInsightValue: (props) => {
let totalLLMCalls = 0;
props.attributes.forEach((attribute) => {
-if (attribute.key.endsWith('prompt_tokens')) {
+if (attribute.key.endsWith('prompt_tokens') && attribute.key.startsWith('gen_ai')) {
totalLLMCalls += 1;
}
});
@@ -113,7 +125,7 @@ const REGISTERED_INSIGHTS: Insight[] = [
captureIndividualValues: (allAttributes) => {
const spanIDToLLMCalls = new Map<string, number>();
allAttributes.forEach((attribute) => {
-if (attribute.key.endsWith('prompt_tokens')) {
+if (attribute.key.endsWith('prompt_tokens') && attribute.key.startsWith('gen_ai')) {
spanIDToLLMCalls.set(
attribute.span_id || '',
(spanIDToLLMCalls.get(attribute.span_id || '') || 0) + 1
