diff --git a/llm/src/main/java/com/instana/dc/llm/impl/llm/LLMDc.java b/llm/src/main/java/com/instana/dc/llm/impl/llm/LLMDc.java
index 91428d5..648446f 100644
--- a/llm/src/main/java/com/instana/dc/llm/impl/llm/LLMDc.java
+++ b/llm/src/main/java/com/instana/dc/llm/impl/llm/LLMDc.java
@@ -149,6 +149,7 @@ public void resetMetrics() {
 
     public LLMDc(Map<String, Object> properties, CustomDcConfig cdcConfig) throws Exception {
         super(properties, cdcConfig);
+        logLLMSpecificConfig(properties);
         watsonxPricePromptTokens = (Double) properties.getOrDefault(WATSONX_PRICE_PROMPT_TOKES_PER_KILO, 0.0);
         watsonxPriceCompleteTokens = (Double) properties.getOrDefault(WATSONX_PRICE_COMPLETE_TOKES_PER_KILO, 0.0);
         openaiPricePromptTokens = (Double) properties.getOrDefault(OPENAI_PRICE_PROMPT_TOKES_PER_KILO, 0.0);
@@ -158,6 +159,19 @@ public LLMDc(Map<String, Object> properties, CustomDcConfig cdcConfig) throws Ex
         listenPort = (int) properties.getOrDefault(SERVICE_LISTEN_PORT, 8000);
     }
 
+    private void logLLMSpecificConfig(Map<String, Object> properties) {
+        logger.info("LLM Specific Configuration:");
+        logger.info("  OPENAI_PRICE_PROMPT_TOKES_PER_KILO: " +
+                properties.getOrDefault(OPENAI_PRICE_PROMPT_TOKES_PER_KILO, "Not set"));
+        logger.info("  OPENAI_PRICE_COMPLETE_TOKES_PER_KILO: " +
+                properties.getOrDefault(OPENAI_PRICE_COMPLETE_TOKES_PER_KILO, "Not set"));
+        // Log other related configuration values here as well
+        Double promptPrice = (Double) properties.getOrDefault("openai.price.prompt.tokens.per.kilo", 0.0);
+        logger.info("Prompt price per kilo tokens: " + promptPrice);
+
+
+    }
+
     @Override
     public void initOnce() throws ClassNotFoundException {
         var server = Server.builder()
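Reviewer note on the hunk above: the new logLLMSpecificConfig helper and the surrounding constructor lines read prices via a hard (Double) cast on getOrDefault, which throws ClassCastException if the configuration loader yields a boxed Integer (e.g., a price written as 0 instead of 0.0). A minimal defensive sketch; the getDoubleProperty helper below is hypothetical and not part of this patch:

    // Hypothetical helper (not in this patch): tolerate Integer/Long/Double/String
    // values coming out of the properties map instead of hard-casting to Double.
    private static double getDoubleProperty(Map<String, Object> properties, String key, double fallback) {
        Object value = properties.get(key);
        if (value instanceof Number) {
            return ((Number) value).doubleValue(); // covers Integer, Long, Double
        }
        if (value instanceof String) {
            try {
                return Double.parseDouble((String) value);
            } catch (NumberFormatException ignored) {
                // fall through to the default below
            }
        }
        return fallback;
    }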
@@ -194,61 +208,43 @@ public void registerMetrics() {
     @Override
     public void collectData() {
         logger.info("Start to collect metrics");
-
+
         for(Map.Entry<String, ModelAggregation> entry : modelAggrMap.entrySet()){
             ModelAggregation aggr = entry.getValue();
             aggr.resetMetrics();
         }
-
+
         List<OtelMetric> otelMetrics = metricsCollector.getMetrics();
+        logger.info("Number of metrics received: " + otelMetrics.size());
         metricsCollector.clearMetrics();
+
         for (OtelMetric metric : otelMetrics) {
             try {
-                double duration = metric.getDuration();
-                if(duration == 0.0) {
-                    continue;
-                }
                 String modelId = metric.getModelId();
                 String aiSystem = metric.getAiSystem();
+                long promptTokens = metric.getPromtTokens();
+                long completeTokens = metric.getCompleteTokens();
+                double duration = metric.getDuration();
                 long requestCount = metric.getReqCount();
-
-                ModelAggregation modelAggr = modelAggrMap.get(modelId);
-                if (modelAggr == null) {
-                    modelAggr = new ModelAggregation(modelId, aiSystem);
-                    modelAggrMap.put(modelId, modelAggr);
-                }
+
+                logger.info("Processing metric - Model: " + modelId + ", AI System: " + aiSystem +
+                        ", Prompt Tokens: " + promptTokens + ", Complete Tokens: " + completeTokens +
+                        ", Duration: " + duration + ", Request Count: " + requestCount);
+
+                ModelAggregation modelAggr = modelAggrMap.computeIfAbsent(modelId, k -> new ModelAggregation(modelId, aiSystem));
                 modelAggr.addDeltaDuration((long)(duration*1000), requestCount);
                 modelAggr.addDeltaReqCount(requestCount);
+                modelAggr.addDeltaPromptTokens(promptTokens, requestCount);
+                modelAggr.addDeltaCompleteTokens(completeTokens, requestCount);
+
+                logger.info("After aggregation - Delta Prompt Tokens: " + modelAggr.getDeltaPromptTokens() +
+                        ", Delta Complete Tokens: " + modelAggr.getDeltaCompleteTokens());
             } catch (Exception e) {
+                logger.severe("Error processing metric: " + e.getMessage());
                 e.printStackTrace();
             }
         }
-        for (OtelMetric metric : otelMetrics) {
-            try {
-                String modelId = metric.getModelId();
-                String aiSystem = metric.getAiSystem();
-                long promptTokens = metric.getPromtTokens();
-                long completeTokens = metric.getCompleteTokens();
-                if(promptTokens == 0 && completeTokens == 0) {
-                    continue;
-                }
-                ModelAggregation modelAggr = modelAggrMap.get(modelId);
-                if (modelAggr == null) {
-                    modelAggr = new ModelAggregation(modelId, aiSystem);
-                    modelAggrMap.put(modelId, modelAggr);
-                }
-                long currentReqCount = modelAggr.getCurrentReqCount();
-                if(promptTokens > 0) {
-                    modelAggr.addDeltaPromptTokens(promptTokens, currentReqCount);
-                }
-                if(completeTokens > 0) {
-                    modelAggr.addDeltaCompleteTokens(completeTokens, currentReqCount);
-                }
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-        }
-
+
         logger.info("-----------------------------------------");
         for(Map.Entry<String, ModelAggregation> entry : modelAggrMap.entrySet()){
             ModelAggregation aggr = entry.getValue();
@@ -259,31 +255,22 @@ public void collectData() {
             long deltaPromptTokens = aggr.getDeltaPromptTokens();
             long deltaCompleteTokens = aggr.getDeltaCompleteTokens();
             long maxDuration = aggr.getMaxDuration();
-
-            long avgDuration = deltaDuration/(deltaRequestCount==0?1:deltaRequestCount);
+
+            long avgDuration = deltaRequestCount == 0 ? 0 : deltaDuration / deltaRequestCount;
             if(avgDuration > maxDuration) {
                 maxDuration = avgDuration;
                 aggr.setMaxDuration(maxDuration);
            }
-
+
             int intervalSeconds = LLM_POLL_INTERVAL;
             String agentLess = System.getenv("AGENTLESS_MODE_ENABLED");
             if (agentLess != null) {
                 intervalSeconds = 1;
             }
-
-            double pricePromptTokens = 0.0;
-            double priceCompleteTokens = 0.0;
-            if (aiSystem.compareTo("watsonx") == 0) {
-                pricePromptTokens = watsonxPricePromptTokens;
-                priceCompleteTokens = watsonxPriceCompleteTokens;
-            } else if (aiSystem.compareTo("openai") == 0) {
-                pricePromptTokens = openaiPricePromptTokens;
-                priceCompleteTokens = openaiPriceCompleteTokens;
-            } else if (aiSystem.compareTo("anthropic") == 0) {
-                pricePromptTokens = anthropicPricePromptTokens;
-                priceCompleteTokens = anthropicPriceCompleteTokens;
-            }
+
+            double pricePromptTokens = getPricePromptTokens(aiSystem);
+            double priceCompleteTokens = getPriceCompleteTokens(aiSystem);
+
             double intervalReqCount = (double)deltaRequestCount/intervalSeconds;
             double intervalPromptTokens = (double)deltaPromptTokens/intervalSeconds;
             double intervalCompleteTokens = (double)deltaCompleteTokens/intervalSeconds;
@@ -291,8 +278,7 @@ public void collectData() {
             double intervalPromptCost = (intervalPromptTokens/1000) * pricePromptTokens;
             double intervalCompleteCost = (intervalCompleteTokens/1000) * priceCompleteTokens;
             double intervalTotalCost = intervalPromptCost + intervalCompleteCost;
-            aggr.resetMetrics();
-
+
             logger.info("ModelId          : " + modelId);
             logger.info("AiSystem         : " + aiSystem);
             logger.info("AvgDuration      : " + avgDuration);
@@ -300,7 +286,7 @@ public void collectData() {
             logger.info("IntervalTokens   : " + intervalTotalTokens);
             logger.info("IntervalCost     : " + intervalTotalCost);
             logger.info("IntervalRequest  : " + intervalReqCount);
-
+
             Map<String, Object> attributes = new HashMap<>();
             attributes.put("model_id", modelId);
             attributes.put("ai_system", aiSystem);
@@ -310,7 +296,28 @@ public void collectData() {
             getRawMetric(LLM_COST_NAME).getDataPoint(modelId).setValue(intervalTotalCost, attributes);
             getRawMetric(LLM_TOKEN_NAME).getDataPoint(modelId).setValue(intervalTotalTokens, attributes);
             getRawMetric(LLM_REQ_COUNT_NAME).getDataPoint(modelId).setValue(intervalReqCount, attributes);
+
+            aggr.resetMetrics();
         }
         logger.info("-----------------------------------------");
     }
+
+
+    private double getPricePromptTokens(String aiSystem) {
+        switch (aiSystem) {
+            case "watsonx": return watsonxPricePromptTokens;
+            case "openai": return openaiPricePromptTokens;
+            case "anthropic": return anthropicPricePromptTokens;
+            default: return 0.0;
+        }
+    }
+
+    private double getPriceCompleteTokens(String aiSystem) {
+        switch (aiSystem) {
+            case "watsonx": return watsonxPriceCompleteTokens;
+            case "openai": return openaiPriceCompleteTokens;
+            case "anthropic": return anthropicPriceCompleteTokens;
+            default: return 0.0;
+        }
+    }
 }
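Reviewer note: the per-interval numbers collectData now reports reduce to the arithmetic below. The helper and the sample figures are illustrative only (prices are configured per 1,000 tokens; intervalSeconds is LLM_POLL_INTERVAL, or 1 when AGENTLESS_MODE_ENABLED is set):

    // Illustrative only: the same rate/cost math collectData applies per model.
    static double intervalCost(long deltaTokens, int intervalSeconds, double pricePerKiloTokens) {
        double tokensPerSecond = (double) deltaTokens / intervalSeconds;   // e.g. 3000 tokens / 10 s = 300
        return (tokensPerSecond / 1000) * pricePerKiloTokens;              // 0.3 kilo-tokens/s * 0.03 = 0.009
    }

The value written to LLM_COST_NAME is the prompt share plus the completion share computed this way, and LLM_TOKEN_NAME / LLM_REQ_COUNT_NAME are the corresponding per-second token and request rates.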
diff --git a/llm/src/main/java/com/instana/dc/llm/impl/llm/MetricsCollectorService.java b/llm/src/main/java/com/instana/dc/llm/impl/llm/MetricsCollectorService.java
index 3ba3b18..e60df99 100644
--- a/llm/src/main/java/com/instana/dc/llm/impl/llm/MetricsCollectorService.java
+++ b/llm/src/main/java/com/instana/dc/llm/impl/llm/MetricsCollectorService.java
@@ -101,10 +101,8 @@ public void export(
         logger.info("--------------------------------------------------------");
 
         synchronized (mutex) {
-
             List<ResourceMetrics> allResourceMetrics = request.getResourceMetricsList();
             for (ResourceMetrics resourceMetrics : allResourceMetrics) {
-
                 Resource resource = resourceMetrics.getResource();
                 for (KeyValue reskv : resource.getAttributesList()) {
                     logger.info("Received metric --- Resource attrKey: " + reskv.getKey());
@@ -113,13 +111,14 @@ public void export(
 
                 for (ScopeMetrics scoMetrics : resourceMetrics.getScopeMetricsList()) {
                     InstrumentationScope instrumentationScope = scoMetrics.getScope();
-                    instrumentationScope.getAttributesList();
                     for (KeyValue inskv : instrumentationScope.getAttributesList()) {
                         logger.info("Received metric --- Scope attrKey: " + inskv.getKey());
                         logger.info("Received metric --- Scope attrVal: " + inskv.getValue().getStringValue());
                     }
 
                     for (Metric metric : scoMetrics.getMetricsList()) {
+                        logger.info("Processing metric: " + metric.getName());
+                        logger.info("Metric data case: " + metric.getDataCase());
                         logger.info("Received metric --- Scope Name: " + metric.getName());
                         logger.info("Received metric --- Scope Desc: " + metric.getDescription());
                         logger.info("Received metric --- Scope Unit: " + metric.getUnit());
@@ -127,114 +126,21 @@ public void export(
 
                         switch (metric.getDataCase()) {
                             case SUM:
-                                if (metric.getName().compareTo("llm.watsonx.completions.tokens") == 0 ||
-                                        metric.getName().compareTo("llm.openai.chat_completions.tokens") == 0 ||
-                                        metric.getName().compareTo("llm.anthropic.completion.tokens") == 0 ||
-                                        metric.getName().compareTo("gen_ai.client.token.usage") == 0) {
-
-                                    List<NumberDataPoint> sumDataPoints = metric.getSum().getDataPointsList();
-                                    for (NumberDataPoint dataPoint : sumDataPoints) {
-
-                                        List<KeyValue> kvList = dataPoint.getAttributesList();
-
-                                        String modelId = "";
-                                        String tokenType = "";
-                                        String aiSystem = "";
-                                        for (KeyValue kv : kvList) {
-                                            logger.info("Received metric --- Tokens attrKey: " + kv.getKey());
-                                            logger.info("Received metric --- Tokens attrVal: "
-                                                    + kv.getValue().getStringValue());
-                                            if (kv.getKey().compareTo("llm.response.model") == 0 || kv.getKey().compareTo("gen_ai.response.model") == 0) {
-                                                modelId = kv.getValue().getStringValue();
-                                            } else if (kv.getKey().compareTo("llm.usage.token_type") == 0 || kv.getKey().compareTo("gen_ai.token.type") == 0) {
-                                                tokenType = kv.getValue().getStringValue();
-                                            } else if (kv.getKey().compareTo("gen_ai.system") == 0) {
-                                                aiSystem = kv.getValue().getStringValue();
-                                            }
-                                        }
-                                        if (aiSystem.isEmpty() && metric.getName().compareTo("gen_ai.client.token.usage") != 0) {
-                                            String[] parts = metric.getName().split("\\.", 3);
-                                            aiSystem = parts[1];
-                                        } else {
-                                            aiSystem = "n/a";
-                                        }
-
-                                        long promptTokens = 0;
-                                        long completeTokens = 0;
-                                        if (tokenType.compareTo("prompt") == 0 || tokenType.compareTo("input") == 0) {
-                                            promptTokens = dataPoint.getAsInt();
-                                            logger.info("Received metric --- Prompt Value: " + promptTokens);
-                                        } else if (tokenType.compareTo("completion") == 0 || tokenType.compareTo("output") == 0) {
-                                            completeTokens = dataPoint.getAsInt();
-                                            logger.info("Received metric --- Complete Value: " + completeTokens);
-                                        }
-
-                                        if (!modelId.isEmpty()) {
-                                            OtelMetric otelMetric = new OtelMetric();
-                                            otelMetric.setModelId(modelId);
-                                            otelMetric.setAiSystem(aiSystem);
-                                            if(promptTokens > 0) {
-                                                otelMetric.setPromptTokens(promptTokens);
-                                            }
-                                            if(completeTokens > 0) {
-                                                otelMetric.setCompleteTokens(completeTokens);
-                                            }
-                                            exportMetrics.add(otelMetric);
-                                        }
-                                    }
-                                }
+                                processSumMetric(metric);
+                                break;
 
                             case HISTOGRAM:
                                 if (metric.getName().compareTo("llm.watsonx.completions.duration") == 0 ||
                                         metric.getName().compareTo("llm.openai.chat_completions.duration") == 0 ||
                                         metric.getName().compareTo("llm.anthropic.completion.duration") == 0 ||
                                         metric.getName().compareTo("gen_ai.client.operation.duration") == 0) {
-
-                                    List<HistogramDataPoint> histDataPoints = metric.getHistogram().getDataPointsList();
-                                    for (HistogramDataPoint dataPoint : histDataPoints) {
-
-                                        List<KeyValue> kvList = dataPoint.getAttributesList();
-
-                                        String modelId = "";
-                                        String aiSystem = "";
-                                        for (KeyValue kv : kvList) {
-                                            logger.info("Received metric --- Duration attrKey: " + kv.getKey());
-                                            logger.info("Received metric --- Duration attrVal: "
-                                                    + kv.getValue().getStringValue());
-                                            if (kv.getKey().compareTo("llm.response.model") == 0 || kv.getKey().compareTo("gen_ai.response.model") == 0) {
-                                                modelId = kv.getValue().getStringValue();
-                                            } else if (kv.getKey().compareTo("gen_ai.system") == 0) {
-                                                aiSystem = kv.getValue().getStringValue();
-                                            }
-                                        }
-                                        if (aiSystem.isEmpty() && metric.getName().compareTo("gen_ai.client.token.usage") != 0) {
-                                            String[] parts = metric.getName().split("\\.", 3);
-                                            aiSystem = parts[1];
-                                        } else {
-                                            aiSystem = "n/a";
-                                        }
-
-                                        Double durationSum = dataPoint.getSum();
-                                        long requestCount = dataPoint.getCount();
-                                        logger.info("Received metric --- Duration Sum Value: " + durationSum);
-                                        logger.info("Received metric --- Duration Count Value: " + requestCount);
-
-                                        if (!modelId.isEmpty()) {
-                                            OtelMetric otelMetric = new OtelMetric();
-                                            otelMetric.setModelId(modelId);
-                                            otelMetric.setAiSystem(aiSystem);
-                                            otelMetric.setDuration(durationSum);
-                                            otelMetric.setReqCount(requestCount);
-                                            exportMetrics.add(otelMetric);
-                                        }
-                                    }
+                                    processHistogramMetric(metric);
                                 }
                                 break;
                             case GAUGE:
                             case SUMMARY:
                             default:
                                 logger.info("Unsupported metric DataCase: " + metric.getDataCase());
-                                throw new AssertionError("Unsupported metric DataCase: " + metric.getDataCase());
                         }
                     }
                 }
@@ -244,4 +150,109 @@ public void export(
         responseObserver.onNext(ExportMetricsServiceResponse.getDefaultInstance());
         responseObserver.onCompleted();
     }
+
+    private void processSumMetric(Metric metric) {
+        logger.info("Processing Sum Metric: " + metric.getName());
+        List<NumberDataPoint> sumDataPoints = metric.getSum().getDataPointsList();
+        for (NumberDataPoint dataPoint : sumDataPoints) {
+            List<KeyValue> kvList = dataPoint.getAttributesList();
+
+            String modelId = "";
+            String tokenType = "";
+            String aiSystem = "";
+            for (KeyValue kv : kvList) {
+                logger.info("Attribute - Key: " + kv.getKey() + ", Value: " + kv.getValue().getStringValue());
+                if (kv.getKey().compareTo("llm.response.model") == 0 || kv.getKey().compareTo("gen_ai.response.model") == 0) {
+                    modelId = kv.getValue().getStringValue();
+                } else if (kv.getKey().compareTo("llm.usage.token_type") == 0 || kv.getKey().compareTo("gen_ai.token.type") == 0) {
+                    tokenType = kv.getValue().getStringValue();
+                } else if (kv.getKey().compareTo("gen_ai.system") == 0) {
+                    aiSystem = kv.getValue().getStringValue();
+                }
+            }
+
+            aiSystem = inferAiSystem(metric.getName(), aiSystem);
+
+            long tokens = dataPoint.getAsInt();
+            logger.info("Tokens: " + tokens + ", ModelId: " + modelId + ", TokenType: " + tokenType + ", AISystem: " + aiSystem);
+
+            if (!modelId.isEmpty()) {
+                OtelMetric otelMetric = new OtelMetric();
+                otelMetric.setModelId(modelId);
+                otelMetric.setAiSystem(aiSystem);
+                if (tokenType.compareTo("prompt") == 0 || tokenType.compareTo("input") == 0) {
+                    otelMetric.setPromptTokens(tokens);
+                    logger.info("Added prompt tokens: " + tokens);
+                } else if (tokenType.compareTo("completion") == 0 || tokenType.compareTo("output") == 0) {
+                    otelMetric.setCompleteTokens(tokens);
+                    logger.info("Added completion tokens: " + tokens);
+                } else {
+                    // If token type is not specified, assume it's the total
+                    otelMetric.setPromptTokens(tokens);
+                    otelMetric.setCompleteTokens(tokens);
+                    logger.info("Added total tokens: " + tokens);
+                }
+                exportMetrics.add(otelMetric);
+                logger.info("Added metric - Model: " + modelId + ", AI System: " + aiSystem +
+                        ", Prompt Tokens: " + otelMetric.getPromtTokens() + ", Complete Tokens: " + otelMetric.getCompleteTokens());
+            } else {
+                logger.warning("ModelId is empty. Skipping metric.");
+            }
+        }
+    }
+
+
+    private void processHistogramMetric(Metric metric) {
+        List<HistogramDataPoint> histDataPoints = metric.getHistogram().getDataPointsList();
+        for (HistogramDataPoint dataPoint : histDataPoints) {
+            List<KeyValue> kvList = dataPoint.getAttributesList();
+
+            String modelId = "";
+            String aiSystem = "";
+            for (KeyValue kv : kvList) {
+                logger.info("Received metric --- Duration attrKey: " + kv.getKey());
+                logger.info("Received metric --- Duration attrVal: " + kv.getValue().getStringValue());
+                if (kv.getKey().compareTo("llm.response.model") == 0 || kv.getKey().compareTo("gen_ai.response.model") == 0) {
+                    modelId = kv.getValue().getStringValue();
+                } else if (kv.getKey().compareTo("gen_ai.system") == 0) {
+                    aiSystem = kv.getValue().getStringValue();
+                }
+            }
+
+            aiSystem = inferAiSystem(metric.getName(), aiSystem);
+
+            Double durationSum = dataPoint.getSum();
+            long requestCount = dataPoint.getCount();
+            logger.info("Received metric --- Duration Sum Value: " + durationSum);
+            logger.info("Received metric --- Duration Count Value: " + requestCount);
+
+            if (!modelId.isEmpty()) {
+                OtelMetric otelMetric = new OtelMetric();
+                otelMetric.setModelId(modelId);
+                otelMetric.setAiSystem(aiSystem);
+                otelMetric.setDuration(durationSum);
+                otelMetric.setReqCount(requestCount);
+                exportMetrics.add(otelMetric);
+                logger.info("Added metric - Model: " + modelId + ", AI System: " + aiSystem +
+                        ", Duration: " + durationSum + ", Request Count: " + requestCount);
+            }
+        }
+    }
+
+    private String inferAiSystem(String metricName, String aiSystem) {
+        if (aiSystem == null || aiSystem.isEmpty() || aiSystem.equals("n/a")) {
+            if (metricName.startsWith("gen_ai.")) {
+                return "openai";
+            } else if (metricName.startsWith("llm.")) {
+                String[] parts = metricName.split("\\.", 3);
+                if (parts.length > 1) {
+                    return parts[1];
+                }
+            }
+
+            return "unknown";
+        }
+
+        return aiSystem;
+    }
 }
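Reviewer note: a quick sanity check of the inferAiSystem fallback added above. An explicit gen_ai.system attribute always wins; otherwise gen_ai.* metric names are assumed to be OpenAI, llm.<vendor>.* names yield the vendor segment, and anything else maps to "unknown". The standalone class below only copies the patch's logic so the behaviour can be exercised in isolation; the class name and sample metric names are illustrative:

    // Standalone sketch that mirrors the inferAiSystem logic from this patch.
    public class InferAiSystemCheck {
        static String inferAiSystem(String metricName, String aiSystem) {
            if (aiSystem == null || aiSystem.isEmpty() || aiSystem.equals("n/a")) {
                if (metricName.startsWith("gen_ai.")) {
                    return "openai";
                } else if (metricName.startsWith("llm.")) {
                    String[] parts = metricName.split("\\.", 3);
                    if (parts.length > 1) {
                        return parts[1];
                    }
                }
                return "unknown";
            }
            return aiSystem;
        }

        public static void main(String[] args) {
            System.out.println(inferAiSystem("gen_ai.client.token.usage", ""));          // openai
            System.out.println(inferAiSystem("llm.watsonx.completions.tokens", "n/a"));  // watsonx
            System.out.println(inferAiSystem("gen_ai.client.token.usage", "anthropic")); // anthropic
            System.out.println(inferAiSystem("some.other.metric", ""));                  // unknown
        }
    }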