diff --git a/search-processors/src/main/java/org/opensearch/searchpipelines/questionanswering/generative/GenerativeQAResponseProcessor.java b/search-processors/src/main/java/org/opensearch/searchpipelines/questionanswering/generative/GenerativeQAResponseProcessor.java
index 09d9b0d896..67edb02c87 100644
--- a/search-processors/src/main/java/org/opensearch/searchpipelines/questionanswering/generative/GenerativeQAResponseProcessor.java
+++ b/search-processors/src/main/java/org/opensearch/searchpipelines/questionanswering/generative/GenerativeQAResponseProcessor.java
@@ -126,7 +126,8 @@ public SearchResponse processResponse(SearchRequest request, SearchResponse resp
             throw new IllegalArgumentException("llm_model cannot be null.");
         }
         String conversationId = params.getConversationId();
-        log.info("LLM question: {}, LLM model {}, conversation id: {}", llmQuestion, llmModel, conversationId);
+
+        log.info("LLM model {}, conversation id: {}", llmModel, conversationId);
         Instant start = Instant.now();
         Integer interactionSize = params.getInteractionSize();
         if (interactionSize == null || interactionSize == GenerativeQAParameters.SIZE_NULL_VALUE) {
diff --git a/search-processors/src/main/java/org/opensearch/searchpipelines/questionanswering/generative/llm/DefaultLlmImpl.java b/search-processors/src/main/java/org/opensearch/searchpipelines/questionanswering/generative/llm/DefaultLlmImpl.java
index 9fbb96a1b7..0001d0c832 100644
--- a/search-processors/src/main/java/org/opensearch/searchpipelines/questionanswering/generative/llm/DefaultLlmImpl.java
+++ b/search-processors/src/main/java/org/opensearch/searchpipelines/questionanswering/generative/llm/DefaultLlmImpl.java
@@ -83,7 +83,7 @@ public ChatCompletionOutput doChatCompletion(ChatCompletionInput chatCompletionI
 
         // Response from a remote model
         Map dataAsMap = modelOutput.getMlModelOutputs().get(0).getMlModelTensors().get(0).getDataAsMap();
-        log.info("dataAsMap: {}", dataAsMap.toString());
+        // log.info("dataAsMap: {}", dataAsMap.toString());
 
         // TODO dataAsMap can be null or can contain information such as throttling. Handle non-happy cases.
 
@@ -104,7 +104,7 @@ protected Map getInputParameters(ChatCompletionInput chatComplet
                 chatCompletionInput.getContexts()
             );
             inputParameters.put(CONNECTOR_INPUT_PARAMETER_MESSAGES, messages);
-            log.info("Messages to LLM: {}", messages);
+            // log.info("Messages to LLM: {}", messages);
         } else if (chatCompletionInput.getModelProvider() == ModelProvider.BEDROCK) {
             inputParameters
                 .put(
@@ -122,7 +122,7 @@ protected Map getInputParameters(ChatCompletionInput chatComplet
             throw new IllegalArgumentException("Unknown/unsupported model provider: " + chatCompletionInput.getModelProvider());
         }
 
-        log.info("LLM input parameters: {}", inputParameters.toString());
+        // log.info("LLM input parameters: {}", inputParameters.toString());
         return inputParameters;
     }
 
@@ -138,14 +138,15 @@ protected ChatCompletionOutput buildChatCompletionOutput(ModelProvider provider,
                 errors = List.of((String) error.get(CONNECTOR_OUTPUT_MESSAGE));
             } else {
                 Map firstChoiceMap = (Map) choices.get(0);
-                log.info("Choices: {}", firstChoiceMap.toString());
+                // log.info("Choices: {}", firstChoiceMap.toString());
                 Map message = (Map) firstChoiceMap.get(CONNECTOR_OUTPUT_MESSAGE);
+                /*
                 log
                     .info(
                         "role: {}, content: {}",
                         message.get(CONNECTOR_OUTPUT_MESSAGE_ROLE),
                         message.get(CONNECTOR_OUTPUT_MESSAGE_CONTENT)
-                    );
+                    );*/
                 answers = List.of(message.get(CONNECTOR_OUTPUT_MESSAGE_CONTENT));
             }
         } else if (provider == ModelProvider.BEDROCK) {