package com.emonster.taroaichat.service.llm.gemini;

import com.emonster.taroaichat.config.ApplicationProperties;
import com.emonster.taroaichat.service.llm.TarotPrompts;
import com.emonster.taroaichat.service.llm.dto.AIResponse;
import com.emonster.taroaichat.service.llm.dto.TarotCardData;
import com.emonster.taroaichat.service.dto.UserProfileDTO;
import com.emonster.taroaichat.service.llm.openrouter.tools.AIToolManager;
import com.emonster.taroaichat.service.llm.openrouter.tools.AITool;
import com.emonster.taroaichat.service.llm.gemini.tools.GeminiToolManager;
import com.google.genai.types.GenerateContentResponse;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Flux;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

/**
 * Gemini-based AI service that exposes the same interface as the OpenRouter AIService.
 * It uses Google's Gemini SDK for AI interactions while remaining compatible with the
 * existing AIService contract.
 */
@Service
public class GeminiService {

    private static final Logger LOG = LoggerFactory.getLogger(GeminiService.class);

    private final GeminiClient geminiClient;
    private final ApplicationProperties applicationProperties;
    private final TarotPrompts tarotPrompts;
    private final AIToolManager toolManager;
    private final GeminiToolManager geminiToolManager;
    private final ObjectMapper objectMapper;

    public GeminiService(GeminiClient geminiClient,
                        ApplicationProperties applicationProperties,
                        TarotPrompts tarotPrompts,
                        AIToolManager toolManager,
                        GeminiToolManager geminiToolManager,
                        ObjectMapper objectMapper) {
        this.geminiClient = geminiClient;
        this.applicationProperties = applicationProperties;
        this.tarotPrompts = tarotPrompts;
        this.toolManager = toolManager;
        this.geminiToolManager = geminiToolManager;
        this.objectMapper = objectMapper;
    }

    /**
     * Generate AI response for general chat conversation.
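     *
     * <p>For illustration only, a caller holding an injected {@code GeminiService} bean might
     * invoke it as follows; the history strings use the {@code "User: "}/{@code "AI: "} prefix
     * convention this service parses:
     * <pre>{@code
     * AIResponse reply = geminiService.generateChatResponse(
     *     "What does the Tower card usually signify?",
     *     List.of("User: Hello", "AI: Welcome, let's begin your reading."),
     *     false);
     * }</pre>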
     *
     * @param userMessage The user's message
     * @param conversationHistory Previous messages in the conversation
     * @param hasCardsSelected Whether the session has cards selected
     * @return AI response with possible tool calls
     */
    public AIResponse generateChatResponse(String userMessage, List<String> conversationHistory, boolean hasCardsSelected) {
        try {
            List<GeminiClient.GeminiRequest.Message> messages = buildChatMessages(userMessage, conversationHistory, hasCardsSelected);
            return callGeminiWithTools(messages, false);
        } catch (Exception e) {
            LOG.error("Error generating chat response with Gemini", e);
            return new AIResponse("I apologize, but I'm having trouble responding right now. Please try again in a moment.", null);
        }
    }

    /**
     * Generate AI interpretation for tarot reading.
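     *
     * <p>A usage sketch, assuming {@code selectedCards} was assembled earlier in the session flow:
     * <pre>{@code
     * String interpretation = geminiService.generateTarotReading(
     *     selectedCards, "Should I change jobs this year?");
     * }</pre>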
     *
     * @param selectedCards List of selected cards with positions
     * @param userQuestion Optional user question for the reading
     * @return AI interpretation of the reading
     */
    public String generateTarotReading(List<TarotCardData> selectedCards, String userQuestion) {
        try {
            List<GeminiClient.GeminiRequest.Message> messages = buildTarotMessages(selectedCards, userQuestion);
            return callGemini(messages, true);
        } catch (Exception e) {
            LOG.error("Error generating tarot reading with Gemini", e);
            return "I apologize, but I'm having trouble interpreting your reading right now. Please try again in a moment.";
        }
    }

    /**
     * Generate a concise summary of a tarot reading interpretation.
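     *
     * <p>A usage sketch; {@code fullInterpretation} is assumed to be the text previously returned
     * by {@link #generateTarotReading(List, String)}:
     * <pre>{@code
     * String summary = geminiService.generateReadingSummary(selectedCards, fullInterpretation);
     * }</pre>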
     *
     * @param selectedCards List of selected cards with positions
     * @param fullInterpretation The full tarot reading interpretation
     * @return Concise summary suitable for chat context
     */
    public String generateReadingSummary(List<TarotCardData> selectedCards, String fullInterpretation) {
        try {
            String systemPrompt = tarotPrompts.getSummarySystemPrompt();
            String userPrompt = tarotPrompts.buildSummaryPrompt(selectedCards, fullInterpretation);

            List<GeminiClient.GeminiRequest.Message> messages = Arrays.asList(
                new GeminiClient.GeminiRequest.Message("system", systemPrompt),
                new GeminiClient.GeminiRequest.Message("user", userPrompt)
            );

            return callGemini(messages, false);
        } catch (Exception e) {
            LOG.error("Error generating reading summary with Gemini", e);
            return buildBasicSummary(selectedCards);
        }
    }

    /**
     * Generate AI response for general chat conversation as a stream with card data available.
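     *
     * <p>A minimal consumption sketch; the exact keys in each event map depend on
     * {@code GeminiClient.generateContentStreamWithTools}, so the handler below is illustrative:
     * <pre>{@code
     * geminiService.generateChatResponseStreamWithCards(userMessage, history, true, false, cardData)
     *     .subscribe(event -> {
     *         // each event map carries a text chunk and/or a tool result
     *     });
     * }</pre>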
     *
     * @param userMessage The user's message
     * @param conversationHistory Previous messages in the conversation
     * @param hasCardsSelected Whether the session has cards selected
     * @param enableTools Whether to enable tool calling in this response
     * @param cardData The actual card data if available
     * @return A Flux stream of response events (both text chunks and tool results)
     */
    public Flux<Map<String, Object>> generateChatResponseStreamWithCards(String userMessage, List<String> conversationHistory, boolean hasCardsSelected, boolean enableTools, List<TarotCardData> cardData) {
        return generateChatResponseStreamWithCards(userMessage, conversationHistory, hasCardsSelected, enableTools, cardData, null, null);
    }

    /**
     * Generate AI response for general chat conversation as a stream with card data available.
     * This is a combined approach that handles both tools and streaming in one call.
     *
     * @param userMessage The user's message
     * @param conversationHistory Previous messages in the conversation
     * @param hasCardsSelected Whether the session has cards selected
     * @param enableTools Whether to enable tool calling in this response
     * @param cardData The actual card data if available
     * @param sessionStatus The current session status for state-specific prompts
     * @return A Flux stream of response events (both text and tool results)
     */
    public Flux<Map<String, Object>> generateChatResponseStreamWithCards(String userMessage, List<String> conversationHistory, boolean hasCardsSelected, boolean enableTools, List<TarotCardData> cardData, String sessionStatus) {
        return generateChatResponseStreamWithCards(userMessage, conversationHistory, hasCardsSelected, enableTools, cardData, sessionStatus, null);
    }

    /**
     * Generate AI response for general chat conversation as a stream with card data and user profile available.
     * This is a combined approach that handles both tools and streaming in one call.
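     *
     * <p>Full-parameter sketch ({@code sessionStatus} and {@code userProfile} may be {@code null};
     * the other variables are assumed to come from the session):
     * <pre>{@code
     * Flux<Map<String, Object>> events = geminiService.generateChatResponseStreamWithCards(
     *     userMessage, history, true, true, cardData, "AWAITING_USER_CONTEXT", userProfile);
     * }</pre>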
     *
     * @param userMessage The user's message
     * @param conversationHistory Previous messages in the conversation
     * @param hasCardsSelected Whether the session has cards selected
     * @param enableTools Whether to enable tool calling in this response
     * @param cardData The actual card data if available
     * @param sessionStatus The current session status for state-specific prompts
     * @param userProfile The user's profile data for personalization
     * @return A Flux stream of response events (both text and tool results)
     */
    public Flux<Map<String, Object>> generateChatResponseStreamWithCards(String userMessage, List<String> conversationHistory, boolean hasCardsSelected, boolean enableTools, List<TarotCardData> cardData, String sessionStatus, UserProfileDTO userProfile) {
        try {
            // Get system prompt based on state
            String systemPrompt;
            if (sessionStatus != null) {
                systemPrompt = enableTools ?
                    tarotPrompts.getStateSpecificPrompt(sessionStatus) :
                    tarotPrompts.getStateSpecificPromptForStreaming(sessionStatus);
            } else {
                systemPrompt = tarotPrompts.getChatSystemPrompt();
            }

            // Add card data to system prompt if available
            if (cardData != null && !cardData.isEmpty()) {
                StringBuilder cardInfo = new StringBuilder("\n\nSelected cards for this reading:\n");
                for (TarotCardData card : cardData) {
                    cardInfo.append("- ").append(card.getPosition()).append(": ")
                           .append(card.getName())
                           .append(card.isReversed() ? " (Reversed)" : "")
                           .append("\n");
                }
                systemPrompt += cardInfo.toString();
            }

            // Add user profile data to system prompt if available
            if (userProfile != null) {
                StringBuilder profileInfo = new StringBuilder("\n\nUser Profile Information:\n");
                
                if (userProfile.getBirthday() != null) {
                    profileInfo.append("- Birthday: ").append(userProfile.getBirthday()).append("\n");
                }
                
                if (userProfile.getGender() != null) {
                    profileInfo.append("- Gender: ").append(userProfile.getGender()).append("\n");
                }
                
                if (userProfile.getOccupation() != null && !userProfile.getOccupation().isEmpty()) {
                    profileInfo.append("- Occupation: ").append(userProfile.getOccupation()).append("\n");
                }
                
                profileInfo.append("\nUse this profile information to provide more personalized and relevant tarot interpretations.\n");
                systemPrompt += profileInfo.toString();
            }

            // Build conversation messages WITHOUT system prompt
            List<GeminiClient.GeminiRequest.Message> messages = buildConversationMessages(userMessage, conversationHistory);

            ApplicationProperties.Gemini geminiConfig = applicationProperties.getGemini();
            String model = geminiConfig.getModel().getPrimary();

            GeminiClient.GeminiRequest request = new GeminiClient.GeminiRequest();
            request.model = model;
            request.messages = messages;
            request.temperature = geminiConfig.getParameters().getTemperature();
            request.maxTokens = geminiConfig.getParameters().getMaxTokens();
            request.topP = geminiConfig.getParameters().getTopP();
            request.topK = geminiConfig.getParameters().getTopK();

            // Add system prompt separately for Gemini
            request.systemPrompt = systemPrompt;

            LOG.info("Calling combined Gemini stream with tools: model={}, status={}, messageCount={}, hasSystemPrompt={}, enableTools={}",
                model, sessionStatus, messages.size(), systemPrompt != null, enableTools);

            // Log system prompt details
            if (systemPrompt != null) {
                LOG.info("System prompt length: {} chars", systemPrompt.length());

                // Check if prompt contains tool instructions
                boolean hasToolInstructions = systemPrompt.contains("reveal_card") || systemPrompt.contains("TOOL") || systemPrompt.contains("start_interpretation");
                LOG.info("System prompt contains tool instructions: {}", hasToolInstructions);

                // Log if this is the trigger message
                if (userMessage.equals("I'm ready to begin my reading interpretation. Please reveal my first card.")) {
                    LOG.info("TRIGGER MESSAGE DETECTED - Should call reveal_card tool per prompt");
                }

                // Log first 300 chars of system prompt for debugging
                LOG.debug("System prompt preview: {}", systemPrompt.substring(0, Math.min(300, systemPrompt.length())));
            }

            // Use the new combined streaming method - pass through both text and tool results
            return geminiClient.generateContentStreamWithTools(request, enableTools);

        } catch (Exception e) {
            LOG.error("Error generating combined chat response stream", e);
            return Flux.error(e);
        }
    }

    /**
     * Generate a post-card selection greeting with streaming support.
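     *
     * <p>Illustrative consumption of the returned stream; each emitted item is a plain text chunk:
     * <pre>{@code
     * geminiService.generatePostCardSelectionGreetingStream(selectedCards)
     *     .subscribe(chunk -> {
     *         // forward each text chunk to the client, e.g. over SSE or a WebSocket
     *     });
     * }</pre>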
     *
     * @param selectedCards The cards that were selected
     * @return A Flux stream of greeting message chunks
     */
    public Flux<String> generatePostCardSelectionGreetingStream(List<TarotCardData> selectedCards) {
        try {
            String greetingPrompt = tarotPrompts.getPostCardSelectionGreeting(selectedCards);
            String systemPrompt = tarotPrompts.getPostCardSelectionSystemPrompt();

            List<GeminiClient.GeminiRequest.Message> messages = Arrays.asList(
                new GeminiClient.GeminiRequest.Message("system", systemPrompt),
                new GeminiClient.GeminiRequest.Message("user", greetingPrompt)
            );

            ApplicationProperties.Gemini geminiConfig = applicationProperties.getGemini();
            String model = geminiConfig.getModel().getPrimary();

            GeminiClient.GeminiRequest request = new GeminiClient.GeminiRequest();
            request.model = model;
            request.messages = messages;
            request.temperature = geminiConfig.getParameters().getTemperature();
            request.maxTokens = geminiConfig.getParameters().getMaxTokens();
            request.topP = geminiConfig.getParameters().getTopP();
            request.topK = geminiConfig.getParameters().getTopK();

            LOG.debug("Streaming post-card selection greeting with Gemini");
            return geminiClient.generateContentStream(request);

        } catch (Exception e) {
            LOG.error("Error generating post-card selection greeting stream with Gemini", e);
            return Flux.error(e);
        }
    }

    /**
     * Analyze conversation for UI actions using a separate, non-streaming call.
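     *
     * <p>Example of the analysis phase (a sketch): the returned {@link AIResponse} carries any
     * tool results, while its conversational text is intentionally left empty in this phase:
     * <pre>{@code
     * AIResponse analysis = geminiService.analyzeForUIActions(
     *     "Yes, show me the next card", history, true, selectedCards, "AWAITING_OBSTACLE");
     * }</pre>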
     *
     * @param userMessage The user's message
     * @param conversationHistory Previous messages in the conversation
     * @param hasCardsSelected Whether the session has cards selected
     * @param selectedCards The actual card data if available
     * @param sessionStatus The current session status for state-specific prompts
     * @return AI response with tool calls for UI control
     */
    public AIResponse analyzeForUIActions(String userMessage, List<String> conversationHistory, boolean hasCardsSelected, List<TarotCardData> selectedCards, String sessionStatus) {
        try {
            LOG.info("analyzeForUIActions called with Gemini - userMessage: '{}', hasCardsSelected: {}, sessionStatus: {}", userMessage, hasCardsSelected, sessionStatus);

            // Log the conversation history to debug context
            if (conversationHistory != null && !conversationHistory.isEmpty()) {
                LOG.debug("Conversation history has {} messages", conversationHistory.size());
                // Log the last few messages for context
                int startIdx = Math.max(0, conversationHistory.size() - 3);
                for (int i = startIdx; i < conversationHistory.size(); i++) {
                    LOG.debug("History[{}]: {}", i, conversationHistory.get(i));
                }
            }

            List<GeminiClient.GeminiRequest.Message> messages = buildChatMessagesWithState(userMessage, conversationHistory, hasCardsSelected, sessionStatus);

            // If cards are available, add them to the context
            if (selectedCards != null && !selectedCards.isEmpty()) {
                StringBuilder cardInfo = new StringBuilder("Selected cards for this reading:\n");
                for (TarotCardData card : selectedCards) {
                    cardInfo.append("- ").append(card.getPosition()).append(": ")
                           .append(card.getName())
                           .append(card.isReversed() ? " (Reversed)" : "")
                           .append("\n");
                }
                messages.add(new GeminiClient.GeminiRequest.Message("system", cardInfo.toString()));
            }

            // For the special trigger message, don't add extra instructions
            if (!"I'm ready to begin my reading interpretation.".equals(userMessage)) {
                String analysisPrompt = "Based on the conversation above and the current session state (" + sessionStatus + "), analyze if any UI tools should be called. " +
                    "This is a separate analysis phase - do not provide conversational text.\n\n";

                if ("AWAITING_USER_CONTEXT".equals(sessionStatus)) {
                    analysisPrompt += "The session is in AWAITING_USER_CONTEXT state. Look at the conversation history:\n" +
                        "- Did the user share specific context about their situation? (e.g., 'I have a big presentation next week and I'm worried it won't be received well')\n" +
                        "- If yes, call start_interpretation with:\n" +
                        "  - userContext: The specific situation they described (from conversation history)\n" +
                        "  - readyToInterpret: true\n" +
                        "  - overallTheme: career/relationships/personal_growth/decision_making/spiritual_guidance/general\n" +
                        "- If they only said vague things like 'work stress' without details, don't call any tool.\n";
                } else if ("AWAITING_SITUATION".equals(sessionStatus)) {
                    analysisPrompt += "The session is in AWAITING_SITUATION state. If the user said 'I'm ready to begin my reading interpretation.', call reveal_card with cardPosition='situation'.\n";
                } else if ("AWAITING_OBSTACLE".equals(sessionStatus)) {
                    analysisPrompt += "The session is in AWAITING_OBSTACLE state. ONLY if the user confirms they're ready (e.g., 'yes', 'I'm ready', 'continue', 'next', 'show me', or any affirmative response), call reveal_card with cardPosition='obstacle'.\n";
                } else if ("AWAITING_ADVICE".equals(sessionStatus)) {
                    analysisPrompt += "The session is in AWAITING_ADVICE state. ONLY if the user confirms they're ready (e.g., 'yes', 'I'm ready', 'continue', 'next', 'show me', or any affirmative response), call reveal_card with cardPosition='advice'.\n";
                }

                analysisPrompt += "\nIMPORTANT: Always provide ALL required parameters for the tool you're calling. Extract context from the conversation history, not just the current message.";

                messages.add(new GeminiClient.GeminiRequest.Message("user", analysisPrompt));
            }

            return callGeminiWithTools(messages, false);
        } catch (Exception e) {
            LOG.error("Error analyzing for UI actions with Gemini", e);
            return new AIResponse("", null);
        }
    }

    private List<GeminiClient.GeminiRequest.Message> buildChatMessages(String userMessage, List<String> conversationHistory, boolean hasCardsSelected) {
        List<GeminiClient.GeminiRequest.Message> messages = new ArrayList<>();

        String systemPrompt = tarotPrompts.getChatSystemPrompt();
        messages.add(new GeminiClient.GeminiRequest.Message("system", systemPrompt));

        // Add conversation history for context
        if (conversationHistory != null && !conversationHistory.isEmpty()) {
            for (String historicalMessage : conversationHistory) {
                if (historicalMessage.startsWith("User: ")) {
                    String content = historicalMessage.substring(6);
                    messages.add(new GeminiClient.GeminiRequest.Message("user", content));
                } else if (historicalMessage.startsWith("AI: ")) {
                    String content = historicalMessage.substring(4);
                    messages.add(new GeminiClient.GeminiRequest.Message("assistant", content));
                }
            }
        }

        // Add the current user message
        messages.add(new GeminiClient.GeminiRequest.Message("user", userMessage));

        LOG.debug("Built chat messages for Gemini with {} total messages (including system prompt)", messages.size());
        return messages;
    }

    private List<GeminiClient.GeminiRequest.Message> buildChatMessagesWithState(String userMessage, List<String> conversationHistory, boolean hasCardsSelected, String sessionStatus) {
        return buildChatMessagesWithState(userMessage, conversationHistory, hasCardsSelected, sessionStatus, true);
    }

    private List<GeminiClient.GeminiRequest.Message> buildChatMessagesWithState(String userMessage, List<String> conversationHistory, boolean hasCardsSelected, String sessionStatus, boolean enableTools) {
        List<GeminiClient.GeminiRequest.Message> messages = new ArrayList<>();

        // Use state-specific prompt if available, choosing tool-enabled or streaming version
        String systemPrompt;
        if (sessionStatus != null) {
            systemPrompt = enableTools ?
                tarotPrompts.getStateSpecificPrompt(sessionStatus) :
                tarotPrompts.getStateSpecificPromptForStreaming(sessionStatus);
        } else {
            systemPrompt = tarotPrompts.getChatSystemPrompt();
        }

        messages.add(new GeminiClient.GeminiRequest.Message("system", systemPrompt));

        // Add conversation history for context
        if (conversationHistory != null && !conversationHistory.isEmpty()) {
            for (String historicalMessage : conversationHistory) {
                if (historicalMessage.startsWith("User: ")) {
                    String content = historicalMessage.substring(6);
                    messages.add(new GeminiClient.GeminiRequest.Message("user", content));
                } else if (historicalMessage.startsWith("AI: ")) {
                    String content = historicalMessage.substring(4);
                    messages.add(new GeminiClient.GeminiRequest.Message("assistant", content));
                }
            }
        }

        // Add the current user message
        messages.add(new GeminiClient.GeminiRequest.Message("user", userMessage));

        LOG.debug("Built chat messages for Gemini with {} total messages (including state-specific prompt for status: {})", messages.size(), sessionStatus);
        return messages;
    }

    private List<GeminiClient.GeminiRequest.Message> buildTarotMessages(List<TarotCardData> selectedCards, String userQuestion) {
//        String systemPrompt = tarotPrompts.getTarotSystemPrompt();
        String systemPrompt = "";
        String readingPrompt = tarotPrompts.buildTarotReadingPrompt(selectedCards, userQuestion);

        return Arrays.asList(
            new GeminiClient.GeminiRequest.Message("system", systemPrompt),
            new GeminiClient.GeminiRequest.Message("user", readingPrompt)
        );
    }

    private String callGemini(List<GeminiClient.GeminiRequest.Message> messages, boolean isReading) {
        try {
            ApplicationProperties.Gemini geminiConfig = applicationProperties.getGemini();
            String model = geminiConfig.getModel().getPrimary();

            GeminiClient.GeminiRequest request = new GeminiClient.GeminiRequest();
            request.model = model;
            request.messages = messages;
            request.temperature = geminiConfig.getParameters().getTemperature();
            request.maxTokens = isReading ? 1500 : geminiConfig.getParameters().getMaxTokens();
            request.topP = geminiConfig.getParameters().getTopP();
            request.topK = geminiConfig.getParameters().getTopK();

            LOG.debug("Calling Gemini with model: {} for {}", model, isReading ? "tarot reading" : "chat");

            GeminiClient.GeminiResponse result = geminiClient.generateContent(request);

            if (result.candidates != null && !result.candidates.isEmpty()) {
                GeminiClient.GeminiResponse.Candidate candidate = result.candidates.get(0);
                if (candidate.content != null && candidate.content.parts != null && !candidate.content.parts.isEmpty()) {
                    String response = candidate.content.parts.get(0).text;
                    LOG.debug("Gemini response received: {} characters", response.length());
                    return response;
                }
            }

            throw new RuntimeException("No response from Gemini model");

        } catch (Exception e) {
            LOG.warn("Primary Gemini model failed, trying fallback model", e);
            return callGeminiWithFallback(messages, isReading);
        }
    }

    private String callGeminiWithFallback(List<GeminiClient.GeminiRequest.Message> messages, boolean isReading) {
        try {
            ApplicationProperties.Gemini geminiConfig = applicationProperties.getGemini();
            String fallbackModel = geminiConfig.getModel().getFallback();

            GeminiClient.GeminiRequest request = new GeminiClient.GeminiRequest();
            request.model = fallbackModel;
            request.messages = messages;
            request.temperature = geminiConfig.getParameters().getTemperature();
            request.maxTokens = isReading ? 1500 : geminiConfig.getParameters().getMaxTokens();
            request.topP = geminiConfig.getParameters().getTopP();
            request.topK = geminiConfig.getParameters().getTopK();

            LOG.debug("Calling fallback Gemini model: {}", fallbackModel);

            GeminiClient.GeminiResponse result = geminiClient.generateContent(request);

            if (result.candidates != null && !result.candidates.isEmpty()) {
                GeminiClient.GeminiResponse.Candidate candidate = result.candidates.get(0);
                if (candidate.content != null && candidate.content.parts != null && !candidate.content.parts.isEmpty()) {
                    return candidate.content.parts.get(0).text;
                }
            }

            throw new RuntimeException("No response from fallback Gemini model");

        } catch (Exception e) {
            LOG.error("Both primary and fallback Gemini models failed", e);
            throw new RuntimeException("Gemini AI service temporarily unavailable", e);
        }
    }

    /**
     * Call Gemini with native tool support using two-phase workflow.
     * Phase 1: Tool analysis with native Gemini tools
     * Phase 2: Generate response (text only, no tools)
     */
    private AIResponse callGeminiWithTools(List<GeminiClient.GeminiRequest.Message> messages, boolean isReading) {
        try {
            ApplicationProperties.Gemini geminiConfig = applicationProperties.getGemini();
            String model = geminiConfig.getModel().getPrimary();

            GeminiClient.GeminiRequest request = new GeminiClient.GeminiRequest();
            request.model = model;
            request.messages = messages;
            request.temperature = geminiConfig.getParameters().getTemperature();
            request.maxTokens = isReading ? 1500 : geminiConfig.getParameters().getMaxTokens();
            request.topP = geminiConfig.getParameters().getTopP();
            request.topK = geminiConfig.getParameters().getTopK();

            List<AITool.ToolResult> toolResults = null;

            // Phase 1: Tool analysis with native Gemini tools
            if (!isReading && geminiToolManager.hasTools()) {
                LOG.debug("Phase 1: Analyzing for native Gemini tool calls with model: {}", model);

                GenerateContentResponse toolResponse = geminiClient.generateContentWithNativeTools(request, true);

                // Execute native tools if any were called
                List<Map<String, Object>> nativeToolResults = geminiClient.executeNativeTools(toolResponse);

                if (!nativeToolResults.isEmpty()) {
                    // Convert native tool results to AITool.ToolResult format for compatibility
                    toolResults = new ArrayList<>();
                    for (Map<String, Object> result : nativeToolResults) {
                        // Treat a missing or null "success" flag as failure rather than risking an NPE
                        boolean success = Boolean.TRUE.equals(result.get("success"));
                        String message = (String) result.get("message");
                        Map<String, Object> data = (Map<String, Object>) result.get("data");

                        if (success) {
                            toolResults.add(AITool.ToolResult.success(message, data));
                        } else {
                            toolResults.add(AITool.ToolResult.failure(message));
                        }
                    }
                    LOG.debug("Executed {} native Gemini tools", toolResults.size());
                }

                // Extract any text from tool response (usually minimal or empty when tools are called)
                String toolResponseText = extractTextFromResponse(toolResponse);
                LOG.debug("Phase 1 tool response text: '{}'", toolResponseText);

                // When tools are executed, don't return any text from Phase 1
                // The text response should come from Phase 2 only
                if (toolResults != null && !toolResults.isEmpty()) {
                    // Return empty text with tool results - Phase 2 will provide the conversational response
                    return new AIResponse("", toolResults);
                }
            }

            // Phase 2: Generate conversational response (no tools)
            LOG.debug("Phase 2: Generating conversational response with model: {}", model);

            GeminiClient.GeminiResponse result = geminiClient.generateContent(request);

            if (result.candidates != null && !result.candidates.isEmpty()) {
                GeminiClient.GeminiResponse.Candidate candidate = result.candidates.get(0);

                String response = "";
                if (candidate.content != null && candidate.content.parts != null && !candidate.content.parts.isEmpty()) {
                    response = candidate.content.parts.get(0).text;
                }

                LOG.debug("Gemini response received: {} characters, {} tool calls",
                    response.length(),
                    toolResults != null ? toolResults.size() : 0);

                return new AIResponse(response, toolResults);
            } else {
                throw new RuntimeException("No response from Gemini model");
            }

        } catch (Exception e) {
            LOG.warn("Primary Gemini model failed, trying fallback model", e);
            // For fallback, we don't include tools
            String response = callGeminiWithFallback(messages, isReading);
            return new AIResponse(response, null);
        }
    }

    /**
     * Process tool calls from Gemini response.
     */
    private List<AITool.ToolResult> processToolCalls(List<GeminiClient.GeminiResponse.Candidate.ToolCall> toolCalls) {
        List<AITool.ToolResult> results = new ArrayList<>();

        for (GeminiClient.GeminiResponse.Candidate.ToolCall toolCall : toolCalls) {
            try {
                String toolName = toolCall.function.name;
                String argumentsJson = toolCall.function.arguments;

                LOG.info("Processing Gemini tool call: {} with arguments: {}", toolName, argumentsJson);

                // Parse arguments
                Map<String, Object> arguments = objectMapper.readValue(argumentsJson, new TypeReference<Map<String, Object>>() {});

                // Execute tool
                AITool.ToolResult result = toolManager.executeTool(toolName, arguments);
                results.add(result);

            } catch (Exception e) {
                LOG.error("Error processing Gemini tool call", e);
                results.add(AITool.ToolResult.failure("Error processing tool: " + e.getMessage()));
            }
        }

        return results;
    }

    /**
     * Build a basic summary without AI assistance (fallback).
     */
    private String buildBasicSummary(List<TarotCardData> selectedCards) {
        StringBuilder summary = new StringBuilder();
        summary.append("Tarot reading performed with ");
        summary.append(selectedCards.size()).append(" cards: ");

        for (int i = 0; i < selectedCards.size(); i++) {
            TarotCardData card = selectedCards.get(i);
            if (i > 0) summary.append(", ");
            summary.append(card.getName());
            summary.append(" (").append(card.getPosition()).append(")");
            if (card.isReversed()) summary.append(" reversed");
        }

        summary.append(". Full interpretation was provided to guide the querent.");
        return summary.toString();
    }

    /**
     * Build conversation messages WITHOUT system prompt.
     * This is used when system prompt is passed separately.
     */
    private List<GeminiClient.GeminiRequest.Message> buildConversationMessages(String userMessage, List<String> conversationHistory) {
        List<GeminiClient.GeminiRequest.Message> messages = new ArrayList<>();

        // Add conversation history for context (no system messages)
        if (conversationHistory != null && !conversationHistory.isEmpty()) {
            for (String historicalMessage : conversationHistory) {
                if (historicalMessage.startsWith("User: ")) {
                    String content = historicalMessage.substring(6);
                    messages.add(new GeminiClient.GeminiRequest.Message("user", content));
                } else if (historicalMessage.startsWith("AI: ")) {
                    String content = historicalMessage.substring(4);
                    messages.add(new GeminiClient.GeminiRequest.Message("assistant", content));
                }
            }
        }

        // Add the current user message
        messages.add(new GeminiClient.GeminiRequest.Message("user", userMessage));

        LOG.debug("Built conversation messages for Gemini with {} messages (no system prompt)", messages.size());
        return messages;
    }

    /**
     * Extract text from native Gemini SDK response.
     * Handles the SDK's response structure and extracts text content.
     */
    private String extractTextFromResponse(GenerateContentResponse response) {
        if (response == null) {
            return "";
        }

        try {
            // First check if there are candidates
            var candidatesOpt = response.candidates();
            if (candidatesOpt.isEmpty() || candidatesOpt.get().isEmpty()) {
                LOG.debug("No candidates in Gemini response");
                return "";
            }

            // Get the first candidate
            var candidate = candidatesOpt.get().get(0);

            // Check the finish reason to see if it's a tool call
            var finishReasonOpt = candidate.finishReason();
            if (finishReasonOpt.isPresent()) {
                var finishReason = finishReasonOpt.get();
                // If it's a tool call, don't try to extract text
                if (finishReason.toString().contains("TOOL_CALL") || finishReason.knownEnum() == com.google.genai.types.FinishReason.Known.UNEXPECTED_TOOL_CALL) {
                    // Check if the response actually contains a function call
                    var contentOpt = candidate.content();
                    if (contentOpt.isPresent()) {
                        var partsOpt = contentOpt.get().parts();
                        if (partsOpt.isPresent() && !partsOpt.get().isEmpty()) {
                            var firstPart = partsOpt.get().get(0);
                            if (firstPart.functionCall().isPresent()) {
                                LOG.debug("Response contains function call, not text");
                                return "";
                            }
                        }
                    }
                }
            }

            // Now safely try to extract text
            // Instead of using response.text() which can throw, extract manually
            var contentOpt = candidate.content();
            if (contentOpt.isPresent()) {
                var partsOpt = contentOpt.get().parts();
                if (partsOpt.isPresent()) {
                    StringBuilder textBuilder = new StringBuilder();
                    for (var part : partsOpt.get()) {
                        var textOpt = part.text();
                        if (textOpt.isPresent()) {
                            textBuilder.append(textOpt.get());
                        }
                    }
                    return textBuilder.toString();
                }
            }

            LOG.debug("No text content found in Gemini response");
            return "";
        } catch (Exception e) {
            LOG.debug("Error extracting text from Gemini response: {}", e.getMessage());
            return "";
        }
    }

    /**
     * Generate a simple text response without tool support.
     * Used for standalone prompts where we just need plain text output.
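     *
     * <p>Example:
     * <pre>{@code
     * String text = geminiService.generateSimpleResponse("Describe The Fool card in one sentence.");
     * }</pre>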
     *
     * @param prompt The prompt to send to Gemini
     * @return The text response from Gemini, or {@code null} if generation fails
     */
    public String generateSimpleResponse(String prompt) {
        try {
            // Build messages with just the user prompt
            List<GeminiClient.GeminiRequest.Message> messages = Arrays.asList(
                new GeminiClient.GeminiRequest.Message("user", prompt)
            );

            // Call Gemini without tools enabled
            return callGemini(messages, false);
        } catch (Exception e) {
            LOG.error("Error generating simple response with Gemini", e);
            return null;
        }
    }
}
