from crewai import LLM
from user_journey_service.core.config import settings
import yaml
from pathlib import Path

# Path (relative to the process working directory) of the YAML file holding
# the prompt templates keyed by task name (e.g. "structured_learning").
prompts_path = 'user_journey_service/config/prompts.yaml'

class SummaryGenerator:
    """Generate learning summaries by rendering YAML prompt templates and
    sending them to a configured CrewAI ``LLM``.

    Prompt templates are loaded once at construction from ``prompts_path``
    and are expected to contain a ``{content}`` placeholder.
    """

    def __init__(self):
        # LLM client configured entirely from service settings.
        self.llm = LLM(
            model=f"{settings.PROVIDER}/{settings.LLM_FOR_REVIEW}",
            temperature=settings.TEMPERATURE,
            api_key=settings.OPENAI_KEY2,
            api_base=settings.ENDPOINT2,
            api_version=settings.API_VERSION2,
            seed=settings.SEED,                # For reproducible results
            timeout=settings.TIMEOUT
        )
        self.prompts = self.load_prompts()

    def load_prompts(self):
        """Load and return the prompt-template mapping from the YAML file.

        Returns:
            The parsed YAML content (a dict keyed by task name,
            assuming the file follows that layout — verify against
            ``prompts.yaml``).
        """
        # Explicit encoding avoids platform-dependent default decoding.
        with open(Path(prompts_path), "r", encoding="utf-8") as f:
            return yaml.safe_load(f)

    def _run_prompt(self, prompt_key, content):
        """Render the template named *prompt_key* with *content* and call the LLM.

        Args:
            prompt_key: Key into ``self.prompts`` selecting the template.
            content: Text substituted into the template's ``{content}`` slot.

        Returns:
            The raw LLM response.

        Raises:
            KeyError: If *prompt_key* is not present in the loaded prompts.
        """
        prompt_template = self.prompts[prompt_key]
        prompt = prompt_template.format(content=content)

        print(f"The prompt is : {prompt}")
        # LLM message format for CrewAI
        messages = [{'role': 'user', 'content': prompt}]

        # Call the LLM
        response = self.llm.call(messages)
        print(response)
        return response

    def structured_learning(self, content):
        """Run the "structured_learning" prompt on *content* and return the response."""
        print("Inside structured learning function....")
        return self._run_prompt("structured_learning", content)

    def ondemand_learning(self, content):
        """On-demand learning currently delegates to the skills-gap analysis.

        Bug fix: the response was previously discarded, so callers always
        received ``None``; it is now returned.
        """
        return self.skill_gap_analysis(content)

    def skill_gap_analysis(self, content):
        """Run the "skills_gap_analysis" prompt on *content* and return the response."""
        print("Inside skill gap analysis function....")
        return self._run_prompt("skills_gap_analysis", content)

   