from crewai import LLM
from user_journey_service.core.config import settings
import yaml
from pathlib import Path
# Location of the YAML file holding the prompt templates, relative to the
# process working directory (loaded lazily in MicrolearningDefaultsEstimator.load_prompts).
prompts_path = 'user_journey_service/config/prompts.yaml'

class MicrolearningDefaultsEstimator:
    """Estimates default microlearning settings for a user via a single LLM call.

    On construction it configures a crewai ``LLM`` client from application
    settings and loads the prompt templates from the YAML file at
    ``prompts_path``.
    """

    def __init__(self):
        # Seeded, fixed-temperature client so repeated runs with the same
        # input produce reproducible estimates.
        self.llm = LLM(
            model=f"{settings.PROVIDER}/{settings.LLM1}",
            temperature=settings.TEMPERATURE,
            api_key=settings.OPENAI_KEY1,
            api_base=settings.ENDPOINT1,
            api_version=settings.API_VERSION1,
            seed=settings.SEED,                # For reproducible results
            timeout=settings.TIMEOUT,
        )
        self.prompts = self.load_prompts()

    def load_prompts(self):
        """Parse and return the prompt templates from the YAML file at ``prompts_path``.

        Returns:
            The deserialized YAML content (a dict keyed by prompt name).

        Raises:
            FileNotFoundError: if the YAML file is missing.
            yaml.YAMLError: if the file is not valid YAML.
        """
        # Explicit encoding so parsing does not depend on the platform locale.
        with open(Path(prompts_path), "r", encoding="utf-8") as f:
            return yaml.safe_load(f)

    def estimate_defaults(self, input_data):
        """Render the ``estimate_defaults`` prompt from ``input_data`` and query the LLM.

        Args:
            input_data: object exposing the attributes referenced below
                (``Job_Title``, ``Experience``, ``Skills``, ``topic``, ``Level``,
                ``Company_Name``, ``Industry``, ``Company_Size``, ``Business_Model``,
                ``Top_Use_Cases``, ``Learning_Focus``, ``Tech_stack``, ``Compliance``).

        Returns:
            The LLM's response text with surrounding whitespace stripped.
        """
        prompt_template = self.prompts["estimate_defaults"]
        # Placeholder names (mixed case) must match the template file exactly.
        prompt = prompt_template.format(
            role=input_data.Job_Title,
            experience=input_data.Experience,
            Skills=input_data.Skills,
            topic=input_data.topic,
            Level=input_data.Level,
            Company_Name=input_data.Company_Name,
            Industry=input_data.Industry,
            Company_Size=input_data.Company_Size,
            Business_Model=input_data.Business_Model,
            Top_Use_Cases=input_data.Top_Use_Cases,
            Learning_Focus=input_data.Learning_Focus,
            Tech_stack=input_data.Tech_stack,
            Compliance=input_data.Compliance,
        )

        messages = [
            {
                'role': 'user',
                'content': prompt,
            },
        ]

        response = self.llm.call(messages)
        return response.strip()

