from pathlib import Path

import yaml
from crewai import LLM

from user_journey_service.core.config import settings

# Location of the YAML file holding this module's prompt templates.
PROMPTS_PATH = Path("user_journey_service/config/prompts.yaml")


class MicrolearningDurationEstimator:
    """Estimates how long a microlearning unit should take via a single LLM call."""

    def __init__(self):
        self.llm = LLM(
            model=f"{settings.PROVIDER}/{settings.LLM1}",
            temperature=settings.TEMPERATURE,
            api_key=settings.OPENAI_KEY1,
            api_base=settings.ENDPOINT1,
            api_version=settings.API_VERSION1,
            seed=settings.SEED,  # fixed seed for reproducible results
            timeout=settings.TIMEOUT,
        )
        self.prompts = self.load_prompts()

    def load_prompts(self):
        """Load the prompt templates from the YAML config file."""
        with open(PROMPTS_PATH, "r", encoding="utf-8") as f:
            return yaml.safe_load(f)
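
    # Illustrative shape of prompts.yaml (an assumption, not taken from the repo):
    # the "duration_estimator" key maps to a str.format template whose
    # placeholders match the keyword arguments passed in estimate_duration, e.g.
    #
    #   duration_estimator: >
    #     Estimate how long a {Job_Title} with {Experience} of experience and
    #     skills in {Skills} needs to learn "{topic}" at {Level} level.
    #     Reply with the duration only.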
        
    def estimate_duration(self, input_data):
        """Render the duration-estimation prompt and return the model's answer."""
        prompt_template = self.prompts["duration_estimator"]
        prompt = prompt_template.format(
            Job_Title=input_data.Job_Title,
            Experience=input_data.Experience,
            Skills=input_data.Skills,
            topic=input_data.topic,
            Level=input_data.Level,
        )

        # crewai's LLM.call accepts an OpenAI-style message list and returns
        # the completion text as a string.
        messages = [
            {
                "role": "user",
                "content": prompt,
            },
        ]

        response = self.llm.call(messages)
        return response.strip()
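

# Minimal usage sketch. Assumption: any object exposing these attributes works
# (e.g. a Pydantic model elsewhere in the service); SimpleNamespace and the
# sample values below are purely illustrative.
if __name__ == "__main__":
    from types import SimpleNamespace

    sample_input = SimpleNamespace(
        Job_Title="Data Analyst",
        Experience="3 years",
        Skills="SQL, Python",
        topic="Introduction to data visualization",
        Level="Beginner",
    )
    estimator = MicrolearningDurationEstimator()
    print(estimator.estimate_duration(sample_input))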

