Coverage for src/app/adapters/langChain_adapter.py: 44%
35 statements
« prev ^ index » next coverage.py v7.7.0, created at 2025-04-03 00:51 +0200
1from models.answer_model import AnswerModel
2from models.context_model import ContextModel
3from models.question_model import QuestionModel
4from models.file_chunk_model import FileChunkModel
5from models.file_model import FileModel
6from models.prompt_template_model import PromptTemplateModel
8from entities.query_entity import QueryEntity
9from entities.document_context_entity import DocumentContextEntity
10from entities.file_entity import FileEntity
12from repositories.langChain_repository import LangChainRepository
13from ports.split_file_port import SplitFilePort
14from ports.generate_answer_port import GenerateAnswerPort
class LangChainAdapter(GenerateAnswerPort, SplitFilePort):
    """Adapter bridging the model layer to the LangChain repository.

    Implements ``GenerateAnswerPort`` and ``SplitFilePort`` by converting
    model objects into the entity types the repository expects, delegating
    the real work to :class:`LangChainRepository`, and converting the
    results back into model objects for the caller.
    """

    def __init__(self, lang_chain_repository: LangChainRepository):
        """
        Args:
            lang_chain_repository (LangChainRepository): Repository that
                performs the actual LangChain operations.
        """
        self.lang_chain_repository = lang_chain_repository

    def generate_answer(self, question: QuestionModel, context: list[ContextModel], prompt_template: PromptTemplateModel) -> AnswerModel:
        """
        Generates an answer based on the given question, context, and prompt template.

        Args:
            question (QuestionModel): The question model containing the user ID and question text.
            context (list[ContextModel]): A list of context models containing the context content.
            prompt_template (PromptTemplateModel): The prompt template model containing the prompt template content.

        Returns:
            AnswerModel: The generated answer model containing the answer text.

        Raises:
            Exception: Whatever the repository raises is propagated unchanged
                to the caller (no local handling is useful at this layer).
        """
        # NOTE: the previous `try/except Exception as e: raise e` wrapper was
        # a no-op that only added a traceback frame; exceptions now propagate
        # naturally with identical types reaching callers.
        question_entity = QueryEntity(question.get_user_id(), question.get_question())
        context_entities = [
            DocumentContextEntity(context_model.get_content())
            for context_model in context
        ]
        answer = self.lang_chain_repository.generate_answer(
            question_entity,
            context_entities,
            prompt_template.get_prompt_template_content(),
        )
        return AnswerModel(answer.get_answer())

    def split_file(self, file: FileModel) -> list[FileChunkModel]:
        """
        Splits the given file into chunks.

        Args:
            file (FileModel): The file model containing the filename and file content.

        Returns:
            list[FileChunkModel]: A list of file chunk models containing the chunk content and metadata.

        Raises:
            Exception: Whatever the repository raises is propagated unchanged
                to the caller.
        """
        file_entity = FileEntity(file.get_filename(), file.get_file_content())
        file_chunks = self.lang_chain_repository.split_file(file_entity)
        return [
            FileChunkModel(file_chunk.get_chunk_content(), file_chunk.get_metadata())
            for file_chunk in file_chunks
        ]