Coverage for src/app/adapters/langChain_adapter.py: 44%

35 statements  

« prev     ^ index     » next       coverage.py v7.7.0, created at 2025-04-03 00:51 +0200

1from models.answer_model import AnswerModel 

2from models.context_model import ContextModel 

3from models.question_model import QuestionModel 

4from models.file_chunk_model import FileChunkModel 

5from models.file_model import FileModel 

6from models.prompt_template_model import PromptTemplateModel 

7 

8from entities.query_entity import QueryEntity 

9from entities.document_context_entity import DocumentContextEntity 

10from entities.file_entity import FileEntity 

11 

12from repositories.langChain_repository import LangChainRepository 

13from ports.split_file_port import SplitFilePort 

14from ports.generate_answer_port import GenerateAnswerPort 

15 

class LangChainAdapter(GenerateAnswerPort, SplitFilePort):
    """Adapter between the model layer and the LangChain repository.

    Implements the GenerateAnswerPort and SplitFilePort ports by converting
    model objects into the entity types the repository expects, delegating
    the work to a LangChainRepository, and converting the results back into
    model objects for callers.
    """

    def __init__(self, lang_chain_repository: LangChainRepository):
        """
        Args:
            lang_chain_repository (LangChainRepository): Repository that performs
                the actual LangChain operations (answer generation, file splitting).
        """
        self.lang_chain_repository = lang_chain_repository

    def generate_answer(self, question: QuestionModel, context: list[ContextModel], prompt_template: PromptTemplateModel) -> AnswerModel:
        """
        Generates an answer based on the given question, context, and prompt template.

        Args:
            question (QuestionModel): The question model containing the user ID and question text.
            context (list[ContextModel]): A list of context models containing the context content.
            prompt_template (PromptTemplateModel): The prompt template model containing the prompt template content.

        Returns:
            AnswerModel: The generated answer model containing the answer text.

        Raises:
            Exception: Whatever the underlying repository raises is propagated
                unchanged to the caller.
        """
        # Note: the former `try/except Exception as e: raise e` wrapper was removed;
        # it only re-raised the same exception while resetting the traceback origin.
        question_entity = QueryEntity(question.get_user_id(), question.get_question())

        # Convert each context model into the entity type the repository expects.
        context_entities = [DocumentContextEntity(c.get_content()) for c in context]

        answer = self.lang_chain_repository.generate_answer(
            question_entity,
            context_entities,
            prompt_template.get_prompt_template_content(),
        )

        return AnswerModel(answer.get_answer())

    def split_file(self, file: FileModel) -> list[FileChunkModel]:
        """
        Splits the given file into chunks.

        Args:
            file (FileModel): The file model containing the filename and file content.

        Returns:
            list[FileChunkModel]: A list of file chunk models containing the chunk content and metadata.

        Raises:
            Exception: Whatever the underlying repository raises is propagated
                unchanged to the caller.
        """
        file_entity = FileEntity(file.get_filename(), file.get_file_content())

        file_chunks = self.lang_chain_repository.split_file(file_entity)

        # Wrap each chunk entity back into the model type callers consume.
        return [
            FileChunkModel(chunk.get_chunk_content(), chunk.get_metadata())
            for chunk in file_chunks
        ]