@@ -13,7 +13,7 @@
 
 # Imports from the library
 from .base_node import BaseNode
-from ..helpers import template_chunks, template_no_chunks, template_merge, template_chunks_with_schema, template_no_chunks_with_schema
+from ..helpers import template_chunks_gen_answ, template_no_chunks_gen_answ, template_merge_gen_answ, template_chunks_with_schema_gen_answ, template_chunks_with_schema_gen_answ
 
 class GenerateAnswerNode(BaseNode):
     """
@@ -77,28 +77,28 @@ def execute(self, state: dict) -> dict:
         for i, chunk in enumerate(tqdm(doc, desc="Processing chunks", disable=not self.verbose)):
             if self.node_config["schema"] is None and len(doc) == 1:
                 prompt = PromptTemplate(
-                    template=template_no_chunks,
+                    template=template_no_chunks_gen_answ,
                     input_variables=["question"],
                     partial_variables={"context": chunk.page_content,
                                        "format_instructions": format_instructions})
             elif self.node_config["schema"] is not None and len(doc) == 1:
                 prompt = PromptTemplate(
-                    template=template_no_chunks_with_schema,
+                    template=template_chunks_with_schema_gen_answ,
                     input_variables=["question"],
                     partial_variables={"context": chunk.page_content,
                                        "format_instructions": format_instructions,
                                        "schema": self.node_config["schema"]
                                        })
             elif self.node_config["schema"] is None and len(doc) > 1:
                 prompt = PromptTemplate(
-                    template=template_chunks,
+                    template=template_chunks_gen_answ,
                     input_variables=["question"],
                     partial_variables={"context": chunk.page_content,
                                        "chunk_id": i + 1,
                                        "format_instructions": format_instructions})
             elif self.node_config["schema"] is not None and len(doc) > 1:
                 prompt = PromptTemplate(
-                    template=template_chunks_with_schema,
+                    template=template_chunks_with_schema_gen_answ,
                     input_variables=["question"],
                     partial_variables={"context": chunk.page_content,
                                        "chunk_id": i + 1,
@@ -116,7 +116,7 @@ def execute(self, state: dict) -> dict:
             answer = map_chain.invoke({"question": user_prompt})
             # Merge the answers from the chunks
             merge_prompt = PromptTemplate(
-                template=template_merge,
+                template=template_merge_gen_answ,
                 input_variables=["context", "question"],
                 partial_variables={"format_instructions": format_instructions},
             )
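Taken together, the hunks amount to a four-way choice of prompt template, keyed on whether a schema is configured and whether the document was split into more than one chunk. The sketch below restates that selection with the renamed helpers; `templates`, `context`, `schema`, and `format_instructions` are hypothetical stand-ins for the node's imports and state, not part of this commit.

# Sketch only: restates the template selection performed in the hunks above.
# `templates` maps the renamed helper names (imported from ..helpers in the real
# module) to their prompt strings; the remaining arguments are stand-ins.
from langchain.prompts import PromptTemplate

def pick_prompt(templates: dict, context: str, chunk_id: int,
                n_chunks: int, schema, format_instructions: str) -> PromptTemplate:
    partial = {"context": context, "format_instructions": format_instructions}
    if schema is None:
        name = "template_no_chunks_gen_answ" if n_chunks == 1 else "template_chunks_gen_answ"
    else:
        # The commit points both schema branches at the chunks-with-schema template.
        name = "template_chunks_with_schema_gen_answ"
        partial["schema"] = schema
    if n_chunks > 1:
        partial["chunk_id"] = chunk_id
    return PromptTemplate(template=templates[name],
                          input_variables=["question"],
                          partial_variables=partial)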