"""
AbstractGraph Module
"""

from abc import ABC, abstractmethod
from typing import Optional

from ..models import OpenAI, Gemini, Ollama, AzureOpenAI, HuggingFace, Groq
class AbstractGraph(ABC):
    """
    Scaffolding class for creating a graph representation and executing it.

    Attributes:
        prompt (str): The prompt for the graph.
        source (str): The source of the graph.
        config (dict): Configuration parameters for the graph.
        llm_model: An instance of a language model client, configured for generating answers.
        embedder_model: An instance of an embedding model client, configured for generating embeddings.
        verbose (bool): A flag indicating whether to show print statements during execution.
        headless (bool): A flag indicating whether to run the graph in headless mode.

    Args:
        prompt (str): The prompt for the graph.
        config (dict): Configuration parameters for the graph.
        source (str, optional): The source of the graph.

    Example:
        >>> class MyGraph(AbstractGraph):
        ...     def _create_graph(self):
        ...         # Implementation of graph creation here
        ...         return graph
        ...
        >>> my_graph = MyGraph("Example Graph",
        ...                    {"llm": {"model": "gpt-3.5-turbo"}},
        ...                    "example_source")
        >>> result = my_graph.run()
    """
1438
1539 def __init__ (self , prompt : str , config : dict , source : Optional [str ] = None ):
16- """
17- Initializes the AbstractGraph with a prompt, file source, and configuration.
18- """
40+
1941 self .prompt = prompt
2042 self .source = source
2143 self .config = config
@@ -32,10 +54,20 @@ def __init__(self, prompt: str, config: dict, source: Optional[str] = None):
3254 self .final_state = None
3355 self .execution_info = None
3456
35- def _create_llm (self , llm_config : dict ):
57+ def _create_llm (self , llm_config : dict ) -> object :
3658 """
37- Creates an instance of the language model (OpenAI or Gemini) based on configuration.
59+ Create a large language model instance based on the configuration provided.
60+
61+ Args:
62+ llm_config (dict): Configuration parameters for the language model.
63+
64+ Returns:
65+ object: An instance of the language model client.
66+
67+ Raises:
68+ KeyError: If the model is not supported.
3869 """
70+
3971 llm_defaults = {
4072 "temperature" : 0 ,
4173 "streaming" : False
@@ -104,16 +136,27 @@ def _create_llm(self, llm_config: dict):
104136
105137 def get_state (self , key = None ) -> dict :
106138 """""
107- Obtain the current state
139+ Get the final state of the graph.
140+
141+ Args:
142+ key (str, optional): The key of the final state to retrieve.
143+
144+ Returns:
145+ dict: The final state of the graph.
108146 """
147+
109148 if key is not None :
110149 return self .final_state [key ]
111150 return self .final_state
112151
113152 def get_execution_info (self ):
114153 """
115154 Returns the execution information of the graph.
155+
156+ Returns:
157+ dict: The execution information of the graph.
116158 """
159+
117160 return self .execution_info
118161
119162 @abstractmethod