Skip to content
2 changes: 1 addition & 1 deletion backend/app/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
pass
from .main import main
1 change: 0 additions & 1 deletion backend/app/chatBot.py

This file was deleted.

1 change: 1 addition & 0 deletions backend/app/control/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
from app.control.dao.configDataManager import ConfigManager
9 changes: 9 additions & 0 deletions backend/app/control/bot/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
from .openaiBot import OpenAIBot
from .petalsBot import PetalsBot
from .wenxinBot import WenxinBot

# Optional: initialize or configure bots here if necessary,
# e.g. load bot configurations or API keys from environment variables.

# Public API of the bot package: the concrete BaseBot implementations.
__all__ = ['OpenAIBot', 'PetalsBot', 'WenxinBot']
24 changes: 24 additions & 0 deletions backend/app/control/bot/baseBot.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
from abc import ABC, abstractmethod

from app.model.dataModel import MessageModel, SessionModel, AgentModel, VectorStoreModel


class BaseBot(ABC):
    # Abstract interface that every chat-bot backend (OpenAI, Petals,
    # Wenxin, ...) must implement.

    @abstractmethod
    def ask(self,
            message: MessageModel,
            agent: AgentModel,
            session: SessionModel,
            vector_store_model: VectorStoreModel) -> MessageModel:
        """
        Process an input message and session context and return the bot's reply.

        Args:
            message (MessageModel): The message to be processed by the bot.
            agent (AgentModel): Agent persona used to shape the prompt, if applicable.
            session (SessionModel): The session context in which the message is being
                processed, if applicable.
            vector_store_model (VectorStoreModel): Vector store backing retrieval for
                this session, if applicable.

        Returns:
            MessageModel: The reply generated by the bot in response to the message.
        """
        pass
34 changes: 34 additions & 0 deletions backend/app/control/bot/openaiBot.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
from openai import OpenAI

from app.model.dataModel import MessageModel, SessionModel, AgentModel, VectorStoreModel, RoleEnum
from .baseBot import BaseBot


class OpenAIBot(BaseBot):
    """Chat bot backed by the OpenAI chat-completions API."""

    def __init__(self, api_key):
        # One client per bot instance; the API key lives inside the client.
        self.client = OpenAI(api_key=api_key)

    def ask(self,
            message: MessageModel,
            session: SessionModel,
            agent: AgentModel = None,
            vector_store_model: VectorStoreModel = None) -> MessageModel:
        """Send *message* (plus the session history) to OpenAI and return the reply.

        NOTE(review): the parameter order here differs from BaseBot.ask
        (agent/session are swapped) — confirm callers pass keywords.
        """
        if agent:
            # Wrap the raw content in the agent's persona prompt before sending.
            prompt_text = agent.generate_prompt(content=message.content,
                                                self_name=agent.name,
                                                target_name="Open AI Assistant")
            outgoing = MessageModel(content=prompt_text, role=message.role)
        else:
            outgoing = message

        history = session.serialize()["message_list"]
        completion = self.client.chat.completions.create(
            model="gpt-4o-mini",
            messages=history + [outgoing.serialize()],
        )

        # The first choice carries the assistant's reply text.
        reply_text = completion.choices[0].message.content
        return MessageModel(content=reply_text, role=RoleEnum.ASSISTANT)
12 changes: 12 additions & 0 deletions backend/app/control/bot/petalsBot.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# Manages interactions with the Petals framework, enabling decentralized bot operations as described earlier.
from .baseBot import BaseBot
from app.model.dataModel import MessageModel, AgentModel, SessionModel, VectorStoreModel


class PetalsBot(BaseBot):
    # Manages interactions with the Petals framework, enabling decentralized
    # bot operations.

    def ask(self,
            message: MessageModel,
            agent: AgentModel,
            session: SessionModel,
            vector_store_model: VectorStoreModel) -> MessageModel:
        """Not implemented yet: will run the query over the Petals decentralized network."""
        pass  # TODO: implement the Petals network call
12 changes: 12 additions & 0 deletions backend/app/control/bot/wenxinBot.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# Interfaces with the Wenxin API (likely another AI or ML service), handling specific functionalities provided by this service.
from .baseBot import BaseBot
from app.model.dataModel import MessageModel, AgentModel, SessionModel, VectorStoreModel


class WenxinBot(BaseBot):
    # Interfaces with the Wenxin API, handling the specific functionality
    # provided by that service.

    def ask(self,
            message: MessageModel,
            agent: AgentModel,
            session: SessionModel,
            vector_store_model: VectorStoreModel) -> MessageModel:
        """Not implemented yet: will forward the query to the Wenxin API."""
        pass  # TODO: implement the Wenxin API call
11 changes: 11 additions & 0 deletions backend/app/control/dao/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# app/dao/__init__.py

from .historyDataManager import HistoryManager
from .vectorDataManager import VectorEmbeddingManager
from .configDataManager import ConfigManager

# Optional: initialize dao connection here if necessary
# (e.g. a SQLAlchemy engine or similar).

# Public API of the dao package.  ConfigManager was imported (and is used by
# app.main via this package) but was missing from __all__, which hid it from
# wildcard imports; it is included here for consistency with the imports above.
__all__ = ['HistoryManager', 'VectorEmbeddingManager', 'ConfigManager']
28 changes: 28 additions & 0 deletions backend/app/control/dao/configDataManager.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
import yaml
from app.model.configModel import ConfigModel, FlaskConfigModel


class ConfigManager:
def __init__(self, config_path: str):
self.config_path = config_path
self.config = self._load()

def _load(self) -> ConfigModel:
"""Loads the configuration from the YAML file."""
with open(self.config_path, 'r') as file:
config_dict = yaml.safe_load(file)
return ConfigModel(config_dict=config_dict)

def update(self, key, value) -> bool:
"""Updates the configuration key with a new value and saves to file."""
if key in self.config:
setattr(self, key, value)
self._save()
return True
return False

def _save(self) -> bool:
"""Saves the current configurations back to the YAML file."""
with open(self.config_path, 'w') as file:
yaml.safe_dump(self.config.serialize(), file, default_flow_style=False)
return True
111 changes: 111 additions & 0 deletions backend/app/control/dao/historyDataManager.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,111 @@
import json
import os
from typing import List, Optional

from app.model.dataModel import MessageModel, SessionModel, RoleEnum


class HistoryManager:
def __init__(self, history_path: str = None):
self.history_path = history_path
if not os.path.exists(self.history_path):
os.makedirs(self.history_path)

def create_session(self):
session = SessionModel()
self.write_session(session)
return session

def get_session(self, session_id: str) -> SessionModel:
session_file = os.path.join(self.history_path, f"{session_id}.json")
with open(session_file, 'r') as f:
json_data = json.load(f)
return SessionModel(id=session_id,
time_created=json_data['time_created'],
message_list=[MessageModel(id=message["id"], time_created=message["time_created"],
role=RoleEnum.get_by_name(message["role_name"]),
content=message["content"]) for message in
json_data['message_list']],
vector_store_id=json_data['vector_store_id'])

def write_session(self, session: SessionModel):
"""
Write the session data to a JSON file.

Args:
session (SessionModel): The session model to be serialized and saved.
"""
session_file = os.path.join(self.history_path, f"{session.id}.json")
json_data = session.serialize()

with open(session_file, 'w') as f:
json.dump(json_data, f, indent=4)

def add_session_message(self, message: MessageModel, session: SessionModel) -> bool:
"""Add a new entry to the history."""
if session.add_message(message):
self.write_session(session)
return True
return False

def delete_session_message(self, session: SessionModel, message_id: str) -> bool:
"""
Delete a message from the session by message ID and save the updated session.

Args:
session (SessionModel): The session from which to delete the message.
message_id (str): The ID of the message to be deleted.
"""
if session.delete_message(message_id):
self.write_session(session)
return True
return False

def edit_session_message(self, session: SessionModel, message_id: str, new_content: str) -> bool:
"""Edit the content of a message by message ID within a session and save the updated session."""
if session.edit_message_content(message_id, new_content):
self.write_session(session)
return True
return False

def get_all_sessions(self) -> List[SessionModel]:
"""
Retrieve all session files from the history path and return them as a list of SessionModel instances.

Returns:
list[SessionModel]: A list containing all the loaded session models.
"""
sessions = []
# List all files in the directory
for filename in os.listdir(self.history_path):
# Check if the file is a JSON file
if filename.endswith('.json'):
# Extract the session ID from the filename
session_id = filename[:-5] # Remove the '.json' part
# Load the session using the existing load_session method
session = self.get_session(session_id)
sessions.append(session)
return sessions

def delete_session(self, session_id: str) -> bool:
"""
Delete the session file associated with the given session_id.

Args:
session_id (str): The ID of the session to delete.

Returns:
bool: True if the file was successfully deleted, False otherwise.
"""
# Construct the path to the session file
session_file = os.path.join(self.history_path, f"{session_id}.json")

# Check if the file exists
if os.path.exists(session_file):
# Delete the file
os.remove(session_file)
return True
else:
# Return False if the file does not exist
print(f"Warning: The session file for session_id '{session_id}' does not exist.")
return False
Original file line number Diff line number Diff line change
Expand Up @@ -6,15 +6,15 @@
import faiss
import numpy as np

from app.services.embeddingService import EmbeddingService
from app.control.services.embeddingService import EmbeddingService


class VectorStore:
def __init__(self, dimension, index_type='Flat', storage_path='vector_store'):
class VectorEmbeddingManager:
def __init__(self, dimension, api_key, index_type='Flat', storage_path='vector_store'):
self.dimension = dimension
self.index_type = index_type
self.index = self.create_index()
self.embedding_service = EmbeddingService()
self.embedding_service = EmbeddingService(api_key=api_key)
self.text_map = {}
self.current_id = 0
self.storage_path = storage_path
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# Provides services related to generating and handling embeddings, crucial for tasks such as similarity searches or ML model inputs.
# Provides services related to generating and handling embeddings, crucial for tasks such as similarity searches or ML bot inputs.


# app/services/embedding_service.py
Expand All @@ -13,7 +13,7 @@ def __init__(self, api_key):
def get_embeddings(self, text):
"""Fetch embeddings for the given text using OpenAI's API."""
response = openai.Embedding.create(
model="text-similarity-babbage-001", # or any other suitable model
model="text-similarity-babbage-001", # or any other suitable bot
input=text
)
embeddings = response['data']
Expand Down
Empty file.
10 changes: 0 additions & 10 deletions backend/app/database/__init__.py

This file was deleted.

50 changes: 0 additions & 50 deletions backend/app/database/historyStore.py

This file was deleted.

14 changes: 14 additions & 0 deletions backend/app/main.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
from app.control.dao import ConfigManager
from app.view import AppView


def main(config_path: str) -> None:
    """Build the application from the YAML config at *config_path* and start Flask.

    Args:
        config_path (str): Path to the YAML configuration file.
    """
    manager = ConfigManager(config_path)
    app_view = AppView(manager)

    # Flask host/port/debug come from the loaded configuration model.
    flask_settings = manager.config.flask_config
    app_view.run(host=flask_settings.host,
                 port=flask_settings.port,
                 debug=flask_settings.debug)
9 changes: 0 additions & 9 deletions backend/app/model/__init__.py
Original file line number Diff line number Diff line change
@@ -1,9 +0,0 @@
from .openaiModel import OpenAIModel
from .petalsModel import PetalsModel
from .wenxinModel import WenxinModel

# Optional: Initialize or configure models here if necessary
# For example, you could load model configurations or API keys from environment variables

# Export the models for easy import elsewhere in your application
__all__ = ['OpenAIModel', 'PetalsModel', 'WenxinModel']
Loading