
Optimized graph display, revised UI layout, added hover feature to messages...

Merged: Niels Jarne Timm requested to merge 22-integrate-llm into main
28 files, +752 −234
+23 −3
@@ -7,10 +7,12 @@ from src.data_handler import Data_handler
import aiofiles
import json
import os
from pydantic_settings import BaseSettings
from pydantic import BaseModel

class Item(BaseModel):
    toDelete:list

-root_path:str = "/app/src"
+root_path:str = os.getenv("ROOT_PATH","/app/src")
tmp_path:str = root_path+"/tmp"
if not os.path.exists(tmp_path):
    os.mkdir(tmp_path)
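The Item model above is the request body for the /delete_knowledge_graphs endpoint added in the hunk below. A rough client-side sketch (the host, port, and graph names are assumptions, and requests stands in for any HTTP client):

import requests  # assumed HTTP client; any client will do

# The endpoint expects a JSON body matching Item, i.e. {"toDelete": [<graph names>]}.
payload = {"toDelete": ["demo_graph", "old_graph"]}  # hypothetical graph names
resp = requests.post("http://localhost:8000/delete_knowledge_graphs", json=payload)
print(resp.json())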
@@ -71,9 +73,27 @@ async def get_knowledge_graph(name:str):
async def stored_knowledge_graphs():
    return {"knowledge_graphs":list(data_handler.kgs.keys())}

@app.post("/delete_knowledge_graphs")
async def delete_knowledge_graphs(item:Item):
    return data_handler.delete_knowledge_graphs(item.toDelete)

@app.websocket("/chat/{name}")
async def chat(name: str,websocket: WebSocket):
    await websocket.accept()
    data_handler.init_llm_client(name)
    while True:
        data = await websocket.receive_json()
-        await websocket.send_json({"response":data['message'],"graph":data_handler.get_kg_as_json(name)})
\ No newline at end of file
+        answer = data_handler.ask_question(name,data['message'])
+        knowledge_graph = answer['knowledge_graph']
+        human_answer = answer['human_answer']
+        await websocket.send_json({"response":human_answer,"graph":json.dumps(knowledge_graph)})

@app.get("/chat_history/{name}")
async def get_chat_history(name:str):
    return {"history":json.dumps(data_handler.chats[name])}

@app.delete("/chat_history/{name}")
async def delete_chat_history(name:str):
    data_handler.delete_chat_history(name)
    return {"status":"OK"}
\ No newline at end of file
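Taken together, the chat websocket expects the client to send {"message": ...} and replies with the LLM answer under "response" plus the serialized knowledge graph under "graph"; the conversation can afterwards be read or cleared via the new /chat_history endpoints. A minimal client sketch, assuming the service listens on localhost:8000, a graph named "demo_graph" exists, and the third-party websockets and requests packages are available:

import asyncio
import json

import requests    # assumed HTTP client
import websockets  # assumed WebSocket client library

async def main():
    name = "demo_graph"  # hypothetical knowledge-graph name
    async with websockets.connect(f"ws://localhost:8000/chat/{name}") as ws:
        # Server contract per the handler above: send {"message": ...},
        # receive {"response": <answer>, "graph": <JSON-encoded graph>}.
        await ws.send(json.dumps({"message": "Which nodes relate to X?"}))
        reply = json.loads(await ws.recv())
        print(reply["response"])
        graph = json.loads(reply["graph"])  # the graph is delivered as a JSON string
        print(type(graph))

    # The stored conversation is returned as a JSON string under "history".
    history = requests.get(f"http://localhost:8000/chat_history/{name}").json()
    print(json.loads(history["history"]))

asyncio.run(main())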