server.jac

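# Model backend: swap the commented OpenAI pair in for the Ollama pair below
# to change providers; both are mtllm model wrappers used the same way.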
# import:py from mtllm.llms {OpenAI}
# glob llm = OpenAI(model_name='gpt-4o');
import:py from mtllm.llms {Ollama}
glob llm = Ollama(model_name='llama3.1');
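
# Vector-store retrieval engine defined in the local rag module; RagChat
# queries it via get_from_chroma() further down.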
import:jac from rag {RagEngine}
glob rag_engine:RagEngine = RagEngine();
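
# Chat categories the router can emit. The quoted descriptions are mtllm
# semantic strings: the LLM reads them when deciding how to classify a query.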
enum ChatType {
    RAG : 'Need to use retrievable information in specific documents to respond' = "RAG",
    QA : 'Given context is enough for an answer' = "user_qa",
    FEEDBACK : 'Handle casual conversation and improve upon user entries' = "Feedback"
}
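
# Decides which Chat node should handle a message. classify() has no body:
# `by llm(...)` delegates it to the model, with method="Reason" asking mtllm
# to reason before answering and temperature=0.0 keeping routing deterministic.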
node Router {
    can 'route the query to the appropriate task type'
    classify(message: 'query from the user to be routed': str) -> ChatType by llm(method="Reason", temperature=0.0);
}
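
# Base type for the concrete chat nodes; the router matches on chat_type.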
node Chat {
    has chat_type: ChatType;
}
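
# Spawned on root for each turn. On first use it lazily builds the routing
# subgraph (Router plus one node per chat type), then visits the Router,
# which forwards the walker to the Chat node matching the classification.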
walker infer {
    has message: str;
    has chat_history: list[dict];
    # Filled in by whichever Chat node's respond ability handles this walker.
    has response: str = "";

    can init_router with `root entry {
        visit [-->](`?Router) else {
            router_node = here ++> Router();
            router_node ++> RagChat();
            router_node ++> QAChat();
            router_node ++> FeedbackChat();
            visit router_node;
        }
    }

    can route with Router entry {
        classification = here.classify(message=self.message);
        visit [-->](`?Chat)(?chat_type == classification);
    }
}
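
# Public entry point. Finds the Session node for session_id (creating it on
# first contact) and visits it; Session.chat then runs the actual turn.
# When this file is served with jac-cloud (`jac serve server.jac`), walkers
# are exposed as REST endpoints, so a turn looks roughly like (illustrative):
#   POST /walker/interact  {"message": "Hi there", "session_id": "abc123"}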
walker interact {
    has message: str;
    has session_id: str;

    can init_session with `root entry {
        visit [-->](`?Session)(?id == self.session_id) else {
            session_node = here ++> Session(id=self.session_id, chat_history=[], status=1);
            print("Session Node Created");
            visit session_node;
        }
    }
}
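
# Document-grounded chat: pulls context for the query from the vector store
# and hands it to the LLM alongside the running chat history.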
node RagChat :Chat: {
    has chat_type: ChatType = ChatType.RAG;

    can respond with infer entry {
        can 'Respond to message using chat_history as context and agent_role as the goal of the agent'
        respond_with_llm(
            message: 'current message': str,
            chat_history: 'chat history': list[dict],
            agent_role: 'role of the agent responding': str,
            context: 'retrieved context from documents': list
        ) -> 'response': str by llm();
        data = rag_engine.get_from_chroma(query=here.message);
        here.response = respond_with_llm(here.message, here.chat_history, "You are a conversation agent designed to help users with their queries based on the documents provided", data);
    }
}
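
# Plain question answering: no retrieval, the chat history alone is context.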
node QAChat :Chat: {
    has chat_type: ChatType = ChatType.QA;

    can respond with infer entry {
        can 'Respond to message using chat_history as context and agent_role as the goal of the agent'
        respond_with_llm(
            message: 'current message': str,
            chat_history: 'chat history': list[dict],
            agent_role: 'role of the agent responding': str
        ) -> 'response': str by llm();
        here.response = respond_with_llm(here.message, here.chat_history, agent_role="You are a conversation agent designed to help users with their queries");
    }
}

# Casual conversation: improves upon user entries by listening to feedback.
node FeedbackChat :Chat: {
    has chat_type: ChatType = ChatType.FEEDBACK;

    can respond with infer entry {
        can 'Respond to message using chat_history as context and agent_role as the goal of the agent'
        respond_with_llm(
            message: 'current message': str,
            chat_history: 'chat history': list[dict],
            agent_role: 'role of the agent responding': str
        ) -> 'response': str by llm();
        # NOTE: currently reuses the generic QA role; tailor agent_role here
        # for feedback-specific behaviour.
        here.response = respond_with_llm(here.message, here.chat_history, agent_role="You are a conversation agent designed to help users with their queries");
    }
}
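
# One node per conversation, keyed by session_id off root. Persists the
# chat history and delegates each turn to the infer walker.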
node Session {
    has id: str;
    has chat_history: list[dict];
    has status: int = 1;

    can chat with interact entry {
        self.chat_history.append({"role": "user", "content": here.message});
        response = infer(message=here.message, chat_history=self.chat_history) spawn root;
        self.chat_history.append({"role": "assistant", "content": response.response});
        report {
            "response": response.response
        };
    }
}
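
# Resulting graph shape per user (sketch):
#   root ++> Router ++> RagChat / QAChat / FeedbackChat
#   root ++> Session(id=<session_id>)   # one Session node per conversation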