Commit c8d606b: new script
agola11 committed Dec 12, 2024 (parent: 0593cc1)
Showing 2 changed files with 38 additions and 22 deletions.
52 changes: 34 additions & 18 deletions python/bench/tracing_script.py
@@ -2,27 +2,43 @@
 os.environ["LANGCHAIN_PROJECT"] = "llm_messages_test_py"
 os.environ["LANGSMITH_USE_PYO3_CLIENT"] = "true"
 
-import openai
 from langsmith import traceable
-from langsmith.wrappers import wrap_openai
 
-client = wrap_openai(openai.Client())
+@traceable
+def format_prompt(subject):
+    return [
+        {
+            "role": "system",
+            "content": "You are a helpful assistant.",
+        },
+        {
+            "role": "user",
+            "content": f"What's a good name for a store that sells {subject}?"
+        }
+    ]
 
-@traceable(run_type="tool", name="Retrieve Context")
-def my_tool(question: str) -> str:
-    return "During this morning's meeting, we solved all world conflict."
+@traceable(run_type="llm")
+def invoke_llm(messages):
+    return {
+        "choices": [
+            {
+                "message": {
+                    "role": "assistant",
+                    "content": "Sure, how about 'Rainbow Socks'?"
+                }
+            }
+        ]
+    }
 
-@traceable(name="Chat Pipeline")
-def chat_pipeline(question: str):
-    context = my_tool(question)
-    messages = [
-        { "role": "system", "content": "You are a helpful assistant. Please respond to the user's request only based on the given context." },
-        { "role": "user", "content": f"Question: {question}\nContext: {context}"}
-    ]
-    chat_completion = client.chat.completions.create(
-        model="gpt-4o-mini", messages=messages
-    )
-    return chat_completion.choices[0].message.content
+@traceable
+def parse_output(response):
+    return response["choices"][0]["message"]["content"]
+
+@traceable
+def run_pipeline():
+    messages = format_prompt("colorful socks")
+    response = invoke_llm(messages)
+    return parse_output(response)
 
 if __name__ == "__main__":
-    chat_pipeline("Can you summarize this morning's meetings?")
+    run_pipeline()
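
For a quick sanity check of the new benchmark pipeline, the traced functions can be exercised directly. This is a hypothetical snippet, not part of the commit; it assumes langsmith is installed and that the interpreter is started from python/bench/ so the script imports as tracing_script.

# Hypothetical smoke test (not part of this commit). Importing the module only
# defines the traced functions and sets the environment variables; run_pipeline()
# is guarded by __main__, so it is called explicitly here.
import tracing_script

result = tracing_script.run_pipeline()
# run_pipeline -> format_prompt -> invoke_llm (stubbed response) -> parse_output
assert result == "Sure, how about 'Rainbow Socks'?"
print(result)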
8 changes: 4 additions & 4 deletions python/langsmith/client.py
@@ -1287,10 +1287,10 @@ def create_run(
             run_create.get("trace_id") is not None
             and run_create.get("dotted_order") is not None
         ):
-            if self._pyo3_client is not None:
-                print("RUN_CREATE", run_create)
-                self._pyo3_client.create_run(run_create)
-            elif self.tracing_queue is not None:
+            # if self._pyo3_client is not None:
+            #     print("RUN_CREATE", run_create)
+            #     self._pyo3_client.create_run(run_create)
+            if self.tracing_queue is not None:
                 serialized_op = serialize_run_dict("post", run_create)
                 self.tracing_queue.put(
                     TracingQueueItem(run_create["dotted_order"], serialized_op)
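
With the PyO3 create_run path commented out, every run carrying both a trace_id and a dotted_order falls through to the background tracing queue, so the runs produced by the benchmark script above flow through the Python queue path. The sketch below is a simplified, self-contained model of that path, illustrative only and not LangSmith's actual implementation: payloads are serialized, enqueued, and drained by a background worker.

# Simplified model of the queue-based ingestion path (illustrative only;
# json.dumps and the worker below are stand-ins for serialize_run_dict and
# LangSmith's real background batch-ingest thread).
import json
import queue
import threading

tracing_queue: queue.Queue = queue.Queue()

def create_run(run_create: dict) -> None:
    # Mirror of the patched condition: only runs with a trace_id and a
    # dotted_order are eligible for the batched queue.
    if (
        run_create.get("trace_id") is not None
        and run_create.get("dotted_order") is not None
    ):
        serialized_op = json.dumps(run_create).encode()
        tracing_queue.put((run_create["dotted_order"], serialized_op))

def worker() -> None:
    # Drain the queue; the real client batches queued runs for ingestion.
    while True:
        dotted_order, op = tracing_queue.get()
        print("posting run", dotted_order, f"({len(op)} bytes)")
        tracing_queue.task_done()

threading.Thread(target=worker, daemon=True).start()
create_run({"trace_id": "t1", "dotted_order": "20241212T000000000001Zt1", "name": "demo"})
tracing_queue.join()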
