diff --git a/examples/openai-compatible-agent/README.md b/examples/openai-compatible-agent/README.md index 7489c7f3..95a08dd0 100644 --- a/examples/openai-compatible-agent/README.md +++ b/examples/openai-compatible-agent/README.md @@ -19,6 +19,15 @@ Other LLM providers (e.g., Cohere, HuggingFace) have their own set of endpoints. ## OpenAI API compatible Burr application This example contains a very simple Burr application (`application.py`) and a FastAPI server to deploy this agent behind the OpenAI `v1/chat/completions` endpoint. After starting the server with `server.py`, you should be able to interact with it from your other tools ([Jan](https://jan.ai/docs) is easy and quick to install across platforms). +To run, execute: + +```bash +python server.py +``` + +If you're using Jan, toggle off the `Stream` parameter (we will add an example of a stream-compatible application later). + + ![](statemachine.png) This is great because we can quickly integrate our Burr Agent with high-quality UIs and tools. Simulaneously, you gain Burr's observability, logging, and persistence across your applications. diff --git a/pyproject.toml b/pyproject.toml index b5e78931..dd1d2599 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "burr" -version = "0.37.1" +version = "0.37.1rc0" dependencies = [] # yes, there are none requires-python = ">=3.9" authors = [