feat: Updated to Providers 0.15.2.
HavenDV committed Sep 17, 2024
1 parent 549f4aa commit 42f7891
Showing 23 changed files with 72 additions and 85 deletions.
4 changes: 2 additions & 2 deletions README.md
@@ -63,7 +63,7 @@ var answer = await llm.GenerateAsync(
Question: {question}
Helpful Answer:
""", cancellationToken: CancellationToken.None).ConfigureAwait(false);
""");

Console.WriteLine($"LLM answer: {answer}"); // The cloaked figure.
@@ -80,7 +80,7 @@ var chain =
| CombineDocuments(outputKey: "context") // combine documents together and put them into context
| Template(promptTemplate) // replace context and question in the prompt with their values
| LLM(llm.UseConsoleForDebug()); // send the result to the language model
var chainAnswer = await chain.RunAsync("text", CancellationToken.None); // get chain result
var chainAnswer = await chain.RunAsync("text"); // get chain result
Console.WriteLine("Chain Answer:"+ chainAnswer); // print the result
@@ -8,11 +8,11 @@ namespace LangChain.Samples.AspNet.Controllers;
[Route("[controller]")]
public class AnthropicSampleController : ControllerBase
{
-private readonly AnthropicModel _anthropicModel;
+private readonly AnthropicChatModel _anthropicModel;
private readonly ILogger<AnthropicSampleController> _logger;

public AnthropicSampleController(
-AnthropicModel anthropicModel,
+AnthropicChatModel anthropicModel,
ILogger<AnthropicSampleController> logger)
{
_anthropicModel = anthropicModel;
@@ -1,3 +1,4 @@
+using LangChain.Providers;
using LangChain.Providers.OpenAI;
using Microsoft.AspNetCore.Mvc;
using OpenAI;
3 changes: 2 additions & 1 deletion examples/LangChain.Samples.Azure/Program.cs
@@ -1,4 +1,5 @@
-using LangChain.Providers.Azure;
+using LangChain.Providers;
+using LangChain.Providers.Azure;

var provider = new AzureOpenAiProvider(apiKey: "AZURE_OPEN_AI_KEY", endpoint: "ENDPOINT");
var llm = new AzureOpenAiChatModel(provider, id: "DEPLOYMENT_NAME");
@@ -14,7 +14,7 @@

<ItemGroup>
<PackageReference Include="LangChain.Databases.Sqlite" Version="0.15.2" />
<PackageReference Include="LangChain.Providers.Abstractions" Version="0.15.1-dev.78" />
<PackageReference Include="LangChain.Providers.Abstractions" Version="0.15.2" />
<PackageReference Include="System.Text.Json" Version="8.0.4" />
</ItemGroup>

8 changes: 2 additions & 6 deletions examples/LangChain.Samples.LocalRAG/Program.cs
@@ -4,11 +4,7 @@
using LangChain.Extensions;
using Ollama;

-var provider = new OllamaProvider(options: new RequestOptions
-{
-    Stop = ["\n"],
-    Temperature = 0.0f,
-});
+var provider = new OllamaProvider();
var embeddingModel = new OllamaEmbeddingModel(provider, id: "all-minilm");
var llm = new OllamaChatModel(provider, id: "llama3");

@@ -36,6 +32,6 @@ Keep the answer as short as possible.
Question: {question}
Helpful Answer:
""").ConfigureAwait(false);
""");

Console.WriteLine($"LLM answer: {answer}");
3 changes: 2 additions & 1 deletion examples/LangChain.Samples.OpenAI/Program.cs
@@ -1,4 +1,5 @@
-using LangChain.Providers.OpenAI.Predefined;
+using LangChain.Providers;
+using LangChain.Providers.OpenAI.Predefined;

var apiKey =
Environment.GetEnvironmentVariable("OPENAI_API_KEY") ??
@@ -7,7 +7,7 @@
</PropertyGroup>

<ItemGroup>
<PackageReference Include="LangChain.Providers.OpenAI" Version="0.15.1-dev.78" />
<PackageReference Include="LangChain.Providers.OpenAI" Version="0.15.2" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.7.3" />
<PackageReference Include="Swashbuckle.AspNetCore.Swagger" Version="6.7.3" />
<PackageReference Include="Swashbuckle.AspNetCore.SwaggerGen" Version="6.7.3" />
@@ -7,7 +7,7 @@
</PropertyGroup>

<ItemGroup>
<PackageReference Include="LangChain.Providers.Ollama" Version="0.15.1-dev.78" />
<PackageReference Include="LangChain.Providers.Ollama" Version="0.15.2" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.7.3" />
<PackageReference Include="Swashbuckle.AspNetCore.Swagger" Version="6.7.3" />
<PackageReference Include="Swashbuckle.AspNetCore.SwaggerGen" Version="6.7.3" />
8 changes: 2 additions & 6 deletions examples/LangChain.Samples.Serve/Program.cs
@@ -16,12 +16,8 @@
builder.Services.AddLangChainServe();

// 2. Create a model
-var model = new OllamaChatModel(new OllamaProvider(options: new RequestOptions
-{
-    Temperature = 0,
-    Stop = ["User:"],
-}), "llama3.1");
-
+var provider = new OllamaProvider();
+var model = new OllamaChatModel(provider, id: "llama3.1");

// 3. Optional. Add custom name generator
// After initiating conversation, this will generate a name for it
8 changes: 4 additions & 4 deletions src/Core/src/Chains/LLM/LLMChain.cs
@@ -126,7 +126,7 @@ protected override async Task<IChainValues> CallAsync(IChainValues values, Callb
}, new ChatSettings
{
StopSequences = stop,
-}, cancellationToken).ConfigureAwait(false);
+}, cancellationToken);
if (Verbose)
{
Console.WriteLine(string.Join("\n\n", response.Messages.Except(chatMessages)));
@@ -136,7 +136,7 @@ protected override async Task<IChainValues> CallAsync(IChainValues values, Callb
var returnDict = new Dictionary<string, object>();

var outputKey = string.IsNullOrEmpty(OutputKey) ? "text" : OutputKey;
-returnDict[outputKey] = response.Messages.Last().Content;
+returnDict[outputKey] = response.LastMessageContent;

returnDict.TryAddKeyValues(values.Value);

@@ -176,7 +176,7 @@ private async Task<LlmResult> GenerateAsync(
var (prompts, stop) = await PreparePromptsAsync(inputs, runManager, cancellationToken).ConfigureAwait(false);

var responseTasks = prompts
-.Select(prompt => Llm.GenerateAsync(
+.Select(async prompt => await Llm.GenerateAsync(
request: new ChatRequest
{
Messages = prompt.ToChatMessages(),
@@ -194,7 +194,7 @@
{
new()
{
-Text = response.Messages.Last().Content
+Text = response.LastMessageContent
}
})
.ToArray();
4 changes: 2 additions & 2 deletions src/Core/src/Chains/StackableChains/LLMChain.cs
@@ -66,8 +66,8 @@ protected override async Task<IChainValues> InternalCallAsync(
}
}

-var response = await _llm.GenerateAsync(prompt, settings: _settings, cancellationToken: cancellationToken).ConfigureAwait(false);
-responseContent = response.Messages.Last().Content;
+var response = await _llm.GenerateAsync(prompt, settings: _settings, cancellationToken: cancellationToken);
+responseContent = response.LastMessageContent;
if (_useCache)
SaveCachedAnswer(prompt, responseContent);
values.Value[OutputKeys[0]] = responseContent;
4 changes: 2 additions & 2 deletions src/Core/src/Memory/MessageFormatter.cs
@@ -24,10 +24,10 @@ private string GetPrefix(MessageRole role)
case MessageRole.Ai:
return AiPrefix;

-case MessageRole.FunctionCall:
+case MessageRole.ToolCall:
return FunctionCallPrefix;

-case MessageRole.FunctionResult:
+case MessageRole.ToolResult:
return FunctionResultPrefix;

case MessageRole.Chat:
@@ -56,15 +56,7 @@ public async Task Call_Ok()
var questionGeneratorLlmMock = new Mock<IChatModel>();
questionGeneratorLlmMock
.Setup(v => v.GenerateAsync(It.IsAny<ChatRequest>(), It.IsAny<ChatSettings>(), It.IsAny<CancellationToken>()))
.Returns<ChatRequest, ChatSettings, CancellationToken>((_, _, _) =>
{
return Task.FromResult(new ChatResponse
{
Messages = new[] { Message.Ai("Bob's asking what is hist name") },
Usage = Usage.Empty,
UsedSettings = ChatSettings.Default,
});
});
.Returns<ChatRequest, ChatSettings, CancellationToken>((_, _, _) => GetChatResponses());

var llmInput = new LlmChainInput(questionGeneratorLlmMock.Object, prompt);
var questionGeneratorChain = new LlmChain(llmInput);
@@ -107,6 +99,20 @@ public async Task Call_Ok()
It.Is<ChatRequest>(request => request.Messages.Count == 1),
It.IsAny<ChatSettings>(),
It.IsAny<CancellationToken>()));
+    return;
+
+    // Helper method to create IAsyncEnumerable<int>
+    async IAsyncEnumerable<ChatResponse> GetChatResponses()
+    {
+        await Task.CompletedTask;
+
+        yield return new ChatResponse
+        {
+            Messages = new[] { Message.Ai("Bob's asking what is hist name") },
+            Usage = Usage.Empty,
+            UsedSettings = ChatSettings.Default,
+        };
+    }
}

[Test]
30 changes: 15 additions & 15 deletions src/Directory.Packages.props
@@ -4,7 +4,7 @@
</PropertyGroup>
<ItemGroup>
<PackageVersion Include="AngleSharp" Version="1.1.2" />
<PackageVersion Include="Aspose.PDF" Version="24.7.0" />
<PackageVersion Include="Aspose.PDF" Version="24.9.0" />
<PackageVersion Include="Docker.DotNet" Version="3.125.15" />
<PackageVersion Include="DocumentFormat.OpenXml" Version="3.1.0" />
<PackageVersion Include="DotNet.ReproducibleBuilds" Version="1.2.25" />
@@ -17,24 +17,24 @@
<PackageVersion Include="LangChain.Databases.Abstractions" Version="0.15.2" />
<PackageVersion Include="LangChain.Databases.InMemory" Version="0.15.2" />
<PackageVersion Include="LangChain.Databases.Sqlite" Version="0.15.2" />
<PackageVersion Include="LangChain.Providers.Abstractions" Version="0.15.1-dev.78" />
<PackageVersion Include="LangChain.Providers.Amazon.Bedrock" Version="0.15.1-dev.78" />
<PackageVersion Include="LangChain.Providers.Anthropic" Version="0.15.1-dev.78" />
<PackageVersion Include="LangChain.Providers.Anyscale" Version="0.15.1-dev.78" />
<PackageVersion Include="LangChain.Providers.Automatic1111" Version="0.15.1-dev.78" />
<PackageVersion Include="LangChain.Providers.Azure" Version="0.15.1-dev.78" />
<PackageVersion Include="LangChain.Providers.DeepSeek" Version="0.15.1-dev.78" />
<PackageVersion Include="LangChain.Providers.Google" Version="0.15.1-dev.78" />
<PackageVersion Include="LangChain.Providers.HuggingFace" Version="0.15.1-dev.78" />
<PackageVersion Include="LangChain.Providers.LLamaSharp" Version="0.15.1-dev.78" />
<PackageVersion Include="LangChain.Providers.Ollama" Version="0.15.1-dev.78" />
<PackageVersion Include="LangChain.Providers.OpenAI" Version="0.15.1-dev.78" />
<PackageVersion Include="LangChain.Providers.OpenRouter" Version="0.15.1-dev.78" />
<PackageVersion Include="LangChain.Providers.Abstractions" Version="0.15.2" />
<PackageVersion Include="LangChain.Providers.Amazon.Bedrock" Version="0.15.2" />
<PackageVersion Include="LangChain.Providers.Anthropic" Version="0.15.2" />
<PackageVersion Include="LangChain.Providers.Anyscale" Version="0.15.2" />
<PackageVersion Include="LangChain.Providers.Automatic1111" Version="0.15.2" />
<PackageVersion Include="LangChain.Providers.Azure" Version="0.15.2" />
<PackageVersion Include="LangChain.Providers.DeepSeek" Version="0.15.2" />
<PackageVersion Include="LangChain.Providers.Google" Version="0.15.2" />
<PackageVersion Include="LangChain.Providers.HuggingFace" Version="0.15.2" />
<PackageVersion Include="LangChain.Providers.LLamaSharp" Version="0.15.2" />
<PackageVersion Include="LangChain.Providers.Ollama" Version="0.15.2" />
<PackageVersion Include="LangChain.Providers.OpenAI" Version="0.15.2" />
<PackageVersion Include="LangChain.Providers.OpenRouter" Version="0.15.2" />
<PackageVersion Include="Microsoft.CodeAnalysis.CSharp.Workspaces" Version="4.11.0" />
<PackageVersion Include="Microsoft.CodeAnalysis.PublicApiAnalyzers" Version="3.3.4" />
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="17.11.1" />
<PackageVersion Include="Moq" Version="4.20.72" />
<PackageVersion Include="tryAGI.OpenAI" Version="3.6.3" />
<PackageVersion Include="tryAGI.OpenAI" Version="3.7.0" />
<PackageVersion Include="MSTest.TestAdapter" Version="3.5.1" />
<PackageVersion Include="MSTest.TestFramework" Version="3.5.1" />
<PackageVersion Include="Npgsql" Version="8.0.4" />
@@ -27,7 +27,7 @@ public static IServiceCollection AddAnthropic(
.AddOptions<AnthropicConfiguration>()
.BindConfiguration(configSectionPath: AnthropicConfiguration.SectionName);
_ = services
.AddHttpClient<AnthropicModel>();
.AddHttpClient<AnthropicChatModel>();
_ = services
.AddScoped<AnthropicProvider>(static services => AnthropicProvider.FromConfiguration(
configuration: services.GetRequiredService<IOptions<AnthropicConfiguration>>().Value));
4 changes: 2 additions & 2 deletions src/Meta/test/ReadmeTests.cs
@@ -97,7 +97,7 @@ Keep the answer as short as possible.
Question: {question}
Helpful Answer:
""", cancellationToken: CancellationToken.None).ConfigureAwait(false);
""");

Console.WriteLine($"LLM answer: {answer}"); // The cloaked figure.

@@ -152,7 +152,7 @@ Human will provide you with sentence about pet. You need to answer with pet name
Answer: Jerry
Human: {similarDocuments.AsString()}
Answer:
""", cancellationToken: CancellationToken.None).ConfigureAwait(false);
""");

Console.WriteLine($"LLM answer: {petNameResponse}");
Console.WriteLine($"Total usage: {llm.Usage}");
5 changes: 1 addition & 4 deletions src/Meta/test/WikiTests.AgentWithOllama.cs
@@ -23,10 +23,7 @@ public async Task AgentWithOllama()
//// We will start with basic ollama setup and simple question to the LLM:
var provider = new OllamaProvider(
// url: "http://172.16.50.107:11434", // if you have ollama running on different computer/port. Default is "http://localhost:11434/api"
-    options: new RequestOptions
-    {
-        Temperature = 0,
-    });
+    );
var model = new OllamaChatModel(provider, id: "llama3.1").UseConsoleForDebug();

var chain =
@@ -29,10 +29,7 @@ public async Task CheckingInternetSpeedWithCrewAndOllama()

var provider = new OllamaProvider(
// url: "http://172.16.50.107:11434", // if you have ollama running on different computer/port. Default is "http://localhost:11434/api"
-    options: new RequestOptions
-    {
-        Temperature = 0,
-    });
+    );
var model = new OllamaChatModel(provider, id: "llama3.1").UseConsoleForDebug();

//// ## Making a tool
17 changes: 9 additions & 8 deletions src/Meta/test/WikiTests.GettingStartedWithAmazonBedrock.cs
@@ -1,4 +1,5 @@
using Amazon;
+using LangChain.Providers;
using LangChain.Providers.Amazon.Bedrock;
using LangChain.Providers.Amazon.Bedrock.Predefined.Anthropic;

@@ -94,19 +95,19 @@ public async Task GettingStartedWithAmazonBedrock()
UseStreaming = true
}
};

-llm.PromptSent += (_, prompt) => Console.WriteLine($"Prompt: {prompt}");
-llm.PartialResponseGenerated += (_, delta) => Console.Write(delta);
-llm.CompletedResponseGenerated += (_, prompt) => Console.WriteLine($"Completed response: {prompt}");
+llm.RequestSent += (_, request) => Console.WriteLine($"Prompt: {request.Messages.AsHistory()}");
+llm.DeltaReceived += (_, delta) => Console.Write(delta.Content);
+llm.ResponseReceived += (_, response) => Console.WriteLine($"Completed response: {response}");
var prompt = @"
you are a comic book writer. you will be given a question and you will answer it.
question: who are 10 of the most popular superheros and what are their powers?";

string response = await llm.GenerateAsync(prompt);

Console.WriteLine(response);

//// In conclusion, by following these steps, you can set up the AWS CLI,
//// configure the Amazon Bedrock provider, and start using the supported foundation models in your code.
//// With the AWS CLI and Bedrock provider properly configured,
@@ -37,11 +37,7 @@ public async Task ImageGenerationWithOllamaAndStableDiffusion()
//// ## Ollama model
//// We will use latest version of `llama3.1` for our task. If you don't have mistral yet - it will be downloaded.

-var provider = new OllamaProvider(
-    options: new RequestOptions
-    {
-        Temperature = 0,
-    });
+var provider = new OllamaProvider();
var llm = new OllamaChatModel(provider, id: "llama3.1").UseConsoleForDebug();

//// Here we are stopping generation after `\n` symbol appears. Mistral will put a new line(`\n`) symbol after prompt is generated.
7 changes: 1 addition & 6 deletions src/Meta/test/WikiTests.RagWithOpenAiOllama.cs
@@ -67,12 +67,7 @@ public async Task RagWithOpenAiOllama()
//// This is free, assuming it is running locally--this code assumes it is available at https://localhost:11434.

// prepare Ollama with mistral model
-var providerOllama = new OllamaProvider(
-    options: new RequestOptions
-    {
-        Stop = ["\n"],
-        Temperature = 0.0f,
-    });
+var providerOllama = new OllamaProvider();
var embeddingModelOllama = new OllamaEmbeddingModel(providerOllama, id: "nomic-embed-text");
var llmOllama = new OllamaChatModel(providerOllama, id: "llama3.1").UseConsoleForDebug();

4 changes: 2 additions & 2 deletions src/Serve/OpenAI/ServeExtensions.cs
@@ -46,7 +46,7 @@ public static WebApplication UseLangChainServeOpenAi(this WebApplication app, Ac
_ => throw new NotImplementedException(),
}
}).ToList(),
-}).ConfigureAwait(false);
+});

return Results.Ok(new CreateChatCompletionResponse
{
@@ -60,7 +60,7 @@ public static WebApplication UseLangChainServeOpenAi(this WebApplication app, Ac
{
Message = new ChatCompletionResponseMessage
{
-Content = response.Messages.Last().Content,
+Content = response.LastMessageContent,
Role = ChatCompletionResponseMessageRole.Assistant,
},
Index = 0,
