Skip to content

Commit

Permalink
Merge pull request #10 from neu-se/llm-options
Browse files Browse the repository at this point in the history
read LLM provider from env variable
  • Loading branch information
franktip authored Dec 22, 2024
2 parents 35c877a + 42a79eb commit 5733896
Show file tree
Hide file tree
Showing 3 changed files with 23 additions and 9 deletions.
1 change: 1 addition & 0 deletions .github/workflows/openrouter-exp.yml
Original file line number Diff line number Diff line change
Expand Up @@ -157,6 +157,7 @@ jobs:
env:
LLMORPHEUS_LLM_API_ENDPOINT: '${{ secrets.OPENROUTER_LLM_API_ENDPOINT }}'
LLMORPHEUS_LLM_AUTH_HEADERS: '${{ secrets.OPENROUTER_LLM_AUTH_HEADERS }}'
LLMORPHEUS_LLM_PROVIDER: '${{ secrets.LLMORPHEUS_LLM_PROVIDER }}'
run: |
cd ${{ matrix.package.name }}
BENCHMARK_DIR=`pwd`
Expand Down
16 changes: 9 additions & 7 deletions src/model/IModel.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,14 +20,16 @@ export const defaultPostOptions = {
top_p: 1, // no need to change this
};

// Default POST options for the OpenAI-style completions endpoint.
// Extends the shared `defaultPostOptions` (defined above) with
// OpenAI-specific sampling settings.
// NOTE(review): the name spells "Postoptions" with a lowercase "o" —
// kept as-is because external references depend on it.
export const defaultOpenAIPostoptions = {
...defaultPostOptions,
n: 5, // number of completions to request per query
stop: ["\n\n"], // list of tokens to stop at
};
/**
 * Full set of options that may accompany a model POST request.
 *
 * Callers typically use the {@link PostOptions} alias (a `Partial` of this
 * type), so every field here is effectively optional at call sites.
 */
export interface PostOptionsType {
  /** Maximum number of tokens the model may generate. */
  max_tokens: number;
  /** Sampling temperature. */
  temperature: number;
  /** Nucleus-sampling cutoff. */
  top_p: number;
  /**
   * Provider-routing preferences; `order` lists providers by priority
   * (presumably OpenRouter's provider-routing object — TODO confirm).
   */
  provider: {
    order: string[];
  };
}

export type PostOptions = Partial<typeof defaultPostOptions>;
export type OpenAIPostOptions = Partial<typeof defaultOpenAIPostoptions>;
export type PostOptions = Partial<PostOptionsType>;

export interface IModelFailureCounter {
nrRetries: number;
Expand Down
15 changes: 13 additions & 2 deletions src/model/Model.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ import {
RateLimiter,
} from "../util/promise-utils";
import { retry } from "../util/promise-utils";
import { IModel, IModelFailureCounter } from "./IModel";
import { IModel, IModelFailureCounter, PostOptionsType } from "./IModel";
import { PostOptions, defaultPostOptions } from "./IModel";
import { getEnv } from "../util/code-utils";
import { IQueryResult } from "./IQueryResult";
Expand All @@ -24,6 +24,10 @@ export class Model implements IModel {
getEnv("LLMORPHEUS_LLM_AUTH_HEADERS")
);

// Provider-routing preferences parsed from the LLMORPHEUS_LLM_PROVIDER
// environment variable. A falsy parsed value (e.g. JSON "null") disables
// provider routing — see the `if (Model.LLMORPHEUS_LLM_PROVIDER)` guard
// where this is consumed.
// NOTE(review): JSON.parse throws if the variable is unset or holds
// invalid JSON — confirm getEnv supplies a parseable default (such as
// "null") when the variable is absent.
protected static LLMORPHEUS_LLM_PROVIDER = JSON.parse(
getEnv("LLMORPHEUS_LLM_PROVIDER")
);

protected instanceOptions: PostOptions;
protected rateLimiter: RateLimiter;
protected counter: IModelFailureCounter = { nrRetries: 0, nrFailures: 0 };
Expand Down Expand Up @@ -96,14 +100,21 @@ export class Model implements IModel {
`templates/${this.metaInfo.systemPrompt}`,
"utf8"
);
const body = {
let body = {
model: this.getModelName(),
messages: [
{ role: "system", content: systemPrompt },
{ role: "user", content: prompt },
],
...options,
};
if (Model.LLMORPHEUS_LLM_PROVIDER) {
const provider = Model.LLMORPHEUS_LLM_PROVIDER;
body = {
...body,
provider: provider,
};
}

performance.mark("llm-query-start");
let res;
Expand Down

0 comments on commit 5733896

Please sign in to comment.