Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Generate nullable as type alias and resolve recursive reference in union #2989

Open
wants to merge 14 commits into
base: main
Choose a base branch
from
Open
34 changes: 18 additions & 16 deletions common/config/rush/pnpm-lock.yaml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion packages/typespec-test/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
"@azure-tools/typespec-autorest": ">=0.50.0 <1.0.0",
"@typespec/openapi3": ">=0.64.0 <1.0.0",
"@azure-tools/typespec-azure-core": ">=0.50.0 <1.0.0",
"@azure-tools/typespec-client-generator-core": ">=0.50.0 <1.0.0",
"@azure-tools/typespec-client-generator-core": ">=0.50.2 <1.0.0",
"@azure-tools/typespec-azure-resource-manager": ">=0.50.0 <1.0.0",
"@azure-tools/typespec-azure-rulesets": ">=0.50.0 <1.0.0",
"@typespec/compiler": ">=0.64.0 <1.0.0",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -118,7 +118,7 @@ export interface CreateChatCompletionRequest {
model: "gpt4" | "gpt-4-0314" | "gpt-4-0613" | "gpt-4-32k" | "gpt-4-32k-0314" | "gpt-4-32k-0613" | "gpt-3.5-turbo" | "gpt-3.5-turbo-16k" | "gpt-3.5-turbo-0301" | "gpt-3.5-turbo-0613" | "gpt-3.5-turbo-16k-0613";
n?: number | null;
presence_penalty?: number | null;
stop?: Stop | null;
stop?: Stop;
stream?: boolean | null;
temperature?: number | null;
top_p?: number | null;
Expand Down Expand Up @@ -151,8 +151,8 @@ export interface CreateCompletionRequest {
model: "babbage-002" | "davinci-002" | "text-davinci-003" | "text-davinci-002" | "text-davinci-001" | "code-davinci-002" | "text-curie-001" | "text-babbage-001" | "text-ada-001";
n?: number | null;
presence_penalty?: number | null;
prompt: Prompt | null;
stop?: Stop | null;
prompt: Prompt;
stop?: Stop;
stream?: boolean | null;
suffix?: string | null;
temperature?: number | null;
Expand Down Expand Up @@ -797,10 +797,16 @@ export interface OpenAIFile {
}

// @public
export type Prompt = string | string[] | number[] | number[][];
export type Prompt = Prompt_1 | null;

// @public
export type Stop = string | string[];
export type Prompt_1 = string | string[] | number[] | number[][];
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is this because TCGC wrongly tagged this union type as a named type when it should be an anonymous one?


// @public
export type Stop = Stop_1 | null;

// @public
export type Stop_1 = string | string[];

// (No @packageDocumentation comment for this package)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,9 @@ export {
CompletionUsage,
CreateCompletionRequest,
Prompt,
Prompt_1,
Stop,
Stop_1,
CreateCompletionResponse,
CreateFineTuningJobRequest,
FineTuningJob,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,9 @@ export {
CompletionUsage,
CreateCompletionRequest,
Prompt,
Prompt_1,
Stop,
Stop_1,
CreateCompletionResponse,
CreateFineTuningJobRequest,
FineTuningJob,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1133,7 +1133,7 @@ export interface CreateCompletionRequest {
* Note that <|endoftext|> is the document separator that the model sees during training, so if a
* prompt is not specified the model will generate as if from the beginning of a new document.
*/
prompt: Prompt | null;
prompt: Prompt;
/** The suffix that comes after a completion of inserted text. */
suffix?: string | null;
/**
Expand Down Expand Up @@ -1166,7 +1166,7 @@ export interface CreateCompletionRequest {
*/
max_tokens?: number | null;
/** Up to 4 sequences where the API will stop generating further tokens. */
stop?: Stop | null;
stop?: Stop;
/**
* Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear
* in the text so far, increasing the model's likelihood to talk about new topics.
Expand Down Expand Up @@ -1251,14 +1251,18 @@ export function createCompletionRequestSerializer(
}

/** Alias for Prompt */
export type Prompt = string | string[] | number[] | number[][];
export type Prompt = Prompt_1 | null;
/** Alias for Prompt */
export type Prompt_1 = string | string[] | number[] | number[][];

export function promptSerializer(item: Prompt): any {
return item;
}

/** Alias for Stop */
export type Stop = string | string[];
export type Stop = Stop_1 | null;
/** Alias for Stop */
export type Stop_1 = string | string[];

export function stopSerializer(item: Stop): any {
return item;
Expand Down Expand Up @@ -1747,7 +1751,7 @@ export interface CreateChatCompletionRequest {
*/
max_tokens?: number | null;
/** Up to 4 sequences where the API will stop generating further tokens. */
stop?: Stop | null;
stop?: Stop;
/**
* Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear
* in the text so far, increasing the model's likelihood to talk about new topics.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -118,7 +118,7 @@ export interface CreateChatCompletionRequest {
model: "gpt4" | "gpt-4-0314" | "gpt-4-0613" | "gpt-4-32k" | "gpt-4-32k-0314" | "gpt-4-32k-0613" | "gpt-3.5-turbo" | "gpt-3.5-turbo-16k" | "gpt-3.5-turbo-0301" | "gpt-3.5-turbo-0613" | "gpt-3.5-turbo-16k-0613";
n?: number | null;
presencePenalty?: number | null;
stop?: Stop | null;
stop?: Stop;
stream?: boolean | null;
temperature?: number | null;
topP?: number | null;
Expand Down Expand Up @@ -151,8 +151,8 @@ export interface CreateCompletionRequest {
model: "babbage-002" | "davinci-002" | "text-davinci-003" | "text-davinci-002" | "text-davinci-001" | "code-davinci-002" | "text-curie-001" | "text-babbage-001" | "text-ada-001";
n?: number | null;
presencePenalty?: number | null;
prompt: Prompt | null;
stop?: Stop | null;
prompt: Prompt;
stop?: Stop;
stream?: boolean | null;
suffix?: string | null;
temperature?: number | null;
Expand Down Expand Up @@ -797,10 +797,16 @@ export interface OpenAIFile {
}

// @public
export type Prompt = string | string[] | number[] | number[][];
export type Prompt = Prompt_1 | null;

// @public
export type Stop = string | string[];
export type Prompt_1 = string | string[] | number[] | number[][];
Copy link
Member

@MaryGao MaryGao Jan 26, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I was wondering whether we should deliver an alpha version for keyvault first. The correct fix, without the xxx_1 suffix, should be non-breaking, but this PR would introduce a breaking change.


// @public
export type Stop = Stop_1 | null;

// @public
export type Stop_1 = string | string[];

// (No @packageDocumentation comment for this package)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,9 @@ export {
CompletionUsage,
CreateCompletionRequest,
Prompt,
Prompt_1,
Stop,
Stop_1,
CreateCompletionResponse,
CreateFineTuningJobRequest,
FineTuningJob,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,9 @@ export {
CompletionUsage,
CreateCompletionRequest,
Prompt,
Prompt_1,
Stop,
Stop_1,
CreateCompletionResponse,
CreateFineTuningJobRequest,
FineTuningJob,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1132,7 +1132,7 @@ export interface CreateCompletionRequest {
* Note that <|endoftext|> is the document separator that the model sees during training, so if a
* prompt is not specified the model will generate as if from the beginning of a new document.
*/
prompt: Prompt | null;
prompt: Prompt;
/** The suffix that comes after a completion of inserted text. */
suffix?: string | null;
/**
Expand Down Expand Up @@ -1165,7 +1165,7 @@ export interface CreateCompletionRequest {
*/
maxTokens?: number | null;
/** Up to 4 sequences where the API will stop generating further tokens. */
stop?: Stop | null;
stop?: Stop;
/**
* Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear
* in the text so far, increasing the model's likelihood to talk about new topics.
Expand Down Expand Up @@ -1250,14 +1250,18 @@ export function createCompletionRequestSerializer(
}

/** Alias for Prompt */
export type Prompt = string | string[] | number[] | number[][];
export type Prompt = Prompt_1 | null;
/** Alias for Prompt */
export type Prompt_1 = string | string[] | number[] | number[][];

export function promptSerializer(item: Prompt): any {
return item;
}

/** Alias for Stop */
export type Stop = string | string[];
export type Stop = Stop_1 | null;
/** Alias for Stop */
export type Stop_1 = string | string[];

export function stopSerializer(item: Stop): any {
return item;
Expand Down Expand Up @@ -1746,7 +1750,7 @@ export interface CreateChatCompletionRequest {
*/
maxTokens?: number | null;
/** Up to 4 sequences where the API will stop generating further tokens. */
stop?: Stop | null;
stop?: Stop;
/**
* Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear
* in the text so far, increasing the model's likelihood to talk about new topics.
Expand Down
4 changes: 2 additions & 2 deletions packages/typespec-ts/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@
"@azure-tools/typespec-autorest": ">=0.50.0 <1.0.0",
"@azure-tools/typespec-azure-core": ">=0.50.0 <1.0.0",
"@azure-tools/typespec-azure-resource-manager": ">=0.50.0 <1.0.0",
"@azure-tools/typespec-client-generator-core": ">=0.50.0 <1.0.0",
"@azure-tools/typespec-client-generator-core": ">=0.50.2 <1.0.0",
"@azure/abort-controller": "^2.1.2",
"@azure/core-auth": "^1.6.0",
"@azure/core-lro": "^3.1.0",
Expand Down Expand Up @@ -114,7 +114,7 @@
},
"peerDependencies": {
"@azure-tools/typespec-azure-core": ">=0.50.0 <1.0.0",
"@azure-tools/typespec-client-generator-core": ">=0.50.0 <1.0.0",
"@azure-tools/typespec-client-generator-core": ">=0.50.2 <1.0.0",
"@typespec/compiler": ">=0.64.0 <1.0.0",
"@typespec/http": ">=0.64.0 <1.0.0",
"@typespec/rest": ">=0.64.0 <1.0.0",
Expand Down
1 change: 1 addition & 0 deletions packages/typespec-ts/src/framework/hooks/sdkTypes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,7 @@ export function provideSdkTypes(context: SdkContext) {
);
break;
case "nullable":
sdkTypesContext.types.set(sdkModel.__raw!, sdkModel);
sdkTypesContext.types.set(sdkModel.type.__raw!, sdkModel.type);
break;
}
Expand Down
Loading
Loading