@@ -295,6 +295,9 @@ partial void ProcessOpenaiChatCompletionsResponseContent(
/// <param name="logprobs">
/// Whether to return log probabilities of the output tokens or not. If true, returns the log probabilities of each output token returned in the `content` of `message`.
/// </param>
/// <param name="streamOptions">
/// streaming options
/// </param>
/// <param name="cancellationToken">The token to cancel the operation with</param>
/// <exception cref="global::System.InvalidOperationException"></exception>
public async global::System.Threading.Tasks.Task<string> OpenaiChatCompletionsAsync(
@@ -320,6 +323,7 @@ partial void ProcessOpenaiChatCompletionsResponseContent(
string? user = default,
int? seed = default,
bool? logprobs = default,
global::DeepInfra.StreamOptions? streamOptions = default,
global::System.Threading.CancellationToken cancellationToken = default)
{
var __request = new global::DeepInfra.OpenAIChatCompletionsIn
@@ -343,6 +347,7 @@ partial void ProcessOpenaiChatCompletionsResponseContent(
User = user,
Seed = seed,
Logprobs = logprobs,
StreamOptions = streamOptions,
};

return await OpenaiChatCompletionsAsync(
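A hedged usage sketch of the new parameter: only `streamOptions`, `StreamOptions`, `IncludeUsage`, and `ContinuousUsageStats` come from this change; the client instance and the leading parameter names (`model`, `messages`) are elided from the hunk above and assumed here.

```csharp
// Sketch only: `client` is an existing DeepInfra.IDeepInfraClient, and the
// `model`/`messages` parameter names are assumptions (not shown in this hunk).
// The streamOptions argument and the StreamOptions type are from this PR.
var streamOptions = new DeepInfra.StreamOptions
{
    IncludeUsage = true,          // include usage data (documented default: true)
    ContinuousUsageStats = false, // per-event usage stats (documented default: false)
};

string response = await client.OpenaiChatCompletionsAsync(
    model: "meta-llama/Meta-Llama-3-70B-Instruct", // placeholder model id
    messages: messages,                            // assumed: chat messages built by the caller
    streamOptions: streamOptions);
```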
@@ -292,6 +292,9 @@ partial void ProcessOpenaiCompletionsResponseContent(
/// <param name="seed">
/// Seed for random number generator. If not provided, a random seed is used. Determinism is not guaranteed.
/// </param>
/// <param name="streamOptions">
/// streaming options
/// </param>
/// <param name="cancellationToken">The token to cancel the operation with</param>
/// <exception cref="global::System.InvalidOperationException"></exception>
public async global::System.Threading.Tasks.Task<string> OpenaiCompletionsAsync(
@@ -316,6 +319,7 @@ partial void ProcessOpenaiCompletionsResponseContent(
double? repetitionPenalty = default,
string? user = default,
int? seed = default,
global::DeepInfra.StreamOptions? streamOptions = default,
global::System.Threading.CancellationToken cancellationToken = default)
{
var __request = new global::DeepInfra.OpenAICompletionsIn
@@ -338,6 +342,7 @@ partial void ProcessOpenaiCompletionsResponseContent(
RepetitionPenalty = repetitionPenalty,
User = user,
Seed = seed,
StreamOptions = streamOptions,
};

return await OpenaiCompletionsAsync(
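The text-completions overload gains the same parameter; a minimal sketch under the same assumptions (the `model` and `prompt` parameter names are not shown in this hunk, and `prompt` is assumed to accept a plain string).

```csharp
// Hedged sketch: parameter names other than `streamOptions` are assumptions.
string completion = await client.OpenaiCompletionsAsync(
    model: "meta-llama/Meta-Llama-3-70B-Instruct",      // placeholder model id
    prompt: "Summarize stream_options in one sentence.", // placeholder prompt
    streamOptions: new DeepInfra.StreamOptions { IncludeUsage = true });
```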
@@ -94,6 +94,9 @@ public partial interface IDeepInfraClient
/// <param name="logprobs">
/// Whether to return log probabilities of the output tokens or not. If true, returns the log probabilities of each output token returned in the `content` of `message`.
/// </param>
/// <param name="streamOptions">
/// streaming options
/// </param>
/// <param name="cancellationToken">The token to cancel the operation with</param>
/// <exception cref="global::System.InvalidOperationException"></exception>
global::System.Threading.Tasks.Task<string> OpenaiChatCompletionsAsync(
@@ -119,6 +122,7 @@ public partial interface IDeepInfraClient
string? user = default,
int? seed = default,
bool? logprobs = default,
global::DeepInfra.StreamOptions? streamOptions = default,
global::System.Threading.CancellationToken cancellationToken = default);
}
}
@@ -91,6 +91,9 @@ public partial interface IDeepInfraClient
/// <param name="seed">
/// Seed for random number generator. If not provided, a random seed is used. Determinism is not guaranteed.
/// </param>
/// <param name="streamOptions">
/// streaming options
/// </param>
/// <param name="cancellationToken">The token to cancel the operation with</param>
/// <exception cref="global::System.InvalidOperationException"></exception>
global::System.Threading.Tasks.Task<string> OpenaiCompletionsAsync(
@@ -115,6 +118,7 @@ public partial interface IDeepInfraClient
double? repetitionPenalty = default,
string? user = default,
int? seed = default,
global::DeepInfra.StreamOptions? streamOptions = default,
global::System.Threading.CancellationToken cancellationToken = default);
}
}
@@ -139,6 +139,12 @@ public sealed partial class OpenAIChatCompletionsIn
[global::System.Text.Json.Serialization.JsonPropertyName("logprobs")]
public bool? Logprobs { get; set; }

/// <summary>
/// streaming options
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("stream_options")]
public global::DeepInfra.StreamOptions? StreamOptions { get; set; }

/// <summary>
/// Additional properties that are not explicitly defined in the schema
/// </summary>
@@ -216,6 +222,9 @@ public sealed partial class OpenAIChatCompletionsIn
/// <param name="logprobs">
/// Whether to return log probabilities of the output tokens or not. If true, returns the log probabilities of each output token returned in the `content` of `message`.
/// </param>
/// <param name="streamOptions">
/// streaming options
/// </param>
#if NET7_0_OR_GREATER
[global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers]
#endif
@@ -238,7 +247,8 @@ public OpenAIChatCompletionsIn(
double? repetitionPenalty,
string? user,
int? seed,
bool? logprobs)
bool? logprobs,
global::DeepInfra.StreamOptions? streamOptions)
{
this.Model = model ?? throw new global::System.ArgumentNullException(nameof(model));
this.Messages = messages ?? throw new global::System.ArgumentNullException(nameof(messages));
@@ -259,6 +269,7 @@ public OpenAIChatCompletionsIn(
this.User = user;
this.Seed = seed;
this.Logprobs = logprobs;
this.StreamOptions = streamOptions;
}

/// <summary>
@@ -133,6 +133,12 @@ public sealed partial class OpenAICompletionsIn
[global::System.Text.Json.Serialization.JsonPropertyName("seed")]
public int? Seed { get; set; }

/// <summary>
/// streaming options
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("stream_options")]
public global::DeepInfra.StreamOptions? StreamOptions { get; set; }

/// <summary>
/// Additional properties that are not explicitly defined in the schema
/// </summary>
@@ -207,6 +213,9 @@ public sealed partial class OpenAICompletionsIn
/// <param name="seed">
/// Seed for random number generator. If not provided, a random seed is used. Determinism is not guaranteed.
/// </param>
/// <param name="streamOptions">
/// streaming options
/// </param>
#if NET7_0_OR_GREATER
[global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers]
#endif
@@ -228,7 +237,8 @@ public OpenAICompletionsIn(
global::DeepInfra.ResponseFormat2? responseFormat,
double? repetitionPenalty,
string? user,
int? seed)
int? seed,
global::DeepInfra.StreamOptions? streamOptions)
{
this.Model = model ?? throw new global::System.ArgumentNullException(nameof(model));
this.Prompt = prompt ?? throw new global::System.ArgumentNullException(nameof(prompt));
@@ -248,6 +258,7 @@ public OpenAICompletionsIn(
this.RepetitionPenalty = repetitionPenalty;
this.User = user;
this.Seed = seed;
this.StreamOptions = streamOptions;
}

/// <summary>
@@ -0,0 +1,92 @@
#nullable enable

namespace DeepInfra
{
public sealed partial class StreamOptions
{
/// <summary>
/// Serializes the current instance to a JSON string using the provided JsonSerializerContext.
/// </summary>
public string ToJson(
global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext)
{
return global::System.Text.Json.JsonSerializer.Serialize(
this,
this.GetType(),
jsonSerializerContext);
}

/// <summary>
/// Serializes the current instance to a JSON string using the provided JsonSerializerOptions.
/// </summary>
#if NET8_0_OR_GREATER
[global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")]
[global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")]
#endif
public string ToJson(
global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null)
{
return global::System.Text.Json.JsonSerializer.Serialize(
this,
jsonSerializerOptions);
}

/// <summary>
/// Deserializes a JSON string using the provided JsonSerializerContext.
/// </summary>
public static global::DeepInfra.StreamOptions? FromJson(
string json,
global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext)
{
return global::System.Text.Json.JsonSerializer.Deserialize(
json,
typeof(global::DeepInfra.StreamOptions),
jsonSerializerContext) as global::DeepInfra.StreamOptions;
}

/// <summary>
/// Deserializes a JSON string using the provided JsonSerializerOptions.
/// </summary>
#if NET8_0_OR_GREATER
[global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")]
[global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")]
#endif
public static global::DeepInfra.StreamOptions? FromJson(
string json,
global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null)
{
return global::System.Text.Json.JsonSerializer.Deserialize<global::DeepInfra.StreamOptions>(
json,
jsonSerializerOptions);
}

/// <summary>
/// Deserializes a JSON stream using the provided JsonSerializerContext.
/// </summary>
public static async global::System.Threading.Tasks.ValueTask<global::DeepInfra.StreamOptions?> FromJsonStreamAsync(
global::System.IO.Stream jsonStream,
global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext)
{
return (await global::System.Text.Json.JsonSerializer.DeserializeAsync(
jsonStream,
typeof(global::DeepInfra.StreamOptions),
jsonSerializerContext).ConfigureAwait(false)) as global::DeepInfra.StreamOptions;
}

/// <summary>
/// Deserializes a JSON stream using the provided JsonSerializerOptions.
/// </summary>
#if NET8_0_OR_GREATER
[global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")]
[global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")]
#endif
public static global::System.Threading.Tasks.ValueTask<global::DeepInfra.StreamOptions?> FromJsonStreamAsync(
global::System.IO.Stream jsonStream,
global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null)
{
return global::System.Text.Json.JsonSerializer.DeserializeAsync<global::DeepInfra.StreamOptions?>(
jsonStream,
jsonSerializerOptions);
}
}
}
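A round-trip sketch using the reflection-based helpers defined above (the `JsonSerializerContext` overloads are the trimming/AOT-friendly alternative); the commented output assumes default `JsonSerializerOptions` and is approximate.

```csharp
var options = new DeepInfra.StreamOptions
{
    IncludeUsage = true,
    ContinuousUsageStats = true,
};

// Serialize with the generated helper (reflection-based overload).
string json = options.ToJson();
// Roughly: {"include_usage":true,"continuous_usage_stats":true}

// Deserialize back with the matching helper.
DeepInfra.StreamOptions? parsed = DeepInfra.StreamOptions.FromJson(json);
```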
60 changes: 60 additions & 0 deletions src/libs/DeepInfra/Generated/DeepInfra.Models.StreamOptions.g.cs
@@ -0,0 +1,60 @@

#nullable enable

namespace DeepInfra
{
/// <summary>
///
/// </summary>
public sealed partial class StreamOptions
{
/// <summary>
/// whether to include usage data<br/>
/// Default Value: true
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("include_usage")]
public bool? IncludeUsage { get; set; }

/// <summary>
/// whether to include usage stats continuously with each streaming event<br/>
/// Default Value: false
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("continuous_usage_stats")]
public bool? ContinuousUsageStats { get; set; }

/// <summary>
/// Additional properties that are not explicitly defined in the schema
/// </summary>
[global::System.Text.Json.Serialization.JsonExtensionData]
public global::System.Collections.Generic.IDictionary<string, object> AdditionalProperties { get; set; } = new global::System.Collections.Generic.Dictionary<string, object>();

/// <summary>
/// Initializes a new instance of the <see cref="StreamOptions" /> class.
/// </summary>
/// <param name="includeUsage">
/// whether to include usage data<br/>
/// Default Value: true
/// </param>
/// <param name="continuousUsageStats">
/// whether to include usage stats continuously with each streaming event<br/>
/// Default Value: false
/// </param>
#if NET7_0_OR_GREATER
[global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers]
#endif
public StreamOptions(
bool? includeUsage,
bool? continuousUsageStats)
{
this.IncludeUsage = includeUsage;
this.ContinuousUsageStats = continuousUsageStats;
}

/// <summary>
/// Initializes a new instance of the <see cref="StreamOptions" /> class.
/// </summary>
public StreamOptions()
{
}
}
}
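Both construction styles below are supported by the class above: the positional constructor (marked `SetsRequiredMembers` on .NET 7+) and the parameterless constructor with property setters. Leaving a flag `null` presumably defers to the documented defaults (`include_usage: true`, `continuous_usage_stats: false`); that server-side fallback is an assumption, not something this file states.

```csharp
// Positional constructor added in this file:
var a = new DeepInfra.StreamOptions(includeUsage: true, continuousUsageStats: false);

// Equivalent object-initializer form via the parameterless constructor:
var b = new DeepInfra.StreamOptions
{
    IncludeUsage = true,
    ContinuousUsageStats = false,
};
```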