Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -28,21 +28,8 @@ public class LemurTaskParamsJsonConverter : global::System.Text.Json.Serializati
{
}

readerCopy = reader;
global::AssemblyAI.LemurBaseParams? value2 = default;
try
{
var typeInfo = typeInfoResolver.GetTypeInfo(typeof(global::AssemblyAI.LemurBaseParams), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo<global::AssemblyAI.LemurBaseParams> ??
throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::AssemblyAI.LemurBaseParams).Name}");
value2 = global::System.Text.Json.JsonSerializer.Deserialize(ref readerCopy, typeInfo);
}
catch (global::System.Text.Json.JsonException)
{
}

var result = new global::AssemblyAI.LemurTaskParams(
value1,
value2
value1
);

if (value1 != null)
Expand All @@ -51,12 +38,6 @@ public class LemurTaskParamsJsonConverter : global::System.Text.Json.Serializati
throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::AssemblyAI.LemurTaskParamsVariant1).Name}");
_ = global::System.Text.Json.JsonSerializer.Deserialize(ref reader, typeInfo);
}
else if (value2 != null)
{
var typeInfo = typeInfoResolver.GetTypeInfo(typeof(global::AssemblyAI.LemurBaseParams), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo<global::AssemblyAI.LemurBaseParams> ??
throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::AssemblyAI.LemurBaseParams).Name}");
_ = global::System.Text.Json.JsonSerializer.Deserialize(ref reader, typeInfo);
}

return result;
}
Expand All @@ -76,12 +57,6 @@ public override void Write(
throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::AssemblyAI.LemurTaskParamsVariant1).Name}");
global::System.Text.Json.JsonSerializer.Serialize(writer, value.Value1, typeInfo);
}
else if (value.IsValue2)
{
var typeInfo = typeInfoResolver.GetTypeInfo(typeof(global::AssemblyAI.LemurBaseParams), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo<global::AssemblyAI.LemurBaseParams?> ??
throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::AssemblyAI.LemurBaseParams).Name}");
global::System.Text.Json.JsonSerializer.Serialize(writer, value.Value2, typeInfo);
}
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -44,83 +44,33 @@ public LemurTaskParams(global::AssemblyAI.LemurTaskParamsVariant1? value)
Value1 = value;
}

/// <summary>
///
/// </summary>
#if NET6_0_OR_GREATER
public global::AssemblyAI.LemurBaseParams? Value2 { get; init; }
#else
public global::AssemblyAI.LemurBaseParams? Value2 { get; }
#endif

/// <summary>
///
/// </summary>
#if NET6_0_OR_GREATER
[global::System.Diagnostics.CodeAnalysis.MemberNotNullWhen(true, nameof(Value2))]
#endif
public bool IsValue2 => Value2 != null;

/// <summary>
///
/// </summary>
public static implicit operator LemurTaskParams(global::AssemblyAI.LemurBaseParams value) => new LemurTaskParams((global::AssemblyAI.LemurBaseParams?)value);

/// <summary>
///
/// </summary>
public static implicit operator global::AssemblyAI.LemurBaseParams?(LemurTaskParams @this) => @this.Value2;

/// <summary>
///
/// </summary>
public LemurTaskParams(global::AssemblyAI.LemurBaseParams? value)
{
Value2 = value;
}

/// <summary>
///
/// </summary>
public LemurTaskParams(
global::AssemblyAI.LemurTaskParamsVariant1? value1,
global::AssemblyAI.LemurBaseParams? value2
)
{
Value1 = value1;
Value2 = value2;
}

/// <summary>
///
/// </summary>
public object? Object =>
Value2 as object ??
Value1 as object
;

/// <summary>
///
/// </summary>
public override string? ToString() =>
Value1?.ToString() ??
Value2?.ToString()
Value1?.ToString()
;

/// <summary>
///
/// </summary>
public bool Validate()
{
return IsValue1 && IsValue2;
return IsValue1;
}

/// <summary>
///
/// </summary>
public TResult? Match<TResult>(
global::System.Func<global::AssemblyAI.LemurTaskParamsVariant1?, TResult>? value1 = null,
global::System.Func<global::AssemblyAI.LemurBaseParams?, TResult>? value2 = null,
bool validate = true)
{
if (validate)
Expand All @@ -132,10 +82,6 @@ public bool Validate()
{
return value1(Value1!);
}
else if (IsValue2 && value2 != null)
{
return value2(Value2!);
}

return default(TResult);
}
Expand All @@ -145,7 +91,6 @@ public bool Validate()
/// </summary>
public void Match(
global::System.Action<global::AssemblyAI.LemurTaskParamsVariant1?>? value1 = null,
global::System.Action<global::AssemblyAI.LemurBaseParams?>? value2 = null,
bool validate = true)
{
if (validate)
Expand All @@ -157,10 +102,6 @@ public void Match(
{
value1?.Invoke(Value1!);
}
else if (IsValue2)
{
value2?.Invoke(Value2!);
}
}

/// <summary>
Expand All @@ -172,8 +113,6 @@ public override int GetHashCode()
{
Value1,
typeof(global::AssemblyAI.LemurTaskParamsVariant1),
Value2,
typeof(global::AssemblyAI.LemurBaseParams),
};
const int offset = unchecked((int)2166136261);
const int prime = 16777619;
Expand All @@ -190,8 +129,7 @@ static int HashCodeAggregator(int hashCode, object? value) => value == null
public bool Equals(LemurTaskParams other)
{
return
global::System.Collections.Generic.EqualityComparer<global::AssemblyAI.LemurTaskParamsVariant1?>.Default.Equals(Value1, other.Value1) &&
global::System.Collections.Generic.EqualityComparer<global::AssemblyAI.LemurBaseParams?>.Default.Equals(Value2, other.Value2)
global::System.Collections.Generic.EqualityComparer<global::AssemblyAI.LemurTaskParamsVariant1?>.Default.Equals(Value1, other.Value1)
;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,13 +8,52 @@ namespace AssemblyAI
/// </summary>
public sealed partial class LemurTaskParamsVariant1
{
/// <summary>
/// The model that is used for the final prompt after compression is performed.<br/>
/// Default Value: default
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("final_model")]
[global::System.Text.Json.Serialization.JsonConverter(typeof(global::AssemblyAI.JsonConverters.LemurModelJsonConverter))]
[global::System.Text.Json.Serialization.JsonRequired]
public required global::AssemblyAI.LemurModel FinalModel { get; set; }

/// <summary>
/// Custom formatted transcript data. Maximum size is the context limit of the selected model.<br/>
/// Use either transcript_ids or input_text as input into LeMUR.
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("input_text")]
public string? InputText { get; set; }

/// <summary>
/// Max output size in tokens, up to 4000<br/>
/// Default Value: 2000
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("max_output_size")]
public int? MaxOutputSize { get; set; }

/// <summary>
/// Your text to prompt the model to produce a desired output, including any context you want to pass into the model.
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("prompt")]
[global::System.Text.Json.Serialization.JsonRequired]
public required string Prompt { get; set; }

/// <summary>
/// The temperature to use for the model.<br/>
/// Higher values result in answers that are more creative, lower values are more conservative.<br/>
/// Can be any value between 0.0 and 1.0 inclusive.<br/>
/// Default Value: 0F
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("temperature")]
public float? Temperature { get; set; }

/// <summary>
/// A list of completed transcripts with text. Up to a maximum of 100 hours of audio.<br/>
/// Use either transcript_ids or input_text as input into LeMUR.
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("transcript_ids")]
public global::System.Collections.Generic.IList<global::System.Guid>? TranscriptIds { get; set; }

/// <summary>
/// Additional properties that are not explicitly defined in the schema
/// </summary>
Expand All @@ -24,16 +63,48 @@ public sealed partial class LemurTaskParamsVariant1
/// <summary>
/// Initializes a new instance of the <see cref="LemurTaskParamsVariant1" /> class.
/// </summary>
/// <param name="finalModel">
/// The model that is used for the final prompt after compression is performed.<br/>
/// Default Value: default
/// </param>
/// <param name="inputText">
/// Custom formatted transcript data. Maximum size is the context limit of the selected model.<br/>
/// Use either transcript_ids or input_text as input into LeMUR.
/// </param>
/// <param name="maxOutputSize">
/// Max output size in tokens, up to 4000<br/>
/// Default Value: 2000
/// </param>
/// <param name="prompt">
/// Your text to prompt the model to produce a desired output, including any context you want to pass into the model.
/// </param>
/// <param name="temperature">
/// The temperature to use for the model.<br/>
/// Higher values result in answers that are more creative, lower values are more conservative.<br/>
/// Can be any value between 0.0 and 1.0 inclusive.<br/>
/// Default Value: 0F
/// </param>
/// <param name="transcriptIds">
/// A list of completed transcripts with text. Up to a maximum of 100 hours of audio.<br/>
/// Use either transcript_ids or input_text as input into LeMUR.
/// </param>
#if NET7_0_OR_GREATER
[global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers]
#endif
public LemurTaskParamsVariant1(
string prompt)
global::AssemblyAI.LemurModel finalModel,
string prompt,
string? inputText,
int? maxOutputSize,
float? temperature,
global::System.Collections.Generic.IList<global::System.Guid>? transcriptIds)
{
this.FinalModel = finalModel;
this.Prompt = prompt ?? throw new global::System.ArgumentNullException(nameof(prompt));
this.InputText = inputText;
this.MaxOutputSize = maxOutputSize;
this.Temperature = temperature;
this.TranscriptIds = transcriptIds;
}

/// <summary>
Expand Down
43 changes: 40 additions & 3 deletions src/libs/AssemblyAI/openapi.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -5644,13 +5644,50 @@ components:
x-label: Prompt
description: Your text to prompt the model to produce a desired output, including any context you want to pass into the model.
type: string
required: [prompt]
- $ref: "#/components/schemas/LemurBaseParams"
transcript_ids:
x-label: Transcript IDs
description: |
A list of completed transcripts with text. Up to a maximum of 100 hours of audio.
Use either transcript_ids or input_text as input into LeMUR.
type: array
items:
x-label: Transcript ID
type: string
format: uuid
input_text:
x-label: Input text
description: |
Custom formatted transcript data. Maximum size is the context limit of the selected model.
Use either transcript_ids or input_text as input into LeMUR.
type: string
final_model:
x-label: Final model
description: |
The model that is used for the final prompt after compression is performed.
default: "default"
anyOf:
- $ref: "#/components/schemas/LemurModel"
- type: string
max_output_size:
Comment on lines +5664 to +5670
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue

final_model type was unintentionally narrowed – restores old behaviour

LemurBaseParams/final_model accepts either an enumerated model OR any arbitrary string via:

anyOf:
  - $ref: '#/components/schemas/LemurModel'
  - type: string

The new definition replaces that with a oneOf that only allows the enum, silently breaking every existing client that was sending a custom model slug.

-              oneOf:
-                - $ref: "#/components/schemas/LemurModel"
+              anyOf:
+                - $ref: "#/components/schemas/LemurModel"
+                - type: string

Please restore the union or this will be a breaking change in the public contract.

🤖 Prompt for AI Agents
In src/libs/AssemblyAI/openapi.yaml around lines 5664 to 5670, the type
definition for final_model was changed from a union allowing either a LemurModel
enum or any string to a oneOf that only allows the enum, which breaks existing
clients using custom strings. To fix this, revert the type definition to use
anyOf with both the LemurModel reference and a string type, restoring the
original union behavior and preventing breaking changes.

x-label: Maximum output size
description: Max output size in tokens, up to 4000
type: integer
default: 2000
temperature:
x-label: Temperature
description: |
The temperature to use for the model.
Higher values result in answers that are more creative, lower values are more conservative.
Can be any value between 0.0 and 1.0 inclusive.
type: number
format: float
default: 0
minimum: 0
maximum: 1
required: [prompt, final_model]
example:
{
transcript_ids: ["64b1ab62-405c-4ae8-8a6b-d90b40ff3cce"],
prompt: "List all the locations affected by wildfires.",
context: "This is an interview about wildfires.",
final_model: "anthropic/claude-sonnet-4-20250514",
temperature: 0,
max_output_size: 3000,
Expand Down
Loading