feat: Updated OpenAPI spec
github-actions[bot] authored and HavenDV committed Sep 17, 2024
1 parent b4633a0 commit ed1b5d2
Showing 21 changed files with 47 additions and 47 deletions.
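The diff below widens value-type parameters and properties (`bool`, `int`, `long`, `DateTime`) to their nullable counterparts, so "not specified" can be distinguished from a default value. As a caller-side sketch only (not part of the generated code; it assumes nothing beyond the `IModelsClient.PullModelAsync` signature shown in this diff, plus a hypothetical model name), the nullable parameters read like this at a call site:

```csharp
using System;
using System.Threading.Tasks;
using Ollama;

public static class PullModelExample
{
    // Sketch of a call site against the regenerated IModelsClient:
    // `insecure` and `stream` are now `bool?`, so null expresses
    // "no explicit value" instead of being forced to false.
    public static async Task RunAsync(IModelsClient models)
    {
        var updates = 0;
        await foreach (PullModelResponse _ in models.PullModelAsync(
            model: "llama3.1",   // hypothetical model name
            insecure: null,      // bool? now: null means "not specified"
            stream: true))       // explicit opt-in to streamed progress
        {
            updates++;
        }
        Console.WriteLine($"received {updates} progress objects");
    }
}
```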
@@ -120,7 +120,7 @@ partial void ProcessGenerateChatCompletionResponse(
global::System.Collections.Generic.IList<global::Ollama.Message> messages,
global::Ollama.ResponseFormat? format = default,
global::Ollama.RequestOptions? options = default,
bool stream = true,
bool? stream = true,
int? keepAlive = default,
global::System.Collections.Generic.IList<global::Ollama.Tool>? tools = default,
[global::System.Runtime.CompilerServices.EnumeratorCancellation] global::System.Threading.CancellationToken cancellationToken = default)
@@ -142,8 +142,8 @@ partial void ProcessGenerateCompletionResponse(
global::System.Collections.Generic.IList<long>? context = default,
global::Ollama.RequestOptions? options = default,
global::Ollama.ResponseFormat? format = default,
bool raw = default,
bool stream = true,
bool? raw = default,
bool? stream = true,
int? keepAlive = default,
[global::System.Runtime.CompilerServices.EnumeratorCancellation] global::System.Threading.CancellationToken cancellationToken = default)
{
@@ -56,7 +56,7 @@ public partial interface IChatClient
global::System.Collections.Generic.IList<global::Ollama.Message> messages,
global::Ollama.ResponseFormat? format = default,
global::Ollama.RequestOptions? options = default,
bool stream = true,
bool? stream = true,
int? keepAlive = default,
global::System.Collections.Generic.IList<global::Ollama.Tool>? tools = default,
global::System.Threading.CancellationToken cancellationToken = default);
@@ -78,8 +78,8 @@ public partial interface ICompletionsClient
global::System.Collections.Generic.IList<long>? context = default,
global::Ollama.RequestOptions? options = default,
global::Ollama.ResponseFormat? format = default,
bool raw = default,
bool stream = true,
bool? raw = default,
bool? stream = true,
int? keepAlive = default,
global::System.Threading.CancellationToken cancellationToken = default);
}
@@ -45,7 +45,7 @@ public partial interface IModelsClient
string modelfile,
string? path = default,
string? quantize = default,
bool stream = true,
bool? stream = true,
global::System.Threading.CancellationToken cancellationToken = default);
}
}
4 changes: 2 additions & 2 deletions src/libs/Ollama/Generated/Ollama.IModelsClient.PullModel.g.cs
@@ -43,10 +43,10 @@ public partial interface IModelsClient
/// <exception cref="global::System.InvalidOperationException"></exception>
global::System.Collections.Generic.IAsyncEnumerable<global::Ollama.PullModelResponse> PullModelAsync(
string model,
bool insecure = false,
bool? insecure = false,
string? username = default,
string? password = default,
bool stream = true,
bool? stream = true,
global::System.Threading.CancellationToken cancellationToken = default);
}
}
4 changes: 2 additions & 2 deletions src/libs/Ollama/Generated/Ollama.IModelsClient.PushModel.g.cs
@@ -42,10 +42,10 @@ public partial interface IModelsClient
/// <exception cref="global::System.InvalidOperationException"></exception>
global::System.Collections.Generic.IAsyncEnumerable<global::Ollama.PushModelResponse> PushModelAsync(
string model,
bool insecure = false,
bool? insecure = false,
string? username = default,
string? password = default,
bool stream = true,
bool? stream = true,
global::System.Threading.CancellationToken cancellationToken = default);
}
}
@@ -42,7 +42,7 @@ public sealed partial class CreateModelRequest
/// Default Value: true
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("stream")]
public bool Stream { get; set; } = true;
public bool? Stream { get; set; } = true;

/// <summary>
/// Additional properties that are not explicitly defined in the schema
@@ -44,7 +44,7 @@ public sealed partial class GenerateChatCompletionRequest
/// Default Value: true
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("stream")]
public bool Stream { get; set; } = true;
public bool? Stream { get; set; } = true;

/// <summary>
/// How long (in minutes) to keep the model loaded in memory.<br/>
@@ -51,42 +51,42 @@ public sealed partial class GenerateChatCompletionResponse
/// Example: 5589157167L
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("total_duration")]
public long TotalDuration { get; set; }
public long? TotalDuration { get; set; }

/// <summary>
/// Time spent in nanoseconds loading the model.<br/>
/// Example: 3013701500L
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("load_duration")]
public long LoadDuration { get; set; }
public long? LoadDuration { get; set; }

/// <summary>
/// Number of tokens in the prompt.<br/>
/// Example: 46
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("prompt_eval_count")]
public int PromptEvalCount { get; set; }
public int? PromptEvalCount { get; set; }

/// <summary>
/// Time spent in nanoseconds evaluating the prompt.<br/>
/// Example: 1160282000L
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("prompt_eval_duration")]
public long PromptEvalDuration { get; set; }
public long? PromptEvalDuration { get; set; }

/// <summary>
/// Number of tokens the response.<br/>
/// Example: 113
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("eval_count")]
public int EvalCount { get; set; }
public int? EvalCount { get; set; }

/// <summary>
/// Time in nanoseconds spent generating the response.<br/>
/// Example: 1325948000L
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("eval_duration")]
public long EvalDuration { get; set; }
public long? EvalDuration { get; set; }

/// <summary>
/// Additional properties that are not explicitly defined in the schema
@@ -75,14 +75,14 @@ public sealed partial class GenerateCompletionRequest
/// You may choose to use the `raw` parameter if you are specifying a full templated prompt in your request to the API, and are managing history yourself.
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("raw")]
public bool Raw { get; set; }
public bool? Raw { get; set; }

/// <summary>
/// If `false` the response will be returned as a single response object, otherwise the response will be streamed as a series of objects.<br/>
/// Default Value: true
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("stream")]
public bool Stream { get; set; } = true;
public bool? Stream { get; set; } = true;

/// <summary>
/// How long (in minutes) to keep the model loaded in memory.<br/>
@@ -20,7 +20,7 @@ public sealed partial class GenerateCompletionResponse
/// Date on which a model was created.
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("created_at")]
public global::System.DateTime CreatedAt { get; set; }
public global::System.DateTime? CreatedAt { get; set; }

/// <summary>
/// The response for a given prompt with a provided model.<br/>
@@ -34,7 +34,7 @@ public sealed partial class GenerateCompletionResponse
/// Example: true
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("done")]
public bool Done { get; set; }
public bool? Done { get; set; }

/// <summary>
/// An encoding of the conversation used in this response, this can be sent in the next request to keep a conversational memory.<br/>
@@ -48,42 +48,42 @@ public sealed partial class GenerateCompletionResponse
/// Example: 5589157167L
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("total_duration")]
public long TotalDuration { get; set; }
public long? TotalDuration { get; set; }

/// <summary>
/// Time spent in nanoseconds loading the model.<br/>
/// Example: 3013701500L
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("load_duration")]
public long LoadDuration { get; set; }
public long? LoadDuration { get; set; }

/// <summary>
/// Number of tokens in the prompt.<br/>
/// Example: 46
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("prompt_eval_count")]
public int PromptEvalCount { get; set; }
public int? PromptEvalCount { get; set; }

/// <summary>
/// Time spent in nanoseconds evaluating the prompt.<br/>
/// Example: 1160282000L
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("prompt_eval_duration")]
public long PromptEvalDuration { get; set; }
public long? PromptEvalDuration { get; set; }

/// <summary>
/// Number of tokens the response.<br/>
/// Example: 113
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("eval_count")]
public int EvalCount { get; set; }
public int? EvalCount { get; set; }

/// <summary>
/// Time in nanoseconds spent generating the response.<br/>
/// Example: 1325948000L
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("eval_duration")]
public long EvalDuration { get; set; }
public long? EvalDuration { get; set; }

/// <summary>
/// Additional properties that are not explicitly defined in the schema
4 changes: 2 additions & 2 deletions src/libs/Ollama/Generated/Ollama.Models.Model.g.cs
@@ -20,14 +20,14 @@ public sealed partial class Model
/// Model modification date.
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("modified_at")]
public global::System.DateTime ModifiedAt { get; set; }
public global::System.DateTime? ModifiedAt { get; set; }

/// <summary>
/// Size of the model on disk.<br/>
/// Example: 7323310500L
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("size")]
public long Size { get; set; }
public long? Size { get; set; }

/// <summary>
/// The model's digest.<br/>
6 changes: 3 additions & 3 deletions src/libs/Ollama/Generated/Ollama.Models.ProcessModel.g.cs
@@ -21,7 +21,7 @@ public sealed partial class ProcessModel
/// Example: 7323310500L
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("size")]
public long Size { get; set; }
public long? Size { get; set; }

/// <summary>
/// The model's digest.<br/>
@@ -40,14 +40,14 @@ public sealed partial class ProcessModel
///
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("expires_at")]
public global::System.DateTime ExpiresAt { get; set; }
public global::System.DateTime? ExpiresAt { get; set; }

/// <summary>
/// Size of the model on disk.<br/>
/// Example: 7323310500L
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("size_vram")]
public long SizeVram { get; set; }
public long? SizeVram { get; set; }

/// <summary>
/// Additional properties that are not explicitly defined in the schema
4 changes: 2 additions & 2 deletions src/libs/Ollama/Generated/Ollama.Models.PullModelRequest.g.cs
@@ -23,7 +23,7 @@ public sealed partial class PullModelRequest
/// Default Value: false
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("insecure")]
public bool Insecure { get; set; } = false;
public bool? Insecure { get; set; } = false;

/// <summary>
/// Ollama username.
@@ -42,7 +42,7 @@ public sealed partial class PullModelRequest
/// Default Value: true
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("stream")]
public bool Stream { get; set; } = true;
public bool? Stream { get; set; } = true;

/// <summary>
/// Additional properties that are not explicitly defined in the schema
@@ -30,14 +30,14 @@ public sealed partial class PullModelResponse
/// Example: 2142590208L
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("total")]
public long Total { get; set; }
public long? Total { get; set; }

/// <summary>
/// Total bytes transferred.<br/>
/// Example: 2142590208L
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("completed")]
public long Completed { get; set; }
public long? Completed { get; set; }

/// <summary>
/// Additional properties that are not explicitly defined in the schema
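Since `Total` and `Completed` in `PullModelResponse` above are now `long?`, progress reporting has to tolerate missing values. A small null-aware helper, shown only as an illustrative sketch (the `PullProgress` class and `PercentComplete` method are hypothetical, not generated code):

```csharp
using Ollama;

public static class PullProgress
{
    // Returns a percentage for a pull update, or null while the server
    // has not yet reported a total size. Uses only the PullModelResponse
    // members shown in this diff (Total, Completed), both now long?.
    public static double? PercentComplete(PullModelResponse update)
    {
        if (update.Total is not long total || total == 0)
        {
            return null;
        }

        return 100.0 * (update.Completed ?? 0) / total;
    }
}
```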
4 changes: 2 additions & 2 deletions src/libs/Ollama/Generated/Ollama.Models.PushModelRequest.g.cs
@@ -22,7 +22,7 @@ public sealed partial class PushModelRequest
/// Default Value: false
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("insecure")]
public bool Insecure { get; set; } = false;
public bool? Insecure { get; set; } = false;

/// <summary>
/// Ollama username.
Expand All @@ -41,7 +41,7 @@ public sealed partial class PushModelRequest
/// Default Value: true
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("stream")]
public bool Stream { get; set; } = true;
public bool? Stream { get; set; } = true;

/// <summary>
/// Additional properties that are not explicitly defined in the schema
@@ -29,14 +29,14 @@ public sealed partial class PushModelResponse
/// Example: 2142590208L
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("total")]
public long Total { get; set; }
public long? Total { get; set; }

/// <summary>
/// Total bytes transferred.<br/>
/// Example: 2142590208L
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("completed")]
public long Completed { get; set; }
public long? Completed { get; set; }

/// <summary>
/// Additional properties that are not explicitly defined in the schema
@@ -109,7 +109,7 @@ partial void ProcessCreateModelResponse(
string modelfile,
string? path = default,
string? quantize = default,
bool stream = true,
bool? stream = true,
[global::System.Runtime.CompilerServices.EnumeratorCancellation] global::System.Threading.CancellationToken cancellationToken = default)
{
var request = new global::Ollama.CreateModelRequest
4 changes: 2 additions & 2 deletions src/libs/Ollama/Generated/Ollama.ModelsClient.PullModel.g.cs
@@ -107,10 +107,10 @@ partial void ProcessPullModelResponse(
/// <exception cref="global::System.InvalidOperationException"></exception>
public async global::System.Collections.Generic.IAsyncEnumerable<global::Ollama.PullModelResponse> PullModelAsync(
string model,
bool insecure = false,
bool? insecure = false,
string? username = default,
string? password = default,
bool stream = true,
bool? stream = true,
[global::System.Runtime.CompilerServices.EnumeratorCancellation] global::System.Threading.CancellationToken cancellationToken = default)
{
var request = new global::Ollama.PullModelRequest
4 changes: 2 additions & 2 deletions src/libs/Ollama/Generated/Ollama.ModelsClient.PushModel.g.cs
@@ -106,10 +106,10 @@ partial void ProcessPushModelResponse(
/// <exception cref="global::System.InvalidOperationException"></exception>
public async global::System.Collections.Generic.IAsyncEnumerable<global::Ollama.PushModelResponse> PushModelAsync(
string model,
bool insecure = false,
bool? insecure = false,
string? username = default,
string? password = default,
bool stream = true,
bool? stream = true,
[global::System.Runtime.CompilerServices.EnumeratorCancellation] global::System.Threading.CancellationToken cancellationToken = default)
{
var request = new global::Ollama.PushModelRequest
