Skip to content

Commit a15a621

Browse files
authored
Merge pull request #6 from cnblogs/use-interface-for-extensionablity
feat: make parameters as interface to allow extension parameters
2 parents 30ed7c0 + 7d06dc8 commit a15a621

34 files changed

+280
-190
lines changed

src/Cnblogs.DashScope.Sdk/BackgroundGenerationParameters.cs

Lines changed: 4 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -3,20 +3,14 @@
33
/// <summary>
44
/// The parameters of background generation task.
55
/// </summary>
6-
public class BackgroundGenerationParameters
6+
public class BackgroundGenerationParameters : IBackgroundGenerationParameters
77
{
8-
/// <summary>
9-
/// The number of images to be generated.
10-
/// </summary>
8+
/// <inheritdoc />
119
public int? N { get; set; }
1210

13-
/// <summary>
14-
/// Range at [0, 999], controls the distance from generated image to reference image.
15-
/// </summary>
11+
/// <inheritdoc />
1612
public int? NoiseLevel { get; set; }
1713

18-
/// <summary>
19-
/// Range at [0,1]. When RefImageUrl and RefPrompt are both set, controls the percentage of ref prompt weight.
20-
/// </summary>
14+
/// <inheritdoc />
2115
public float? RefPromptWeight { get; set; }
2216
}

src/Cnblogs.DashScope.Sdk/BaiChuan/BaiChuanTextGenerationApi.cs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@ public static Task<ModelResponse<TextGenerationOutput, TextGenerationTokenUsage>
3333
string prompt)
3434
{
3535
return client.GetTextCompletionAsync(
36-
new ModelRequest<TextGenerationInput, TextGenerationParameters>
36+
new ModelRequest<TextGenerationInput, ITextGenerationParameters>
3737
{
3838
Model = llm,
3939
Input = new TextGenerationInput { Prompt = prompt },
@@ -73,7 +73,7 @@ public static Task<ModelResponse<TextGenerationOutput, TextGenerationTokenUsage>
7373
string? resultFormat = null)
7474
{
7575
return client.GetTextCompletionAsync(
76-
new ModelRequest<TextGenerationInput, TextGenerationParameters>
76+
new ModelRequest<TextGenerationInput, ITextGenerationParameters>
7777
{
7878
Model = llm,
7979
Input = new TextGenerationInput { Messages = messages },

src/Cnblogs.DashScope.Sdk/BatchGetEmbeddingsParameters.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
/// <summary>
44
/// Optional parameter of batch get embeddings request.
55
/// </summary>
6-
public class BatchGetEmbeddingsParameters
6+
public class BatchGetEmbeddingsParameters : IBatchGetEmbeddingsParameters
77
{
88
/// <summary>
99
/// Text type of input. Use <see cref="TextTypes"/> to get available options. Defaults to 'document'.

src/Cnblogs.DashScope.Sdk/DashScopeClientCore.cs

Lines changed: 12 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@ public DashScopeClientCore(HttpClient httpClient)
3333

3434
/// <inheritdoc />
3535
public async Task<ModelResponse<TextGenerationOutput, TextGenerationTokenUsage>> GetTextCompletionAsync(
36-
ModelRequest<TextGenerationInput, TextGenerationParameters> input,
36+
ModelRequest<TextGenerationInput, ITextGenerationParameters> input,
3737
CancellationToken cancellationToken = default)
3838
{
3939
var request = BuildRequest(HttpMethod.Post, ApiLinks.TextGeneration, input);
@@ -44,7 +44,7 @@ public async Task<ModelResponse<TextGenerationOutput, TextGenerationTokenUsage>>
4444

4545
/// <inheritdoc />
4646
public IAsyncEnumerable<ModelResponse<TextGenerationOutput, TextGenerationTokenUsage>> GetTextCompletionStreamAsync(
47-
ModelRequest<TextGenerationInput, TextGenerationParameters> input,
47+
ModelRequest<TextGenerationInput, ITextGenerationParameters> input,
4848
CancellationToken cancellationToken = default)
4949
{
5050
var request = BuildSseRequest(HttpMethod.Post, ApiLinks.TextGeneration, input);
@@ -53,7 +53,7 @@ public IAsyncEnumerable<ModelResponse<TextGenerationOutput, TextGenerationTokenU
5353

5454
/// <inheritdoc />
5555
public async Task<ModelResponse<MultimodalOutput, MultimodalTokenUsage>> GetMultimodalGenerationAsync(
56-
ModelRequest<MultimodalInput, MultimodalParameters> input,
56+
ModelRequest<MultimodalInput, IMultimodalParameters> input,
5757
CancellationToken cancellationToken = default)
5858
{
5959
var request = BuildRequest(HttpMethod.Post, ApiLinks.MultimodalGeneration, input);
@@ -62,7 +62,7 @@ public async Task<ModelResponse<MultimodalOutput, MultimodalTokenUsage>> GetMult
6262

6363
/// <inheritdoc />
6464
public IAsyncEnumerable<ModelResponse<MultimodalOutput, MultimodalTokenUsage>> GetMultimodalGenerationStreamAsync(
65-
ModelRequest<MultimodalInput, MultimodalParameters> input,
65+
ModelRequest<MultimodalInput, IMultimodalParameters> input,
6666
CancellationToken cancellationToken = default)
6767
{
6868
var request = BuildSseRequest(HttpMethod.Post, ApiLinks.MultimodalGeneration, input);
@@ -71,7 +71,7 @@ public IAsyncEnumerable<ModelResponse<MultimodalOutput, MultimodalTokenUsage>> G
7171

7272
/// <inheritdoc />
7373
public async Task<ModelResponse<TextEmbeddingOutput, TextEmbeddingTokenUsage>> GetEmbeddingsAsync(
74-
ModelRequest<TextEmbeddingInput, TextEmbeddingParameters> input,
74+
ModelRequest<TextEmbeddingInput, ITextEmbeddingParameters> input,
7575
CancellationToken cancellationToken = default)
7676
{
7777
var request = BuildRequest(HttpMethod.Post, ApiLinks.TextEmbedding, input);
@@ -83,7 +83,7 @@ public async Task<ModelResponse<TextEmbeddingOutput, TextEmbeddingTokenUsage>> G
8383
/// <inheritdoc />
8484
public async
8585
Task<ModelResponse<BatchGetEmbeddingsOutput, TextEmbeddingTokenUsage>> BatchGetEmbeddingsAsync(
86-
ModelRequest<BatchGetEmbeddingsInput, BatchGetEmbeddingsParameters> input,
86+
ModelRequest<BatchGetEmbeddingsInput, IBatchGetEmbeddingsParameters> input,
8787
CancellationToken cancellationToken = default)
8888
{
8989
var request = BuildRequest(HttpMethod.Post, ApiLinks.TextEmbedding, input, isTask: true);
@@ -94,7 +94,7 @@ Task<ModelResponse<BatchGetEmbeddingsOutput, TextEmbeddingTokenUsage>> BatchGetE
9494

9595
/// <inheritdoc />
9696
public async Task<ModelResponse<ImageSynthesisOutput, ImageSynthesisUsage>> CreateImageSynthesisTaskAsync(
97-
ModelRequest<ImageSynthesisInput, ImageSynthesisParameters> input,
97+
ModelRequest<ImageSynthesisInput, IImageSynthesisParameters> input,
9898
CancellationToken cancellationToken = default)
9999
{
100100
var request = BuildRequest(HttpMethod.Post, ApiLinks.ImageSynthesis, input, isTask: true);
@@ -173,7 +173,7 @@ public async Task<DashScopeTaskOperationResponse> CancelTaskAsync(
173173

174174
/// <inheritdoc />
175175
public async Task<ModelResponse<TokenizationOutput, TokenizationUsage>> TokenizeAsync(
176-
ModelRequest<TextGenerationInput, TextGenerationParameters> input,
176+
ModelRequest<TextGenerationInput, ITextGenerationParameters> input,
177177
CancellationToken cancellationToken = default)
178178
{
179179
var request = BuildRequest(HttpMethod.Post, ApiLinks.Tokenizer, input);
@@ -191,7 +191,10 @@ public async Task<ModelResponse<ImageGenerationOutput, ImageGenerationUsage>> Cr
191191
}
192192

193193
/// <inheritdoc />
194-
public async Task<ModelResponse<BackgroundGenerationOutput, BackgroundGenerationUsage>> CreateBackgroundGenerationTaskAsync(ModelRequest<BackgroundGenerationInput, BackgroundGenerationParameters> input, CancellationToken cancellationToken = default)
194+
public async Task<ModelResponse<BackgroundGenerationOutput, BackgroundGenerationUsage>>
195+
CreateBackgroundGenerationTaskAsync(
196+
ModelRequest<BackgroundGenerationInput, IBackgroundGenerationParameters> input,
197+
CancellationToken cancellationToken = default)
195198
{
196199
var request = BuildRequest(HttpMethod.Post, ApiLinks.BackgroundGeneration, input, isTask: true);
197200
return (await SendAsync<ModelResponse<BackgroundGenerationOutput, BackgroundGenerationUsage>>(
Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
namespace Cnblogs.DashScope.Sdk;
2+
3+
/// <summary>
4+
/// The parameters of background generation task.
5+
/// </summary>
6+
public interface IBackgroundGenerationParameters
7+
{
8+
/// <summary>
9+
/// The number of images to be generated.
10+
/// </summary>
11+
public int? N { get; }
12+
13+
/// <summary>
14+
/// Range at [0, 999], controls the distance from generated image to reference image.
15+
/// </summary>
16+
public int? NoiseLevel { get; }
17+
18+
/// <summary>
19+
/// Range at [0,1]. When RefImageUrl and RefPrompt are both set, controls the percentage of ref prompt weight.
20+
/// </summary>
21+
public float? RefPromptWeight { get; }
22+
}
Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
namespace Cnblogs.DashScope.Sdk;
2+
3+
/// <summary>
4+
/// Optional parameter of batch get embeddings request.
5+
/// </summary>
6+
public interface IBatchGetEmbeddingsParameters : ITextEmbeddingParameters;

src/Cnblogs.DashScope.Sdk/IDashScopeClient.cs

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ public interface IDashScopeClient
1212
/// <param name="cancellationToken">The cancellation token to use.</param>
1313
/// <returns>The completion result.</returns>
1414
Task<ModelResponse<TextGenerationOutput, TextGenerationTokenUsage>> GetTextCompletionAsync(
15-
ModelRequest<TextGenerationInput, TextGenerationParameters> input,
15+
ModelRequest<TextGenerationInput, ITextGenerationParameters> input,
1616
CancellationToken cancellationToken = default);
1717

1818
/// <summary>
@@ -22,7 +22,7 @@ Task<ModelResponse<TextGenerationOutput, TextGenerationTokenUsage>> GetTextCompl
2222
/// <param name="cancellationToken">The cancellation token to use.</param>
2323
/// <returns></returns>
2424
IAsyncEnumerable<ModelResponse<TextGenerationOutput, TextGenerationTokenUsage>> GetTextCompletionStreamAsync(
25-
ModelRequest<TextGenerationInput, TextGenerationParameters> input,
25+
ModelRequest<TextGenerationInput, ITextGenerationParameters> input,
2626
CancellationToken cancellationToken = default);
2727

2828
/// <summary>
@@ -31,7 +31,7 @@ IAsyncEnumerable<ModelResponse<TextGenerationOutput, TextGenerationTokenUsage>>
3131
/// <param name="input">The raw input payload for completion.</param>
3232
/// <param name="cancellationToken">The cancellation token to use.</param>
3333
Task<ModelResponse<MultimodalOutput, MultimodalTokenUsage>> GetMultimodalGenerationAsync(
34-
ModelRequest<MultimodalInput, MultimodalParameters> input,
34+
ModelRequest<MultimodalInput, IMultimodalParameters> input,
3535
CancellationToken cancellationToken = default);
3636

3737
/// <summary>
@@ -41,7 +41,7 @@ Task<ModelResponse<MultimodalOutput, MultimodalTokenUsage>> GetMultimodalGenerat
4141
/// <param name="cancellationToken">The cancellation token to use.</param>
4242
/// <returns></returns>
4343
IAsyncEnumerable<ModelResponse<MultimodalOutput, MultimodalTokenUsage>> GetMultimodalGenerationStreamAsync(
44-
ModelRequest<MultimodalInput, MultimodalParameters> input,
44+
ModelRequest<MultimodalInput, IMultimodalParameters> input,
4545
CancellationToken cancellationToken = default);
4646

4747
/// <summary>
@@ -51,7 +51,7 @@ IAsyncEnumerable<ModelResponse<MultimodalOutput, MultimodalTokenUsage>> GetMulti
5151
/// <param name="cancellationToken">The cancellation token to use.</param>
5252
/// <returns></returns>
5353
Task<ModelResponse<TextEmbeddingOutput, TextEmbeddingTokenUsage>> GetEmbeddingsAsync(
54-
ModelRequest<TextEmbeddingInput, TextEmbeddingParameters> input,
54+
ModelRequest<TextEmbeddingInput, ITextEmbeddingParameters> input,
5555
CancellationToken cancellationToken = default);
5656

5757
/// <summary>
@@ -62,7 +62,7 @@ Task<ModelResponse<TextEmbeddingOutput, TextEmbeddingTokenUsage>> GetEmbeddingsA
6262
/// <returns></returns>
6363
Task<ModelResponse<BatchGetEmbeddingsOutput, TextEmbeddingTokenUsage>>
6464
BatchGetEmbeddingsAsync(
65-
ModelRequest<BatchGetEmbeddingsInput, BatchGetEmbeddingsParameters> input,
65+
ModelRequest<BatchGetEmbeddingsInput, IBatchGetEmbeddingsParameters> input,
6666
CancellationToken cancellationToken = default);
6767

6868
/// <summary>
@@ -72,7 +72,7 @@ Task<ModelResponse<TextEmbeddingOutput, TextEmbeddingTokenUsage>> GetEmbeddingsA
7272
/// <param name="cancellationToken">The cancellation token to use.</param>
7373
/// <returns></returns>
7474
Task<ModelResponse<ImageSynthesisOutput, ImageSynthesisUsage>> CreateImageSynthesisTaskAsync(
75-
ModelRequest<ImageSynthesisInput, ImageSynthesisParameters> input,
75+
ModelRequest<ImageSynthesisInput, IImageSynthesisParameters> input,
7676
CancellationToken cancellationToken = default);
7777

7878
/// <summary>
@@ -126,7 +126,7 @@ Task<DashScopeTaskList> ListTasksAsync(
126126
/// <param name="cancellationToken">The cancellation token to use.</param>
127127
/// <returns></returns>
128128
Task<ModelResponse<TokenizationOutput, TokenizationUsage>> TokenizeAsync(
129-
ModelRequest<TextGenerationInput, TextGenerationParameters> input,
129+
ModelRequest<TextGenerationInput, ITextGenerationParameters> input,
130130
CancellationToken cancellationToken = default);
131131

132132
/// <summary>
@@ -147,6 +147,6 @@ Task<ModelResponse<ImageGenerationOutput, ImageGenerationUsage>> CreateImageGene
147147
/// <returns></returns>
148148
public Task<ModelResponse<BackgroundGenerationOutput, BackgroundGenerationUsage>>
149149
CreateBackgroundGenerationTaskAsync(
150-
ModelRequest<BackgroundGenerationInput, BackgroundGenerationParameters> input,
150+
ModelRequest<BackgroundGenerationInput, IBackgroundGenerationParameters> input,
151151
CancellationToken cancellationToken = default);
152152
}
Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
namespace Cnblogs.DashScope.Sdk;
2+
3+
/// <summary>
4+
/// Optional parameters for image synthesis task.
5+
/// </summary>
6+
public interface IImageSynthesisParameters
7+
{
8+
/// <summary>
9+
/// Generated image style, defaults to '&lt;auto&gt;'. Use <see cref="ImageStyles"/> to get all available options.
10+
/// </summary>
11+
public string? Style { get; }
12+
13+
/// <summary>
14+
/// Generated image size, defaults to 1024*1024. Other options are: 1280*720 and 720*1280.
15+
/// </summary>
16+
public string? Size { get; }
17+
18+
/// <summary>
19+
/// Number of images requested. Max number is 4, defaults to 1.
20+
/// </summary>
21+
public int? N { get; }
22+
23+
/// <summary>
24+
/// Seed for randomizer, max at 4294967290. Once set, generated image will use seed, seed+1, seed+2, seed+3 depends on <see cref="N"/>.
25+
/// </summary>
26+
public uint? Seed { get; }
27+
}
Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
namespace Cnblogs.DashScope.Sdk;
2+
3+
/// <summary>
4+
/// Marks a parameter type that accepts the incremental output option.
5+
/// </summary>
6+
public interface IIncrementalOutputParameter
7+
{
8+
/// <summary>
9+
/// Enable stream output. Defaults to false.
10+
/// </summary>
11+
public bool? IncrementalOutput { get; }
12+
}
Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
namespace Cnblogs.DashScope.Sdk;
2+
3+
/// <summary>
4+
/// Optional parameters for multimodal generation request.
5+
/// </summary>
6+
public interface IMultimodalParameters : IProbabilityParameter, ISeedParameter, IIncrementalOutputParameter;

src/Cnblogs.DashScope.Sdk/Internals/IProbabilityParameter.cs renamed to src/Cnblogs.DashScope.Sdk/IProbabilityParameter.cs

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,9 @@
1-
namespace Cnblogs.DashScope.Sdk.Internals;
1+
namespace Cnblogs.DashScope.Sdk;
22

3-
internal interface IProbabilityParameter
3+
/// <summary>
4+
/// Marks a parameter type that accepts the top_p and top_k options.
5+
/// </summary>
6+
public interface IProbabilityParameter
47
{
58
/// <summary>
69
/// The probability threshold during generation, defaults to 0.8 when null.
Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
namespace Cnblogs.DashScope.Sdk;
2+
3+
/// <summary>
4+
/// Marks a parameter type that supports the seed option.
5+
/// </summary>
6+
public interface ISeedParameter
7+
{
8+
/// <summary>
9+
/// The seed for randomizer, defaults to 1234 when null.
10+
/// </summary>
11+
public ulong? Seed { get; }
12+
}
Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
namespace Cnblogs.DashScope.Sdk;
2+
3+
/// <summary>
4+
/// The optional parameters for text embedding.
5+
/// </summary>
6+
public interface ITextEmbeddingParameters
7+
{
8+
/// <summary>
9+
/// The text type("query" or "document"). Defaults to "document".
10+
/// </summary>
11+
public string? TextType { get; }
12+
}
Lines changed: 45 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,45 @@
1+
namespace Cnblogs.DashScope.Sdk;
2+
3+
/// <summary>
4+
/// The text generation options.
5+
/// </summary>
6+
public interface ITextGenerationParameters : IIncrementalOutputParameter, ISeedParameter, IProbabilityParameter
7+
{
8+
/// <summary>
9+
/// The format of the result message, must be <c>text</c> or <c>message</c>.
10+
/// </summary>
11+
/// <remarks>
12+
/// <c>text</c> - original text format.
13+
/// <para><c>message</c> - OpenAI compatible message format</para>
14+
/// </remarks>
15+
public string? ResultFormat { get; }
16+
17+
/// <summary>
18+
/// The maximum number of tokens the model can generate.
19+
/// </summary>
20+
/// <remarks>
21+
/// Default and maximum number of tokens is 1500(qwen-turbo) or 2000(qwen-max, qwen-max-1201, qwen-max-longcontext, qwen-plus).
22+
/// </remarks>
23+
public int? MaxTokens { get; }
24+
25+
/// <summary>
26+
/// Increasing the repetition penalty can reduce the amount of repetition in the model’s output. A value of 1.0 indicates no penalty, with the default set at 1.1.
27+
/// </summary>
28+
public float? RepetitionPenalty { get; }
29+
30+
/// <summary>
31+
/// Controls the diversity of generations. A lower temperature leads to more consistent results.
32+
/// </summary>
33+
/// <remarks>Must be in [0,2), defaults to 0.85.</remarks>
34+
public float? Temperature { get; }
35+
36+
/// <summary>
37+
/// Stop generation when the next token or string is within the given range.
38+
/// </summary>
39+
public TextGenerationStop? Stop { get; }
40+
41+
/// <summary>
42+
/// Enable internet search during generation. Defaults to false.
43+
/// </summary>
44+
public bool? EnableSearch { get; }
45+
}

0 commit comments

Comments
 (0)