
Commit 2bae32e

doc: update sample and README
1 parent 1b7dd4c commit 2bae32e

File tree

3 files changed: +62 additions, −20 deletions

- README.md
- README.zh-Hans.md
- sample/Cnblogs.DashScope.Sample/Program.cs


README.md

Lines changed: 31 additions & 14 deletions

````diff
@@ -123,19 +123,6 @@ var completion = await client.GetQWenCompletionAsync(QWenLlm.QWenMax, prompt);
 Console.WriteLine(completion.Output.Text);
 ```
 
-## Reasoning
-
-Use `completion.Output.Choices![0].Message.ReasoningContent` to access the reasoning content from model.
-
-```csharp
-var history = new List<ChatMessage>
-{
-    ChatMessage.User("Calculate 1+1")
-};
-var completion = await client.GetDeepSeekChatCompletionAsync(DeepSeekLlm.DeepSeekR1, history);
-Console.WriteLine(completion.Output.Choices[0]!.Message.ReasoningContent);
-```
-
 ## Multi-round chat
 
 ```csharp
@@ -153,6 +140,36 @@ var completion = await client.GetQWenChatCompletionAsync(QWenLlm.QWenMax, histor
 Console.WriteLine(completion.Output.Choices[0].Message.Content); // The number is 42
 ```
 
+## Reasoning
+
+Use `completion.Output.Choices![0].Message.ReasoningContent` to access the thoughts from reasoning model.
+
+```csharp
+var history = new List<ChatMessage>
+{
+    ChatMessage.User("Calculate 1+1")
+};
+var completion = await client.GetDeepSeekChatCompletionAsync(DeepSeekLlm.DeepSeekR1, history);
+Console.WriteLine(completion.Output.Choices[0]!.Message.ReasoningContent);
+```
+
+### QWen3
+
+Use `TextGenerationParameters.EnableThinking` to toggle reasoning.
+
+```csharp
+var stream = dashScopeClient
+    .GetQWenChatStreamAsync(
+        QWenLlm.QWenPlusLatest,
+        history,
+        new TextGenerationParameters
+        {
+            IncrementalOutput = true,
+            ResultFormat = ResultFormats.Message,
+            EnableThinking = true
+        });
+```
+
 ## Function Call
 
 Creates a function with parameters
@@ -182,7 +199,7 @@ public enum TemperatureUnit
 }
 ```
 
-Append tool information to chat messages.
+Append tool information to chat messages (Here we use `JsonSchema.NET` to generate JSON Schema).
 
 ```csharp
 var tools = new List<ToolDefinition>()
````
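The new wording in README.md points readers at `JsonSchema.NET` for producing the function's parameter schema. As a rough sketch of what that step looks like, the snippet below generates a schema from a parameter type with `JsonSchemaBuilder.FromType<T>()` (from the JsonSchema.Net.Generation package) and attaches it to a tool definition. The `ToolDefinition`/`FunctionDefinition` shapes, the function name, and the parameter type are assumptions inferred from the surrounding diff context, not copied from the SDK; the README's full example remains the reference.

```csharp
// Sketch only: the DashScope-side types (ToolDefinition, FunctionDefinition, ToolTypes)
// are assumed from the diff context; the schema generation is JsonSchema.Net.Generation.
using Json.Schema;
using Json.Schema.Generation;

// Reflect the parameter type into a JSON Schema describing its properties.
var parametersSchema = new JsonSchemaBuilder()
    .FromType<GetCurrentWeatherParameters>()
    .Build();

// Attach name, description, and the generated schema to a function tool (assumed shape).
var tools = new List<ToolDefinition>
{
    new(
        ToolTypes.Function,
        new FunctionDefinition(
            "get_current_weather",
            "Get the current weather for a given location",
            parametersSchema))
};

// Hypothetical parameter type mirroring the README's TemperatureUnit example.
public record GetCurrentWeatherParameters(string Location, TemperatureUnit Unit);

public enum TemperatureUnit
{
    Celsius,
    Fahrenheit
}
```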

README.zh-Hans.md

Lines changed: 18 additions & 1 deletion

````diff
@@ -157,6 +157,23 @@ var completion = await client.GetDeepSeekChatCompletionAsync(DeepSeekLlm.DeepSee
 Console.WriteLine(completion.Output.Choices[0]!.Message.ReasoningContent);
 ```
 
+### QWen3
+
+Use `TextGenerationParameters.EnableThinking` to decide whether the model's reasoning capability is used.
+
+```csharp
+var stream = dashScopeClient
+    .GetQWenChatStreamAsync(
+        QWenLlm.QWenPlusLatest,
+        history,
+        new TextGenerationParameters
+        {
+            IncrementalOutput = true,
+            ResultFormat = ResultFormats.Message,
+            EnableThinking = true
+        });
+```
+
 ## Function Call
 
 Create a function for the model to use.
@@ -182,7 +199,7 @@ public enum TemperatureUnit
 }
 ```
 
-Include the function's name, description, and parameter list in the conversation; the parameter list is provided as JSON Schema.
+Include the function's name, description, and parameter list in the conversation; the parameter list is provided as JSON Schema (here we use the `JsonSchema.Net` library, but any library with similar functionality will do).
 
 ```csharp
 var tools = new List<ToolDefinition>()
````

sample/Cnblogs.DashScope.Sample/Program.cs

Lines changed: 13 additions & 5 deletions

````diff
@@ -32,12 +32,12 @@
 switch (type)
 {
     case SampleType.TextCompletion:
-        Console.WriteLine("Prompt > ");
+        Console.Write("Prompt > ");
         userInput = Console.ReadLine()!;
         await TextCompletionAsync(userInput);
         break;
     case SampleType.TextCompletionSse:
-        Console.WriteLine("Prompt > ");
+        Console.Write("Prompt > ");
         userInput = Console.ReadLine()!;
         await TextCompletionStreamAsync(userInput);
         break;
@@ -97,9 +97,14 @@ async Task ChatStreamAsync()
     history.Add(TextChatMessage.User(input));
     var stream = dashScopeClient
         .GetQWenChatStreamAsync(
-            QWenLlm.QWenMax,
+            QWenLlm.QWenPlusLatest,
             history,
-            new TextGenerationParameters { IncrementalOutput = true, ResultFormat = ResultFormats.Message });
+            new TextGenerationParameters
+            {
+                IncrementalOutput = true,
+                ResultFormat = ResultFormats.Message,
+                EnableThinking = true
+            });
     var role = string.Empty;
     var message = new StringBuilder();
     await foreach (var modelResponse in stream)
@@ -112,7 +117,10 @@ async Task ChatStreamAsync()
         }
 
         message.Append(chunk.Message.Content);
-        Console.Write(chunk.Message.Content);
+        var write = string.IsNullOrEmpty(chunk.Message.ReasoningContent)
+            ? chunk.Message.Content
+            : chunk.Message.ReasoningContent;
+        Console.Write(write);
     }
 
     Console.WriteLine();
````
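The updated sample prints whichever of `ReasoningContent` or `Content` a streamed chunk carries, interleaved on the same console. If the thoughts and the final answer should end up in separate places (say, two UI panes), a small variation of the loop above can collect them into separate buffers. This is only a sketch of that idea and not part of the commit; it reuses the sample's `stream` variable and assumes each chunk comes from `modelResponse.Output.Choices![0]`, as in the sample.

```csharp
// Sketch: a variation of the loop in ChatStreamAsync that keeps the model's
// thoughts (ReasoningContent) and the answer (Content) in separate buffers.
var reasoning = new StringBuilder();
var answer = new StringBuilder();

await foreach (var modelResponse in stream)
{
    var chunk = modelResponse.Output.Choices![0]; // assumed chunk shape, as in the sample
    if (!string.IsNullOrEmpty(chunk.Message.ReasoningContent))
    {
        reasoning.Append(chunk.Message.ReasoningContent);
    }
    else
    {
        answer.Append(chunk.Message.Content);
    }
}

Console.WriteLine($"Thoughts:{Environment.NewLine}{reasoning}");
Console.WriteLine($"Answer:{Environment.NewLine}{answer}");
```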
