Skip to content

Commit ed2d3ea

Browse files
committed
Replace Options with EvalAllowableValues
1 parent e951f40 commit ed2d3ea

File tree

2 files changed

+7
-3
lines changed

2 files changed

+7
-3
lines changed

ServiceStack/src/ServiceStack.AI.Chat/ChatCompletion.cs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ public class ChatCompletion : IPost, IReturn<ChatResponse>
3838

3939
[Description("Constrains effort on reasoning for reasoning models. Currently supported values are minimal, low, medium, and high (none, default). Reducing reasoning effort can result in faster responses and fewer tokens used on reasoning in a response.")]
4040
[DataMember(Name = "reasoning_effort")]
41-
[Input(Type="combobox", Options = "{ allowableValues:['low','medium','high','none','default'] }", Help = "Constrains effort on reasoning for reasoning models")]
41+
[Input(Type="combobox", EvalAllowableValues = "['low','medium','high','none','default']", Help = "Constrains effort on reasoning for reasoning models")]
4242
public string? ReasoningEffort { get; set; }
4343

4444
[Description("An object specifying the format that the model must output. Compatible with GPT-4 Turbo and all GPT-3.5 Turbo models newer than `gpt-3.5-turbo-1106`. Setting Type to ResponseFormat.JsonObject enables JSON mode, which guarantees the message the model generates is valid JSON.")]
@@ -47,7 +47,7 @@ public class ChatCompletion : IPost, IReturn<ChatResponse>
4747

4848
[Description("Specifies the processing type used for serving the request.")]
4949
[DataMember(Name = "service_tier")]
50-
[Input(Type = "combobox", Options = "{ allowableValues:['auto','default'] }", Help = "Processing type for serving the request")]
50+
[Input(Type = "combobox", EvalAllowableValues = "['auto','default']", Help = "Processing type for serving the request")]
5151
public string? ServiceTier { get; set; }
5252

5353
[Description("A stable identifier used to help detect users of your application that may be violating OpenAI's usage policies. The IDs should be a string that uniquely identifies each user.")]
@@ -76,7 +76,7 @@ public class ChatCompletion : IPost, IReturn<ChatResponse>
7676

7777
[Description("Constrains the verbosity of the model's response. Lower values will result in more concise responses, while higher values will result in more verbose responses. Currently supported values are low, medium, and high.")]
7878
[DataMember(Name = "verbosity")]
79-
[Input(Type = "combobox", Options = "{ allowableValues:['low','medium','high'] }", Placeholder = "e.g. low", Help = "Constrains verbosity of model's response")]
79+
[Input(Type = "combobox", EvalAllowableValues = "['low','medium','high']", Placeholder = "e.g. low", Help = "Constrains verbosity of model's response")]
8080
public string? Verbosity { get; set; }
8181

8282
[Description("What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.")]

ServiceStack/tests/AdhocNew/Configure.AI.Chat.cs

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
using ServiceStack.AI;
2+
using ServiceStack.Configuration;
23
using ServiceStack.Data;
34
using ServiceStack.OrmLite;
45

@@ -12,6 +13,9 @@ public void Configure(IWebHostBuilder builder) => builder
1213
.ConfigureServices(services => {
1314
services.AddPlugin(new ChatFeature
1415
{
16+
// ValidateRequest = async (req) => req.GetApiKey()?.HasScope(RoleNames.Admin) == true
17+
// ? null
18+
// : HttpResult.Redirect("/admin-ui"),
1519
OnChatCompletionSuccessAsync = async (request, response, req) => {
1620
using var db = await req.Resolve<IDbConnectionFactory>().OpenAsync();
1721
await db.InsertAsync(req.ToChatCompletionLog(request, response));

0 commit comments

Comments (0)