From d4d3af5de838c031d23b79678e4978f1bfc44d68 Mon Sep 17 00:00:00 2001
From: awstools
Date: Thu, 17 Oct 2024 18:20:15 +0000
Subject: [PATCH] feat(client-bedrock-agent): Removing support for topK
 property in PromptModelInferenceConfiguration object, Making
 PromptTemplateConfiguration property as required, Limiting the maximum
 PromptVariant to 1

---
 .../src/commands/CreateFlowCommand.ts               |  2 --
 .../src/commands/CreateFlowVersionCommand.ts        |  1 -
 .../src/commands/CreatePromptCommand.ts             |  2 --
 .../src/commands/CreatePromptVersionCommand.ts      |  1 -
 .../src/commands/GetFlowCommand.ts                  |  1 -
 .../src/commands/GetFlowVersionCommand.ts           |  1 -
 .../src/commands/GetIngestionJobCommand.ts          |  2 +-
 .../src/commands/GetPromptCommand.ts                |  1 -
 .../src/commands/UpdateFlowCommand.ts               |  2 --
 .../src/commands/UpdatePromptCommand.ts             |  2 --
 clients/client-bedrock-agent/src/models/models_0.ts |  8 +-------
 .../src/protocols/Aws_restJson1.ts                  |  2 --
 codegen/sdk-codegen/aws-models/bedrock-agent.json   | 13 ++++---------
 13 files changed, 6 insertions(+), 32 deletions(-)

diff --git a/clients/client-bedrock-agent/src/commands/CreateFlowCommand.ts b/clients/client-bedrock-agent/src/commands/CreateFlowCommand.ts
index c4279e062dc2e..66a8dd6222f9e 100644
--- a/clients/client-bedrock-agent/src/commands/CreateFlowCommand.ts
+++ b/clients/client-bedrock-agent/src/commands/CreateFlowCommand.ts
@@ -91,7 +91,6 @@ export interface CreateFlowCommandOutput extends CreateFlowResponse, __MetadataB
  * text: { // PromptModelInferenceConfiguration
  * temperature: Number("float"),
  * topP: Number("float"),
- * topK: Number("int"),
  * maxTokens: Number("int"),
  * stopSequences: [ // StopSequences
  * "STRING_VALUE",
@@ -221,7 +220,6 @@ export interface CreateFlowCommandOutput extends CreateFlowResponse, __MetadataB
  * // text: { // PromptModelInferenceConfiguration
  * // temperature: Number("float"),
  * // topP: Number("float"),
- * // topK: Number("int"),
  * // maxTokens: Number("int"),
  * // stopSequences: [ // StopSequences
  * // "STRING_VALUE",
diff --git a/clients/client-bedrock-agent/src/commands/CreateFlowVersionCommand.ts b/clients/client-bedrock-agent/src/commands/CreateFlowVersionCommand.ts
index 92a01e0dfe15b..4c3ac5d9accd7 100644
--- a/clients/client-bedrock-agent/src/commands/CreateFlowVersionCommand.ts
+++ b/clients/client-bedrock-agent/src/commands/CreateFlowVersionCommand.ts
@@ -102,7 +102,6 @@ export interface CreateFlowVersionCommandOutput extends CreateFlowVersionRespons
  * // text: { // PromptModelInferenceConfiguration
  * // temperature: Number("float"),
  * // topP: Number("float"),
- * // topK: Number("int"),
  * // maxTokens: Number("int"),
  * // stopSequences: [ // StopSequences
  * // "STRING_VALUE",
diff --git a/clients/client-bedrock-agent/src/commands/CreatePromptCommand.ts b/clients/client-bedrock-agent/src/commands/CreatePromptCommand.ts
index d2b0b6b01480c..cf4443c9d2042 100644
--- a/clients/client-bedrock-agent/src/commands/CreatePromptCommand.ts
+++ b/clients/client-bedrock-agent/src/commands/CreatePromptCommand.ts
@@ -64,7 +64,6 @@ export interface CreatePromptCommandOutput extends CreatePromptResponse, __Metad
  * text: { // PromptModelInferenceConfiguration
  * temperature: Number("float"),
  * topP: Number("float"),
- * topK: Number("int"),
  * maxTokens: Number("int"),
  * stopSequences: [ // StopSequences
  * "STRING_VALUE",
@@ -110,7 +109,6 @@ export interface CreatePromptCommandOutput extends CreatePromptResponse, __Metad
  * // text: { // PromptModelInferenceConfiguration
  * // temperature: Number("float"),
  * // topP: Number("float"),
- * // topK: Number("int"),
  * // maxTokens: Number("int"),
  * // stopSequences: [ // StopSequences
  * // "STRING_VALUE",
diff --git a/clients/client-bedrock-agent/src/commands/CreatePromptVersionCommand.ts b/clients/client-bedrock-agent/src/commands/CreatePromptVersionCommand.ts
index 81477bcbfd263..73b6a04252fe5 100644
--- a/clients/client-bedrock-agent/src/commands/CreatePromptVersionCommand.ts
+++ b/clients/client-bedrock-agent/src/commands/CreatePromptVersionCommand.ts
@@ -73,7 +73,6 @@ export interface CreatePromptVersionCommandOutput extends CreatePromptVersionRes
  * // text: { // PromptModelInferenceConfiguration
  * // temperature: Number("float"),
  * // topP: Number("float"),
- * // topK: Number("int"),
  * // maxTokens: Number("int"),
  * // stopSequences: [ // StopSequences
  * // "STRING_VALUE",
diff --git a/clients/client-bedrock-agent/src/commands/GetFlowCommand.ts b/clients/client-bedrock-agent/src/commands/GetFlowCommand.ts
index 08e53cd2f9d18..293c20aebba15 100644
--- a/clients/client-bedrock-agent/src/commands/GetFlowCommand.ts
+++ b/clients/client-bedrock-agent/src/commands/GetFlowCommand.ts
@@ -97,7 +97,6 @@ export interface GetFlowCommandOutput extends GetFlowResponse, __MetadataBearer
  * // text: { // PromptModelInferenceConfiguration
  * // temperature: Number("float"),
  * // topP: Number("float"),
- * // topK: Number("int"),
  * // maxTokens: Number("int"),
  * // stopSequences: [ // StopSequences
  * // "STRING_VALUE",
diff --git a/clients/client-bedrock-agent/src/commands/GetFlowVersionCommand.ts b/clients/client-bedrock-agent/src/commands/GetFlowVersionCommand.ts
index 2b965edf1c85b..38bd428a4d65d 100644
--- a/clients/client-bedrock-agent/src/commands/GetFlowVersionCommand.ts
+++ b/clients/client-bedrock-agent/src/commands/GetFlowVersionCommand.ts
@@ -101,7 +101,6 @@ export interface GetFlowVersionCommandOutput extends GetFlowVersionResponse, __M
  * // text: { // PromptModelInferenceConfiguration
  * // temperature: Number("float"),
  * // topP: Number("float"),
- * // topK: Number("int"),
  * // maxTokens: Number("int"),
  * // stopSequences: [ // StopSequences
  * // "STRING_VALUE",
diff --git a/clients/client-bedrock-agent/src/commands/GetIngestionJobCommand.ts b/clients/client-bedrock-agent/src/commands/GetIngestionJobCommand.ts
index 91a41a4d25298..6f0b5c1ecd0e2 100644
--- a/clients/client-bedrock-agent/src/commands/GetIngestionJobCommand.ts
+++ b/clients/client-bedrock-agent/src/commands/GetIngestionJobCommand.ts
@@ -28,7 +28,7 @@ export interface GetIngestionJobCommandInput extends GetIngestionJobRequest {}
 export interface GetIngestionJobCommandOutput extends GetIngestionJobResponse, __MetadataBearer {}
 
 /**
- * <p>Gets information about a data ingestion job. Data sources are ingested into your knowledge base so that Large Lanaguage Models (LLMs) can use your data.</p>
+ * <p>Gets information about a data ingestion job. Data sources are ingested into your knowledge base so that Large Language Models (LLMs) can use your data.</p>
  * @example
  * Use a bare-bones client and the command you need to make an API call.
  * ```javascript
diff --git a/clients/client-bedrock-agent/src/commands/GetPromptCommand.ts b/clients/client-bedrock-agent/src/commands/GetPromptCommand.ts
index 41a8631c004f5..38d4863513b72 100644
--- a/clients/client-bedrock-agent/src/commands/GetPromptCommand.ts
+++ b/clients/client-bedrock-agent/src/commands/GetPromptCommand.ts
@@ -65,7 +65,6 @@ export interface GetPromptCommandOutput extends GetPromptResponse, __MetadataBea
  * // text: { // PromptModelInferenceConfiguration
  * // temperature: Number("float"),
  * // topP: Number("float"),
- * // topK: Number("int"),
  * // maxTokens: Number("int"),
  * // stopSequences: [ // StopSequences
  * // "STRING_VALUE",
diff --git a/clients/client-bedrock-agent/src/commands/UpdateFlowCommand.ts b/clients/client-bedrock-agent/src/commands/UpdateFlowCommand.ts
index 00e463f1434b4..e306413070bdd 100644
--- a/clients/client-bedrock-agent/src/commands/UpdateFlowCommand.ts
+++ b/clients/client-bedrock-agent/src/commands/UpdateFlowCommand.ts
@@ -91,7 +91,6 @@ export interface UpdateFlowCommandOutput extends UpdateFlowResponse, __MetadataB
  * text: { // PromptModelInferenceConfiguration
  * temperature: Number("float"),
  * topP: Number("float"),
- * topK: Number("int"),
  * maxTokens: Number("int"),
  * stopSequences: [ // StopSequences
  * "STRING_VALUE",
@@ -218,7 +217,6 @@ export interface UpdateFlowCommandOutput extends UpdateFlowResponse, __MetadataB
  * // text: { // PromptModelInferenceConfiguration
  * // temperature: Number("float"),
  * // topP: Number("float"),
- * // topK: Number("int"),
  * // maxTokens: Number("int"),
  * // stopSequences: [ // StopSequences
  * // "STRING_VALUE",
diff --git a/clients/client-bedrock-agent/src/commands/UpdatePromptCommand.ts b/clients/client-bedrock-agent/src/commands/UpdatePromptCommand.ts
index 5edd3b94fa6e7..38e6c050c8698 100644
--- a/clients/client-bedrock-agent/src/commands/UpdatePromptCommand.ts
+++ b/clients/client-bedrock-agent/src/commands/UpdatePromptCommand.ts
@@ -64,7 +64,6 @@ export interface UpdatePromptCommandOutput extends UpdatePromptResponse, __Metad
  * text: { // PromptModelInferenceConfiguration
  * temperature: Number("float"),
  * topP: Number("float"),
- * topK: Number("int"),
  * maxTokens: Number("int"),
  * stopSequences: [ // StopSequences
  * "STRING_VALUE",
@@ -107,7 +106,6 @@ export interface UpdatePromptCommandOutput extends UpdatePromptResponse, __Metad
  * // text: { // PromptModelInferenceConfiguration
  * // temperature: Number("float"),
  * // topP: Number("float"),
- * // topK: Number("int"),
  * // maxTokens: Number("int"),
  * // stopSequences: [ // StopSequences
  * // "STRING_VALUE",
diff --git a/clients/client-bedrock-agent/src/models/models_0.ts b/clients/client-bedrock-agent/src/models/models_0.ts
index ed458d33552b5..5bebad6d2b1a1 100644
--- a/clients/client-bedrock-agent/src/models/models_0.ts
+++ b/clients/client-bedrock-agent/src/models/models_0.ts
@@ -3972,12 +3972,6 @@ export interface PromptModelInferenceConfiguration {
    */
   topP?: number;
 
-  /**
-   * <p>The number of most-likely candidates that the model considers for the next token during generation.</p>
-   * @public
-   */
-  topK?: number;
-
   /**
    * <p>The maximum number of tokens to return in the response.</p>
    * @public
    */
@@ -7514,7 +7508,7 @@ export interface PromptVariant {
    * <p>Contains configurations for the prompt template.</p>
    * @public
    */
-  templateConfiguration?: PromptTemplateConfiguration;
+  templateConfiguration: PromptTemplateConfiguration | undefined;
 
   /**
    * <p>The unique identifier of the model or inference profile with which to run inference on the prompt.</p>
diff --git a/clients/client-bedrock-agent/src/protocols/Aws_restJson1.ts b/clients/client-bedrock-agent/src/protocols/Aws_restJson1.ts
index 353cdb8e430d6..7e4915f1d4b2a 100644
--- a/clients/client-bedrock-agent/src/protocols/Aws_restJson1.ts
+++ b/clients/client-bedrock-agent/src/protocols/Aws_restJson1.ts
@@ -3533,7 +3533,6 @@ const se_PromptModelInferenceConfiguration = (
     maxTokens: [],
     stopSequences: _json,
     temperature: __serializeFloat,
-    topK: [],
     topP: __serializeFloat,
   });
 };
@@ -4484,7 +4483,6 @@ const de_PromptModelInferenceConfiguration = (
     maxTokens: __expectInt32,
     stopSequences: _json,
     temperature: __limitedParseFloat32,
-    topK: __expectInt32,
     topP: __limitedParseFloat32,
   }) as any;
 };
diff --git a/codegen/sdk-codegen/aws-models/bedrock-agent.json b/codegen/sdk-codegen/aws-models/bedrock-agent.json
index 612b4d427d27f..7e3c284bfc03c 100644
--- a/codegen/sdk-codegen/aws-models/bedrock-agent.json
+++ b/codegen/sdk-codegen/aws-models/bedrock-agent.json
@@ -7088,7 +7088,7 @@
         }
       ],
       "traits": {
-        "smithy.api#documentation": "<p>Gets information about a data ingestion job. Data sources are ingested into your knowledge base so that Large Lanaguage Models (LLMs) can use your data.</p>",
+        "smithy.api#documentation": "<p>Gets information about a data ingestion job. Data sources are ingested into your knowledge base so that Large Language Models (LLMs) can use your data.</p>",
         "smithy.api#http": {
           "code": 200,
           "method": "GET",
@@ -10520,12 +10520,6 @@ "smithy.api#documentation": "<p>The percentage of most-likely candidates that the model considers for the next token.</p>"
         }
       },
-      "topK": {
-        "target": "com.amazonaws.bedrockagent#TopK",
-        "traits": {
-          "smithy.api#documentation": "<p>The number of most-likely candidates that the model considers for the next token during generation.</p>"
-        }
-      },
       "maxTokens": {
         "target": "com.amazonaws.bedrockagent#MaximumLength",
         "traits": {
           "smithy.api#documentation": "<p>The maximum number of tokens to return in the response.</p>"
@@ -10762,7 +10756,8 @@
       "templateConfiguration": {
         "target": "com.amazonaws.bedrockagent#PromptTemplateConfiguration",
         "traits": {
-          "smithy.api#documentation": "<p>Contains configurations for the prompt template.</p>"
+          "smithy.api#documentation": "<p>Contains configurations for the prompt template.</p>",
+          "smithy.api#required": {}
         }
       },
       "modelId": {
@@ -10796,7 +10791,7 @@
       },
       "traits": {
         "smithy.api#length": {
-          "max": 3
+          "max": 1
         },
         "smithy.api#sensitive": {}
       }