
Commit

chore: Updates version to 0.43.0
aws-sdk-swift-automation committed May 7, 2024
1 parent 1194575 commit c479168
Showing 93 changed files with 27,025 additions and 3,065 deletions.
2 changes: 1 addition & 1 deletion Package.swift
@@ -244,7 +244,7 @@ func addResolvedTargets() {
// MARK: - Generated

addDependencies(
clientRuntimeVersion: "0.47.0",
clientRuntimeVersion: "0.48.0",
crtVersion: "0.30.0"
)

2 changes: 1 addition & 1 deletion Package.version
@@ -1 +1 @@
0.42.0
0.43.0
1 change: 1 addition & 0 deletions Sources/Services/AWSAppSync/models/Models.swift
@@ -12700,6 +12700,7 @@ public struct UpdateGraphqlApiInput {
/// This member is required.
public var apiId: Swift.String?
/// The new authentication type for the GraphqlApi object.
/// This member is required.
public var authenticationType: AppSyncClientTypes.AuthenticationType?
/// The enhancedMetricsConfig object.
public var enhancedMetricsConfig: AppSyncClientTypes.EnhancedMetricsConfig?
657 changes: 628 additions & 29 deletions Sources/Services/AWSBedrock/BedrockClient.swift

Large diffs are not rendered by default.

7,640 changes: 5,998 additions & 1,642 deletions Sources/Services/AWSBedrock/models/Models.swift

Large diffs are not rendered by default.

317 changes: 312 additions & 5 deletions Sources/Services/AWSBedrockAgent/models/Models.swift

Large diffs are not rendered by default.

304 changes: 304 additions & 0 deletions Sources/Services/AWSBedrockAgentRuntime/models/Models.swift

Large diffs are not rendered by default.

6 changes: 3 additions & 3 deletions Sources/Services/AWSBedrockRuntime/BedrockRuntimeClient.swift
@@ -142,7 +142,7 @@ public struct BedrockRuntimeClientLogHandlerFactory: ClientRuntime.SDKLogHandler
extension BedrockRuntimeClient {
/// Performs the `InvokeModel` operation on the `AmazonBedrockFrontendService` service.
///
/// Invokes the specified Bedrock model to run inference using the input provided in the request body. You use InvokeModel to run inference for text models, image models, and embedding models. For more information, see [Run inference](https://docs.aws.amazon.com/bedrock/latest/userguide/api-methods-run.html) in the Bedrock User Guide. For example requests, see Examples (after the Errors section).
/// Invokes the specified Amazon Bedrock model to run inference using the prompt and inference parameters provided in the request body. You use model inference to generate text, images, and embeddings. For example code, see Invoke model code examples in the Amazon Bedrock User Guide. This operation requires permission for the bedrock:InvokeModel action.
///
/// - Parameter InvokeModelInput : [no documentation found]
///
@@ -201,7 +201,7 @@ extension BedrockRuntimeClient {

/// Performs the `InvokeModelWithResponseStream` operation on the `AmazonBedrockFrontendService` service.
///
/// Invoke the specified Bedrock model to run inference using the input provided. Return the response in a stream. For more information, see [Run inference](https://docs.aws.amazon.com/bedrock/latest/userguide/api-methods-run.html) in the Bedrock User Guide. For an example request and response, see Examples (after the Errors section).
/// Invoke the specified Amazon Bedrock model to run inference using the prompt and inference parameters provided in the request body. The response is returned in a stream. To see if a model supports streaming, call [GetFoundationModel](https://docs.aws.amazon.com/bedrock/latest/APIReference/API_GetFoundationModel.html) and check the responseStreamingSupported field in the response. The CLI doesn't support InvokeModelWithResponseStream. For example code, see Invoke model with streaming code example in the Amazon Bedrock User Guide. This operation requires permissions to perform the bedrock:InvokeModelWithResponseStream action.
///
/// - Parameter InvokeModelWithResponseStreamInput : [no documentation found]
///
@@ -214,7 +214,7 @@ extension BedrockRuntimeClient {
/// - `InternalServerException` : An internal server error occurred. Retry your request.
/// - `ModelErrorException` : The request failed due to an error while processing the model.
/// - `ModelNotReadyException` : The model specified in the request is not ready to serve inference requests.
/// - `ModelStreamErrorException` : An error occurred while streaming the response.
/// - `ModelStreamErrorException` : An error occurred while streaming the response. Retry your request.
/// - `ModelTimeoutException` : The request took too long to process. Processing time exceeded the model timeout length.
/// - `ResourceNotFoundException` : The specified resource ARN was not found. Check the ARN and try your request again.
/// - `ServiceQuotaExceededException` : The number of requests exceeds the service quota. Resubmit your request later.
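For orientation, a minimal non-streaming call against the InvokeModel operation documented above might look like the sketch below. The model ID, JSON prompt body, and pre-configured client are placeholders rather than values taken from this diff, and the exact request and response JSON depend on the model being invoked.

import AWSBedrockRuntime
import Foundation

// Sketch only: `client` is a BedrockRuntimeClient configured elsewhere; the
// model ID and request body are placeholders for whichever model you call.
func runInference(client: BedrockRuntimeClient) async throws {
    let input = InvokeModelInput(
        accept: "application/json",
        body: #"{"inputText": "Hello, Bedrock!"}"#.data(using: .utf8),
        contentType: "application/json",
        modelId: "amazon.titan-text-express-v1"
    )
    let output = try await client.invokeModel(input: input)
    if let body = output.body {
        print(String(data: body, encoding: .utf8) ?? "<non-UTF-8 response>")
    }
}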
120 changes: 109 additions & 11 deletions Sources/Services/AWSBedrockRuntime/models/Models.swift
@@ -135,7 +135,7 @@ extension InternalServerExceptionBody: Swift.Decodable {

extension InvokeModelInput: Swift.CustomDebugStringConvertible {
public var debugDescription: Swift.String {
"InvokeModelInput(accept: \(Swift.String(describing: accept)), contentType: \(Swift.String(describing: contentType)), modelId: \(Swift.String(describing: modelId)), body: \"CONTENT_REDACTED\")"}
"InvokeModelInput(accept: \(Swift.String(describing: accept)), contentType: \(Swift.String(describing: contentType)), guardrailIdentifier: \(Swift.String(describing: guardrailIdentifier)), guardrailVersion: \(Swift.String(describing: guardrailVersion)), modelId: \(Swift.String(describing: modelId)), trace: \(Swift.String(describing: trace)), body: \"CONTENT_REDACTED\")"}
}

extension InvokeModelInput: Swift.Encodable {
@@ -161,6 +161,15 @@ extension InvokeModelInput {
if let contentType = value.contentType {
items.add(Header(name: "Content-Type", value: Swift.String(contentType)))
}
if let guardrailIdentifier = value.guardrailIdentifier {
items.add(Header(name: "X-Amzn-Bedrock-GuardrailIdentifier", value: Swift.String(guardrailIdentifier)))
}
if let guardrailVersion = value.guardrailVersion {
items.add(Header(name: "X-Amzn-Bedrock-GuardrailVersion", value: Swift.String(guardrailVersion)))
}
if let trace = value.trace {
items.add(Header(name: "X-Amzn-Bedrock-Trace", value: Swift.String(trace.rawValue)))
}
return items
}
}
@@ -178,26 +187,50 @@ extension InvokeModelInput {
public struct InvokeModelInput {
/// The desired MIME type of the inference body in the response. The default value is application/json.
public var accept: Swift.String?
/// Input data in the format specified in the content-type request header. To see the format and content of this field for different models, refer to [Inference parameters](https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters.html).
/// The prompt and inference parameters in the format specified in the contentType in the header. To see the format and content of the request and response bodies for different models, refer to [Inference parameters](https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters.html). For more information, see [Run inference](https://docs.aws.amazon.com/bedrock/latest/userguide/api-methods-run.html) in the Bedrock User Guide.
/// This member is required.
public var body: ClientRuntime.Data?
/// The MIME type of the input data in the request. The default value is application/json.
public var contentType: Swift.String?
/// Identifier of the model.
/// The unique identifier of the guardrail that you want to use. If you don't provide a value, no guardrail is applied to the invocation. An error will be thrown in the following situations.
///
/// * You don't provide a guardrail identifier but you specify the amazon-bedrock-guardrailConfig field in the request body.
///
/// * You enable the guardrail but the contentType isn't application/json.
///
/// * You provide a guardrail identifier, but guardrailVersion isn't specified.
public var guardrailIdentifier: Swift.String?
/// The version number for the guardrail. The value can also be DRAFT.
public var guardrailVersion: Swift.String?
/// The unique identifier of the model to invoke to run inference. The modelId to provide depends on the type of model that you use:
///
/// * If you use a base model, specify the model ID or its ARN. For a list of model IDs for base models, see [Amazon Bedrock base model IDs (on-demand throughput)](https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html#model-ids-arns) in the Amazon Bedrock User Guide.
///
/// * If you use a provisioned model, specify the ARN of the Provisioned Throughput. For more information, see [Run inference using a Provisioned Throughput](https://docs.aws.amazon.com/bedrock/latest/userguide/prov-thru-use.html) in the Amazon Bedrock User Guide.
///
/// * If you use a custom model, first purchase Provisioned Throughput for it. Then specify the ARN of the resulting provisioned model. For more information, see [Use a custom model in Amazon Bedrock](https://docs.aws.amazon.com/bedrock/latest/userguide/model-customization-use.html) in the Amazon Bedrock User Guide.
/// This member is required.
public var modelId: Swift.String?
/// Specifies whether to enable or disable the Bedrock trace. If enabled, you can see the full Bedrock trace.
public var trace: BedrockRuntimeClientTypes.Trace?

public init(
accept: Swift.String? = nil,
body: ClientRuntime.Data? = nil,
contentType: Swift.String? = nil,
modelId: Swift.String? = nil
guardrailIdentifier: Swift.String? = nil,
guardrailVersion: Swift.String? = nil,
modelId: Swift.String? = nil,
trace: BedrockRuntimeClientTypes.Trace? = nil
)
{
self.accept = accept
self.body = body
self.contentType = contentType
self.guardrailIdentifier = guardrailIdentifier
self.guardrailVersion = guardrailVersion
self.modelId = modelId
self.trace = trace
}
}
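Taken together, the new guardrail and trace members above are ordinary optional initializer parameters. A hypothetical construction (guardrail ID, model ID, and prompt are placeholders) would look roughly like the sketch below, with the redacting debugDescription shown earlier keeping the prompt out of logs.

import AWSBedrockRuntime
import Foundation

// Placeholders throughout; none of these identifiers come from this diff.
let input = InvokeModelInput(
    accept: "application/json",
    body: #"{"inputText": "Tell me about clouds."}"#.data(using: .utf8),
    contentType: "application/json",
    guardrailIdentifier: "gr-1234567890ab",
    guardrailVersion: "DRAFT",
    modelId: "amazon.titan-text-express-v1",
    trace: .enabled
)
debugPrint(input)
// Prints the guardrail fields and trace in clear text, while the request body
// is rendered as "CONTENT_REDACTED" by the debugDescription above.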

@@ -241,7 +274,7 @@ extension InvokeModelOutput: ClientRuntime.HttpResponseBinding {
}

public struct InvokeModelOutput {
/// Inference response from the model in the format specified in the content-type header field. To see the format and content of this field for different models, refer to [Inference parameters](https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters.html).
/// Inference response from the model in the format specified in the contentType header. To see the format and content of the request and response bodies for different models, refer to [Inference parameters](https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters.html).
/// This member is required.
public var body: ClientRuntime.Data?
/// The MIME type of the inference result.
@@ -295,7 +328,7 @@ enum InvokeModelOutputError: ClientRuntime.HttpResponseErrorBinding {

extension InvokeModelWithResponseStreamInput: Swift.CustomDebugStringConvertible {
public var debugDescription: Swift.String {
"InvokeModelWithResponseStreamInput(accept: \(Swift.String(describing: accept)), contentType: \(Swift.String(describing: contentType)), modelId: \(Swift.String(describing: modelId)), body: \"CONTENT_REDACTED\")"}
"InvokeModelWithResponseStreamInput(accept: \(Swift.String(describing: accept)), contentType: \(Swift.String(describing: contentType)), guardrailIdentifier: \(Swift.String(describing: guardrailIdentifier)), guardrailVersion: \(Swift.String(describing: guardrailVersion)), modelId: \(Swift.String(describing: modelId)), trace: \(Swift.String(describing: trace)), body: \"CONTENT_REDACTED\")"}
}

extension InvokeModelWithResponseStreamInput: Swift.Encodable {
@@ -321,6 +354,15 @@ extension InvokeModelWithResponseStreamInput {
if let contentType = value.contentType {
items.add(Header(name: "Content-Type", value: Swift.String(contentType)))
}
if let guardrailIdentifier = value.guardrailIdentifier {
items.add(Header(name: "X-Amzn-Bedrock-GuardrailIdentifier", value: Swift.String(guardrailIdentifier)))
}
if let guardrailVersion = value.guardrailVersion {
items.add(Header(name: "X-Amzn-Bedrock-GuardrailVersion", value: Swift.String(guardrailVersion)))
}
if let trace = value.trace {
items.add(Header(name: "X-Amzn-Bedrock-Trace", value: Swift.String(trace.rawValue)))
}
return items
}
}
@@ -338,26 +380,50 @@ extension InvokeModelWithResponseStreamInput {
public struct InvokeModelWithResponseStreamInput {
/// The desired MIME type of the inference body in the response. The default value is application/json.
public var accept: Swift.String?
/// Inference input in the format specified by the content-type. To see the format and content of this field for different models, refer to [Inference parameters](https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters.html).
/// The prompt and inference parameters in the format specified in the contentType in the header. To see the format and content of the request and response bodies for different models, refer to [Inference parameters](https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters.html). For more information, see [Run inference](https://docs.aws.amazon.com/bedrock/latest/userguide/api-methods-run.html) in the Bedrock User Guide.
/// This member is required.
public var body: ClientRuntime.Data?
/// The MIME type of the input data in the request. The default value is application/json.
public var contentType: Swift.String?
/// Id of the model to invoke using the streaming request.
/// The unique identifier of the guardrail that you want to use. If you don't provide a value, no guardrail is applied to the invocation. An error is thrown in the following situations.
///
/// * You don't provide a guardrail identifier but you specify the amazon-bedrock-guardrailConfig field in the request body.
///
/// * You enable the guardrail but the contentType isn't application/json.
///
/// * You provide a guardrail identifier, but guardrailVersion isn't specified.
public var guardrailIdentifier: Swift.String?
/// The version number for the guardrail. The value can also be DRAFT.
public var guardrailVersion: Swift.String?
/// The unique identifier of the model to invoke to run inference. The modelId to provide depends on the type of model that you use:
///
/// * If you use a base model, specify the model ID or its ARN. For a list of model IDs for base models, see [Amazon Bedrock base model IDs (on-demand throughput)](https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html#model-ids-arns) in the Amazon Bedrock User Guide.
///
/// * If you use a provisioned model, specify the ARN of the Provisioned Throughput. For more information, see [Run inference using a Provisioned Throughput](https://docs.aws.amazon.com/bedrock/latest/userguide/prov-thru-use.html) in the Amazon Bedrock User Guide.
///
/// * If you use a custom model, first purchase Provisioned Throughput for it. Then specify the ARN of the resulting provisioned model. For more information, see [Use a custom model in Amazon Bedrock](https://docs.aws.amazon.com/bedrock/latest/userguide/model-customization-use.html) in the Amazon Bedrock User Guide.
/// This member is required.
public var modelId: Swift.String?
/// Specifies whether to enable or disable the Bedrock trace. If enabled, you can see the full Bedrock trace.
public var trace: BedrockRuntimeClientTypes.Trace?

public init(
accept: Swift.String? = nil,
body: ClientRuntime.Data? = nil,
contentType: Swift.String? = nil,
modelId: Swift.String? = nil
guardrailIdentifier: Swift.String? = nil,
guardrailVersion: Swift.String? = nil,
modelId: Swift.String? = nil,
trace: BedrockRuntimeClientTypes.Trace? = nil
)
{
self.accept = accept
self.body = body
self.contentType = contentType
self.guardrailIdentifier = guardrailIdentifier
self.guardrailVersion = guardrailVersion
self.modelId = modelId
self.trace = trace
}
}
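For the streaming variant, the AsyncThrowingStream body can be consumed with for-try-await. The sketch below assumes the ResponseStream union exposes a .chunk case whose payload carries raw bytes; those case and property names are assumptions, not taken from this diff, as are the model ID and prompt.

import AWSBedrockRuntime
import Foundation

// Sketch only: `client` is a configured BedrockRuntimeClient, and the event
// case names (.chunk, .bytes) are assumed from the ResponseStream payload union.
func streamInference(client: BedrockRuntimeClient) async throws {
    let input = InvokeModelWithResponseStreamInput(
        accept: "application/json",
        body: #"{"inputText": "Write a haiku about Swift."}"#.data(using: .utf8),
        contentType: "application/json",
        modelId: "amazon.titan-text-express-v1"
    )
    let output = try await client.invokeModelWithResponseStream(input: input)
    guard let stream = output.body else { return }
    for try await event in stream {
        if case .chunk(let part) = event, let bytes = part.bytes {
            print(String(data: bytes, encoding: .utf8) ?? "", terminator: "")
        }
    }
}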

@@ -395,7 +461,7 @@ extension InvokeModelWithResponseStreamOutput: ClientRuntime.HttpResponseBinding
}

public struct InvokeModelWithResponseStreamOutput {
/// Inference response from the model in the format specified by Content-Type. To see the format and content of this field for different models, refer to [Inference parameters](https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters.html).
/// Inference response from the model in the format specified by the contentType header. To see the format and content of this field for different models, refer to [Inference parameters](https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters.html).
/// This member is required.
public var body: AsyncThrowingStream<BedrockRuntimeClientTypes.ResponseStream, Swift.Error>?
/// The MIME type of the inference result.
@@ -612,7 +678,7 @@ extension ModelStreamErrorException {
}
}

/// An error occurred while streaming the response.
/// An error occurred while streaming the response. Retry your request.
public struct ModelStreamErrorException: ClientRuntime.ModeledError, AWSClientRuntime.AWSServiceError, ClientRuntime.HTTPError, Swift.Error {

public struct Properties {
@@ -1016,6 +1082,38 @@ extension ThrottlingExceptionBody: Swift.Decodable {
}
}

extension BedrockRuntimeClientTypes {
public enum Trace: Swift.Equatable, Swift.RawRepresentable, Swift.CaseIterable, Swift.Codable, Swift.Hashable {
case disabled
case enabled
case sdkUnknown(Swift.String)

public static var allCases: [Trace] {
return [
.disabled,
.enabled,
.sdkUnknown("")
]
}
public init?(rawValue: Swift.String) {
let value = Self.allCases.first(where: { $0.rawValue == rawValue })
self = value ?? Self.sdkUnknown(rawValue)
}
public var rawValue: Swift.String {
switch self {
case .disabled: return "DISABLED"
case .enabled: return "ENABLED"
case let .sdkUnknown(s): return s
}
}
public init(from decoder: Swift.Decoder) throws {
let container = try decoder.singleValueContainer()
let rawValue = try container.decode(RawValue.self)
self = Trace(rawValue: rawValue) ?? Trace.sdkUnknown(rawValue)
}
}
}
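A small sketch of how the new Trace enum round-trips raw values, per the implementation above: known strings map to their cases, and anything unrecognized is preserved through sdkUnknown rather than failing to decode.

import AWSBedrockRuntime

let enabled = BedrockRuntimeClientTypes.Trace(rawValue: "ENABLED")       // .enabled
let unknown = BedrockRuntimeClientTypes.Trace(rawValue: "SOMETHING_NEW") // .sdkUnknown("SOMETHING_NEW")
print(enabled?.rawValue ?? "nil", unknown?.rawValue ?? "nil")            // ENABLED SOMETHING_NEW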

extension ValidationException: Swift.Codable {
enum CodingKeys: Swift.String, Swift.CodingKey {
case message