@pulumi/aws-native
The Pulumi AWS Cloud Control Provider enables you to build, deploy, and manage [any AWS resource that's supported by the AWS Cloud Control API](https://github.com/pulumi/pulumi-aws-native/blob/master/provider/cmd/pulumi-gen-aws-native/supported-types.txt).
TypeScript
import * as pulumi from "@pulumi/pulumi";
import * as outputs from "../types/output";
import * as enums from "../types/enums";
/**
* Definition of AWS::Bedrock::ApplicationInferenceProfile Resource Type
*/
export declare function getApplicationInferenceProfile(args: GetApplicationInferenceProfileArgs, opts?: pulumi.InvokeOptions): Promise<GetApplicationInferenceProfileResult>;
export interface GetApplicationInferenceProfileArgs {
/**
* Inference profile identifier. Supports both system-defined inference profile IDs and inference profile ARNs.
*/
inferenceProfileIdentifier: string;
}
export interface GetApplicationInferenceProfileResult {
/**
* The timestamp at which the inference profile was created.
*/
readonly createdAt?: string;
/**
* The Amazon Resource Name (ARN) of the inference profile.
*/
readonly inferenceProfileArn?: string;
/**
* The unique identifier of the inference profile.
*/
readonly inferenceProfileId?: string;
/**
* Inference profile identifier. Supports both system-defined inference profile IDs and inference profile ARNs.
*/
readonly inferenceProfileIdentifier?: string;
/**
* List of model configurations for the inference profile.
*/
readonly models?: outputs.bedrock.ApplicationInferenceProfileInferenceProfileModel[];
/**
* The status of the inference profile. `ACTIVE` means that the inference profile is ready to be used.
*/
readonly status?: enums.bedrock.ApplicationInferenceProfileInferenceProfileStatus;
/**
* List of tags attached to the inference profile.
*/
readonly tags?: outputs.Tag[];
/**
* The type of the inference profile. The following types are possible:
*
* - `SYSTEM_DEFINED` – The inference profile is defined by Amazon Bedrock. You can route inference requests across regions with these inference profiles.
* - `APPLICATION` – The inference profile was created by a user. This type of inference profile can track metrics and costs when invoking the model in it. The inference profile may route requests to one or multiple regions.
*/
readonly type?: enums.bedrock.ApplicationInferenceProfileInferenceProfileType;
/**
* The timestamp at which the inference profile was last updated.
*/
readonly updatedAt?: string;
}
/**
* Definition of AWS::Bedrock::ApplicationInferenceProfile Resource Type
*/
export declare function getApplicationInferenceProfileOutput(args: GetApplicationInferenceProfileOutputArgs, opts?: pulumi.InvokeOutputOptions): pulumi.Output<GetApplicationInferenceProfileResult>;
export interface GetApplicationInferenceProfileOutputArgs {
/**
* Inference profile identifier. Supports both system-defined inference profile IDs and inference profile ARNs.
*/
inferenceProfileIdentifier: pulumi.Input<string>;
}
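Below is a minimal usage sketch for both forms of this function. It assumes the declarations above are exposed under the bedrock module of @pulumi/aws-native; the inference profile identifier values are placeholders to substitute with your own ID or ARN.

import * as aws_native from "@pulumi/aws-native";

// Promise form: resolves the lookup once, e.g. inside an async helper or a test.
export async function describeProfile(identifier: string) {
    const profile = await aws_native.bedrock.getApplicationInferenceProfile({
        inferenceProfileIdentifier: identifier,
    });
    // All result fields are optional in the generated types.
    console.log(profile.inferenceProfileArn, profile.status, profile.type);
    return profile;
}

// Output form: accepts pulumi.Input values, so it composes with resources whose
// outputs are only known at deployment time.
const lookedUp = aws_native.bedrock.getApplicationInferenceProfileOutput({
    inferenceProfileIdentifier: "my-application-profile-id", // placeholder identifier
});
// Property access on a pulumi.Output lifts through to the underlying result fields.
export const profileArn = lookedUp.inferenceProfileArn;

Inside a Pulumi program the Output form is usually the better fit, since the identifier can itself come from another resource's output; the Promise form suits plain async code.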