aws.bedrock.InferenceProfile
Resource for managing an AWS Bedrock Inference Profile.
Example Usage
Basic Usage
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const current = aws.getCallerIdentity({});
const example = new aws.bedrock.InferenceProfile("example", {
    name: "Claude Sonnet for Project 123",
    description: "Profile with tag for cost allocation tracking",
    modelSource: {
        copyFrom: "arn:aws:bedrock:us-west-2::foundation-model/anthropic.claude-3-5-sonnet-20241022-v2:0",
    },
    tags: {
        ProjectID: "123",
    },
});
import pulumi
import pulumi_aws as aws
current = aws.get_caller_identity()
example = aws.bedrock.InferenceProfile("example",
    name="Claude Sonnet for Project 123",
    description="Profile with tag for cost allocation tracking",
    model_source={
        "copy_from": "arn:aws:bedrock:us-west-2::foundation-model/anthropic.claude-3-5-sonnet-20241022-v2:0",
    },
    tags={
        "ProjectID": "123",
    })
package main
import (
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws"
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/bedrock"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := aws.GetCallerIdentity(ctx, &aws.GetCallerIdentityArgs{}, nil)
		if err != nil {
			return err
		}
		_, err = bedrock.NewInferenceProfile(ctx, "example", &bedrock.InferenceProfileArgs{
			Name:        pulumi.String("Claude Sonnet for Project 123"),
			Description: pulumi.String("Profile with tag for cost allocation tracking"),
			ModelSource: &bedrock.InferenceProfileModelSourceArgs{
				CopyFrom: pulumi.String("arn:aws:bedrock:us-west-2::foundation-model/anthropic.claude-3-5-sonnet-20241022-v2:0"),
			},
			Tags: pulumi.StringMap{
				"ProjectID": pulumi.String("123"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() => 
{
    var current = Aws.GetCallerIdentity.Invoke();
    var example = new Aws.Bedrock.InferenceProfile("example", new()
    {
        Name = "Claude Sonnet for Project 123",
        Description = "Profile with tag for cost allocation tracking",
        ModelSource = new Aws.Bedrock.Inputs.InferenceProfileModelSourceArgs
        {
            CopyFrom = "arn:aws:bedrock:us-west-2::foundation-model/anthropic.claude-3-5-sonnet-20241022-v2:0",
        },
        Tags = 
        {
            { "ProjectID", "123" },
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.AwsFunctions;
import com.pulumi.aws.inputs.GetCallerIdentityArgs;
import com.pulumi.aws.bedrock.InferenceProfile;
import com.pulumi.aws.bedrock.InferenceProfileArgs;
import com.pulumi.aws.bedrock.inputs.InferenceProfileModelSourceArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        final var current = AwsFunctions.getCallerIdentity();
        var example = new InferenceProfile("example", InferenceProfileArgs.builder()
            .name("Claude Sonnet for Project 123")
            .description("Profile with tag for cost allocation tracking")
            .modelSource(InferenceProfileModelSourceArgs.builder()
                .copyFrom("arn:aws:bedrock:us-west-2::foundation-model/anthropic.claude-3-5-sonnet-20241022-v2:0")
                .build())
            .tags(Map.of("ProjectID", "123"))
            .build());
    }
}
resources:
  example:
    type: aws:bedrock:InferenceProfile
    properties:
      name: Claude Sonnet for Project 123
      description: Profile with tag for cost allocation tracking
      modelSource:
        copyFrom: arn:aws:bedrock:us-west-2::foundation-model/anthropic.claude-3-5-sonnet-20241022-v2:0
      tags:
        ProjectID: '123'
variables:
  current:
    fn::invoke:
      function: aws:getCallerIdentity
      arguments: {}
Create InferenceProfile Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new InferenceProfile(name: string, args?: InferenceProfileArgs, opts?: CustomResourceOptions);
@overload
def InferenceProfile(resource_name: str,
                     args: Optional[InferenceProfileArgs] = None,
                     opts: Optional[ResourceOptions] = None)
@overload
def InferenceProfile(resource_name: str,
                     opts: Optional[ResourceOptions] = None,
                     description: Optional[str] = None,
                     model_source: Optional[InferenceProfileModelSourceArgs] = None,
                     name: Optional[str] = None,
                     tags: Optional[Mapping[str, str]] = None,
                     timeouts: Optional[InferenceProfileTimeoutsArgs] = None)
func NewInferenceProfile(ctx *Context, name string, args *InferenceProfileArgs, opts ...ResourceOption) (*InferenceProfile, error)
public InferenceProfile(string name, InferenceProfileArgs? args = null, CustomResourceOptions? opts = null)
public InferenceProfile(String name, InferenceProfileArgs args)
public InferenceProfile(String name, InferenceProfileArgs args, CustomResourceOptions options)
type: aws:bedrock:InferenceProfile
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args InferenceProfileArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args InferenceProfileArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args InferenceProfileArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args InferenceProfileArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args InferenceProfileArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var inferenceProfileResource = new Aws.Bedrock.InferenceProfile("inferenceProfileResource", new()
{
    Description = "string",
    ModelSource = new Aws.Bedrock.Inputs.InferenceProfileModelSourceArgs
    {
        CopyFrom = "string",
    },
    Name = "string",
    Tags = 
    {
        { "string", "string" },
    },
    Timeouts = new Aws.Bedrock.Inputs.InferenceProfileTimeoutsArgs
    {
        Create = "string",
        Delete = "string",
        Update = "string",
    },
});
example, err := bedrock.NewInferenceProfile(ctx, "inferenceProfileResource", &bedrock.InferenceProfileArgs{
	Description: pulumi.String("string"),
	ModelSource: &bedrock.InferenceProfileModelSourceArgs{
		CopyFrom: pulumi.String("string"),
	},
	Name: pulumi.String("string"),
	Tags: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	Timeouts: &bedrock.InferenceProfileTimeoutsArgs{
		Create: pulumi.String("string"),
		Delete: pulumi.String("string"),
		Update: pulumi.String("string"),
	},
})
var inferenceProfileResource = new InferenceProfile("inferenceProfileResource", InferenceProfileArgs.builder()
    .description("string")
    .modelSource(InferenceProfileModelSourceArgs.builder()
        .copyFrom("string")
        .build())
    .name("string")
    .tags(Map.of("string", "string"))
    .timeouts(InferenceProfileTimeoutsArgs.builder()
        .create("string")
        .delete("string")
        .update("string")
        .build())
    .build());
inference_profile_resource = aws.bedrock.InferenceProfile("inferenceProfileResource",
    description="string",
    model_source={
        "copy_from": "string",
    },
    name="string",
    tags={
        "string": "string",
    },
    timeouts={
        "create": "string",
        "delete": "string",
        "update": "string",
    })
const inferenceProfileResource = new aws.bedrock.InferenceProfile("inferenceProfileResource", {
    description: "string",
    modelSource: {
        copyFrom: "string",
    },
    name: "string",
    tags: {
        string: "string",
    },
    timeouts: {
        create: "string",
        "delete": "string",
        update: "string",
    },
});
type: aws:bedrock:InferenceProfile
properties:
    description: string
    modelSource:
        copyFrom: string
    name: string
    tags:
        string: string
    timeouts:
        create: string
        delete: string
        update: string
InferenceProfile Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
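For instance, a minimal Python sketch of the two equivalent forms for the modelSource input (the resource names and model ARN below are placeholders):
import pulumi_aws as aws
# Dictionary literal form
profile_a = aws.bedrock.InferenceProfile("profile-a",
    model_source={
        "copy_from": "arn:aws:bedrock:us-west-2::foundation-model/example-model",  # placeholder ARN
    })
# Argument class form
profile_b = aws.bedrock.InferenceProfile("profile-b",
    model_source=aws.bedrock.InferenceProfileModelSourceArgs(
        copy_from="arn:aws:bedrock:us-west-2::foundation-model/example-model",  # placeholder ARN
    ))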
The InferenceProfile resource accepts the following input properties:
- Description string
- The description of the inference profile.
- ModelSource InferenceProfile Model Source 
- The source of the model this inference profile will track metrics and cost for. See model_source. The other arguments are optional.
- Name string
- The name of the inference profile.
- Tags Dictionary<string, string>
- Key-value mapping of resource tags for the inference profile.
- Timeouts
InferenceProfile Timeouts 
- Description string
- The description of the inference profile.
- ModelSource InferenceProfile Model Source Args 
- The source of the model this inference profile will track metrics and cost for. See model_source. The other arguments are optional.
- Name string
- The name of the inference profile.
- Tags map[string]string
- Key-value mapping of resource tags for the inference profile.
- Timeouts
InferenceProfile Timeouts Args 
- description String
- The description of the inference profile.
- modelSource InferenceProfile Model Source 
- The source of the model this inference profile will track metrics and cost for. See model_source. The other arguments are optional.
- name String
- The name of the inference profile.
- tags Map<String,String>
- Key-value mapping of resource tags for the inference profile.
- timeouts
InferenceProfile Timeouts 
- description string
- The description of the inference profile.
- modelSource InferenceProfile Model Source 
- The source of the model this inference profile will track metrics and cost for. See model_source. The other arguments are optional.
- name string
- The name of the inference profile.
- tags {[key: string]: string}
- Key-value mapping of resource tags for the inference profile.
- timeouts
InferenceProfile Timeouts 
- description str
- The description of the inference profile.
- model_source InferenceProfile Model Source Args 
- The source of the model this inference profile will track metrics and cost for. See model_source. The other arguments are optional.
- name str
- The name of the inference profile.
- tags Mapping[str, str]
- Key-value mapping of resource tags for the inference profile.
- timeouts
InferenceProfile Timeouts Args 
- description String
- The description of the inference profile.
- modelSource Property Map
- The source of the model this inference profile will track metrics and cost for. See model_source. The other arguments are optional.
- name String
- The name of the inference profile.
- tags Map<String>
- Key-value mapping of resource tags for the inference profile.
- timeouts Property Map
Outputs
All input properties are implicitly available as output properties. Additionally, the InferenceProfile resource produces the following output properties:
- Arn string
- The Amazon Resource Name (ARN) of the inference profile.
- CreatedAt string
- The time at which the inference profile was created.
- Id string
- The provider-assigned unique ID for this managed resource.
- Models
List<InferenceProfile Model> 
- A list of information about each model in the inference profile. See models.
- Status string
- The status of the inference profile. ACTIVE means that the inference profile is available to use.
- TagsAll Dictionary<string, string>
- Type string
- The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock. APPLICATION means that the inference profile is defined by the user.
- UpdatedAt string
- The time at which the inference profile was last updated.
- Arn string
- The Amazon Resource Name (ARN) of the inference profile.
- CreatedAt string
- The time at which the inference profile was created.
- Id string
- The provider-assigned unique ID for this managed resource.
- Models
[]InferenceProfile Model 
- A list of information about each model in the inference profile. See models.
- Status string
- The status of the inference profile. ACTIVE means that the inference profile is available to use.
- TagsAll map[string]string
- Type string
- The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock. APPLICATION means that the inference profile is defined by the user.
- UpdatedAt string
- The time at which the inference profile was last updated.
- arn String
- The Amazon Resource Name (ARN) of the inference profile.
- createdAt String
- The time at which the inference profile was created.
- id String
- The provider-assigned unique ID for this managed resource.
- models
List<InferenceProfile Model> 
- A list of information about each model in the inference profile. See models.
- status String
- The status of the inference profile. ACTIVE means that the inference profile is available to use.
- tagsAll Map<String,String>
- type String
- The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock. APPLICATION means that the inference profile is defined by the user.
- updatedAt String
- The time at which the inference profile was last updated.
- arn string
- The Amazon Resource Name (ARN) of the inference profile.
- createdAt string
- The time at which the inference profile was created.
- id string
- The provider-assigned unique ID for this managed resource.
- models
InferenceProfile Model[] 
- A list of information about each model in the inference profile. See models.
- status string
- The status of the inference profile. ACTIVE means that the inference profile is available to use.
- tagsAll {[key: string]: string}
- type string
- The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock. APPLICATION means that the inference profile is defined by the user.
- updatedAt string
- The time at which the inference profile was last updated.
- arn str
- The Amazon Resource Name (ARN) of the inference profile.
- created_at str
- The time at which the inference profile was created.
- id str
- The provider-assigned unique ID for this managed resource.
- models
Sequence[InferenceProfile Model] 
- A list of information about each model in the inference profile. See models.
- status str
- The status of the inference profile. ACTIVE means that the inference profile is available to use.
- tags_all Mapping[str, str]
- type str
- The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock. APPLICATION means that the inference profile is defined by the user.
- updated_at str
- The time at which the inference profile was last updated.
- arn String
- The Amazon Resource Name (ARN) of the inference profile.
- createdAt String
- The time at which the inference profile was created.
- id String
- The provider-assigned unique ID for this managed resource.
- models List<Property Map>
- A list of information about each model in the inference profile. See models.
- status String
- The status of the inference profile. ACTIVE means that the inference profile is available to use.
- tagsAll Map<String>
- type String
- The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock. APPLICATION means that the inference profile is defined by the user.
- updatedAt String
- The time at which the inference profile was last updated.
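As an illustration, a minimal Python sketch that exports a few of these computed outputs (the model ARN is a placeholder):
import pulumi
import pulumi_aws as aws
example = aws.bedrock.InferenceProfile("example",
    model_source={
        "copy_from": "arn:aws:bedrock:us-west-2::foundation-model/example-model",  # placeholder ARN
    })
# arn, status, and created_at are only known once the resource has been created
pulumi.export("inference_profile_arn", example.arn)
pulumi.export("inference_profile_status", example.status)
pulumi.export("inference_profile_created_at", example.created_at)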
Look up Existing InferenceProfile Resource
Get an existing InferenceProfile resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: InferenceProfileState, opts?: CustomResourceOptions): InferenceProfile
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        arn: Optional[str] = None,
        created_at: Optional[str] = None,
        description: Optional[str] = None,
        model_source: Optional[InferenceProfileModelSourceArgs] = None,
        models: Optional[Sequence[InferenceProfileModelArgs]] = None,
        name: Optional[str] = None,
        status: Optional[str] = None,
        tags: Optional[Mapping[str, str]] = None,
        tags_all: Optional[Mapping[str, str]] = None,
        timeouts: Optional[InferenceProfileTimeoutsArgs] = None,
        type: Optional[str] = None,
        updated_at: Optional[str] = None) -> InferenceProfile
func GetInferenceProfile(ctx *Context, name string, id IDInput, state *InferenceProfileState, opts ...ResourceOption) (*InferenceProfile, error)
public static InferenceProfile Get(string name, Input<string> id, InferenceProfileState? state, CustomResourceOptions? opts = null)
public static InferenceProfile get(String name, Output<String> id, InferenceProfileState state, CustomResourceOptions options)
resources:
  _:
    type: aws:bedrock:InferenceProfile
    get:
      id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Arn string
- The Amazon Resource Name (ARN) of the inference profile.
- CreatedAt string
- The time at which the inference profile was created.
- Description string
- The description of the inference profile.
- ModelSource InferenceProfile Model Source 
- The source of the model this inference profile will track metrics and cost for. See model_source. The other arguments are optional.
- Models
List<InferenceProfile Model> 
- A list of information about each model in the inference profile. See models.
- Name string
- The name of the inference profile.
- Status string
- The status of the inference profile. ACTIVE means that the inference profile is available to use.
- Tags Dictionary<string, string>
- Key-value mapping of resource tags for the inference profile.
- TagsAll Dictionary<string, string>
- Timeouts
InferenceProfile Timeouts 
- Type string
- The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock. APPLICATION means that the inference profile is defined by the user.
- UpdatedAt string
- The time at which the inference profile was last updated.
- Arn string
- The Amazon Resource Name (ARN) of the inference profile.
- CreatedAt string
- The time at which the inference profile was created.
- Description string
- The description of the inference profile.
- ModelSource InferenceProfile Model Source Args 
- The source of the model this inference profile will track metrics and cost for. See model_source. The other arguments are optional.
- Models
[]InferenceProfile Model Args 
- A list of information about each model in the inference profile. See models.
- Name string
- The name of the inference profile.
- Status string
- The status of the inference profile. ACTIVE means that the inference profile is available to use.
- Tags map[string]string
- Key-value mapping of resource tags for the inference profile.
- TagsAll map[string]string
- Timeouts
InferenceProfile Timeouts Args 
- Type string
- The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock. APPLICATION means that the inference profile is defined by the user.
- UpdatedAt string
- The time at which the inference profile was last updated.
- arn String
- The Amazon Resource Name (ARN) of the inference profile.
- createdAt String
- The time at which the inference profile was created.
- description String
- The description of the inference profile.
- modelSource InferenceProfile Model Source 
- The source of the model this inference profile will track metrics and cost for. See model_source. The other arguments are optional.
- models
List<InferenceProfile Model> 
- A list of information about each model in the inference profile. See models.
- name String
- The name of the inference profile.
- status String
- The status of the inference profile. ACTIVE means that the inference profile is available to use.
- tags Map<String,String>
- Key-value mapping of resource tags for the inference profile.
- tagsAll Map<String,String>
- timeouts
InferenceProfile Timeouts 
- type String
- The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock. APPLICATION means that the inference profile is defined by the user.
- updatedAt String
- The time at which the inference profile was last updated.
- arn string
- The Amazon Resource Name (ARN) of the inference profile.
- createdAt string
- The time at which the inference profile was created.
- description string
- The description of the inference profile.
- modelSource InferenceProfile Model Source 
- The source of the model this inference profile will track metrics and cost for. See model_source. The other arguments are optional.
- models
InferenceProfile Model[] 
- A list of information about each model in the inference profile. See models.
- name string
- The name of the inference profile.
- status string
- The status of the inference profile. ACTIVE means that the inference profile is available to use.
- tags {[key: string]: string}
- Key-value mapping of resource tags for the inference profile.
- tagsAll {[key: string]: string}
- timeouts
InferenceProfile Timeouts 
- type string
- The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock. APPLICATION means that the inference profile is defined by the user.
- updatedAt string
- The time at which the inference profile was last updated.
- arn str
- The Amazon Resource Name (ARN) of the inference profile.
- created_at str
- The time at which the inference profile was created.
- description str
- The description of the inference profile.
- model_source InferenceProfile Model Source Args 
- The source of the model this inference profile will track metrics and cost for. See model_source. The other arguments are optional.
- models
Sequence[InferenceProfile Model Args] 
- A list of information about each model in the inference profile. See models.
- name str
- The name of the inference profile.
- status str
- The status of the inference profile. ACTIVE means that the inference profile is available to use.
- tags Mapping[str, str]
- Key-value mapping of resource tags for the inference profile.
- tags_all Mapping[str, str]
- timeouts
InferenceProfile Timeouts Args 
- type str
- The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock. APPLICATION means that the inference profile is defined by the user.
- updated_at str
- The time at which the inference profile was last updated.
- arn String
- The Amazon Resource Name (ARN) of the inference profile.
- createdAt String
- The time at which the inference profile was created.
- description String
- The description of the inference profile.
- modelSource Property Map
- The source of the model this inference profile will track metrics and cost for. See model_source. The other arguments are optional.
- models List<Property Map>
- A list of information about each model in the inference profile. See models.
- name String
- The name of the inference profile.
- status String
- The status of the inference profile. ACTIVE means that the inference profile is available to use.
- tags Map<String>
- Key-value mapping of resource tags for the inference profile.
- tagsAll Map<String>
- timeouts Property Map
- type String
- The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock. APPLICATION means that the inference profile is defined by the user.
- updatedAt String
- The time at which the inference profile was last updated.
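For example, a minimal Python sketch of the lookup (the resource name is arbitrary and the ID is the placeholder from the import example below):
import pulumi
import pulumi_aws as aws
# adopt the state of an existing inference profile by its provider ID
existing = aws.bedrock.InferenceProfile.get("existing-profile",
    "inference_profile-id-12345678")
pulumi.export("existing_profile_arn", existing.arn)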
Supporting Types
InferenceProfileModel, InferenceProfileModelArgs      
- ModelArn string
- The Amazon Resource Name (ARN) of the model.
- ModelArn string
- The Amazon Resource Name (ARN) of the model.
- modelArn String
- The Amazon Resource Name (ARN) of the model.
- modelArn string
- The Amazon Resource Name (ARN) of the model.
- model_arn str
- The Amazon Resource Name (ARN) of the model.
- modelArn String
- The Amazon Resource Name (ARN) of the model.
InferenceProfileModelSource, InferenceProfileModelSourceArgs        
- CopyFrom string
- The Amazon Resource Name (ARN) of the model.
- CopyFrom string
- The Amazon Resource Name (ARN) of the model.
- copyFrom String
- The Amazon Resource Name (ARN) of the model.
- copyFrom string
- The Amazon Resource Name (ARN) of the model.
- copy_from str
- The Amazon Resource Name (ARN) of the model.
- copyFrom String
- The Amazon Resource Name (ARN) of the model.
InferenceProfileTimeouts, InferenceProfileTimeoutsArgs      
- Create string
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).
- Delete string
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours). Setting a timeout for a Delete operation is only applicable if changes are saved into state before the destroy operation occurs.
- Update string
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).
- Create string
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).
- Delete string
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours). Setting a timeout for a Delete operation is only applicable if changes are saved into state before the destroy operation occurs.
- Update string
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).
- create String
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).
- delete String
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours). Setting a timeout for a Delete operation is only applicable if changes are saved into state before the destroy operation occurs.
- update String
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).
- create string
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).
- delete string
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours). Setting a timeout for a Delete operation is only applicable if changes are saved into state before the destroy operation occurs.
- update string
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).
- create str
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).
- delete str
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours). Setting a timeout for a Delete operation is only applicable if changes are saved into state before the destroy operation occurs.
- update str
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).
- create String
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).
- delete String
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours). Setting a timeout for a Delete operation is only applicable if changes are saved into state before the destroy operation occurs.
- update String
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).
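A minimal Python sketch of setting these timeouts on a profile (the duration values are arbitrary examples and the model ARN is a placeholder):
import pulumi_aws as aws
# give the create, update, and delete operations explicit deadlines using duration strings
profile = aws.bedrock.InferenceProfile("profile-with-timeouts",
    model_source={
        "copy_from": "arn:aws:bedrock:us-west-2::foundation-model/example-model",  # placeholder ARN
    },
    timeouts={
        "create": "10m",
        "update": "10m",
        "delete": "5m",
    })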
Import
Using pulumi import, import a Bedrock Inference Profile using its inference profile ID. For example:
$ pulumi import aws:bedrock/inferenceProfile:InferenceProfile example inference_profile-id-12345678
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- AWS Classic pulumi/pulumi-aws
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the aws Terraform Provider.