AWS v6.73.0 published on Wednesday, Mar 19, 2025 by Pulumi
aws.bedrockfoundation.getModel
Explore with Pulumi AI
Data source for managing an AWS Bedrock Foundation Model.
Example Usage
Basic Usage
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
// List all available Bedrock foundation models, then look up the full
// details of the first model summary returned.
const test = aws.bedrockfoundation.getModels({});
const testGetModel = test.then(test => aws.bedrockfoundation.getModel({
    // Optional chaining: modelSummaries may be empty or undefined.
    modelId: test.modelSummaries?.[0]?.modelId,
}));
import pulumi
import pulumi_aws as aws
# List all available Bedrock foundation models, then look up the full
# details of the first model summary returned.
test = aws.bedrockfoundation.get_models()
test_get_model = aws.bedrockfoundation.get_model(model_id=test.model_summaries[0].model_id)
package main
import (
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/bedrockfoundation"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// List all available Bedrock foundation models.
		test, err := bedrockfoundation.GetModels(ctx, &bedrockfoundation.GetModelsArgs{}, nil)
		if err != nil {
			return err
		}
		// Look up the full details of the first model summary returned.
		_, err = bedrockfoundation.GetModel(ctx, &bedrockfoundation.GetModelArgs{
			ModelId: test.ModelSummaries[0].ModelId,
		}, nil)
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() => 
{
    // List all available Bedrock foundation models.
    var test = Aws.BedrockFoundation.GetModels.Invoke();
    // Look up the full details of the first model summary returned.
    var testGetModel = Aws.BedrockFoundation.GetModel.Invoke(new()
    {
        ModelId = test.Apply(getModelsResult => getModelsResult.ModelSummaries[0]?.ModelId),
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.bedrockfoundation.BedrockfoundationFunctions;
import com.pulumi.aws.bedrockfoundation.inputs.GetModelsArgs;
import com.pulumi.aws.bedrockfoundation.inputs.GetModelArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    // List all available Bedrock foundation models, then look up the full
    // details of the first model summary returned.
    public static void stack(Context ctx) {
        final var test = BedrockfoundationFunctions.getModels();
        final var testGetModel = BedrockfoundationFunctions.getModel(GetModelArgs.builder()
            // modelSummaries() returns a java.util.List, so the element must be
            // accessed with get(0); array-style indexing ([0]) is not valid Java.
            .modelId(test.applyValue(getModelsResult -> getModelsResult.modelSummaries().get(0).modelId()))
            .build());
    }
}
variables:
  # List all available Bedrock foundation models.
  test:
    fn::invoke:
      function: aws:bedrockfoundation:getModels
      arguments: {}
  # Look up the full details of the first model summary returned.
  testGetModel:
    fn::invoke:
      function: aws:bedrockfoundation:getModel
      arguments:
        modelId: ${test.modelSummaries[0].modelId}
Using getModel
Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.
function getModel(args: GetModelArgs, opts?: InvokeOptions): Promise<GetModelResult>
function getModelOutput(args: GetModelOutputArgs, opts?: InvokeOptions): Output<GetModelResult>
def get_model(model_id: Optional[str] = None,
              opts: Optional[InvokeOptions] = None) -> GetModelResult
def get_model_output(model_id: Optional[pulumi.Input[str]] = None,
              opts: Optional[InvokeOptions] = None) -> Output[GetModelResult]
func GetModel(ctx *Context, args *GetModelArgs, opts ...InvokeOption) (*GetModelResult, error)
func GetModelOutput(ctx *Context, args *GetModelOutputArgs, opts ...InvokeOption) GetModelResultOutput
> Note: This function is named GetModel in the Go SDK.
public static class GetModel 
{
    public static Task<GetModelResult> InvokeAsync(GetModelArgs args, InvokeOptions? opts = null)
    public static Output<GetModelResult> Invoke(GetModelInvokeArgs args, InvokeOptions? opts = null)
}
public static CompletableFuture<GetModelResult> getModel(GetModelArgs args, InvokeOptions options)
public static Output<GetModelResult> getModel(GetModelArgs args, InvokeOptions options)
fn::invoke:
  function: aws:bedrockfoundation/getModel:getModel
  arguments:
    # arguments dictionary
The following arguments are supported:
- ModelId string
- Model identifier.
- ModelId string
- Model identifier.
- modelId String
- Model identifier.
- modelId string
- Model identifier.
- model_id str
- Model identifier.
- modelId String
- Model identifier.
getModel Result
The following output properties are available:
- CustomizationsSupporteds List<string>
- Customizations that the model supports.
- Id string
- InferenceTypesSupporteds List<string>
- Inference types that the model supports.
- InputModalities List<string>
- Input modalities that the model supports.
- ModelArn string
- Model ARN.
- ModelId string
- ModelName string
- Model name.
- OutputModalities List<string>
- Output modalities that the model supports.
- ProviderName string
- Model provider name.
- ResponseStreamingSupported bool
- Indicates whether the model supports streaming.
- CustomizationsSupporteds []string
- Customizations that the model supports.
- Id string
- InferenceTypesSupporteds []string
- Inference types that the model supports.
- InputModalities []string
- Input modalities that the model supports.
- ModelArn string
- Model ARN.
- ModelId string
- ModelName string
- Model name.
- OutputModalities []string
- Output modalities that the model supports.
- ProviderName string
- Model provider name.
- ResponseStreamingSupported bool
- Indicates whether the model supports streaming.
- customizationsSupporteds List<String>
- Customizations that the model supports.
- id String
- inferenceTypesSupporteds List<String>
- Inference types that the model supports.
- inputModalities List<String>
- Input modalities that the model supports.
- modelArn String
- Model ARN.
- modelId String
- modelName String
- Model name.
- outputModalities List<String>
- Output modalities that the model supports.
- providerName String
- Model provider name.
- responseStreamingSupported Boolean
- Indicates whether the model supports streaming.
- customizationsSupporteds string[]
- Customizations that the model supports.
- id string
- inferenceTypesSupporteds string[]
- Inference types that the model supports.
- inputModalities string[]
- Input modalities that the model supports.
- modelArn string
- Model ARN.
- modelId string
- modelName string
- Model name.
- outputModalities string[]
- Output modalities that the model supports.
- providerName string
- Model provider name.
- responseStreamingSupported boolean
- Indicates whether the model supports streaming.
- customizations_supporteds Sequence[str]
- Customizations that the model supports.
- id str
- inference_types_supporteds Sequence[str]
- Inference types that the model supports.
- input_modalities Sequence[str]
- Input modalities that the model supports.
- model_arn str
- Model ARN.
- model_id str
- model_name str
- Model name.
- output_modalities Sequence[str]
- Output modalities that the model supports.
- provider_name str
- Model provider name.
- response_streaming_supported bool
- Indicates whether the model supports streaming.
- customizationsSupporteds List<String>
- Customizations that the model supports.
- id String
- inferenceTypesSupporteds List<String>
- Inference types that the model supports.
- inputModalities List<String>
- Input modalities that the model supports.
- modelArn String
- Model ARN.
- modelId String
- modelName String
- Model name.
- outputModalities List<String>
- Output modalities that the model supports.
- providerName String
- Model provider name.
- responseStreamingSupported Boolean
- Indicates whether the model supports streaming.
Package Details
- Repository
- AWS Classic pulumi/pulumi-aws
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the aws Terraform Provider.