We recommend new projects start with resources from the AWS provider.
aws-native.pipes.Pipe
Definition of AWS::Pipes::Pipe Resource Type
Example Usage
Example
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AwsNative = Pulumi.AwsNative;
return await Deployment.RunAsync(() => 
{
    var testPipe = new AwsNative.Pipes.Pipe("testPipe", new()
    {
        Name = "PipeCfnExample",
        RoleArn = "arn:aws:iam::123456789123:role/Pipe-Dev-All-Targets-Dummy-Execution-Role",
        Source = "arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
        Enrichment = "arn:aws:execute-api:us-east-1:123456789123:53eo2i89p9/*/POST/pets",
        Target = "arn:aws:states:us-east-1:123456789123:stateMachine:PipeTargetStateMachine",
    });
});
package main
import (
	"github.com/pulumi/pulumi-aws-native/sdk/go/aws/pipes"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := pipes.NewPipe(ctx, "testPipe", &pipes.PipeArgs{
			Name:       pulumi.String("PipeCfnExample"),
			RoleArn:    pulumi.String("arn:aws:iam::123456789123:role/Pipe-Dev-All-Targets-Dummy-Execution-Role"),
			Source:     pulumi.String("arn:aws:sqs:us-east-1:123456789123:pipeDemoSource"),
			Enrichment: pulumi.String("arn:aws:execute-api:us-east-1:123456789123:53eo2i89p9/*/POST/pets"),
			Target:     pulumi.String("arn:aws:states:us-east-1:123456789123:stateMachine:PipeTargetStateMachine"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
Java: Coming soon!
import * as pulumi from "@pulumi/pulumi";
import * as aws_native from "@pulumi/aws-native";
const testPipe = new aws_native.pipes.Pipe("testPipe", {
    name: "PipeCfnExample",
    roleArn: "arn:aws:iam::123456789123:role/Pipe-Dev-All-Targets-Dummy-Execution-Role",
    source: "arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
    enrichment: "arn:aws:execute-api:us-east-1:123456789123:53eo2i89p9/*/POST/pets",
    target: "arn:aws:states:us-east-1:123456789123:stateMachine:PipeTargetStateMachine",
});
import pulumi
import pulumi_aws_native as aws_native
test_pipe = aws_native.pipes.Pipe("testPipe",
    name="PipeCfnExample",
    role_arn="arn:aws:iam::123456789123:role/Pipe-Dev-All-Targets-Dummy-Execution-Role",
    source="arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
    enrichment="arn:aws:execute-api:us-east-1:123456789123:53eo2i89p9/*/POST/pets",
    target="arn:aws:states:us-east-1:123456789123:stateMachine:PipeTargetStateMachine")
YAML: Coming soon!
Create Pipe Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new Pipe(name: string, args: PipeArgs, opts?: CustomResourceOptions);

@overload
def Pipe(resource_name: str,
         args: PipeArgs,
         opts: Optional[ResourceOptions] = None)
@overload
def Pipe(resource_name: str,
         opts: Optional[ResourceOptions] = None,
         role_arn: Optional[str] = None,
         target: Optional[str] = None,
         source: Optional[str] = None,
         name: Optional[str] = None,
         kms_key_identifier: Optional[str] = None,
         log_configuration: Optional[PipeLogConfigurationArgs] = None,
         description: Optional[str] = None,
         enrichment_parameters: Optional[PipeEnrichmentParametersArgs] = None,
         enrichment: Optional[str] = None,
         source_parameters: Optional[PipeSourceParametersArgs] = None,
         tags: Optional[Mapping[str, str]] = None,
         desired_state: Optional[PipeRequestedPipeState] = None,
         target_parameters: Optional[PipeTargetParametersArgs] = None)

func NewPipe(ctx *Context, name string, args PipeArgs, opts ...ResourceOption) (*Pipe, error)

public Pipe(string name, PipeArgs args, CustomResourceOptions? opts = null)

type: aws-native:pipes:Pipe
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args PipeArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args PipeArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args PipeArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args PipeArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args PipeArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Pipe Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
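For example, the enrichment_parameters input below is provided both ways. This is a minimal sketch; the ARN values are the same placeholders used in the examples above, and the dictionary keys are assumed to match the argument class field names:
import pulumi_aws_native as aws_native
role_arn = "arn:aws:iam::123456789123:role/Pipe-Dev-All-Targets-Dummy-Execution-Role"
source_arn = "arn:aws:sqs:us-east-1:123456789123:pipeDemoSource"
target_arn = "arn:aws:states:us-east-1:123456789123:stateMachine:PipeTargetStateMachine"
enrichment_arn = "arn:aws:execute-api:us-east-1:123456789123:53eo2i89p9/*/POST/pets"
# Typed argument class form.
pipe_with_args = aws_native.pipes.Pipe("pipeWithArgs",
    role_arn=role_arn,
    source=source_arn,
    target=target_arn,
    enrichment=enrichment_arn,
    enrichment_parameters=aws_native.pipes.PipeEnrichmentParametersArgs(
        input_template="{\"detail\": <$.body>}"))
# Equivalent dictionary-literal form.
pipe_with_dict = aws_native.pipes.Pipe("pipeWithDict",
    role_arn=role_arn,
    source=source_arn,
    target=target_arn,
    enrichment=enrichment_arn,
    enrichment_parameters={
        "input_template": "{\"detail\": <$.body>}",
    })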
The Pipe resource accepts the following input properties:
- RoleArn string
- The ARN of the role that allows the pipe to send data to the target.
- Source string
- The ARN of the source resource.
- Target string
- The ARN of the target resource.
- Description string
- A description of the pipe.
- DesiredState Pulumi.AwsNative.Pipes.PipeRequestedPipeState
- The state the pipe should be in.
- Enrichment string
- The ARN of the enrichment resource.
- EnrichmentParameters Pulumi.AwsNative.Pipes.Inputs.PipeEnrichmentParameters
- The parameters required to set up enrichment on your pipe.
- KmsKeyIdentifier string
- The identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN. To update a pipe that is using the default AWS owned key to use a customer managed key instead, or to update a pipe that is using one customer managed key to use a different customer managed key, specify a customer managed key identifier. To update a pipe that is using a customer managed key to use the default AWS owned key, specify an empty string. For more information, see Managing keys in the AWS Key Management Service Developer Guide.
- LogConfiguration Pulumi.AwsNative.Pipes.Inputs.PipeLogConfiguration
- The logging configuration settings for the pipe.
- Name string
- The name of the pipe.
- SourceParameters Pulumi.AwsNative.Pipes.Inputs.PipeSourceParameters
- The parameters required to set up a source for your pipe.
- Tags Dictionary<string, string>
- The list of key-value pairs to associate with the pipe.
- TargetParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetParameters
- The parameters required to set up a target for your pipe. For more information about pipe target parameters, including how to use dynamic path parameters, see Target parameters in the Amazon EventBridge User Guide.
- RoleArn string
- The ARN of the role that allows the pipe to send data to the target.
- Source string
- The ARN of the source resource.
- Target string
- The ARN of the target resource.
- Description string
- A description of the pipe.
- DesiredState PipeRequestedPipeState
- The state the pipe should be in.
- Enrichment string
- The ARN of the enrichment resource.
- EnrichmentParameters PipeEnrichmentParametersArgs
- The parameters required to set up enrichment on your pipe.
- KmsKeyIdentifier string
- The identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN. To update a pipe that is using the default AWS owned key to use a customer managed key instead, or to update a pipe that is using one customer managed key to use a different customer managed key, specify a customer managed key identifier. To update a pipe that is using a customer managed key to use the default AWS owned key, specify an empty string. For more information, see Managing keys in the AWS Key Management Service Developer Guide.
- LogConfiguration PipeLogConfigurationArgs
- The logging configuration settings for the pipe.
- Name string
- The name of the pipe.
- SourceParameters PipeSourceParametersArgs
- The parameters required to set up a source for your pipe.
- Tags map[string]string
- The list of key-value pairs to associate with the pipe.
- TargetParameters PipeTargetParametersArgs
- The parameters required to set up a target for your pipe. For more information about pipe target parameters, including how to use dynamic path parameters, see Target parameters in the Amazon EventBridge User Guide.
- roleArn String
- The ARN of the role that allows the pipe to send data to the target.
- source String
- The ARN of the source resource.
- target String
- The ARN of the target resource.
- description String
- A description of the pipe.
- desiredState PipeRequestedPipeState
- The state the pipe should be in.
- enrichment String
- The ARN of the enrichment resource.
- enrichmentParameters PipeEnrichmentParameters
- The parameters required to set up enrichment on your pipe.
- kmsKeyIdentifier String
- The identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN. To update a pipe that is using the default AWS owned key to use a customer managed key instead, or to update a pipe that is using one customer managed key to use a different customer managed key, specify a customer managed key identifier. To update a pipe that is using a customer managed key to use the default AWS owned key, specify an empty string. For more information, see Managing keys in the AWS Key Management Service Developer Guide.
- logConfiguration PipeLogConfiguration
- The logging configuration settings for the pipe.
- name String
- The name of the pipe.
- sourceParameters PipeSourceParameters
- The parameters required to set up a source for your pipe.
- tags Map<String,String>
- The list of key-value pairs to associate with the pipe.
- targetParameters PipeTargetParameters
- The parameters required to set up a target for your pipe. For more information about pipe target parameters, including how to use dynamic path parameters, see Target parameters in the Amazon EventBridge User Guide.
- roleArn string
- The ARN of the role that allows the pipe to send data to the target.
- source string
- The ARN of the source resource.
- target string
- The ARN of the target resource.
- description string
- A description of the pipe.
- desiredState PipeRequestedPipeState
- The state the pipe should be in.
- enrichment string
- The ARN of the enrichment resource.
- enrichmentParameters PipeEnrichmentParameters
- The parameters required to set up enrichment on your pipe.
- kmsKeyIdentifier string
- The identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN. To update a pipe that is using the default AWS owned key to use a customer managed key instead, or to update a pipe that is using one customer managed key to use a different customer managed key, specify a customer managed key identifier. To update a pipe that is using a customer managed key to use the default AWS owned key, specify an empty string. For more information, see Managing keys in the AWS Key Management Service Developer Guide.
- logConfiguration PipeLogConfiguration
- The logging configuration settings for the pipe.
- name string
- The name of the pipe.
- sourceParameters PipeSourceParameters
- The parameters required to set up a source for your pipe.
- tags {[key: string]: string}
- The list of key-value pairs to associate with the pipe.
- targetParameters PipeTargetParameters
- The parameters required to set up a target for your pipe. For more information about pipe target parameters, including how to use dynamic path parameters, see Target parameters in the Amazon EventBridge User Guide.
- role_arn str
- The ARN of the role that allows the pipe to send data to the target.
- source str
- The ARN of the source resource.
- target str
- The ARN of the target resource.
- description str
- A description of the pipe.
- desired_state PipeRequestedPipeState
- The state the pipe should be in.
- enrichment str
- The ARN of the enrichment resource.
- enrichment_parameters PipeEnrichmentParametersArgs
- The parameters required to set up enrichment on your pipe.
- kms_key_identifier str
- The identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN. To update a pipe that is using the default AWS owned key to use a customer managed key instead, or to update a pipe that is using one customer managed key to use a different customer managed key, specify a customer managed key identifier. To update a pipe that is using a customer managed key to use the default AWS owned key, specify an empty string. For more information, see Managing keys in the AWS Key Management Service Developer Guide.
- log_configuration PipeLogConfigurationArgs
- The logging configuration settings for the pipe.
- name str
- The name of the pipe.
- source_parameters PipeSourceParametersArgs
- The parameters required to set up a source for your pipe.
- tags Mapping[str, str]
- The list of key-value pairs to associate with the pipe.
- target_parameters PipeTargetParametersArgs
- The parameters required to set up a target for your pipe. For more information about pipe target parameters, including how to use dynamic path parameters, see Target parameters in the Amazon EventBridge User Guide.
- roleArn String
- The ARN of the role that allows the pipe to send data to the target.
- source String
- The ARN of the source resource.
- target String
- The ARN of the target resource.
- description String
- A description of the pipe.
- desiredState "RUNNING" | "STOPPED"
- The state the pipe should be in.
- enrichment String
- The ARN of the enrichment resource.
- enrichmentParameters Property Map
- The parameters required to set up enrichment on your pipe.
- kmsKeyIdentifier String
- The identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN. To update a pipe that is using the default AWS owned key to use a customer managed key instead, or to update a pipe that is using one customer managed key to use a different customer managed key, specify a customer managed key identifier. To update a pipe that is using a customer managed key to use the default AWS owned key, specify an empty string. For more information, see Managing keys in the AWS Key Management Service Developer Guide.
- logConfiguration Property Map
- The logging configuration settings for the pipe.
- name String
- The name of the pipe.
- sourceParameters Property Map
- The parameters required to set up a source for your pipe.
- tags Map<String>
- The list of key-value pairs to associate with the pipe.
- targetParameters Property Map
- The parameters required to set up a target for your pipe. For more information about pipe target parameters, including how to use dynamic path parameters, see Target parameters in the Amazon EventBridge User Guide.
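As a sketch of how the parameter objects compose, the following Python program batches up to 10 SQS messages per invocation and reshapes each event before delivery. The ARNs are placeholders, and PipeSourceSqsQueueParametersArgs is assumed to mirror the SqsQueueParameters section of the AWS::Pipes::Pipe schema:
import pulumi_aws_native as aws_native
pipe = aws_native.pipes.Pipe("batchedPipe",
    role_arn="arn:aws:iam::123456789123:role/Pipe-Dev-All-Targets-Dummy-Execution-Role",
    source="arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
    target="arn:aws:states:us-east-1:123456789123:stateMachine:PipeTargetStateMachine",
    # Read up to 10 messages at a time, waiting at most 30 seconds per batch.
    source_parameters=aws_native.pipes.PipeSourceParametersArgs(
        sqs_queue_parameters=aws_native.pipes.PipeSourceSqsQueueParametersArgs(
            batch_size=10,
            maximum_batching_window_in_seconds=30)),
    # Reshape each event before it is delivered to the target.
    target_parameters=aws_native.pipes.PipeTargetParametersArgs(
        input_template="{\"body\": <$.body>}"))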
Outputs
All input properties are implicitly available as output properties. Additionally, the Pipe resource produces the following output properties:
- Arn string
- The ARN of the pipe.
- CreationTime string
- The time the pipe was created.
- CurrentState Pulumi.AwsNative.Pipes.PipeState
- The state the pipe is in.
- Id string
- The provider-assigned unique ID for this managed resource.
- LastModifiedTime string
- When the pipe was last updated, in ISO-8601 format (YYYY-MM-DDThh:mm:ss.sTZD).
- StateReason string
- The reason the pipe is in its current state.
- Arn string
- The ARN of the pipe.
- CreationTime string
- The time the pipe was created.
- CurrentState PipeStateEnum
- The state the pipe is in.
- Id string
- The provider-assigned unique ID for this managed resource.
- LastModifiedTime string
- When the pipe was last updated, in ISO-8601 format (YYYY-MM-DDThh:mm:ss.sTZD).
- StateReason string
- The reason the pipe is in its current state.
- arn String
- The ARN of the pipe.
- creationTime String
- The time the pipe was created.
- currentState PipeState 
- The state the pipe is in.
- id String
- The provider-assigned unique ID for this managed resource.
- lastModifiedTime String
- When the pipe was last updated, in ISO-8601 format (YYYY-MM-DDThh:mm:ss.sTZD).
- stateReason String
- The reason the pipe is in its current state.
- arn string
- The ARN of the pipe.
- creationTime string
- The time the pipe was created.
- currentState PipeState 
- The state the pipe is in.
- id string
- The provider-assigned unique ID for this managed resource.
- lastModifiedTime string
- When the pipe was last updated, in ISO-8601 format (YYYY-MM-DDThh:mm:ss.sTZD).
- stateReason string
- The reason the pipe is in its current state.
- arn str
- The ARN of the pipe.
- creation_time str
- The time the pipe was created.
- current_state PipeState 
- The state the pipe is in.
- id str
- The provider-assigned unique ID for this managed resource.
- last_modified_time str
- When the pipe was last updated, in ISO-8601 format (YYYY-MM-DDThh:mm:ss.sTZD).
- state_reason str
- The reason the pipe is in its current state.
- arn String
- The ARN of the pipe.
- creationTime String
- The time the pipe was created.
- currentState "RUNNING" | "STOPPED" | "CREATING" | "UPDATING" | "DELETING" | "STARTING" | "STOPPING" | "CREATE_FAILED" | "UPDATE_FAILED" | "START_FAILED" | "STOP_FAILED" | "DELETE_FAILED" | "CREATE_ROLLBACK_FAILED" | "DELETE_ROLLBACK_FAILED" | "UPDATE_ROLLBACK_FAILED"
- The state the pipe is in.
- id String
- The provider-assigned unique ID for this managed resource.
- lastModifiedTime String
- When the pipe was last updated, in ISO-8601 format (YYYY-MM-DDThh:mm:ss.sTZD).
- stateReason String
- The reason the pipe is in its current state.
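Since output properties are only resolved once the deployment has run, a common pattern is to surface them as stack outputs, as in this short Python sketch (the ARNs are the placeholders from the examples above):
import pulumi
import pulumi_aws_native as aws_native
test_pipe = aws_native.pipes.Pipe("testPipe",
    role_arn="arn:aws:iam::123456789123:role/Pipe-Dev-All-Targets-Dummy-Execution-Role",
    source="arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
    target="arn:aws:states:us-east-1:123456789123:stateMachine:PipeTargetStateMachine")
# Export the resolved output properties as stack outputs.
pulumi.export("pipeArn", test_pipe.arn)
pulumi.export("pipeCurrentState", test_pipe.current_state)
pulumi.export("pipeStateReason", test_pipe.state_reason)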
Supporting Types
PipeAssignPublicIp, PipeAssignPublicIpArgs        
- Enabled
- ENABLED
- Disabled
- DISABLED
- PipeAssignPublicIpEnabled
- ENABLED
- PipeAssignPublicIpDisabled
- DISABLED
- Enabled
- ENABLED
- Disabled
- DISABLED
- Enabled
- ENABLED
- Disabled
- DISABLED
- ENABLED
- ENABLED
- DISABLED
- DISABLED
- "ENABLED"
- ENABLED
- "DISABLED"
- DISABLED
PipeAwsVpcConfiguration, PipeAwsVpcConfigurationArgs        
- Subnets List<string>
- Specifies the subnets associated with the task. These subnets must all be in the same VPC. You can specify as many as 16 subnets.
- AssignPublicIp Pulumi.AwsNative.Pipes.PipeAssignPublicIp
- Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE.
- SecurityGroups List<string>
- Specifies the security groups associated with the task. These security groups must all be in the same VPC. You can specify as many as five security groups. If you do not specify a security group, the default security group for the VPC is used.
- Subnets []string
- Specifies the subnets associated with the task. These subnets must all be in the same VPC. You can specify as many as 16 subnets.
- AssignPublicIp PipeAssignPublicIp
- Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE.
- SecurityGroups []string
- Specifies the security groups associated with the task. These security groups must all be in the same VPC. You can specify as many as five security groups. If you do not specify a security group, the default security group for the VPC is used.
- subnets List<String>
- Specifies the subnets associated with the task. These subnets must all be in the same VPC. You can specify as many as 16 subnets.
- assignPublicIp PipeAssignPublicIp
- Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE.
- securityGroups List<String>
- Specifies the security groups associated with the task. These security groups must all be in the same VPC. You can specify as many as five security groups. If you do not specify a security group, the default security group for the VPC is used.
- subnets string[]
- Specifies the subnets associated with the task. These subnets must all be in the same VPC. You can specify as many as 16 subnets.
- assignPublicIp PipeAssignPublicIp
- Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE.
- securityGroups string[]
- Specifies the security groups associated with the task. These security groups must all be in the same VPC. You can specify as many as five security groups. If you do not specify a security group, the default security group for the VPC is used.
- subnets Sequence[str]
- Specifies the subnets associated with the task. These subnets must all be in the same VPC. You can specify as many as 16 subnets.
- assign_public_ip PipeAssignPublicIp
- Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE.
- security_groups Sequence[str]
- Specifies the security groups associated with the task. These security groups must all be in the same VPC. You can specify as many as five security groups. If you do not specify a security group, the default security group for the VPC is used.
- subnets List<String>
- Specifies the subnets associated with the task. These subnets must all be in the same VPC. You can specify as many as 16 subnets.
- assignPublicIp "ENABLED" | "DISABLED"
- Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE.
- securityGroups List<String>
- Specifies the security groups associated with the task. These security groups must all be in the same VPC. You can specify as many as five security groups. If you do not specify a security group, the default security group for the VPC is used.
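A minimal Python sketch of constructing this type for a Fargate-based ECS target; the subnet and security group IDs are placeholders, and the PipeNetworkConfigurationArgs wrapper is assumed to mirror the NetworkConfiguration section of the AWS::Pipes::Pipe schema:
import pulumi_aws_native as aws_native
# awsvpc networking for a pipe target that launches ECS tasks on Fargate.
vpc_config = aws_native.pipes.PipeAwsVpcConfigurationArgs(
    subnets=["subnet-0123456789abcdef0", "subnet-0fedcba9876543210"],
    security_groups=["sg-0123456789abcdef0"],
    # ENABLED is only valid when the ECS launch type is FARGATE.
    assign_public_ip=aws_native.pipes.PipeAssignPublicIp.DISABLED)
network_configuration = aws_native.pipes.PipeNetworkConfigurationArgs(
    awsvpc_configuration=vpc_config)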
PipeBatchArrayProperties, PipeBatchArrayPropertiesArgs        
- Size int
- The size of the array, if this is an array batch job.
- Size int
- The size of the array, if this is an array batch job.
- size Integer
- The size of the array, if this is an array batch job.
- size number
- The size of the array, if this is an array batch job.
- size int
- The size of the array, if this is an array batch job.
- size Number
- The size of the array, if this is an array batch job.
PipeBatchContainerOverrides, PipeBatchContainerOverridesArgs        
- Command List<string>
- The command to send to the container that overrides the default command from the Docker image or the task definition.
- Environment List<Pulumi.AwsNative.Pipes.Inputs.PipeBatchEnvironmentVariable>
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. Environment variables cannot start with "AWS Batch"; this naming convention is reserved for variables that AWS Batch sets.
- InstanceType string
- The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
- ResourceRequirements List<Pulumi.AwsNative.Pipes.Inputs.PipeBatchResourceRequirement>
- The type and amount of resources to assign to a container. This overrides the settings in the job definition. The supported resources include GPU, MEMORY, and VCPU.
- Command []string
- The command to send to the container that overrides the default command from the Docker image or the task definition.
- Environment []PipeBatchEnvironmentVariable
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. Environment variables cannot start with "AWS Batch"; this naming convention is reserved for variables that AWS Batch sets.
- InstanceType string
- The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
- ResourceRequirements []PipeBatchResourceRequirement
- The type and amount of resources to assign to a container. This overrides the settings in the job definition. The supported resources include GPU, MEMORY, and VCPU.
- command List<String>
- The command to send to the container that overrides the default command from the Docker image or the task definition.
- environment List<PipeBatchEnvironmentVariable>
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. Environment variables cannot start with "AWS Batch"; this naming convention is reserved for variables that AWS Batch sets.
- instanceType String
- The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
- resourceRequirements List<PipeBatchResourceRequirement>
- The type and amount of resources to assign to a container. This overrides the settings in the job definition. The supported resources include GPU, MEMORY, and VCPU.
- command string[]
- The command to send to the container that overrides the default command from the Docker image or the task definition.
- environment PipeBatchEnvironmentVariable[]
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. Environment variables cannot start with "AWS Batch"; this naming convention is reserved for variables that AWS Batch sets.
- instanceType string
- The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
- resourceRequirements PipeBatchResourceRequirement[]
- The type and amount of resources to assign to a container. This overrides the settings in the job definition. The supported resources include GPU, MEMORY, and VCPU.
- command Sequence[str]
- The command to send to the container that overrides the default command from the Docker image or the task definition.
- environment Sequence[PipeBatchEnvironmentVariable]
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. Environment variables cannot start with "AWS Batch"; this naming convention is reserved for variables that AWS Batch sets.
- instance_type str
- The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
- resource_requirements Sequence[PipeBatchResourceRequirement]
- The type and amount of resources to assign to a container. This overrides the settings in the job definition. The supported resources include GPU, MEMORY, and VCPU.
- command List<String>
- The command to send to the container that overrides the default command from the Docker image or the task definition.
- environment List<Property Map>
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. Environment variables cannot start with "AWS Batch"; this naming convention is reserved for variables that AWS Batch sets.
- instanceType String
- The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
- resourceRequirements List<Property Map>
- The type and amount of resources to assign to a container. This overrides the settings in the job definition. The supported resources include GPU, MEMORY, and VCPU.
PipeBatchEnvironmentVariable, PipeBatchEnvironmentVariableArgs        
PipeBatchJobDependency, PipeBatchJobDependencyArgs        
- JobId string
- The job ID of the AWS Batch job that's associated with this dependency.
- Type Pulumi.AwsNative.Pipes.PipeBatchJobDependencyType
- The type of the job dependency.
- JobId string
- The job ID of the AWS Batch job that's associated with this dependency.
- Type PipeBatchJobDependencyType
- The type of the job dependency.
- jobId String
- The job ID of the AWS Batch job that's associated with this dependency.
- type PipeBatchJobDependencyType
- The type of the job dependency.
- jobId string
- The job ID of the AWS Batch job that's associated with this dependency.
- type PipeBatchJobDependencyType
- The type of the job dependency.
- job_id str
- The job ID of the AWS Batch job that's associated with this dependency.
- type PipeBatchJobDependencyType
- The type of the job dependency.
- jobId String
- The job ID of the AWS Batch job that's associated with this dependency.
- type "N_TO_N" | "SEQUENTIAL"
- The type of the job dependency.
PipeBatchJobDependencyType, PipeBatchJobDependencyTypeArgs          
- NToN 
- N_TO_N
- Sequential
- SEQUENTIAL
- PipeBatchJobDependencyTypeNToN
- N_TO_N
- PipeBatchJobDependencyTypeSequential
- SEQUENTIAL
- NToN 
- N_TO_N
- Sequential
- SEQUENTIAL
- NToN 
- N_TO_N
- Sequential
- SEQUENTIAL
- N_TO_N
- N_TO_N
- SEQUENTIAL
- SEQUENTIAL
- "N_TO_N"
- N_TO_N
- "SEQUENTIAL"
- SEQUENTIAL
PipeBatchResourceRequirement, PipeBatchResourceRequirementArgs        
- Type Pulumi.AwsNative.Pipes.PipeBatchResourceRequirementType
- The type of resource to assign to a container. The supported resources include GPU, MEMORY, and VCPU.
- Value string
- The quantity of the specified resource to reserve for the container. The values vary based on the type specified.
  - type="GPU": The number of physical GPUs to reserve for the container. Make sure that the number of GPUs reserved for all containers in a job doesn't exceed the number of available GPUs on the compute resource that the job is launched on. GPUs aren't available for jobs that are running on Fargate resources.
  - type="MEMORY": The memory hard limit (in MiB) present to the container. This parameter is supported for jobs that are running on EC2 resources. If your container attempts to exceed the memory specified, the container is terminated. This parameter maps to Memory in the Create a container section of the Docker Remote API and the --memory option to docker run. You must specify at least 4 MiB of memory for a job. This is required but can be specified in several places for multi-node parallel (MNP) jobs; it must be specified for each node at least once. If you're trying to maximize your resource utilization by providing your jobs as much memory as possible for a particular instance type, see Memory management in the AWS Batch User Guide. For jobs that are running on Fargate resources, value is the hard limit (in MiB) and must match one of the supported values, and the VCPU value must be one of the values supported for that memory value:
    - value = 512: VCPU = 0.25
    - value = 1024: VCPU = 0.25 or 0.5
    - value = 2048: VCPU = 0.25, 0.5, or 1
    - value = 3072: VCPU = 0.5 or 1
    - value = 4096: VCPU = 0.5, 1, or 2
    - value = 5120, 6144, or 7168: VCPU = 1 or 2
    - value = 8192: VCPU = 1, 2, 4, or 8
    - value = 9216, 10240, 11264, 12288, 13312, 14336, or 15360: VCPU = 2 or 4
    - value = 16384: VCPU = 2, 4, or 8
    - value = 17408, 18432, 19456, 21504, 22528, 23552, 25600, 26624, 27648, 29696, or 30720: VCPU = 4
    - value = 20480, 24576, or 28672: VCPU = 4 or 8
    - value = 36864, 45056, 53248, or 61440: VCPU = 8
    - value = 32768, 40960, 49152, or 57344: VCPU = 8 or 16
    - value = 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880: VCPU = 16
  - type="VCPU": The number of vCPUs reserved for the container. This parameter maps to CpuShares in the Create a container section of the Docker Remote API and the --cpu-shares option to docker run. Each vCPU is equivalent to 1,024 CPU shares. For EC2 resources, you must specify at least one vCPU. This is required but can be specified in several places; it must be specified for each node at least once. The default for the Fargate On-Demand vCPU resource count quota is 6 vCPUs. For more information about Fargate quotas, see AWS Fargate quotas in the AWS General Reference. For jobs that are running on Fargate resources, value must match one of the supported values, and the MEMORY value must be one of the values supported for that VCPU value. The supported values are 0.25, 0.5, 1, 2, 4, 8, and 16:
    - value = 0.25: MEMORY = 512, 1024, or 2048
    - value = 0.5: MEMORY = 1024, 2048, 3072, or 4096
    - value = 1: MEMORY = 2048, 3072, 4096, 5120, 6144, 7168, or 8192
    - value = 2: MEMORY = 4096, 5120, 6144, 7168, 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, or 16384
    - value = 4: MEMORY = 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, 16384, 17408, 18432, 19456, 20480, 21504, 22528, 23552, 24576, 25600, 26624, 27648, 28672, 29696, or 30720
    - value = 8: MEMORY = 16384, 20480, 24576, 28672, 32768, 36864, 40960, 45056, 49152, 53248, 57344, or 61440
    - value = 16: MEMORY = 32768, 40960, 49152, 57344, 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880
- Type PipeBatchResourceRequirementType
- The type of resource to assign to a container. The supported resources include GPU, MEMORY, and VCPU.
- Value string
- The quantity of the specified resource to reserve for the container. The values vary based on the type specified.
  - type="GPU": The number of physical GPUs to reserve for the container. Make sure that the number of GPUs reserved for all containers in a job doesn't exceed the number of available GPUs on the compute resource that the job is launched on. GPUs aren't available for jobs that are running on Fargate resources.
  - type="MEMORY": The memory hard limit (in MiB) present to the container. This parameter is supported for jobs that are running on EC2 resources. If your container attempts to exceed the memory specified, the container is terminated. This parameter maps to Memory in the Create a container section of the Docker Remote API and the --memory option to docker run. You must specify at least 4 MiB of memory for a job. This is required but can be specified in several places for multi-node parallel (MNP) jobs; it must be specified for each node at least once. If you're trying to maximize your resource utilization by providing your jobs as much memory as possible for a particular instance type, see Memory management in the AWS Batch User Guide. For jobs that are running on Fargate resources, value is the hard limit (in MiB) and must match one of the supported values, and the VCPU value must be one of the values supported for that memory value:
    - value = 512: VCPU = 0.25
    - value = 1024: VCPU = 0.25 or 0.5
    - value = 2048: VCPU = 0.25, 0.5, or 1
    - value = 3072: VCPU = 0.5 or 1
    - value = 4096: VCPU = 0.5, 1, or 2
    - value = 5120, 6144, or 7168: VCPU = 1 or 2
    - value = 8192: VCPU = 1, 2, 4, or 8
    - value = 9216, 10240, 11264, 12288, 13312, 14336, or 15360: VCPU = 2 or 4
    - value = 16384: VCPU = 2, 4, or 8
    - value = 17408, 18432, 19456, 21504, 22528, 23552, 25600, 26624, 27648, 29696, or 30720: VCPU = 4
    - value = 20480, 24576, or 28672: VCPU = 4 or 8
    - value = 36864, 45056, 53248, or 61440: VCPU = 8
    - value = 32768, 40960, 49152, or 57344: VCPU = 8 or 16
    - value = 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880: VCPU = 16
  - type="VCPU": The number of vCPUs reserved for the container. This parameter maps to CpuShares in the Create a container section of the Docker Remote API and the --cpu-shares option to docker run. Each vCPU is equivalent to 1,024 CPU shares. For EC2 resources, you must specify at least one vCPU. This is required but can be specified in several places; it must be specified for each node at least once. The default for the Fargate On-Demand vCPU resource count quota is 6 vCPUs. For more information about Fargate quotas, see AWS Fargate quotas in the AWS General Reference. For jobs that are running on Fargate resources, value must match one of the supported values, and the MEMORY value must be one of the values supported for that VCPU value. The supported values are 0.25, 0.5, 1, 2, 4, 8, and 16:
    - value = 0.25: MEMORY = 512, 1024, or 2048
    - value = 0.5: MEMORY = 1024, 2048, 3072, or 4096
    - value = 1: MEMORY = 2048, 3072, 4096, 5120, 6144, 7168, or 8192
    - value = 2: MEMORY = 4096, 5120, 6144, 7168, 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, or 16384
    - value = 4: MEMORY = 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, 16384, 17408, 18432, 19456, 20480, 21504, 22528, 23552, 24576, 25600, 26624, 27648, 28672, 29696, or 30720
    - value = 8: MEMORY = 16384, 20480, 24576, 28672, 32768, 36864, 40960, 45056, 49152, 53248, 57344, or 61440
    - value = 16: MEMORY = 32768, 40960, 49152, 57344, 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880
- type PipeBatchResourceRequirementType
- The type of resource to assign to a container. The supported resources include GPU, MEMORY, and VCPU.
- value String
- The quantity of the specified resource to reserve for the container. The values vary based on the type specified.
  - type="GPU": The number of physical GPUs to reserve for the container. Make sure that the number of GPUs reserved for all containers in a job doesn't exceed the number of available GPUs on the compute resource that the job is launched on. GPUs aren't available for jobs that are running on Fargate resources.
  - type="MEMORY": The memory hard limit (in MiB) present to the container. This parameter is supported for jobs that are running on EC2 resources. If your container attempts to exceed the memory specified, the container is terminated. This parameter maps to Memory in the Create a container section of the Docker Remote API and the --memory option to docker run. You must specify at least 4 MiB of memory for a job. This is required but can be specified in several places for multi-node parallel (MNP) jobs; it must be specified for each node at least once. If you're trying to maximize your resource utilization by providing your jobs as much memory as possible for a particular instance type, see Memory management in the AWS Batch User Guide. For jobs that are running on Fargate resources, value is the hard limit (in MiB) and must match one of the supported values, and the VCPU value must be one of the values supported for that memory value:
    - value = 512: VCPU = 0.25
    - value = 1024: VCPU = 0.25 or 0.5
    - value = 2048: VCPU = 0.25, 0.5, or 1
    - value = 3072: VCPU = 0.5 or 1
    - value = 4096: VCPU = 0.5, 1, or 2
    - value = 5120, 6144, or 7168: VCPU = 1 or 2
    - value = 8192: VCPU = 1, 2, 4, or 8
    - value = 9216, 10240, 11264, 12288, 13312, 14336, or 15360: VCPU = 2 or 4
    - value = 16384: VCPU = 2, 4, or 8
    - value = 17408, 18432, 19456, 21504, 22528, 23552, 25600, 26624, 27648, 29696, or 30720: VCPU = 4
    - value = 20480, 24576, or 28672: VCPU = 4 or 8
    - value = 36864, 45056, 53248, or 61440: VCPU = 8
    - value = 32768, 40960, 49152, or 57344: VCPU = 8 or 16
    - value = 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880: VCPU = 16
  - type="VCPU": The number of vCPUs reserved for the container. This parameter maps to CpuShares in the Create a container section of the Docker Remote API and the --cpu-shares option to docker run. Each vCPU is equivalent to 1,024 CPU shares. For EC2 resources, you must specify at least one vCPU. This is required but can be specified in several places; it must be specified for each node at least once. The default for the Fargate On-Demand vCPU resource count quota is 6 vCPUs. For more information about Fargate quotas, see AWS Fargate quotas in the AWS General Reference. For jobs that are running on Fargate resources, value must match one of the supported values, and the MEMORY value must be one of the values supported for that VCPU value. The supported values are 0.25, 0.5, 1, 2, 4, 8, and 16:
    - value = 0.25: MEMORY = 512, 1024, or 2048
    - value = 0.5: MEMORY = 1024, 2048, 3072, or 4096
    - value = 1: MEMORY = 2048, 3072, 4096, 5120, 6144, 7168, or 8192
    - value = 2: MEMORY = 4096, 5120, 6144, 7168, 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, or 16384
    - value = 4: MEMORY = 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, 16384, 17408, 18432, 19456, 20480, 21504, 22528, 23552, 24576, 25600, 26624, 27648, 28672, 29696, or 30720
    - value = 8: MEMORY = 16384, 20480, 24576, 28672, 32768, 36864, 40960, 45056, 49152, 53248, 57344, or 61440
    - value = 16: MEMORY = 32768, 40960, 49152, 57344, 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880
- type PipeBatchResourceRequirementType
- The type of resource to assign to a container. The supported resources include GPU, MEMORY, and VCPU.
- value string
- The quantity of the specified resource to reserve for the container. The values vary based on the type specified.
  - type="GPU": The number of physical GPUs to reserve for the container. Make sure that the number of GPUs reserved for all containers in a job doesn't exceed the number of available GPUs on the compute resource that the job is launched on. GPUs aren't available for jobs that are running on Fargate resources.
  - type="MEMORY": The memory hard limit (in MiB) present to the container. This parameter is supported for jobs that are running on EC2 resources. If your container attempts to exceed the memory specified, the container is terminated. This parameter maps to Memory in the Create a container section of the Docker Remote API and the --memory option to docker run. You must specify at least 4 MiB of memory for a job. This is required but can be specified in several places for multi-node parallel (MNP) jobs; it must be specified for each node at least once. If you're trying to maximize your resource utilization by providing your jobs as much memory as possible for a particular instance type, see Memory management in the AWS Batch User Guide. For jobs that are running on Fargate resources, value is the hard limit (in MiB) and must match one of the supported values, and the VCPU value must be one of the values supported for that memory value:
    - value = 512: VCPU = 0.25
    - value = 1024: VCPU = 0.25 or 0.5
    - value = 2048: VCPU = 0.25, 0.5, or 1
    - value = 3072: VCPU = 0.5 or 1
    - value = 4096: VCPU = 0.5, 1, or 2
    - value = 5120, 6144, or 7168: VCPU = 1 or 2
    - value = 8192: VCPU = 1, 2, 4, or 8
    - value = 9216, 10240, 11264, 12288, 13312, 14336, or 15360: VCPU = 2 or 4
    - value = 16384: VCPU = 2, 4, or 8
    - value = 17408, 18432, 19456, 21504, 22528, 23552, 25600, 26624, 27648, 29696, or 30720: VCPU = 4
    - value = 20480, 24576, or 28672: VCPU = 4 or 8
    - value = 36864, 45056, 53248, or 61440: VCPU = 8
    - value = 32768, 40960, 49152, or 57344: VCPU = 8 or 16
    - value = 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880: VCPU = 16
  - type="VCPU": The number of vCPUs reserved for the container. This parameter maps to CpuShares in the Create a container section of the Docker Remote API and the --cpu-shares option to docker run. Each vCPU is equivalent to 1,024 CPU shares. For EC2 resources, you must specify at least one vCPU. This is required but can be specified in several places; it must be specified for each node at least once. The default for the Fargate On-Demand vCPU resource count quota is 6 vCPUs. For more information about Fargate quotas, see AWS Fargate quotas in the AWS General Reference. For jobs that are running on Fargate resources, value must match one of the supported values, and the MEMORY value must be one of the values supported for that VCPU value. The supported values are 0.25, 0.5, 1, 2, 4, 8, and 16:
    - value = 0.25: MEMORY = 512, 1024, or 2048
    - value = 0.5: MEMORY = 1024, 2048, 3072, or 4096
    - value = 1: MEMORY = 2048, 3072, 4096, 5120, 6144, 7168, or 8192
    - value = 2: MEMORY = 4096, 5120, 6144, 7168, 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, or 16384
    - value = 4: MEMORY = 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, 16384, 17408, 18432, 19456, 20480, 21504, 22528, 23552, 24576, 25600, 26624, 27648, 28672, 29696, or 30720
    - value = 8: MEMORY = 16384, 20480, 24576, 28672, 32768, 36864, 40960, 45056, 49152, 53248, 57344, or 61440
    - value = 16: MEMORY = 32768, 40960, 49152, 57344, 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880
- type PipeBatchResourceRequirementType
- The type of resource to assign to a container. The supported resources include GPU, MEMORY, and VCPU.
- value str
- The quantity of the specified resource to reserve for the container. The values vary based on the type specified.
  - type="GPU": The number of physical GPUs to reserve for the container. Make sure that the number of GPUs reserved for all containers in a job doesn't exceed the number of available GPUs on the compute resource that the job is launched on. GPUs aren't available for jobs that are running on Fargate resources.
  - type="MEMORY": The memory hard limit (in MiB) present to the container. This parameter is supported for jobs that are running on EC2 resources. If your container attempts to exceed the memory specified, the container is terminated. This parameter maps to Memory in the Create a container section of the Docker Remote API and the --memory option to docker run. You must specify at least 4 MiB of memory for a job. This is required but can be specified in several places for multi-node parallel (MNP) jobs; it must be specified for each node at least once. If you're trying to maximize your resource utilization by providing your jobs as much memory as possible for a particular instance type, see Memory management in the AWS Batch User Guide. For jobs that are running on Fargate resources, value is the hard limit (in MiB) and must match one of the supported values, and the VCPU value must be one of the values supported for that memory value:
    - value = 512: VCPU = 0.25
    - value = 1024: VCPU = 0.25 or 0.5
    - value = 2048: VCPU = 0.25, 0.5, or 1
    - value = 3072: VCPU = 0.5 or 1
    - value = 4096: VCPU = 0.5, 1, or 2
    - value = 5120, 6144, or 7168: VCPU = 1 or 2
    - value = 8192: VCPU = 1, 2, 4, or 8
    - value = 9216, 10240, 11264, 12288, 13312, 14336, or 15360: VCPU = 2 or 4
    - value = 16384: VCPU = 2, 4, or 8
    - value = 17408, 18432, 19456, 21504, 22528, 23552, 25600, 26624, 27648, 29696, or 30720: VCPU = 4
    - value = 20480, 24576, or 28672: VCPU = 4 or 8
    - value = 36864, 45056, 53248, or 61440: VCPU = 8
    - value = 32768, 40960, 49152, or 57344: VCPU = 8 or 16
    - value = 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880: VCPU = 16
  - type="VCPU": The number of vCPUs reserved for the container. This parameter maps to CpuShares in the Create a container section of the Docker Remote API and the --cpu-shares option to docker run. Each vCPU is equivalent to 1,024 CPU shares. For EC2 resources, you must specify at least one vCPU. This is required but can be specified in several places; it must be specified for each node at least once. The default for the Fargate On-Demand vCPU resource count quota is 6 vCPUs. For more information about Fargate quotas, see AWS Fargate quotas in the AWS General Reference. For jobs that are running on Fargate resources, value must match one of the supported values, and the MEMORY value must be one of the values supported for that VCPU value. The supported values are 0.25, 0.5, 1, 2, 4, 8, and 16:
    - value = 0.25: MEMORY = 512, 1024, or 2048
    - value = 0.5: MEMORY = 1024, 2048, 3072, or 4096
    - value = 1: MEMORY = 2048, 3072, 4096, 5120, 6144, 7168, or 8192
    - value = 2: MEMORY = 4096, 5120, 6144, 7168, 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, or 16384
    - value = 4: MEMORY = 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, 16384, 17408, 18432, 19456, 20480, 21504, 22528, 23552, 24576, 25600, 26624, 27648, 28672, 29696, or 30720
    - value = 8: MEMORY = 16384, 20480, 24576, 28672, 32768, 36864, 40960, 45056, 49152, 53248, 57344, or 61440
    - value = 16: MEMORY = 32768, 40960, 49152, 57344, 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880
- type "GPU" | "MEMORY" | "VCPU"
- The type of resource to assign to a container. The supported resources include GPU, MEMORY, and VCPU.
- value String
- The quantity of the specified resource to reserve for the container. The values vary based on the type specified.
  - type="GPU": The number of physical GPUs to reserve for the container. Make sure that the number of GPUs reserved for all containers in a job doesn't exceed the number of available GPUs on the compute resource that the job is launched on. GPUs aren't available for jobs that are running on Fargate resources.
  - type="MEMORY": The memory hard limit (in MiB) present to the container. This parameter is supported for jobs that are running on EC2 resources. If your container attempts to exceed the memory specified, the container is terminated. This parameter maps to Memory in the Create a container section of the Docker Remote API and the --memory option to docker run. You must specify at least 4 MiB of memory for a job. This is required but can be specified in several places for multi-node parallel (MNP) jobs; it must be specified for each node at least once. If you're trying to maximize your resource utilization by providing your jobs as much memory as possible for a particular instance type, see Memory management in the AWS Batch User Guide. For jobs that are running on Fargate resources, value is the hard limit (in MiB) and must match one of the supported values, and the VCPU value must be one of the values supported for that memory value:
    - value = 512: VCPU = 0.25
    - value = 1024: VCPU = 0.25 or 0.5
    - value = 2048: VCPU = 0.25, 0.5, or 1
    - value = 3072: VCPU = 0.5 or 1
    - value = 4096: VCPU = 0.5, 1, or 2
    - value = 5120, 6144, or 7168: VCPU = 1 or 2
    - value = 8192: VCPU = 1, 2, 4, or 8
    - value = 9216, 10240, 11264, 12288, 13312, 14336, or 15360: VCPU = 2 or 4
    - value = 16384: VCPU = 2, 4, or 8
    - value = 17408, 18432, 19456, 21504, 22528, 23552, 25600, 26624, 27648, 29696, or 30720: VCPU = 4
    - value = 20480, 24576, or 28672: VCPU = 4 or 8
    - value = 36864, 45056, 53248, or 61440: VCPU = 8
    - value = 32768, 40960, 49152, or 57344: VCPU = 8 or 16
    - value = 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880: VCPU = 16
  - type="VCPU": The number of vCPUs reserved for the container. This parameter maps to CpuShares in the Create a container section of the Docker Remote API and the --cpu-shares option to docker run. Each vCPU is equivalent to 1,024 CPU shares. For EC2 resources, you must specify at least one vCPU. This is required but can be specified in several places; it must be specified for each node at least once. The default for the Fargate On-Demand vCPU resource count quota is 6 vCPUs. For more information about Fargate quotas, see AWS Fargate quotas in the AWS General Reference. For jobs that are running on Fargate resources, value must match one of the supported values, and the MEMORY value must be one of the values supported for that VCPU value. The supported values are 0.25, 0.5, 1, 2, 4, 8, and 16:
    - value = 0.25: MEMORY = 512, 1024, or 2048
    - value = 0.5: MEMORY = 1024, 2048, 3072, or 4096
    - value = 1: MEMORY = 2048, 3072, 4096, 5120, 6144, 7168, or 8192
    - value = 2: MEMORY = 4096, 5120, 6144, 7168, 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, or 16384
    - value = 4: MEMORY = 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, 16384, 17408, 18432, 19456, 20480, 21504, 22528, 23552, 24576, 25600, 26624, 27648, 28672, 29696, or 30720
    - value = 8: MEMORY = 16384, 20480, 24576, 28672, 32768, 36864, 40960, 45056, 49152, 53248, 57344, or 61440
    - value = 16: MEMORY = 32768, 40960, 49152, 57344, 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880
PipeBatchResourceRequirementType, PipeBatchResourceRequirementTypeArgs          
- Gpu
- GPU
- Memory
- MEMORY
- Vcpu
- VCPU
- PipeBatchResourceRequirementTypeGpu
- GPU
- PipeBatchResourceRequirementTypeMemory
- MEMORY
- PipeBatchResourceRequirementTypeVcpu
- VCPU
- Gpu
- GPU
- Memory
- MEMORY
- Vcpu
- VCPU
- Gpu
- GPU
- Memory
- MEMORY
- Vcpu
- VCPU
- GPU
- GPU
- MEMORY
- MEMORY
- VCPU
- VCPU
- "GPU"
- GPU
- "MEMORY"
- MEMORY
- "VCPU"
- VCPU
PipeBatchRetryStrategy, PipeBatchRetryStrategyArgs        
- Attempts int
- The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value.
- Attempts int
- The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value.
- attempts Integer
- The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value.
- attempts number
- The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value.
- attempts int
- The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value.
- attempts Number
- The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value.
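To make the retry behavior concrete, a minimal sketch of target parameters that retry a failed Batch job twice; the aws_native.types.input path is an assumption about the SDK's layout, and the job names are placeholders:

import * as aws_native from "@pulumi/aws-native";

// Sketch: a Batch target whose failed jobs are moved back to RUNNABLE up to 2 times.
const batchTargetParameters: aws_native.types.input.pipes.PipeTargetParametersArgs = {
    batchJobParameters: {
        jobDefinition: "example-job-definition", // placeholder
        jobName: "example-job",                  // placeholder
        retryStrategy: { attempts: 2 },
    },
};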
PipeCapacityProviderStrategyItem, PipeCapacityProviderStrategyItemArgs          
- CapacityProvider string
- The short name of the capacity provider.
- Base int
- The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used.
- Weight int
- The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied.
- CapacityProvider string
- The short name of the capacity provider.
- Base int
- The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used.
- Weight int
- The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied.
- capacityProvider String
- The short name of the capacity provider.
- base Integer
- The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used.
- weight Integer
- The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied.
- capacityProvider string
- The short name of the capacity provider.
- base number
- The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used.
- weight number
- The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied.
- capacity_provider str
- The short name of the capacity provider.
- base int
- The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used.
- weight int
- The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied.
- capacityProvider String
- The short name of the capacity provider.
- base Number
- The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used.
- weight Number
- The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied.
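For example, a capacity provider strategy for an ECS task target might keep a minimum number of tasks on regular Fargate and spread the remainder evenly; a hedged sketch with placeholder ARNs and provider names:

import * as aws_native from "@pulumi/aws-native";

// Sketch: base satisfies the first 2 tasks; after that, the weights split tasks 50/50.
const ecsTargetParameters: aws_native.types.input.pipes.PipeTargetParametersArgs = {
    ecsTaskParameters: {
        taskDefinitionArn: "arn:aws:ecs:us-east-1:111122223333:task-definition/example:1",
        capacityProviderStrategy: [
            { capacityProvider: "FARGATE", base: 2, weight: 1 },
            { capacityProvider: "FARGATE_SPOT", weight: 1 },
        ],
    },
};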
PipeCloudwatchLogsLogDestination, PipeCloudwatchLogsLogDestinationArgs          
- LogGroupArn string
- The AWS Resource Name (ARN) for the CloudWatch log group to which EventBridge sends the log records.
- LogGroupArn string
- The AWS Resource Name (ARN) for the CloudWatch log group to which EventBridge sends the log records.
- logGroupArn String
- The AWS Resource Name (ARN) for the CloudWatch log group to which EventBridge sends the log records.
- logGroupArn string
- The AWS Resource Name (ARN) for the CloudWatch log group to which EventBridge sends the log records.
- log_group_arn str
- The AWS Resource Name (ARN) for the CloudWatch log group to which EventBridge sends the log records.
- logGroupArn String
- The AWS Resource Name (ARN) for the CloudWatch log group to which EventBridge sends the log records.
PipeDeadLetterConfig, PipeDeadLetterConfigArgs        
- Arn string
- The ARN of the specified target for the dead-letter queue. - For Amazon Kinesis stream and Amazon DynamoDB stream sources, specify either an Amazon SNS topic or Amazon SQS queue ARN. 
- Arn string
- The ARN of the specified target for the dead-letter queue. - For Amazon Kinesis stream and Amazon DynamoDB stream sources, specify either an Amazon SNS topic or Amazon SQS queue ARN. 
- arn String
- The ARN of the specified target for the dead-letter queue. - For Amazon Kinesis stream and Amazon DynamoDB stream sources, specify either an Amazon SNS topic or Amazon SQS queue ARN. 
- arn string
- The ARN of the specified target for the dead-letter queue. - For Amazon Kinesis stream and Amazon DynamoDB stream sources, specify either an Amazon SNS topic or Amazon SQS queue ARN. 
- arn str
- The ARN of the specified target for the dead-letter queue. - For Amazon Kinesis stream and Amazon DynamoDB stream sources, specify either an Amazon SNS topic or Amazon SQS queue ARN. 
- arn String
- The ARN of the specified target for the dead-letter queue. - For Amazon Kinesis stream and Amazon DynamoDB stream sources, specify either an Amazon SNS topic or Amazon SQS queue ARN. 
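A short sketch of a dead-letter queue on a DynamoDB stream source, with placeholder ARNs; records that can't be processed are routed to the SQS queue named in arn:

import * as aws_native from "@pulumi/aws-native";

// Sketch: events that exhaust retries are sent to the dead-letter queue.
const sourceParameters: aws_native.types.input.pipes.PipeSourceParametersArgs = {
    dynamoDbStreamParameters: {
        startingPosition: "TRIM_HORIZON",
        deadLetterConfig: {
            arn: "arn:aws:sqs:us-east-1:111122223333:example-dlq", // placeholder
        },
    },
};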
PipeDimensionMapping, PipeDimensionMappingArgs      
- DimensionName string
- The metadata attributes of the time series. For example, the name and Availability Zone of an Amazon EC2 instance or the name of the manufacturer of a wind turbine are dimensions.
- DimensionValue string
- Dynamic path to the dimension value in the source event.
- DimensionValueType Pulumi.AwsNative.Pipes.PipeDimensionValueType
- The data type of the dimension for the time-series data.
- DimensionName string
- The metadata attributes of the time series. For example, the name and Availability Zone of an Amazon EC2 instance or the name of the manufacturer of a wind turbine are dimensions.
- DimensionValue string
- Dynamic path to the dimension value in the source event.
- DimensionValueType PipeDimensionValueType
- The data type of the dimension for the time-series data.
- dimensionName String
- The metadata attributes of the time series. For example, the name and Availability Zone of an Amazon EC2 instance or the name of the manufacturer of a wind turbine are dimensions.
- dimensionValue String
- Dynamic path to the dimension value in the source event.
- dimensionValueType PipeDimensionValueType
- The data type of the dimension for the time-series data.
- dimensionName string
- The metadata attributes of the time series. For example, the name and Availability Zone of an Amazon EC2 instance or the name of the manufacturer of a wind turbine are dimensions.
- dimensionValue string
- Dynamic path to the dimension value in the source event.
- dimensionValueType PipeDimensionValueType
- The data type of the dimension for the time-series data.
- dimension_name str
- The metadata attributes of the time series. For example, the name and Availability Zone of an Amazon EC2 instance or the name of the manufacturer of a wind turbine are dimensions.
- dimension_value str
- Dynamic path to the dimension value in the source event.
- dimension_value_type PipeDimensionValueType
- The data type of the dimension for the time-series data.
- dimensionName String
- The metadata attributes of the time series. For example, the name and Availability Zone of an Amazon EC2 instance or the name of the manufacturer of a wind turbine are dimensions.
- dimensionValue String
- Dynamic path to the dimension value in the source event.
- dimensionValueType "VARCHAR"
- The data type of the dimension for the time-series data.
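A sketch of a dimension mapping for a Timestream for LiveAnalytics target; the event paths and field names are assumptions about the shape of the source event, not values from this page:

import * as aws_native from "@pulumi/aws-native";

// Sketch: map a field from the source event to a VARCHAR dimension.
const timestreamTargetParameters: aws_native.types.input.pipes.PipeTargetParametersArgs = {
    timestreamParameters: {
        timeValue: "$.detail.timestamp",   // dynamic path; assumed event shape
        versionValue: "1",
        dimensionMappings: [{
            dimensionName: "deviceId",
            dimensionValue: "$.detail.deviceId", // dynamic path to the dimension value
            dimensionValueType: "VARCHAR",
        }],
    },
};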
PipeDimensionValueType, PipeDimensionValueTypeArgs        
- Varchar
- VARCHAR
- PipeDimensionValueTypeVarchar
- VARCHAR
- Varchar
- VARCHAR
- Varchar
- VARCHAR
- VARCHAR
- VARCHAR
- "VARCHAR"
- VARCHAR
PipeDynamoDbStreamStartPosition, PipeDynamoDbStreamStartPositionArgs            
- TrimHorizon 
- TRIM_HORIZON
- Latest
- LATEST
- PipeDynamoDbStreamStartPositionTrimHorizon
- TRIM_HORIZON
- PipeDynamoDbStreamStartPositionLatest
- LATEST
- TrimHorizon 
- TRIM_HORIZON
- Latest
- LATEST
- TrimHorizon 
- TRIM_HORIZON
- Latest
- LATEST
- TRIM_HORIZON
- TRIM_HORIZON
- LATEST
- LATEST
- "TRIM_HORIZON"
- TRIM_HORIZON
- "LATEST"
- LATEST
PipeEcsContainerOverride, PipeEcsContainerOverrideArgs        
- Command List<string>
- The command to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- Cpu int
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- Environment List<Pulumi.AwsNative.Pipes.Inputs.PipeEcsEnvironmentVariable>
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name.
- EnvironmentFiles List<Pulumi.AwsNative.Pipes.Inputs.PipeEcsEnvironmentFile>
- A list of files containing the environment variables to pass to a container, instead of the value from the container definition.
- Memory int
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- MemoryReservation int
- The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
- Name string
- The name of the container that receives the override. This parameter is required if any override is specified.
- ResourceRequirements List<Pulumi.AwsNative.Pipes.Inputs.PipeEcsResourceRequirement>
- The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU.
- Command []string
- The command to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- Cpu int
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- Environment []PipeEcsEnvironmentVariable
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name.
- EnvironmentFiles []PipeEcsEnvironmentFile
- A list of files containing the environment variables to pass to a container, instead of the value from the container definition.
- Memory int
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- MemoryReservation int
- The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
- Name string
- The name of the container that receives the override. This parameter is required if any override is specified.
- ResourceRequirements []PipeEcsResourceRequirement
- The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU.
- command List<String>
- The command to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- cpu Integer
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- environment List<PipeEcsEnvironmentVariable>
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name.
- environmentFiles List<PipeEcsEnvironmentFile>
- A list of files containing the environment variables to pass to a container, instead of the value from the container definition.
- memory Integer
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- memoryReservation Integer
- The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
- name String
- The name of the container that receives the override. This parameter is required if any override is specified.
- resourceRequirements List<PipeEcsResourceRequirement>
- The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU.
- command string[]
- The command to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- cpu number
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- environment PipeEcsEnvironmentVariable[]
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name.
- environmentFiles PipeEcsEnvironmentFile[]
- A list of files containing the environment variables to pass to a container, instead of the value from the container definition.
- memory number
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- memoryReservation number
- The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
- name string
- The name of the container that receives the override. This parameter is required if any override is specified.
- resourceRequirements PipeEcsResourceRequirement[]
- The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU.
- command Sequence[str]
- The command to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- cpu int
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- environment Sequence[PipeEcsEnvironmentVariable]
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name.
- environment_files Sequence[PipeEcsEnvironmentFile]
- A list of files containing the environment variables to pass to a container, instead of the value from the container definition.
- memory int
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- memory_reservation int
- The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
- name str
- The name of the container that receives the override. This parameter is required if any override is specified.
- resource_requirements Sequence[PipeEcsResourceRequirement]
- The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU.
- command List<String>
- The command to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- cpu Number
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- environment List<Property Map>
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name.
- environmentFiles List<Property Map>
- A list of files containing the environment variables to pass to a container, instead of the value from the container definition.
- memory Number
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- memoryReservation Number
- The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
- name String
- The name of the container that receives the override. This parameter is required if any override is specified.
- resourceRequirements List<Property Map>
- The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU.
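As a sketch, a single container override that replaces the command, environment, and sizing of one container; the container and variable names are placeholders:

import * as aws_native from "@pulumi/aws-native";

// Sketch: override one container of the task; name is required once any override is set.
const taskOverride: aws_native.types.input.pipes.PipeEcsTaskOverrideArgs = {
    containerOverrides: [{
        name: "app",                                    // placeholder container name
        command: ["node", "worker.js"],                 // replaces the image/task default
        environment: [{ name: "MODE", value: "pipe" }],
        cpu: 256,                                       // cpu units
        memory: 512,                                    // hard limit in MiB
        memoryReservation: 256,                         // soft limit in MiB
    }],
};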
PipeEcsEnvironmentFile, PipeEcsEnvironmentFileArgs        
- Type Pulumi.AwsNative.Pipes.PipeEcsEnvironmentFileType
- The file type to use. The only supported value is s3.
- Value string
- The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
- Type PipeEcsEnvironmentFileType
- The file type to use. The only supported value is s3.
- Value string
- The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
- type PipeEcsEnvironmentFileType
- The file type to use. The only supported value is s3.
- value String
- The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
- type PipeEcsEnvironmentFileType
- The file type to use. The only supported value is s3.
- value string
- The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
- type PipeEcsEnvironmentFileType
- The file type to use. The only supported value is s3.
- value str
- The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
PipeEcsEnvironmentFileType, PipeEcsEnvironmentFileTypeArgs          
- S3
- s3
- PipeEcsEnvironmentFileTypeS3
- s3
- S3
- s3
- S3
- s3
- S3
- s3
- "s3"
- s3
PipeEcsEnvironmentVariable, PipeEcsEnvironmentVariableArgs        
PipeEcsEphemeralStorage, PipeEcsEphemeralStorageArgs        
- SizeInGiB int
- The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
- SizeInGiB int
- The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
- sizeInGiB Integer
- The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
- sizeInGiB number
- The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
- size_in_gi_b int
- The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
- sizeInGiB Number
- The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
PipeEcsInferenceAcceleratorOverride, PipeEcsInferenceAcceleratorOverrideArgs          
- DeviceName string
- The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
- DeviceType string
- The Elastic Inference accelerator type to use.
- DeviceName string
- The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
- DeviceType string
- The Elastic Inference accelerator type to use.
- deviceName String
- The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
- deviceType String
- The Elastic Inference accelerator type to use.
- deviceName string
- The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
- deviceType string
- The Elastic Inference accelerator type to use.
- device_name str
- The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
- device_type str
- The Elastic Inference accelerator type to use.
- deviceName String
- The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
- deviceType String
- The Elastic Inference accelerator type to use.
PipeEcsResourceRequirement, PipeEcsResourceRequirementArgs        
- Type Pulumi.AwsNative.Pipes.PipeEcsResourceRequirementType
- The type of resource to assign to a container. The supported values are GPU or InferenceAccelerator.
- Value string
- The value for the specified resource type. - If the GPU type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container. The number of GPUs that's reserved for all containers in a task can't exceed the number of available GPUs on the container instance that the task is launched on. - If the InferenceAccelerator type is used, the value matches the deviceName for an InferenceAccelerator specified in a task definition.
- Type PipeEcsResourceRequirementType
- The type of resource to assign to a container. The supported values are GPU or InferenceAccelerator.
- Value string
- The value for the specified resource type. - If the GPU type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container. The number of GPUs that's reserved for all containers in a task can't exceed the number of available GPUs on the container instance that the task is launched on. - If the InferenceAccelerator type is used, the value matches the deviceName for an InferenceAccelerator specified in a task definition.
- type PipeEcsResourceRequirementType
- The type of resource to assign to a container. The supported values are GPU or InferenceAccelerator.
- value String
- The value for the specified resource type. - If the GPU type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container. The number of GPUs that's reserved for all containers in a task can't exceed the number of available GPUs on the container instance that the task is launched on. - If the InferenceAccelerator type is used, the value matches the deviceName for an InferenceAccelerator specified in a task definition.
- type PipeEcsResourceRequirementType
- The type of resource to assign to a container. The supported values are GPU or InferenceAccelerator.
- value string
- The value for the specified resource type. - If the GPU type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container. The number of GPUs that's reserved for all containers in a task can't exceed the number of available GPUs on the container instance that the task is launched on. - If the InferenceAccelerator type is used, the value matches the deviceName for an InferenceAccelerator specified in a task definition.
- type PipeEcsResourceRequirementType
- The type of resource to assign to a container. The supported values are GPU or InferenceAccelerator.
- value str
- The value for the specified resource type. - If the GPU type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container. The number of GPUs that's reserved for all containers in a task can't exceed the number of available GPUs on the container instance that the task is launched on. - If the InferenceAccelerator type is used, the value matches the deviceName for an InferenceAccelerator specified in a task definition.
- type "GPU" | "InferenceAccelerator"
- The type of resource to assign to a container. The supported values are GPU or InferenceAccelerator.
- value String
- The value for the specified resource type. - If the GPU type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container. The number of GPUs that's reserved for all containers in a task can't exceed the number of available GPUs on the container instance that the task is launched on. - If the InferenceAccelerator type is used, the value matches the deviceName for an InferenceAccelerator specified in a task definition.
PipeEcsResourceRequirementType, PipeEcsResourceRequirementTypeArgs          
- Gpu
- GPU
- InferenceAccelerator 
- InferenceAccelerator
- PipeEcsResourceRequirementTypeGpu
- GPU
- PipeEcsResourceRequirementTypeInferenceAccelerator
- InferenceAccelerator
- Gpu
- GPU
- InferenceAccelerator 
- InferenceAccelerator
- Gpu
- GPU
- InferenceAccelerator 
- InferenceAccelerator
- GPU
- GPU
- INFERENCE_ACCELERATOR
- InferenceAccelerator
- "GPU"
- GPU
- "InferenceAccelerator" 
- InferenceAccelerator
PipeEcsTaskOverride, PipeEcsTaskOverrideArgs        
- ContainerOverrides List<Pulumi.AwsNative.Pipes.Inputs.PipeEcsContainerOverride>
- One or more container overrides that are sent to a task.
- Cpu string
- The cpu override for the task.
- EphemeralStorage Pulumi.AwsNative.Pipes.Inputs.PipeEcsEphemeralStorage
- The ephemeral storage setting override for the task. - This parameter is only supported for tasks hosted on Fargate that use the following platform versions: - Linux platform version 1.4.0 or later. - Windows platform version 1.0.0 or later.
- ExecutionRoleArn string
- The Amazon Resource Name (ARN) of the task execution IAM role override for the task. For more information, see Amazon ECS task execution IAM role in the Amazon Elastic Container Service Developer Guide.
- InferenceAcceleratorOverrides List<Pulumi.AwsNative.Pipes.Inputs.PipeEcsInferenceAcceleratorOverride>
- The Elastic Inference accelerator override for the task.
- Memory string
- The memory override for the task.
- TaskRoleArn string
- The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role. For more information, see IAM Role for Tasks in the Amazon Elastic Container Service Developer Guide.
- ContainerOverrides []PipeEcsContainerOverride
- One or more container overrides that are sent to a task.
- Cpu string
- The cpu override for the task.
- EphemeralStorage PipeEcsEphemeralStorage
- The ephemeral storage setting override for the task. - This parameter is only supported for tasks hosted on Fargate that use the following platform versions: - Linux platform version 1.4.0 or later. - Windows platform version 1.0.0 or later.
- ExecutionRoleArn string
- The Amazon Resource Name (ARN) of the task execution IAM role override for the task. For more information, see Amazon ECS task execution IAM role in the Amazon Elastic Container Service Developer Guide.
- InferenceAcceleratorOverrides []PipeEcsInferenceAcceleratorOverride
- The Elastic Inference accelerator override for the task.
- Memory string
- The memory override for the task.
- TaskRoleArn string
- The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role. For more information, see IAM Role for Tasks in the Amazon Elastic Container Service Developer Guide.
- containerOverrides List<PipeEcsContainerOverride>
- One or more container overrides that are sent to a task.
- cpu String
- The cpu override for the task.
- ephemeralStorage PipeEcsEphemeralStorage
- The ephemeral storage setting override for the task. - This parameter is only supported for tasks hosted on Fargate that use the following platform versions: - Linux platform version 1.4.0 or later. - Windows platform version 1.0.0 or later.
- executionRoleArn String
- The Amazon Resource Name (ARN) of the task execution IAM role override for the task. For more information, see Amazon ECS task execution IAM role in the Amazon Elastic Container Service Developer Guide.
- inferenceAcceleratorOverrides List<PipeEcsInferenceAcceleratorOverride>
- The Elastic Inference accelerator override for the task.
- memory String
- The memory override for the task.
- taskRoleArn String
- The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role. For more information, see IAM Role for Tasks in the Amazon Elastic Container Service Developer Guide.
- containerOverrides PipeEcsContainerOverride[]
- One or more container overrides that are sent to a task.
- cpu string
- The cpu override for the task.
- ephemeralStorage PipeEcsEphemeralStorage
- The ephemeral storage setting override for the task. - This parameter is only supported for tasks hosted on Fargate that use the following platform versions: - Linux platform version 1.4.0 or later. - Windows platform version 1.0.0 or later.
- executionRoleArn string
- The Amazon Resource Name (ARN) of the task execution IAM role override for the task. For more information, see Amazon ECS task execution IAM role in the Amazon Elastic Container Service Developer Guide.
- inferenceAcceleratorOverrides PipeEcsInferenceAcceleratorOverride[]
- The Elastic Inference accelerator override for the task.
- memory string
- The memory override for the task.
- taskRoleArn string
- The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role. For more information, see IAM Role for Tasks in the Amazon Elastic Container Service Developer Guide.
- container_overrides Sequence[PipeEcsContainerOverride]
- One or more container overrides that are sent to a task.
- cpu str
- The cpu override for the task.
- ephemeral_storage PipeEcsEphemeralStorage
- The ephemeral storage setting override for the task. - This parameter is only supported for tasks hosted on Fargate that use the following platform versions: - Linux platform version 1.4.0 or later. - Windows platform version 1.0.0 or later.
- execution_role_arn str
- The Amazon Resource Name (ARN) of the task execution IAM role override for the task. For more information, see Amazon ECS task execution IAM role in the Amazon Elastic Container Service Developer Guide.
- inference_accelerator_overrides Sequence[PipeEcsInferenceAcceleratorOverride]
- The Elastic Inference accelerator override for the task.
- memory str
- The memory override for the task.
- task_role_arn str
- The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role. For more information, see IAM Role for Tasks in the Amazon Elastic Container Service Developer Guide.
- containerOverrides List<Property Map>
- One or more container overrides that are sent to a task.
- cpu String
- The cpu override for the task.
- ephemeralStorage Property Map
- The ephemeral storage setting override for the task. - This parameter is only supported for tasks hosted on Fargate that use the following platform versions: - Linux platform version 1.4.0 or later. - Windows platform version 1.0.0 or later.
- executionRoleArn String
- The Amazon Resource Name (ARN) of the task execution IAM role override for the task. For more information, see Amazon ECS task execution IAM role in the Amazon Elastic Container Service Developer Guide.
- inferenceAcceleratorOverrides List<Property Map>
- The Elastic Inference accelerator override for the task.
- memory String
- The memory override for the task.
- taskRoleArn String
- The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role. For more information, see IAM Role for Tasks in the Amazon Elastic Container Service Developer Guide.
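Putting the task-level knobs together, a hedged sketch of ECS task parameters with an override block; the ARNs, subnet ID, and sizes are placeholders:

import * as aws_native from "@pulumi/aws-native";

// Sketch: Fargate task with task-level cpu/memory overrides and extra ephemeral storage.
const ecsTaskTarget: aws_native.types.input.pipes.PipeTargetParametersArgs = {
    ecsTaskParameters: {
        taskDefinitionArn: "arn:aws:ecs:us-east-1:111122223333:task-definition/example:1",
        launchType: "FARGATE",
        networkConfiguration: {
            awsvpcConfiguration: { subnets: ["subnet-0123456789abcdef0"] }, // placeholder
        },
        overrides: {
            cpu: "1024",                         // the task-level override is a string
            memory: "2048",
            ephemeralStorage: { sizeInGiB: 50 }, // 21-200 GiB on supported Fargate versions
            taskRoleArn: "arn:aws:iam::111122223333:role/example-task-role",
        },
    },
};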
PipeEnrichmentHttpParameters, PipeEnrichmentHttpParametersArgs        
- HeaderParameters Dictionary<string, string>
- The headers that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
- PathParameterValues List<string>
- The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
- QueryStringParameters Dictionary<string, string>
- The query string keys/values that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
- HeaderParameters map[string]string
- The headers that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
- PathParameterValues []string
- The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
- QueryStringParameters map[string]string
- The query string keys/values that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
- headerParameters Map<String,String>
- The headers that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
- pathParameterValues List<String>
- The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
- queryStringParameters Map<String,String>
- The query string keys/values that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
- headerParameters {[key: string]: string}
- The headers that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
- pathParameterValues string[]
- The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
- queryStringParameters {[key: string]: string}
- The query string keys/values that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
- header_parameters Mapping[str, str]
- The headers that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
- path_parameter_values Sequence[str]
- The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
- query_string_parameters Mapping[str, str]
- The query string keys/values that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
- headerParameters Map<String>
- The headers that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
- pathParameterValues List<String>
- The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
- queryStringParameters Map<String>
- The query string keys/values that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
PipeEnrichmentParameters, PipeEnrichmentParametersArgs      
- HttpParameters Pulumi.AwsNative.Pipes.Inputs.PipeEnrichmentHttpParameters
- Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination. - If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of your target invoking request. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence.
- InputTemplate string
- Valid JSON text passed to the enrichment. In this case, nothing from the event itself is passed to the enrichment. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format. - To remove an input template, specify an empty string.
- HttpParameters PipeEnrichmentHttpParameters
- Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination. - If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of your target invoking request. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence.
- InputTemplate string
- Valid JSON text passed to the enrichment. In this case, nothing from the event itself is passed to the enrichment. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format. - To remove an input template, specify an empty string.
- httpParameters PipeEnrichmentHttpParameters
- Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination. - If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of your target invoking request. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence.
- inputTemplate String
- Valid JSON text passed to the enrichment. In this case, nothing from the event itself is passed to the enrichment. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format. - To remove an input template, specify an empty string.
- httpParameters PipeEnrichmentHttpParameters
- Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination. - If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of your target invoking request. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence.
- inputTemplate string
- Valid JSON text passed to the enrichment. In this case, nothing from the event itself is passed to the enrichment. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format. - To remove an input template, specify an empty string.
- http_parameters PipeEnrichmentHttpParameters
- Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination. - If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of your target invoking request. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence.
- input_template str
- Valid JSON text passed to the enrichment. In this case, nothing from the event itself is passed to the enrichment. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format. - To remove an input template, specify an empty string.
- httpParameters Property Map
- Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination. - If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of your target invoking request. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence.
- inputTemplate String
- Valid JSON text passed to the enrichment. In this case, nothing from the event itself is passed to the enrichment. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format. - To remove an input template, specify an empty string.
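A sketch of enrichment parameters against a hypothetical API Gateway enrichment; the header, path, and query values are placeholders, and the static input template matches the "nothing from the event" case described above:

import * as aws_native from "@pulumi/aws-native";

// Sketch: static JSON goes to the enrichment; HTTP parameters shape the REST API call.
const enrichmentParameters: aws_native.types.input.pipes.PipeEnrichmentParametersArgs = {
    httpParameters: {
        headerParameters: { "x-api-key": "example-key" }, // placeholder
        pathParameterValues: ["beta"],                    // fills a "*" path wildcard
        queryStringParameters: { verbose: "true" },
    },
    inputTemplate: "{\"source\": \"pipe-example\"}",
};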
PipeEpochTimeUnit, PipeEpochTimeUnitArgs        
- Milliseconds
- MILLISECONDS
- Seconds
- SECONDS
- Microseconds
- MICROSECONDS
- Nanoseconds
- NANOSECONDS
- PipeEpochTimeUnitMilliseconds
- MILLISECONDS
- PipeEpochTimeUnitSeconds
- SECONDS
- PipeEpochTimeUnitMicroseconds
- MICROSECONDS
- PipeEpochTimeUnitNanoseconds
- NANOSECONDS
- Milliseconds
- MILLISECONDS
- Seconds
- SECONDS
- Microseconds
- MICROSECONDS
- Nanoseconds
- NANOSECONDS
- Milliseconds
- MILLISECONDS
- Seconds
- SECONDS
- Microseconds
- MICROSECONDS
- Nanoseconds
- NANOSECONDS
- MILLISECONDS
- MILLISECONDS
- SECONDS
- SECONDS
- MICROSECONDS
- MICROSECONDS
- NANOSECONDS
- NANOSECONDS
- "MILLISECONDS"
- MILLISECONDS
- "SECONDS"
- SECONDS
- "MICROSECONDS"
- MICROSECONDS
- "NANOSECONDS"
- NANOSECONDS
PipeFilter, PipeFilterArgs    
- Pattern string
- The event pattern.
- Pattern string
- The event pattern.
- pattern String
- The event pattern.
- pattern string
- The event pattern.
- pattern str
- The event pattern.
- pattern String
- The event pattern.
PipeFilterCriteria, PipeFilterCriteriaArgs      
- Filters List<Pulumi.AwsNative.Pipes.Inputs.PipeFilter>
- The event patterns.
- Filters []PipeFilter
- The event patterns.
- filters List<PipeFilter>
- The event patterns.
- filters PipeFilter[]
- The event patterns.
- filters Sequence[PipeFilter]
- The event patterns.
- filters List<Property Map>
- The event patterns.
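For instance, with an SQS source the filter pattern is matched against the message body, so a criteria block that only forwards messages whose body has status "CREATED" might look like this sketch (the field names are assumptions):

import * as aws_native from "@pulumi/aws-native";

// Sketch: drop every event whose body.status is not "CREATED".
const filteredSourceParameters: aws_native.types.input.pipes.PipeSourceParametersArgs = {
    filterCriteria: {
        filters: [{
            pattern: JSON.stringify({ body: { status: ["CREATED"] } }),
        }],
    },
};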
PipeFirehoseLogDestination, PipeFirehoseLogDestinationArgs        
- DeliveryStreamArn string
- The Amazon Resource Name (ARN) of the Firehose delivery stream to which EventBridge delivers the pipe log records.
- DeliveryStreamArn string
- The Amazon Resource Name (ARN) of the Firehose delivery stream to which EventBridge delivers the pipe log records.
- deliveryStreamArn String
- The Amazon Resource Name (ARN) of the Firehose delivery stream to which EventBridge delivers the pipe log records.
- deliveryStreamArn string
- The Amazon Resource Name (ARN) of the Firehose delivery stream to which EventBridge delivers the pipe log records.
- delivery_stream_arn str
- The Amazon Resource Name (ARN) of the Firehose delivery stream to which EventBridge delivers the pipe log records.
- deliveryStreamArn String
- The Amazon Resource Name (ARN) of the Firehose delivery stream to which EventBridge delivers the pipe log records.
PipeIncludeExecutionDataOption, PipeIncludeExecutionDataOptionArgs          
- All
- ALL
- PipeIncludeExecutionDataOptionAll
- ALL
- All
- ALL
- All
- ALL
- ALL
- ALL
- "ALL"
- ALL
PipeKinesisStreamStartPosition, PipeKinesisStreamStartPositionArgs          
- TrimHorizon 
- TRIM_HORIZON
- Latest
- LATEST
- AtTimestamp 
- AT_TIMESTAMP
- PipeKinesisStreamStartPositionTrimHorizon
- TRIM_HORIZON
- PipeKinesisStreamStartPositionLatest
- LATEST
- PipeKinesisStreamStartPositionAtTimestamp
- AT_TIMESTAMP
- TrimHorizon 
- TRIM_HORIZON
- Latest
- LATEST
- AtTimestamp 
- AT_TIMESTAMP
- TrimHorizon 
- TRIM_HORIZON
- Latest
- LATEST
- AtTimestamp 
- AT_TIMESTAMP
- TRIM_HORIZON
- TRIM_HORIZON
- LATEST
- LATEST
- AT_TIMESTAMP
- AT_TIMESTAMP
- "TRIM_HORIZON"
- TRIM_HORIZON
- "LATEST"
- LATEST
- "AT_TIMESTAMP"
- AT_TIMESTAMP
PipeLaunchType, PipeLaunchTypeArgs      
- Ec2
- EC2
- Fargate
- FARGATE
- External
- EXTERNAL
- PipeLaunchTypeEc2
- EC2
- PipeLaunchTypeFargate
- FARGATE
- PipeLaunchTypeExternal
- EXTERNAL
- Ec2
- EC2
- Fargate
- FARGATE
- External
- EXTERNAL
- Ec2
- EC2
- Fargate
- FARGATE
- External
- EXTERNAL
- EC2
- EC2
- FARGATE
- FARGATE
- EXTERNAL
- EXTERNAL
- "EC2"
- EC2
- "FARGATE"
- FARGATE
- "EXTERNAL"
- EXTERNAL
PipeLogConfiguration, PipeLogConfigurationArgs      
- CloudwatchLogsLogDestination Pulumi.AwsNative.Pipes.Inputs.PipeCloudwatchLogsLogDestination
- The logging configuration settings for the pipe.
- FirehoseLogDestination Pulumi.AwsNative.Pipes.Inputs.PipeFirehoseLogDestination
- The Amazon Data Firehose logging configuration settings for the pipe.
- IncludeExecutionData List<Pulumi.AwsNative.Pipes.PipeIncludeExecutionDataOption>
- Whether the execution data (specifically, the payload, awsRequest, and awsResponse fields) is included in the log messages for this pipe. - This applies to all log destinations for the pipe. - For more information, see Including execution data in logs in the Amazon EventBridge User Guide. - Allowed values: ALL
- Level Pulumi.AwsNative.Pipes.PipeLogLevel
- The level of logging detail to include. This applies to all log destinations for the pipe.
- S3LogDestination Pulumi.AwsNative.Pipes.Inputs.PipeS3LogDestination
- The Amazon S3 logging configuration settings for the pipe.
- CloudwatchLogsLogDestination PipeCloudwatchLogsLogDestination
- The logging configuration settings for the pipe.
- FirehoseLogDestination PipeFirehoseLogDestination
- The Amazon Data Firehose logging configuration settings for the pipe.
- IncludeExecutionData []PipeIncludeExecutionDataOption
- Whether the execution data (specifically, the payload, awsRequest, and awsResponse fields) is included in the log messages for this pipe. - This applies to all log destinations for the pipe. - For more information, see Including execution data in logs in the Amazon EventBridge User Guide. - Allowed values: ALL
- Level PipeLogLevel
- The level of logging detail to include. This applies to all log destinations for the pipe.
- S3LogDestination PipeS3LogDestination
- The Amazon S3 logging configuration settings for the pipe.
- cloudwatchLogsLogDestination PipeCloudwatchLogsLogDestination
- The logging configuration settings for the pipe.
- firehoseLogDestination PipeFirehoseLogDestination
- The Amazon Data Firehose logging configuration settings for the pipe.
- includeExecutionData List<PipeIncludeExecutionDataOption>
- Whether the execution data (specifically, the payload, awsRequest, and awsResponse fields) is included in the log messages for this pipe. - This applies to all log destinations for the pipe. - For more information, see Including execution data in logs in the Amazon EventBridge User Guide. - Allowed values: ALL
- level PipeLogLevel
- The level of logging detail to include. This applies to all log destinations for the pipe.
- s3LogDestination PipeS3LogDestination
- The Amazon S3 logging configuration settings for the pipe.
- cloudwatchLogsLogDestination PipeCloudwatchLogsLogDestination
- The logging configuration settings for the pipe.
- firehoseLogDestination PipeFirehoseLogDestination
- The Amazon Data Firehose logging configuration settings for the pipe.
- includeExecutionData PipeIncludeExecutionDataOption[]
- Whether the execution data (specifically, the payload, awsRequest, and awsResponse fields) is included in the log messages for this pipe. - This applies to all log destinations for the pipe. - For more information, see Including execution data in logs in the Amazon EventBridge User Guide. - Allowed values: ALL
- level PipeLogLevel
- The level of logging detail to include. This applies to all log destinations for the pipe.
- s3LogDestination PipeS3LogDestination
- The Amazon S3 logging configuration settings for the pipe.
- cloudwatch_logs_log_destination PipeCloudwatchLogsLogDestination
- The logging configuration settings for the pipe.
- firehose_log_destination PipeFirehoseLogDestination
- The Amazon Data Firehose logging configuration settings for the pipe.
- include_execution_data Sequence[PipeIncludeExecutionDataOption]
- Whether the execution data (specifically, the payload, awsRequest, and awsResponse fields) is included in the log messages for this pipe. - This applies to all log destinations for the pipe. - For more information, see Including execution data in logs in the Amazon EventBridge User Guide. - Allowed values: ALL
- level PipeLogLevel
- The level of logging detail to include. This applies to all log destinations for the pipe.
- s3_log_destination PipeS3LogDestination
- The Amazon S3 logging configuration settings for the pipe.
- cloudwatchLogsLogDestination Property Map
- The logging configuration settings for the pipe.
- firehoseLogDestination Property Map
- The Amazon Data Firehose logging configuration settings for the pipe.
- includeExecutionData List<"ALL">
- Whether the execution data (specifically, the - payload,- awsRequest, and- awsResponsefields) is included in the log messages for this pipe.- This applies to all log destinations for the pipe. - For more information, see Including execution data in logs in the Amazon EventBridge User Guide . - Allowed values: - ALL
- level "OFF" | "ERROR" | "INFO" | "TRACE"
- The level of logging detail to include. This applies to all log destinations for the pipe.
- s3LogDestination Property Map
- The Amazon S3 logging configuration settings for the pipe.
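Tying the pieces together, a sketch of a pipe-level log configuration that sends ERROR-level records, with execution data, to a placeholder CloudWatch log group:

import * as aws_native from "@pulumi/aws-native";

// Sketch: one destination (CloudWatch Logs), ERROR level, execution data included.
const logConfiguration: aws_native.types.input.pipes.PipeLogConfigurationArgs = {
    cloudwatchLogsLogDestination: {
        logGroupArn: "arn:aws:logs:us-east-1:111122223333:log-group:/aws/vendedlogs/pipes/example", // placeholder
    },
    level: "ERROR",
    includeExecutionData: ["ALL"],
};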
PipeLogLevel, PipeLogLevelArgs      
- Off
- OFF
- Error
- ERROR
- Info
- INFO
- Trace
- TRACE
- PipeLogLevelOff
- OFF
- PipeLogLevelError
- ERROR
- PipeLogLevelInfo
- INFO
- PipeLogLevelTrace
- TRACE
- Off
- OFF
- Error
- ERROR
- Info
- INFO
- Trace
- TRACE
- Off
- OFF
- Error
- ERROR
- Info
- INFO
- Trace
- TRACE
- OFF
- OFF
- ERROR
- ERROR
- INFO
- INFO
- TRACE
- TRACE
- "OFF"
- OFF
- "ERROR"
- ERROR
- "INFO"
- INFO
- "TRACE"
- TRACE
PipeMeasureValueType, PipeMeasureValueTypeArgs        
- Double
- DOUBLE
- Bigint
- BIGINT
- Varchar
- VARCHAR
- Boolean
- BOOLEAN
- Timestamp
- TIMESTAMP
- PipeMeasureValueTypeDouble
- DOUBLE
- PipeMeasureValueTypeBigint
- BIGINT
- PipeMeasureValueTypeVarchar
- VARCHAR
- PipeMeasureValueTypeBoolean
- BOOLEAN
- PipeMeasureValueTypeTimestamp
- TIMESTAMP
- Double
- DOUBLE
- Bigint
- BIGINT
- Varchar
- VARCHAR
- Boolean
- BOOLEAN
- Timestamp
- TIMESTAMP
- Double
- DOUBLE
- Bigint
- BIGINT
- Varchar
- VARCHAR
- Boolean
- BOOLEAN
- Timestamp
- TIMESTAMP
- DOUBLE
- DOUBLE
- BIGINT
- BIGINT
- VARCHAR
- VARCHAR
- BOOLEAN
- BOOLEAN
- TIMESTAMP
- TIMESTAMP
- "DOUBLE"
- DOUBLE
- "BIGINT"
- BIGINT
- "VARCHAR"
- VARCHAR
- "BOOLEAN"
- BOOLEAN
- "TIMESTAMP"
- TIMESTAMP
PipeMqBrokerAccessCredentialsProperties, PipeMqBrokerAccessCredentialsPropertiesArgs            
- BasicAuth string
- Optional Secrets Manager ARN which stores the database credentials
- BasicAuth string
- Optional Secrets Manager ARN which stores the database credentials
- basicAuth String
- Optional Secrets Manager ARN which stores the database credentials
- basicAuth string
- Optional Secrets Manager ARN which stores the database credentials
- basic_auth str
- Optional Secrets Manager ARN which stores the database credentials
- basicAuth String
- Optional Secrets Manager ARN which stores the database credentials
PipeMskAccessCredentials0Properties, PipeMskAccessCredentials0PropertiesArgs        
- SaslScram512Auth string
- Optional Secrets Manager ARN which stores the database credentials
- SaslScram512Auth string
- Optional Secrets Manager ARN which stores the database credentials
- saslScram512Auth String
- Optional Secrets Manager ARN which stores the database credentials
- saslScram512Auth string
- Optional Secrets Manager ARN which stores the database credentials
- sasl_scram512_auth str
- Optional Secrets Manager ARN which stores the database credentials
- saslScram512Auth String
- Optional Secrets Manager ARN which stores the database credentials
PipeMskAccessCredentials1Properties, PipeMskAccessCredentials1PropertiesArgs        
- ClientCertificateTlsAuth string
- Optional Secrets Manager ARN which stores the database credentials
- ClientCertificateTlsAuth string
- Optional Secrets Manager ARN which stores the database credentials
- clientCertificateTlsAuth String
- Optional Secrets Manager ARN which stores the database credentials
- clientCertificateTlsAuth string
- Optional Secrets Manager ARN which stores the database credentials
- client_certificate_tls_auth str
- Optional Secrets Manager ARN which stores the database credentials
- clientCertificateTlsAuth String
- Optional Secrets Manager ARN which stores the database credentials
PipeMskStartPosition, PipeMskStartPositionArgs        
- TrimHorizon 
- TRIM_HORIZON
- Latest
- LATEST
- PipeMskStartPositionTrimHorizon
- TRIM_HORIZON
- PipeMskStartPositionLatest
- LATEST
- TrimHorizon 
- TRIM_HORIZON
- Latest
- LATEST
- TrimHorizon 
- TRIM_HORIZON
- Latest
- LATEST
- TRIM_HORIZON
- TRIM_HORIZON
- LATEST
- LATEST
- "TRIM_HORIZON"
- TRIM_HORIZON
- "LATEST"
- LATEST
PipeMultiMeasureAttributeMapping, PipeMultiMeasureAttributeMappingArgs          
- MeasureValue string
- Dynamic path to the measurement attribute in the source event.
- MeasureValueType Pulumi.AwsNative.Pipes.PipeMeasureValueType
- Data type of the measurement attribute in the source event.
- MultiMeasureAttributeName string
- Target measure name to be used.
- MeasureValue string
- Dynamic path to the measurement attribute in the source event.
- MeasureValueType PipeMeasureValueType
- Data type of the measurement attribute in the source event.
- MultiMeasureAttributeName string
- Target measure name to be used.
- measureValue String
- Dynamic path to the measurement attribute in the source event.
- measureValueType PipeMeasureValueType
- Data type of the measurement attribute in the source event.
- multiMeasureAttributeName String
- Target measure name to be used.
- measureValue string
- Dynamic path to the measurement attribute in the source event.
- measureValueType PipeMeasureValueType
- Data type of the measurement attribute in the source event.
- multiMeasureAttributeName string
- Target measure name to be used.
- measure_value str
- Dynamic path to the measurement attribute in the source event.
- measure_value_type PipeMeasureValueType
- Data type of the measurement attribute in the source event.
- multi_measure_attribute_name str
- Target measure name to be used.
- measureValue String
- Dynamic path to the measurement attribute in the source event.
- measureValueType "DOUBLE" | "BIGINT" | "VARCHAR" | "BOOLEAN" | "TIMESTAMP"
- Data type of the measurement attribute in the source event.
- multiMeasureAttributeName String
- Target measure name to be used.
PipeMultiMeasureMapping, PipeMultiMeasureMappingArgs        
- MultiMeasureAttributeMappings List<Pulumi.AwsNative.Pipes.Inputs.PipeMultiMeasureAttributeMapping>
- Mappings that represent multiple source event fields mapped to measures in the same Timestream for LiveAnalytics record.
- MultiMeasureName string
- The name of the multiple measurements per record (multi-measure).
- MultiMeasureAttributeMappings []PipeMultiMeasureAttributeMapping
- Mappings that represent multiple source event fields mapped to measures in the same Timestream for LiveAnalytics record.
- MultiMeasureName string
- The name of the multiple measurements per record (multi-measure).
- multiMeasureAttributeMappings List<PipeMultiMeasureAttributeMapping>
- Mappings that represent multiple source event fields mapped to measures in the same Timestream for LiveAnalytics record.
- multiMeasureName String
- The name of the multiple measurements per record (multi-measure).
- multiMeasureAttributeMappings PipeMultiMeasureAttributeMapping[]
- Mappings that represent multiple source event fields mapped to measures in the same Timestream for LiveAnalytics record.
- multiMeasureName string
- The name of the multiple measurements per record (multi-measure).
- multi_measure_attribute_mappings Sequence[PipeMultiMeasureAttributeMapping]
- Mappings that represent multiple source event fields mapped to measures in the same Timestream for LiveAnalytics record.
- multi_measure_name str
- The name of the multiple measurements per record (multi-measure).
- multiMeasureAttributeMappings List<Property Map>
- Mappings that represent multiple source event fields mapped to measures in the same Timestream for LiveAnalytics record.
- multiMeasureName String
- The name of the multiple measurements per record (multi-measure).
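Putting the two structures together: a minimal TypeScript sketch of a multi-measure mapping. The placement under the Timestream target parameters is assumed from the target types documented elsewhere on this page, and the event paths and measure names are hypothetical.
// One Timestream for LiveAnalytics record carrying two measures; intended
// for the multiMeasureMappings list of the Timestream target parameters
// (a hypothetical placement -- see the target parameter types on this page).
const utilizationMapping = {
    multiMeasureName: "utilization",
    multiMeasureAttributeMappings: [
        {
            measureValue: "$.body.cpu",  // dynamic path into the source event
            measureValueType: "DOUBLE",  // one of the PipeMeasureValueType values above
            multiMeasureAttributeName: "cpu_used",
        },
        {
            measureValue: "$.body.memory",
            measureValueType: "BIGINT",
            multiMeasureAttributeName: "memory_used",
        },
    ],
};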
PipeNetworkConfiguration, PipeNetworkConfigurationArgs      
- AwsvpcConfiguration Pulumi.AwsNative.Pipes.Inputs.PipeAwsVpcConfiguration
- Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode.
- AwsvpcConfiguration PipeAwsVpcConfiguration
- Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode.
- awsvpcConfiguration PipeAwsVpcConfiguration
- Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode.
- awsvpcConfiguration PipeAwsVpcConfiguration
- Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode.
- awsvpc_configuration PipeAwsVpcConfiguration
- Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode.
- awsvpcConfiguration Property Map
- Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode.
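As a TypeScript sketch, assuming this block is attached under the ECS task target parameters documented elsewhere on this page; the subnet and security group IDs are placeholders, and the securityGroups/assignPublicIp field names are assumed from the PipeAwsVpcConfiguration type.
// awsvpc network configuration for an ECS task target.
const networkConfiguration = {
    awsvpcConfiguration: {
        subnets: ["subnet-0123456789abcdef0"],
        securityGroups: ["sg-0123456789abcdef0"],
        assignPublicIp: "DISABLED",
    },
};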
PipeOnPartialBatchItemFailureStreams, PipeOnPartialBatchItemFailureStreamsArgs              
- AutomaticBisect 
- AUTOMATIC_BISECT
- PipeOnPartialBatchItemFailureStreamsAutomaticBisect
- AUTOMATIC_BISECT
- AutomaticBisect 
- AUTOMATIC_BISECT
- AutomaticBisect 
- AUTOMATIC_BISECT
- AUTOMATIC_BISECT
- AUTOMATIC_BISECT
- "AUTOMATIC_BISECT"
- AUTOMATIC_BISECT
PipePlacementConstraint, PipePlacementConstraintArgs      
- Expression string
- A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. To learn more, see Cluster Query Language in the Amazon Elastic Container Service Developer Guide.
- Type Pulumi.AwsNative.Pipes.PipePlacementConstraintType
- The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates.
- Expression string
- A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. To learn more, see Cluster Query Language in the Amazon Elastic Container Service Developer Guide.
- Type PipePlacementConstraintType
- The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates.
- expression String
- A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. To learn more, see Cluster Query Language in the Amazon Elastic Container Service Developer Guide.
- type PipePlacementConstraintType
- The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates.
- expression string
- A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. To learn more, see Cluster Query Language in the Amazon Elastic Container Service Developer Guide.
- type PipePlacementConstraintType
- The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates.
- expression str
- A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. To learn more, see Cluster Query Language in the Amazon Elastic Container Service Developer Guide.
- type PipePlacementConstraintType
- The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates.
- expression String
- A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. To learn more, see Cluster Query Language in the Amazon Elastic Container Service Developer Guide.
- type "distinctInstance" | "memberOf"
- The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates.
PipePlacementConstraintType, PipePlacementConstraintTypeArgs        
- DistinctInstance 
- distinctInstance
- MemberOf 
- memberOf
- PipePlacementConstraintTypeDistinctInstance
- distinctInstance
- PipePlacementConstraintTypeMemberOf
- memberOf
- DistinctInstance 
- distinctInstance
- MemberOf 
- memberOf
- DistinctInstance 
- distinctInstance
- MemberOf 
- memberOf
- DISTINCT_INSTANCE
- distinctInstance
- MEMBER_OF
- memberOf
- "distinctInstance" 
- distinctInstance
- "memberOf" 
- memberOf
PipePlacementStrategy, PipePlacementStrategyArgs      
- Field string
- The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used.
- Type Pulumi.AwsNative.Pipes.PipePlacementStrategyType
- The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task).
- Field string
- The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used.
- Type PipePlacementStrategyType
- The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task).
- field String
- The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used.
- type PipePlacementStrategyType
- The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task).
- field string
- The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used.
- type PipePlacementStrategyType
- The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task).
- field str
- The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used.
- type PipePlacementStrategyType
- The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task).
- field String
- The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used.
- type "random" | "spread" | "binpack"
- The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task).
PipePlacementStrategyType, PipePlacementStrategyTypeArgs        
- Random
- random
- Spread
- spread
- Binpack
- binpack
- PipePlacementStrategyTypeRandom
- random
- PipePlacementStrategyTypeSpread
- spread
- PipePlacementStrategyTypeBinpack
- binpack
- Random
- random
- Spread
- spread
- Binpack
- binpack
- Random
- random
- Spread
- spread
- Binpack
- binpack
- RANDOM
- random
- SPREAD
- spread
- BINPACK
- binpack
- "random"
- random
- "spread"
- spread
- "binpack"
- binpack
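For example, in TypeScript, constraints and strategies of the shapes above could be combined under the ECS task target parameters (documented elsewhere on this page, so this placement is an assumption); the query expression is illustrative only.
// Keep tasks on t2 instances and pack them by memory.
const placementConstraints = [
    { type: "memberOf", expression: "attribute:ecs.instance-type =~ t2.*" },
];
const placementStrategy = [
    { type: "binpack", field: "memory" },
];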
PipePropagateTags, PipePropagateTagsArgs      
- TaskDefinition 
- TASK_DEFINITION
- PipePropagateTagsTaskDefinition
- TASK_DEFINITION
- TaskDefinition 
- TASK_DEFINITION
- TaskDefinition 
- TASK_DEFINITION
- TASK_DEFINITION
- TASK_DEFINITION
- "TASK_DEFINITION"
- TASK_DEFINITION
PipeRequestedPipeState, PipeRequestedPipeStateArgs        
- Running
- RUNNING
- Stopped
- STOPPED
- PipeRequestedPipeStateRunning
- RUNNING
- PipeRequestedPipeStateStopped
- STOPPED
- Running
- RUNNING
- Stopped
- STOPPED
- Running
- RUNNING
- Stopped
- STOPPED
- RUNNING
- RUNNING
- STOPPED
- STOPPED
- "RUNNING"
- RUNNING
- "STOPPED"
- STOPPED
PipeS3LogDestination, PipeS3LogDestinationArgs      
- BucketName string
- The name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- BucketOwner string
- The AWS account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- OutputFormat Pulumi.AwsNative.Pipes.PipeS3OutputFormat
- The format EventBridge uses for the log records. EventBridge currently only supports json formatting.
- Prefix string
- The prefix text with which to begin Amazon S3 log object names. For more information, see Organizing objects using prefixes in the Amazon Simple Storage Service User Guide.
- BucketName string
- The name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- BucketOwner string
- The AWS account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- OutputFormat PipeS3OutputFormat
- The format EventBridge uses for the log records. EventBridge currently only supports json formatting.
- Prefix string
- The prefix text with which to begin Amazon S3 log object names. For more information, see Organizing objects using prefixes in the Amazon Simple Storage Service User Guide.
- bucketName String
- The name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- bucketOwner String
- The AWS account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- outputFormat PipeS3OutputFormat
- The format EventBridge uses for the log records. EventBridge currently only supports json formatting.
- prefix String
- The prefix text with which to begin Amazon S3 log object names. For more information, see Organizing objects using prefixes in the Amazon Simple Storage Service User Guide.
- bucketName string
- The name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- bucketOwner string
- The AWS account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- outputFormat PipeS3OutputFormat
- The format EventBridge uses for the log records. EventBridge currently only supports json formatting.
- prefix string
- The prefix text with which to begin Amazon S3 log object names. For more information, see Organizing objects using prefixes in the Amazon Simple Storage Service User Guide.
- bucket_name str
- The name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- bucket_owner str
- The AWS account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- output_format PipeS3OutputFormat
- The format EventBridge uses for the log records. EventBridge currently only supports json formatting.
- prefix str
- The prefix text with which to begin Amazon S3 log object names. For more information, see Organizing objects using prefixes in the Amazon Simple Storage Service User Guide.
- bucketName String
- The name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- bucketOwner String
- The AWS account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- outputFormat "json" | "plain" | "w3c"
- The format EventBridge uses for the log records. EventBridge currently only supports json formatting.
- prefix String
- The prefix text with which to begin Amazon S3 log object names. For more information, see Organizing objects using prefixes in the Amazon Simple Storage Service User Guide.
PipeS3OutputFormat, PipeS3OutputFormatArgs      
- Json
- json
- Plain
- plain
- W3c
- w3c
- PipeS3OutputFormatJson
- json
- PipeS3OutputFormatPlain
- plain
- PipeS3OutputFormatW3c
- w3c
- Json
- json
- Plain
- plain
- W3c
- w3c
- Json
- json
- Plain
- plain
- W3c
- w3c
- JSON
- json
- PLAIN
- plain
- W3C
- w3c
- "json"
- json
- "plain"
- plain
- "w3c"
- w3c
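A minimal TypeScript sketch of S3 logging for a pipe, assuming the Pipe resource's logConfiguration input documented elsewhere on this page; the bucket name, account ID, and ARNs are placeholders.
import * as aws_native from "@pulumi/aws-native";
const loggedPipe = new aws_native.pipes.Pipe("loggedPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
    target: "arn:aws:states:us-east-1:123456789123:stateMachine:PipeTargetStateMachine",
    logConfiguration: {
        level: "ERROR",
        s3LogDestination: {
            bucketName: "my-pipe-logs",
            bucketOwner: "123456789123",
            outputFormat: "json", // currently the only supported format
            prefix: "pipes/",
        },
    },
});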
PipeSageMakerPipelineParameter, PipeSageMakerPipelineParameterArgs          
PipeSelfManagedKafkaAccessConfigurationCredentials0Properties, PipeSelfManagedKafkaAccessConfigurationCredentials0PropertiesArgs              
- BasicAuth string
- Optional SecretManager ARN which stores the database credentials
- BasicAuth string
- Optional SecretManager ARN which stores the database credentials
- basicAuth String
- Optional SecretManager ARN which stores the database credentials
- basicAuth string
- Optional SecretManager ARN which stores the database credentials
- basic_auth str
- Optional SecretManager ARN which stores the database credentials
- basicAuth String
- Optional SecretManager ARN which stores the database credentials
PipeSelfManagedKafkaAccessConfigurationCredentials1Properties, PipeSelfManagedKafkaAccessConfigurationCredentials1PropertiesArgs              
- SaslScram512Auth string
- Optional SecretManager ARN which stores the database credentials
- SaslScram512Auth string
- Optional SecretManager ARN which stores the database credentials
- saslScram512Auth String
- Optional SecretManager ARN which stores the database credentials
- saslScram512Auth string
- Optional SecretManager ARN which stores the database credentials
- sasl_scram512_auth str
- Optional SecretManager ARN which stores the database credentials
- saslScram512Auth String
- Optional SecretManager ARN which stores the database credentials
PipeSelfManagedKafkaAccessConfigurationCredentials2Properties, PipeSelfManagedKafkaAccessConfigurationCredentials2PropertiesArgs              
- SaslScram256Auth string
- Optional SecretManager ARN which stores the database credentials
- SaslScram256Auth string
- Optional SecretManager ARN which stores the database credentials
- saslScram256Auth String
- Optional SecretManager ARN which stores the database credentials
- saslScram256Auth string
- Optional SecretManager ARN which stores the database credentials
- sasl_scram256_auth str
- Optional SecretManager ARN which stores the database credentials
- saslScram256Auth String
- Optional SecretManager ARN which stores the database credentials
PipeSelfManagedKafkaAccessConfigurationCredentials3Properties, PipeSelfManagedKafkaAccessConfigurationCredentials3PropertiesArgs              
- ClientCertificateTlsAuth string
- Optional SecretManager ARN which stores the database credentials
- ClientCertificateTlsAuth string
- Optional SecretManager ARN which stores the database credentials
- clientCertificateTlsAuth String
- Optional SecretManager ARN which stores the database credentials
- clientCertificateTlsAuth string
- Optional SecretManager ARN which stores the database credentials
- client_certificate_tls_auth str
- Optional SecretManager ARN which stores the database credentials
- clientCertificateTlsAuth String
- Optional SecretManager ARN which stores the database credentials
PipeSelfManagedKafkaAccessConfigurationVpc, PipeSelfManagedKafkaAccessConfigurationVpcArgs              
- SecurityGroup List<string>
- List of SecurityGroupId.
- Subnets List<string>
- List of SubnetId.
- SecurityGroup []string
- List of SecurityGroupId.
- Subnets []string
- List of SubnetId.
- securityGroup List<String>
- List of SecurityGroupId.
- subnets List<String>
- List of SubnetId.
- securityGroup string[]
- List of SecurityGroupId.
- subnets string[]
- List of SubnetId.
- security_group Sequence[str]
- List of SecurityGroupId.
- subnets Sequence[str]
- List of SubnetId.
- securityGroup List<String>
- List of SecurityGroupId.
- subnets List<String>
- List of SubnetId.
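In TypeScript this looks as follows; note the singular securityGroup field. The placement under the self-managed Kafka source parameters (a vpc property) is assumed from the source type later on this page, and the IDs are placeholders.
const kafkaVpc = {
    securityGroup: ["sg-0123456789abcdef0"],
    subnets: ["subnet-0123456789abcdef0", "subnet-0fedcba9876543210"],
};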
PipeSelfManagedKafkaStartPosition, PipeSelfManagedKafkaStartPositionArgs            
- TrimHorizon 
- TRIM_HORIZON
- Latest
- LATEST
- PipeSelfManagedKafkaStartPositionTrimHorizon
- TRIM_HORIZON
- PipeSelfManagedKafkaStartPositionLatest
- LATEST
- TrimHorizon 
- TRIM_HORIZON
- Latest
- LATEST
- TrimHorizon 
- TRIM_HORIZON
- Latest
- LATEST
- TRIM_HORIZON
- TRIM_HORIZON
- LATEST
- LATEST
- "TRIM_HORIZON"
- TRIM_HORIZON
- "LATEST"
- LATEST
PipeSingleMeasureMapping, PipeSingleMeasureMappingArgs        
- MeasureName string
- Target measure name for the measurement attribute in the Timestream table.
- MeasureValue string
- Dynamic path of the source field to map to the measure in the record.
- MeasureValueType Pulumi.AwsNative.Pipes.PipeMeasureValueType
- Data type of the source field.
- MeasureName string
- Target measure name for the measurement attribute in the Timestream table.
- MeasureValue string
- Dynamic path of the source field to map to the measure in the record.
- MeasureValueType PipeMeasureValueType
- Data type of the source field.
- measureName String
- Target measure name for the measurement attribute in the Timestream table.
- measureValue String
- Dynamic path of the source field to map to the measure in the record.
- measureValueType PipeMeasureValueType
- Data type of the source field.
- measureName string
- Target measure name for the measurement attribute in the Timestream table.
- measureValue string
- Dynamic path of the source field to map to the measure in the record.
- measureValueType PipeMeasureValueType
- Data type of the source field.
- measure_name str
- Target measure name for the measurement attribute in the Timestream table.
- measure_value str
- Dynamic path of the source field to map to the measure in the record.
- measure_value_type PipeMeasureValueType
- Data type of the source field.
- measureName String
- Target measure name for the measurement attribute in the Timestream table.
- measureValue String
- Dynamic path of the source field to map to the measure in the record.
- measureValue "DOUBLE" | "BIGINT" | "VARCHAR" | "BOOLEAN" | "TIMESTAMP"Type 
- Data type of the source field.
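A companion TypeScript sketch of a single-measure mapping, again assuming it is supplied through the Timestream target parameters documented elsewhere on this page; the path and names are hypothetical.
const temperatureMapping = {
    measureName: "temperature",          // target measure in the Timestream table
    measureValue: "$.body.temperature",  // dynamic path into the source event
    measureValueType: "DOUBLE",
};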
PipeSourceActiveMqBrokerParameters, PipeSourceActiveMqBrokerParametersArgs            
- Credentials Pulumi.AwsNative.Pipes.Inputs.PipeMqBrokerAccessCredentialsProperties
- The credentials needed to access the resource.
- QueueName string
- The name of the destination queue to consume.
- BatchSize int
- The maximum number of records to include in each batch.
- MaximumBatchingWindowInSeconds int
- The maximum length of time to wait for events.
- Credentials PipeMqBrokerAccessCredentialsProperties
- The credentials needed to access the resource.
- QueueName string
- The name of the destination queue to consume.
- BatchSize int
- The maximum number of records to include in each batch.
- MaximumBatchingWindowInSeconds int
- The maximum length of time to wait for events.
- credentials PipeMqBrokerAccessCredentialsProperties
- The credentials needed to access the resource.
- queueName String
- The name of the destination queue to consume.
- batchSize Integer
- The maximum number of records to include in each batch.
- maximumBatchingWindowInSeconds Integer
- The maximum length of time to wait for events.
- credentials PipeMqBrokerAccessCredentialsProperties
- The credentials needed to access the resource.
- queueName string
- The name of the destination queue to consume.
- batchSize number
- The maximum number of records to include in each batch.
- maximumBatchingWindowInSeconds number
- The maximum length of time to wait for events.
- credentials PipeMqBrokerAccessCredentialsProperties
- The credentials needed to access the resource.
- queue_name str
- The name of the destination queue to consume.
- batch_size int
- The maximum number of records to include in each batch.
- maximum_batching_window_in_seconds int
- The maximum length of time to wait for events.
- credentials Property Map
- The credentials needed to access the resource.
- queueName String
- The name of the destination queue to consume.
- batchSize Number
- The maximum number of records to include in each batch.
- maximumBatchingWindowInSeconds Number
- The maximum length of time to wait for events.
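A minimal TypeScript sketch of these parameters on a pipe, assuming the Pipe resource's sourceParameters input; the broker, role, queue, and secret ARNs are hypothetical placeholders.
import * as aws_native from "@pulumi/aws-native";
const activeMqPipe = new aws_native.pipes.Pipe("activeMqPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "arn:aws:mq:us-east-1:123456789123:broker:myBroker:b-00000000-0000-0000-0000-000000000000",
    target: "arn:aws:sqs:us-east-1:123456789123:pipeTargetQueue",
    sourceParameters: {
        activeMqBrokerParameters: {
            queueName: "orders",
            // A Secrets Manager ARN holding the broker's basic-auth credentials.
            credentials: {
                basicAuth: "arn:aws:secretsmanager:us-east-1:123456789123:secret:broker-creds",
            },
            batchSize: 10,
            maximumBatchingWindowInSeconds: 30,
        },
    },
});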
PipeSourceDynamoDbStreamParameters, PipeSourceDynamoDbStreamParametersArgs            
- StartingPosition Pulumi.AwsNative.Pipes.PipeDynamoDbStreamStartPosition
- (Streams only) The position in a stream from which to start reading. Valid values: TRIM_HORIZON | LATEST.
- BatchSize int
- The maximum number of records to include in each batch.
- DeadLetterConfig Pulumi.AwsNative.Pipes.Inputs.PipeDeadLetterConfig
- Define the target queue to send dead-letter queue events to.
- MaximumBatchingWindowInSeconds int
- The maximum length of time to wait for events.
- MaximumRecordAgeInSeconds int
- Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
- MaximumRetryAttempts int
- Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
- OnPartialBatchItemFailure Pulumi.AwsNative.Pipes.PipeOnPartialBatchItemFailureStreams
- Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
- ParallelizationFactor int
- The number of batches to process concurrently from each shard. The default value is 1.
- StartingPosition PipeDynamoDbStreamStartPosition
- (Streams only) The position in a stream from which to start reading. Valid values: TRIM_HORIZON | LATEST.
- BatchSize int
- The maximum number of records to include in each batch.
- DeadLetterConfig PipeDeadLetterConfig
- Define the target queue to send dead-letter queue events to.
- MaximumBatchingWindowInSeconds int
- The maximum length of time to wait for events.
- MaximumRecordAgeInSeconds int
- Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
- MaximumRetryAttempts int
- Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
- OnPartialBatchItemFailure PipeOnPartialBatchItemFailureStreams
- Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
- ParallelizationFactor int
- The number of batches to process concurrently from each shard. The default value is 1.
- startingPosition PipeDynamoDbStreamStartPosition
- (Streams only) The position in a stream from which to start reading. Valid values: TRIM_HORIZON | LATEST.
- batchSize Integer
- The maximum number of records to include in each batch.
- deadLetterConfig PipeDeadLetterConfig
- Define the target queue to send dead-letter queue events to.
- maximumBatchingWindowInSeconds Integer
- The maximum length of time to wait for events.
- maximumRecordAgeInSeconds Integer
- Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
- maximumRetryAttempts Integer
- Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
- onPartialBatchItemFailure PipeOnPartialBatchItemFailureStreams
- Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
- parallelizationFactor Integer
- The number of batches to process concurrently from each shard. The default value is 1.
- startingPosition PipeDynamoDbStreamStartPosition
- (Streams only) The position in a stream from which to start reading. Valid values: TRIM_HORIZON | LATEST.
- batchSize number
- The maximum number of records to include in each batch.
- deadLetterConfig PipeDeadLetterConfig
- Define the target queue to send dead-letter queue events to.
- maximumBatchingWindowInSeconds number
- The maximum length of time to wait for events.
- maximumRecordAgeInSeconds number
- Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
- maximumRetryAttempts number
- Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
- onPartialBatchItemFailure PipeOnPartialBatchItemFailureStreams
- Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
- parallelizationFactor number
- The number of batches to process concurrently from each shard. The default value is 1.
- starting_position PipeDynamoDbStreamStartPosition
- (Streams only) The position in a stream from which to start reading. Valid values: TRIM_HORIZON | LATEST.
- batch_size int
- The maximum number of records to include in each batch.
- dead_letter_config PipeDeadLetterConfig
- Define the target queue to send dead-letter queue events to.
- maximum_batching_window_in_seconds int
- The maximum length of time to wait for events.
- maximum_record_age_in_seconds int
- Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
- maximum_retry_attempts int
- Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
- on_partial_batch_item_failure PipeOnPartialBatchItemFailureStreams
- Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
- parallelization_factor int
- The number of batches to process concurrently from each shard. The default value is 1.
- startingPosition "TRIM_HORIZON" | "LATEST"
- (Streams only) The position in a stream from which to start reading. Valid values: TRIM_HORIZON | LATEST.
- batchSize Number
- The maximum number of records to include in each batch.
- deadLetterConfig Property Map
- Define the target queue to send dead-letter queue events to.
- maximumBatchingWindowInSeconds Number
- The maximum length of time to wait for events.
- maximumRecordAgeInSeconds Number
- Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
- maximumRetryAttempts Number
- Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
- onPartialBatchItemFailure "AUTOMATIC_BISECT"
- Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
- parallelizationFactor Number
- The number of batches to process concurrently from each shard. The default value is 1.
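A TypeScript sketch combining the retry, bisect, and dead-letter options above, assuming the Pipe resource's sourceParameters input; all ARNs are hypothetical, and the arn field of the dead-letter config is assumed from the PipeDeadLetterConfig type.
import * as aws_native from "@pulumi/aws-native";
const ddbPipe = new aws_native.pipes.Pipe("ddbPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "arn:aws:dynamodb:us-east-1:123456789123:table/orders/stream/2024-01-01T00:00:00.000",
    target: "arn:aws:sqs:us-east-1:123456789123:pipeTargetQueue",
    sourceParameters: {
        dynamoDbStreamParameters: {
            startingPosition: "TRIM_HORIZON",
            batchSize: 100,
            maximumRetryAttempts: 3,
            onPartialBatchItemFailure: "AUTOMATIC_BISECT",
            deadLetterConfig: {
                // Queue that receives events that exhaust their retries.
                arn: "arn:aws:sqs:us-east-1:123456789123:pipeDlq",
            },
        },
    },
});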
PipeSourceKinesisStreamParameters, PipeSourceKinesisStreamParametersArgs          
- StartingPosition Pulumi.AwsNative.Pipes.PipeKinesisStreamStartPosition
- The position in a stream from which to start reading.
- BatchSize int
- The maximum number of records to include in each batch.
- DeadLetterConfig Pulumi.AwsNative.Pipes.Inputs.PipeDeadLetterConfig
- Define the target queue to send dead-letter queue events to.
- MaximumBatchingWindowInSeconds int
- The maximum length of time to wait for events.
- MaximumRecordAgeInSeconds int
- Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
- MaximumRetryAttempts int
- Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
- OnPartialBatchItemFailure Pulumi.AwsNative.Pipes.PipeOnPartialBatchItemFailureStreams
- Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
- ParallelizationFactor int
- The number of batches to process concurrently from each shard. The default value is 1.
- StartingPositionTimestamp string
- With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
- StartingPosition PipeKinesisStreamStartPosition
- The position in a stream from which to start reading.
- BatchSize int
- The maximum number of records to include in each batch.
- DeadLetterConfig PipeDeadLetterConfig
- Define the target queue to send dead-letter queue events to.
- MaximumBatchingWindowInSeconds int
- The maximum length of time to wait for events.
- MaximumRecordAgeInSeconds int
- Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
- MaximumRetryAttempts int
- Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
- OnPartialBatchItemFailure PipeOnPartialBatchItemFailureStreams
- Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
- ParallelizationFactor int
- The number of batches to process concurrently from each shard. The default value is 1.
- StartingPositionTimestamp string
- With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
- startingPosition PipeKinesisStreamStartPosition
- The position in a stream from which to start reading.
- batchSize Integer
- The maximum number of records to include in each batch.
- deadLetterConfig PipeDeadLetterConfig
- Define the target queue to send dead-letter queue events to.
- maximumBatchingWindowInSeconds Integer
- The maximum length of time to wait for events.
- maximumRecordAgeInSeconds Integer
- Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
- maximumRetryAttempts Integer
- Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
- onPartialBatchItemFailure PipeOnPartialBatchItemFailureStreams
- Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
- parallelizationFactor Integer
- The number of batches to process concurrently from each shard. The default value is 1.
- startingPositionTimestamp String
- With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
- startingPosition PipeKinesisStreamStartPosition
- The position in a stream from which to start reading.
- batchSize number
- The maximum number of records to include in each batch.
- deadLetterConfig PipeDeadLetterConfig
- Define the target queue to send dead-letter queue events to.
- maximumBatchingWindowInSeconds number
- The maximum length of time to wait for events.
- maximumRecordAgeInSeconds number
- Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
- maximumRetryAttempts number
- Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
- onPartialBatchItemFailure PipeOnPartialBatchItemFailureStreams
- Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
- parallelizationFactor number
- The number of batches to process concurrently from each shard. The default value is 1.
- startingPositionTimestamp string
- With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
- starting_position PipeKinesisStreamStartPosition
- The position in a stream from which to start reading.
- batch_size int
- The maximum number of records to include in each batch.
- dead_letter_config PipeDeadLetterConfig
- Define the target queue to send dead-letter queue events to.
- maximum_batching_window_in_seconds int
- The maximum length of time to wait for events.
- maximum_record_age_in_seconds int
- Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
- maximum_retry_attempts int
- Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
- on_partial_batch_item_failure PipeOnPartialBatchItemFailureStreams
- Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
- parallelization_factor int
- The number of batches to process concurrently from each shard. The default value is 1.
- starting_position_timestamp str
- With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
- startingPosition "TRIM_HORIZON" | "LATEST" | "AT_TIMESTAMP"
- The position in a stream from which to start reading.
- batchSize Number
- The maximum number of records to include in each batch.
- deadLetterConfig Property Map
- Define the target queue to send dead-letter queue events to.
- maximumBatchingWindowInSeconds Number
- The maximum length of time to wait for events.
- maximumRecordAgeInSeconds Number
- Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
- maximumRetryAttempts Number
- Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
- onPartialBatchItemFailure "AUTOMATIC_BISECT"
- Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
- parallelizationFactor Number
- The number of batches to process concurrently from each shard. The default value is 1.
- startingPositionTimestamp String
- With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
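A TypeScript sketch of a Kinesis source that begins reading at a fixed timestamp, assuming the Pipe resource's sourceParameters input; the ARNs and the timestamp value are placeholders.
import * as aws_native from "@pulumi/aws-native";
const kinesisPipe = new aws_native.pipes.Pipe("kinesisPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "arn:aws:kinesis:us-east-1:123456789123:stream/pipeDemoStream",
    target: "arn:aws:sqs:us-east-1:123456789123:pipeTargetQueue",
    sourceParameters: {
        kinesisStreamParameters: {
            startingPosition: "AT_TIMESTAMP",
            startingPositionTimestamp: "1704067200", // Unix time seconds; used only with AT_TIMESTAMP
            batchSize: 100,
            parallelizationFactor: 2,
        },
    },
});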
PipeSourceManagedStreamingKafkaParameters, PipeSourceManagedStreamingKafkaParametersArgs            
- TopicName string
- The name of the topic that the pipe will read from.
- BatchSize int
- The maximum number of records to include in each batch.
- ConsumerGroupId string
- The name of the destination queue to consume.
- Credentials Pulumi.AwsNative.Pipes.Inputs.PipeMskAccessCredentials0Properties | Pulumi.AwsNative.Pipes.Inputs.PipeMskAccessCredentials1Properties
- The credentials needed to access the resource.
- MaximumBatchingWindowInSeconds int
- The maximum length of time to wait for events.
- StartingPosition Pulumi.AwsNative.Pipes.PipeMskStartPosition
- The position in a stream from which to start reading.
- TopicName string
- The name of the topic that the pipe will read from.
- BatchSize int
- The maximum number of records to include in each batch.
- ConsumerGroupId string
- The name of the destination queue to consume.
- Credentials PipeMskAccessCredentials0Properties | PipeMskAccessCredentials1Properties
- The credentials needed to access the resource.
- MaximumBatchingWindowInSeconds int
- The maximum length of time to wait for events.
- StartingPosition PipeMskStartPosition
- The position in a stream from which to start reading.
- topicName String
- The name of the topic that the pipe will read from.
- batchSize Integer
- The maximum number of records to include in each batch.
- consumerGroupId String
- The name of the destination queue to consume.
- credentials PipeMskAccessCredentials0Properties | PipeMskAccessCredentials1Properties
- The credentials needed to access the resource.
- maximumBatchingWindowInSeconds Integer
- The maximum length of time to wait for events.
- startingPosition PipeMskStartPosition
- The position in a stream from which to start reading.
- topicName string
- The name of the topic that the pipe will read from.
- batchSize number
- The maximum number of records to include in each batch.
- consumerGroupId string
- The name of the destination queue to consume.
- credentials PipeMskAccessCredentials0Properties | PipeMskAccessCredentials1Properties
- The credentials needed to access the resource.
- maximumBatchingWindowInSeconds number
- The maximum length of time to wait for events.
- startingPosition PipeMskStartPosition
- The position in a stream from which to start reading.
- topic_name str
- The name of the topic that the pipe will read from.
- batch_size int
- The maximum number of records to include in each batch.
- consumer_group_id str
- The name of the destination queue to consume.
- credentials PipeMskAccessCredentials0Properties | PipeMskAccessCredentials1Properties
- The credentials needed to access the resource.
- maximum_batching_window_in_seconds int
- The maximum length of time to wait for events.
- starting_position PipeMskStartPosition
- The position in a stream from which to start reading.
- topicName String
- The name of the topic that the pipe will read from.
- batchSize Number
- The maximum number of records to include in each batch.
- consumerGroupId String
- The name of the destination queue to consume.
- credentials Property Map | Property Map
- The credentials needed to access the resource.
- maximumBatchingWindowInSeconds Number
- The maximum length of time to wait for events.
- startingPosition "TRIM_HORIZON" | "LATEST"
- The position in a stream from which to start reading.
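A TypeScript sketch of an MSK source authenticated with SASL/SCRAM-512 (the Credentials0 variant above), assuming the Pipe resource's sourceParameters input; the ARNs are hypothetical.
import * as aws_native from "@pulumi/aws-native";
const mskPipe = new aws_native.pipes.Pipe("mskPipe", {
    roleArn: "arn:aws:iam::123456789123:role/pipe-execution-role",
    source: "arn:aws:kafka:us-east-1:123456789123:cluster/demo/11111111-1111-1111-1111-111111111111-1",
    target: "arn:aws:sqs:us-east-1:123456789123:pipeTargetQueue",
    sourceParameters: {
        managedStreamingKafkaParameters: {
            topicName: "orders",
            consumerGroupId: "pipe-consumers",
            startingPosition: "LATEST",
            credentials: {
                saslScram512Auth: "arn:aws:secretsmanager:us-east-1:123456789123:secret:msk-creds",
            },
        },
    },
});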
PipeSourceParameters, PipeSourceParametersArgs      
- ActiveMq Pulumi.Broker Parameters Aws Native. Pipes. Inputs. Pipe Source Active Mq Broker Parameters 
- The parameters for using an Active MQ broker as a source.
- DynamoDb Pulumi.Stream Parameters Aws Native. Pipes. Inputs. Pipe Source Dynamo Db Stream Parameters 
- The parameters for using a DynamoDB stream as a source.
- FilterCriteria Pulumi.Aws Native. Pipes. Inputs. Pipe Filter Criteria 
- The collection of event patterns used to filter events. - To remove a filter, specify a - FilterCriteriaobject with an empty array of- Filterobjects.- For more information, see Events and Event Patterns in the Amazon EventBridge User Guide . 
- KinesisStream Pulumi.Parameters Aws Native. Pipes. Inputs. Pipe Source Kinesis Stream Parameters 
- The parameters for using a Kinesis stream as a source.
- ManagedStreaming Pulumi.Kafka Parameters Aws Native. Pipes. Inputs. Pipe Source Managed Streaming Kafka Parameters 
- The parameters for using an MSK stream as a source.
- RabbitMq Pulumi.Broker Parameters Aws Native. Pipes. Inputs. Pipe Source Rabbit Mq Broker Parameters 
- The parameters for using a Rabbit MQ broker as a source.
- SelfManaged Pulumi.Kafka Parameters Aws Native. Pipes. Inputs. Pipe Source Self Managed Kafka Parameters 
- The parameters for using a self-managed Apache Kafka stream as a source. - A self managed cluster refers to any Apache Kafka cluster not hosted by AWS . This includes both clusters you manage yourself, as well as those hosted by a third-party provider, such as Confluent Cloud , CloudKarafka , or Redpanda . For more information, see Apache Kafka streams as a source in the Amazon EventBridge User Guide . 
- SqsQueue Pulumi.Parameters Aws Native. Pipes. Inputs. Pipe Source Sqs Queue Parameters 
- The parameters for using a Amazon SQS stream as a source.
- ActiveMq PipeBroker Parameters Source Active Mq Broker Parameters 
- The parameters for using an Active MQ broker as a source.
- DynamoDb PipeStream Parameters Source Dynamo Db Stream Parameters 
- The parameters for using a DynamoDB stream as a source.
- FilterCriteria PipeFilter Criteria 
- The collection of event patterns used to filter events. - To remove a filter, specify a - FilterCriteriaobject with an empty array of- Filterobjects.- For more information, see Events and Event Patterns in the Amazon EventBridge User Guide . 
- KinesisStream PipeParameters Source Kinesis Stream Parameters 
- The parameters for using a Kinesis stream as a source.
- ManagedStreaming PipeKafka Parameters Source Managed Streaming Kafka Parameters 
- The parameters for using an MSK stream as a source.
- RabbitMq PipeBroker Parameters Source Rabbit Mq Broker Parameters 
- The parameters for using a Rabbit MQ broker as a source.
- SelfManaged PipeKafka Parameters Source Self Managed Kafka Parameters 
- The parameters for using a self-managed Apache Kafka stream as a source. - A self managed cluster refers to any Apache Kafka cluster not hosted by AWS . This includes both clusters you manage yourself, as well as those hosted by a third-party provider, such as Confluent Cloud , CloudKarafka , or Redpanda . For more information, see Apache Kafka streams as a source in the Amazon EventBridge User Guide . 
- SqsQueue PipeParameters Source Sqs Queue Parameters 
- The parameters for using a Amazon SQS stream as a source.
- activeMq PipeBroker Parameters Source Active Mq Broker Parameters 
- The parameters for using an Active MQ broker as a source.
- dynamoDb PipeStream Parameters Source Dynamo Db Stream Parameters 
- The parameters for using a DynamoDB stream as a source.
- filterCriteria PipeFilter Criteria 
- The collection of event patterns used to filter events. - To remove a filter, specify a - FilterCriteriaobject with an empty array of- Filterobjects.- For more information, see Events and Event Patterns in the Amazon EventBridge User Guide . 
- kinesisStream PipeParameters Source Kinesis Stream Parameters 
- The parameters for using a Kinesis stream as a source.
- managedStreaming PipeKafka Parameters Source Managed Streaming Kafka Parameters 
- The parameters for using an MSK stream as a source.
- rabbitMq PipeBroker Parameters Source Rabbit Mq Broker Parameters 
- The parameters for using a Rabbit MQ broker as a source.
- selfManaged PipeKafka Parameters Source Self Managed Kafka Parameters 
- The parameters for using a self-managed Apache Kafka stream as a source. - A self managed cluster refers to any Apache Kafka cluster not hosted by AWS . This includes both clusters you manage yourself, as well as those hosted by a third-party provider, such as Confluent Cloud , CloudKarafka , or Redpanda . For more information, see Apache Kafka streams as a source in the Amazon EventBridge User Guide . 
- sqsQueue PipeParameters Source Sqs Queue Parameters 
- The parameters for using a Amazon SQS stream as a source.
- activeMq PipeBroker Parameters Source Active Mq Broker Parameters 
- The parameters for using an Active MQ broker as a source.
- dynamoDb PipeStream Parameters Source Dynamo Db Stream Parameters 
- The parameters for using a DynamoDB stream as a source.
- filterCriteria PipeFilter Criteria 
- The collection of event patterns used to filter events. - To remove a filter, specify a - FilterCriteriaobject with an empty array of- Filterobjects.- For more information, see Events and Event Patterns in the Amazon EventBridge User Guide . 
- kinesisStream PipeParameters Source Kinesis Stream Parameters 
- The parameters for using a Kinesis stream as a source.
- managedStreamingKafkaParameters PipeSourceManagedStreamingKafkaParameters
- The parameters for using an MSK stream as a source.
- rabbitMqBrokerParameters PipeSourceRabbitMqBrokerParameters
- The parameters for using a Rabbit MQ broker as a source.
- selfManagedKafkaParameters PipeSourceSelfManagedKafkaParameters
- The parameters for using a self-managed Apache Kafka stream as a source. A self-managed cluster is any Apache Kafka cluster not hosted by AWS. This includes both clusters you manage yourself and those hosted by a third-party provider, such as Confluent Cloud, CloudKarafka, or Redpanda. For more information, see Apache Kafka streams as a source in the Amazon EventBridge User Guide.
- sqsQueueParameters PipeSourceSqsQueueParameters
- The parameters for using an Amazon SQS stream as a source.
- active_mq_broker_parameters PipeSourceActiveMqBrokerParameters
- The parameters for using an Active MQ broker as a source.
- dynamo_db_stream_parameters PipeSourceDynamoDbStreamParameters
- The parameters for using a DynamoDB stream as a source.
- filter_criteria PipeFilterCriteria
- The collection of event patterns used to filter events. To remove a filter, specify a FilterCriteria object with an empty array of Filter objects. For more information, see Events and Event Patterns in the Amazon EventBridge User Guide.
- kinesis_stream_parameters PipeSourceKinesisStreamParameters
- The parameters for using a Kinesis stream as a source.
- managed_streaming_kafka_parameters PipeSourceManagedStreamingKafkaParameters
- The parameters for using an MSK stream as a source.
- rabbit_mq_broker_parameters PipeSourceRabbitMqBrokerParameters
- The parameters for using a Rabbit MQ broker as a source.
- self_managed_kafka_parameters PipeSourceSelfManagedKafkaParameters
- The parameters for using a self-managed Apache Kafka stream as a source. A self-managed cluster is any Apache Kafka cluster not hosted by AWS. This includes both clusters you manage yourself and those hosted by a third-party provider, such as Confluent Cloud, CloudKarafka, or Redpanda. For more information, see Apache Kafka streams as a source in the Amazon EventBridge User Guide.
- sqs_queue_parameters PipeSourceSqsQueueParameters
- The parameters for using an Amazon SQS stream as a source.
- activeMqBrokerParameters Property Map
- The parameters for using an Active MQ broker as a source.
- dynamoDbStreamParameters Property Map
- The parameters for using a DynamoDB stream as a source.
- filterCriteria Property Map
- The collection of event patterns used to filter events. To remove a filter, specify a FilterCriteria object with an empty array of Filter objects. For more information, see Events and Event Patterns in the Amazon EventBridge User Guide.
- kinesisStreamParameters Property Map
- The parameters for using a Kinesis stream as a source.
- managedStreamingKafkaParameters Property Map
- The parameters for using an MSK stream as a source.
- rabbitMqBrokerParameters Property Map
- The parameters for using a Rabbit MQ broker as a source.
- selfManagedKafkaParameters Property Map
- The parameters for using a self-managed Apache Kafka stream as a source. A self-managed cluster is any Apache Kafka cluster not hosted by AWS. This includes both clusters you manage yourself and those hosted by a third-party provider, such as Confluent Cloud, CloudKarafka, or Redpanda. For more information, see Apache Kafka streams as a source in the Amazon EventBridge User Guide.
- sqsQueueParameters Property Map
- The parameters for using an Amazon SQS stream as a source.
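To make the shape of these source parameters concrete, here is a minimal TypeScript sketch of a pipe that reads a DynamoDB stream and filters to INSERT events before they reach the target. All ARNs are placeholders, and the filter pattern and enum accessor follow the provider's usual naming pattern rather than anything shown on this page:
import * as aws_native from "@pulumi/aws-native";
// Placeholder ARNs; substitute resources from your own account.
const filteredPipe = new aws_native.pipes.Pipe("filteredPipe", {
    roleArn: "arn:aws:iam::111122223333:role/example-pipe-role",
    source: "arn:aws:dynamodb:us-east-1:111122223333:table/example/stream/2024-01-01T00:00:00.000",
    target: "arn:aws:sqs:us-east-1:111122223333:exampleTargetQueue",
    sourceParameters: {
        // Drop everything except INSERT records before invoking the target.
        filterCriteria: {
            filters: [{ pattern: JSON.stringify({ eventName: ["INSERT"] }) }],
        },
        dynamoDbStreamParameters: {
            startingPosition: aws_native.pipes.PipeDynamoDbStreamStartPosition.Latest,
            batchSize: 10,
        },
    },
});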
PipeSourceRabbitMqBrokerParameters, PipeSourceRabbitMqBrokerParametersArgs            
- Credentials Pulumi.AwsNative.Pipes.Inputs.PipeMqBrokerAccessCredentialsProperties
- The credentials needed to access the resource.
- QueueName string
- The name of the destination queue to consume.
- BatchSize int
- The maximum number of records to include in each batch.
- MaximumBatchingWindowInSeconds int
- The maximum length of time to wait for events.
- VirtualHost string
- The name of the virtual host associated with the source broker.
- Credentials PipeMqBrokerAccessCredentialsProperties
- The credentials needed to access the resource.
- QueueName string
- The name of the destination queue to consume.
- BatchSize int
- The maximum number of records to include in each batch.
- MaximumBatchingWindowInSeconds int
- The maximum length of time to wait for events.
- VirtualHost string
- The name of the virtual host associated with the source broker.
- credentials PipeMqBrokerAccessCredentialsProperties
- The credentials needed to access the resource.
- queueName String
- The name of the destination queue to consume.
- batchSize Integer
- The maximum number of records to include in each batch.
- maximumBatchingWindowInSeconds Integer
- The maximum length of time to wait for events.
- virtualHost String
- The name of the virtual host associated with the source broker.
- credentials PipeMqBrokerAccessCredentialsProperties
- The credentials needed to access the resource.
- queueName string
- The name of the destination queue to consume.
- batchSize number
- The maximum number of records to include in each batch.
- maximumBatchingWindowInSeconds number
- The maximum length of time to wait for events.
- virtualHost string
- The name of the virtual host associated with the source broker.
- credentials PipeMqBrokerAccessCredentialsProperties
- The credentials needed to access the resource.
- queue_name str
- The name of the destination queue to consume.
- batch_size int
- The maximum number of records to include in each batch.
- maximum_batching_window_in_seconds int
- The maximum length of time to wait for events.
- virtual_host str
- The name of the virtual host associated with the source broker.
- credentials Property Map
- The credentials needed to access the resource.
- queueName String
- The name of the destination queue to consume.
- batchSize Number
- The maximum number of records to include in each batch.
- maximumBatchingWindowInSeconds Number
- The maximum length of time to wait for events.
- virtualHost String
- The name of the virtual host associated with the source broker.
PipeSourceSelfManagedKafkaParameters, PipeSourceSelfManagedKafkaParametersArgs            
- TopicName string
- The name of the topic that the pipe will read from.
- AdditionalBootstrapServers List<string>
- An array of server URLs.
- BatchSize int
- The maximum number of records to include in each batch.
- ConsumerGroupId string
- The ID of the consumer group to use.
- Credentials Pulumi.AwsNative.Pipes.Inputs.PipeSelfManagedKafkaAccessConfigurationCredentials0Properties | Pulumi.AwsNative.Pipes.Inputs.PipeSelfManagedKafkaAccessConfigurationCredentials1Properties | Pulumi.AwsNative.Pipes.Inputs.PipeSelfManagedKafkaAccessConfigurationCredentials2Properties | Pulumi.AwsNative.Pipes.Inputs.PipeSelfManagedKafkaAccessConfigurationCredentials3Properties
- The credentials needed to access the resource.
- MaximumBatchingWindowInSeconds int
- The maximum length of time to wait for events.
- ServerRootCaCertificate string
- Optional Secrets Manager ARN which stores the server root CA certificate.
- StartingPosition Pulumi.AwsNative.Pipes.PipeSelfManagedKafkaStartPosition
- The position in a stream from which to start reading.
- Vpc Pulumi.AwsNative.Pipes.Inputs.PipeSelfManagedKafkaAccessConfigurationVpc
- This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used.
- TopicName string
- The name of the topic that the pipe will read from.
- AdditionalBootstrapServers []string
- An array of server URLs.
- BatchSize int
- The maximum number of records to include in each batch.
- ConsumerGroupId string
- The ID of the consumer group to use.
- Credentials PipeSelfManagedKafkaAccessConfigurationCredentials0Properties | PipeSelfManagedKafkaAccessConfigurationCredentials1Properties | PipeSelfManagedKafkaAccessConfigurationCredentials2Properties | PipeSelfManagedKafkaAccessConfigurationCredentials3Properties
- The credentials needed to access the resource.
- MaximumBatchingWindowInSeconds int
- The maximum length of time to wait for events.
- ServerRootCaCertificate string
- Optional Secrets Manager ARN which stores the server root CA certificate.
- StartingPosition PipeSelfManagedKafkaStartPosition
- The position in a stream from which to start reading.
- Vpc PipeSelfManagedKafkaAccessConfigurationVpc
- This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used.
- topicName String
- The name of the topic that the pipe will read from.
- additionalBootstrapServers List<String>
- An array of server URLs.
- batchSize Integer
- The maximum number of records to include in each batch.
- consumerGroupId String
- The ID of the consumer group to use.
- credentials PipeSelfManagedKafkaAccessConfigurationCredentials0Properties | PipeSelfManagedKafkaAccessConfigurationCredentials1Properties | PipeSelfManagedKafkaAccessConfigurationCredentials2Properties | PipeSelfManagedKafkaAccessConfigurationCredentials3Properties
- The credentials needed to access the resource.
- maximumBatchingWindowInSeconds Integer
- The maximum length of time to wait for events.
- serverRootCaCertificate String
- Optional Secrets Manager ARN which stores the server root CA certificate.
- startingPosition PipeSelfManagedKafkaStartPosition
- The position in a stream from which to start reading.
- vpc PipeSelfManagedKafkaAccessConfigurationVpc
- This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used.
- topicName string
- The name of the topic that the pipe will read from.
- additionalBootstrapServers string[]
- An array of server URLs.
- batchSize number
- The maximum number of records to include in each batch.
- consumerGroupId string
- The ID of the consumer group to use.
- credentials PipeSelfManagedKafkaAccessConfigurationCredentials0Properties | PipeSelfManagedKafkaAccessConfigurationCredentials1Properties | PipeSelfManagedKafkaAccessConfigurationCredentials2Properties | PipeSelfManagedKafkaAccessConfigurationCredentials3Properties
- The credentials needed to access the resource.
- maximumBatchingWindowInSeconds number
- The maximum length of time to wait for events.
- serverRootCaCertificate string
- Optional Secrets Manager ARN which stores the server root CA certificate.
- startingPosition PipeSelfManagedKafkaStartPosition
- The position in a stream from which to start reading.
- vpc PipeSelfManagedKafkaAccessConfigurationVpc
- This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used.
- topic_name str
- The name of the topic that the pipe will read from.
- additional_bootstrap_servers Sequence[str]
- An array of server URLs.
- batch_size int
- The maximum number of records to include in each batch.
- consumer_group_id str
- The ID of the consumer group to use.
- credentials PipeSelfManagedKafkaAccessConfigurationCredentials0Properties | PipeSelfManagedKafkaAccessConfigurationCredentials1Properties | PipeSelfManagedKafkaAccessConfigurationCredentials2Properties | PipeSelfManagedKafkaAccessConfigurationCredentials3Properties
- The credentials needed to access the resource.
- maximum_batching_window_in_seconds int
- The maximum length of time to wait for events.
- server_root_ca_certificate str
- Optional Secrets Manager ARN which stores the server root CA certificate.
- starting_position PipeSelfManagedKafkaStartPosition
- The position in a stream from which to start reading.
- vpc PipeSelfManagedKafkaAccessConfigurationVpc
- This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used.
- topicName String
- The name of the topic that the pipe will read from.
- additionalBootstrapServers List<String>
- An array of server URLs.
- batchSize Number
- The maximum number of records to include in each batch.
- consumerGroupId String
- The ID of the consumer group to use.
- credentials Property Map | Property Map | Property Map | Property Map
- The credentials needed to access the resource.
- maximumBatchingWindowInSeconds Number
- The maximum length of time to wait for events.
- serverRootCaCertificate String
- Optional Secrets Manager ARN which stores the server root CA certificate.
- startingPosition "TRIM_HORIZON" | "LATEST"
- The position in a stream from which to start reading.
- vpc Property Map
- This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used.
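A hedged sketch of these fields in use. Several details here are assumptions not shown on this page: the smk:// source address format for self-managed clusters, the basicAuth variant of the credentials union, and the subnets/securityGroup field names of the VPC access configuration. Verify each against the EventBridge Pipes documentation before relying on it:
import * as aws_native from "@pulumi/aws-native";
const kafkaPipe = new aws_native.pipes.Pipe("kafkaPipe", {
    roleArn: "arn:aws:iam::111122223333:role/example-pipe-role",
    source: "smk://broker-1.example.com:9092", // assumed addressing scheme for self-managed Kafka
    target: "arn:aws:sqs:us-east-1:111122223333:exampleTargetQueue",
    sourceParameters: {
        selfManagedKafkaParameters: {
            topicName: "events",
            consumerGroupId: "pipe-consumers",
            startingPosition: aws_native.pipes.PipeSelfManagedKafkaStartPosition.TrimHorizon,
            additionalBootstrapServers: ["broker-2.example.com:9092"],
            batchSize: 100,
            // Assumed Credentials0Properties shape: basic auth via a Secrets Manager ARN.
            credentials: { basicAuth: "arn:aws:secretsmanager:us-east-1:111122223333:secret:example" },
            // Field names assumed for PipeSelfManagedKafkaAccessConfigurationVpc.
            vpc: {
                subnets: ["subnet-0123456789abcdef0"],
                securityGroup: ["sg-0123456789abcdef0"],
            },
        },
    },
});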
PipeSourceSqsQueueParameters, PipeSourceSqsQueueParametersArgs          
- BatchSize int
- The maximum number of records to include in each batch.
- MaximumBatchingWindowInSeconds int
- The maximum length of time to wait for events.
- BatchSize int
- The maximum number of records to include in each batch.
- MaximumBatchingWindowInSeconds int
- The maximum length of time to wait for events.
- batchSize Integer
- The maximum number of records to include in each batch.
- maximumBatchingWindowInSeconds Integer
- The maximum length of time to wait for events.
- batchSize number
- The maximum number of records to include in each batch.
- maximumBatchingWindowInSeconds number
- The maximum length of time to wait for events.
- batch_size int
- The maximum number of records to include in each batch.
- maximum_batching_window_in_seconds int
- The maximum length of time to wait for events.
- batchSize Number
- The maximum number of records to include in each batch.
- maximumBatchingWindowInSeconds Number
- The maximum length of time to wait for events.
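Both fields tune delivery batching. A minimal sketch (placeholder ARNs) that drains a queue in batches of up to 10 records, waiting at most 20 seconds to fill a batch:
import * as aws_native from "@pulumi/aws-native";
const sqsPipe = new aws_native.pipes.Pipe("sqsPipe", {
    roleArn: "arn:aws:iam::111122223333:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111122223333:exampleSourceQueue",
    target: "arn:aws:states:us-east-1:111122223333:stateMachine:exampleMachine",
    sourceParameters: {
        sqsQueueParameters: {
            batchSize: 10,                      // up to 10 records per invocation
            maximumBatchingWindowInSeconds: 20, // or flush after 20 seconds, whichever comes first
        },
    },
});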
PipeState, PipeStateArgs    
- Running
- RUNNING
- Stopped
- STOPPED
- Creating
- CREATING
- Updating
- UPDATING
- Deleting
- DELETING
- Starting
- STARTING
- Stopping
- STOPPING
- CreateFailed 
- CREATE_FAILED
- UpdateFailed 
- UPDATE_FAILED
- StartFailed 
- START_FAILED
- StopFailed 
- STOP_FAILED
- DeleteFailed 
- DELETE_FAILED
- CreateRollbackFailed
- CREATE_ROLLBACK_FAILED
- DeleteRollbackFailed
- DELETE_ROLLBACK_FAILED
- UpdateRollbackFailed
- UPDATE_ROLLBACK_FAILED
- PipeStateRunning
- RUNNING
- PipeStateStopped
- STOPPED
- PipeStateCreating
- CREATING
- PipeStateUpdating
- UPDATING
- PipeStateDeleting
- DELETING
- PipeStateStarting
- STARTING
- PipeStateStopping
- STOPPING
- PipeStateCreateFailed
- CREATE_FAILED
- PipeStateUpdateFailed
- UPDATE_FAILED
- PipeStateStartFailed
- START_FAILED
- PipeStateStopFailed
- STOP_FAILED
- PipeStateDeleteFailed
- DELETE_FAILED
- PipeStateCreateRollbackFailed
- CREATE_ROLLBACK_FAILED
- PipeStateDeleteRollbackFailed
- DELETE_ROLLBACK_FAILED
- PipeStateUpdateRollbackFailed
- UPDATE_ROLLBACK_FAILED
- Running
- RUNNING
- Stopped
- STOPPED
- Creating
- CREATING
- Updating
- UPDATING
- Deleting
- DELETING
- Starting
- STARTING
- Stopping
- STOPPING
- CreateFailed 
- CREATE_FAILED
- UpdateFailed 
- UPDATE_FAILED
- StartFailed 
- START_FAILED
- StopFailed 
- STOP_FAILED
- DeleteFailed 
- DELETE_FAILED
- CreateRollbackFailed
- CREATE_ROLLBACK_FAILED
- DeleteRollbackFailed
- DELETE_ROLLBACK_FAILED
- UpdateRollbackFailed
- UPDATE_ROLLBACK_FAILED
- Running
- RUNNING
- Stopped
- STOPPED
- Creating
- CREATING
- Updating
- UPDATING
- Deleting
- DELETING
- Starting
- STARTING
- Stopping
- STOPPING
- CreateFailed 
- CREATE_FAILED
- UpdateFailed 
- UPDATE_FAILED
- StartFailed 
- START_FAILED
- StopFailed 
- STOP_FAILED
- DeleteFailed 
- DELETE_FAILED
- CreateRollbackFailed
- CREATE_ROLLBACK_FAILED
- DeleteRollbackFailed
- DELETE_ROLLBACK_FAILED
- UpdateRollbackFailed
- UPDATE_ROLLBACK_FAILED
- RUNNING
- RUNNING
- STOPPED
- STOPPED
- CREATING
- CREATING
- UPDATING
- UPDATING
- DELETING
- DELETING
- STARTING
- STARTING
- STOPPING
- STOPPING
- CREATE_FAILED
- CREATE_FAILED
- UPDATE_FAILED
- UPDATE_FAILED
- START_FAILED
- START_FAILED
- STOP_FAILED
- STOP_FAILED
- DELETE_FAILED
- DELETE_FAILED
- CREATE_ROLLBACK_FAILED
- CREATE_ROLLBACK_FAILED
- DELETE_ROLLBACK_FAILED
- DELETE_ROLLBACK_FAILED
- UPDATE_ROLLBACK_FAILED
- UPDATE_ROLLBACK_FAILED
- "RUNNING"
- RUNNING
- "STOPPED"
- STOPPED
- "CREATING"
- CREATING
- "UPDATING"
- UPDATING
- "DELETING"
- DELETING
- "STARTING"
- STARTING
- "STOPPING"
- STOPPING
- "CREATE_FAILED"
- CREATE_FAILED
- "UPDATE_FAILED"
- UPDATE_FAILED
- "START_FAILED"
- START_FAILED
- "STOP_FAILED"
- STOP_FAILED
- "DELETE_FAILED"
- DELETE_FAILED
- "CREATE_ROLLBACK_FAILED"
- CREATE_ROLLBACK_FAILED
- "DELETE_ROLLBACK_FAILED"
- DELETE_ROLLBACK_FAILED
- "UPDATE_ROLLBACK_FAILED"
- UPDATE_ROLLBACK_FAILED
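The pairs above map SDK enum names to the raw state strings. These are lifecycle states reported by the service, not inputs. Assuming the provider's standard getPipe invoke and a currentState output (neither is shown in this listing), a sketch that surfaces the state of an existing pipe:
import * as aws_native from "@pulumi/aws-native";
// The name is a placeholder for a pipe that already exists in the account.
const existing = aws_native.pipes.getPipeOutput({ name: "example-pipe" });
// Resolves to one of the PipeState values above, e.g. "RUNNING" or "CREATE_FAILED".
export const pipeLifecycleState = existing.currentState;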
PipeTag, PipeTagArgs    
PipeTargetBatchJobParameters, PipeTargetBatchJobParametersArgs          
- JobDefinition string
- The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision, then the latest active revision is used.
- JobName string
- The name of the job. It can be up to 128 letters long. The first character must be alphanumeric; the name can contain uppercase and lowercase letters, numbers, hyphens (-), and underscores (_).
- ArrayProperties Pulumi.AwsNative.Pipes.Inputs.PipeBatchArrayProperties
- The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job.
- ContainerOverrides Pulumi.AwsNative.Pipes.Inputs.PipeBatchContainerOverrides
- The overrides that are sent to a container.
- DependsOn List<Pulumi.AwsNative.Pipes.Inputs.PipeBatchJobDependency>
- A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin.
- Parameters Dictionary<string, string>
- Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition.
- RetryStrategy Pulumi.AwsNative.Pipes.Inputs.PipeBatchRetryStrategy
- The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition.
- JobDefinition string
- The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision, then the latest active revision is used.
- JobName string
- The name of the job. It can be up to 128 letters long. The first character must be alphanumeric; the name can contain uppercase and lowercase letters, numbers, hyphens (-), and underscores (_).
- ArrayProperties PipeBatchArrayProperties
- The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job.
- ContainerOverrides PipeBatchContainerOverrides
- The overrides that are sent to a container.
- DependsOn []PipeBatchJobDependency
- A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin.
- Parameters map[string]string
- Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition.
- RetryStrategy PipeBatchRetryStrategy
- The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition.
- jobDefinition String
- The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision, then the latest active revision is used.
- jobName String
- The name of the job. It can be up to 128 letters long. The first character must be alphanumeric; the name can contain uppercase and lowercase letters, numbers, hyphens (-), and underscores (_).
- arrayProperties PipeBatchArrayProperties
- The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job.
- containerOverrides PipeBatchContainerOverrides
- The overrides that are sent to a container.
- dependsOn List<PipeBatchJobDependency>
- A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin.
- parameters Map<String,String>
- Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition.
- retryStrategy PipeBatchRetryStrategy
- The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition.
- jobDefinition string
- The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision, then the latest active revision is used.
- jobName string
- The name of the job. It can be up to 128 letters long. The first character must be alphanumeric; the name can contain uppercase and lowercase letters, numbers, hyphens (-), and underscores (_).
- arrayProperties PipeBatchArrayProperties
- The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job.
- containerOverrides PipeBatchContainerOverrides
- The overrides that are sent to a container.
- dependsOn PipeBatchJobDependency[]
- A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin.
- parameters {[key: string]: string}
- Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition.
- retryStrategy PipeBatchRetryStrategy
- The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition.
- job_definition str
- The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision, then the latest active revision is used.
- job_name str
- The name of the job. It can be up to 128 letters long. The first character must be alphanumeric; the name can contain uppercase and lowercase letters, numbers, hyphens (-), and underscores (_).
- array_properties PipeBatchArrayProperties
- The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job.
- container_overrides PipeBatchContainerOverrides
- The overrides that are sent to a container.
- depends_on Sequence[PipeBatchJobDependency]
- A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin.
- parameters Mapping[str, str]
- Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition.
- retry_strategy PipeBatchRetryStrategy
- The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition.
- jobDefinition String
- The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision, then the latest active revision is used.
- jobName String
- The name of the job. It can be up to 128 letters long. The first character must be alphanumeric; the name can contain uppercase and lowercase letters, numbers, hyphens (-), and underscores (_).
- arrayProperties Property Map
- The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job.
- containerOverrides Property Map
- The overrides that are sent to a container.
- dependsOn List<Property Map>
- A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin.
- parameters Map<String>
- Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition.
- retryStrategy Property Map
- The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition.
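A hedged sketch of a pipe that submits an AWS Batch job per batch of source events. The ARNs, the name:revision job definition, and the parameter key are placeholders; retryStrategy and parameters illustrate the override behavior described above:
import * as aws_native from "@pulumi/aws-native";
const batchPipe = new aws_native.pipes.Pipe("batchPipe", {
    roleArn: "arn:aws:iam::111122223333:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111122223333:exampleSourceQueue",
    target: "arn:aws:batch:us-east-1:111122223333:job-queue/exampleJobQueue",
    targetParameters: {
        batchJobParameters: {
            jobDefinition: "exampleJobDefinition:3", // name:revision form
            jobName: "pipe-submitted-job",
            // Overrides matching substitution placeholders in the job definition.
            parameters: { inputLocation: "events/latest" },
            retryStrategy: { attempts: 2 }, // overrides the job definition's retry strategy
        },
    },
});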
PipeTargetCloudWatchLogsParameters, PipeTargetCloudWatchLogsParametersArgs            
- LogStreamName string
- The name of the log stream.
- Timestamp string
- A dynamic path parameter to a field in the payload containing the time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. The value cannot be a static timestamp, as the provided timestamp would be applied to all events delivered by the Pipe, regardless of when they are actually delivered. If no dynamic path parameter is provided, the default value is the time the invocation is processed by the Pipe.
- LogStreamName string
- The name of the log stream.
- Timestamp string
- A dynamic path parameter to a field in the payload containing the time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. The value cannot be a static timestamp, as the provided timestamp would be applied to all events delivered by the Pipe, regardless of when they are actually delivered. If no dynamic path parameter is provided, the default value is the time the invocation is processed by the Pipe.
- logStreamName String
- The name of the log stream.
- timestamp String
- A dynamic path parameter to a field in the payload containing the time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. The value cannot be a static timestamp, as the provided timestamp would be applied to all events delivered by the Pipe, regardless of when they are actually delivered. If no dynamic path parameter is provided, the default value is the time the invocation is processed by the Pipe.
- logStreamName string
- The name of the log stream.
- timestamp string
- A dynamic path parameter to a field in the payload containing the time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. The value cannot be a static timestamp, as the provided timestamp would be applied to all events delivered by the Pipe, regardless of when they are actually delivered. If no dynamic path parameter is provided, the default value is the time the invocation is processed by the Pipe.
- log_stream_name str
- The name of the log stream.
- timestamp str
- A dynamic path parameter to a field in the payload containing the time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. The value cannot be a static timestamp, as the provided timestamp would be applied to all events delivered by the Pipe, regardless of when they are actually delivered. If no dynamic path parameter is provided, the default value is the time the invocation is processed by the Pipe.
- logStreamName String
- The name of the log stream.
- timestamp String
- A dynamic path parameter to a field in the payload containing the time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. The value cannot be a static timestamp, as the provided timestamp would be applied to all events delivered by the Pipe, regardless of when they are actually delivered. If no dynamic path parameter is provided, the default value is the time the invocation is processed by the Pipe.
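A sketch of this target in use. The cloudWatchLogsParameters key on targetParameters and the dynamic path into the payload are assumptions (this listing documents the type's fields, not where it hangs off the Pipe resource); ARNs are placeholders:
import * as aws_native from "@pulumi/aws-native";
const logsPipe = new aws_native.pipes.Pipe("logsPipe", {
    roleArn: "arn:aws:iam::111122223333:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111122223333:exampleSourceQueue",
    target: "arn:aws:logs:us-east-1:111122223333:log-group:/pipes/example:*",
    targetParameters: {
        cloudWatchLogsParameters: {
            logStreamName: "pipe-deliveries",
            // Dynamic path parameter (illustrative): stamp entries with the event's
            // own epoch-millisecond time instead of the delivery time.
            timestamp: "$.body.eventTimeMs",
        },
    },
});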
PipeTargetEcsTaskParameters, PipeTargetEcsTaskParametersArgs          
- TaskDefinitionArn string
- The ARN of the task definition to use if the event target is an Amazon ECS task.
- CapacityProviderStrategy List<Pulumi.AwsNative.Pipes.Inputs.PipeCapacityProviderStrategyItem>
- The capacity provider strategy to use for the task. If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used.
- EnableEcsManagedTags bool
- Specifies whether to enable Amazon ECS managed tags for the task. For more information, see Tagging Your Amazon ECS Resources in the Amazon Elastic Container Service Developer Guide.
- EnableExecuteCommand bool
- Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task.
- Group string
- Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
- LaunchType Pulumi.AwsNative.Pipes.PipeLaunchType
- Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. For more information, see AWS Fargate on Amazon ECS in the Amazon Elastic Container Service Developer Guide.
- NetworkConfiguration Pulumi.AwsNative.Pipes.Inputs.PipeNetworkConfiguration
- Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks. If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails.
- Overrides Pulumi.AwsNative.Pipes.Inputs.PipeEcsTaskOverride
- The overrides that are associated with a task.
- PlacementConstraints List<Pulumi.AwsNative.Pipes.Inputs.PipePlacementConstraint>
- An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime).
- PlacementStrategy List<Pulumi.AwsNative.Pipes.Inputs.PipePlacementStrategy>
- The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task.
- PlatformVersion string
- Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0. This structure is used only if LaunchType is FARGATE. For more information about valid platform versions, see AWS Fargate Platform Versions in the Amazon Elastic Container Service Developer Guide.
- PropagateTags Pulumi.AwsNative.Pipes.PipePropagateTags
- Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action.
- ReferenceId string
- The reference ID to use for the task.
- Tags List<Pulumi.AwsNative.Pipes.Inputs.PipeTag>
- The metadata that you apply to the task to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define. To learn more, see RunTask in the Amazon ECS API Reference.
- TaskCount int
- The number of tasks to create based on TaskDefinition. The default is 1.
- TaskDefinitionArn string
- The ARN of the task definition to use if the event target is an Amazon ECS task.
- CapacityProviderStrategy []PipeCapacityProviderStrategyItem
- The capacity provider strategy to use for the task. If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used.
- EnableEcsManagedTags bool
- Specifies whether to enable Amazon ECS managed tags for the task. For more information, see Tagging Your Amazon ECS Resources in the Amazon Elastic Container Service Developer Guide.
- EnableExecuteCommand bool
- Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task.
- Group string
- Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
- LaunchType PipeLaunchType
- Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. For more information, see AWS Fargate on Amazon ECS in the Amazon Elastic Container Service Developer Guide.
- NetworkConfiguration PipeNetworkConfiguration
- Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks. If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails.
- Overrides PipeEcsTaskOverride
- The overrides that are associated with a task.
- PlacementConstraints []PipePlacementConstraint
- An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime).
- PlacementStrategy []PipePlacementStrategy
- The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task.
- PlatformVersion string
- Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0. This structure is used only if LaunchType is FARGATE. For more information about valid platform versions, see AWS Fargate Platform Versions in the Amazon Elastic Container Service Developer Guide.
- PropagateTags PipePropagateTags
- Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action.
- ReferenceId string
- The reference ID to use for the task.
- Tags []PipeTag
- The metadata that you apply to the task to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define. To learn more, see RunTask in the Amazon ECS API Reference.
- TaskCount int
- The number of tasks to create based on TaskDefinition. The default is 1.
- taskDefinitionArn String
- The ARN of the task definition to use if the event target is an Amazon ECS task.
- capacityProviderStrategy List<PipeCapacityProviderStrategyItem>
- The capacity provider strategy to use for the task. If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used.
- enableEcsManagedTags Boolean
- Specifies whether to enable Amazon ECS managed tags for the task. For more information, see Tagging Your Amazon ECS Resources in the Amazon Elastic Container Service Developer Guide.
- enableExecuteCommand Boolean
- Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task.
- group String
- Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
- launchType PipeLaunchType
- Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. For more information, see AWS Fargate on Amazon ECS in the Amazon Elastic Container Service Developer Guide.
- networkConfiguration PipeNetworkConfiguration
- Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks. If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails.
- overrides PipeEcsTaskOverride
- The overrides that are associated with a task.
- placementConstraints List<PipePlacementConstraint>
- An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime).
- placementStrategy List<PipePlacementStrategy>
- The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task.
- platformVersion String
- Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0. This structure is used only if LaunchType is FARGATE. For more information about valid platform versions, see AWS Fargate Platform Versions in the Amazon Elastic Container Service Developer Guide.
- propagateTags PipePropagateTags
- Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action.
- referenceId String
- The reference ID to use for the task.
- tags List<PipeTag>
- The metadata that you apply to the task to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define. To learn more, see RunTask in the Amazon ECS API Reference.
- taskCount Integer
- The number of tasks to create based on TaskDefinition. The default is 1.
- taskDefinitionArn string
- The ARN of the task definition to use if the event target is an Amazon ECS task.
- capacityProviderStrategy PipeCapacityProviderStrategyItem[]
- The capacity provider strategy to use for the task. If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used.
- enableEcsManagedTags boolean
- Specifies whether to enable Amazon ECS managed tags for the task. For more information, see Tagging Your Amazon ECS Resources in the Amazon Elastic Container Service Developer Guide.
- enableExecuteCommand boolean
- Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task.
- group string
- Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
- launchType PipeLaunchType
- Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. For more information, see AWS Fargate on Amazon ECS in the Amazon Elastic Container Service Developer Guide.
- networkConfiguration PipeNetworkConfiguration
- Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks. If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails.
- overrides PipeEcsTaskOverride
- The overrides that are associated with a task.
- placementConstraints PipePlacementConstraint[]
- An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime).
- placementStrategy PipePlacementStrategy[]
- The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task.
- platformVersion string
- Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0. This structure is used only if LaunchType is FARGATE. For more information about valid platform versions, see AWS Fargate Platform Versions in the Amazon Elastic Container Service Developer Guide.
- propagateTags PipePropagateTags
- Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action.
- referenceId string
- The reference ID to use for the task.
- tags PipeTag[]
- The metadata that you apply to the task to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define. To learn more, see RunTask in the Amazon ECS API Reference.
- taskCount number
- The number of tasks to create based on TaskDefinition. The default is 1.
- task_definition_arn str
- The ARN of the task definition to use if the event target is an Amazon ECS task.
- capacity_provider_strategy Sequence[PipeCapacityProviderStrategyItem]
- The capacity provider strategy to use for the task. If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used.
- enable_ecs_managed_tags bool
- Specifies whether to enable Amazon ECS managed tags for the task. For more information, see Tagging Your Amazon ECS Resources in the Amazon Elastic Container Service Developer Guide.
- enable_execute_command bool
- Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task.
- group str
- Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
- launch_type PipeLaunchType
- Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. For more information, see AWS Fargate on Amazon ECS in the Amazon Elastic Container Service Developer Guide.
- network_configuration PipeNetworkConfiguration
- Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks. If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails.
- overrides PipeEcsTaskOverride
- The overrides that are associated with a task.
- placement_constraints Sequence[PipePlacementConstraint]
- An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime).
- placement_strategy Sequence[PipePlacementStrategy]
- The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task.
- platform_version str
- Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0. This structure is used only if LaunchType is FARGATE. For more information about valid platform versions, see AWS Fargate Platform Versions in the Amazon Elastic Container Service Developer Guide.
- propagate_tags PipePropagateTags
- Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action.
- reference_id str
- The reference ID to use for the task.
- tags Sequence[PipeTag]
- The metadata that you apply to the task to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define. To learn more, see RunTask in the Amazon ECS API Reference.
- task_count int
- The number of tasks to create based on TaskDefinition. The default is 1.
- taskDefinitionArn String
- The ARN of the task definition to use if the event target is an Amazon ECS task.
- capacityProviderStrategy List<Property Map>
- The capacity provider strategy to use for the task. If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used.
- enableEcsManagedTags Boolean
- Specifies whether to enable Amazon ECS managed tags for the task. For more information, see Tagging Your Amazon ECS Resources in the Amazon Elastic Container Service Developer Guide.
- enableExecuteCommand Boolean
- Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task.
- group String
- Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
- launchType "EC2" | "FARGATE" | "EXTERNAL"
- Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. For more information, see AWS Fargate on Amazon ECS in the Amazon Elastic Container Service Developer Guide.
- networkConfiguration Property Map
- Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks. If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails.
- overrides Property Map
- The overrides that are associated with a task.
- placementConstraints List<Property Map>
- An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime).
- placementStrategy List<Property Map>
- The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task.
- platformVersion String
- Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0. This structure is used only if LaunchType is FARGATE. For more information about valid platform versions, see AWS Fargate Platform Versions in the Amazon Elastic Container Service Developer Guide.
- propagateTags "TASK_DEFINITION"
- Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action.
- referenceId String
- The reference ID to use for the task.
- tags List<Property Map>
- The metadata that you apply to the task to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define. To learn more, see RunTask in the Amazon ECS API Reference.
- taskCount Number
- The number of tasks to create based on TaskDefinition. The default is 1.
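A sketch of a Fargate task target under the constraints described above: launchType FARGATE requires awsvpc networking, so networkConfiguration is supplied. The awsvpcConfiguration field names are assumptions about PipeNetworkConfiguration, whose fields this listing does not expand; ARNs and IDs are placeholders:
import * as aws_native from "@pulumi/aws-native";
const ecsPipe = new aws_native.pipes.Pipe("ecsPipe", {
    roleArn: "arn:aws:iam::111122223333:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111122223333:exampleSourceQueue",
    target: "arn:aws:ecs:us-east-1:111122223333:cluster/exampleCluster",
    targetParameters: {
        ecsTaskParameters: {
            taskDefinitionArn: "arn:aws:ecs:us-east-1:111122223333:task-definition/example:1",
            launchType: aws_native.pipes.PipeLaunchType.Fargate,
            taskCount: 1,
            // Required for FARGATE because the awsvpc network mode is mandatory there.
            networkConfiguration: {
                awsvpcConfiguration: {
                    subnets: ["subnet-0123456789abcdef0"],
                    securityGroups: ["sg-0123456789abcdef0"],
                },
            },
        },
    },
});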
PipeTargetEventBridgeEventBusParameters, PipeTargetEventBridgeEventBusParametersArgs              
- DetailType string
- A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
- EndpointId string
- The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
- Resources List<string>
- AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
- Source string
- The source of the event.
- Time string
- The timestamp of the event, per RFC3339. If no timestamp is provided, the timestamp of the PutEvents call is used.
- DetailType string
- A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
- EndpointId string
- The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
- Resources []string
- AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
- Source string
- The source of the event.
- Time string
- The timestamp of the event, per RFC3339. If no timestamp is provided, the timestamp of the PutEvents call is used.
- detailType String
- A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
- endpointId String
- The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
- resources List<String>
- AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
- source String
- The source of the event.
- time String
- The timestamp of the event, per RFC3339. If no timestamp is provided, the timestamp of the PutEvents call is used.
- detailType string
- A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
- endpointId string
- The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
- resources string[]
- AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
- source string
- The source of the event.
- time string
- The timestamp of the event, per RFC3339. If no timestamp is provided, the timestamp of the PutEvents call is used.
- detail_type str
- A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
- endpoint_id str
- The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
- resources Sequence[str]
- AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
- source str
- The source of the event.
- time str
- The timestamp of the event, per RFC3339. If no timestamp is provided, the timestamp of the PutEvents call is used.
- detailType String
- A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
- endpointId String
- The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
- resources List<String>
- AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
- source String
- The source of the event.
- time String
- The timestamp of the event, per RFC3339. If no timestamp is provided, the timestamp of the PutEvents call is used.
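These fields shape the event that the pipe puts on the destination bus. A minimal sketch (placeholder ARNs; source and detailType values are illustrative):
import * as aws_native from "@pulumi/aws-native";
const busPipe = new aws_native.pipes.Pipe("busPipe", {
    roleArn: "arn:aws:iam::111122223333:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111122223333:exampleSourceQueue",
    target: "arn:aws:events:us-east-1:111122223333:event-bus/exampleBus",
    targetParameters: {
        eventBridgeEventBusParameters: {
            source: "com.example.orders", // becomes the event's source field
            detailType: "OrderReceived",  // free-form, up to 128 characters
            resources: ["arn:aws:sqs:us-east-1:111122223333:exampleSourceQueue"],
        },
    },
});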
PipeTargetHttpParameters, PipeTargetHttpParametersArgs        
- HeaderParameters Dictionary<string, string>
- The headers to send as part of the request when invoking the API Gateway REST API or EventBridge ApiDestination.
- PathParameterValues List<string>
- The path parameter values used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
- QueryStringParameters Dictionary<string, string>
- The query string keys and values to send as part of the request when invoking the API Gateway REST API or EventBridge ApiDestination.
- HeaderParameters map[string]string
- The headers to send as part of the request when invoking the API Gateway REST API or EventBridge ApiDestination.
- PathParameterValues []string
- The path parameter values used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
- QueryStringParameters map[string]string
- The query string keys and values to send as part of the request when invoking the API Gateway REST API or EventBridge ApiDestination.
- headerParameters Map<String,String>
- The headers to send as part of the request when invoking the API Gateway REST API or EventBridge ApiDestination.
- pathParameterValues List<String>
- The path parameter values used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
- queryStringParameters Map<String,String>
- The query string keys and values to send as part of the request when invoking the API Gateway REST API or EventBridge ApiDestination.
- headerParameters {[key: string]: string}
- The headers to send as part of the request when invoking the API Gateway REST API or EventBridge ApiDestination.
- pathParameterValues string[]
- The path parameter values used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
- queryStringParameters {[key: string]: string}
- The query string keys and values to send as part of the request when invoking the API Gateway REST API or EventBridge ApiDestination.
- header_parameters Mapping[str, str]
- The headers to send as part of the request when invoking the API Gateway REST API or EventBridge ApiDestination.
- path_parameter_values Sequence[str]
- The path parameter values used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
- query_string_parameters Mapping[str, str]
- The query string keys and values to send as part of the request when invoking the API Gateway REST API or EventBridge ApiDestination.
- headerParameters Map<String>
- The headers to send as part of the request when invoking the API Gateway REST API or EventBridge ApiDestination.
- pathParameterValues List<String>
- The path parameter values used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
- queryStringParameters Map<String>
- The query string keys and values to send as part of the request when invoking the API Gateway REST API or EventBridge ApiDestination.
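A sketch targeting an EventBridge API destination (the destination ARN, header, and path values are placeholders; the same parameters apply to an API Gateway REST API target):
import * as aws_native from "@pulumi/aws-native";
const httpPipe = new aws_native.pipes.Pipe("httpPipe", {
    roleArn: "arn:aws:iam::111122223333:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111122223333:exampleSourceQueue",
    target: "arn:aws:events:us-east-1:111122223333:api-destination/exampleDestination/00000000-0000-0000-0000-000000000000",
    targetParameters: {
        httpParameters: {
            headerParameters: { "content-type": "application/json" },
            pathParameterValues: ["orders"],          // fills the path's "*" wildcards, in order
            queryStringParameters: { stage: "prod" },
        },
    },
});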
PipeTargetInvocationType, PipeTargetInvocationTypeArgs        
- RequestResponse 
- REQUEST_RESPONSE
- FireAndForget
- FIRE_AND_FORGET
- PipeTargetInvocationTypeRequestResponse
- REQUEST_RESPONSE
- PipeTargetInvocationTypeFireAndForget
- FIRE_AND_FORGET
- RequestResponse
- REQUEST_RESPONSE
- FireAndForget
- FIRE_AND_FORGET
- RequestResponse
- REQUEST_RESPONSE
- FireAndForget
- FIRE_AND_FORGET
- REQUEST_RESPONSE
- REQUEST_RESPONSE
- FIRE_AND_FORGET
- FIRE_AND_FORGET
- "REQUEST_RESPONSE"
- REQUEST_RESPONSE
- "FIRE_AND_FORGET"
- FIRE_AND_FORGET
PipeTargetKinesisStreamParameters, PipeTargetKinesisStreamParametersArgs          
- PartitionKey string
- Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
- PartitionKey string
- Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
- partitionKey String
- Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
- partitionKey string
- Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
- partition_key str
- Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
- partitionKey String
- Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
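Because the partition key controls shard placement, a key that varies across records is what spreads load across shards. A minimal TypeScript sketch (hypothetical ARNs; the JSON-path key assumes EventBridge Pipes dynamic path support and an SQS-shaped source event):

import * as aws_native from "@pulumi/aws-native";

const kinesisTargetPipe = new aws_native.pipes.Pipe("kinesisTargetPipe", {
    roleArn: "arn:aws:iam::111122223333:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111122223333:example-source-queue",
    target: "arn:aws:kinesis:us-east-1:111122223333:stream/example-stream",
    targetParameters: {
        kinesisStreamParameters: {
            // All records with the same key hash to the same shard; using a
            // per-message source field (assumed here) spreads records out.
            partitionKey: "$.messageId",
        },
    },
});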
PipeTargetLambdaFunctionParameters, PipeTargetLambdaFunctionParametersArgs
- InvocationType Pulumi.AwsNative.Pipes.PipeTargetInvocationType
- Specify whether to invoke the function synchronously or asynchronously. REQUEST_RESPONSE (default) - Invoke synchronously. This corresponds to the RequestResponse option in the InvocationType parameter for the Lambda Invoke API. FIRE_AND_FORGET - Invoke asynchronously. This corresponds to the Event option in the InvocationType parameter for the Lambda Invoke API. For more information, see Invocation types in the Amazon EventBridge User Guide.
- InvocationType PipeTargetInvocationType
- Specify whether to invoke the function synchronously or asynchronously. REQUEST_RESPONSE (default) - Invoke synchronously. This corresponds to the RequestResponse option in the InvocationType parameter for the Lambda Invoke API. FIRE_AND_FORGET - Invoke asynchronously. This corresponds to the Event option in the InvocationType parameter for the Lambda Invoke API. For more information, see Invocation types in the Amazon EventBridge User Guide.
- invocationType PipeTargetInvocationType
- Specify whether to invoke the function synchronously or asynchronously. REQUEST_RESPONSE (default) - Invoke synchronously. This corresponds to the RequestResponse option in the InvocationType parameter for the Lambda Invoke API. FIRE_AND_FORGET - Invoke asynchronously. This corresponds to the Event option in the InvocationType parameter for the Lambda Invoke API. For more information, see Invocation types in the Amazon EventBridge User Guide.
- invocationType PipeTargetInvocationType
- Specify whether to invoke the function synchronously or asynchronously. REQUEST_RESPONSE (default) - Invoke synchronously. This corresponds to the RequestResponse option in the InvocationType parameter for the Lambda Invoke API. FIRE_AND_FORGET - Invoke asynchronously. This corresponds to the Event option in the InvocationType parameter for the Lambda Invoke API. For more information, see Invocation types in the Amazon EventBridge User Guide.
- invocation_type PipeTargetInvocationType
- Specify whether to invoke the function synchronously or asynchronously. REQUEST_RESPONSE (default) - Invoke synchronously. This corresponds to the RequestResponse option in the InvocationType parameter for the Lambda Invoke API. FIRE_AND_FORGET - Invoke asynchronously. This corresponds to the Event option in the InvocationType parameter for the Lambda Invoke API. For more information, see Invocation types in the Amazon EventBridge User Guide.
- invocationType "REQUEST_RESPONSE" | "FIRE_AND_FORGET"
- Specify whether to invoke the function synchronously or asynchronously. REQUEST_RESPONSE (default) - Invoke synchronously. This corresponds to the RequestResponse option in the InvocationType parameter for the Lambda Invoke API. FIRE_AND_FORGET - Invoke asynchronously. This corresponds to the Event option in the InvocationType parameter for the Lambda Invoke API. For more information, see Invocation types in the Amazon EventBridge User Guide.
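In practice this is a single field on the target parameters. A minimal TypeScript sketch (hypothetical ARNs; the string literal matches the enum values listed above):

import * as aws_native from "@pulumi/aws-native";

const lambdaTargetPipe = new aws_native.pipes.Pipe("lambdaTargetPipe", {
    roleArn: "arn:aws:iam::111122223333:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111122223333:example-source-queue",
    target: "arn:aws:lambda:us-east-1:111122223333:function:example-fn",
    targetParameters: {
        lambdaFunctionParameters: {
            // FIRE_AND_FORGET maps to the Lambda Invoke "Event" option.
            invocationType: "FIRE_AND_FORGET",
        },
    },
});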
PipeTargetParameters, PipeTargetParametersArgs
- BatchJobParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetBatchJobParameters
- The parameters for using an AWS Batch job as a target.
- CloudWatchLogsParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetCloudWatchLogsParameters
- The parameters for using a CloudWatch Logs log stream as a target.
- EcsTaskParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetEcsTaskParameters
- The parameters for using an Amazon ECS task as a target.
- EventBridgeEventBusParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetEventBridgeEventBusParameters
- The parameters for using an EventBridge event bus as a target.
- HttpParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetHttpParameters
- These are custom parameters to be used when the target is an API Gateway REST API or EventBridge ApiDestination.
- InputTemplate string
- Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format. To remove an input template, specify an empty string.
- KinesisStreamParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetKinesisStreamParameters
- The parameters for using a Kinesis stream as a target.
- LambdaFunctionParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetLambdaFunctionParameters
- The parameters for using a Lambda function as a target.
- RedshiftDataParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetRedshiftDataParameters
- These are custom parameters to be used when the target is an Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement.
- SageMakerPipelineParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetSageMakerPipelineParameters
- The parameters for using a SageMaker AI pipeline as a target.
- SqsQueueParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetSqsQueueParameters
- The parameters for using an Amazon SQS queue as a target.
- StepFunctionStateMachineParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetStateMachineParameters
- The parameters for using a Step Functions state machine as a target.
- TimestreamParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetTimestreamParameters
- The parameters for using a Timestream for LiveAnalytics table as a target.
- BatchJobParameters PipeTargetBatchJobParameters
- The parameters for using an AWS Batch job as a target.
- CloudWatchLogsParameters PipeTargetCloudWatchLogsParameters
- The parameters for using a CloudWatch Logs log stream as a target.
- EcsTaskParameters PipeTargetEcsTaskParameters
- The parameters for using an Amazon ECS task as a target.
- EventBridgeEventBusParameters PipeTargetEventBridgeEventBusParameters
- The parameters for using an EventBridge event bus as a target.
- HttpParameters PipeTargetHttpParameters
- These are custom parameters to be used when the target is an API Gateway REST API or EventBridge ApiDestination.
- InputTemplate string
- Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format. To remove an input template, specify an empty string.
- KinesisStreamParameters PipeTargetKinesisStreamParameters
- The parameters for using a Kinesis stream as a target.
- LambdaFunctionParameters PipeTargetLambdaFunctionParameters
- The parameters for using a Lambda function as a target.
- RedshiftDataParameters PipeTargetRedshiftDataParameters
- These are custom parameters to be used when the target is an Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement.
- SageMakerPipelineParameters PipeTargetSageMakerPipelineParameters
- The parameters for using a SageMaker AI pipeline as a target.
- SqsQueueParameters PipeTargetSqsQueueParameters
- The parameters for using an Amazon SQS queue as a target.
- StepFunctionStateMachineParameters PipeTargetStateMachineParameters
- The parameters for using a Step Functions state machine as a target.
- TimestreamParameters PipeTargetTimestreamParameters
- The parameters for using a Timestream for LiveAnalytics table as a target.
- batchJobParameters PipeTargetBatchJobParameters
- The parameters for using an AWS Batch job as a target.
- cloudWatchLogsParameters PipeTargetCloudWatchLogsParameters
- The parameters for using a CloudWatch Logs log stream as a target.
- ecsTaskParameters PipeTargetEcsTaskParameters
- The parameters for using an Amazon ECS task as a target.
- eventBridgeEventBusParameters PipeTargetEventBridgeEventBusParameters
- The parameters for using an EventBridge event bus as a target.
- httpParameters PipeTargetHttpParameters
- These are custom parameters to be used when the target is an API Gateway REST API or EventBridge ApiDestination.
- inputTemplate String
- Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format. To remove an input template, specify an empty string.
- kinesisStreamParameters PipeTargetKinesisStreamParameters
- The parameters for using a Kinesis stream as a target.
- lambdaFunctionParameters PipeTargetLambdaFunctionParameters
- The parameters for using a Lambda function as a target.
- redshiftDataParameters PipeTargetRedshiftDataParameters
- These are custom parameters to be used when the target is an Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement.
- sageMakerPipelineParameters PipeTargetSageMakerPipelineParameters
- The parameters for using a SageMaker AI pipeline as a target.
- sqsQueueParameters PipeTargetSqsQueueParameters
- The parameters for using an Amazon SQS queue as a target.
- stepFunctionStateMachineParameters PipeTargetStateMachineParameters
- The parameters for using a Step Functions state machine as a target.
- timestreamParameters PipeTargetTimestreamParameters
- The parameters for using a Timestream for LiveAnalytics table as a target.
- batchJobParameters PipeTargetBatchJobParameters
- The parameters for using an AWS Batch job as a target.
- cloudWatchLogsParameters PipeTargetCloudWatchLogsParameters
- The parameters for using a CloudWatch Logs log stream as a target.
- ecsTaskParameters PipeTargetEcsTaskParameters
- The parameters for using an Amazon ECS task as a target.
- eventBridgeEventBusParameters PipeTargetEventBridgeEventBusParameters
- The parameters for using an EventBridge event bus as a target.
- httpParameters PipeTargetHttpParameters
- These are custom parameters to be used when the target is an API Gateway REST API or EventBridge ApiDestination.
- inputTemplate string
- Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format. To remove an input template, specify an empty string.
- kinesisStreamParameters PipeTargetKinesisStreamParameters
- The parameters for using a Kinesis stream as a target.
- lambdaFunctionParameters PipeTargetLambdaFunctionParameters
- The parameters for using a Lambda function as a target.
- redshiftDataParameters PipeTargetRedshiftDataParameters
- These are custom parameters to be used when the target is an Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement.
- sageMakerPipelineParameters PipeTargetSageMakerPipelineParameters
- The parameters for using a SageMaker AI pipeline as a target.
- sqsQueueParameters PipeTargetSqsQueueParameters
- The parameters for using an Amazon SQS queue as a target.
- stepFunctionStateMachineParameters PipeTargetStateMachineParameters
- The parameters for using a Step Functions state machine as a target.
- timestreamParameters PipeTargetTimestreamParameters
- The parameters for using a Timestream for LiveAnalytics table as a target.
- batch_job_parameters PipeTargetBatchJobParameters
- The parameters for using an AWS Batch job as a target.
- cloud_watch_logs_parameters PipeTargetCloudWatchLogsParameters
- The parameters for using a CloudWatch Logs log stream as a target.
- ecs_task_parameters PipeTargetEcsTaskParameters
- The parameters for using an Amazon ECS task as a target.
- event_bridge_event_bus_parameters PipeTargetEventBridgeEventBusParameters
- The parameters for using an EventBridge event bus as a target.
- http_parameters PipeTargetHttpParameters
- These are custom parameters to be used when the target is an API Gateway REST API or EventBridge ApiDestination.
- input_template str
- Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format. To remove an input template, specify an empty string.
- kinesis_stream_parameters PipeTargetKinesisStreamParameters
- The parameters for using a Kinesis stream as a target.
- lambda_function_parameters PipeTargetLambdaFunctionParameters
- The parameters for using a Lambda function as a target.
- redshift_data_parameters PipeTargetRedshiftDataParameters
- These are custom parameters to be used when the target is an Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement.
- sage_maker_pipeline_parameters PipeTargetSageMakerPipelineParameters
- The parameters for using a SageMaker AI pipeline as a target.
- sqs_queue_parameters PipeTargetSqsQueueParameters
- The parameters for using an Amazon SQS queue as a target.
- step_function_state_machine_parameters PipeTargetStateMachineParameters
- The parameters for using a Step Functions state machine as a target.
- timestream_parameters PipeTargetTimestreamParameters
- The parameters for using a Timestream for LiveAnalytics table as a target.
- batchJobParameters Property Map
- The parameters for using an AWS Batch job as a target.
- cloudWatchLogsParameters Property Map
- The parameters for using a CloudWatch Logs log stream as a target.
- ecsTaskParameters Property Map
- The parameters for using an Amazon ECS task as a target.
- eventBridgeEventBusParameters Property Map
- The parameters for using an EventBridge event bus as a target.
- httpParameters Property Map
- These are custom parameters to be used when the target is an API Gateway REST API or EventBridge ApiDestination.
- inputTemplate String
- Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format. To remove an input template, specify an empty string.
- kinesisStreamParameters Property Map
- The parameters for using a Kinesis stream as a target.
- lambdaFunctionParameters Property Map
- The parameters for using a Lambda function as a target.
- redshiftDataParameters Property Map
- These are custom parameters to be used when the target is an Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement.
- sageMakerPipelineParameters Property Map
- The parameters for using a SageMaker AI pipeline as a target.
- sqsQueueParameters Property Map
- The parameters for using an Amazon SQS queue as a target.
- stepFunctionStateMachineParameters Property Map
- The parameters for using a Step Functions state machine as a target.
- timestreamParameters Property Map
- The parameters for using a Timestream for LiveAnalytics table as a target.
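The inputTemplate and the per-service parameter blocks compose: the template reshapes the event, and the parameter block matching the target service tunes delivery. A small TypeScript sketch (hypothetical ARNs; the <$.path> placeholders assume SQS-shaped source events):

import * as aws_native from "@pulumi/aws-native";

const templatedPipe = new aws_native.pipes.Pipe("templatedPipe", {
    roleArn: "arn:aws:iam::111122223333:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111122223333:example-source-queue",
    target: "arn:aws:sqs:us-east-1:111122223333:example-target-queue",
    targetParameters: {
        // Only the rendered JSON reaches the target; <$.path> placeholders
        // are filled from each source event.
        inputTemplate: "{\"id\": <$.messageId>, \"payload\": <$.body>}",
    },
});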
PipeTargetRedshiftDataParameters, PipeTargetRedshiftDataParametersArgs
- Database string
- Redshift Database
- Sqls List<string>
- A list of SQL statements.
- DbUser string
- Database user name
- SecretManagerArn string
- Optional Secrets Manager ARN which stores the database credentials
- StatementName string
- A name for the Redshift Data API statement, which can be used as a filter in ListStatements.
- WithEvent bool
- Indicates whether to send an event back to EventBridge after the SQL statement runs.
- Database string
- Redshift Database
- Sqls []string
- A list of SQL statements.
- DbUser string
- Database user name
- SecretManagerArn string
- Optional Secrets Manager ARN which stores the database credentials
- StatementName string
- A name for the Redshift Data API statement, which can be used as a filter in ListStatements.
- WithEvent bool
- Indicates whether to send an event back to EventBridge after the SQL statement runs.
- database String
- Redshift Database
- sqls List<String>
- A list of SQL statements.
- dbUser String
- Database user name
- secretManagerArn String
- Optional Secrets Manager ARN which stores the database credentials
- statementName String
- A name for the Redshift Data API statement, which can be used as a filter in ListStatements.
- withEvent Boolean
- Indicates whether to send an event back to EventBridge after the SQL statement runs.
- database string
- Redshift Database
- sqls string[]
- A list of SQL statements.
- dbUser string
- Database user name
- secretManagerArn string
- Optional Secrets Manager ARN which stores the database credentials
- statementName string
- A name for the Redshift Data API statement, which can be used as a filter in ListStatements.
- withEvent boolean
- Indicates whether to send an event back to EventBridge after the SQL statement runs.
- database str
- Redshift Database
- sqls Sequence[str]
- A list of SQL statements.
- db_user str
- Database user name
- secret_manager_arn str
- Optional Secrets Manager ARN which stores the database credentials
- statement_name str
- A name for the Redshift Data API statement, which can be used as a filter in ListStatements.
- with_event bool
- Indicates whether to send an event back to EventBridge after the SQL statement runs.
- database String
- Redshift Database
- sqls List<String>
- A list of SQL statements.
- dbUser String
- Database user name
- secretManagerArn String
- Optional Secrets Manager ARN which stores the database credentials
- statementName String
- A name for the Redshift Data API statement, which can be used as a filter in ListStatements.
- withEvent Boolean
- Indicates whether to send an event back to EventBridge after the SQL statement runs.
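These fields map directly onto a Redshift Data API BatchExecuteStatement call. A TypeScript sketch under assumed names (the cluster, database, user, and SQL below are placeholders):

import * as aws_native from "@pulumi/aws-native";

const redshiftTargetPipe = new aws_native.pipes.Pipe("redshiftTargetPipe", {
    roleArn: "arn:aws:iam::111122223333:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111122223333:example-source-queue",
    target: "arn:aws:redshift:us-east-1:111122223333:cluster:example-cluster",
    targetParameters: {
        redshiftDataParameters: {
            database: "dev",
            dbUser: "awsuser", // or secretManagerArn for secret-based auth
            sqls: ["INSERT INTO pipe_events (received_at) VALUES (GETDATE())"],
            statementName: "pipe-insert", // filter these runs via ListStatements
            withEvent: true, // emit an EventBridge event when the SQL finishes
        },
    },
});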
PipeTargetSageMakerPipelineParameters, PipeTargetSageMakerPipelineParametersArgs
- PipelineParameterList List<Pulumi.AwsNative.Pipes.Inputs.PipeSageMakerPipelineParameter>
- List of Parameter names and values for SageMaker AI Model Building Pipeline execution.
- PipelineParameterList []PipeSageMakerPipelineParameter
- List of Parameter names and values for SageMaker AI Model Building Pipeline execution.
- pipelineParameterList List<PipeSageMakerPipelineParameter>
- List of Parameter names and values for SageMaker AI Model Building Pipeline execution.
- pipelineParameterList PipeSageMakerPipelineParameter[]
- List of Parameter names and values for SageMaker AI Model Building Pipeline execution.
- pipeline_parameter_list Sequence[PipeSageMakerPipelineParameter]
- List of Parameter names and values for SageMaker AI Model Building Pipeline execution.
- pipelineParameterList List<Property Map>
- List of Parameter names and values for SageMaker AI Model Building Pipeline execution.
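Each entry in the list is a name/value pair passed to the pipeline execution. A TypeScript sketch with placeholder names (the parameter name must match one the pipeline actually declares):

import * as aws_native from "@pulumi/aws-native";

const sageMakerTargetPipe = new aws_native.pipes.Pipe("sageMakerTargetPipe", {
    roleArn: "arn:aws:iam::111122223333:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111122223333:example-source-queue",
    target: "arn:aws:sagemaker:us-east-1:111122223333:pipeline/example-pipeline",
    targetParameters: {
        sageMakerPipelineParameters: {
            // Hypothetical pipeline parameter for illustration only.
            pipelineParameterList: [
                { name: "InputDataS3Uri", value: "s3://example-bucket/input/" },
            ],
        },
    },
});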
PipeTargetSqsQueueParameters, PipeTargetSqsQueueParametersArgs
- MessageDeduplicationId string
- This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
- MessageGroupId string
- The FIFO message group ID to use as the target.
- MessageDeduplicationId string
- This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
- MessageGroupId string
- The FIFO message group ID to use as the target.
- messageDeduplicationId String
- This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
- messageGroupId String
- The FIFO message group ID to use as the target.
- messageDeduplicationId string
- This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
- messageGroupId string
- The FIFO message group ID to use as the target.
- message_deduplication_id str
- This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
- message_group_id str
- The FIFO message group ID to use as the target.
- messageDeduplicationId String
- This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
- messageGroupId String
- The FIFO message group ID to use as the target.
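Both fields only matter for FIFO queues, where ordering and deduplication are scoped by message group. A TypeScript sketch (hypothetical ARNs; the JSON-path deduplication ID assumes dynamic path support and an SQS-shaped source event):

import * as aws_native from "@pulumi/aws-native";

const fifoTargetPipe = new aws_native.pipes.Pipe("fifoTargetPipe", {
    roleArn: "arn:aws:iam::111122223333:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111122223333:example-source-queue",
    target: "arn:aws:sqs:us-east-1:111122223333:example-target.fifo",
    targetParameters: {
        sqsQueueParameters: {
            messageGroupId: "pipe-group", // messages in a group are delivered in order
            // Only needed when content-based deduplication is off on the queue;
            // a per-message source field (assumed here) keeps retries idempotent.
            messageDeduplicationId: "$.messageId",
        },
    },
});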
PipeTargetStateMachineParameters, PipeTargetStateMachineParametersArgs
- InvocationType Pulumi.AwsNative.Pipes.PipeTargetInvocationType
- Specify whether to invoke the Step Functions state machine synchronously or asynchronously. REQUEST_RESPONSE (default) - Invoke synchronously. For more information, see StartSyncExecution in the AWS Step Functions API Reference. REQUEST_RESPONSE is not supported for STANDARD state machine workflows. FIRE_AND_FORGET - Invoke asynchronously. For more information, see StartExecution in the AWS Step Functions API Reference. For more information, see Invocation types in the Amazon EventBridge User Guide.
- InvocationType PipeTargetInvocationType
- Specify whether to invoke the Step Functions state machine synchronously or asynchronously. REQUEST_RESPONSE (default) - Invoke synchronously. For more information, see StartSyncExecution in the AWS Step Functions API Reference. REQUEST_RESPONSE is not supported for STANDARD state machine workflows. FIRE_AND_FORGET - Invoke asynchronously. For more information, see StartExecution in the AWS Step Functions API Reference. For more information, see Invocation types in the Amazon EventBridge User Guide.
- invocationType PipeTargetInvocationType
- Specify whether to invoke the Step Functions state machine synchronously or asynchronously. REQUEST_RESPONSE (default) - Invoke synchronously. For more information, see StartSyncExecution in the AWS Step Functions API Reference. REQUEST_RESPONSE is not supported for STANDARD state machine workflows. FIRE_AND_FORGET - Invoke asynchronously. For more information, see StartExecution in the AWS Step Functions API Reference. For more information, see Invocation types in the Amazon EventBridge User Guide.
- invocationType PipeTargetInvocationType
- Specify whether to invoke the Step Functions state machine synchronously or asynchronously. REQUEST_RESPONSE (default) - Invoke synchronously. For more information, see StartSyncExecution in the AWS Step Functions API Reference. REQUEST_RESPONSE is not supported for STANDARD state machine workflows. FIRE_AND_FORGET - Invoke asynchronously. For more information, see StartExecution in the AWS Step Functions API Reference. For more information, see Invocation types in the Amazon EventBridge User Guide.
- invocation_type PipeTargetInvocationType
- Specify whether to invoke the Step Functions state machine synchronously or asynchronously. REQUEST_RESPONSE (default) - Invoke synchronously. For more information, see StartSyncExecution in the AWS Step Functions API Reference. REQUEST_RESPONSE is not supported for STANDARD state machine workflows. FIRE_AND_FORGET - Invoke asynchronously. For more information, see StartExecution in the AWS Step Functions API Reference. For more information, see Invocation types in the Amazon EventBridge User Guide.
- invocationType "REQUEST_RESPONSE" | "FIRE_AND_FORGET"
- Specify whether to invoke the Step Functions state machine synchronously or asynchronously. REQUEST_RESPONSE (default) - Invoke synchronously. For more information, see StartSyncExecution in the AWS Step Functions API Reference. REQUEST_RESPONSE is not supported for STANDARD state machine workflows. FIRE_AND_FORGET - Invoke asynchronously. For more information, see StartExecution in the AWS Step Functions API Reference. For more information, see Invocation types in the Amazon EventBridge User Guide.
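Since REQUEST_RESPONSE is not supported for STANDARD workflows, a pipe in front of a STANDARD state machine has to invoke asynchronously, as in this TypeScript sketch (placeholder ARNs):

import * as aws_native from "@pulumi/aws-native";

const stateMachineTargetPipe = new aws_native.pipes.Pipe("stateMachineTargetPipe", {
    roleArn: "arn:aws:iam::111122223333:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111122223333:example-source-queue",
    target: "arn:aws:states:us-east-1:111122223333:stateMachine:ExampleStandardMachine",
    targetParameters: {
        stepFunctionStateMachineParameters: {
            // STANDARD workflows only support asynchronous invocation.
            invocationType: "FIRE_AND_FORGET",
        },
    },
});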
PipeTargetTimestreamParameters, PipeTargetTimestreamParametersArgs
- DimensionMappings List<Pulumi.AwsNative.Pipes.Inputs.PipeDimensionMapping>
- Map source data to dimensions in the target Timestream for LiveAnalytics table. For more information, see Amazon Timestream for LiveAnalytics concepts.
- TimeValue string
- Dynamic path to the source data field that represents the time value for your data.
- VersionValue string
- 64-bit version value or source data field that represents the version value for your data. Write requests with a higher version number will update the existing measure values of the record and version. In cases where the measure value is the same, the version will still be updated. Default value is 1. Timestream for LiveAnalytics does not support updating partial measure values in a record. Version must be 1 or greater, or you will receive a ValidationException error.
- EpochTimeUnit Pulumi.AwsNative.Pipes.PipeEpochTimeUnit
- The granularity of the time units used. Default is MILLISECONDS. Required if TimeFieldType is specified as EPOCH.
- MultiMeasureMappings List<Pulumi.AwsNative.Pipes.Inputs.PipeMultiMeasureMapping>
- Maps multiple measures from the source event to the same record in the specified Timestream for LiveAnalytics table.
- SingleMeasureMappings List<Pulumi.AwsNative.Pipes.Inputs.PipeSingleMeasureMapping>
- Mappings of single source data fields to individual records in the specified Timestream for LiveAnalytics table.
- TimeFieldType Pulumi.AwsNative.Pipes.PipeTimeFieldType
- The type of time value used. The default is EPOCH.
- TimestampFormat string
- How to format the timestamps. For example, yyyy-MM-dd'T'HH:mm:ss'Z'. Required if TimeFieldType is specified as TIMESTAMP_FORMAT.
- DimensionMappings []PipeDimensionMapping
- Map source data to dimensions in the target Timestream for LiveAnalytics table. For more information, see Amazon Timestream for LiveAnalytics concepts.
- TimeValue string
- Dynamic path to the source data field that represents the time value for your data.
- VersionValue string
- 64-bit version value or source data field that represents the version value for your data. Write requests with a higher version number will update the existing measure values of the record and version. In cases where the measure value is the same, the version will still be updated. Default value is 1. Timestream for LiveAnalytics does not support updating partial measure values in a record. Version must be 1 or greater, or you will receive a ValidationException error.
- EpochTimeUnit PipeEpochTimeUnit
- The granularity of the time units used. Default is MILLISECONDS. Required if TimeFieldType is specified as EPOCH.
- MultiMeasureMappings []PipeMultiMeasureMapping
- Maps multiple measures from the source event to the same record in the specified Timestream for LiveAnalytics table.
- SingleMeasureMappings []PipeSingleMeasureMapping
- Mappings of single source data fields to individual records in the specified Timestream for LiveAnalytics table.
- TimeFieldType PipeTimeFieldType
- The type of time value used. The default is EPOCH.
- TimestampFormat string
- How to format the timestamps. For example, yyyy-MM-dd'T'HH:mm:ss'Z'. Required if TimeFieldType is specified as TIMESTAMP_FORMAT.
- dimensionMappings List<PipeDimensionMapping>
- Map source data to dimensions in the target Timestream for LiveAnalytics table. For more information, see Amazon Timestream for LiveAnalytics concepts.
- timeValue String
- Dynamic path to the source data field that represents the time value for your data.
- versionValue String
- 64-bit version value or source data field that represents the version value for your data. Write requests with a higher version number will update the existing measure values of the record and version. In cases where the measure value is the same, the version will still be updated. Default value is 1. Timestream for LiveAnalytics does not support updating partial measure values in a record. Version must be 1 or greater, or you will receive a ValidationException error.
- epochTimeUnit PipeEpochTimeUnit
- The granularity of the time units used. Default is MILLISECONDS. Required if TimeFieldType is specified as EPOCH.
- multiMeasureMappings List<PipeMultiMeasureMapping>
- Maps multiple measures from the source event to the same record in the specified Timestream for LiveAnalytics table.
- singleMeasureMappings List<PipeSingleMeasureMapping>
- Mappings of single source data fields to individual records in the specified Timestream for LiveAnalytics table.
- timeFieldType PipeTimeFieldType
- The type of time value used. The default is EPOCH.
- timestampFormat String
- How to format the timestamps. For example, yyyy-MM-dd'T'HH:mm:ss'Z'. Required if TimeFieldType is specified as TIMESTAMP_FORMAT.
- dimensionMappings PipeDimensionMapping[]
- Map source data to dimensions in the target Timestream for LiveAnalytics table. For more information, see Amazon Timestream for LiveAnalytics concepts.
- timeValue string
- Dynamic path to the source data field that represents the time value for your data.
- versionValue string
- 64-bit version value or source data field that represents the version value for your data. Write requests with a higher version number will update the existing measure values of the record and version. In cases where the measure value is the same, the version will still be updated. Default value is 1. Timestream for LiveAnalytics does not support updating partial measure values in a record. Version must be 1 or greater, or you will receive a ValidationException error.
- epochTimeUnit PipeEpochTimeUnit
- The granularity of the time units used. Default is MILLISECONDS. Required if TimeFieldType is specified as EPOCH.
- multiMeasureMappings PipeMultiMeasureMapping[]
- Maps multiple measures from the source event to the same record in the specified Timestream for LiveAnalytics table.
- singleMeasureMappings PipeSingleMeasureMapping[]
- Mappings of single source data fields to individual records in the specified Timestream for LiveAnalytics table.
- timeFieldType PipeTimeFieldType
- The type of time value used. The default is EPOCH.
- timestampFormat string
- How to format the timestamps. For example, yyyy-MM-dd'T'HH:mm:ss'Z'. Required if TimeFieldType is specified as TIMESTAMP_FORMAT.
- dimension_mappings Sequence[PipeDimensionMapping]
- Map source data to dimensions in the target Timestream for LiveAnalytics table. For more information, see Amazon Timestream for LiveAnalytics concepts.
- time_value str
- Dynamic path to the source data field that represents the time value for your data.
- version_value str
- 64-bit version value or source data field that represents the version value for your data. Write requests with a higher version number will update the existing measure values of the record and version. In cases where the measure value is the same, the version will still be updated. Default value is 1. Timestream for LiveAnalytics does not support updating partial measure values in a record. Version must be 1 or greater, or you will receive a ValidationException error.
- epoch_time_unit PipeEpochTimeUnit
- The granularity of the time units used. Default is MILLISECONDS. Required if TimeFieldType is specified as EPOCH.
- multi_measure_mappings Sequence[PipeMultiMeasureMapping]
- Maps multiple measures from the source event to the same record in the specified Timestream for LiveAnalytics table.
- single_measure_mappings Sequence[PipeSingleMeasureMapping]
- Mappings of single source data fields to individual records in the specified Timestream for LiveAnalytics table.
- time_field_type PipeTimeFieldType
- The type of time value used. The default is EPOCH.
- timestamp_format str
- How to format the timestamps. For example, yyyy-MM-dd'T'HH:mm:ss'Z'. Required if TimeFieldType is specified as TIMESTAMP_FORMAT.
- dimensionMappings List<Property Map>
- Map source data to dimensions in the target Timestream for LiveAnalytics table. For more information, see Amazon Timestream for LiveAnalytics concepts.
- timeValue String
- Dynamic path to the source data field that represents the time value for your data.
- versionValue String
- 64-bit version value or source data field that represents the version value for your data. Write requests with a higher version number will update the existing measure values of the record and version. In cases where the measure value is the same, the version will still be updated. Default value is 1. Timestream for LiveAnalytics does not support updating partial measure values in a record. Version must be 1 or greater, or you will receive a ValidationException error.
- epochTimeUnit "MILLISECONDS" | "SECONDS" | "MICROSECONDS" | "NANOSECONDS"
- The granularity of the time units used. Default is MILLISECONDS. Required if TimeFieldType is specified as EPOCH.
- multiMeasureMappings List<Property Map>
- Maps multiple measures from the source event to the same record in the specified Timestream for LiveAnalytics table.
- singleMeasureMappings List<Property Map>
- Mappings of single source data fields to individual records in the specified Timestream for LiveAnalytics table.
- timeFieldType "EPOCH" | "TIMESTAMP_FORMAT"
- The type of time value used. The default is EPOCH.
- timestampFormat String
- How to format the timestamps. For example, yyyy-MM-dd'T'HH:mm:ss'Z'. Required if TimeFieldType is specified as TIMESTAMP_FORMAT.
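The mappings pull fields out of each event and assemble a Timestream record: dimensions for metadata, measures for values, plus a time and a version. A TypeScript sketch under assumed event field names (every ARN, path, and name below is a placeholder):

import * as aws_native from "@pulumi/aws-native";

const timestreamTargetPipe = new aws_native.pipes.Pipe("timestreamTargetPipe", {
    roleArn: "arn:aws:iam::111122223333:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111122223333:example-source-queue",
    target: "arn:aws:timestream:us-east-1:111122223333:database/exampleDb/table/exampleTable",
    targetParameters: {
        timestreamParameters: {
            timeValue: "$.data.timestamp", // source field holding the record time
            timeFieldType: "EPOCH",
            epochTimeUnit: "MILLISECONDS",
            versionValue: "1", // static version; raise it to overwrite records
            dimensionMappings: [{
                dimensionName: "region",
                dimensionValue: "$.data.region",
                dimensionValueType: "VARCHAR",
            }],
            singleMeasureMappings: [{
                measureName: "latencyMs",
                measureValue: "$.data.latency",
                measureValueType: "DOUBLE",
            }],
        },
    },
});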
PipeTimeFieldType, PipeTimeFieldTypeArgs
- Epoch
- EPOCH
- TimestampFormat
- TIMESTAMP_FORMAT
- PipeTimeFieldTypeEpoch
- EPOCH
- PipeTimeFieldTypeTimestampFormat
- TIMESTAMP_FORMAT
- Epoch
- EPOCH
- TimestampFormat
- TIMESTAMP_FORMAT
- Epoch
- EPOCH
- TimestampFormat
- TIMESTAMP_FORMAT
- EPOCH
- EPOCH
- TIMESTAMP_FORMAT
- TIMESTAMP_FORMAT
- "EPOCH"
- EPOCH
- "TIMESTAMP_FORMAT"
- TIMESTAMP_FORMAT
Package Details
- Repository
- AWS Native pulumi/pulumi-aws-native
- License
- Apache-2.0