aws.pipes.Pipe
Resource for managing an AWS EventBridge Pipes Pipe.
You can find out more about EventBridge Pipes in the User Guide.
EventBridge Pipes are highly configurable and typically require IAM permissions to read from the source and write to the target. More information on the configuration options and the required IAM permissions can be found in the User Guide.
Note: EventBridge was formerly known as CloudWatch Events. The functionality is identical.
Example Usage
Basic Usage
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const main = aws.getCallerIdentity({});
const example = new aws.iam.Role("example", {assumeRolePolicy: pulumi.jsonStringify({
    Version: "2012-10-17",
    Statement: {
        Effect: "Allow",
        Action: "sts:AssumeRole",
        Principal: {
            Service: "pipes.amazonaws.com",
        },
        Condition: {
            StringEquals: {
                "aws:SourceAccount": main.then(main => main.accountId),
            },
        },
    },
})});
const sourceQueue = new aws.sqs.Queue("source", {});
const source = new aws.iam.RolePolicy("source", {
    role: example.id,
    policy: pulumi.jsonStringify({
        Version: "2012-10-17",
        Statement: [{
            Effect: "Allow",
            Action: [
                "sqs:DeleteMessage",
                "sqs:GetQueueAttributes",
                "sqs:ReceiveMessage",
            ],
            Resource: [sourceQueue.arn],
        }],
    }),
});
const targetQueue = new aws.sqs.Queue("target", {});
const target = new aws.iam.RolePolicy("target", {
    role: example.id,
    policy: pulumi.jsonStringify({
        Version: "2012-10-17",
        Statement: [{
            Effect: "Allow",
            Action: ["sqs:SendMessage"],
            Resource: [targetQueue.arn],
        }],
    }),
});
const examplePipe = new aws.pipes.Pipe("example", {
    name: "example-pipe",
    roleArn: example.arn,
    source: sourceQueue.arn,
    target: targetQueue.arn,
}, {
    dependsOn: [
        source,
        target,
    ],
});
import pulumi
import json
import pulumi_aws as aws
main = aws.get_caller_identity()
example = aws.iam.Role("example", assume_role_policy=json.dumps({
    "Version": "2012-10-17",
    "Statement": {
        "Effect": "Allow",
        "Action": "sts:AssumeRole",
        "Principal": {
            "Service": "pipes.amazonaws.com",
        },
        "Condition": {
            "StringEquals": {
                "aws:SourceAccount": main.account_id,
            },
        },
    },
}))
source_queue = aws.sqs.Queue("source")
source = aws.iam.RolePolicy("source",
    role=example.id,
    policy=pulumi.Output.json_dumps({
        "Version": "2012-10-17",
        "Statement": [{
            "Effect": "Allow",
            "Action": [
                "sqs:DeleteMessage",
                "sqs:GetQueueAttributes",
                "sqs:ReceiveMessage",
            ],
            "Resource": [source_queue.arn],
        }],
    }))
target_queue = aws.sqs.Queue("target")
target = aws.iam.RolePolicy("target",
    role=example.id,
    policy=pulumi.Output.json_dumps({
        "Version": "2012-10-17",
        "Statement": [{
            "Effect": "Allow",
            "Action": ["sqs:SendMessage"],
            "Resource": [target_queue.arn],
        }],
    }))
example_pipe = aws.pipes.Pipe("example",
    name="example-pipe",
    role_arn=example.arn,
    source=source_queue.arn,
    target=target_queue.arn,
    opts=pulumi.ResourceOptions(depends_on=[
        source,
        target,
    ]))
package main
import (
	"encoding/json"
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws"
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/iam"
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/pipes"
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/sqs"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		main, err := aws.GetCallerIdentity(ctx, &aws.GetCallerIdentityArgs{}, nil)
		if err != nil {
			return err
		}
		tmpJSON0, err := json.Marshal(map[string]interface{}{
			"Version": "2012-10-17",
			"Statement": map[string]interface{}{
				"Effect": "Allow",
				"Action": "sts:AssumeRole",
				"Principal": map[string]interface{}{
					"Service": "pipes.amazonaws.com",
				},
				"Condition": map[string]interface{}{
					"StringEquals": map[string]interface{}{
						"aws:SourceAccount": main.AccountId,
					},
				},
			},
		})
		if err != nil {
			return err
		}
		json0 := string(tmpJSON0)
		example, err := iam.NewRole(ctx, "example", &iam.RoleArgs{
			AssumeRolePolicy: pulumi.String(json0),
		})
		if err != nil {
			return err
		}
		sourceQueue, err := sqs.NewQueue(ctx, "source", nil)
		if err != nil {
			return err
		}
		source, err := iam.NewRolePolicy(ctx, "source", &iam.RolePolicyArgs{
			Role: example.ID(),
			Policy: sourceQueue.Arn.ApplyT(func(arn string) (pulumi.String, error) {
				var _zero pulumi.String
				tmpJSON1, err := json.Marshal(map[string]interface{}{
					"Version": "2012-10-17",
					"Statement": []map[string]interface{}{
						map[string]interface{}{
							"Effect": "Allow",
							"Action": []string{
								"sqs:DeleteMessage",
								"sqs:GetQueueAttributes",
								"sqs:ReceiveMessage",
							},
							"Resource": []string{
								arn,
							},
						},
					},
				})
				if err != nil {
					return _zero, err
				}
				json1 := string(tmpJSON1)
				return pulumi.String(json1), nil
			}).(pulumi.StringOutput),
		})
		if err != nil {
			return err
		}
		targetQueue, err := sqs.NewQueue(ctx, "target", nil)
		if err != nil {
			return err
		}
		target, err := iam.NewRolePolicy(ctx, "target", &iam.RolePolicyArgs{
			Role: example.ID(),
			Policy: targetQueue.Arn.ApplyT(func(arn string) (pulumi.String, error) {
				var _zero pulumi.String
				tmpJSON2, err := json.Marshal(map[string]interface{}{
					"Version": "2012-10-17",
					"Statement": []map[string]interface{}{
						map[string]interface{}{
							"Effect": "Allow",
							"Action": []string{
								"sqs:SendMessage",
							},
							"Resource": []string{
								arn,
							},
						},
					},
				})
				if err != nil {
					return _zero, err
				}
				json2 := string(tmpJSON2)
				return pulumi.String(json2), nil
			}).(pulumi.StringOutput),
		})
		if err != nil {
			return err
		}
		_, err = pipes.NewPipe(ctx, "example", &pipes.PipeArgs{
			Name:    pulumi.String("example-pipe"),
			RoleArn: example.Arn,
			Source:  sourceQueue.Arn,
			Target:  targetQueue.Arn,
		}, pulumi.DependsOn([]pulumi.Resource{
			source,
			target,
		}))
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() => 
{
    var main = Aws.GetCallerIdentity.Invoke();
    var example = new Aws.Iam.Role("example", new()
    {
        AssumeRolePolicy = Output.JsonSerialize(Output.Create(new Dictionary<string, object?>
        {
            ["Version"] = "2012-10-17",
            ["Statement"] = new Dictionary<string, object?>
            {
                ["Effect"] = "Allow",
                ["Action"] = "sts:AssumeRole",
                ["Principal"] = new Dictionary<string, object?>
                {
                    ["Service"] = "pipes.amazonaws.com",
                },
                ["Condition"] = new Dictionary<string, object?>
                {
                    ["StringEquals"] = new Dictionary<string, object?>
                    {
                        ["aws:SourceAccount"] = main.Apply(getCallerIdentityResult => getCallerIdentityResult.AccountId),
                    },
                },
            },
        })),
    });
    var sourceQueue = new Aws.Sqs.Queue("source");
    var source = new Aws.Iam.RolePolicy("source", new()
    {
        Role = example.Id,
        Policy = Output.JsonSerialize(Output.Create(new Dictionary<string, object?>
        {
            ["Version"] = "2012-10-17",
            ["Statement"] = new[]
            {
                new Dictionary<string, object?>
                {
                    ["Effect"] = "Allow",
                    ["Action"] = new[]
                    {
                        "sqs:DeleteMessage",
                        "sqs:GetQueueAttributes",
                        "sqs:ReceiveMessage",
                    },
                    ["Resource"] = new[]
                    {
                        sourceQueue.Arn,
                    },
                },
            },
        })),
    });
    var targetQueue = new Aws.Sqs.Queue("target");
    var target = new Aws.Iam.RolePolicy("target", new()
    {
        Role = example.Id,
        Policy = Output.JsonSerialize(Output.Create(new Dictionary<string, object?>
        {
            ["Version"] = "2012-10-17",
            ["Statement"] = new[]
            {
                new Dictionary<string, object?>
                {
                    ["Effect"] = "Allow",
                    ["Action"] = new[]
                    {
                        "sqs:SendMessage",
                    },
                    ["Resource"] = new[]
                    {
                        targetQueue.Arn,
                    },
                },
            },
        })),
    });
    var examplePipe = new Aws.Pipes.Pipe("example", new()
    {
        Name = "example-pipe",
        RoleArn = example.Arn,
        Source = sourceQueue.Arn,
        Target = targetQueue.Arn,
    }, new CustomResourceOptions
    {
        DependsOn =
        {
            source,
            target,
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.AwsFunctions;
import com.pulumi.aws.inputs.GetCallerIdentityArgs;
import com.pulumi.aws.iam.Role;
import com.pulumi.aws.iam.RoleArgs;
import com.pulumi.aws.sqs.Queue;
import com.pulumi.aws.iam.RolePolicy;
import com.pulumi.aws.iam.RolePolicyArgs;
import com.pulumi.aws.pipes.Pipe;
import com.pulumi.aws.pipes.PipeArgs;
import static com.pulumi.codegen.internal.Serialization.*;
import com.pulumi.resources.CustomResourceOptions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        final var main = AwsFunctions.getCallerIdentity();
        var example = new Role("example", RoleArgs.builder()
            .assumeRolePolicy(serializeJson(
                jsonObject(
                    jsonProperty("Version", "2012-10-17"),
                    jsonProperty("Statement", jsonObject(
                        jsonProperty("Effect", "Allow"),
                        jsonProperty("Action", "sts:AssumeRole"),
                        jsonProperty("Principal", jsonObject(
                            jsonProperty("Service", "pipes.amazonaws.com")
                        )),
                        jsonProperty("Condition", jsonObject(
                            jsonProperty("StringEquals", jsonObject(
                                jsonProperty("aws:SourceAccount", main.applyValue(getCallerIdentityResult -> getCallerIdentityResult.accountId()))
                            ))
                        ))
                    ))
                )))
            .build());
        var sourceQueue = new Queue("sourceQueue");
        var source = new RolePolicy("source", RolePolicyArgs.builder()
            .role(example.id())
            .policy(sourceQueue.arn().applyValue(arn -> serializeJson(
                jsonObject(
                    jsonProperty("Version", "2012-10-17"),
                    jsonProperty("Statement", jsonArray(jsonObject(
                        jsonProperty("Effect", "Allow"),
                        jsonProperty("Action", jsonArray(
                            "sqs:DeleteMessage", 
                            "sqs:GetQueueAttributes", 
                            "sqs:ReceiveMessage"
                        )),
                        jsonProperty("Resource", jsonArray(arn))
                    )))
                ))))
            .build());
        var targetQueue = new Queue("targetQueue");
        var target = new RolePolicy("target", RolePolicyArgs.builder()
            .role(example.id())
            .policy(targetQueue.arn().applyValue(arn -> serializeJson(
                jsonObject(
                    jsonProperty("Version", "2012-10-17"),
                    jsonProperty("Statement", jsonArray(jsonObject(
                        jsonProperty("Effect", "Allow"),
                        jsonProperty("Action", jsonArray("sqs:SendMessage")),
                        jsonProperty("Resource", jsonArray(arn))
                    )))
                ))))
            .build());
        var examplePipe = new Pipe("examplePipe", PipeArgs.builder()
            .name("example-pipe")
            .roleArn(example.arn())
            .source(sourceQueue.arn())
            .target(targetQueue.arn())
            .build(), CustomResourceOptions.builder()
                .dependsOn(                
                    source,
                    target)
                .build());
    }
}
resources:
  example:
    type: aws:iam:Role
    properties:
      assumeRolePolicy:
        fn::toJSON:
          Version: 2012-10-17
          Statement:
            Effect: Allow
            Action: sts:AssumeRole
            Principal:
              Service: pipes.amazonaws.com
            Condition:
              StringEquals:
                aws:SourceAccount: ${main.accountId}
  source:
    type: aws:iam:RolePolicy
    properties:
      role: ${example.id}
      policy:
        fn::toJSON:
          Version: 2012-10-17
          Statement:
            - Effect: Allow
              Action:
                - sqs:DeleteMessage
                - sqs:GetQueueAttributes
                - sqs:ReceiveMessage
              Resource:
                - ${sourceQueue.arn}
  sourceQueue:
    type: aws:sqs:Queue
    name: source
  target:
    type: aws:iam:RolePolicy
    properties:
      role: ${example.id}
      policy:
        fn::toJSON:
          Version: 2012-10-17
          Statement:
            - Effect: Allow
              Action:
                - sqs:SendMessage
              Resource:
                - ${targetQueue.arn}
  targetQueue:
    type: aws:sqs:Queue
    name: target
  examplePipe:
    type: aws:pipes:Pipe
    name: example
    properties:
      name: example-pipe
      roleArn: ${example.arn}
      source: ${sourceQueue.arn}
      target: ${targetQueue.arn}
    options:
      dependsOn:
        - ${source}
        - ${target}
variables:
  main:
    fn::invoke:
      function: aws:getCallerIdentity
      arguments: {}
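The optional description, desiredState, and tags arguments (see the constructor reference below) can be set at creation time. A minimal sketch, reusing the role and queues from the Basic Usage example above, that creates a pipe in the stopped state ("STOPPED" and "RUNNING" are the valid desiredState values):
import * as aws from "@pulumi/aws";
// A minimal sketch, assuming the `example` role and the source/target queues
// declared in the Basic Usage example above; the pipe name is illustrative.
const pausedPipe = new aws.pipes.Pipe("paused", {
    name: "example-paused-pipe",
    description: "Created stopped; set desiredState to RUNNING to start it.",
    desiredState: "STOPPED",
    roleArn: example.arn,
    source: sourceQueue.arn,
    target: targetQueue.arn,
});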
Enrichment Usage
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const example = new aws.pipes.Pipe("example", {
    name: "example-pipe",
    roleArn: exampleAwsIamRole.arn,
    source: source.arn,
    target: target.arn,
    enrichment: exampleAwsCloudwatchEventApiDestination.arn,
    enrichmentParameters: {
        httpParameters: {
            pathParameterValues: "example-path-param",
            headerParameters: {
                "example-header": "example-value",
                "second-example-header": "second-example-value",
            },
            queryStringParameters: {
                "example-query-string": "example-value",
                "second-example-query-string": "second-example-value",
            },
        },
    },
});
import pulumi
import pulumi_aws as aws
example = aws.pipes.Pipe("example",
    name="example-pipe",
    role_arn=example_aws_iam_role["arn"],
    source=source["arn"],
    target=target["arn"],
    enrichment=example_aws_cloudwatch_event_api_destination["arn"],
    enrichment_parameters={
        "http_parameters": {
            "path_parameter_values": "example-path-param",
            "header_parameters": {
                "example-header": "example-value",
                "second-example-header": "second-example-value",
            },
            "query_string_parameters": {
                "example-query-string": "example-value",
                "second-example-query-string": "second-example-value",
            },
        },
    })
package main
import (
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/pipes"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := pipes.NewPipe(ctx, "example", &pipes.PipeArgs{
			Name:       pulumi.String("example-pipe"),
			RoleArn:    pulumi.Any(exampleAwsIamRole.Arn),
			Source:     pulumi.Any(source.Arn),
			Target:     pulumi.Any(target.Arn),
			Enrichment: pulumi.Any(exampleAwsCloudwatchEventApiDestination.Arn),
			EnrichmentParameters: &pipes.PipeEnrichmentParametersArgs{
				HttpParameters: &pipes.PipeEnrichmentParametersHttpParametersArgs{
					PathParameterValues: pulumi.String("example-path-param"),
					HeaderParameters: pulumi.StringMap{
						"example-header":        pulumi.String("example-value"),
						"second-example-header": pulumi.String("second-example-value"),
					},
					QueryStringParameters: pulumi.StringMap{
						"example-query-string":        pulumi.String("example-value"),
						"second-example-query-string": pulumi.String("second-example-value"),
					},
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() => 
{
    var example = new Aws.Pipes.Pipe("example", new()
    {
        Name = "example-pipe",
        RoleArn = exampleAwsIamRole.Arn,
        Source = source.Arn,
        Target = target.Arn,
        Enrichment = exampleAwsCloudwatchEventApiDestination.Arn,
        EnrichmentParameters = new Aws.Pipes.Inputs.PipeEnrichmentParametersArgs
        {
            HttpParameters = new Aws.Pipes.Inputs.PipeEnrichmentParametersHttpParametersArgs
            {
                PathParameterValues = "example-path-param",
                HeaderParameters = 
                {
                    { "example-header", "example-value" },
                    { "second-example-header", "second-example-value" },
                },
                QueryStringParameters = 
                {
                    { "example-query-string", "example-value" },
                    { "second-example-query-string", "second-example-value" },
                },
            },
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.pipes.Pipe;
import com.pulumi.aws.pipes.PipeArgs;
import com.pulumi.aws.pipes.inputs.PipeEnrichmentParametersArgs;
import com.pulumi.aws.pipes.inputs.PipeEnrichmentParametersHttpParametersArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var example = new Pipe("example", PipeArgs.builder()
            .name("example-pipe")
            .roleArn(exampleAwsIamRole.arn())
            .source(source.arn())
            .target(target.arn())
            .enrichment(exampleAwsCloudwatchEventApiDestination.arn())
            .enrichmentParameters(PipeEnrichmentParametersArgs.builder()
                .httpParameters(PipeEnrichmentParametersHttpParametersArgs.builder()
                    .pathParameterValues("example-path-param")
                    .headerParameters(Map.ofEntries(
                        Map.entry("example-header", "example-value"),
                        Map.entry("second-example-header", "second-example-value")
                    ))
                    .queryStringParameters(Map.ofEntries(
                        Map.entry("example-query-string", "example-value"),
                        Map.entry("second-example-query-string", "second-example-value")
                    ))
                    .build())
                .build())
            .build());
    }
}
resources:
  example:
    type: aws:pipes:Pipe
    properties:
      name: example-pipe
      roleArn: ${exampleAwsIamRole.arn}
      source: ${source.arn}
      target: ${target.arn}
      enrichment: ${exampleAwsCloudwatchEventApiDestination.arn}
      enrichmentParameters:
        httpParameters:
          pathParameterValues: example-path-param
          headerParameters:
            example-header: example-value
            second-example-header: second-example-value
          queryStringParameters:
            example-query-string: example-value
            second-example-query-string: second-example-value
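The enrichment argument above references an EventBridge API destination declared elsewhere. A minimal sketch of how such a destination might be declared, assuming an API-key-authenticated HTTPS endpoint (the connection name, header key, and URL are illustrative placeholders):
import * as aws from "@pulumi/aws";
// A minimal sketch, assuming API_KEY authorization; the endpoint and key
// values are placeholders, not part of the example above.
const connection = new aws.cloudwatch.EventConnection("example", {
    name: "example-connection",
    authorizationType: "API_KEY",
    authParameters: {
        apiKey: {
            key: "x-api-key",
            value: "example-api-key-value",
        },
    },
});
const apiDestination = new aws.cloudwatch.EventApiDestination("example", {
    name: "example-destination",
    invocationEndpoint: "https://example.com/enrich",
    httpMethod: "POST",
    connectionArn: connection.arn,
});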
Filter Usage
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const example = new aws.pipes.Pipe("example", {
    name: "example-pipe",
    roleArn: exampleAwsIamRole.arn,
    source: source.arn,
    target: target.arn,
    sourceParameters: {
        filterCriteria: {
            filters: [{
                pattern: JSON.stringify({
                    source: ["event-source"],
                }),
            }],
        },
    },
});
import pulumi
import json
import pulumi_aws as aws
example = aws.pipes.Pipe("example",
    name="example-pipe",
    role_arn=example_aws_iam_role["arn"],
    source=source["arn"],
    target=target["arn"],
    source_parameters={
        "filter_criteria": {
            "filters": [{
                "pattern": json.dumps({
                    "source": ["event-source"],
                }),
            }],
        },
    })
package main
import (
	"encoding/json"
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/pipes"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		tmpJSON0, err := json.Marshal(map[string]interface{}{
			"source": []string{
				"event-source",
			},
		})
		if err != nil {
			return err
		}
		json0 := string(tmpJSON0)
		_, err = pipes.NewPipe(ctx, "example", &pipes.PipeArgs{
			Name:    pulumi.String("example-pipe"),
			RoleArn: pulumi.Any(exampleAwsIamRole.Arn),
			Source:  pulumi.Any(source.Arn),
			Target:  pulumi.Any(target.Arn),
			SourceParameters: &pipes.PipeSourceParametersArgs{
				FilterCriteria: &pipes.PipeSourceParametersFilterCriteriaArgs{
					Filters: pipes.PipeSourceParametersFilterCriteriaFilterArray{
						&pipes.PipeSourceParametersFilterCriteriaFilterArgs{
							Pattern: pulumi.String(json0),
						},
					},
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() => 
{
    var example = new Aws.Pipes.Pipe("example", new()
    {
        Name = "example-pipe",
        RoleArn = exampleAwsIamRole.Arn,
        Source = source.Arn,
        Target = target.Arn,
        SourceParameters = new Aws.Pipes.Inputs.PipeSourceParametersArgs
        {
            FilterCriteria = new Aws.Pipes.Inputs.PipeSourceParametersFilterCriteriaArgs
            {
                Filters = new[]
                {
                    new Aws.Pipes.Inputs.PipeSourceParametersFilterCriteriaFilterArgs
                    {
                        Pattern = JsonSerializer.Serialize(new Dictionary<string, object?>
                        {
                            ["source"] = new[]
                            {
                                "event-source",
                            },
                        }),
                    },
                },
            },
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.pipes.Pipe;
import com.pulumi.aws.pipes.PipeArgs;
import com.pulumi.aws.pipes.inputs.PipeSourceParametersArgs;
import com.pulumi.aws.pipes.inputs.PipeSourceParametersFilterCriteriaArgs;
import static com.pulumi.codegen.internal.Serialization.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var example = new Pipe("example", PipeArgs.builder()
            .name("example-pipe")
            .roleArn(exampleAwsIamRole.arn())
            .source(source.arn())
            .target(target.arn())
            .sourceParameters(PipeSourceParametersArgs.builder()
                .filterCriteria(PipeSourceParametersFilterCriteriaArgs.builder()
                    .filters(PipeSourceParametersFilterCriteriaFilterArgs.builder()
                        .pattern(serializeJson(
                            jsonObject(
                                jsonProperty("source", jsonArray("event-source"))
                            )))
                        .build())
                    .build())
                .build())
            .build());
    }
}
resources:
  example:
    type: aws:pipes:Pipe
    properties:
      name: example-pipe
      roleArn: ${exampleAwsIamRole.arn}
      source: ${source.arn}
      target: ${target.arn}
      sourceParameters:
        filterCriteria:
          filters:
            - pattern:
                fn::toJSON:
                  source:
                    - event-source
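Filter patterns use EventBridge event pattern syntax, and for an SQS source they are matched against the pipe event, so a JSON message body can be filtered under the body key. A hedged sketch of a richer pattern (the status field is an assumed message-body attribute, and roleArn/source/target again stand in for resources declared elsewhere):
import * as aws from "@pulumi/aws";
const filtered = new aws.pipes.Pipe("filtered", {
    name: "example-filtered-pipe",
    roleArn: exampleAwsIamRole.arn,
    source: source.arn,
    target: target.arn,
    sourceParameters: {
        filterCriteria: {
            filters: [{
                // Matches messages whose JSON body has status "created",
                // or any status value starting with "pending".
                pattern: JSON.stringify({
                    body: {
                        status: ["created", {
                            prefix: "pending",
                        }],
                    },
                }),
            }],
        },
    },
});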
CloudWatch Logs Logging Configuration Usage
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const example = new aws.cloudwatch.LogGroup("example", {name: "example-pipe-target"});
const examplePipe = new aws.pipes.Pipe("example", {
    name: "example-pipe",
    roleArn: exampleAwsIamRole.arn,
    source: sourceAwsSqsQueue.arn,
    target: targetAwsSqsQueue.arn,
    logConfiguration: {
        includeExecutionDatas: ["ALL"],
        level: "INFO",
        cloudwatchLogsLogDestination: {
            logGroupArn: example.arn,
        },
    },
}, {
    dependsOn: [
        source,
        target,
    ],
});
import pulumi
import pulumi_aws as aws
example = aws.cloudwatch.LogGroup("example", name="example-pipe-target")
example_pipe = aws.pipes.Pipe("example",
    name="example-pipe",
    role_arn=example_aws_iam_role["arn"],
    source=source_aws_sqs_queue["arn"],
    target=target_aws_sqs_queue["arn"],
    log_configuration={
        "include_execution_datas": ["ALL"],
        "level": "INFO",
        "cloudwatch_logs_log_destination": {
            "log_group_arn": target_aws_cloudwatch_log_group["arn"],
        },
    },
    opts=pulumi.ResourceOptions(depends_on=[
        source,
        target,
    ]))
package main
import (
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/cloudwatch"
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/pipes"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		example, err := cloudwatch.NewLogGroup(ctx, "example", &cloudwatch.LogGroupArgs{
			Name: pulumi.String("example-pipe-target"),
		})
		if err != nil {
			return err
		}
		_, err = pipes.NewPipe(ctx, "example", &pipes.PipeArgs{
			Name:    pulumi.String("example-pipe"),
			RoleArn: pulumi.Any(exampleAwsIamRole.Arn),
			Source:  pulumi.Any(sourceAwsSqsQueue.Arn),
			Target:  pulumi.Any(targetAwsSqsQueue.Arn),
			LogConfiguration: &pipes.PipeLogConfigurationArgs{
				IncludeExecutionDatas: pulumi.StringArray{
					pulumi.String("ALL"),
				},
				Level: pulumi.String("INFO"),
				CloudwatchLogsLogDestination: &pipes.PipeLogConfigurationCloudwatchLogsLogDestinationArgs{
					LogGroupArn: example.Arn,
				},
			},
		}, pulumi.DependsOn([]pulumi.Resource{
			source,
			target,
		}))
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() => 
{
    var example = new Aws.CloudWatch.LogGroup("example", new()
    {
        Name = "example-pipe-target",
    });
    var examplePipe = new Aws.Pipes.Pipe("example", new()
    {
        Name = "example-pipe",
        RoleArn = exampleAwsIamRole.Arn,
        Source = sourceAwsSqsQueue.Arn,
        Target = targetAwsSqsQueue.Arn,
        LogConfiguration = new Aws.Pipes.Inputs.PipeLogConfigurationArgs
        {
            IncludeExecutionDatas = new[]
            {
                "ALL",
            },
            Level = "INFO",
            CloudwatchLogsLogDestination = new Aws.Pipes.Inputs.PipeLogConfigurationCloudwatchLogsLogDestinationArgs
            {
                LogGroupArn = example.Arn,
            },
        },
    }, new CustomResourceOptions
    {
        DependsOn =
        {
            source,
            target,
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.cloudwatch.LogGroup;
import com.pulumi.aws.cloudwatch.LogGroupArgs;
import com.pulumi.aws.pipes.Pipe;
import com.pulumi.aws.pipes.PipeArgs;
import com.pulumi.aws.pipes.inputs.PipeLogConfigurationArgs;
import com.pulumi.aws.pipes.inputs.PipeLogConfigurationCloudwatchLogsLogDestinationArgs;
import com.pulumi.resources.CustomResourceOptions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var example = new LogGroup("example", LogGroupArgs.builder()
            .name("example-pipe-target")
            .build());
        var examplePipe = new Pipe("examplePipe", PipeArgs.builder()
            .name("example-pipe")
            .roleArn(exampleAwsIamRole.arn())
            .source(sourceAwsSqsQueue.arn())
            .target(targetAwsSqsQueue.arn())
            .logConfiguration(PipeLogConfigurationArgs.builder()
                .includeExecutionDatas("ALL")
                .level("INFO")
                .cloudwatchLogsLogDestination(PipeLogConfigurationCloudwatchLogsLogDestinationArgs.builder()
                    .logGroupArn(example.arn())
                    .build())
                .build())
            .build(), CustomResourceOptions.builder()
                .dependsOn(                
                    source,
                    target)
                .build());
    }
}
resources:
  example:
    type: aws:cloudwatch:LogGroup
    properties:
      name: example-pipe-target
  examplePipe:
    type: aws:pipes:Pipe
    name: example
    properties:
      name: example-pipe
      roleArn: ${exampleAwsIamRole.arn}
      source: ${sourceAwsSqsQueue.arn}
      target: ${targetAwsSqsQueue.arn}
      logConfiguration:
        includeExecutionDatas:
          - ALL
        level: INFO
        cloudwatchLogsLogDestination:
          logGroupArn: ${example.arn}
    options:
      dependsOn:
        - ${source}
        - ${target}
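The log configuration also supports Kinesis Data Firehose and S3 destinations (see the constructor reference below). A hedged sketch of the S3 variant, assuming the role and queues are declared elsewhere and that "json" is the desired output format; the bucket is created here and the caller's account is used as the bucket owner:
import * as aws from "@pulumi/aws";
const current = aws.getCallerIdentity({});
const logBucket = new aws.s3.BucketV2("pipe-logs", {});
const examplePipeS3 = new aws.pipes.Pipe("example-s3-logs", {
    name: "example-pipe-s3-logs",
    roleArn: exampleAwsIamRole.arn,
    source: sourceAwsSqsQueue.arn,
    target: targetAwsSqsQueue.arn,
    logConfiguration: {
        level: "ERROR",
        s3LogDestination: {
            // The bucket created above; the owner must be the bucket's account.
            bucketName: logBucket.bucket,
            bucketOwner: current.then(c => c.accountId),
            outputFormat: "json",
        },
    },
});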
SQS Source and Target Configuration Usage
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const example = new aws.pipes.Pipe("example", {
    name: "example-pipe",
    roleArn: exampleAwsIamRole.arn,
    source: source.arn,
    target: target.arn,
    sourceParameters: {
        sqsQueueParameters: {
            batchSize: 1,
            maximumBatchingWindowInSeconds: 2,
        },
    },
    targetParameters: {
        sqsQueueParameters: {
            messageDeduplicationId: "example-dedupe",
            messageGroupId: "example-group",
        },
    },
});
import pulumi
import pulumi_aws as aws
example = aws.pipes.Pipe("example",
    name="example-pipe",
    role_arn=example_aws_iam_role["arn"],
    source=source["arn"],
    target=target["arn"],
    source_parameters={
        "sqs_queue_parameters": {
            "batch_size": 1,
            "maximum_batching_window_in_seconds": 2,
        },
    },
    target_parameters={
        "sqs_queue_parameters": {
            "message_deduplication_id": "example-dedupe",
            "message_group_id": "example-group",
        },
    })
package main
import (
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/pipes"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := pipes.NewPipe(ctx, "example", &pipes.PipeArgs{
			Name:    pulumi.String("example-pipe"),
			RoleArn: pulumi.Any(exampleAwsIamRole.Arn),
			Source:  pulumi.Any(source.Arn),
			Target:  pulumi.Any(target.Arn),
			SourceParameters: &pipes.PipeSourceParametersArgs{
				SqsQueueParameters: &pipes.PipeSourceParametersSqsQueueParametersArgs{
					BatchSize:                      pulumi.Int(1),
					MaximumBatchingWindowInSeconds: pulumi.Int(2),
				},
			},
			TargetParameters: &pipes.PipeTargetParametersArgs{
				SqsQueueParameters: &pipes.PipeTargetParametersSqsQueueParametersArgs{
					MessageDeduplicationId: pulumi.String("example-dedupe"),
					MessageGroupId:         pulumi.String("example-group"),
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() => 
{
    var example = new Aws.Pipes.Pipe("example", new()
    {
        Name = "example-pipe",
        RoleArn = exampleAwsIamRole.Arn,
        Source = source.Arn,
        Target = target.Arn,
        SourceParameters = new Aws.Pipes.Inputs.PipeSourceParametersArgs
        {
            SqsQueueParameters = new Aws.Pipes.Inputs.PipeSourceParametersSqsQueueParametersArgs
            {
                BatchSize = 1,
                MaximumBatchingWindowInSeconds = 2,
            },
        },
        TargetParameters = new Aws.Pipes.Inputs.PipeTargetParametersArgs
        {
            SqsQueueParameters = new Aws.Pipes.Inputs.PipeTargetParametersSqsQueueParametersArgs
            {
                MessageDeduplicationId = "example-dedupe",
                MessageGroupId = "example-group",
            },
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.pipes.Pipe;
import com.pulumi.aws.pipes.PipeArgs;
import com.pulumi.aws.pipes.inputs.PipeSourceParametersArgs;
import com.pulumi.aws.pipes.inputs.PipeSourceParametersSqsQueueParametersArgs;
import com.pulumi.aws.pipes.inputs.PipeTargetParametersArgs;
import com.pulumi.aws.pipes.inputs.PipeTargetParametersSqsQueueParametersArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var example = new Pipe("example", PipeArgs.builder()
            .name("example-pipe")
            .roleArn(exampleAwsIamRole.arn())
            .source(source.arn())
            .target(target.arn())
            .sourceParameters(PipeSourceParametersArgs.builder()
                .sqsQueueParameters(PipeSourceParametersSqsQueueParametersArgs.builder()
                    .batchSize(1)
                    .maximumBatchingWindowInSeconds(2)
                    .build())
                .build())
            .targetParameters(PipeTargetParametersArgs.builder()
                .sqsQueueParameters(PipeTargetParametersSqsQueueParametersArgs.builder()
                    .messageDeduplicationId("example-dedupe")
                    .messageGroupId("example-group")
                    .build())
                .build())
            .build());
    }
}
resources:
  example:
    type: aws:pipes:Pipe
    properties:
      name: example-pipe
      roleArn: ${exampleAwsIamRole.arn}
      source: ${source.arn}
      target: ${target.arn}
      sourceParameters:
        sqsQueueParameters:
          batchSize: 1
          maximumBatchingWindowInSeconds: 2
      targetParameters:
        sqsQueueParameters:
          messageDeduplicationId: example-dedupe
          messageGroupId: example-group
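Note that the messageDeduplicationId and messageGroupId target parameters only apply when the target is a FIFO queue. A minimal sketch of such a queue (the name is illustrative; FIFO queue names must end in ".fifo"):
import * as aws from "@pulumi/aws";
// A minimal sketch; fifoQueue marks the queue as FIFO, which the
// SQS target parameters above require.
const targetFifo = new aws.sqs.Queue("target", {
    name: "example-target.fifo",
    fifoQueue: true,
});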
Create Pipe Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new Pipe(name: string, args: PipeArgs, opts?: CustomResourceOptions);
@overload
def Pipe(resource_name: str,
         args: PipeArgs,
         opts: Optional[ResourceOptions] = None)
@overload
def Pipe(resource_name: str,
         opts: Optional[ResourceOptions] = None,
         role_arn: Optional[str] = None,
         target: Optional[str] = None,
         source: Optional[str] = None,
         enrichment_parameters: Optional[PipeEnrichmentParametersArgs] = None,
         kms_key_identifier: Optional[str] = None,
         log_configuration: Optional[PipeLogConfigurationArgs] = None,
         name: Optional[str] = None,
         name_prefix: Optional[str] = None,
         description: Optional[str] = None,
         enrichment: Optional[str] = None,
         source_parameters: Optional[PipeSourceParametersArgs] = None,
         tags: Optional[Mapping[str, str]] = None,
         desired_state: Optional[str] = None,
         target_parameters: Optional[PipeTargetParametersArgs] = None)
func NewPipe(ctx *Context, name string, args PipeArgs, opts ...ResourceOption) (*Pipe, error)
public Pipe(string name, PipeArgs args, CustomResourceOptions? opts = null)
public Pipe(String name, PipeArgs args)
public Pipe(String name, PipeArgs args, CustomResourceOptions options)
type: aws:pipes:Pipe
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args PipeArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args PipeArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args PipeArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args PipeArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args PipeArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var pipeResource = new Aws.Pipes.Pipe("pipeResource", new()
{
    RoleArn = "string",
    Target = "string",
    Source = "string",
    EnrichmentParameters = new Aws.Pipes.Inputs.PipeEnrichmentParametersArgs
    {
        HttpParameters = new Aws.Pipes.Inputs.PipeEnrichmentParametersHttpParametersArgs
        {
            HeaderParameters = 
            {
                { "string", "string" },
            },
            PathParameterValues = "string",
            QueryStringParameters = 
            {
                { "string", "string" },
            },
        },
        InputTemplate = "string",
    },
    KmsKeyIdentifier = "string",
    LogConfiguration = new Aws.Pipes.Inputs.PipeLogConfigurationArgs
    {
        Level = "string",
        CloudwatchLogsLogDestination = new Aws.Pipes.Inputs.PipeLogConfigurationCloudwatchLogsLogDestinationArgs
        {
            LogGroupArn = "string",
        },
        FirehoseLogDestination = new Aws.Pipes.Inputs.PipeLogConfigurationFirehoseLogDestinationArgs
        {
            DeliveryStreamArn = "string",
        },
        IncludeExecutionDatas = new[]
        {
            "string",
        },
        S3LogDestination = new Aws.Pipes.Inputs.PipeLogConfigurationS3LogDestinationArgs
        {
            BucketName = "string",
            BucketOwner = "string",
            OutputFormat = "string",
            Prefix = "string",
        },
    },
    Name = "string",
    NamePrefix = "string",
    Description = "string",
    Enrichment = "string",
    SourceParameters = new Aws.Pipes.Inputs.PipeSourceParametersArgs
    {
        ActivemqBrokerParameters = new Aws.Pipes.Inputs.PipeSourceParametersActivemqBrokerParametersArgs
        {
            Credentials = new Aws.Pipes.Inputs.PipeSourceParametersActivemqBrokerParametersCredentialsArgs
            {
                BasicAuth = "string",
            },
            QueueName = "string",
            BatchSize = 0,
            MaximumBatchingWindowInSeconds = 0,
        },
        DynamodbStreamParameters = new Aws.Pipes.Inputs.PipeSourceParametersDynamodbStreamParametersArgs
        {
            StartingPosition = "string",
            BatchSize = 0,
            DeadLetterConfig = new Aws.Pipes.Inputs.PipeSourceParametersDynamodbStreamParametersDeadLetterConfigArgs
            {
                Arn = "string",
            },
            MaximumBatchingWindowInSeconds = 0,
            MaximumRecordAgeInSeconds = 0,
            MaximumRetryAttempts = 0,
            OnPartialBatchItemFailure = "string",
            ParallelizationFactor = 0,
        },
        FilterCriteria = new Aws.Pipes.Inputs.PipeSourceParametersFilterCriteriaArgs
        {
            Filters = new[]
            {
                new Aws.Pipes.Inputs.PipeSourceParametersFilterCriteriaFilterArgs
                {
                    Pattern = "string",
                },
            },
        },
        KinesisStreamParameters = new Aws.Pipes.Inputs.PipeSourceParametersKinesisStreamParametersArgs
        {
            StartingPosition = "string",
            BatchSize = 0,
            DeadLetterConfig = new Aws.Pipes.Inputs.PipeSourceParametersKinesisStreamParametersDeadLetterConfigArgs
            {
                Arn = "string",
            },
            MaximumBatchingWindowInSeconds = 0,
            MaximumRecordAgeInSeconds = 0,
            MaximumRetryAttempts = 0,
            OnPartialBatchItemFailure = "string",
            ParallelizationFactor = 0,
            StartingPositionTimestamp = "string",
        },
        ManagedStreamingKafkaParameters = new Aws.Pipes.Inputs.PipeSourceParametersManagedStreamingKafkaParametersArgs
        {
            TopicName = "string",
            BatchSize = 0,
            ConsumerGroupId = "string",
            Credentials = new Aws.Pipes.Inputs.PipeSourceParametersManagedStreamingKafkaParametersCredentialsArgs
            {
                ClientCertificateTlsAuth = "string",
                SaslScram512Auth = "string",
            },
            MaximumBatchingWindowInSeconds = 0,
            StartingPosition = "string",
        },
        RabbitmqBrokerParameters = new Aws.Pipes.Inputs.PipeSourceParametersRabbitmqBrokerParametersArgs
        {
            Credentials = new Aws.Pipes.Inputs.PipeSourceParametersRabbitmqBrokerParametersCredentialsArgs
            {
                BasicAuth = "string",
            },
            QueueName = "string",
            BatchSize = 0,
            MaximumBatchingWindowInSeconds = 0,
            VirtualHost = "string",
        },
        SelfManagedKafkaParameters = new Aws.Pipes.Inputs.PipeSourceParametersSelfManagedKafkaParametersArgs
        {
            TopicName = "string",
            AdditionalBootstrapServers = new[]
            {
                "string",
            },
            BatchSize = 0,
            ConsumerGroupId = "string",
            Credentials = new Aws.Pipes.Inputs.PipeSourceParametersSelfManagedKafkaParametersCredentialsArgs
            {
                BasicAuth = "string",
                ClientCertificateTlsAuth = "string",
                SaslScram256Auth = "string",
                SaslScram512Auth = "string",
            },
            MaximumBatchingWindowInSeconds = 0,
            ServerRootCaCertificate = "string",
            StartingPosition = "string",
            Vpc = new Aws.Pipes.Inputs.PipeSourceParametersSelfManagedKafkaParametersVpcArgs
            {
                SecurityGroups = new[]
                {
                    "string",
                },
                Subnets = new[]
                {
                    "string",
                },
            },
        },
        SqsQueueParameters = new Aws.Pipes.Inputs.PipeSourceParametersSqsQueueParametersArgs
        {
            BatchSize = 0,
            MaximumBatchingWindowInSeconds = 0,
        },
    },
    Tags = 
    {
        { "string", "string" },
    },
    DesiredState = "string",
    TargetParameters = new Aws.Pipes.Inputs.PipeTargetParametersArgs
    {
        BatchJobParameters = new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersArgs
        {
            JobDefinition = "string",
            JobName = "string",
            ArrayProperties = new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersArrayPropertiesArgs
            {
                Size = 0,
            },
            ContainerOverrides = new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersContainerOverridesArgs
            {
                Commands = new[]
                {
                    "string",
                },
                Environments = new[]
                {
                    new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersContainerOverridesEnvironmentArgs
                    {
                        Name = "string",
                        Value = "string",
                    },
                },
                InstanceType = "string",
                ResourceRequirements = new[]
                {
                    new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirementArgs
                    {
                        Type = "string",
                        Value = "string",
                    },
                },
            },
            DependsOns = new[]
            {
                new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersDependsOnArgs
                {
                    JobId = "string",
                    Type = "string",
                },
            },
            Parameters = 
            {
                { "string", "string" },
            },
            RetryStrategy = new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersRetryStrategyArgs
            {
                Attempts = 0,
            },
        },
        CloudwatchLogsParameters = new Aws.Pipes.Inputs.PipeTargetParametersCloudwatchLogsParametersArgs
        {
            LogStreamName = "string",
            Timestamp = "string",
        },
        EcsTaskParameters = new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersArgs
        {
            TaskDefinitionArn = "string",
            Overrides = new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesArgs
            {
                ContainerOverrides = new[]
                {
                    new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideArgs
                    {
                        Commands = new[]
                        {
                            "string",
                        },
                        Cpu = 0,
                        EnvironmentFiles = new[]
                        {
                            new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFileArgs
                            {
                                Type = "string",
                                Value = "string",
                            },
                        },
                        Environments = new[]
                        {
                            new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentArgs
                            {
                                Name = "string",
                                Value = "string",
                            },
                        },
                        Memory = 0,
                        MemoryReservation = 0,
                        Name = "string",
                        ResourceRequirements = new[]
                        {
                            new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirementArgs
                            {
                                Type = "string",
                                Value = "string",
                            },
                        },
                    },
                },
                Cpu = "string",
                EphemeralStorage = new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesEphemeralStorageArgs
                {
                    SizeInGib = 0,
                },
                ExecutionRoleArn = "string",
                InferenceAcceleratorOverrides = new[]
                {
                    new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverrideArgs
                    {
                        DeviceName = "string",
                        DeviceType = "string",
                    },
                },
                Memory = "string",
                TaskRoleArn = "string",
            },
            PlacementStrategies = new[]
            {
                new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersPlacementStrategyArgs
                {
                    Field = "string",
                    Type = "string",
                },
            },
            Group = "string",
            LaunchType = "string",
            NetworkConfiguration = new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersNetworkConfigurationArgs
            {
                AwsVpcConfiguration = new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfigurationArgs
                {
                    AssignPublicIp = "string",
                    SecurityGroups = new[]
                    {
                        "string",
                    },
                    Subnets = new[]
                    {
                        "string",
                    },
                },
            },
            CapacityProviderStrategies = new[]
            {
                new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersCapacityProviderStrategyArgs
                {
                    CapacityProvider = "string",
                    Base = 0,
                    Weight = 0,
                },
            },
            PlacementConstraints = new[]
            {
                new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersPlacementConstraintArgs
                {
                    Expression = "string",
                    Type = "string",
                },
            },
            EnableExecuteCommand = false,
            PlatformVersion = "string",
            PropagateTags = "string",
            ReferenceId = "string",
            Tags = 
            {
                { "string", "string" },
            },
            TaskCount = 0,
            EnableEcsManagedTags = false,
        },
        EventbridgeEventBusParameters = new Aws.Pipes.Inputs.PipeTargetParametersEventbridgeEventBusParametersArgs
        {
            DetailType = "string",
            EndpointId = "string",
            Resources = new[]
            {
                "string",
            },
            Source = "string",
            Time = "string",
        },
        HttpParameters = new Aws.Pipes.Inputs.PipeTargetParametersHttpParametersArgs
        {
            HeaderParameters = 
            {
                { "string", "string" },
            },
            PathParameterValues = "string",
            QueryStringParameters = 
            {
                { "string", "string" },
            },
        },
        InputTemplate = "string",
        KinesisStreamParameters = new Aws.Pipes.Inputs.PipeTargetParametersKinesisStreamParametersArgs
        {
            PartitionKey = "string",
        },
        LambdaFunctionParameters = new Aws.Pipes.Inputs.PipeTargetParametersLambdaFunctionParametersArgs
        {
            InvocationType = "string",
        },
        RedshiftDataParameters = new Aws.Pipes.Inputs.PipeTargetParametersRedshiftDataParametersArgs
        {
            Database = "string",
            Sqls = new[]
            {
                "string",
            },
            DbUser = "string",
            SecretManagerArn = "string",
            StatementName = "string",
            WithEvent = false,
        },
        SagemakerPipelineParameters = new Aws.Pipes.Inputs.PipeTargetParametersSagemakerPipelineParametersArgs
        {
            PipelineParameters = new[]
            {
                new Aws.Pipes.Inputs.PipeTargetParametersSagemakerPipelineParametersPipelineParameterArgs
                {
                    Name = "string",
                    Value = "string",
                },
            },
        },
        SqsQueueParameters = new Aws.Pipes.Inputs.PipeTargetParametersSqsQueueParametersArgs
        {
            MessageDeduplicationId = "string",
            MessageGroupId = "string",
        },
        StepFunctionStateMachineParameters = new Aws.Pipes.Inputs.PipeTargetParametersStepFunctionStateMachineParametersArgs
        {
            InvocationType = "string",
        },
    },
});
example, err := pipes.NewPipe(ctx, "pipeResource", &pipes.PipeArgs{
	RoleArn: pulumi.String("string"),
	Target:  pulumi.String("string"),
	Source:  pulumi.String("string"),
	EnrichmentParameters: &pipes.PipeEnrichmentParametersArgs{
		HttpParameters: &pipes.PipeEnrichmentParametersHttpParametersArgs{
			HeaderParameters: pulumi.StringMap{
				"string": pulumi.String("string"),
			},
			PathParameterValues: pulumi.String("string"),
			QueryStringParameters: pulumi.StringMap{
				"string": pulumi.String("string"),
			},
		},
		InputTemplate: pulumi.String("string"),
	},
	KmsKeyIdentifier: pulumi.String("string"),
	LogConfiguration: &pipes.PipeLogConfigurationArgs{
		Level: pulumi.String("string"),
		CloudwatchLogsLogDestination: &pipes.PipeLogConfigurationCloudwatchLogsLogDestinationArgs{
			LogGroupArn: pulumi.String("string"),
		},
		FirehoseLogDestination: &pipes.PipeLogConfigurationFirehoseLogDestinationArgs{
			DeliveryStreamArn: pulumi.String("string"),
		},
		IncludeExecutionDatas: pulumi.StringArray{
			pulumi.String("string"),
		},
		S3LogDestination: &pipes.PipeLogConfigurationS3LogDestinationArgs{
			BucketName:   pulumi.String("string"),
			BucketOwner:  pulumi.String("string"),
			OutputFormat: pulumi.String("string"),
			Prefix:       pulumi.String("string"),
		},
	},
	Name:        pulumi.String("string"),
	NamePrefix:  pulumi.String("string"),
	Description: pulumi.String("string"),
	Enrichment:  pulumi.String("string"),
	SourceParameters: &pipes.PipeSourceParametersArgs{
		ActivemqBrokerParameters: &pipes.PipeSourceParametersActivemqBrokerParametersArgs{
			Credentials: &pipes.PipeSourceParametersActivemqBrokerParametersCredentialsArgs{
				BasicAuth: pulumi.String("string"),
			},
			QueueName:                      pulumi.String("string"),
			BatchSize:                      pulumi.Int(0),
			MaximumBatchingWindowInSeconds: pulumi.Int(0),
		},
		DynamodbStreamParameters: &pipes.PipeSourceParametersDynamodbStreamParametersArgs{
			StartingPosition: pulumi.String("string"),
			BatchSize:        pulumi.Int(0),
			DeadLetterConfig: &pipes.PipeSourceParametersDynamodbStreamParametersDeadLetterConfigArgs{
				Arn: pulumi.String("string"),
			},
			MaximumBatchingWindowInSeconds: pulumi.Int(0),
			MaximumRecordAgeInSeconds:      pulumi.Int(0),
			MaximumRetryAttempts:           pulumi.Int(0),
			OnPartialBatchItemFailure:      pulumi.String("string"),
			ParallelizationFactor:          pulumi.Int(0),
		},
		FilterCriteria: &pipes.PipeSourceParametersFilterCriteriaArgs{
			Filters: pipes.PipeSourceParametersFilterCriteriaFilterArray{
				&pipes.PipeSourceParametersFilterCriteriaFilterArgs{
					Pattern: pulumi.String("string"),
				},
			},
		},
		KinesisStreamParameters: &pipes.PipeSourceParametersKinesisStreamParametersArgs{
			StartingPosition: pulumi.String("string"),
			BatchSize:        pulumi.Int(0),
			DeadLetterConfig: &pipes.PipeSourceParametersKinesisStreamParametersDeadLetterConfigArgs{
				Arn: pulumi.String("string"),
			},
			MaximumBatchingWindowInSeconds: pulumi.Int(0),
			MaximumRecordAgeInSeconds:      pulumi.Int(0),
			MaximumRetryAttempts:           pulumi.Int(0),
			OnPartialBatchItemFailure:      pulumi.String("string"),
			ParallelizationFactor:          pulumi.Int(0),
			StartingPositionTimestamp:      pulumi.String("string"),
		},
		ManagedStreamingKafkaParameters: &pipes.PipeSourceParametersManagedStreamingKafkaParametersArgs{
			TopicName:       pulumi.String("string"),
			BatchSize:       pulumi.Int(0),
			ConsumerGroupId: pulumi.String("string"),
			Credentials: &pipes.PipeSourceParametersManagedStreamingKafkaParametersCredentialsArgs{
				ClientCertificateTlsAuth: pulumi.String("string"),
				SaslScram512Auth:         pulumi.String("string"),
			},
			MaximumBatchingWindowInSeconds: pulumi.Int(0),
			StartingPosition:               pulumi.String("string"),
		},
		RabbitmqBrokerParameters: &pipes.PipeSourceParametersRabbitmqBrokerParametersArgs{
			Credentials: &pipes.PipeSourceParametersRabbitmqBrokerParametersCredentialsArgs{
				BasicAuth: pulumi.String("string"),
			},
			QueueName:                      pulumi.String("string"),
			BatchSize:                      pulumi.Int(0),
			MaximumBatchingWindowInSeconds: pulumi.Int(0),
			VirtualHost:                    pulumi.String("string"),
		},
		SelfManagedKafkaParameters: &pipes.PipeSourceParametersSelfManagedKafkaParametersArgs{
			TopicName: pulumi.String("string"),
			AdditionalBootstrapServers: pulumi.StringArray{
				pulumi.String("string"),
			},
			BatchSize:       pulumi.Int(0),
			ConsumerGroupId: pulumi.String("string"),
			Credentials: &pipes.PipeSourceParametersSelfManagedKafkaParametersCredentialsArgs{
				BasicAuth:                pulumi.String("string"),
				ClientCertificateTlsAuth: pulumi.String("string"),
				SaslScram256Auth:         pulumi.String("string"),
				SaslScram512Auth:         pulumi.String("string"),
			},
			MaximumBatchingWindowInSeconds: pulumi.Int(0),
			ServerRootCaCertificate:        pulumi.String("string"),
			StartingPosition:               pulumi.String("string"),
			Vpc: &pipes.PipeSourceParametersSelfManagedKafkaParametersVpcArgs{
				SecurityGroups: pulumi.StringArray{
					pulumi.String("string"),
				},
				Subnets: pulumi.StringArray{
					pulumi.String("string"),
				},
			},
		},
		SqsQueueParameters: &pipes.PipeSourceParametersSqsQueueParametersArgs{
			BatchSize:                      pulumi.Int(0),
			MaximumBatchingWindowInSeconds: pulumi.Int(0),
		},
	},
	Tags: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	DesiredState: pulumi.String("string"),
	TargetParameters: &pipes.PipeTargetParametersArgs{
		BatchJobParameters: &pipes.PipeTargetParametersBatchJobParametersArgs{
			JobDefinition: pulumi.String("string"),
			JobName:       pulumi.String("string"),
			ArrayProperties: &pipes.PipeTargetParametersBatchJobParametersArrayPropertiesArgs{
				Size: pulumi.Int(0),
			},
			ContainerOverrides: &pipes.PipeTargetParametersBatchJobParametersContainerOverridesArgs{
				Commands: pulumi.StringArray{
					pulumi.String("string"),
				},
				Environments: pipes.PipeTargetParametersBatchJobParametersContainerOverridesEnvironmentArray{
					&pipes.PipeTargetParametersBatchJobParametersContainerOverridesEnvironmentArgs{
						Name:  pulumi.String("string"),
						Value: pulumi.String("string"),
					},
				},
				InstanceType: pulumi.String("string"),
				ResourceRequirements: pipes.PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirementArray{
					&pipes.PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirementArgs{
						Type:  pulumi.String("string"),
						Value: pulumi.String("string"),
					},
				},
			},
			DependsOns: pipes.PipeTargetParametersBatchJobParametersDependsOnArray{
				&pipes.PipeTargetParametersBatchJobParametersDependsOnArgs{
					JobId: pulumi.String("string"),
					Type:  pulumi.String("string"),
				},
			},
			Parameters: pulumi.StringMap{
				"string": pulumi.String("string"),
			},
			RetryStrategy: &pipes.PipeTargetParametersBatchJobParametersRetryStrategyArgs{
				Attempts: pulumi.Int(0),
			},
		},
		CloudwatchLogsParameters: &pipes.PipeTargetParametersCloudwatchLogsParametersArgs{
			LogStreamName: pulumi.String("string"),
			Timestamp:     pulumi.String("string"),
		},
		EcsTaskParameters: &pipes.PipeTargetParametersEcsTaskParametersArgs{
			TaskDefinitionArn: pulumi.String("string"),
			Overrides: &pipes.PipeTargetParametersEcsTaskParametersOverridesArgs{
				ContainerOverrides: pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideArray{
					&pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideArgs{
						Commands: pulumi.StringArray{
							pulumi.String("string"),
						},
						Cpu: pulumi.Int(0),
						EnvironmentFiles: pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFileArray{
							&pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFileArgs{
								Type:  pulumi.String("string"),
								Value: pulumi.String("string"),
							},
						},
						Environments: pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentArray{
							&pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentArgs{
								Name:  pulumi.String("string"),
								Value: pulumi.String("string"),
							},
						},
						Memory:            pulumi.Int(0),
						MemoryReservation: pulumi.Int(0),
						Name:              pulumi.String("string"),
						ResourceRequirements: pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirementArray{
							&pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirementArgs{
								Type:  pulumi.String("string"),
								Value: pulumi.String("string"),
							},
						},
					},
				},
				Cpu: pulumi.String("string"),
				EphemeralStorage: &pipes.PipeTargetParametersEcsTaskParametersOverridesEphemeralStorageArgs{
					SizeInGib: pulumi.Int(0),
				},
				ExecutionRoleArn: pulumi.String("string"),
				InferenceAcceleratorOverrides: pipes.PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverrideArray{
					&pipes.PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverrideArgs{
						DeviceName: pulumi.String("string"),
						DeviceType: pulumi.String("string"),
					},
				},
				Memory:      pulumi.String("string"),
				TaskRoleArn: pulumi.String("string"),
			},
			PlacementStrategies: pipes.PipeTargetParametersEcsTaskParametersPlacementStrategyArray{
				&pipes.PipeTargetParametersEcsTaskParametersPlacementStrategyArgs{
					Field: pulumi.String("string"),
					Type:  pulumi.String("string"),
				},
			},
			Group:      pulumi.String("string"),
			LaunchType: pulumi.String("string"),
			NetworkConfiguration: &pipes.PipeTargetParametersEcsTaskParametersNetworkConfigurationArgs{
				AwsVpcConfiguration: &pipes.PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfigurationArgs{
					AssignPublicIp: pulumi.String("string"),
					SecurityGroups: pulumi.StringArray{
						pulumi.String("string"),
					},
					Subnets: pulumi.StringArray{
						pulumi.String("string"),
					},
				},
			},
			CapacityProviderStrategies: pipes.PipeTargetParametersEcsTaskParametersCapacityProviderStrategyArray{
				&pipes.PipeTargetParametersEcsTaskParametersCapacityProviderStrategyArgs{
					CapacityProvider: pulumi.String("string"),
					Base:             pulumi.Int(0),
					Weight:           pulumi.Int(0),
				},
			},
			PlacementConstraints: pipes.PipeTargetParametersEcsTaskParametersPlacementConstraintArray{
				&pipes.PipeTargetParametersEcsTaskParametersPlacementConstraintArgs{
					Expression: pulumi.String("string"),
					Type:       pulumi.String("string"),
				},
			},
			EnableExecuteCommand: pulumi.Bool(false),
			PlatformVersion:      pulumi.String("string"),
			PropagateTags:        pulumi.String("string"),
			ReferenceId:          pulumi.String("string"),
			Tags: pulumi.StringMap{
				"string": pulumi.String("string"),
			},
			TaskCount:            pulumi.Int(0),
			EnableEcsManagedTags: pulumi.Bool(false),
		},
		EventbridgeEventBusParameters: &pipes.PipeTargetParametersEventbridgeEventBusParametersArgs{
			DetailType: pulumi.String("string"),
			EndpointId: pulumi.String("string"),
			Resources: pulumi.StringArray{
				pulumi.String("string"),
			},
			Source: pulumi.String("string"),
			Time:   pulumi.String("string"),
		},
		HttpParameters: &pipes.PipeTargetParametersHttpParametersArgs{
			HeaderParameters: pulumi.StringMap{
				"string": pulumi.String("string"),
			},
			PathParameterValues: pulumi.String("string"),
			QueryStringParameters: pulumi.StringMap{
				"string": pulumi.String("string"),
			},
		},
		InputTemplate: pulumi.String("string"),
		KinesisStreamParameters: &pipes.PipeTargetParametersKinesisStreamParametersArgs{
			PartitionKey: pulumi.String("string"),
		},
		LambdaFunctionParameters: &pipes.PipeTargetParametersLambdaFunctionParametersArgs{
			InvocationType: pulumi.String("string"),
		},
		RedshiftDataParameters: &pipes.PipeTargetParametersRedshiftDataParametersArgs{
			Database: pulumi.String("string"),
			Sqls: pulumi.StringArray{
				pulumi.String("string"),
			},
			DbUser:           pulumi.String("string"),
			SecretManagerArn: pulumi.String("string"),
			StatementName:    pulumi.String("string"),
			WithEvent:        pulumi.Bool(false),
		},
		SagemakerPipelineParameters: &pipes.PipeTargetParametersSagemakerPipelineParametersArgs{
			PipelineParameters: pipes.PipeTargetParametersSagemakerPipelineParametersPipelineParameterArray{
				&pipes.PipeTargetParametersSagemakerPipelineParametersPipelineParameterArgs{
					Name:  pulumi.String("string"),
					Value: pulumi.String("string"),
				},
			},
		},
		SqsQueueParameters: &pipes.PipeTargetParametersSqsQueueParametersArgs{
			MessageDeduplicationId: pulumi.String("string"),
			MessageGroupId:         pulumi.String("string"),
		},
		StepFunctionStateMachineParameters: &pipes.PipeTargetParametersStepFunctionStateMachineParametersArgs{
			InvocationType: pulumi.String("string"),
		},
	},
})
var pipeResource = new Pipe("pipeResource", PipeArgs.builder()
    .roleArn("string")
    .target("string")
    .source("string")
    .enrichmentParameters(PipeEnrichmentParametersArgs.builder()
        .httpParameters(PipeEnrichmentParametersHttpParametersArgs.builder()
            .headerParameters(Map.of("string", "string"))
            .pathParameterValues("string")
            .queryStringParameters(Map.of("string", "string"))
            .build())
        .inputTemplate("string")
        .build())
    .kmsKeyIdentifier("string")
    .logConfiguration(PipeLogConfigurationArgs.builder()
        .level("string")
        .cloudwatchLogsLogDestination(PipeLogConfigurationCloudwatchLogsLogDestinationArgs.builder()
            .logGroupArn("string")
            .build())
        .firehoseLogDestination(PipeLogConfigurationFirehoseLogDestinationArgs.builder()
            .deliveryStreamArn("string")
            .build())
        .includeExecutionDatas("string")
        .s3LogDestination(PipeLogConfigurationS3LogDestinationArgs.builder()
            .bucketName("string")
            .bucketOwner("string")
            .outputFormat("string")
            .prefix("string")
            .build())
        .build())
    .name("string")
    .namePrefix("string")
    .description("string")
    .enrichment("string")
    .sourceParameters(PipeSourceParametersArgs.builder()
        .activemqBrokerParameters(PipeSourceParametersActivemqBrokerParametersArgs.builder()
            .credentials(PipeSourceParametersActivemqBrokerParametersCredentialsArgs.builder()
                .basicAuth("string")
                .build())
            .queueName("string")
            .batchSize(0)
            .maximumBatchingWindowInSeconds(0)
            .build())
        .dynamodbStreamParameters(PipeSourceParametersDynamodbStreamParametersArgs.builder()
            .startingPosition("string")
            .batchSize(0)
            .deadLetterConfig(PipeSourceParametersDynamodbStreamParametersDeadLetterConfigArgs.builder()
                .arn("string")
                .build())
            .maximumBatchingWindowInSeconds(0)
            .maximumRecordAgeInSeconds(0)
            .maximumRetryAttempts(0)
            .onPartialBatchItemFailure("string")
            .parallelizationFactor(0)
            .build())
        .filterCriteria(PipeSourceParametersFilterCriteriaArgs.builder()
            .filters(PipeSourceParametersFilterCriteriaFilterArgs.builder()
                .pattern("string")
                .build())
            .build())
        .kinesisStreamParameters(PipeSourceParametersKinesisStreamParametersArgs.builder()
            .startingPosition("string")
            .batchSize(0)
            .deadLetterConfig(PipeSourceParametersKinesisStreamParametersDeadLetterConfigArgs.builder()
                .arn("string")
                .build())
            .maximumBatchingWindowInSeconds(0)
            .maximumRecordAgeInSeconds(0)
            .maximumRetryAttempts(0)
            .onPartialBatchItemFailure("string")
            .parallelizationFactor(0)
            .startingPositionTimestamp("string")
            .build())
        .managedStreamingKafkaParameters(PipeSourceParametersManagedStreamingKafkaParametersArgs.builder()
            .topicName("string")
            .batchSize(0)
            .consumerGroupId("string")
            .credentials(PipeSourceParametersManagedStreamingKafkaParametersCredentialsArgs.builder()
                .clientCertificateTlsAuth("string")
                .saslScram512Auth("string")
                .build())
            .maximumBatchingWindowInSeconds(0)
            .startingPosition("string")
            .build())
        .rabbitmqBrokerParameters(PipeSourceParametersRabbitmqBrokerParametersArgs.builder()
            .credentials(PipeSourceParametersRabbitmqBrokerParametersCredentialsArgs.builder()
                .basicAuth("string")
                .build())
            .queueName("string")
            .batchSize(0)
            .maximumBatchingWindowInSeconds(0)
            .virtualHost("string")
            .build())
        .selfManagedKafkaParameters(PipeSourceParametersSelfManagedKafkaParametersArgs.builder()
            .topicName("string")
            .additionalBootstrapServers("string")
            .batchSize(0)
            .consumerGroupId("string")
            .credentials(PipeSourceParametersSelfManagedKafkaParametersCredentialsArgs.builder()
                .basicAuth("string")
                .clientCertificateTlsAuth("string")
                .saslScram256Auth("string")
                .saslScram512Auth("string")
                .build())
            .maximumBatchingWindowInSeconds(0)
            .serverRootCaCertificate("string")
            .startingPosition("string")
            .vpc(PipeSourceParametersSelfManagedKafkaParametersVpcArgs.builder()
                .securityGroups("string")
                .subnets("string")
                .build())
            .build())
        .sqsQueueParameters(PipeSourceParametersSqsQueueParametersArgs.builder()
            .batchSize(0)
            .maximumBatchingWindowInSeconds(0)
            .build())
        .build())
    .tags(Map.of("string", "string"))
    .desiredState("string")
    .targetParameters(PipeTargetParametersArgs.builder()
        .batchJobParameters(PipeTargetParametersBatchJobParametersArgs.builder()
            .jobDefinition("string")
            .jobName("string")
            .arrayProperties(PipeTargetParametersBatchJobParametersArrayPropertiesArgs.builder()
                .size(0)
                .build())
            .containerOverrides(PipeTargetParametersBatchJobParametersContainerOverridesArgs.builder()
                .commands("string")
                .environments(PipeTargetParametersBatchJobParametersContainerOverridesEnvironmentArgs.builder()
                    .name("string")
                    .value("string")
                    .build())
                .instanceType("string")
                .resourceRequirements(PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirementArgs.builder()
                    .type("string")
                    .value("string")
                    .build())
                .build())
            .dependsOns(PipeTargetParametersBatchJobParametersDependsOnArgs.builder()
                .jobId("string")
                .type("string")
                .build())
            .parameters(Map.of("string", "string"))
            .retryStrategy(PipeTargetParametersBatchJobParametersRetryStrategyArgs.builder()
                .attempts(0)
                .build())
            .build())
        .cloudwatchLogsParameters(PipeTargetParametersCloudwatchLogsParametersArgs.builder()
            .logStreamName("string")
            .timestamp("string")
            .build())
        .ecsTaskParameters(PipeTargetParametersEcsTaskParametersArgs.builder()
            .taskDefinitionArn("string")
            .overrides(PipeTargetParametersEcsTaskParametersOverridesArgs.builder()
                .containerOverrides(PipeTargetParametersEcsTaskParametersOverridesContainerOverrideArgs.builder()
                    .commands("string")
                    .cpu(0)
                    .environmentFiles(PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFileArgs.builder()
                        .type("string")
                        .value("string")
                        .build())
                    .environments(PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentArgs.builder()
                        .name("string")
                        .value("string")
                        .build())
                    .memory(0)
                    .memoryReservation(0)
                    .name("string")
                    .resourceRequirements(PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirementArgs.builder()
                        .type("string")
                        .value("string")
                        .build())
                    .build())
                .cpu("string")
                .ephemeralStorage(PipeTargetParametersEcsTaskParametersOverridesEphemeralStorageArgs.builder()
                    .sizeInGib(0)
                    .build())
                .executionRoleArn("string")
                .inferenceAcceleratorOverrides(PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverrideArgs.builder()
                    .deviceName("string")
                    .deviceType("string")
                    .build())
                .memory("string")
                .taskRoleArn("string")
                .build())
            .placementStrategies(PipeTargetParametersEcsTaskParametersPlacementStrategyArgs.builder()
                .field("string")
                .type("string")
                .build())
            .group("string")
            .launchType("string")
            .networkConfiguration(PipeTargetParametersEcsTaskParametersNetworkConfigurationArgs.builder()
                .awsVpcConfiguration(PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfigurationArgs.builder()
                    .assignPublicIp("string")
                    .securityGroups("string")
                    .subnets("string")
                    .build())
                .build())
            .capacityProviderStrategies(PipeTargetParametersEcsTaskParametersCapacityProviderStrategyArgs.builder()
                .capacityProvider("string")
                .base(0)
                .weight(0)
                .build())
            .placementConstraints(PipeTargetParametersEcsTaskParametersPlacementConstraintArgs.builder()
                .expression("string")
                .type("string")
                .build())
            .enableExecuteCommand(false)
            .platformVersion("string")
            .propagateTags("string")
            .referenceId("string")
            .tags(Map.of("string", "string"))
            .taskCount(0)
            .enableEcsManagedTags(false)
            .build())
        .eventbridgeEventBusParameters(PipeTargetParametersEventbridgeEventBusParametersArgs.builder()
            .detailType("string")
            .endpointId("string")
            .resources("string")
            .source("string")
            .time("string")
            .build())
        .httpParameters(PipeTargetParametersHttpParametersArgs.builder()
            .headerParameters(Map.of("string", "string"))
            .pathParameterValues("string")
            .queryStringParameters(Map.of("string", "string"))
            .build())
        .inputTemplate("string")
        .kinesisStreamParameters(PipeTargetParametersKinesisStreamParametersArgs.builder()
            .partitionKey("string")
            .build())
        .lambdaFunctionParameters(PipeTargetParametersLambdaFunctionParametersArgs.builder()
            .invocationType("string")
            .build())
        .redshiftDataParameters(PipeTargetParametersRedshiftDataParametersArgs.builder()
            .database("string")
            .sqls("string")
            .dbUser("string")
            .secretManagerArn("string")
            .statementName("string")
            .withEvent(false)
            .build())
        .sagemakerPipelineParameters(PipeTargetParametersSagemakerPipelineParametersArgs.builder()
            .pipelineParameters(PipeTargetParametersSagemakerPipelineParametersPipelineParameterArgs.builder()
                .name("string")
                .value("string")
                .build())
            .build())
        .sqsQueueParameters(PipeTargetParametersSqsQueueParametersArgs.builder()
            .messageDeduplicationId("string")
            .messageGroupId("string")
            .build())
        .stepFunctionStateMachineParameters(PipeTargetParametersStepFunctionStateMachineParametersArgs.builder()
            .invocationType("string")
            .build())
        .build())
    .build());
pipe_resource = aws.pipes.Pipe("pipeResource",
    role_arn="string",
    target="string",
    source="string",
    enrichment_parameters={
        "http_parameters": {
            "header_parameters": {
                "string": "string",
            },
            "path_parameter_values": "string",
            "query_string_parameters": {
                "string": "string",
            },
        },
        "input_template": "string",
    },
    kms_key_identifier="string",
    log_configuration={
        "level": "string",
        "cloudwatch_logs_log_destination": {
            "log_group_arn": "string",
        },
        "firehose_log_destination": {
            "delivery_stream_arn": "string",
        },
        "include_execution_datas": ["string"],
        "s3_log_destination": {
            "bucket_name": "string",
            "bucket_owner": "string",
            "output_format": "string",
            "prefix": "string",
        },
    },
    name="string",
    name_prefix="string",
    description="string",
    enrichment="string",
    source_parameters={
        "activemq_broker_parameters": {
            "credentials": {
                "basic_auth": "string",
            },
            "queue_name": "string",
            "batch_size": 0,
            "maximum_batching_window_in_seconds": 0,
        },
        "dynamodb_stream_parameters": {
            "starting_position": "string",
            "batch_size": 0,
            "dead_letter_config": {
                "arn": "string",
            },
            "maximum_batching_window_in_seconds": 0,
            "maximum_record_age_in_seconds": 0,
            "maximum_retry_attempts": 0,
            "on_partial_batch_item_failure": "string",
            "parallelization_factor": 0,
        },
        "filter_criteria": {
            "filters": [{
                "pattern": "string",
            }],
        },
        "kinesis_stream_parameters": {
            "starting_position": "string",
            "batch_size": 0,
            "dead_letter_config": {
                "arn": "string",
            },
            "maximum_batching_window_in_seconds": 0,
            "maximum_record_age_in_seconds": 0,
            "maximum_retry_attempts": 0,
            "on_partial_batch_item_failure": "string",
            "parallelization_factor": 0,
            "starting_position_timestamp": "string",
        },
        "managed_streaming_kafka_parameters": {
            "topic_name": "string",
            "batch_size": 0,
            "consumer_group_id": "string",
            "credentials": {
                "client_certificate_tls_auth": "string",
                "sasl_scram512_auth": "string",
            },
            "maximum_batching_window_in_seconds": 0,
            "starting_position": "string",
        },
        "rabbitmq_broker_parameters": {
            "credentials": {
                "basic_auth": "string",
            },
            "queue_name": "string",
            "batch_size": 0,
            "maximum_batching_window_in_seconds": 0,
            "virtual_host": "string",
        },
        "self_managed_kafka_parameters": {
            "topic_name": "string",
            "additional_bootstrap_servers": ["string"],
            "batch_size": 0,
            "consumer_group_id": "string",
            "credentials": {
                "basic_auth": "string",
                "client_certificate_tls_auth": "string",
                "sasl_scram256_auth": "string",
                "sasl_scram512_auth": "string",
            },
            "maximum_batching_window_in_seconds": 0,
            "server_root_ca_certificate": "string",
            "starting_position": "string",
            "vpc": {
                "security_groups": ["string"],
                "subnets": ["string"],
            },
        },
        "sqs_queue_parameters": {
            "batch_size": 0,
            "maximum_batching_window_in_seconds": 0,
        },
    },
    tags={
        "string": "string",
    },
    desired_state="string",
    target_parameters={
        "batch_job_parameters": {
            "job_definition": "string",
            "job_name": "string",
            "array_properties": {
                "size": 0,
            },
            "container_overrides": {
                "commands": ["string"],
                "environments": [{
                    "name": "string",
                    "value": "string",
                }],
                "instance_type": "string",
                "resource_requirements": [{
                    "type": "string",
                    "value": "string",
                }],
            },
            "depends_ons": [{
                "job_id": "string",
                "type": "string",
            }],
            "parameters": {
                "string": "string",
            },
            "retry_strategy": {
                "attempts": 0,
            },
        },
        "cloudwatch_logs_parameters": {
            "log_stream_name": "string",
            "timestamp": "string",
        },
        "ecs_task_parameters": {
            "task_definition_arn": "string",
            "overrides": {
                "container_overrides": [{
                    "commands": ["string"],
                    "cpu": 0,
                    "environment_files": [{
                        "type": "string",
                        "value": "string",
                    }],
                    "environments": [{
                        "name": "string",
                        "value": "string",
                    }],
                    "memory": 0,
                    "memory_reservation": 0,
                    "name": "string",
                    "resource_requirements": [{
                        "type": "string",
                        "value": "string",
                    }],
                }],
                "cpu": "string",
                "ephemeral_storage": {
                    "size_in_gib": 0,
                },
                "execution_role_arn": "string",
                "inference_accelerator_overrides": [{
                    "device_name": "string",
                    "device_type": "string",
                }],
                "memory": "string",
                "task_role_arn": "string",
            },
            "placement_strategies": [{
                "field": "string",
                "type": "string",
            }],
            "group": "string",
            "launch_type": "string",
            "network_configuration": {
                "aws_vpc_configuration": {
                    "assign_public_ip": "string",
                    "security_groups": ["string"],
                    "subnets": ["string"],
                },
            },
            "capacity_provider_strategies": [{
                "capacity_provider": "string",
                "base": 0,
                "weight": 0,
            }],
            "placement_constraints": [{
                "expression": "string",
                "type": "string",
            }],
            "enable_execute_command": False,
            "platform_version": "string",
            "propagate_tags": "string",
            "reference_id": "string",
            "tags": {
                "string": "string",
            },
            "task_count": 0,
            "enable_ecs_managed_tags": False,
        },
        "eventbridge_event_bus_parameters": {
            "detail_type": "string",
            "endpoint_id": "string",
            "resources": ["string"],
            "source": "string",
            "time": "string",
        },
        "http_parameters": {
            "header_parameters": {
                "string": "string",
            },
            "path_parameter_values": "string",
            "query_string_parameters": {
                "string": "string",
            },
        },
        "input_template": "string",
        "kinesis_stream_parameters": {
            "partition_key": "string",
        },
        "lambda_function_parameters": {
            "invocation_type": "string",
        },
        "redshift_data_parameters": {
            "database": "string",
            "sqls": ["string"],
            "db_user": "string",
            "secret_manager_arn": "string",
            "statement_name": "string",
            "with_event": False,
        },
        "sagemaker_pipeline_parameters": {
            "pipeline_parameters": [{
                "name": "string",
                "value": "string",
            }],
        },
        "sqs_queue_parameters": {
            "message_deduplication_id": "string",
            "message_group_id": "string",
        },
        "step_function_state_machine_parameters": {
            "invocation_type": "string",
        },
    })
const pipeResource = new aws.pipes.Pipe("pipeResource", {
    roleArn: "string",
    target: "string",
    source: "string",
    enrichmentParameters: {
        httpParameters: {
            headerParameters: {
                string: "string",
            },
            pathParameterValues: "string",
            queryStringParameters: {
                string: "string",
            },
        },
        inputTemplate: "string",
    },
    kmsKeyIdentifier: "string",
    logConfiguration: {
        level: "string",
        cloudwatchLogsLogDestination: {
            logGroupArn: "string",
        },
        firehoseLogDestination: {
            deliveryStreamArn: "string",
        },
        includeExecutionDatas: ["string"],
        s3LogDestination: {
            bucketName: "string",
            bucketOwner: "string",
            outputFormat: "string",
            prefix: "string",
        },
    },
    name: "string",
    namePrefix: "string",
    description: "string",
    enrichment: "string",
    sourceParameters: {
        activemqBrokerParameters: {
            credentials: {
                basicAuth: "string",
            },
            queueName: "string",
            batchSize: 0,
            maximumBatchingWindowInSeconds: 0,
        },
        dynamodbStreamParameters: {
            startingPosition: "string",
            batchSize: 0,
            deadLetterConfig: {
                arn: "string",
            },
            maximumBatchingWindowInSeconds: 0,
            maximumRecordAgeInSeconds: 0,
            maximumRetryAttempts: 0,
            onPartialBatchItemFailure: "string",
            parallelizationFactor: 0,
        },
        filterCriteria: {
            filters: [{
                pattern: "string",
            }],
        },
        kinesisStreamParameters: {
            startingPosition: "string",
            batchSize: 0,
            deadLetterConfig: {
                arn: "string",
            },
            maximumBatchingWindowInSeconds: 0,
            maximumRecordAgeInSeconds: 0,
            maximumRetryAttempts: 0,
            onPartialBatchItemFailure: "string",
            parallelizationFactor: 0,
            startingPositionTimestamp: "string",
        },
        managedStreamingKafkaParameters: {
            topicName: "string",
            batchSize: 0,
            consumerGroupId: "string",
            credentials: {
                clientCertificateTlsAuth: "string",
                saslScram512Auth: "string",
            },
            maximumBatchingWindowInSeconds: 0,
            startingPosition: "string",
        },
        rabbitmqBrokerParameters: {
            credentials: {
                basicAuth: "string",
            },
            queueName: "string",
            batchSize: 0,
            maximumBatchingWindowInSeconds: 0,
            virtualHost: "string",
        },
        selfManagedKafkaParameters: {
            topicName: "string",
            additionalBootstrapServers: ["string"],
            batchSize: 0,
            consumerGroupId: "string",
            credentials: {
                basicAuth: "string",
                clientCertificateTlsAuth: "string",
                saslScram256Auth: "string",
                saslScram512Auth: "string",
            },
            maximumBatchingWindowInSeconds: 0,
            serverRootCaCertificate: "string",
            startingPosition: "string",
            vpc: {
                securityGroups: ["string"],
                subnets: ["string"],
            },
        },
        sqsQueueParameters: {
            batchSize: 0,
            maximumBatchingWindowInSeconds: 0,
        },
    },
    tags: {
        string: "string",
    },
    desiredState: "string",
    targetParameters: {
        batchJobParameters: {
            jobDefinition: "string",
            jobName: "string",
            arrayProperties: {
                size: 0,
            },
            containerOverrides: {
                commands: ["string"],
                environments: [{
                    name: "string",
                    value: "string",
                }],
                instanceType: "string",
                resourceRequirements: [{
                    type: "string",
                    value: "string",
                }],
            },
            dependsOns: [{
                jobId: "string",
                type: "string",
            }],
            parameters: {
                string: "string",
            },
            retryStrategy: {
                attempts: 0,
            },
        },
        cloudwatchLogsParameters: {
            logStreamName: "string",
            timestamp: "string",
        },
        ecsTaskParameters: {
            taskDefinitionArn: "string",
            overrides: {
                containerOverrides: [{
                    commands: ["string"],
                    cpu: 0,
                    environmentFiles: [{
                        type: "string",
                        value: "string",
                    }],
                    environments: [{
                        name: "string",
                        value: "string",
                    }],
                    memory: 0,
                    memoryReservation: 0,
                    name: "string",
                    resourceRequirements: [{
                        type: "string",
                        value: "string",
                    }],
                }],
                cpu: "string",
                ephemeralStorage: {
                    sizeInGib: 0,
                },
                executionRoleArn: "string",
                inferenceAcceleratorOverrides: [{
                    deviceName: "string",
                    deviceType: "string",
                }],
                memory: "string",
                taskRoleArn: "string",
            },
            placementStrategies: [{
                field: "string",
                type: "string",
            }],
            group: "string",
            launchType: "string",
            networkConfiguration: {
                awsVpcConfiguration: {
                    assignPublicIp: "string",
                    securityGroups: ["string"],
                    subnets: ["string"],
                },
            },
            capacityProviderStrategies: [{
                capacityProvider: "string",
                base: 0,
                weight: 0,
            }],
            placementConstraints: [{
                expression: "string",
                type: "string",
            }],
            enableExecuteCommand: false,
            platformVersion: "string",
            propagateTags: "string",
            referenceId: "string",
            tags: {
                string: "string",
            },
            taskCount: 0,
            enableEcsManagedTags: false,
        },
        eventbridgeEventBusParameters: {
            detailType: "string",
            endpointId: "string",
            resources: ["string"],
            source: "string",
            time: "string",
        },
        httpParameters: {
            headerParameters: {
                string: "string",
            },
            pathParameterValues: "string",
            queryStringParameters: {
                string: "string",
            },
        },
        inputTemplate: "string",
        kinesisStreamParameters: {
            partitionKey: "string",
        },
        lambdaFunctionParameters: {
            invocationType: "string",
        },
        redshiftDataParameters: {
            database: "string",
            sqls: ["string"],
            dbUser: "string",
            secretManagerArn: "string",
            statementName: "string",
            withEvent: false,
        },
        sagemakerPipelineParameters: {
            pipelineParameters: [{
                name: "string",
                value: "string",
            }],
        },
        sqsQueueParameters: {
            messageDeduplicationId: "string",
            messageGroupId: "string",
        },
        stepFunctionStateMachineParameters: {
            invocationType: "string",
        },
    },
});
type: aws:pipes:Pipe
properties:
    description: string
    desiredState: string
    enrichment: string
    enrichmentParameters:
        httpParameters:
            headerParameters:
                string: string
            pathParameterValues: string
            queryStringParameters:
                string: string
        inputTemplate: string
    kmsKeyIdentifier: string
    logConfiguration:
        cloudwatchLogsLogDestination:
            logGroupArn: string
        firehoseLogDestination:
            deliveryStreamArn: string
        includeExecutionDatas:
            - string
        level: string
        s3LogDestination:
            bucketName: string
            bucketOwner: string
            outputFormat: string
            prefix: string
    name: string
    namePrefix: string
    roleArn: string
    source: string
    sourceParameters:
        activemqBrokerParameters:
            batchSize: 0
            credentials:
                basicAuth: string
            maximumBatchingWindowInSeconds: 0
            queueName: string
        dynamodbStreamParameters:
            batchSize: 0
            deadLetterConfig:
                arn: string
            maximumBatchingWindowInSeconds: 0
            maximumRecordAgeInSeconds: 0
            maximumRetryAttempts: 0
            onPartialBatchItemFailure: string
            parallelizationFactor: 0
            startingPosition: string
        filterCriteria:
            filters:
                - pattern: string
        kinesisStreamParameters:
            batchSize: 0
            deadLetterConfig:
                arn: string
            maximumBatchingWindowInSeconds: 0
            maximumRecordAgeInSeconds: 0
            maximumRetryAttempts: 0
            onPartialBatchItemFailure: string
            parallelizationFactor: 0
            startingPosition: string
            startingPositionTimestamp: string
        managedStreamingKafkaParameters:
            batchSize: 0
            consumerGroupId: string
            credentials:
                clientCertificateTlsAuth: string
                saslScram512Auth: string
            maximumBatchingWindowInSeconds: 0
            startingPosition: string
            topicName: string
        rabbitmqBrokerParameters:
            batchSize: 0
            credentials:
                basicAuth: string
            maximumBatchingWindowInSeconds: 0
            queueName: string
            virtualHost: string
        selfManagedKafkaParameters:
            additionalBootstrapServers:
                - string
            batchSize: 0
            consumerGroupId: string
            credentials:
                basicAuth: string
                clientCertificateTlsAuth: string
                saslScram256Auth: string
                saslScram512Auth: string
            maximumBatchingWindowInSeconds: 0
            serverRootCaCertificate: string
            startingPosition: string
            topicName: string
            vpc:
                securityGroups:
                    - string
                subnets:
                    - string
        sqsQueueParameters:
            batchSize: 0
            maximumBatchingWindowInSeconds: 0
    tags:
        string: string
    target: string
    targetParameters:
        batchJobParameters:
            arrayProperties:
                size: 0
            containerOverrides:
                commands:
                    - string
                environments:
                    - name: string
                      value: string
                instanceType: string
                resourceRequirements:
                    - type: string
                      value: string
            dependsOns:
                - jobId: string
                  type: string
            jobDefinition: string
            jobName: string
            parameters:
                string: string
            retryStrategy:
                attempts: 0
        cloudwatchLogsParameters:
            logStreamName: string
            timestamp: string
        ecsTaskParameters:
            capacityProviderStrategies:
                - base: 0
                  capacityProvider: string
                  weight: 0
            enableEcsManagedTags: false
            enableExecuteCommand: false
            group: string
            launchType: string
            networkConfiguration:
                awsVpcConfiguration:
                    assignPublicIp: string
                    securityGroups:
                        - string
                    subnets:
                        - string
            overrides:
                containerOverrides:
                    - commands:
                        - string
                      cpu: 0
                      environmentFiles:
                        - type: string
                          value: string
                      environments:
                        - name: string
                          value: string
                      memory: 0
                      memoryReservation: 0
                      name: string
                      resourceRequirements:
                        - type: string
                          value: string
                cpu: string
                ephemeralStorage:
                    sizeInGib: 0
                executionRoleArn: string
                inferenceAcceleratorOverrides:
                    - deviceName: string
                      deviceType: string
                memory: string
                taskRoleArn: string
            placementConstraints:
                - expression: string
                  type: string
            placementStrategies:
                - field: string
                  type: string
            platformVersion: string
            propagateTags: string
            referenceId: string
            tags:
                string: string
            taskCount: 0
            taskDefinitionArn: string
        eventbridgeEventBusParameters:
            detailType: string
            endpointId: string
            resources:
                - string
            source: string
            time: string
        httpParameters:
            headerParameters:
                string: string
            pathParameterValues: string
            queryStringParameters:
                string: string
        inputTemplate: string
        kinesisStreamParameters:
            partitionKey: string
        lambdaFunctionParameters:
            invocationType: string
        redshiftDataParameters:
            database: string
            dbUser: string
            secretManagerArn: string
            sqls:
                - string
            statementName: string
            withEvent: false
        sagemakerPipelineParameters:
            pipelineParameters:
                - name: string
                  value: string
        sqsQueueParameters:
            messageDeduplicationId: string
            messageGroupId: string
        stepFunctionStateMachineParameters:
            invocationType: string
Pipe Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
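For example, a minimal sketch of the two equivalent forms (the template value is an illustrative assumption, not taken from this page):

import pulumi_aws as aws

# 1. As an argument class:
enrichment_as_args = aws.pipes.PipeEnrichmentParametersArgs(
    input_template="<$.data>",  # hypothetical template
)

# 2. As a dictionary literal with snake_case keys:
enrichment_as_dict = {
    "input_template": "<$.data>",  # hypothetical template
}

Either form can be passed as the enrichment_parameters input of aws.pipes.Pipe.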
The Pipe resource accepts the following input properties:
- RoleArn string
- ARN of the role that allows the pipe to send data to the target.
- Source string
- Source resource of the pipe. Typically an ARN (Amazon Resource Name); for a self-managed Kafka cluster, use 'smk://' followed by the bootstrap server's address instead (see the sketch after this list).
- Target string
- Target resource of the pipe (typically an ARN).
The following arguments are optional:
- Description string
- A description of the pipe. At most 512 characters.
- DesiredState string
- The state the pipe should be in. One of: RUNNING, STOPPED.
- Enrichment string
- Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
- EnrichmentParameters PipeEnrichmentParameters
- Parameters to configure enrichment for your pipe. Detailed below.
- KmsKeyIdentifier string
- Identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN. If not set, EventBridge uses an AWS owned key to encrypt pipe data.
- LogConfiguration PipeLogConfiguration
- Logging configuration settings for the pipe. Detailed below.
- Name string
- Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
- NamePrefix string
- Creates a unique name beginning with the specified prefix. Conflicts with name.
- SourceParameters PipeSourceParameters
- Parameters to configure a source for the pipe. Detailed below.
- Tags Dictionary<string, string>
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
- TargetParameters PipeTargetParameters
- Parameters to configure a target for your pipe. Detailed below.
- RoleArn string
- ARN of the role that allows the pipe to send data to the target.
- Source string
- Source resource of the pipe. Typically an ARN (Amazon Resource Name); for a self-managed Kafka cluster, use 'smk://' followed by the bootstrap server's address instead.
- Target string
- Target resource of the pipe (typically an ARN).
The following arguments are optional:
- Description string
- A description of the pipe. At most 512 characters.
- DesiredState string
- The state the pipe should be in. One of: RUNNING, STOPPED.
- Enrichment string
- Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
- EnrichmentParameters PipeEnrichmentParametersArgs
- Parameters to configure enrichment for your pipe. Detailed below.
- KmsKeyIdentifier string
- Identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN. If not set, EventBridge uses an AWS owned key to encrypt pipe data.
- LogConfiguration PipeLogConfigurationArgs
- Logging configuration settings for the pipe. Detailed below.
- Name string
- Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
- NamePrefix string
- Creates a unique name beginning with the specified prefix. Conflicts with name.
- SourceParameters PipeSourceParametersArgs
- Parameters to configure a source for the pipe. Detailed below.
- Tags map[string]string
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
- TargetParameters PipeTargetParametersArgs
- Parameters to configure a target for your pipe. Detailed below.
- roleArn String
- ARN of the role that allows the pipe to send data to the target.
- source String
- Source resource of the pipe. Typically an ARN (Amazon Resource Name); for a self-managed Kafka cluster, use 'smk://' followed by the bootstrap server's address instead.
- target String
- Target resource of the pipe (typically an ARN).
The following arguments are optional:
- description String
- A description of the pipe. At most 512 characters.
- desiredState String
- The state the pipe should be in. One of: RUNNING, STOPPED.
- enrichment String
- Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
- enrichmentParameters PipeEnrichmentParameters
- Parameters to configure enrichment for your pipe. Detailed below.
- kmsKeyIdentifier String
- Identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN. If not set, EventBridge uses an AWS owned key to encrypt pipe data.
- logConfiguration PipeLogConfiguration
- Logging configuration settings for the pipe. Detailed below.
- name String
- Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
- namePrefix String
- Creates a unique name beginning with the specified prefix. Conflicts with name.
- sourceParameters PipeSourceParameters
- Parameters to configure a source for the pipe. Detailed below.
- tags Map<String,String>
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
- targetParameters PipeTargetParameters
- Parameters to configure a target for your pipe. Detailed below.
- roleArn string
- ARN of the role that allows the pipe to send data to the target.
- source string
- Source resource of the pipe. Typically an ARN (Amazon Resource Name); for a self-managed Kafka cluster, use 'smk://' followed by the bootstrap server's address instead.
- target string
- Target resource of the pipe (typically an ARN).
The following arguments are optional:
- description string
- A description of the pipe. At most 512 characters.
- desiredState string
- The state the pipe should be in. One of: RUNNING, STOPPED.
- enrichment string
- Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
- enrichmentParameters PipeEnrichmentParameters
- Parameters to configure enrichment for your pipe. Detailed below.
- kmsKeyIdentifier string
- Identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN. If not set, EventBridge uses an AWS owned key to encrypt pipe data.
- logConfiguration PipeLogConfiguration
- Logging configuration settings for the pipe. Detailed below.
- name string
- Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
- namePrefix string
- Creates a unique name beginning with the specified prefix. Conflicts with name.
- sourceParameters PipeSourceParameters
- Parameters to configure a source for the pipe. Detailed below.
- tags {[key: string]: string}
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
- targetParameters PipeTargetParameters
- Parameters to configure a target for your pipe. Detailed below.
- role_arn str
- ARN of the role that allows the pipe to send data to the target.
- source str
- Source resource of the pipe. Typically an ARN (Amazon Resource Name); for a self-managed Kafka cluster, use 'smk://' followed by the bootstrap server's address instead.
- target str
- Target resource of the pipe (typically an ARN).
The following arguments are optional:
- description str
- A description of the pipe. At most 512 characters.
- desired_state str
- The state the pipe should be in. One of: RUNNING, STOPPED.
- enrichment str
- Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
- enrichment_parameters PipeEnrichmentParametersArgs
- Parameters to configure enrichment for your pipe. Detailed below.
- kms_key_identifier str
- Identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN. If not set, EventBridge uses an AWS owned key to encrypt pipe data.
- log_configuration PipeLogConfigurationArgs
- Logging configuration settings for the pipe. Detailed below.
- name str
- Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
- name_prefix str
- Creates a unique name beginning with the specified prefix. Conflicts with name.
- source_parameters PipeSourceParametersArgs
- Parameters to configure a source for the pipe. Detailed below.
- tags Mapping[str, str]
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
- target_parameters PipeTargetParametersArgs
- Parameters to configure a target for your pipe. Detailed below.
- roleArn String
- ARN of the role that allows the pipe to send data to the target.
- source String
- Source resource of the pipe. This field typically requires an ARN (Amazon Resource Name). However, when using a self-managed Kafka cluster, you should use a different format. Instead of an ARN, use 'smk://' followed by the bootstrap server's address.
- target String
- Target resource of the pipe (typically an ARN). - The following arguments are optional: 
- description String
- A description of the pipe. At most 512 characters.
- desiredState String
- The state the pipe should be in. One of: RUNNING,STOPPED.
- enrichment String
- Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
- enrichmentParameters Property Map
- Parameters to configure enrichment for your pipe. Detailed below.
- kmsKeyIdentifier String
- Identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN. If not set, EventBridge uses an AWS owned key to encrypt pipe data.
- logConfiguration Property Map
- Logging configuration settings for the pipe. Detailed below.
- name String
- Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
- namePrefix String
- Creates a unique name beginning with the specified prefix. Conflicts with name.
- sourceParameters Property Map
- Parameters to configure a source for the pipe. Detailed below.
- tags Map<String>
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
- targetParameters Property Map
- Parameters to configure a target for your pipe. Detailed below.
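To tie several of these arguments together, here is a hedged TypeScript sketch of a pipe created in a stopped state with a name prefix and tags; the role and queue ARNs are placeholders:
import * as aws from "@pulumi/aws";

// Sketch only: the ARNs below are placeholders.
const pausedPipe = new aws.pipes.Pipe("paused", {
    namePrefix: "example-",                               // provider appends a unique suffix
    desiredState: "STOPPED",                              // create the pipe without starting it
    roleArn: "arn:aws:iam::123456789012:role/example",    // placeholder
    source: "arn:aws:sqs:us-east-1:123456789012:source",  // placeholder
    target: "arn:aws:sqs:us-east-1:123456789012:target",  // placeholder
    tags: { Environment: "dev" },
});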
Outputs
All input properties are implicitly available as output properties. Additionally, the Pipe resource produces the following output properties:
Look up Existing Pipe Resource
Get an existing Pipe resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: PipeState, opts?: CustomResourceOptions): Pipe

@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        arn: Optional[str] = None,
        description: Optional[str] = None,
        desired_state: Optional[str] = None,
        enrichment: Optional[str] = None,
        enrichment_parameters: Optional[PipeEnrichmentParametersArgs] = None,
        kms_key_identifier: Optional[str] = None,
        log_configuration: Optional[PipeLogConfigurationArgs] = None,
        name: Optional[str] = None,
        name_prefix: Optional[str] = None,
        role_arn: Optional[str] = None,
        source: Optional[str] = None,
        source_parameters: Optional[PipeSourceParametersArgs] = None,
        tags: Optional[Mapping[str, str]] = None,
        tags_all: Optional[Mapping[str, str]] = None,
        target: Optional[str] = None,
        target_parameters: Optional[PipeTargetParametersArgs] = None) -> Pipe

func GetPipe(ctx *Context, name string, id IDInput, state *PipeState, opts ...ResourceOption) (*Pipe, error)

public static Pipe Get(string name, Input<string> id, PipeState? state, CustomResourceOptions? opts = null)

public static Pipe get(String name, Output<String> id, PipeState state, CustomResourceOptions options)

resources:
  _:
    type: aws:pipes:Pipe
    get:
      id: ${id}

- name
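As a quick illustration, a minimal TypeScript lookup might look like this (the resource name and pipe ID are placeholders):
import * as aws from "@pulumi/aws";

// Adopt an existing pipe into the program by its provider ID (placeholder value).
const existing = aws.pipes.Pipe.get("existing", "example-pipe");

// All input properties are then available as outputs, e.g. the pipe's ARN.
export const existingPipeArn = existing.arn;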
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Arn string
- ARN of this pipe.
- Description string
- A description of the pipe. At most 512 characters.
- DesiredState string
- The state the pipe should be in. One of: RUNNING, STOPPED.
- Enrichment string
- Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
- EnrichmentParameters PipeEnrichmentParameters
- Parameters to configure enrichment for your pipe. Detailed below.
- KmsKeyIdentifier string
- Identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN. If not set, EventBridge uses an AWS owned key to encrypt pipe data.
- LogConfiguration PipeLogConfiguration
- Logging configuration settings for the pipe. Detailed below.
- Name string
- Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
- NamePrefix string
- Creates a unique name beginning with the specified prefix. Conflicts with name.
- RoleArn string
- ARN of the role that allows the pipe to send data to the target.
- Source string
- Source resource of the pipe. This field typically requires an ARN (Amazon Resource Name). However, when using a self-managed Kafka cluster, you should use a different format. Instead of an ARN, use 'smk://' followed by the bootstrap server's address.
- SourceParameters PipeSourceParameters
- Parameters to configure a source for the pipe. Detailed below.
- Tags Dictionary<string, string>
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
- TagsAll Dictionary<string, string>
- Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- Target string
- Target resource of the pipe (typically an ARN).
The following arguments are optional:
- TargetParameters PipeTargetParameters
- Parameters to configure a target for your pipe. Detailed below.
- Arn string
- ARN of this pipe.
- Description string
- A description of the pipe. At most 512 characters.
- DesiredState string
- The state the pipe should be in. One of: RUNNING, STOPPED.
- Enrichment string
- Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
- EnrichmentParameters PipeEnrichmentParametersArgs
- Parameters to configure enrichment for your pipe. Detailed below.
- KmsKeyIdentifier string
- Identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN. If not set, EventBridge uses an AWS owned key to encrypt pipe data.
- LogConfiguration PipeLogConfigurationArgs
- Logging configuration settings for the pipe. Detailed below.
- Name string
- Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
- NamePrefix string
- Creates a unique name beginning with the specified prefix. Conflicts with name.
- RoleArn string
- ARN of the role that allows the pipe to send data to the target.
- Source string
- Source resource of the pipe. This field typically requires an ARN (Amazon Resource Name). However, when using a self-managed Kafka cluster, you should use a different format. Instead of an ARN, use 'smk://' followed by the bootstrap server's address.
- SourceParameters PipeSourceParametersArgs
- Parameters to configure a source for the pipe. Detailed below.
- Tags map[string]string
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
- TagsAll map[string]string
- Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- Target string
- Target resource of the pipe (typically an ARN).
The following arguments are optional:
- TargetParameters PipeTargetParametersArgs
- Parameters to configure a target for your pipe. Detailed below.
- arn String
- ARN of this pipe.
- description String
- A description of the pipe. At most 512 characters.
- desiredState String
- The state the pipe should be in. One of: RUNNING, STOPPED.
- enrichment String
- Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
- enrichmentParameters PipeEnrichmentParameters
- Parameters to configure enrichment for your pipe. Detailed below.
- kmsKeyIdentifier String
- Identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN. If not set, EventBridge uses an AWS owned key to encrypt pipe data.
- logConfiguration PipeLogConfiguration
- Logging configuration settings for the pipe. Detailed below.
- name String
- Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
- namePrefix String
- Creates a unique name beginning with the specified prefix. Conflicts with name.
- roleArn String
- ARN of the role that allows the pipe to send data to the target.
- source String
- Source resource of the pipe. This field typically requires an ARN (Amazon Resource Name). However, when using a self-managed Kafka cluster, you should use a different format. Instead of an ARN, use 'smk://' followed by the bootstrap server's address.
- sourceParameters PipeSourceParameters
- Parameters to configure a source for the pipe. Detailed below.
- tags Map<String,String>
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
- tagsAll Map<String,String>
- Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- target String
- Target resource of the pipe (typically an ARN).
The following arguments are optional:
- targetParameters PipeTargetParameters
- Parameters to configure a target for your pipe. Detailed below.
- arn string
- ARN of this pipe.
- description string
- A description of the pipe. At most 512 characters.
- desiredState string
- The state the pipe should be in. One of: RUNNING, STOPPED.
- enrichment string
- Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
- enrichmentParameters PipeEnrichmentParameters
- Parameters to configure enrichment for your pipe. Detailed below.
- kmsKeyIdentifier string
- Identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN. If not set, EventBridge uses an AWS owned key to encrypt pipe data.
- logConfiguration PipeLogConfiguration
- Logging configuration settings for the pipe. Detailed below.
- name string
- Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
- namePrefix string
- Creates a unique name beginning with the specified prefix. Conflicts with name.
- roleArn string
- ARN of the role that allows the pipe to send data to the target.
- source string
- Source resource of the pipe. This field typically requires an ARN (Amazon Resource Name). However, when using a self-managed Kafka cluster, you should use a different format. Instead of an ARN, use 'smk://' followed by the bootstrap server's address.
- sourceParameters PipeSourceParameters
- Parameters to configure a source for the pipe. Detailed below.
- tags {[key: string]: string}
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
- tagsAll {[key: string]: string}
- Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- target string
- Target resource of the pipe (typically an ARN).
The following arguments are optional:
- targetParameters PipeTargetParameters
- Parameters to configure a target for your pipe. Detailed below.
- arn str
- ARN of this pipe.
- description str
- A description of the pipe. At most 512 characters.
- desired_state str
- The state the pipe should be in. One of: RUNNING, STOPPED.
- enrichment str
- Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
- enrichment_parameters PipeEnrichmentParametersArgs
- Parameters to configure enrichment for your pipe. Detailed below.
- kms_key_identifier str
- Identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN. If not set, EventBridge uses an AWS owned key to encrypt pipe data.
- log_configuration PipeLogConfigurationArgs
- Logging configuration settings for the pipe. Detailed below.
- name str
- Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
- name_prefix str
- Creates a unique name beginning with the specified prefix. Conflicts with name.
- role_arn str
- ARN of the role that allows the pipe to send data to the target.
- source str
- Source resource of the pipe. This field typically requires an ARN (Amazon Resource Name). However, when using a self-managed Kafka cluster, you should use a different format. Instead of an ARN, use 'smk://' followed by the bootstrap server's address.
- source_parameters PipeSourceParametersArgs
- Parameters to configure a source for the pipe. Detailed below.
- tags Mapping[str, str]
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
- tags_all Mapping[str, str]
- Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- target str
- Target resource of the pipe (typically an ARN).
The following arguments are optional:
- target_parameters PipeTargetParametersArgs
- Parameters to configure a target for your pipe. Detailed below.
- arn String
- ARN of this pipe.
- description String
- A description of the pipe. At most 512 characters.
- desiredState String
- The state the pipe should be in. One of: RUNNING, STOPPED.
- enrichment String
- Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
- enrichmentParameters Property Map
- Parameters to configure enrichment for your pipe. Detailed below.
- kmsKeyIdentifier String
- Identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN. If not set, EventBridge uses an AWS owned key to encrypt pipe data.
- logConfiguration Property Map
- Logging configuration settings for the pipe. Detailed below.
- name String
- Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
- namePrefix String
- Creates a unique name beginning with the specified prefix. Conflicts with name.
- roleArn String
- ARN of the role that allows the pipe to send data to the target.
- source String
- Source resource of the pipe. This field typically requires an ARN (Amazon Resource Name). However, when using a self-managed Kafka cluster, you should use a different format. Instead of an ARN, use 'smk://' followed by the bootstrap server's address.
- sourceParameters Property Map
- Parameters to configure a source for the pipe. Detailed below.
- tags Map<String>
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
- tagsAll Map<String>
- Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- target String
- Target resource of the pipe (typically an ARN).
The following arguments are optional:
- targetParameters Property Map
- Parameters to configure a target for your pipe. Detailed below.
Supporting Types
PipeEnrichmentParameters, PipeEnrichmentParametersArgs      
- HttpParameters PipeEnrichmentParametersHttpParameters
- Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination. If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of your target invoking request. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence. Detailed below.
- InputTemplate string
- Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
- HttpParameters PipeEnrichmentParametersHttpParameters
- Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination. If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of your target invoking request. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence. Detailed below.
- InputTemplate string
- Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
- httpParameters PipeEnrichmentParametersHttpParameters
- Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination. If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of your target invoking request. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence. Detailed below.
- inputTemplate String
- Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
- httpParameters PipeEnrichmentParametersHttpParameters
- Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination. If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of your target invoking request. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence. Detailed below.
- inputTemplate string
- Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
- http_parameters PipeEnrichmentParametersHttpParameters
- Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination. If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of your target invoking request. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence. Detailed below.
- input_template str
- Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
- httpParameters Property Map
- Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination. If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of your target invoking request. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence. Detailed below.
- inputTemplate String
- Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
PipeEnrichmentParametersHttpParameters, PipeEnrichmentParametersHttpParametersArgs          
- HeaderParameters Dictionary<string, string>
- PathParameterValues string
- QueryStringParameters Dictionary<string, string>
- HeaderParameters map[string]string
- PathParameterValues string
- QueryStringParameters map[string]string
- headerParameters Map<String,String>
- pathParameterValues String
- queryStringParameters Map<String,String>
- headerParameters {[key: string]: string}
- pathParameterValues string
- queryStringParameters {[key: string]: string}
- header_parameters Mapping[str, str]
- path_parameter_values str
- query_string_parameters Mapping[str, str]
- headerParameters Map<String>
- pathParameterValues String
- queryStringParameters Map<String>
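To make the shape concrete, here is a hedged TypeScript sketch of a pipe with an enrichment step; the API destination ARN, role and queue ARNs, header value, and template are all placeholders:
import * as aws from "@pulumi/aws";

// Sketch only: every ARN and value below is a placeholder.
const enrichedPipe = new aws.pipes.Pipe("enriched", {
    roleArn: "arn:aws:iam::123456789012:role/example",      // placeholder
    source: "arn:aws:sqs:us-east-1:123456789012:source",    // placeholder
    target: "arn:aws:sqs:us-east-1:123456789012:target",    // placeholder
    enrichment: "arn:aws:events:us-east-1:123456789012:api-destination/example/aaaa", // placeholder
    enrichmentParameters: {
        inputTemplate: "{\"body\": <$.body>}",              // placeholder template
        httpParameters: {
            headerParameters: {
                "X-Example-Header": "example",              // placeholder header
            },
        },
    },
});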
PipeLogConfiguration, PipeLogConfigurationArgs      
- Level string
- The level of logging detail to include. Valid values: OFF, ERROR, INFO and TRACE.
- CloudwatchLogsLogDestination PipeLogConfigurationCloudwatchLogsLogDestination
- Amazon CloudWatch Logs logging configuration settings for the pipe. Detailed below.
- FirehoseLogDestination PipeLogConfigurationFirehoseLogDestination
- Amazon Kinesis Data Firehose logging configuration settings for the pipe. Detailed below.
- IncludeExecutionDatas List<string>
- String list that specifies whether the execution data (specifically, the payload, awsRequest, and awsResponse fields) is included in the log messages for this pipe. This applies to all log destinations for the pipe. Valid values: ALL.
- S3LogDestination PipeLogConfigurationS3LogDestination
- Amazon S3 logging configuration settings for the pipe. Detailed below.
- Level string
- The level of logging detail to include. Valid values: OFF, ERROR, INFO and TRACE.
- CloudwatchLogsLogDestination PipeLogConfigurationCloudwatchLogsLogDestination
- Amazon CloudWatch Logs logging configuration settings for the pipe. Detailed below.
- FirehoseLogDestination PipeLogConfigurationFirehoseLogDestination
- Amazon Kinesis Data Firehose logging configuration settings for the pipe. Detailed below.
- IncludeExecutionDatas []string
- String list that specifies whether the execution data (specifically, the payload, awsRequest, and awsResponse fields) is included in the log messages for this pipe. This applies to all log destinations for the pipe. Valid values: ALL.
- S3LogDestination PipeLogConfigurationS3LogDestination
- Amazon S3 logging configuration settings for the pipe. Detailed below.
- level String
- The level of logging detail to include. Valid values: OFF, ERROR, INFO and TRACE.
- cloudwatchLogsLogDestination PipeLogConfigurationCloudwatchLogsLogDestination
- Amazon CloudWatch Logs logging configuration settings for the pipe. Detailed below.
- firehoseLogDestination PipeLogConfigurationFirehoseLogDestination
- Amazon Kinesis Data Firehose logging configuration settings for the pipe. Detailed below.
- includeExecutionDatas List<String>
- String list that specifies whether the execution data (specifically, the payload, awsRequest, and awsResponse fields) is included in the log messages for this pipe. This applies to all log destinations for the pipe. Valid values: ALL.
- s3LogDestination PipeLogConfigurationS3LogDestination
- Amazon S3 logging configuration settings for the pipe. Detailed below.
- level string
- The level of logging detail to include. Valid values: OFF, ERROR, INFO and TRACE.
- cloudwatchLogsLogDestination PipeLogConfigurationCloudwatchLogsLogDestination
- Amazon CloudWatch Logs logging configuration settings for the pipe. Detailed below.
- firehoseLogDestination PipeLogConfigurationFirehoseLogDestination
- Amazon Kinesis Data Firehose logging configuration settings for the pipe. Detailed below.
- includeExecutionDatas string[]
- String list that specifies whether the execution data (specifically, the payload, awsRequest, and awsResponse fields) is included in the log messages for this pipe. This applies to all log destinations for the pipe. Valid values: ALL.
- s3LogDestination PipeLogConfigurationS3LogDestination
- Amazon S3 logging configuration settings for the pipe. Detailed below.
- level str
- The level of logging detail to include. Valid values: OFF, ERROR, INFO and TRACE.
- cloudwatch_logs_log_destination PipeLogConfigurationCloudwatchLogsLogDestination
- Amazon CloudWatch Logs logging configuration settings for the pipe. Detailed below.
- firehose_log_destination PipeLogConfigurationFirehoseLogDestination
- Amazon Kinesis Data Firehose logging configuration settings for the pipe. Detailed below.
- include_execution_datas Sequence[str]
- String list that specifies whether the execution data (specifically, the payload, awsRequest, and awsResponse fields) is included in the log messages for this pipe. This applies to all log destinations for the pipe. Valid values: ALL.
- s3_log_destination PipeLogConfigurationS3LogDestination
- Amazon S3 logging configuration settings for the pipe. Detailed below.
- level String
- The level of logging detail to include. Valid values: OFF, ERROR, INFO and TRACE.
- cloudwatchLogsLogDestination Property Map
- Amazon CloudWatch Logs logging configuration settings for the pipe. Detailed below.
- firehoseLogDestination Property Map
- Amazon Kinesis Data Firehose logging configuration settings for the pipe. Detailed below.
- includeExecutionDatas List<String>
- String list that specifies whether the execution data (specifically, the payload, awsRequest, and awsResponse fields) is included in the log messages for this pipe. This applies to all log destinations for the pipe. Valid values: ALL.
- s3LogDestination Property Map
- Amazon S3 logging configuration settings for the pipe. Detailed below.
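As an example, a hedged TypeScript sketch of a pipe that sends INFO-level logs to a CloudWatch log group; the role and queue ARNs are placeholders:
import * as aws from "@pulumi/aws";

// Sketch only: the role and queue ARNs are placeholders.
const logGroup = new aws.cloudwatch.LogGroup("pipe-logs", {});

const loggedPipe = new aws.pipes.Pipe("logged", {
    roleArn: "arn:aws:iam::123456789012:role/example",    // placeholder
    source: "arn:aws:sqs:us-east-1:123456789012:source",  // placeholder
    target: "arn:aws:sqs:us-east-1:123456789012:target",  // placeholder
    logConfiguration: {
        level: "INFO",
        cloudwatchLogsLogDestination: {
            logGroupArn: logGroup.arn,
        },
    },
});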
PipeLogConfigurationCloudwatchLogsLogDestination, PipeLogConfigurationCloudwatchLogsLogDestinationArgs              
- LogGroupArn string
- Amazon Resource Name (ARN) of the CloudWatch log group to which EventBridge sends the log records.
- LogGroupArn string
- Amazon Resource Name (ARN) of the CloudWatch log group to which EventBridge sends the log records.
- logGroupArn String
- Amazon Resource Name (ARN) of the CloudWatch log group to which EventBridge sends the log records.
- logGroupArn string
- Amazon Resource Name (ARN) of the CloudWatch log group to which EventBridge sends the log records.
- log_group_arn str
- Amazon Resource Name (ARN) of the CloudWatch log group to which EventBridge sends the log records.
- logGroupArn String
- Amazon Resource Name (ARN) of the CloudWatch log group to which EventBridge sends the log records.
PipeLogConfigurationFirehoseLogDestination, PipeLogConfigurationFirehoseLogDestinationArgs            
- DeliveryStreamArn string
- Amazon Resource Name (ARN) of the Kinesis Data Firehose delivery stream to which EventBridge delivers the pipe log records.
- DeliveryStreamArn string
- Amazon Resource Name (ARN) of the Kinesis Data Firehose delivery stream to which EventBridge delivers the pipe log records.
- deliveryStreamArn String
- Amazon Resource Name (ARN) of the Kinesis Data Firehose delivery stream to which EventBridge delivers the pipe log records.
- deliveryStreamArn string
- Amazon Resource Name (ARN) of the Kinesis Data Firehose delivery stream to which EventBridge delivers the pipe log records.
- delivery_stream_arn str
- Amazon Resource Name (ARN) of the Kinesis Data Firehose delivery stream to which EventBridge delivers the pipe log records.
- deliveryStreamArn String
- Amazon Resource Name (ARN) of the Kinesis Data Firehose delivery stream to which EventBridge delivers the pipe log records.
PipeLogConfigurationS3LogDestination, PipeLogConfigurationS3LogDestinationArgs          
- BucketName string
- Name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- BucketOwner string
- Amazon Web Services account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- OutputFormat string
- EventBridge format for the log records. Valid values: json, plain and w3c.
- Prefix string
- Prefix text with which to begin Amazon S3 log object names.
- BucketName string
- Name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- BucketOwner string
- Amazon Web Services account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- OutputFormat string
- EventBridge format for the log records. Valid values: json, plain and w3c.
- Prefix string
- Prefix text with which to begin Amazon S3 log object names.
- bucketName String
- Name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- bucketOwner String
- Amazon Web Services account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- outputFormat String
- EventBridge format for the log records. Valid values: json, plain and w3c.
- prefix String
- Prefix text with which to begin Amazon S3 log object names.
- bucketName string
- Name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- bucketOwner string
- Amazon Web Services account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- outputFormat string
- EventBridge format for the log records. Valid values: json, plain and w3c.
- prefix string
- Prefix text with which to begin Amazon S3 log object names.
- bucket_name str
- Name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- bucket_owner str
- Amazon Web Services account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- output_format str
- EventBridge format for the log records. Valid values: json, plain and w3c.
- prefix str
- Prefix text with which to begin Amazon S3 log object names.
- bucketName String
- Name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- bucketOwner String
- Amazon Web Services account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- outputFormat String
- EventBridge format for the log records. Valid values: json, plain and w3c.
- prefix String
- Prefix text with which to begin Amazon S3 log object names.
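For the S3 variant, a hedged TypeScript sketch of just the logConfiguration argument; the bucket name and owning account ID are placeholders:
// Sketch only: bucket name and owning account ID are placeholders.
const s3LogConfiguration = {
    level: "ERROR",
    s3LogDestination: {
        bucketName: "my-pipe-logs",   // placeholder bucket
        bucketOwner: "123456789012",  // placeholder account ID
        outputFormat: "json",
        prefix: "pipes/",
    },
};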
PipeSourceParameters, PipeSourceParametersArgs      
- ActivemqBrokerParameters PipeSourceParametersActivemqBrokerParameters
- The parameters for using an Active MQ broker as a source. Detailed below.
- DynamodbStreamParameters PipeSourceParametersDynamodbStreamParameters
- The parameters for using a DynamoDB stream as a source. Detailed below.
- FilterCriteria PipeSourceParametersFilterCriteria
- The collection of event patterns used to filter events. Detailed below.
- KinesisStreamParameters PipeSourceParametersKinesisStreamParameters
- The parameters for using a Kinesis stream as a source. Detailed below.
- ManagedStreamingKafkaParameters PipeSourceParametersManagedStreamingKafkaParameters
- The parameters for using an MSK stream as a source. Detailed below.
- RabbitmqBrokerParameters PipeSourceParametersRabbitmqBrokerParameters
- The parameters for using a Rabbit MQ broker as a source. Detailed below.
- SelfManagedKafkaParameters PipeSourceParametersSelfManagedKafkaParameters
- The parameters for using a self-managed Apache Kafka stream as a source. Detailed below.
- SqsQueueParameters PipeSourceParametersSqsQueueParameters
- The parameters for using an Amazon SQS queue as a source. Detailed below.
- ActivemqBrokerParameters PipeSourceParametersActivemqBrokerParameters
- The parameters for using an Active MQ broker as a source. Detailed below.
- DynamodbStreamParameters PipeSourceParametersDynamodbStreamParameters
- The parameters for using a DynamoDB stream as a source. Detailed below.
- FilterCriteria PipeSourceParametersFilterCriteria
- The collection of event patterns used to filter events. Detailed below.
- KinesisStreamParameters PipeSourceParametersKinesisStreamParameters
- The parameters for using a Kinesis stream as a source. Detailed below.
- ManagedStreamingKafkaParameters PipeSourceParametersManagedStreamingKafkaParameters
- The parameters for using an MSK stream as a source. Detailed below.
- RabbitmqBrokerParameters PipeSourceParametersRabbitmqBrokerParameters
- The parameters for using a Rabbit MQ broker as a source. Detailed below.
- SelfManagedKafkaParameters PipeSourceParametersSelfManagedKafkaParameters
- The parameters for using a self-managed Apache Kafka stream as a source. Detailed below.
- SqsQueueParameters PipeSourceParametersSqsQueueParameters
- The parameters for using an Amazon SQS queue as a source. Detailed below.
- activemqBrokerParameters PipeSourceParametersActivemqBrokerParameters
- The parameters for using an Active MQ broker as a source. Detailed below.
- dynamodbStreamParameters PipeSourceParametersDynamodbStreamParameters
- The parameters for using a DynamoDB stream as a source. Detailed below.
- filterCriteria PipeSourceParametersFilterCriteria
- The collection of event patterns used to filter events. Detailed below.
- kinesisStreamParameters PipeSourceParametersKinesisStreamParameters
- The parameters for using a Kinesis stream as a source. Detailed below.
- managedStreamingKafkaParameters PipeSourceParametersManagedStreamingKafkaParameters
- The parameters for using an MSK stream as a source. Detailed below.
- rabbitmqBrokerParameters PipeSourceParametersRabbitmqBrokerParameters
- The parameters for using a Rabbit MQ broker as a source. Detailed below.
- selfManagedKafkaParameters PipeSourceParametersSelfManagedKafkaParameters
- The parameters for using a self-managed Apache Kafka stream as a source. Detailed below.
- sqsQueueParameters PipeSourceParametersSqsQueueParameters
- The parameters for using an Amazon SQS queue as a source. Detailed below.
- activemqBrokerParameters PipeSourceParametersActivemqBrokerParameters
- The parameters for using an Active MQ broker as a source. Detailed below.
- dynamodbStreamParameters PipeSourceParametersDynamodbStreamParameters
- The parameters for using a DynamoDB stream as a source. Detailed below.
- filterCriteria PipeSourceParametersFilterCriteria
- The collection of event patterns used to filter events. Detailed below.
- kinesisStreamParameters PipeSourceParametersKinesisStreamParameters
- The parameters for using a Kinesis stream as a source. Detailed below.
- managedStreamingKafkaParameters PipeSourceParametersManagedStreamingKafkaParameters
- The parameters for using an MSK stream as a source. Detailed below.
- rabbitmqBrokerParameters PipeSourceParametersRabbitmqBrokerParameters
- The parameters for using a Rabbit MQ broker as a source. Detailed below.
- selfManagedKafkaParameters PipeSourceParametersSelfManagedKafkaParameters
- The parameters for using a self-managed Apache Kafka stream as a source. Detailed below.
- sqsQueueParameters PipeSourceParametersSqsQueueParameters
- The parameters for using an Amazon SQS queue as a source. Detailed below.
- activemq_broker_parameters PipeSourceParametersActivemqBrokerParameters
- The parameters for using an Active MQ broker as a source. Detailed below.
- dynamodb_stream_parameters PipeSourceParametersDynamodbStreamParameters
- The parameters for using a DynamoDB stream as a source. Detailed below.
- filter_criteria PipeSourceParametersFilterCriteria
- The collection of event patterns used to filter events. Detailed below.
- kinesis_stream_parameters PipeSourceParametersKinesisStreamParameters
- The parameters for using a Kinesis stream as a source. Detailed below.
- managed_streaming_kafka_parameters PipeSourceParametersManagedStreamingKafkaParameters
- The parameters for using an MSK stream as a source. Detailed below.
- rabbitmq_broker_parameters PipeSourceParametersRabbitmqBrokerParameters
- The parameters for using a Rabbit MQ broker as a source. Detailed below.
- self_managed_kafka_parameters PipeSourceParametersSelfManagedKafkaParameters
- The parameters for using a self-managed Apache Kafka stream as a source. Detailed below.
- sqs_queue_parameters PipeSourceParametersSqsQueueParameters
- The parameters for using an Amazon SQS queue as a source. Detailed below.
- activemqBrokerParameters Property Map
- The parameters for using an Active MQ broker as a source. Detailed below.
- dynamodbStreamParameters Property Map
- The parameters for using a DynamoDB stream as a source. Detailed below.
- filterCriteria Property Map
- The collection of event patterns used to filter events. Detailed below.
- kinesisStreamParameters Property Map
- The parameters for using a Kinesis stream as a source. Detailed below.
- managedStreamingKafkaParameters Property Map
- The parameters for using an MSK stream as a source. Detailed below.
- rabbitmqBrokerParameters Property Map
- The parameters for using a Rabbit MQ broker as a source. Detailed below.
- selfManagedKafkaParameters Property Map
- The parameters for using a self-managed Apache Kafka stream as a source. Detailed below.
- sqsQueueParameters Property Map
- The parameters for using an Amazon SQS queue as a source. Detailed below.
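As one concrete case, a hedged TypeScript sketch of an SQS-sourced pipe that filters events and tunes the batch settings; the role and queue ARNs are placeholders:
import * as aws from "@pulumi/aws";

// Sketch only: the ARNs are placeholders.
const filteredPipe = new aws.pipes.Pipe("filtered", {
    roleArn: "arn:aws:iam::123456789012:role/example",   // placeholder
    source: "arn:aws:sqs:us-east-1:123456789012:source", // placeholder
    target: "arn:aws:sqs:us-east-1:123456789012:target", // placeholder
    sourceParameters: {
        filterCriteria: {
            // Up to 5 event patterns; each pattern is a JSON string.
            filters: [{
                pattern: JSON.stringify({ body: { status: ["ok"] } }),
            }],
        },
        sqsQueueParameters: {
            batchSize: 5,
            maximumBatchingWindowInSeconds: 30,
        },
    },
});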
PipeSourceParametersActivemqBrokerParameters, PipeSourceParametersActivemqBrokerParametersArgs            
- Credentials PipeSourceParametersActivemqBrokerParametersCredentials
- The credentials needed to access the resource. Detailed below.
- QueueName string
- The name of the destination queue to consume. Maximum length of 1000.
- BatchSize int
- The maximum number of records to include in each batch. Maximum value of 10000.
- MaximumBatchingWindowInSeconds int
- The maximum length of time to wait for events. Maximum value of 300.
- Credentials PipeSourceParametersActivemqBrokerParametersCredentials
- The credentials needed to access the resource. Detailed below.
- QueueName string
- The name of the destination queue to consume. Maximum length of 1000.
- BatchSize int
- The maximum number of records to include in each batch. Maximum value of 10000.
- MaximumBatchingWindowInSeconds int
- The maximum length of time to wait for events. Maximum value of 300.
- credentials PipeSourceParametersActivemqBrokerParametersCredentials
- The credentials needed to access the resource. Detailed below.
- queueName String
- The name of the destination queue to consume. Maximum length of 1000.
- batchSize Integer
- The maximum number of records to include in each batch. Maximum value of 10000.
- maximumBatchingWindowInSeconds Integer
- The maximum length of time to wait for events. Maximum value of 300.
- credentials PipeSourceParametersActivemqBrokerParametersCredentials
- The credentials needed to access the resource. Detailed below.
- queueName string
- The name of the destination queue to consume. Maximum length of 1000.
- batchSize number
- The maximum number of records to include in each batch. Maximum value of 10000.
- maximumBatchingWindowInSeconds number
- The maximum length of time to wait for events. Maximum value of 300.
- credentials PipeSourceParametersActivemqBrokerParametersCredentials
- The credentials needed to access the resource. Detailed below.
- queue_name str
- The name of the destination queue to consume. Maximum length of 1000.
- batch_size int
- The maximum number of records to include in each batch. Maximum value of 10000.
- maximum_batching_window_in_seconds int
- The maximum length of time to wait for events. Maximum value of 300.
- credentials Property Map
- The credentials needed to access the resource. Detailed below.
- queueName String
- The name of the destination queue to consume. Maximum length of 1000.
- batchSize Number
- The maximum number of records to include in each batch. Maximum value of 10000.
- maximumBatchingWindowInSeconds Number
- The maximum length of time to wait for events. Maximum value of 300.
PipeSourceParametersActivemqBrokerParametersCredentials, PipeSourceParametersActivemqBrokerParametersCredentialsArgs              
- BasicAuth string
- The ARN of the Secrets Manager secret containing the credentials.
- BasicAuth string
- The ARN of the Secrets Manager secret containing the credentials.
- basicAuth String
- The ARN of the Secrets Manager secret containing the credentials.
- basicAuth string
- The ARN of the Secrets Manager secret containing the credentials.
- basic_auth str
- The ARN of the Secrets Manager secret containing the credentials.
- basicAuth String
- The ARN of the Secrets Manager secret containing the credentials.
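Putting the two types together, a hedged TypeScript sketch of the sourceParameters argument for an ActiveMQ-backed pipe; the Secrets Manager ARN and queue name are placeholders:
// Sketch only: the Secrets Manager ARN and queue name are placeholders.
const activemqSourceParameters = {
    activemqBrokerParameters: {
        credentials: {
            basicAuth: "arn:aws:secretsmanager:us-east-1:123456789012:secret:mq-creds", // placeholder
        },
        queueName: "orders", // placeholder queue
        batchSize: 10,
        maximumBatchingWindowInSeconds: 60,
    },
};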
PipeSourceParametersDynamodbStreamParameters, PipeSourceParametersDynamodbStreamParametersArgs            
- StartingPosition string
- The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
- BatchSize int
- The maximum number of records to include in each batch. Maximum value of 10000.
- DeadLetterConfig PipeSourceParametersDynamodbStreamParametersDeadLetterConfig
- Define the target queue to send dead-letter queue events to. Detailed below.
- MaximumBatchingWindowInSeconds int
- The maximum length of time to wait for events. Maximum value of 300.
- MaximumRecordAgeInSeconds int
- Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
- MaximumRetryAttempts int
- Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
- OnPartialBatchItemFailure string
- Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
- ParallelizationFactor int
- The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
- StartingPosition string
- The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
- BatchSize int
- The maximum number of records to include in each batch. Maximum value of 10000.
- DeadLetterConfig PipeSourceParametersDynamodbStreamParametersDeadLetterConfig
- Define the target queue to send dead-letter queue events to. Detailed below.
- MaximumBatchingWindowInSeconds int
- The maximum length of time to wait for events. Maximum value of 300.
- MaximumRecordAgeInSeconds int
- Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
- MaximumRetryAttempts int
- Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
- OnPartialBatchItemFailure string
- Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
- ParallelizationFactor int
- The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
- startingPosition String
- The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
- batchSize Integer
- The maximum number of records to include in each batch. Maximum value of 10000.
- deadLetterConfig PipeSourceParametersDynamodbStreamParametersDeadLetterConfig
- Define the target queue to send dead-letter queue events to. Detailed below.
- maximumBatchingWindowInSeconds Integer
- The maximum length of time to wait for events. Maximum value of 300.
- maximumRecordAgeInSeconds Integer
- Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
- maximumRetryAttempts Integer
- Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
- onPartialBatchItemFailure String
- Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
- parallelizationFactor Integer
- The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
- startingPosition string
- The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
- batchSize number
- The maximum number of records to include in each batch. Maximum value of 10000.
- deadLetterConfig PipeSourceParametersDynamodbStreamParametersDeadLetterConfig
- Define the target queue to send dead-letter queue events to. Detailed below.
- maximumBatchingWindowInSeconds number
- The maximum length of time to wait for events. Maximum value of 300.
- maximumRecordAgeInSeconds number
- Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
- maximumRetryAttempts number
- Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
- onPartialBatchItemFailure string
- Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
- parallelizationFactor number
- The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
- starting_position str
- The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
- batch_size int
- The maximum number of records to include in each batch. Maximum value of 10000.
- dead_letter_config PipeSourceParametersDynamodbStreamParametersDeadLetterConfig
- Define the target queue to send dead-letter queue events to. Detailed below.
- maximum_batching_window_in_seconds int
- The maximum length of time to wait for events. Maximum value of 300.
- maximum_record_age_in_seconds int
- Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
- maximum_retry_attempts int
- Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
- on_partial_batch_item_failure str
- Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
- parallelization_factor int
- The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
- startingPosition String
- The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
- batchSize Number
- The maximum number of records to include in each batch. Maximum value of 10000.
- deadLetterConfig Property Map
- Define the target queue to send dead-letter queue events to. Detailed below.
- maximumBatchingWindowInSeconds Number
- The maximum length of time to wait for events. Maximum value of 300.
- maximumRecordAgeInSeconds Number
- Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
- maximumRetryAttempts Number
- Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
- onPartialBatchItemFailure String
- Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
- parallelizationFactor Number
- The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
PipeSourceParametersDynamodbStreamParametersDeadLetterConfig, PipeSourceParametersDynamodbStreamParametersDeadLetterConfigArgs                  
- Arn string
- ARN of the SQS queue or SNS topic to which EventBridge sends discarded events.
- Arn string
- ARN of the SQS queue or SNS topic to which EventBridge sends discarded events.
- arn String
- ARN of the SQS queue or SNS topic to which EventBridge sends discarded events.
- arn string
- ARN of the SQS queue or SNS topic to which EventBridge sends discarded events.
- arn str
- ARN of the SQS queue or SNS topic to which EventBridge sends discarded events.
- arn String
- ARN of the SQS queue or SNS topic to which EventBridge sends discarded events.
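For a DynamoDB stream source, a hedged TypeScript sketch of the sourceParameters argument that uses a dead-letter queue; the queue ARN is a placeholder:
// Sketch only: the dead-letter queue ARN is a placeholder.
const dynamodbSourceParameters = {
    dynamodbStreamParameters: {
        startingPosition: "LATEST",
        batchSize: 100,
        maximumRetryAttempts: 3,
        onPartialBatchItemFailure: "AUTOMATIC_BISECT",
        deadLetterConfig: {
            arn: "arn:aws:sqs:us-east-1:123456789012:pipe-dlq", // placeholder DLQ
        },
    },
};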
PipeSourceParametersFilterCriteria, PipeSourceParametersFilterCriteriaArgs          
- Filters List<PipeSourceParametersFilterCriteriaFilter>
- An array of up to 5 event patterns. Detailed below.
- Filters []PipeSourceParametersFilterCriteriaFilter
- An array of up to 5 event patterns. Detailed below.
- filters List<PipeSourceParametersFilterCriteriaFilter>
- An array of up to 5 event patterns. Detailed below.
- filters PipeSourceParametersFilterCriteriaFilter[]
- An array of up to 5 event patterns. Detailed below.
- filters Sequence[PipeSourceParametersFilterCriteriaFilter]
- An array of up to 5 event patterns. Detailed below.
- filters List<Property Map>
- An array of up to 5 event patterns. Detailed below.
PipeSourceParametersFilterCriteriaFilter, PipeSourceParametersFilterCriteriaFilterArgs            
- Pattern string
- The event pattern. At most 4096 characters.
- Pattern string
- The event pattern. At most 4096 characters.
- pattern String
- The event pattern. At most 4096 characters.
- pattern string
- The event pattern. At most 4096 characters.
- pattern str
- The event pattern. At most 4096 characters.
- pattern String
- The event pattern. At most 4096 characters.
PipeSourceParametersKinesisStreamParameters, PipeSourceParametersKinesisStreamParametersArgs            
- StartingPosition string
- The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
- BatchSize int
- The maximum number of records to include in each batch. Maximum value of 10000.
- DeadLetter PipeConfig Source Parameters Kinesis Stream Parameters Dead Letter Config 
- Define the target queue to send dead-letter queue events to. Detailed below.
- MaximumBatching intWindow In Seconds 
- The maximum length of a time to wait for events. Maximum value of 300.
- MaximumRecord intAge In Seconds 
- Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
- MaximumRetry intAttempts 
- Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
- OnPartial stringBatch Item Failure 
- Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retry each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
- ParallelizationFactor int
- The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
- StartingPosition stringTimestamp 
- With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
- StartingPosition string
- The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
- BatchSize int
- The maximum number of records to include in each batch. Maximum value of 10000.
- DeadLetter PipeConfig Source Parameters Kinesis Stream Parameters Dead Letter Config 
- Define the target queue to send dead-letter queue events to. Detailed below.
- MaximumBatching intWindow In Seconds 
- The maximum length of a time to wait for events. Maximum value of 300.
- MaximumRecord intAge In Seconds 
- Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
- MaximumRetry intAttempts 
- Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
- OnPartial stringBatch Item Failure 
- Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retry each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
- ParallelizationFactor int
- The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
- StartingPosition stringTimestamp 
- With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
- startingPosition String
- The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
- batchSize Integer
- The maximum number of records to include in each batch. Maximum value of 10000.
- deadLetterConfig PipeSourceParametersKinesisStreamParametersDeadLetterConfig
- Define the target queue to send dead-letter queue events to. Detailed below.
- maximumBatchingWindowInSeconds Integer
- The maximum length of time to wait for events. Maximum value of 300.
- maximumRecordAgeInSeconds Integer
- Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
- maximumRetryAttempts Integer
- Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
- onPartialBatchItemFailure String
- Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
- parallelizationFactor Integer
- The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
- startingPositionTimestamp String
- With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
- startingPosition string
- The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
- batchSize number
- The maximum number of records to include in each batch. Maximum value of 10000.
- deadLetterConfig PipeSourceParametersKinesisStreamParametersDeadLetterConfig
- Define the target queue to send dead-letter queue events to. Detailed below.
- maximumBatchingWindowInSeconds number
- The maximum length of time to wait for events. Maximum value of 300.
- maximumRecordAgeInSeconds number
- Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
- maximumRetryAttempts number
- Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
- onPartialBatchItemFailure string
- Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
- parallelizationFactor number
- The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
- startingPositionTimestamp string
- With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
- starting_position str
- The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
- batch_size int
- The maximum number of records to include in each batch. Maximum value of 10000.
- dead_letter_config PipeSourceParametersKinesisStreamParametersDeadLetterConfig
- Define the target queue to send dead-letter queue events to. Detailed below.
- maximum_batching_window_in_seconds int
- The maximum length of time to wait for events. Maximum value of 300.
- maximum_record_age_in_seconds int
- Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
- maximum_retry_attempts int
- Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
- on_partial_batch_item_failure str
- Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
- parallelization_factor int
- The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
- starting_position_timestamp str
- With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
- startingPosition String
- The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
- batchSize Number
- The maximum number of records to include in each batch. Maximum value of 10000.
- deadLetterConfig Property Map
- Define the target queue to send dead-letter queue events to. Detailed below.
- maximumBatchingWindowInSeconds Number
- The maximum length of time to wait for events. Maximum value of 300.
- maximumRecordAgeInSeconds Number
- Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
- maximumRetryAttempts Number
- Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
- onPartialBatchItemFailure String
- Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
- parallelizationFactor Number
- The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
- startingPositionTimestamp String
- With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
PipeSourceParametersKinesisStreamParametersDeadLetterConfig, PipeSourceParametersKinesisStreamParametersDeadLetterConfigArgs                  
- Arn string
- ARN of the target queue to send dead-letter queue events to.
- Arn string
- ARN of the target queue to send dead-letter queue events to.
- arn String
- ARN of the target queue to send dead-letter queue events to.
- arn string
- ARN of the target queue to send dead-letter queue events to.
- arn str
- ARN of the target queue to send dead-letter queue events to.
- arn String
- ARN of the target queue to send dead-letter queue events to.
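As an illustration, the following sketch wires a Kinesis stream source to an SQS target with a dead-letter queue for discarded records. It is a minimal, hypothetical example: the IAM role ARN is read from stack configuration and is assumed to already trust pipes.amazonaws.com and carry the required Kinesis and SQS permissions (see the Basic Usage example for role setup).
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";

const config = new pulumi.Config();
// Assumed: an IAM role with the required Kinesis and SQS permissions.
const pipeRoleArn = config.require("pipeRoleArn");

const sourceStream = new aws.kinesis.Stream("source", {shardCount: 1});
const dlq = new aws.sqs.Queue("dlq", {});
const targetQueue = new aws.sqs.Queue("target", {});

const kinesisPipe = new aws.pipes.Pipe("kinesis-example", {
    roleArn: pipeRoleArn,
    source: sourceStream.arn,
    target: targetQueue.arn,
    sourceParameters: {
        kinesisStreamParameters: {
            startingPosition: "LATEST",             // or TRIM_HORIZON / AT_TIMESTAMP
            batchSize: 100,                         // up to 10000
            maximumBatchingWindowInSeconds: 30,     // up to 300
            maximumRetryAttempts: 3,                // -1 retries until the record expires
            maximumRecordAgeInSeconds: 3600,        // -1 never discards; up to 604800
            onPartialBatchItemFailure: "AUTOMATIC_BISECT",
            parallelizationFactor: 1,               // up to 10 concurrent batches per shard
            deadLetterConfig: {
                arn: dlq.arn,                       // queue that receives discarded records
            },
        },
    },
});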
PipeSourceParametersManagedStreamingKafkaParameters, PipeSourceParametersManagedStreamingKafkaParametersArgs              
- TopicName string
- The name of the topic that the pipe will read from. Maximum length of 249.
- BatchSize int
- The maximum number of records to include in each batch. Maximum value of 10000.
- ConsumerGroupId string
- The name of the consumer group to use. Maximum length of 200.
- Credentials PipeSourceParametersManagedStreamingKafkaParametersCredentials
- The credentials needed to access the resource. Detailed below.
- MaximumBatchingWindowInSeconds int
- The maximum length of time to wait for events. Maximum value of 300.
- StartingPosition string
- The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
- TopicName string
- The name of the topic that the pipe will read from. Maximum length of 249.
- BatchSize int
- The maximum number of records to include in each batch. Maximum value of 10000.
- ConsumerGroupId string
- The name of the consumer group to use. Maximum length of 200.
- Credentials PipeSourceParametersManagedStreamingKafkaParametersCredentials
- The credentials needed to access the resource. Detailed below.
- MaximumBatchingWindowInSeconds int
- The maximum length of time to wait for events. Maximum value of 300.
- StartingPosition string
- The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
- topicName String
- The name of the topic that the pipe will read from. Maximum length of 249.
- batchSize Integer
- The maximum number of records to include in each batch. Maximum value of 10000.
- consumerGroupId String
- The name of the consumer group to use. Maximum length of 200.
- credentials PipeSourceParametersManagedStreamingKafkaParametersCredentials
- The credentials needed to access the resource. Detailed below.
- maximumBatchingWindowInSeconds Integer
- The maximum length of time to wait for events. Maximum value of 300.
- startingPosition String
- The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
- topicName string
- The name of the topic that the pipe will read from. Maximum length of 249.
- batchSize number
- The maximum number of records to include in each batch. Maximum value of 10000.
- consumerGroupId string
- The name of the consumer group to use. Maximum length of 200.
- credentials PipeSourceParametersManagedStreamingKafkaParametersCredentials
- The credentials needed to access the resource. Detailed below.
- maximumBatchingWindowInSeconds number
- The maximum length of time to wait for events. Maximum value of 300.
- startingPosition string
- The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
- topic_name str
- The name of the topic that the pipe will read from. Maximum length of 249.
- batch_size int
- The maximum number of records to include in each batch. Maximum value of 10000.
- consumer_group_id str
- The name of the consumer group to use. Maximum length of 200.
- credentials PipeSourceParametersManagedStreamingKafkaParametersCredentials
- The credentials needed to access the resource. Detailed below.
- maximum_batching_window_in_seconds int
- The maximum length of time to wait for events. Maximum value of 300.
- starting_position str
- The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
- topicName String
- The name of the topic that the pipe will read from. Maximum length of 249.
- batchSize Number
- The maximum number of records to include in each batch. Maximum value of 10000.
- consumerGroupId String
- The name of the consumer group to use. Maximum length of 200.
- credentials Property Map
- The credentials needed to access the resource. Detailed below.
- maximumBatchingWindowInSeconds Number
- The maximum length of time to wait for events. Maximum value of 300.
- startingPosition String
- The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
PipeSourceParametersManagedStreamingKafkaParametersCredentials, PipeSourceParametersManagedStreamingKafkaParametersCredentialsArgs                
- ClientCertificateTlsAuth string
- The ARN of the Secrets Manager secret containing the credentials.
- SaslScram512Auth string
- The ARN of the Secrets Manager secret containing the credentials.
- ClientCertificateTlsAuth string
- The ARN of the Secrets Manager secret containing the credentials.
- SaslScram512Auth string
- The ARN of the Secrets Manager secret containing the credentials.
- clientCertificateTlsAuth String
- The ARN of the Secrets Manager secret containing the credentials.
- saslScram512Auth String
- The ARN of the Secrets Manager secret containing the credentials.
- clientCertificateTlsAuth string
- The ARN of the Secrets Manager secret containing the credentials.
- saslScram512Auth string
- The ARN of the Secrets Manager secret containing the credentials.
- client_certificate_tls_auth str
- The ARN of the Secrets Manager secret containing the credentials.
- sasl_scram512_auth str
- The ARN of the Secrets Manager secret containing the credentials.
- clientCertificateTlsAuth String
- The ARN of the Secrets Manager secret containing the credentials.
- saslScram512Auth String
- The ARN of the Secrets Manager secret containing the credentials.
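For MSK, the pipe's source is the ARN of the cluster, and the Kafka-specific options live under managedStreamingKafkaParameters. A minimal sketch, assuming an existing MSK cluster and a Secrets Manager secret holding SASL/SCRAM-512 credentials, both supplied via stack configuration:
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";

const config = new pulumi.Config();
const pipeRoleArn = config.require("pipeRoleArn");        // assumed IAM role ARN
const mskClusterArn = config.require("mskClusterArn");    // assumed existing MSK cluster ARN
const kafkaSecretArn = config.require("kafkaSecretArn");  // assumed SASL/SCRAM-512 secret ARN

const mskTarget = new aws.sqs.Queue("msk-target", {});

const mskPipe = new aws.pipes.Pipe("msk-example", {
    roleArn: pipeRoleArn,
    source: mskClusterArn,
    target: mskTarget.arn,
    sourceParameters: {
        managedStreamingKafkaParameters: {
            topicName: "orders",                  // up to 249 characters
            startingPosition: "TRIM_HORIZON",
            consumerGroupId: "pipe-consumers",    // up to 200 characters
            batchSize: 500,
            maximumBatchingWindowInSeconds: 60,
            credentials: {
                saslScram512Auth: kafkaSecretArn, // Secrets Manager secret ARN
            },
        },
    },
});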
PipeSourceParametersRabbitmqBrokerParameters, PipeSourceParametersRabbitmqBrokerParametersArgs            
- Credentials PipeSourceParametersRabbitmqBrokerParametersCredentials
- The credentials needed to access the resource. Detailed below.
- QueueName string
- The name of the destination queue to consume. Maximum length of 1000.
- BatchSize int
- The maximum number of records to include in each batch. Maximum value of 10000.
- MaximumBatchingWindowInSeconds int
- The maximum length of time to wait for events. Maximum value of 300.
- VirtualHost string
- The name of the virtual host associated with the source broker. Maximum length of 200.
- Credentials PipeSourceParametersRabbitmqBrokerParametersCredentials
- The credentials needed to access the resource. Detailed below.
- QueueName string
- The name of the destination queue to consume. Maximum length of 1000.
- BatchSize int
- The maximum number of records to include in each batch. Maximum value of 10000.
- MaximumBatchingWindowInSeconds int
- The maximum length of time to wait for events. Maximum value of 300.
- VirtualHost string
- The name of the virtual host associated with the source broker. Maximum length of 200.
- credentials PipeSourceParametersRabbitmqBrokerParametersCredentials
- The credentials needed to access the resource. Detailed below.
- queueName String
- The name of the destination queue to consume. Maximum length of 1000.
- batchSize Integer
- The maximum number of records to include in each batch. Maximum value of 10000.
- maximumBatchingWindowInSeconds Integer
- The maximum length of time to wait for events. Maximum value of 300.
- virtualHost String
- The name of the virtual host associated with the source broker. Maximum length of 200.
- credentials PipeSourceParametersRabbitmqBrokerParametersCredentials
- The credentials needed to access the resource. Detailed below.
- queueName string
- The name of the destination queue to consume. Maximum length of 1000.
- batchSize number
- The maximum number of records to include in each batch. Maximum value of 10000.
- maximumBatchingWindowInSeconds number
- The maximum length of time to wait for events. Maximum value of 300.
- virtualHost string
- The name of the virtual host associated with the source broker. Maximum length of 200.
- credentials PipeSourceParametersRabbitmqBrokerParametersCredentials
- The credentials needed to access the resource. Detailed below.
- queue_name str
- The name of the destination queue to consume. Maximum length of 1000.
- batch_size int
- The maximum number of records to include in each batch. Maximum value of 10000.
- maximum_batching_window_in_seconds int
- The maximum length of time to wait for events. Maximum value of 300.
- virtual_host str
- The name of the virtual host associated with the source broker. Maximum length of 200.
- credentials Property Map
- The credentials needed to access the resource. Detailed below.
- queueName String
- The name of the destination queue to consume. Maximum length of 1000.
- batchSize Number
- The maximum number of records to include in each batch. Maximum value of 10000.
- maximumBatchingWindowInSeconds Number
- The maximum length of time to wait for events. Maximum value of 300.
- virtualHost String
- The name of the virtual host associated with the source broker. Maximum length of 200.
PipeSourceParametersRabbitmqBrokerParametersCredentials, PipeSourceParametersRabbitmqBrokerParametersCredentialsArgs              
- BasicAuth string
- The ARN of the Secrets Manager secret containing the credentials.
- BasicAuth string
- The ARN of the Secrets Manager secret containing the credentials.
- basicAuth String
- The ARN of the Secrets Manager secret containing the credentials.
- basicAuth string
- The ARN of the Secrets Manager secret containing the credentials.
- basic_auth str
- The ARN of the Secrets Manager secret containing the credentials.
- basicAuth String
- The ARN of the Secrets Manager secret containing the credentials.
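A RabbitMQ broker source always authenticates through a basicAuth secret. A minimal sketch, assuming an existing Amazon MQ RabbitMQ broker and a Secrets Manager secret containing the broker's username and password, both supplied via stack configuration:
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";

const config = new pulumi.Config();
const pipeRoleArn = config.require("pipeRoleArn");          // assumed IAM role ARN
const brokerArn = config.require("rabbitBrokerArn");        // assumed Amazon MQ broker ARN
const brokerSecretArn = config.require("brokerSecretArn");  // assumed Secrets Manager secret ARN

const rabbitTarget = new aws.sqs.Queue("rabbit-target", {});

const rabbitPipe = new aws.pipes.Pipe("rabbitmq-example", {
    roleArn: pipeRoleArn,
    source: brokerArn,
    target: rabbitTarget.arn,
    sourceParameters: {
        rabbitmqBrokerParameters: {
            queueName: "orders",               // up to 1000 characters
            virtualHost: "/",                  // up to 200 characters
            batchSize: 10,
            maximumBatchingWindowInSeconds: 5,
            credentials: {
                basicAuth: brokerSecretArn,    // Secrets Manager secret ARN
            },
        },
    },
});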
PipeSourceParametersSelfManagedKafkaParameters, PipeSourceParametersSelfManagedKafkaParametersArgs              
- TopicName string
- The name of the topic that the pipe will read from. Maximum length of 249.
- AdditionalBootstrapServers List<string>
- An array of server URLs. Maximum number of 2 items, each of maximum length 300.
- BatchSize int
- The maximum number of records to include in each batch. Maximum value of 10000.
- ConsumerGroupId string
- The name of the consumer group to use. Maximum length of 200.
- Credentials PipeSourceParametersSelfManagedKafkaParametersCredentials
- The credentials needed to access the resource. Detailed below.
- MaximumBatchingWindowInSeconds int
- The maximum length of time to wait for events. Maximum value of 300.
- ServerRootCaCertificate string
- The ARN of the Secrets Manager secret used for certification.
- StartingPosition string
- The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
- Vpc PipeSourceParametersSelfManagedKafkaParametersVpc
- This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used. Detailed below.
- TopicName string
- The name of the topic that the pipe will read from. Maximum length of 249.
- AdditionalBootstrapServers []string
- An array of server URLs. Maximum number of 2 items, each of maximum length 300.
- BatchSize int
- The maximum number of records to include in each batch. Maximum value of 10000.
- ConsumerGroupId string
- The name of the consumer group to use. Maximum length of 200.
- Credentials PipeSourceParametersSelfManagedKafkaParametersCredentials
- The credentials needed to access the resource. Detailed below.
- MaximumBatchingWindowInSeconds int
- The maximum length of time to wait for events. Maximum value of 300.
- ServerRootCaCertificate string
- The ARN of the Secrets Manager secret used for certification.
- StartingPosition string
- The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
- Vpc PipeSourceParametersSelfManagedKafkaParametersVpc
- This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used. Detailed below.
- topicName String
- The name of the topic that the pipe will read from. Maximum length of 249.
- additionalBootstrapServers List<String>
- An array of server URLs. Maximum number of 2 items, each of maximum length 300.
- batchSize Integer
- The maximum number of records to include in each batch. Maximum value of 10000.
- consumerGroupId String
- The name of the consumer group to use. Maximum length of 200.
- credentials PipeSourceParametersSelfManagedKafkaParametersCredentials
- The credentials needed to access the resource. Detailed below.
- maximumBatchingWindowInSeconds Integer
- The maximum length of time to wait for events. Maximum value of 300.
- serverRootCaCertificate String
- The ARN of the Secrets Manager secret used for certification.
- startingPosition String
- The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
- vpc PipeSourceParametersSelfManagedKafkaParametersVpc
- This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used. Detailed below.
- topicName string
- The name of the topic that the pipe will read from. Maximum length of 249.
- additionalBootstrapServers string[]
- An array of server URLs. Maximum number of 2 items, each of maximum length 300.
- batchSize number
- The maximum number of records to include in each batch. Maximum value of 10000.
- consumerGroupId string
- The name of the consumer group to use. Maximum length of 200.
- credentials PipeSourceParametersSelfManagedKafkaParametersCredentials
- The credentials needed to access the resource. Detailed below.
- maximumBatchingWindowInSeconds number
- The maximum length of time to wait for events. Maximum value of 300.
- serverRootCaCertificate string
- The ARN of the Secrets Manager secret used for certification.
- startingPosition string
- The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
- vpc PipeSourceParametersSelfManagedKafkaParametersVpc
- This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used. Detailed below.
- topic_name str
- The name of the topic that the pipe will read from. Maximum length of 249.
- additional_bootstrap_servers Sequence[str]
- An array of server URLs. Maximum number of 2 items, each of maximum length 300.
- batch_size int
- The maximum number of records to include in each batch. Maximum value of 10000.
- consumer_group_id str
- The name of the consumer group to use. Maximum length of 200.
- credentials PipeSourceParametersSelfManagedKafkaParametersCredentials
- The credentials needed to access the resource. Detailed below.
- maximum_batching_window_in_seconds int
- The maximum length of time to wait for events. Maximum value of 300.
- server_root_ca_certificate str
- The ARN of the Secrets Manager secret used for certification.
- starting_position str
- The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
- vpc PipeSourceParametersSelfManagedKafkaParametersVpc
- This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used. Detailed below.
- topicName String
- The name of the topic that the pipe will read from. Maximum length of 249.
- additionalBootstrapServers List<String>
- An array of server URLs. Maximum number of 2 items, each of maximum length 300.
- batchSize Number
- The maximum number of records to include in each batch. Maximum value of 10000.
- consumerGroupId String
- The name of the consumer group to use. Maximum length of 200.
- credentials Property Map
- The credentials needed to access the resource. Detailed below.
- maximumBatchingWindowInSeconds Number
- The maximum length of time to wait for events. Maximum value of 300.
- serverRootCaCertificate String
- The ARN of the Secrets Manager secret used for certification.
- startingPosition String
- The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
- vpc Property Map
- This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used. Detailed below.
PipeSourceParametersSelfManagedKafkaParametersCredentials, PipeSourceParametersSelfManagedKafkaParametersCredentialsArgs                
- BasicAuth string
- The ARN of the Secrets Manager secret containing the credentials.
- ClientCertificateTlsAuth string
- The ARN of the Secrets Manager secret containing the credentials.
- SaslScram256Auth string
- The ARN of the Secrets Manager secret containing the credentials.
- SaslScram512Auth string
- The ARN of the Secrets Manager secret containing the credentials.
- BasicAuth string
- The ARN of the Secrets Manager secret containing the credentials.
- ClientCertificateTlsAuth string
- The ARN of the Secrets Manager secret containing the credentials.
- SaslScram256Auth string
- The ARN of the Secrets Manager secret containing the credentials.
- SaslScram512Auth string
- The ARN of the Secrets Manager secret containing the credentials.
- basicAuth String
- The ARN of the Secrets Manager secret containing the credentials.
- clientCertificateTlsAuth String
- The ARN of the Secrets Manager secret containing the credentials.
- saslScram256Auth String
- The ARN of the Secrets Manager secret containing the credentials.
- saslScram512Auth String
- The ARN of the Secrets Manager secret containing the credentials.
- basicAuth string
- The ARN of the Secrets Manager secret containing the credentials.
- clientCertificateTlsAuth string
- The ARN of the Secrets Manager secret containing the credentials.
- saslScram256Auth string
- The ARN of the Secrets Manager secret containing the credentials.
- saslScram512Auth string
- The ARN of the Secrets Manager secret containing the credentials.
- basic_auth str
- The ARN of the Secrets Manager secret containing the credentials.
- client_certificate_tls_auth str
- The ARN of the Secrets Manager secret containing the credentials.
- sasl_scram256_auth str
- The ARN of the Secrets Manager secret containing the credentials.
- sasl_scram512_auth str
- The ARN of the Secrets Manager secret containing the credentials.
- basicAuth String
- The ARN of the Secrets Manager secret containing the credentials.
- clientCertificateTlsAuth String
- The ARN of the Secrets Manager secret containing the credentials.
- saslScram256Auth String
- The ARN of the Secrets Manager secret containing the credentials.
- saslScram512Auth String
- The ARN of the Secrets Manager secret containing the credentials.
PipeSourceParametersSelfManagedKafkaParametersVpc, PipeSourceParametersSelfManagedKafkaParametersVpcArgs                
- SecurityGroups List<string>
- Subnets List<string>
- SecurityGroups []string
- Subnets []string
- securityGroups List<String>
- subnets List<String>
- securityGroups string[]
- subnets string[]
- security_groups Sequence[str]
- subnets Sequence[str]
- securityGroups List<String>
- subnets List<String>
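For a Kafka cluster you run yourself, the broker endpoint is the pipe's source (conventionally written as an smk:// URI; that format is an assumption based on provider examples), while authentication, TLS, and networking are configured under selfManagedKafkaParameters. A minimal sketch, with all external identifiers supplied via stack configuration:
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";

const config = new pulumi.Config();
const pipeRoleArn = config.require("pipeRoleArn");        // assumed IAM role ARN
const kafkaSecretArn = config.require("kafkaSecretArn");  // assumed SASL/SCRAM-256 secret ARN
const caSecretArn = config.require("caSecretArn");        // assumed root CA certificate secret ARN
const subnetIds = config.requireObject<string[]>("subnetIds");
const securityGroupId = config.require("securityGroupId");

const smkTarget = new aws.sqs.Queue("smk-target", {});

const smkPipe = new aws.pipes.Pipe("smk-example", {
    roleArn: pipeRoleArn,
    source: "smk://broker-1.internal.example.com:9092",   // assumed endpoint format
    target: smkTarget.arn,
    sourceParameters: {
        selfManagedKafkaParameters: {
            topicName: "orders",
            startingPosition: "LATEST",
            consumerGroupId: "pipe-consumers",
            additionalBootstrapServers: ["broker-2.internal.example.com:9092"], // up to 2 entries
            credentials: {
                saslScram256Auth: kafkaSecretArn,         // Secrets Manager secret ARN
            },
            serverRootCaCertificate: caSecretArn,         // Secrets Manager secret ARN
            vpc: {
                subnets: subnetIds,
                securityGroups: [securityGroupId],
            },
        },
    },
});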
PipeSourceParametersSqsQueueParameters, PipeSourceParametersSqsQueueParametersArgs            
- BatchSize int
- The maximum number of records to include in each batch. Maximum value of 10000.
- MaximumBatchingWindowInSeconds int
- The maximum length of time to wait for events. Maximum value of 300.
- BatchSize int
- The maximum number of records to include in each batch. Maximum value of 10000.
- MaximumBatchingWindowInSeconds int
- The maximum length of time to wait for events. Maximum value of 300.
- batchSize Integer
- The maximum number of records to include in each batch. Maximum value of 10000.
- maximumBatchingWindowInSeconds Integer
- The maximum length of time to wait for events. Maximum value of 300.
- batchSize number
- The maximum number of records to include in each batch. Maximum value of 10000.
- maximumBatchingWindowInSeconds number
- The maximum length of time to wait for events. Maximum value of 300.
- batch_size int
- The maximum number of records to include in each batch. Maximum value of 10000.
- maximum_batching_window_in_seconds int
- The maximum length of time to wait for events. Maximum value of 300.
- batchSize Number
- The maximum number of records to include in each batch. Maximum value of 10000.
- maximumBatchingWindowInSeconds Number
- The maximum length of time to wait for events. Maximum value of 300.
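The SQS source parameters are the simplest of the source types: only batching is tunable. A minimal sketch, again assuming a pre-provisioned role ARN in stack configuration:
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";

const config = new pulumi.Config();
const pipeRoleArn = config.require("pipeRoleArn"); // assumed IAM role ARN

const srcQueue = new aws.sqs.Queue("sqs-source", {});
const dstQueue = new aws.sqs.Queue("sqs-dest", {});

const sqsPipe = new aws.pipes.Pipe("sqs-example", {
    roleArn: pipeRoleArn,
    source: srcQueue.arn,
    target: dstQueue.arn,
    sourceParameters: {
        sqsQueueParameters: {
            batchSize: 10,                      // up to 10000
            maximumBatchingWindowInSeconds: 20, // up to 300
        },
    },
});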
PipeTargetParameters, PipeTargetParametersArgs      
- BatchJobParameters PipeTargetParametersBatchJobParameters
- The parameters for using an AWS Batch job as a target. Detailed below.
- CloudwatchLogsParameters PipeTargetParametersCloudwatchLogsParameters
- The parameters for using a CloudWatch Logs log stream as a target. Detailed below.
- EcsTaskParameters PipeTargetParametersEcsTaskParameters
- The parameters for using an Amazon ECS task as a target. Detailed below.
- EventbridgeEventBusParameters PipeTargetParametersEventbridgeEventBusParameters
- The parameters for using an EventBridge event bus as a target. Detailed below.
- HttpParameters PipeTargetParametersHttpParameters
- These are custom parameters to be used when the target is an API Gateway REST API or EventBridge API destination. Detailed below.
- InputTemplate string
- Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
- KinesisStreamParameters PipeTargetParametersKinesisStreamParameters
- The parameters for using a Kinesis stream as a target. Detailed below.
- LambdaFunctionParameters PipeTargetParametersLambdaFunctionParameters
- The parameters for using a Lambda function as a target. Detailed below.
- RedshiftDataParameters PipeTargetParametersRedshiftDataParameters
- These are custom parameters to be used when the target is an Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement. Detailed below.
- SagemakerPipelineParameters PipeTargetParametersSagemakerPipelineParameters
- The parameters for using a SageMaker AI pipeline as a target. Detailed below.
- SqsQueueParameters PipeTargetParametersSqsQueueParameters
- The parameters for using an Amazon SQS queue as a target. Detailed below.
- StepFunctionStateMachineParameters PipeTargetParametersStepFunctionStateMachineParameters
- The parameters for using a Step Functions state machine as a target. Detailed below.
- BatchJobParameters PipeTargetParametersBatchJobParameters
- The parameters for using an AWS Batch job as a target. Detailed below.
- CloudwatchLogsParameters PipeTargetParametersCloudwatchLogsParameters
- The parameters for using a CloudWatch Logs log stream as a target. Detailed below.
- EcsTaskParameters PipeTargetParametersEcsTaskParameters
- The parameters for using an Amazon ECS task as a target. Detailed below.
- EventbridgeEventBusParameters PipeTargetParametersEventbridgeEventBusParameters
- The parameters for using an EventBridge event bus as a target. Detailed below.
- HttpParameters PipeTargetParametersHttpParameters
- These are custom parameters to be used when the target is an API Gateway REST API or EventBridge API destination. Detailed below.
- InputTemplate string
- Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
- KinesisStreamParameters PipeTargetParametersKinesisStreamParameters
- The parameters for using a Kinesis stream as a target. Detailed below.
- LambdaFunctionParameters PipeTargetParametersLambdaFunctionParameters
- The parameters for using a Lambda function as a target. Detailed below.
- RedshiftDataParameters PipeTargetParametersRedshiftDataParameters
- These are custom parameters to be used when the target is an Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement. Detailed below.
- SagemakerPipelineParameters PipeTargetParametersSagemakerPipelineParameters
- The parameters for using a SageMaker AI pipeline as a target. Detailed below.
- SqsQueueParameters PipeTargetParametersSqsQueueParameters
- The parameters for using an Amazon SQS queue as a target. Detailed below.
- StepFunctionStateMachineParameters PipeTargetParametersStepFunctionStateMachineParameters
- The parameters for using a Step Functions state machine as a target. Detailed below.
- batchJobParameters PipeTargetParametersBatchJobParameters
- The parameters for using an AWS Batch job as a target. Detailed below.
- cloudwatchLogsParameters PipeTargetParametersCloudwatchLogsParameters
- The parameters for using a CloudWatch Logs log stream as a target. Detailed below.
- ecsTaskParameters PipeTargetParametersEcsTaskParameters
- The parameters for using an Amazon ECS task as a target. Detailed below.
- eventbridgeEventBusParameters PipeTargetParametersEventbridgeEventBusParameters
- The parameters for using an EventBridge event bus as a target. Detailed below.
- httpParameters PipeTargetParametersHttpParameters
- These are custom parameters to be used when the target is an API Gateway REST API or EventBridge API destination. Detailed below.
- inputTemplate String
- Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
- kinesisStreamParameters PipeTargetParametersKinesisStreamParameters
- The parameters for using a Kinesis stream as a target. Detailed below.
- lambdaFunctionParameters PipeTargetParametersLambdaFunctionParameters
- The parameters for using a Lambda function as a target. Detailed below.
- redshiftDataParameters PipeTargetParametersRedshiftDataParameters
- These are custom parameters to be used when the target is an Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement. Detailed below.
- sagemakerPipelineParameters PipeTargetParametersSagemakerPipelineParameters
- The parameters for using a SageMaker AI pipeline as a target. Detailed below.
- sqsQueueParameters PipeTargetParametersSqsQueueParameters
- The parameters for using an Amazon SQS queue as a target. Detailed below.
- stepFunctionStateMachineParameters PipeTargetParametersStepFunctionStateMachineParameters
- The parameters for using a Step Functions state machine as a target. Detailed below.
- batchJobParameters PipeTargetParametersBatchJobParameters
- The parameters for using an AWS Batch job as a target. Detailed below.
- cloudwatchLogsParameters PipeTargetParametersCloudwatchLogsParameters
- The parameters for using a CloudWatch Logs log stream as a target. Detailed below.
- ecsTaskParameters PipeTargetParametersEcsTaskParameters
- The parameters for using an Amazon ECS task as a target. Detailed below.
- eventbridgeEventBusParameters PipeTargetParametersEventbridgeEventBusParameters
- The parameters for using an EventBridge event bus as a target. Detailed below.
- httpParameters PipeTargetParametersHttpParameters
- These are custom parameters to be used when the target is an API Gateway REST API or EventBridge API destination. Detailed below.
- inputTemplate string
- Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
- kinesisStreamParameters PipeTargetParametersKinesisStreamParameters
- The parameters for using a Kinesis stream as a target. Detailed below.
- lambdaFunctionParameters PipeTargetParametersLambdaFunctionParameters
- The parameters for using a Lambda function as a target. Detailed below.
- redshiftDataParameters PipeTargetParametersRedshiftDataParameters
- These are custom parameters to be used when the target is an Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement. Detailed below.
- sagemakerPipelineParameters PipeTargetParametersSagemakerPipelineParameters
- The parameters for using a SageMaker AI pipeline as a target. Detailed below.
- sqsQueueParameters PipeTargetParametersSqsQueueParameters
- The parameters for using an Amazon SQS queue as a target. Detailed below.
- stepFunctionStateMachineParameters PipeTargetParametersStepFunctionStateMachineParameters
- The parameters for using a Step Functions state machine as a target. Detailed below.
- batch_job_parameters PipeTargetParametersBatchJobParameters
- The parameters for using an AWS Batch job as a target. Detailed below.
- cloudwatch_logs_parameters PipeTargetParametersCloudwatchLogsParameters
- The parameters for using a CloudWatch Logs log stream as a target. Detailed below.
- ecs_task_parameters PipeTargetParametersEcsTaskParameters
- The parameters for using an Amazon ECS task as a target. Detailed below.
- eventbridge_event_bus_parameters PipeTargetParametersEventbridgeEventBusParameters
- The parameters for using an EventBridge event bus as a target. Detailed below.
- http_parameters PipeTargetParametersHttpParameters
- These are custom parameters to be used when the target is an API Gateway REST API or EventBridge API destination. Detailed below.
- input_template str
- Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
- kinesis_stream_parameters PipeTargetParametersKinesisStreamParameters
- The parameters for using a Kinesis stream as a target. Detailed below.
- lambda_function_parameters PipeTargetParametersLambdaFunctionParameters
- The parameters for using a Lambda function as a target. Detailed below.
- redshift_data_parameters PipeTargetParametersRedshiftDataParameters
- These are custom parameters to be used when the target is an Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement. Detailed below.
- sagemaker_pipeline_parameters PipeTargetParametersSagemakerPipelineParameters
- The parameters for using a SageMaker AI pipeline as a target. Detailed below.
- sqs_queue_parameters PipeTargetParametersSqsQueueParameters
- The parameters for using an Amazon SQS queue as a target. Detailed below.
- step_function_state_machine_parameters PipeTargetParametersStepFunctionStateMachineParameters
- The parameters for using a Step Functions state machine as a target. Detailed below.
- batchJobParameters Property Map
- The parameters for using an AWS Batch job as a target. Detailed below.
- cloudwatchLogsParameters Property Map
- The parameters for using a CloudWatch Logs log stream as a target. Detailed below.
- ecsTaskParameters Property Map
- The parameters for using an Amazon ECS task as a target. Detailed below.
- eventbridgeEventBusParameters Property Map
- The parameters for using an EventBridge event bus as a target. Detailed below.
- httpParameters Property Map
- These are custom parameters to be used when the target is an API Gateway REST API or EventBridge API destination. Detailed below.
- inputTemplate String
- Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
- kinesisStreamParameters Property Map
- The parameters for using a Kinesis stream as a target. Detailed below.
- lambdaFunctionParameters Property Map
- The parameters for using a Lambda function as a target. Detailed below.
- redshiftDataParameters Property Map
- These are custom parameters to be used when the target is an Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement. Detailed below.
- sagemakerPipelineParameters Property Map
- The parameters for using a SageMaker AI pipeline as a target. Detailed below.
- sqsQueueParameters Property Map
- The parameters for using an Amazon SQS queue as a target. Detailed below.
- stepFunctionStateMachineParameters Property Map
- The parameters for using a Step Functions state machine as a target. Detailed below.
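Only the block matching the configured target service applies; inputTemplate controls what the target actually receives. A minimal sketch, assuming a pre-provisioned role ARN in stack configuration, that delivers a fixed JSON document instead of the event itself:
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";

const config = new pulumi.Config();
const pipeRoleArn = config.require("pipeRoleArn"); // assumed IAM role ARN

const inQueue = new aws.sqs.Queue("template-source", {});
const outQueue = new aws.sqs.Queue("template-dest", {});

const templatedPipe = new aws.pipes.Pipe("template-example", {
    roleArn: pipeRoleArn,
    source: inQueue.arn,
    target: outQueue.arn,
    targetParameters: {
        // Static JSON: nothing from the source event reaches the target.
        inputTemplate: JSON.stringify({source: "my-pipe", kind: "notification"}),
    },
});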
PipeTargetParametersBatchJobParameters, PipeTargetParametersBatchJobParametersArgs            
- JobDefinition string
- The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
- JobName string
- The name of the job. It can be up to 128 letters long.
- ArrayProperties PipeTargetParametersBatchJobParametersArrayProperties
- The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job. Detailed below.
- ContainerOverrides PipeTargetParametersBatchJobParametersContainerOverrides
- The overrides that are sent to a container. Detailed below.
- DependsOns List<PipeTargetParametersBatchJobParametersDependsOn>
- A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin. Detailed below.
- Parameters Dictionary<string, string>
- Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition. Detailed below.
- RetryStrategy PipeTargetParametersBatchJobParametersRetryStrategy
- The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition. Detailed below.
- JobDefinition string
- The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
- JobName string
- The name of the job. It can be up to 128 letters long.
- ArrayProperties PipeTargetParametersBatchJobParametersArrayProperties
- The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job. Detailed below.
- ContainerOverrides PipeTargetParametersBatchJobParametersContainerOverrides
- The overrides that are sent to a container. Detailed below.
- DependsOns []PipeTargetParametersBatchJobParametersDependsOn
- A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin. Detailed below.
- Parameters map[string]string
- Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition. Detailed below.
- RetryStrategy PipeTargetParametersBatchJobParametersRetryStrategy
- The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition. Detailed below.
- jobDefinition String
- The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
- jobName String
- The name of the job. It can be up to 128 letters long.
- arrayProperties PipeTargetParametersBatchJobParametersArrayProperties
- The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job. Detailed below.
- containerOverrides PipeTargetParametersBatchJobParametersContainerOverrides
- The overrides that are sent to a container. Detailed below.
- dependsOns List<PipeTargetParametersBatchJobParametersDependsOn>
- A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin. Detailed below.
- parameters Map<String,String>
- Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition. Detailed below.
- retryStrategy PipeTargetParametersBatchJobParametersRetryStrategy
- The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition. Detailed below.
- jobDefinition string
- The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
- jobName string
- The name of the job. It can be up to 128 letters long.
- arrayProperties PipeTargetParametersBatchJobParametersArrayProperties
- The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job. Detailed below.
- containerOverrides PipeTargetParametersBatchJobParametersContainerOverrides
- The overrides that are sent to a container. Detailed below.
- dependsOns PipeTargetParametersBatchJobParametersDependsOn[]
- A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin. Detailed below.
- parameters {[key: string]: string}
- Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition. Detailed below.
- retryStrategy PipeTargetParametersBatchJobParametersRetryStrategy
- The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition. Detailed below.
- job_definition str
- The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
- job_name str
- The name of the job. It can be up to 128 letters long.
- array_properties PipeTargetParametersBatchJobParametersArrayProperties
- The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job. Detailed below.
- container_overrides PipeTargetParametersBatchJobParametersContainerOverrides
- The overrides that are sent to a container. Detailed below.
- depends_ons Sequence[PipeTargetParametersBatchJobParametersDependsOn]
- A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin. Detailed below.
- parameters Mapping[str, str]
- Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition. Detailed below.
- retry_strategy PipeTargetParametersBatchJobParametersRetryStrategy
- The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition. Detailed below.
- jobDefinition String
- The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
- jobName String
- The name of the job. It can be up to 128 letters long.
- arrayProperties Property Map
- The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job. Detailed below.
- containerOverrides Property Map
- The overrides that are sent to a container. Detailed below.
- dependsOns List<Property Map>
- A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin. Detailed below.
- parameters Map<String>
- Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition. Detailed below.
- retryStrategy Property Map
- The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition. Detailed below.
PipeTargetParametersBatchJobParametersArrayProperties, PipeTargetParametersBatchJobParametersArrayPropertiesArgs                
- Size int
- The size of the array, if this is an array batch job. Minimum value of 2. Maximum value of 10,000.
- Size int
- The size of the array, if this is an array batch job. Minimum value of 2. Maximum value of 10,000.
- size Integer
- The size of the array, if this is an array batch job. Minimum value of 2. Maximum value of 10,000.
- size number
- The size of the array, if this is an array batch job. Minimum value of 2. Maximum value of 10,000.
- size int
- The size of the array, if this is an array batch job. Minimum value of 2. Maximum value of 10,000.
- size Number
- The size of the array, if this is an array batch job. Minimum value of 2. Maximum value of 10,000.
PipeTargetParametersBatchJobParametersContainerOverrides, PipeTargetParametersBatchJobParametersContainerOverridesArgs                
- Commands List<string>
- List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- Environments List<PipeTargetParametersBatchJobParametersContainerOverridesEnvironment>
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
- InstanceType string
- The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
- ResourceRequirements List<PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirement>
- The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
- Commands []string
- List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- Environments []PipeTargetParametersBatchJobParametersContainerOverridesEnvironment
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
- InstanceType string
- The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
- ResourceRequirements []PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirement
- The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
- commands List<String>
- List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- environments List<PipeTargetParametersBatchJobParametersContainerOverridesEnvironment>
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
- instanceType String
- The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
- resourceRequirements List<PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirement>
- The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
- commands string[]
- List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- environments PipeTargetParametersBatchJobParametersContainerOverridesEnvironment[]
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
- instanceType string
- The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
- resourceRequirements PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirement[]
- The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
- commands Sequence[str]
- List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- environments Sequence[PipeTargetParametersBatchJobParametersContainerOverridesEnvironment]
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
- instance_type str
- The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
- resource_requirements Sequence[PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirement]
- The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
- commands List<String>
- List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- environments List<Property Map>
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
- instanceType String
- The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
- resourceRequirements List<Property Map>
- The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
PipeTargetParametersBatchJobParametersContainerOverridesEnvironment, PipeTargetParametersBatchJobParametersContainerOverridesEnvironmentArgs                  
PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirement, PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirementArgs                    
- Type string
- The type of resource to assign to a container. The only supported resource is a GPU.
- Value string
- The quantity of the specified resource to reserve for the container. Maximum length of 1024.
- Type string
- The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task). Valid Values: random, spread, binpack.
- Value string
- Value of parameter to start execution of a SageMaker AI Model Building Pipeline. Maximum length of 1024.
- type String
- The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task). Valid Values: random, spread, binpack.
- value String
- Value of parameter to start execution of a SageMaker AI Model Building Pipeline. Maximum length of 1024.
- type string
- The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task). Valid Values: random, spread, binpack.
- value string
- Value of parameter to start execution of a SageMaker AI Model Building Pipeline. Maximum length of 1024.
- type str
- The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task). Valid Values: random, spread, binpack.
- value str
- Value of parameter to start execution of a SageMaker AI Model Building Pipeline. Maximum length of 1024.
- type String
- The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task). Valid Values: random, spread, binpack.
- value String
- Value of parameter to start execution of a SageMaker AI Model Building Pipeline. Maximum length of 1024.
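For instance, the container overrides above can request a GPU when the pipe submits the Batch job. Below is a minimal TypeScript sketch, in which exampleRole, sourceQueue, exampleJobQueue, and exampleJobDefinition are hypothetical pre-existing resources rather than part of this reference:
import * as aws from "@pulumi/aws";
const batchPipe = new aws.pipes.Pipe("batch-example", {
    roleArn: exampleRole.arn,        // hypothetical IAM role permitted to submit Batch jobs
    source: sourceQueue.arn,         // hypothetical SQS source queue
    target: exampleJobQueue.arn,     // hypothetical Batch job queue
    targetParameters: {
        batchJobParameters: {
            jobDefinition: exampleJobDefinition.arn,
            jobName: "example-job",
            containerOverrides: {
                // Request one GPU instead of the job definition's default.
                resourceRequirements: [{
                    type: "GPU",
                    value: "1",
                }],
            },
        },
    },
});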
PipeTargetParametersBatchJobParametersDependsOn, PipeTargetParametersBatchJobParametersDependsOnArgs                
- JobId string
- The job ID of the AWS Batch job that's associated with this dependency.
- Type string
- The type of the job dependency. Valid Values: N_TO_N, SEQUENTIAL.
- JobId string
- The job ID of the AWS Batch job that's associated with this dependency.
- Type string
- The type of the job dependency. Valid Values: N_TO_N, SEQUENTIAL.
- jobId String
- The job ID of the AWS Batch job that's associated with this dependency.
- type String
- The type of the job dependency. Valid Values: N_TO_N, SEQUENTIAL.
- jobId string
- The job ID of the AWS Batch job that's associated with this dependency.
- type string
- The type of the job dependency. Valid Values: N_TO_N, SEQUENTIAL.
- job_id str
- The job ID of the AWS Batch job that's associated with this dependency.
- type str
- The type of the job dependency. Valid Values: N_TO_N, SEQUENTIAL.
- jobId String
- The job ID of the AWS Batch job that's associated with this dependency.
- type String
- The type of the job dependency. Valid Values: N_TO_N, SEQUENTIAL.
PipeTargetParametersBatchJobParametersRetryStrategy, PipeTargetParametersBatchJobParametersRetryStrategyArgs                
- Attempts int
- The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value. Maximum value of 10.
- Attempts int
- The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value. Maximum value of 10.
- attempts Integer
- The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value. Maximum value of 10.
- attempts number
- The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value. Maximum value of 10.
- attempts int
- The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value. Maximum value of 10.
- attempts Number
- The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value. Maximum value of 10.
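For example, the retry strategy rides along with the other Batch job parameters. A sketch of just the target parameters object follows; the input type path and the exampleJobDefinition reference are assumptions for illustration:
import * as aws from "@pulumi/aws";
const batchTargetParameters: aws.types.input.pipes.PipeTargetParameters = {
    batchJobParameters: {
        jobDefinition: exampleJobDefinition.arn, // hypothetical Batch job definition
        jobName: "retried-job",
        retryStrategy: {
            attempts: 3, // move the job back to RUNNABLE up to 3 times on failure (maximum 10)
        },
    },
};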
PipeTargetParametersCloudwatchLogsParameters, PipeTargetParametersCloudwatchLogsParametersArgs            
- LogStreamName string
- The name of the log stream.
- Timestamp string
- The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. This is the JSON path to the field in the event e.g. $.detail.timestamp
- LogStreamName string
- The name of the log stream.
- Timestamp string
- The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. This is the JSON path to the field in the event e.g. $.detail.timestamp
- logStreamName String
- The name of the log stream.
- timestamp String
- The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. This is the JSON path to the field in the event e.g. $.detail.timestamp
- logStreamName string
- The name of the log stream.
- timestamp string
- The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. This is the JSON path to the field in the event e.g. $.detail.timestamp
- log_stream_name str
- The name of the log stream.
- timestamp str
- The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. This is the JSON path to the field in the event e.g. $.detail.timestamp
- logStreamName String
- The name of the log stream.
- timestamp String
- The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. This is the JSON path to the field in the event e.g. $.detail.timestamp
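Put together, a pipe that delivers events to a CloudWatch Logs log group might look like the following sketch, where exampleRole, sourceQueue, and exampleLogGroup are hypothetical existing resources:
import * as aws from "@pulumi/aws";
const logsPipe = new aws.pipes.Pipe("logs-example", {
    roleArn: exampleRole.arn,      // hypothetical pipe execution role
    source: sourceQueue.arn,       // hypothetical SQS source queue
    target: exampleLogGroup.arn,   // hypothetical CloudWatch Logs log group
    targetParameters: {
        cloudwatchLogsParameters: {
            logStreamName: "pipe-events",
            // JSON path into the event whose value becomes the log event timestamp.
            timestamp: "$.detail.timestamp",
        },
    },
});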
PipeTargetParametersEcsTaskParameters, PipeTargetParametersEcsTaskParametersArgs            
- TaskDefinitionArn string
- The ARN of the task definition to use if the event target is an Amazon ECS task.
- CapacityProviderStrategies List<PipeTargetParametersEcsTaskParametersCapacityProviderStrategy>
- List of capacity provider strategies to use for the task. If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used. Detailed below.
- EnableEcsManagedTags bool
- Specifies whether to enable Amazon ECS managed tags for the task. Valid values: true, false.
- EnableExecuteCommand bool
- Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task. Valid values: true, false.
- Group string
- Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
- LaunchType string
- Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. Valid Values: EC2, FARGATE, EXTERNAL
- NetworkConfiguration PipeTargetParametersEcsTaskParametersNetworkConfiguration
- Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks. If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails. Detailed below.
- Overrides PipeTargetParametersEcsTaskParametersOverrides
- The overrides that are associated with a task. Detailed below.
- PlacementConstraints List<PipeTargetParametersEcsTaskParametersPlacementConstraint>
- An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime). Detailed below.
- PlacementStrategies List<PipeTargetParametersEcsTaskParametersPlacementStrategy>
- The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task. Detailed below.
- PlatformVersion string
- Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0. This structure is used only if LaunchType is FARGATE.
- PropagateTags string
- Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action. Valid Values: TASK_DEFINITION
- ReferenceId string
- The reference ID to use for the task. Maximum length of 1,024.
- Tags Dictionary<string, string>
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- TaskCount int
- The number of tasks to create based on TaskDefinition. The default is 1.
- TaskDefinitionArn string
- The ARN of the task definition to use if the event target is an Amazon ECS task.
- CapacityProviderStrategies []PipeTargetParametersEcsTaskParametersCapacityProviderStrategy
- List of capacity provider strategies to use for the task. If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used. Detailed below.
- EnableEcsManagedTags bool
- Specifies whether to enable Amazon ECS managed tags for the task. Valid values: true, false.
- EnableExecuteCommand bool
- Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task. Valid values: true, false.
- Group string
- Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
- LaunchType string
- Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. Valid Values: EC2, FARGATE, EXTERNAL
- NetworkConfiguration PipeTargetParametersEcsTaskParametersNetworkConfiguration
- Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks. If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails. Detailed below.
- Overrides PipeTargetParametersEcsTaskParametersOverrides
- The overrides that are associated with a task. Detailed below.
- PlacementConstraints []PipeTargetParametersEcsTaskParametersPlacementConstraint
- An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime). Detailed below.
- PlacementStrategies []PipeTargetParametersEcsTaskParametersPlacementStrategy
- The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task. Detailed below.
- PlatformVersion string
- Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0. This structure is used only if LaunchType is FARGATE.
- PropagateTags string
- Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action. Valid Values: TASK_DEFINITION
- ReferenceId string
- The reference ID to use for the task. Maximum length of 1,024.
- Tags map[string]string
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- TaskCount int
- The number of tasks to create based on TaskDefinition. The default is 1.
- taskDefinitionArn String
- The ARN of the task definition to use if the event target is an Amazon ECS task.
- capacityProviderStrategies List<PipeTargetParametersEcsTaskParametersCapacityProviderStrategy>
- List of capacity provider strategies to use for the task. If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used. Detailed below.
- enableEcsManagedTags Boolean
- Specifies whether to enable Amazon ECS managed tags for the task. Valid values: true, false.
- enableExecuteCommand Boolean
- Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task. Valid values: true, false.
- group String
- Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
- launchType String
- Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. Valid Values: EC2, FARGATE, EXTERNAL
- networkConfiguration PipeTargetParametersEcsTaskParametersNetworkConfiguration
- Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks. If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails. Detailed below.
- overrides PipeTargetParametersEcsTaskParametersOverrides
- The overrides that are associated with a task. Detailed below.
- placementConstraints List<PipeTargetParametersEcsTaskParametersPlacementConstraint>
- An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime). Detailed below.
- placementStrategies List<PipeTargetParametersEcsTaskParametersPlacementStrategy>
- The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task. Detailed below.
- platformVersion String
- Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0. This structure is used only if LaunchType is FARGATE.
- propagateTags String
- Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action. Valid Values: TASK_DEFINITION
- referenceId String
- The reference ID to use for the task. Maximum length of 1,024.
- tags Map<String,String>
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- taskCount Integer
- The number of tasks to create based on TaskDefinition. The default is 1.
- taskDefinitionArn string
- The ARN of the task definition to use if the event target is an Amazon ECS task.
- capacityProviderStrategies PipeTargetParametersEcsTaskParametersCapacityProviderStrategy[]
- List of capacity provider strategies to use for the task. If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used. Detailed below.
- enableEcsManagedTags boolean
- Specifies whether to enable Amazon ECS managed tags for the task. Valid values: true, false.
- enableExecuteCommand boolean
- Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task. Valid values: true, false.
- group string
- Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
- launchType string
- Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. Valid Values: EC2, FARGATE, EXTERNAL
- networkConfiguration PipeTargetParametersEcsTaskParametersNetworkConfiguration
- Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks. If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails. Detailed below.
- overrides PipeTargetParametersEcsTaskParametersOverrides
- The overrides that are associated with a task. Detailed below.
- placementConstraints PipeTargetParametersEcsTaskParametersPlacementConstraint[]
- An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime). Detailed below.
- placementStrategies PipeTargetParametersEcsTaskParametersPlacementStrategy[]
- The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task. Detailed below.
- platformVersion string
- Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0. This structure is used only if LaunchType is FARGATE.
- propagateTags string
- Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action. Valid Values: TASK_DEFINITION
- referenceId string
- The reference ID to use for the task. Maximum length of 1,024.
- tags {[key: string]: string}
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- taskCount number
- The number of tasks to create based on TaskDefinition. The default is 1.
- task_definition_arn str
- The ARN of the task definition to use if the event target is an Amazon ECS task.
- capacity_provider_strategies Sequence[PipeTargetParametersEcsTaskParametersCapacityProviderStrategy]
- List of capacity provider strategies to use for the task. If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used. Detailed below.
- enable_ecs_managed_tags bool
- Specifies whether to enable Amazon ECS managed tags for the task. Valid values: true, false.
- enable_execute_command bool
- Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task. Valid values: true, false.
- group str
- Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
- launch_type str
- Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. Valid Values: EC2, FARGATE, EXTERNAL
- network_configuration PipeTargetParametersEcsTaskParametersNetworkConfiguration
- Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks. If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails. Detailed below.
- overrides PipeTargetParametersEcsTaskParametersOverrides
- The overrides that are associated with a task. Detailed below.
- placement_constraints Sequence[PipeTargetParametersEcsTaskParametersPlacementConstraint]
- An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime). Detailed below.
- placement_strategies Sequence[PipeTargetParametersEcsTaskParametersPlacementStrategy]
- The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task. Detailed below.
- platform_version str
- Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0. This structure is used only if LaunchType is FARGATE.
- propagate_tags str
- Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action. Valid Values: TASK_DEFINITION
- reference_id str
- The reference ID to use for the task. Maximum length of 1,024.
- tags Mapping[str, str]
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- task_count int
- The number of tasks to create based on TaskDefinition. The default is 1.
- taskDefinitionArn String
- The ARN of the task definition to use if the event target is an Amazon ECS task.
- capacityProviderStrategies List<Property Map>
- List of capacity provider strategies to use for the task. If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used. Detailed below.
- enableEcsManagedTags Boolean
- Specifies whether to enable Amazon ECS managed tags for the task. Valid values: true, false.
- enableExecuteCommand Boolean
- Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task. Valid values: true, false.
- group String
- Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
- launchType String
- Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. Valid Values: EC2, FARGATE, EXTERNAL
- networkConfiguration Property Map
- Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks. If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails. Detailed below.
- overrides Property Map
- The overrides that are associated with a task. Detailed below.
- placementConstraints List<Property Map>
- An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime). Detailed below.
- placementStrategies List<Property Map>
- The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task. Detailed below.
- platformVersion String
- Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0. This structure is used only if LaunchType is FARGATE.
- propagateTags String
- Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action. Valid Values: TASK_DEFINITION
- referenceId String
- The reference ID to use for the task. Maximum length of 1,024.
- tags Map<String>
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- taskCount Number
- The number of tasks to create based on TaskDefinition. The default is 1.
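A minimal Fargate-flavored sketch tying these fields together; exampleRole, sourceQueue, exampleCluster, exampleTaskDefinition, and the subnet/security-group IDs are hypothetical placeholders:
import * as aws from "@pulumi/aws";
const ecsPipe = new aws.pipes.Pipe("ecs-example", {
    roleArn: exampleRole.arn,        // hypothetical pipe execution role
    source: sourceQueue.arn,         // hypothetical SQS source queue
    target: exampleCluster.arn,      // hypothetical ECS cluster
    targetParameters: {
        ecsTaskParameters: {
            taskDefinitionArn: exampleTaskDefinition.arn, // hypothetical task definition
            launchType: "FARGATE",
            taskCount: 1,
            // Fargate tasks require the awsvpc network mode, so networkConfiguration is mandatory here.
            networkConfiguration: {
                awsVpcConfiguration: {
                    subnets: ["subnet-0123456789abcdef0"],    // hypothetical subnet ID
                    securityGroups: ["sg-0123456789abcdef0"], // hypothetical security group ID
                    assignPublicIp: "ENABLED",
                },
            },
        },
    },
});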
PipeTargetParametersEcsTaskParametersCapacityProviderStrategy, PipeTargetParametersEcsTaskParametersCapacityProviderStrategyArgs                  
- CapacityProvider string
- The short name of the capacity provider. Maximum value of 255.
- Base int
- The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used. Maximum value of 100,000.
- Weight int
- The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied. Maximum value of 1,000.
- CapacityProvider string
- The short name of the capacity provider. Maximum value of 255.
- Base int
- The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used. Maximum value of 100,000.
- Weight int
- The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied. Maximum value of 1,000.
- capacityProvider String
- The short name of the capacity provider. Maximum value of 255.
- base Integer
- The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used. Maximum value of 100,000.
- weight Integer
- The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied. Maximum value of 1,000.
- capacityProvider string
- The short name of the capacity provider. Maximum value of 255.
- base number
- The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used. Maximum value of 100,000.
- weight number
- The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied. Maximum value of 1,000.
- capacity_provider str
- The short name of the capacity provider. Maximum value of 255.
- base int
- The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used. Maximum value of 100,000.
- weight int
- The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied. Maximum value of 1,000.
- capacityProvider String
- The short name of the capacity provider. Maximum value of 255.
- base Number
- The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used. Maximum value of 100,000.
- weight Number
- The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied. Maximum value of 1,000.
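Because a capacity provider strategy and launchType are mutually exclusive, a strategy-based configuration omits launchType. A sketch of just the target parameters object, with exampleTaskDefinition as a hypothetical placeholder and the input type path assumed:
import * as aws from "@pulumi/aws";
const strategyParameters: aws.types.input.pipes.PipeTargetParameters = {
    ecsTaskParameters: {
        taskDefinitionArn: exampleTaskDefinition.arn, // hypothetical
        // launchType is intentionally omitted: it must not be combined with capacityProviderStrategies.
        capacityProviderStrategies: [
            { capacityProvider: "FARGATE", base: 1, weight: 1 },      // always keep at least one task on FARGATE
            { capacityProvider: "FARGATE_SPOT", base: 0, weight: 4 }, // weight the remainder 4:1 toward Spot
        ],
    },
};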
PipeTargetParametersEcsTaskParametersNetworkConfiguration, PipeTargetParametersEcsTaskParametersNetworkConfigurationArgs                
- AwsVpcConfiguration PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfiguration
- Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode. Detailed below.
- AwsVpcConfiguration PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfiguration
- Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode. Detailed below.
- awsVpcConfiguration PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfiguration
- Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode. Detailed below.
- awsVpcConfiguration PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfiguration
- Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode. Detailed below.
- aws_vpc_configuration PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfiguration
- Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode. Detailed below.
- awsVpcConfiguration Property Map
- Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode. Detailed below.
PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfiguration, PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfigurationArgs                      
- AssignPublicIp string
- Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE. Valid Values: ENABLED, DISABLED.
- SecurityGroups List<string>
- Subnets List<string>
- AssignPublicIp string
- Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE. Valid Values: ENABLED, DISABLED.
- SecurityGroups []string
- Subnets []string
- assignPublicIp String
- Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE. Valid Values: ENABLED, DISABLED.
- securityGroups List<String>
- subnets List<String>
- assignPublicIp string
- Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE. Valid Values: ENABLED, DISABLED.
- securityGroups string[]
- subnets string[]
- assign_public_ip str
- Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE. Valid Values: ENABLED, DISABLED.
- security_groups Sequence[str]
- subnets Sequence[str]
- assignPublicIp String
- Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE. Valid Values: ENABLED, DISABLED.
- securityGroups List<String>
- subnets List<String>
PipeTargetParametersEcsTaskParametersOverrides, PipeTargetParametersEcsTaskParametersOverridesArgs              
- ContainerOverrides List<PipeTargetParametersEcsTaskParametersOverridesContainerOverride>
- One or more container overrides that are sent to a task. Detailed below.
- Cpu string
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- EphemeralStorage PipeTargetParametersEcsTaskParametersOverridesEphemeralStorage
- The ephemeral storage setting override for the task. Detailed below.
- ExecutionRoleArn string
- The Amazon Resource Name (ARN) of the task execution IAM role override for the task.
- InferenceAcceleratorOverrides List<PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverride>
- List of Elastic Inference accelerator overrides for the task. Detailed below.
- Memory string
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- TaskRoleArn string
- The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role.
- ContainerOverrides []PipeTargetParametersEcsTaskParametersOverridesContainerOverride
- One or more container overrides that are sent to a task. Detailed below.
- Cpu string
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- EphemeralStorage PipeTargetParametersEcsTaskParametersOverridesEphemeralStorage
- The ephemeral storage setting override for the task. Detailed below.
- ExecutionRoleArn string
- The Amazon Resource Name (ARN) of the task execution IAM role override for the task.
- InferenceAcceleratorOverrides []PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverride
- List of Elastic Inference accelerator overrides for the task. Detailed below.
- Memory string
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- TaskRoleArn string
- The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role.
- containerOverrides List<PipeTargetParametersEcsTaskParametersOverridesContainerOverride>
- One or more container overrides that are sent to a task. Detailed below.
- cpu String
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- ephemeralStorage PipeTargetParametersEcsTaskParametersOverridesEphemeralStorage
- The ephemeral storage setting override for the task. Detailed below.
- executionRoleArn String
- The Amazon Resource Name (ARN) of the task execution IAM role override for the task.
- inferenceAcceleratorOverrides List<PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverride>
- List of Elastic Inference accelerator overrides for the task. Detailed below.
- memory String
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- taskRoleArn String
- The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role.
- containerOverrides PipeTargetParametersEcsTaskParametersOverridesContainerOverride[]
- One or more container overrides that are sent to a task. Detailed below.
- cpu string
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- ephemeralStorage PipeTargetParametersEcsTaskParametersOverridesEphemeralStorage
- The ephemeral storage setting override for the task. Detailed below.
- executionRoleArn string
- The Amazon Resource Name (ARN) of the task execution IAM role override for the task.
- inferenceAcceleratorOverrides PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverride[]
- List of Elastic Inference accelerator overrides for the task. Detailed below.
- memory string
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- taskRoleArn string
- The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role.
- container_overrides Sequence[PipeTargetParametersEcsTaskParametersOverridesContainerOverride]
- One or more container overrides that are sent to a task. Detailed below.
- cpu str
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- ephemeral_storage PipeTargetParametersEcsTaskParametersOverridesEphemeralStorage
- The ephemeral storage setting override for the task. Detailed below.
- execution_role_arn str
- The Amazon Resource Name (ARN) of the task execution IAM role override for the task.
- inference_accelerator_overrides Sequence[PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverride]
- List of Elastic Inference accelerator overrides for the task. Detailed below.
- memory str
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- task_role_arn str
- The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role.
- containerOverrides List<Property Map>
- One or more container overrides that are sent to a task. Detailed below.
- cpu String
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- ephemeralStorage Property Map
- The ephemeral storage setting override for the task. Detailed below.
- executionRoleArn String
- The Amazon Resource Name (ARN) of the task execution IAM role override for the task.
- inferenceAcceleratorOverrides List<Property Map>
- List of Elastic Inference accelerator overrides for the task. Detailed below.
- memory String
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- taskRoleArn String
- The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role.
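As a sketch, task-level overrides replace selected task definition defaults at run time; exampleTaskDefinition and exampleTaskRole are hypothetical, and the input type path is an assumption:
import * as aws from "@pulumi/aws";
const overrideParameters: aws.types.input.pipes.PipeTargetParameters = {
    ecsTaskParameters: {
        taskDefinitionArn: exampleTaskDefinition.arn, // hypothetical
        overrides: {
            cpu: "512",                        // task-level CPU override, in CPU units
            memory: "1024",                    // task-level hard memory limit, in MiB
            taskRoleArn: exampleTaskRole.arn,  // hypothetical IAM role assumed by the task's containers
        },
    },
};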
PipeTargetParametersEcsTaskParametersOverridesContainerOverride, PipeTargetParametersEcsTaskParametersOverridesContainerOverrideArgs                  
- Commands List<string>
- List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- Cpu int
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- EnvironmentFiles List<PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFile>
- A list of files containing the environment variables to pass to a container, instead of the value from the container definition. Detailed below.
- Environments List<PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironment>
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
- Memory int
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- MemoryReservation int
- The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
- Name string
- Name of the container that receives the override.
- ResourceRequirements List<PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirement>
- The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
- Commands []string
- List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- Cpu int
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- EnvironmentFiles []PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFile
- A list of files containing the environment variables to pass to a container, instead of the value from the container definition. Detailed below.
- Environments []PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironment
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
- Memory int
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- MemoryReservation int
- The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
- Name string
- Name of the container that receives the override.
- ResourceRequirements []PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirement
- The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
- commands List<String>
- List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- cpu Integer
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- environmentFiles List<PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFile>
- A list of files containing the environment variables to pass to a container, instead of the value from the container definition. Detailed below.
- environments List<PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironment>
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
- memory Integer
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- memoryReservation Integer
- The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
- name String
- Name of the container that receives the override.
- resourceRequirements List<PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirement>
- The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
- commands string[]
- List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- cpu number
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- environmentFiles PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFile[]
- A list of files containing the environment variables to pass to a container, instead of the value from the container definition. Detailed below.
- environments PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironment[]
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
- memory number
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- memoryReservation number
- The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
- name string
- Name of the container that receives the override.
- resourceRequirements PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirement[]
- The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
- commands Sequence[str]
- List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- cpu int
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- environment_files Sequence[PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFile]
- A list of files containing the environment variables to pass to a container, instead of the value from the container definition. Detailed below.
- environments Sequence[PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironment]
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
- memory int
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- memory_reservation int
- The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
- name str
- Name of the container that receives the override.
- resource_requirements Sequence[PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirement]
- The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
- commands List<String>
- List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- cpu Number
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- environmentFiles List<Property Map>
- A list of files containing the environment variables to pass to a container, instead of the value from the container definition. Detailed below.
- environments List<Property Map>
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
- memory Number
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- memoryReservation Number
- The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
- name String
- Name of the container that receives the override. This parameter is required when any override is specified.
- resourceRequirements List<Property Map>
- The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironment, PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentArgs
- name String
- Name of the environment variable to set in the container.
- value String
- Value of the environment variable to set in the container.
PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFile, PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFileArgs
- Type string
- The file type to use. Environment files are objects in Amazon S3. The only supported value is s3.
- Value string
- Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
- Type string
- The file type to use. Environment files are objects in Amazon S3. The only supported value is s3.
- Value string
- Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
- type String
- The file type to use. Environment files are objects in Amazon S3. The only supported value is s3.
- value String
- Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
- type string
- The file type to use. Environment files are objects in Amazon S3. The only supported value is s3.
- value string
- Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
- type str
- The file type to use. Environment files are objects in Amazon S3. The only supported value is s3.
- value str
- Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
- type String
- The file type to use. Environment files are objects in Amazon S3. The only supported value is s3.
- value String
- Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirement, PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirementArgs                      
- Type string
- The type of resource to assign to a container. The only supported resource is GPU.
- Value string
- The value for the specified resource type, i.e. the number of physical GPUs the Amazon ECS container agent reserves for the container.
- Type string
- The type of resource to assign to a container. The only supported resource is GPU.
- Value string
- The value for the specified resource type, i.e. the number of physical GPUs the Amazon ECS container agent reserves for the container.
- type String
- The type of resource to assign to a container. The only supported resource is GPU.
- value String
- The value for the specified resource type, i.e. the number of physical GPUs the Amazon ECS container agent reserves for the container.
- type string
- The type of resource to assign to a container. The only supported resource is GPU.
- value string
- The value for the specified resource type, i.e. the number of physical GPUs the Amazon ECS container agent reserves for the container.
- type str
- The type of resource to assign to a container. The only supported resource is GPU.
- value str
- The value for the specified resource type, i.e. the number of physical GPUs the Amazon ECS container agent reserves for the container.
- type String
- The type of resource to assign to a container. The only supported resource is GPU.
- value String
- The value for the specified resource type, i.e. the number of physical GPUs the Amazon ECS container agent reserves for the container.
PipeTargetParametersEcsTaskParametersOverridesEphemeralStorage, PipeTargetParametersEcsTaskParametersOverridesEphemeralStorageArgs                  
- SizeInGib int
- The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
- SizeInGib int
- The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
- sizeInGib Integer
- The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
- sizeInGib number
- The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
- size_in_gib int
- The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
- sizeInGib Number
- The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
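As a worked example of how these override fields combine, here is a minimal sketch that sends SQS messages to an ECS task and overrides one container. Every ARN is a hypothetical placeholder, and the GPU reservation assumes a GPU-enabled container instance (ephemeral storage overrides apply to Fargate instead):
import * as aws from "@pulumi/aws";

// Hypothetical placeholders for existing resources.
const pipeRoleArn = "arn:aws:iam::123456789012:role/example-pipe-role";
const sourceQueueArn = "arn:aws:sqs:us-east-1:123456789012:source-queue";
const clusterArn = "arn:aws:ecs:us-east-1:123456789012:cluster/example";
const taskDefinitionArn = "arn:aws:ecs:us-east-1:123456789012:task-definition/example:1";

const ecsPipe = new aws.pipes.Pipe("ecs-example", {
    roleArn: pipeRoleArn,
    source: sourceQueueArn,
    target: clusterArn,
    targetParameters: {
        ecsTaskParameters: {
            taskDefinitionArn: taskDefinitionArn,
            launchType: "EC2",
            overrides: {
                containerOverrides: [{
                    name: "worker",                  // a container name is required with any override
                    commands: ["process", "--once"], // replaces the default command
                    cpu: 512,
                    memory: 1024,                    // hard limit in MiB
                    environments: [{ name: "MODE", value: "pipe" }],
                    resourceRequirements: [{ type: "GPU", value: "1" }], // needs a GPU-enabled instance
                }],
            },
        },
    },
});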
PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverride, PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverrideArgs                    
- DeviceName string
- The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
- DeviceType string
- The Elastic Inference accelerator type to use.
- DeviceName string
- The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
- DeviceType string
- The Elastic Inference accelerator type to use.
- deviceName String
- The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
- deviceType String
- The Elastic Inference accelerator type to use.
- deviceName string
- The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
- deviceType string
- The Elastic Inference accelerator type to use.
- device_name str
- The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
- device_type str
- The Elastic Inference accelerator type to use.
- deviceName String
- The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
- deviceType String
- The Elastic Inference accelerator type to use.
PipeTargetParametersEcsTaskParametersPlacementConstraint, PipeTargetParametersEcsTaskParametersPlacementConstraintArgs                
- Expression string
- A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. Maximum length of 2,000.
- Type string
- The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates. Valid Values: distinctInstance, memberOf.
- Expression string
- A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. Maximum length of 2,000.
- Type string
- The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates. Valid Values: distinctInstance, memberOf.
- expression String
- A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. Maximum length of 2,000.
- type String
- The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates. Valid Values: distinctInstance, memberOf.
- expression string
- A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. Maximum length of 2,000.
- type string
- The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates. Valid Values: distinctInstance, memberOf.
- expression str
- A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. Maximum length of 2,000.
- type str
- The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates. Valid Values: distinctInstance, memberOf.
- expression String
- A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. Maximum length of 2,000.
- type String
- The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates. Valid Values: distinctInstance, memberOf.
PipeTargetParametersEcsTaskParametersPlacementStrategy, PipeTargetParametersEcsTaskParametersPlacementStrategyArgs                
- Field string
- The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used. Maximum length of 255.
- Type string
- The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task). Valid Values: random, spread, binpack.
- Field string
- The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used. Maximum length of 255.
- Type string
- The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task). Valid Values: random, spread, binpack.
- field String
- The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used. Maximum length of 255.
- type String
- The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task). Valid Values: random, spread, binpack.
- field string
- The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used. Maximum length of 255.
- type string
- The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task). Valid Values: random, spread, binpack.
- field str
- The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used. Maximum length of 255.
- type str
- The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task). Valid Values: random, spread, binpack.
- field String
- The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used. Maximum length of 255.
- type String
- The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task). Valid Values: random, spread, binpack.
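To illustrate how constraints and strategies are declared together, here is a sketch of an ecsTaskParameters value that spreads tasks across Availability Zones, then binpacks on memory, while keeping tasks on distinct container instances. It assumes the EC2 launch type, the generated input type name shown in the headings above, and a hypothetical task definition ARN:
import * as aws from "@pulumi/aws";

// Sketch of a placement-aware ECS target; values are illustrative.
const ecsPlacement: aws.types.input.pipes.PipeTargetParametersEcsTaskParameters = {
    taskDefinitionArn: "arn:aws:ecs:us-east-1:123456789012:task-definition/example:1", // hypothetical
    launchType: "EC2",
    placementConstraints: [{ type: "distinctInstance" }],
    placementStrategies: [
        { type: "spread", field: "attribute:ecs.availability-zone" },
        { type: "binpack", field: "memory" },
    ],
};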
PipeTargetParametersEventbridgeEventBusParameters, PipeTargetParametersEventbridgeEventBusParametersArgs              
- DetailType string
- A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
- EndpointId string
- The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
- Resources List<string>
- List of AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
- Source string
- Source of the event.
- Time string
- The timestamp of the event, per RFC 3339, specified as a JSON path to a field in the event, e.g. $.detail.timestamp. If no timestamp is provided, the timestamp of the PutEvents call is used.
- DetailType string
- A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
- EndpointId string
- The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
- Resources []string
- List of AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
- Source string
- Source of the event.
- Time string
- The timestamp of the event, per RFC 3339, specified as a JSON path to a field in the event, e.g. $.detail.timestamp. If no timestamp is provided, the timestamp of the PutEvents call is used.
- detailType String
- A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
- endpointId String
- The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
- resources List<String>
- List of AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
- source String
- Source of the event.
- time String
- The timestamp of the event, per RFC 3339, specified as a JSON path to a field in the event, e.g. $.detail.timestamp. If no timestamp is provided, the timestamp of the PutEvents call is used.
- detailType string
- A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
- endpointId string
- The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
- resources string[]
- List of AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
- source string
- Source of the event.
- time string
- The timestamp of the event, per RFC 3339, specified as a JSON path to a field in the event, e.g. $.detail.timestamp. If no timestamp is provided, the timestamp of the PutEvents call is used.
- detail_type str
- A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
- endpoint_id str
- The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
- resources Sequence[str]
- List of AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
- source str
- Source of the event.
- time str
- The timestamp of the event, per RFC 3339, specified as a JSON path to a field in the event, e.g. $.detail.timestamp. If no timestamp is provided, the timestamp of the PutEvents call is used.
- detailType String
- A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
- endpointId String
- The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
- resources List<String>
- List of AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
- source String
- Source of the event.
- time String
- The timestamp of the event, per RFC 3339, specified as a JSON path to a field in the event, e.g. $.detail.timestamp. If no timestamp is provided, the timestamp of the PutEvents call is used.
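For instance, a minimal sketch of a pipe that forwards to an event bus, stamping each event with a detail type, source, and a timestamp read from the event body (the role and queue ARNs are hypothetical placeholders):
import * as aws from "@pulumi/aws";

const bus = new aws.cloudwatch.EventBus("example", {name: "example-bus"});

// Hypothetical placeholders for an existing pipe role and source queue.
const pipeRoleArn = "arn:aws:iam::123456789012:role/example-pipe-role";
const sourceQueueArn = "arn:aws:sqs:us-east-1:123456789012:source-queue";

const busPipe = new aws.pipes.Pipe("bus-example", {
    roleArn: pipeRoleArn,
    source: sourceQueueArn,
    target: bus.arn,
    targetParameters: {
        eventbridgeEventBusParameters: {
            detailType: "OrderCreated",
            source: "com.example.orders",
            time: "$.detail.timestamp", // JSON path; the PutEvents time is used if omitted
        },
    },
});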
PipeTargetParametersHttpParameters, PipeTargetParametersHttpParametersArgs          
- HeaderParameters Dictionary<string, string>
- Key-value mapping of the headers to send as part of the request to the API Gateway REST API or EventBridge ApiDestination.
- PathParameterValues string
- The path parameter value used to populate the API Gateway REST API or EventBridge ApiDestination path wildcard ("*").
- QueryStringParameters Dictionary<string, string>
- Key-value mapping of the query strings to send as part of the request to the API Gateway REST API or EventBridge ApiDestination.
- HeaderParameters map[string]string
- Key-value mapping of the headers to send as part of the request to the API Gateway REST API or EventBridge ApiDestination.
- PathParameterValues string
- The path parameter value used to populate the API Gateway REST API or EventBridge ApiDestination path wildcard ("*").
- QueryStringParameters map[string]string
- Key-value mapping of the query strings to send as part of the request to the API Gateway REST API or EventBridge ApiDestination.
- headerParameters Map<String,String>
- Key-value mapping of the headers to send as part of the request to the API Gateway REST API or EventBridge ApiDestination.
- pathParameterValues String
- The path parameter value used to populate the API Gateway REST API or EventBridge ApiDestination path wildcard ("*").
- queryStringParameters Map<String,String>
- Key-value mapping of the query strings to send as part of the request to the API Gateway REST API or EventBridge ApiDestination.
- headerParameters {[key: string]: string}
- Key-value mapping of the headers to send as part of the request to the API Gateway REST API or EventBridge ApiDestination.
- pathParameterValues string
- The path parameter value used to populate the API Gateway REST API or EventBridge ApiDestination path wildcard ("*").
- queryStringParameters {[key: string]: string}
- Key-value mapping of the query strings to send as part of the request to the API Gateway REST API or EventBridge ApiDestination.
- header_parameters Mapping[str, str]
- Key-value mapping of the headers to send as part of the request to the API Gateway REST API or EventBridge ApiDestination.
- path_parameter_values str
- The path parameter value used to populate the API Gateway REST API or EventBridge ApiDestination path wildcard ("*").
- query_string_parameters Mapping[str, str]
- Key-value mapping of the query strings to send as part of the request to the API Gateway REST API or EventBridge ApiDestination.
- headerParameters Map<String>
- Key-value mapping of the headers to send as part of the request to the API Gateway REST API or EventBridge ApiDestination.
- pathParameterValues String
- The path parameter value used to populate the API Gateway REST API or EventBridge ApiDestination path wildcard ("*").
- queryStringParameters Map<String>
- Key-value mapping of the query strings to send as part of the request to the API Gateway REST API or EventBridge ApiDestination.
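As a sketch, a pipe targeting an EventBridge API destination can decorate each request with headers, a path wildcard value, and query strings; all ARNs here are hypothetical placeholders:
import * as aws from "@pulumi/aws";

const httpPipe = new aws.pipes.Pipe("http-example", {
    roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",                       // hypothetical
    source: "arn:aws:sqs:us-east-1:123456789012:source-queue",                         // hypothetical
    target: "arn:aws:events:us-east-1:123456789012:api-destination/example/abcd1234",  // hypothetical
    targetParameters: {
        httpParameters: {
            headerParameters: {
                "X-Pipe-Source": "orders",
            },
            pathParameterValues: "orders", // substituted for the path wildcard ("*")
            queryStringParameters: {
                mode: "async",
            },
        },
    },
});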
PipeTargetParametersKinesisStreamParameters, PipeTargetParametersKinesisStreamParametersArgs            
- PartitionKey string
- Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
- PartitionKey string
- Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
- partitionKey String
- Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
- partitionKey string
- Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
- partition_key str
- Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
- partitionKey String
- Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
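Because of that hashing behavior, a stable partition key preserves per-key ordering within a shard. A minimal sketch (ARNs are hypothetical placeholders; it assumes the source event carries a messageId field):
import * as aws from "@pulumi/aws";

const kinesisPipe = new aws.pipes.Pipe("kinesis-example", {
    roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",      // hypothetical
    source: "arn:aws:sqs:us-east-1:123456789012:source-queue",        // hypothetical
    target: "arn:aws:kinesis:us-east-1:123456789012:stream/example",  // hypothetical
    targetParameters: {
        kinesisStreamParameters: {
            // Records with the same key land on the same shard; here the key is
            // read dynamically from the event with a JSON path.
            partitionKey: "$.messageId",
        },
    },
});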
PipeTargetParametersLambdaFunctionParameters, PipeTargetParametersLambdaFunctionParametersArgs            
- InvocationType string
- Specify whether to invoke the function synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
- InvocationType string
- Specify whether to invoke the function synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
- invocationType String
- Specify whether to invoke the function synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
- invocationType string
- Specify whether to invoke the function synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
- invocation_type str
- Specify whether to invoke the function synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
- invocationType String
- Specify whether to invoke the function synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
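A minimal sketch of an asynchronous invocation, where the pipe does not wait on the function's result (ARNs are hypothetical placeholders):
import * as aws from "@pulumi/aws";

const lambdaPipe = new aws.pipes.Pipe("lambda-example", {
    roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",       // hypothetical
    source: "arn:aws:sqs:us-east-1:123456789012:source-queue",         // hypothetical
    target: "arn:aws:lambda:us-east-1:123456789012:function:example",  // hypothetical
    targetParameters: {
        lambdaFunctionParameters: {
            invocationType: "FIRE_AND_FORGET", // do not wait for the function's response
        },
    },
});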
PipeTargetParametersRedshiftDataParameters, PipeTargetParametersRedshiftDataParametersArgs            
- Database string
- The name of the database. Required when authenticating using temporary credentials.
- Sqls List<string>
- List of SQL statement texts to run, each with a maximum length of 100,000.
- DbUser string
- The database user name. Required when authenticating using temporary credentials.
- SecretManagerArn string
- The name or ARN of the secret that enables access to the database. Required when authenticating using Secrets Manager.
- StatementName string
- The name of the SQL statement. You can name the SQL statement when you create it to identify the query.
- WithEvent bool
- Indicates whether to send an event back to EventBridge after the SQL statement runs.
- Database string
- The name of the database. Required when authenticating using temporary credentials.
- Sqls []string
- List of SQL statement texts to run, each with a maximum length of 100,000.
- DbUser string
- The database user name. Required when authenticating using temporary credentials.
- SecretManagerArn string
- The name or ARN of the secret that enables access to the database. Required when authenticating using Secrets Manager.
- StatementName string
- The name of the SQL statement. You can name the SQL statement when you create it to identify the query.
- WithEvent bool
- Indicates whether to send an event back to EventBridge after the SQL statement runs.
- database String
- The name of the database. Required when authenticating using temporary credentials.
- sqls List<String>
- List of SQL statement texts to run, each with a maximum length of 100,000.
- dbUser String
- The database user name. Required when authenticating using temporary credentials.
- secretManagerArn String
- The name or ARN of the secret that enables access to the database. Required when authenticating using Secrets Manager.
- statementName String
- The name of the SQL statement. You can name the SQL statement when you create it to identify the query.
- withEvent Boolean
- Indicates whether to send an event back to EventBridge after the SQL statement runs.
- database string
- The name of the database. Required when authenticating using temporary credentials.
- sqls string[]
- List of SQL statement texts to run, each with a maximum length of 100,000.
- dbUser string
- The database user name. Required when authenticating using temporary credentials.
- secretManagerArn string
- The name or ARN of the secret that enables access to the database. Required when authenticating using Secrets Manager.
- statementName string
- The name of the SQL statement. You can name the SQL statement when you create it to identify the query.
- withEvent boolean
- Indicates whether to send an event back to EventBridge after the SQL statement runs.
- database str
- The name of the database. Required when authenticating using temporary credentials.
- sqls Sequence[str]
- List of SQL statement texts to run, each with a maximum length of 100,000.
- db_user str
- The database user name. Required when authenticating using temporary credentials.
- secret_manager_arn str
- The name or ARN of the secret that enables access to the database. Required when authenticating using Secrets Manager.
- statement_name str
- The name of the SQL statement. You can name the SQL statement when you create it to identify the query.
- with_event bool
- Indicates whether to send an event back to EventBridge after the SQL statement runs.
- database String
- The name of the database. Required when authenticating using temporary credentials.
- sqls List<String>
- List of SQL statement texts to run, each with a maximum length of 100,000.
- dbUser String
- The database user name. Required when authenticating using temporary credentials.
- secretManagerArn String
- The name or ARN of the secret that enables access to the database. Required when authenticating using Secrets Manager.
- statementName String
- The name of the SQL statement. You can name the SQL statement when you create it to identify the query.
- withEvent Boolean
- Indicates whether to send an event back to EventBridge after the SQL statement runs.
PipeTargetParametersSagemakerPipelineParameters, PipeTargetParametersSagemakerPipelineParametersArgs            
- PipelineParameters List<PipeTargetParametersSagemakerPipelineParametersPipelineParameter>
- List of Parameter names and values for SageMaker AI Model Building Pipeline execution. Detailed below.
- PipelineParameters []PipeTargetParametersSagemakerPipelineParametersPipelineParameter
- List of Parameter names and values for SageMaker AI Model Building Pipeline execution. Detailed below.
- pipelineParameters List<PipeTargetParametersSagemakerPipelineParametersPipelineParameter>
- List of Parameter names and values for SageMaker AI Model Building Pipeline execution. Detailed below.
- pipelineParameters PipeTargetParametersSagemakerPipelineParametersPipelineParameter[]
- List of Parameter names and values for SageMaker AI Model Building Pipeline execution. Detailed below.
- pipeline_parameters Sequence[PipeTargetParametersSagemakerPipelineParametersPipelineParameter]
- List of Parameter names and values for SageMaker AI Model Building Pipeline execution. Detailed below.
- pipelineParameters List<Property Map>
- List of Parameter names and values for SageMaker AI Model Building Pipeline execution. Detailed below.
PipeTargetParametersSagemakerPipelineParametersPipelineParameter, PipeTargetParametersSagemakerPipelineParametersPipelineParameterArgs
- name String
- Name of parameter to start execution of a SageMaker AI Model Building Pipeline. Maximum length of 256.
- value String
- Value of parameter to start execution of a SageMaker AI Model Building Pipeline. Maximum length of 1024.
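A minimal sketch passing two execution parameters; the pipeline ARN, parameter names, and values are hypothetical placeholders:
import * as aws from "@pulumi/aws";

const sagemakerPipe = new aws.pipes.Pipe("sagemaker-example", {
    roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",         // hypothetical
    source: "arn:aws:sqs:us-east-1:123456789012:source-queue",           // hypothetical
    target: "arn:aws:sagemaker:us-east-1:123456789012:pipeline/example", // hypothetical
    targetParameters: {
        sagemakerPipelineParameters: {
            pipelineParameters: [
                { name: "InputPrefix", value: "s3://example-bucket/incoming/" },
                { name: "Mode", value: "incremental" },
            ],
        },
    },
});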
PipeTargetParametersSqsQueueParameters, PipeTargetParametersSqsQueueParametersArgs            
- MessageDeduplicationId string
- This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
- MessageGroupId string
- The FIFO message group ID to use as the target.
- MessageDeduplicationId string
- This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
- MessageGroupId string
- The FIFO message group ID to use as the target.
- messageDeduplicationId String
- This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
- messageGroupId String
- The FIFO message group ID to use as the target.
- messageDeduplicationId string
- This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
- messageGroupId string
- The FIFO message group ID to use as the target.
- message_deduplication_id str
- This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
- message_group_id str
- The FIFO message group ID to use as the target.
- messageDeduplicationId String
- This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
- messageGroupId String
- The FIFO message group ID to use as the target.
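A sketch targeting a FIFO queue, where both fields come into play; the ARNs are hypothetical placeholders and the deduplication ID is read from the event, assuming it carries a messageId field:
import * as aws from "@pulumi/aws";

const fifoPipe = new aws.pipes.Pipe("fifo-example", {
    roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",     // hypothetical
    source: "arn:aws:sqs:us-east-1:123456789012:source-queue",       // hypothetical
    target: "arn:aws:sqs:us-east-1:123456789012:target-queue.fifo",  // hypothetical
    targetParameters: {
        sqsQueueParameters: {
            messageGroupId: "orders",               // all messages share one ordered group
            messageDeduplicationId: "$.messageId",  // JSON path into the event
        },
    },
});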
PipeTargetParametersStepFunctionStateMachineParameters, PipeTargetParametersStepFunctionStateMachineParametersArgs                
- InvocationType string
- Specify whether to invoke the Step Functions state machine synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
- InvocationType string
- Specify whether to invoke the Step Functions state machine synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
- invocationType String
- Specify whether to invoke the Step Functions state machine synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
- invocationType string
- Specify whether to invoke the Step Functions state machine synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
- invocation_type str
- Specify whether to invoke the Step Functions state machine synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
- invocationType String
- Specify whether to invoke the Step Functions state machine synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
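A sketch that starts a state machine execution per event batch without waiting for the result (ARNs are hypothetical placeholders). Note that Standard state machines support only FIRE_AND_FORGET; REQUEST_RESPONSE requires an Express workflow:
import * as aws from "@pulumi/aws";

const sfnPipe = new aws.pipes.Pipe("sfn-example", {
    roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",           // hypothetical
    source: "arn:aws:sqs:us-east-1:123456789012:source-queue",             // hypothetical
    target: "arn:aws:states:us-east-1:123456789012:stateMachine:example",  // hypothetical
    targetParameters: {
        stepFunctionStateMachineParameters: {
            invocationType: "FIRE_AND_FORGET",
        },
    },
});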
Import
Using pulumi import, import pipes using the name. For example:
$ pulumi import aws:pipes/pipe:Pipe example my-pipe
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- AWS Classic pulumi/pulumi-aws
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the aws Terraform Provider.