aws.appflow.Flow
Provides an AppFlow flow resource.
Example Usage
TypeScript
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const exampleSourceBucketV2 = new aws.s3.BucketV2("example_source", {bucket: "example-source"});
const exampleSource = aws.iam.getPolicyDocument({
    statements: [{
        sid: "AllowAppFlowSourceActions",
        effect: "Allow",
        principals: [{
            type: "Service",
            identifiers: ["appflow.amazonaws.com"],
        }],
        actions: [
            "s3:ListBucket",
            "s3:GetObject",
        ],
        resources: [
            "arn:aws:s3:::example-source",
            "arn:aws:s3:::example-source/*",
        ],
    }],
});
const exampleSourceBucketPolicy = new aws.s3.BucketPolicy("example_source", {
    bucket: exampleSourceBucketV2.id,
    policy: exampleSource.then(exampleSource => exampleSource.json),
});
const example = new aws.s3.BucketObjectv2("example", {
    bucket: exampleSourceBucketV2.id,
    key: "example_source.csv",
    source: new pulumi.asset.FileAsset("example_source.csv"),
});
const exampleDestinationBucketV2 = new aws.s3.BucketV2("example_destination", {bucket: "example-destination"});
const exampleDestination = aws.iam.getPolicyDocument({
    statements: [{
        sid: "AllowAppFlowDestinationActions",
        effect: "Allow",
        principals: [{
            type: "Service",
            identifiers: ["appflow.amazonaws.com"],
        }],
        actions: [
            "s3:PutObject",
            "s3:AbortMultipartUpload",
            "s3:ListMultipartUploadParts",
            "s3:ListBucketMultipartUploads",
            "s3:GetBucketAcl",
            "s3:PutObjectAcl",
        ],
        resources: [
            "arn:aws:s3:::example-destination",
            "arn:aws:s3:::example-destination/*",
        ],
    }],
});
const exampleDestinationBucketPolicy = new aws.s3.BucketPolicy("example_destination", {
    bucket: exampleDestinationBucketV2.id,
    policy: exampleDestination.then(exampleDestination => exampleDestination.json),
});
const exampleFlow = new aws.appflow.Flow("example", {
    name: "example",
    sourceFlowConfig: {
        connectorType: "S3",
        sourceConnectorProperties: {
            s3: {
                bucketName: exampleSourceBucketPolicy.bucket,
                bucketPrefix: "example",
            },
        },
    },
    destinationFlowConfigs: [{
        connectorType: "S3",
        destinationConnectorProperties: {
            s3: {
                bucketName: exampleDestinationBucketPolicy.bucket,
                s3OutputFormatConfig: {
                    prefixConfig: {
                        prefixType: "PATH",
                    },
                },
            },
        },
    }],
    tasks: [{
        sourceFields: ["exampleField"],
        destinationField: "exampleField",
        taskType: "Map",
        connectorOperators: [{
            s3: "NO_OP",
        }],
    }],
    triggerConfig: {
        triggerType: "OnDemand",
    },
});
Python
import pulumi
import pulumi_aws as aws
example_source_bucket_v2 = aws.s3.BucketV2("example_source", bucket="example-source")
example_source = aws.iam.get_policy_document(statements=[{
    "sid": "AllowAppFlowSourceActions",
    "effect": "Allow",
    "principals": [{
        "type": "Service",
        "identifiers": ["appflow.amazonaws.com"],
    }],
    "actions": [
        "s3:ListBucket",
        "s3:GetObject",
    ],
    "resources": [
        "arn:aws:s3:::example-source",
        "arn:aws:s3:::example-source/*",
    ],
}])
example_source_bucket_policy = aws.s3.BucketPolicy("example_source",
    bucket=example_source_bucket_v2.id,
    policy=example_source.json)
example = aws.s3.BucketObjectv2("example",
    bucket=example_source_bucket_v2.id,
    key="example_source.csv",
    source=pulumi.FileAsset("example_source.csv"))
example_destination_bucket_v2 = aws.s3.BucketV2("example_destination", bucket="example-destination")
example_destination = aws.iam.get_policy_document(statements=[{
    "sid": "AllowAppFlowDestinationActions",
    "effect": "Allow",
    "principals": [{
        "type": "Service",
        "identifiers": ["appflow.amazonaws.com"],
    }],
    "actions": [
        "s3:PutObject",
        "s3:AbortMultipartUpload",
        "s3:ListMultipartUploadParts",
        "s3:ListBucketMultipartUploads",
        "s3:GetBucketAcl",
        "s3:PutObjectAcl",
    ],
    "resources": [
        "arn:aws:s3:::example-destination",
        "arn:aws:s3:::example-destination/*",
    ],
}])
example_destination_bucket_policy = aws.s3.BucketPolicy("example_destination",
    bucket=example_destination_bucket_v2.id,
    policy=example_destination.json)
example_flow = aws.appflow.Flow("example",
    name="example",
    source_flow_config={
        "connector_type": "S3",
        "source_connector_properties": {
            "s3": {
                "bucket_name": example_source_bucket_policy.bucket,
                "bucket_prefix": "example",
            },
        },
    },
    destination_flow_configs=[{
        "connector_type": "S3",
        "destination_connector_properties": {
            "s3": {
                "bucket_name": example_destination_bucket_policy.bucket,
                "s3_output_format_config": {
                    "prefix_config": {
                        "prefix_type": "PATH",
                    },
                },
            },
        },
    }],
    tasks=[{
        "source_fields": ["exampleField"],
        "destination_field": "exampleField",
        "task_type": "Map",
        "connector_operators": [{
            "s3": "NO_OP",
        }],
    }],
    trigger_config={
        "trigger_type": "OnDemand",
    })
Go
package main
import (
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/appflow"
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/iam"
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/s3"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		exampleSourceBucketV2, err := s3.NewBucketV2(ctx, "example_source", &s3.BucketV2Args{
			Bucket: pulumi.String("example-source"),
		})
		if err != nil {
			return err
		}
		exampleSource, err := iam.GetPolicyDocument(ctx, &iam.GetPolicyDocumentArgs{
			Statements: []iam.GetPolicyDocumentStatement{
				{
					Sid:    pulumi.StringRef("AllowAppFlowSourceActions"),
					Effect: pulumi.StringRef("Allow"),
					Principals: []iam.GetPolicyDocumentStatementPrincipal{
						{
							Type: "Service",
							Identifiers: []string{
								"appflow.amazonaws.com",
							},
						},
					},
					Actions: []string{
						"s3:ListBucket",
						"s3:GetObject",
					},
					Resources: []string{
						"arn:aws:s3:::example-source",
						"arn:aws:s3:::example-source/*",
					},
				},
			},
		}, nil)
		if err != nil {
			return err
		}
		exampleSourceBucketPolicy, err := s3.NewBucketPolicy(ctx, "example_source", &s3.BucketPolicyArgs{
			Bucket: exampleSourceBucketV2.ID(),
			Policy: pulumi.String(exampleSource.Json),
		})
		if err != nil {
			return err
		}
		_, err = s3.NewBucketObjectv2(ctx, "example", &s3.BucketObjectv2Args{
			Bucket: exampleSourceBucketV2.ID(),
			Key:    pulumi.String("example_source.csv"),
			Source: pulumi.NewFileAsset("example_source.csv"),
		})
		if err != nil {
			return err
		}
		exampleDestinationBucketV2, err := s3.NewBucketV2(ctx, "example_destination", &s3.BucketV2Args{
			Bucket: pulumi.String("example-destination"),
		})
		if err != nil {
			return err
		}
		exampleDestination, err := iam.GetPolicyDocument(ctx, &iam.GetPolicyDocumentArgs{
			Statements: []iam.GetPolicyDocumentStatement{
				{
					Sid:    pulumi.StringRef("AllowAppFlowDestinationActions"),
					Effect: pulumi.StringRef("Allow"),
					Principals: []iam.GetPolicyDocumentStatementPrincipal{
						{
							Type: "Service",
							Identifiers: []string{
								"appflow.amazonaws.com",
							},
						},
					},
					Actions: []string{
						"s3:PutObject",
						"s3:AbortMultipartUpload",
						"s3:ListMultipartUploadParts",
						"s3:ListBucketMultipartUploads",
						"s3:GetBucketAcl",
						"s3:PutObjectAcl",
					},
					Resources: []string{
						"arn:aws:s3:::example-destination",
						"arn:aws:s3:::example-destination/*",
					},
				},
			},
		}, nil)
		if err != nil {
			return err
		}
		exampleDestinationBucketPolicy, err := s3.NewBucketPolicy(ctx, "example_destination", &s3.BucketPolicyArgs{
			Bucket: exampleDestinationBucketV2.ID(),
			Policy: pulumi.String(exampleDestination.Json),
		})
		if err != nil {
			return err
		}
		_, err = appflow.NewFlow(ctx, "example", &appflow.FlowArgs{
			Name: pulumi.String("example"),
			SourceFlowConfig: &appflow.FlowSourceFlowConfigArgs{
				ConnectorType: pulumi.String("S3"),
				SourceConnectorProperties: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesArgs{
					S3: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesS3Args{
						BucketName:   exampleSourceBucketPolicy.Bucket,
						BucketPrefix: pulumi.String("example"),
					},
				},
			},
			DestinationFlowConfigs: appflow.FlowDestinationFlowConfigArray{
				&appflow.FlowDestinationFlowConfigArgs{
					ConnectorType: pulumi.String("S3"),
					DestinationConnectorProperties: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs{
						S3: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args{
							BucketName: exampleDestinationBucketPolicy.Bucket,
							S3OutputFormatConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs{
								PrefixConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs{
									PrefixType: pulumi.String("PATH"),
								},
							},
						},
					},
				},
			},
			Tasks: appflow.FlowTaskArray{
				&appflow.FlowTaskArgs{
					SourceFields: pulumi.StringArray{
						pulumi.String("exampleField"),
					},
					DestinationField: pulumi.String("exampleField"),
					TaskType:         pulumi.String("Map"),
					ConnectorOperators: appflow.FlowTaskConnectorOperatorArray{
						&appflow.FlowTaskConnectorOperatorArgs{
							S3: pulumi.String("NO_OP"),
						},
					},
				},
			},
			TriggerConfig: &appflow.FlowTriggerConfigArgs{
				TriggerType: pulumi.String("OnDemand"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
C#
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() => 
{
    var exampleSourceBucketV2 = new Aws.S3.BucketV2("example_source", new()
    {
        Bucket = "example-source",
    });
    var exampleSource = Aws.Iam.GetPolicyDocument.Invoke(new()
    {
        Statements = new[]
        {
            new Aws.Iam.Inputs.GetPolicyDocumentStatementInputArgs
            {
                Sid = "AllowAppFlowSourceActions",
                Effect = "Allow",
                Principals = new[]
                {
                    new Aws.Iam.Inputs.GetPolicyDocumentStatementPrincipalInputArgs
                    {
                        Type = "Service",
                        Identifiers = new[]
                        {
                            "appflow.amazonaws.com",
                        },
                    },
                },
                Actions = new[]
                {
                    "s3:ListBucket",
                    "s3:GetObject",
                },
                Resources = new[]
                {
                    "arn:aws:s3:::example-source",
                    "arn:aws:s3:::example-source/*",
                },
            },
        },
    });
    var exampleSourceBucketPolicy = new Aws.S3.BucketPolicy("example_source", new()
    {
        Bucket = exampleSourceBucketV2.Id,
        Policy = exampleSource.Apply(getPolicyDocumentResult => getPolicyDocumentResult.Json),
    });
    var example = new Aws.S3.BucketObjectv2("example", new()
    {
        Bucket = exampleSourceBucketV2.Id,
        Key = "example_source.csv",
        Source = new FileAsset("example_source.csv"),
    });
    var exampleDestinationBucketV2 = new Aws.S3.BucketV2("example_destination", new()
    {
        Bucket = "example-destination",
    });
    var exampleDestination = Aws.Iam.GetPolicyDocument.Invoke(new()
    {
        Statements = new[]
        {
            new Aws.Iam.Inputs.GetPolicyDocumentStatementInputArgs
            {
                Sid = "AllowAppFlowDestinationActions",
                Effect = "Allow",
                Principals = new[]
                {
                    new Aws.Iam.Inputs.GetPolicyDocumentStatementPrincipalInputArgs
                    {
                        Type = "Service",
                        Identifiers = new[]
                        {
                            "appflow.amazonaws.com",
                        },
                    },
                },
                Actions = new[]
                {
                    "s3:PutObject",
                    "s3:AbortMultipartUpload",
                    "s3:ListMultipartUploadParts",
                    "s3:ListBucketMultipartUploads",
                    "s3:GetBucketAcl",
                    "s3:PutObjectAcl",
                },
                Resources = new[]
                {
                    "arn:aws:s3:::example-destination",
                    "arn:aws:s3:::example-destination/*",
                },
            },
        },
    });
    var exampleDestinationBucketPolicy = new Aws.S3.BucketPolicy("example_destination", new()
    {
        Bucket = exampleDestinationBucketV2.Id,
        Policy = exampleDestination.Apply(getPolicyDocumentResult => getPolicyDocumentResult.Json),
    });
    var exampleFlow = new Aws.AppFlow.Flow("example", new()
    {
        Name = "example",
        SourceFlowConfig = new Aws.AppFlow.Inputs.FlowSourceFlowConfigArgs
        {
            ConnectorType = "S3",
            SourceConnectorProperties = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesArgs
            {
                S3 = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesS3Args
                {
                    BucketName = exampleSourceBucketPolicy.Bucket,
                    BucketPrefix = "example",
                },
            },
        },
        DestinationFlowConfigs = new[]
        {
            new Aws.AppFlow.Inputs.FlowDestinationFlowConfigArgs
            {
                ConnectorType = "S3",
                DestinationConnectorProperties = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs
                {
                    S3 = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args
                    {
                        BucketName = exampleDestinationBucketPolicy.Bucket,
                        S3OutputFormatConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs
                        {
                            PrefixConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs
                            {
                                PrefixType = "PATH",
                            },
                        },
                    },
                },
            },
        },
        Tasks = new[]
        {
            new Aws.AppFlow.Inputs.FlowTaskArgs
            {
                SourceFields = new[]
                {
                    "exampleField",
                },
                DestinationField = "exampleField",
                TaskType = "Map",
                ConnectorOperators = new[]
                {
                    new Aws.AppFlow.Inputs.FlowTaskConnectorOperatorArgs
                    {
                        S3 = "NO_OP",
                    },
                },
            },
        },
        TriggerConfig = new Aws.AppFlow.Inputs.FlowTriggerConfigArgs
        {
            TriggerType = "OnDemand",
        },
    });
});
Java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.s3.BucketV2;
import com.pulumi.aws.s3.BucketV2Args;
import com.pulumi.aws.iam.IamFunctions;
import com.pulumi.aws.iam.inputs.GetPolicyDocumentArgs;
import com.pulumi.aws.iam.inputs.GetPolicyDocumentStatementArgs;
import com.pulumi.aws.iam.inputs.GetPolicyDocumentStatementPrincipalArgs;
import com.pulumi.aws.s3.BucketPolicy;
import com.pulumi.aws.s3.BucketPolicyArgs;
import com.pulumi.aws.s3.BucketObjectv2;
import com.pulumi.aws.s3.BucketObjectv2Args;
import com.pulumi.aws.appflow.Flow;
import com.pulumi.aws.appflow.FlowArgs;
import com.pulumi.aws.appflow.inputs.FlowSourceFlowConfigArgs;
import com.pulumi.aws.appflow.inputs.FlowSourceFlowConfigSourceConnectorPropertiesArgs;
import com.pulumi.aws.appflow.inputs.FlowSourceFlowConfigSourceConnectorPropertiesS3Args;
import com.pulumi.aws.appflow.inputs.FlowDestinationFlowConfigArgs;
import com.pulumi.aws.appflow.inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs;
import com.pulumi.aws.appflow.inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args;
import com.pulumi.aws.appflow.inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs;
import com.pulumi.aws.appflow.inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs;
import com.pulumi.aws.appflow.inputs.FlowTaskArgs;
import com.pulumi.aws.appflow.inputs.FlowTaskConnectorOperatorArgs;
import com.pulumi.aws.appflow.inputs.FlowTriggerConfigArgs;
import com.pulumi.asset.FileAsset;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var exampleSourceBucketV2 = new BucketV2("exampleSourceBucketV2", BucketV2Args.builder()
            .bucket("example-source")
            .build());
        final var exampleSource = IamFunctions.getPolicyDocument(GetPolicyDocumentArgs.builder()
            .statements(GetPolicyDocumentStatementArgs.builder()
                .sid("AllowAppFlowSourceActions")
                .effect("Allow")
                .principals(GetPolicyDocumentStatementPrincipalArgs.builder()
                    .type("Service")
                    .identifiers("appflow.amazonaws.com")
                    .build())
                .actions(                
                    "s3:ListBucket",
                    "s3:GetObject")
                .resources(                
                    "arn:aws:s3:::example-source",
                    "arn:aws:s3:::example-source/*")
                .build())
            .build());
        var exampleSourceBucketPolicy = new BucketPolicy("exampleSourceBucketPolicy", BucketPolicyArgs.builder()
            .bucket(exampleSourceBucketV2.id())
            .policy(exampleSource.applyValue(getPolicyDocumentResult -> getPolicyDocumentResult.json()))
            .build());
        var example = new BucketObjectv2("example", BucketObjectv2Args.builder()
            .bucket(exampleSourceBucketV2.id())
            .key("example_source.csv")
            .source(new FileAsset("example_source.csv"))
            .build());
        var exampleDestinationBucketV2 = new BucketV2("exampleDestinationBucketV2", BucketV2Args.builder()
            .bucket("example-destination")
            .build());
        final var exampleDestination = IamFunctions.getPolicyDocument(GetPolicyDocumentArgs.builder()
            .statements(GetPolicyDocumentStatementArgs.builder()
                .sid("AllowAppFlowDestinationActions")
                .effect("Allow")
                .principals(GetPolicyDocumentStatementPrincipalArgs.builder()
                    .type("Service")
                    .identifiers("appflow.amazonaws.com")
                    .build())
                .actions(                
                    "s3:PutObject",
                    "s3:AbortMultipartUpload",
                    "s3:ListMultipartUploadParts",
                    "s3:ListBucketMultipartUploads",
                    "s3:GetBucketAcl",
                    "s3:PutObjectAcl")
                .resources(                
                    "arn:aws:s3:::example-destination",
                    "arn:aws:s3:::example-destination/*")
                .build())
            .build());
        var exampleDestinationBucketPolicy = new BucketPolicy("exampleDestinationBucketPolicy", BucketPolicyArgs.builder()
            .bucket(exampleDestinationBucketV2.id())
            .policy(exampleDestination.applyValue(getPolicyDocumentResult -> getPolicyDocumentResult.json()))
            .build());
        var exampleFlow = new Flow("exampleFlow", FlowArgs.builder()
            .name("example")
            .sourceFlowConfig(FlowSourceFlowConfigArgs.builder()
                .connectorType("S3")
                .sourceConnectorProperties(FlowSourceFlowConfigSourceConnectorPropertiesArgs.builder()
                    .s3(FlowSourceFlowConfigSourceConnectorPropertiesS3Args.builder()
                        .bucketName(exampleSourceBucketPolicy.bucket())
                        .bucketPrefix("example")
                        .build())
                    .build())
                .build())
            .destinationFlowConfigs(FlowDestinationFlowConfigArgs.builder()
                .connectorType("S3")
                .destinationConnectorProperties(FlowDestinationFlowConfigDestinationConnectorPropertiesArgs.builder()
                    .s3(FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args.builder()
                        .bucketName(exampleDestinationBucketPolicy.bucket())
                        .s3OutputFormatConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs.builder()
                            .prefixConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs.builder()
                                .prefixType("PATH")
                                .build())
                            .build())
                        .build())
                    .build())
                .build())
            .tasks(FlowTaskArgs.builder()
                .sourceFields("exampleField")
                .destinationField("exampleField")
                .taskType("Map")
                .connectorOperators(FlowTaskConnectorOperatorArgs.builder()
                    .s3("NO_OP")
                    .build())
                .build())
            .triggerConfig(FlowTriggerConfigArgs.builder()
                .triggerType("OnDemand")
                .build())
            .build());
    }
}
YAML
resources:
  exampleSourceBucketV2:
    type: aws:s3:BucketV2
    name: example_source
    properties:
      bucket: example-source
  exampleSourceBucketPolicy:
    type: aws:s3:BucketPolicy
    name: example_source
    properties:
      bucket: ${exampleSourceBucketV2.id}
      policy: ${exampleSource.json}
  example:
    type: aws:s3:BucketObjectv2
    properties:
      bucket: ${exampleSourceBucketV2.id}
      key: example_source.csv
      source:
        fn::FileAsset: example_source.csv
  exampleDestinationBucketV2:
    type: aws:s3:BucketV2
    name: example_destination
    properties:
      bucket: example-destination
  exampleDestinationBucketPolicy:
    type: aws:s3:BucketPolicy
    name: example_destination
    properties:
      bucket: ${exampleDestinationBucketV2.id}
      policy: ${exampleDestination.json}
  exampleFlow:
    type: aws:appflow:Flow
    name: example
    properties:
      name: example
      sourceFlowConfig:
        connectorType: S3
        sourceConnectorProperties:
          s3:
            bucketName: ${exampleSourceBucketPolicy.bucket}
            bucketPrefix: example
      destinationFlowConfigs:
        - connectorType: S3
          destinationConnectorProperties:
            s3:
              bucketName: ${exampleDestinationBucketPolicy.bucket}
              s3OutputFormatConfig:
                prefixConfig:
                  prefixType: PATH
      tasks:
        - sourceFields:
            - exampleField
          destinationField: exampleField
          taskType: Map
          connectorOperators:
            - s3: NO_OP
      triggerConfig:
        triggerType: OnDemand
variables:
  exampleSource:
    fn::invoke:
      function: aws:iam:getPolicyDocument
      arguments:
        statements:
          - sid: AllowAppFlowSourceActions
            effect: Allow
            principals:
              - type: Service
                identifiers:
                  - appflow.amazonaws.com
            actions:
              - s3:ListBucket
              - s3:GetObject
            resources:
              - arn:aws:s3:::example-source
              - arn:aws:s3:::example-source/*
  exampleDestination:
    fn::invoke:
      function: aws:iam:getPolicyDocument
      arguments:
        statements:
          - sid: AllowAppFlowDestinationActions
            effect: Allow
            principals:
              - type: Service
                identifiers:
                  - appflow.amazonaws.com
            actions:
              - s3:PutObject
              - s3:AbortMultipartUpload
              - s3:ListMultipartUploadParts
              - s3:ListBucketMultipartUploads
              - s3:GetBucketAcl
              - s3:PutObjectAcl
            resources:
              - arn:aws:s3:::example-destination
              - arn:aws:s3:::example-destination/*
Create Flow Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
TypeScript
new Flow(name: string, args: FlowArgs, opts?: CustomResourceOptions);
Python
@overload
def Flow(resource_name: str,
         args: FlowArgs,
         opts: Optional[ResourceOptions] = None)
@overload
def Flow(resource_name: str,
         opts: Optional[ResourceOptions] = None,
         destination_flow_configs: Optional[Sequence[FlowDestinationFlowConfigArgs]] = None,
         source_flow_config: Optional[FlowSourceFlowConfigArgs] = None,
         tasks: Optional[Sequence[FlowTaskArgs]] = None,
         trigger_config: Optional[FlowTriggerConfigArgs] = None,
         description: Optional[str] = None,
         kms_arn: Optional[str] = None,
         metadata_catalog_config: Optional[FlowMetadataCatalogConfigArgs] = None,
         name: Optional[str] = None,
         tags: Optional[Mapping[str, str]] = None)
Go
func NewFlow(ctx *Context, name string, args FlowArgs, opts ...ResourceOption) (*Flow, error)
C#
public Flow(string name, FlowArgs args, CustomResourceOptions? opts = null)
YAML
type: aws:appflow:Flow
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
TypeScript
- name string: The unique name of the resource.
- args FlowArgs: The arguments to resource properties.
- opts CustomResourceOptions: Bag of options to control resource's behavior.
Python
- resource_name str: The unique name of the resource.
- args FlowArgs: The arguments to resource properties.
- opts ResourceOptions: Bag of options to control resource's behavior.
Go
- ctx Context: Context object for the current deployment.
- name string: The unique name of the resource.
- args FlowArgs: The arguments to resource properties.
- opts ResourceOption: Bag of options to control resource's behavior.
C#
- name string: The unique name of the resource.
- args FlowArgs: The arguments to resource properties.
- opts CustomResourceOptions: Bag of options to control resource's behavior.
Java
- name String: The unique name of the resource.
- args FlowArgs: The arguments to resource properties.
- options CustomResourceOptions: Bag of options to control resource's behavior.
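For comparison with the placeholder reference that follows, here is a minimal TypeScript sketch of a working constructor call; it assumes the example-source and example-destination buckets and their AppFlow bucket policies from the Example Usage section above already exist.
import * as aws from "@pulumi/aws";

// Minimal on-demand S3-to-S3 flow. The bucket names and field name are
// assumptions carried over from the Example Usage section above.
const minimalFlow = new aws.appflow.Flow("minimal", {
    sourceFlowConfig: {
        connectorType: "S3",
        sourceConnectorProperties: {
            s3: {
                bucketName: "example-source",
                bucketPrefix: "example",
            },
        },
    },
    destinationFlowConfigs: [{
        connectorType: "S3",
        destinationConnectorProperties: {
            s3: {
                bucketName: "example-destination",
                s3OutputFormatConfig: {
                    prefixConfig: { prefixType: "PATH" },
                },
            },
        },
    }],
    // At least one task is required to map source fields to destination fields.
    tasks: [{
        sourceFields: ["exampleField"],
        destinationField: "exampleField",
        taskType: "Map",
        connectorOperators: [{ s3: "NO_OP" }],
    }],
    // OnDemand flows only run when started explicitly.
    triggerConfig: { triggerType: "OnDemand" },
});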
Constructor example
The following reference example uses placeholder values for all input properties.
C#
var flowResource = new Aws.AppFlow.Flow("flowResource", new()
{
    DestinationFlowConfigs = new[]
    {
        new Aws.AppFlow.Inputs.FlowDestinationFlowConfigArgs
        {
            ConnectorType = "string",
            DestinationConnectorProperties = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs
            {
                CustomConnector = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorArgs
                {
                    EntityName = "string",
                    CustomProperties = 
                    {
                        { "string", "string" },
                    },
                    ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfigArgs
                    {
                        BucketName = "string",
                        BucketPrefix = "string",
                        FailOnFirstDestinationError = false,
                    },
                    IdFieldNames = new[]
                    {
                        "string",
                    },
                    WriteOperationType = "string",
                },
                CustomerProfiles = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfilesArgs
                {
                    DomainName = "string",
                    ObjectTypeName = "string",
                },
                EventBridge = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeArgs
                {
                    Object = "string",
                    ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfigArgs
                    {
                        BucketName = "string",
                        BucketPrefix = "string",
                        FailOnFirstDestinationError = false,
                    },
                },
                Honeycode = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeArgs
                {
                    Object = "string",
                    ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfigArgs
                    {
                        BucketName = "string",
                        BucketPrefix = "string",
                        FailOnFirstDestinationError = false,
                    },
                },
                LookoutMetrics = null,
                Marketo = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoArgs
                {
                    Object = "string",
                    ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfigArgs
                    {
                        BucketName = "string",
                        BucketPrefix = "string",
                        FailOnFirstDestinationError = false,
                    },
                },
                Redshift = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftArgs
                {
                    IntermediateBucketName = "string",
                    Object = "string",
                    BucketPrefix = "string",
                    ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfigArgs
                    {
                        BucketName = "string",
                        BucketPrefix = "string",
                        FailOnFirstDestinationError = false,
                    },
                },
                S3 = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args
                {
                    BucketName = "string",
                    BucketPrefix = "string",
                    S3OutputFormatConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs
                    {
                        AggregationConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfigArgs
                        {
                            AggregationType = "string",
                            TargetFileSize = 0,
                        },
                        FileType = "string",
                        PrefixConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs
                        {
                            PrefixFormat = "string",
                            PrefixHierarchies = new[]
                            {
                                "string",
                            },
                            PrefixType = "string",
                        },
                        PreserveSourceDataTyping = false,
                    },
                },
                Salesforce = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceArgs
                {
                    Object = "string",
                    ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfigArgs
                    {
                        BucketName = "string",
                        BucketPrefix = "string",
                        FailOnFirstDestinationError = false,
                    },
                    IdFieldNames = new[]
                    {
                        "string",
                    },
                    WriteOperationType = "string",
                },
                SapoData = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataArgs
                {
                    ObjectPath = "string",
                    ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfigArgs
                    {
                        BucketName = "string",
                        BucketPrefix = "string",
                        FailOnFirstDestinationError = false,
                    },
                    IdFieldNames = new[]
                    {
                        "string",
                    },
                    SuccessResponseHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfigArgs
                    {
                        BucketName = "string",
                        BucketPrefix = "string",
                    },
                    WriteOperationType = "string",
                },
                Snowflake = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeArgs
                {
                    IntermediateBucketName = "string",
                    Object = "string",
                    BucketPrefix = "string",
                    ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfigArgs
                    {
                        BucketName = "string",
                        BucketPrefix = "string",
                        FailOnFirstDestinationError = false,
                    },
                },
                Upsolver = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverArgs
                {
                    BucketName = "string",
                    S3OutputFormatConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigArgs
                    {
                        PrefixConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfigArgs
                        {
                            PrefixType = "string",
                            PrefixFormat = "string",
                            PrefixHierarchies = new[]
                            {
                                "string",
                            },
                        },
                        AggregationConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfigArgs
                        {
                            AggregationType = "string",
                        },
                        FileType = "string",
                    },
                    BucketPrefix = "string",
                },
                Zendesk = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskArgs
                {
                    Object = "string",
                    ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfigArgs
                    {
                        BucketName = "string",
                        BucketPrefix = "string",
                        FailOnFirstDestinationError = false,
                    },
                    IdFieldNames = new[]
                    {
                        "string",
                    },
                    WriteOperationType = "string",
                },
            },
            ApiVersion = "string",
            ConnectorProfileName = "string",
        },
    },
    SourceFlowConfig = new Aws.AppFlow.Inputs.FlowSourceFlowConfigArgs
    {
        ConnectorType = "string",
        SourceConnectorProperties = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesArgs
        {
            Amplitude = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesAmplitudeArgs
            {
                Object = "string",
            },
            CustomConnector = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesCustomConnectorArgs
            {
                EntityName = "string",
                CustomProperties = 
                {
                    { "string", "string" },
                },
            },
            Datadog = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesDatadogArgs
            {
                Object = "string",
            },
            Dynatrace = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesDynatraceArgs
            {
                Object = "string",
            },
            GoogleAnalytics = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalyticsArgs
            {
                Object = "string",
            },
            InforNexus = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesInforNexusArgs
            {
                Object = "string",
            },
            Marketo = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesMarketoArgs
            {
                Object = "string",
            },
            S3 = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesS3Args
            {
                BucketName = "string",
                BucketPrefix = "string",
                S3InputFormatConfig = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfigArgs
                {
                    S3InputFileType = "string",
                },
            },
            Salesforce = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesSalesforceArgs
            {
                Object = "string",
                DataTransferApi = "string",
                EnableDynamicFieldUpdate = false,
                IncludeDeletedRecords = false,
            },
            SapoData = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesSapoDataArgs
            {
                ObjectPath = "string",
                PaginationConfig = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesSapoDataPaginationConfigArgs
                {
                    MaxPageSize = 0,
                },
                ParallelismConfig = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesSapoDataParallelismConfigArgs
                {
                    MaxPageSize = 0,
                },
            },
            ServiceNow = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesServiceNowArgs
            {
                Object = "string",
            },
            Singular = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesSingularArgs
            {
                Object = "string",
            },
            Slack = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesSlackArgs
            {
                Object = "string",
            },
            Trendmicro = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesTrendmicroArgs
            {
                Object = "string",
            },
            Veeva = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesVeevaArgs
            {
                Object = "string",
                DocumentType = "string",
                IncludeAllVersions = false,
                IncludeRenditions = false,
                IncludeSourceFiles = false,
            },
            Zendesk = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesZendeskArgs
            {
                Object = "string",
            },
        },
        ApiVersion = "string",
        ConnectorProfileName = "string",
        IncrementalPullConfig = new Aws.AppFlow.Inputs.FlowSourceFlowConfigIncrementalPullConfigArgs
        {
            DatetimeTypeFieldName = "string",
        },
    },
    Tasks = new[]
    {
        new Aws.AppFlow.Inputs.FlowTaskArgs
        {
            TaskType = "string",
            ConnectorOperators = new[]
            {
                new Aws.AppFlow.Inputs.FlowTaskConnectorOperatorArgs
                {
                    Amplitude = "string",
                    CustomConnector = "string",
                    Datadog = "string",
                    Dynatrace = "string",
                    GoogleAnalytics = "string",
                    InforNexus = "string",
                    Marketo = "string",
                    S3 = "string",
                    Salesforce = "string",
                    SapoData = "string",
                    ServiceNow = "string",
                    Singular = "string",
                    Slack = "string",
                    Trendmicro = "string",
                    Veeva = "string",
                    Zendesk = "string",
                },
            },
            DestinationField = "string",
            SourceFields = new[]
            {
                "string",
            },
            TaskProperties = 
            {
                { "string", "string" },
            },
        },
    },
    TriggerConfig = new Aws.AppFlow.Inputs.FlowTriggerConfigArgs
    {
        TriggerType = "string",
        TriggerProperties = new Aws.AppFlow.Inputs.FlowTriggerConfigTriggerPropertiesArgs
        {
            Scheduled = new Aws.AppFlow.Inputs.FlowTriggerConfigTriggerPropertiesScheduledArgs
            {
                ScheduleExpression = "string",
                DataPullMode = "string",
                FirstExecutionFrom = "string",
                ScheduleEndTime = "string",
                ScheduleOffset = 0,
                ScheduleStartTime = "string",
                Timezone = "string",
            },
        },
    },
    Description = "string",
    KmsArn = "string",
    MetadataCatalogConfig = new Aws.AppFlow.Inputs.FlowMetadataCatalogConfigArgs
    {
        GlueDataCatalog = new Aws.AppFlow.Inputs.FlowMetadataCatalogConfigGlueDataCatalogArgs
        {
            DatabaseName = "string",
            RoleArn = "string",
            TablePrefix = "string",
        },
    },
    Name = "string",
    Tags = 
    {
        { "string", "string" },
    },
});
Go
example, err := appflow.NewFlow(ctx, "flowResource", &appflow.FlowArgs{
	DestinationFlowConfigs: appflow.FlowDestinationFlowConfigArray{
		&appflow.FlowDestinationFlowConfigArgs{
			ConnectorType: pulumi.String("string"),
			DestinationConnectorProperties: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs{
				CustomConnector: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorArgs{
					EntityName: pulumi.String("string"),
					CustomProperties: pulumi.StringMap{
						"string": pulumi.String("string"),
					},
					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfigArgs{
						BucketName:                  pulumi.String("string"),
						BucketPrefix:                pulumi.String("string"),
						FailOnFirstDestinationError: pulumi.Bool(false),
					},
					IdFieldNames: pulumi.StringArray{
						pulumi.String("string"),
					},
					WriteOperationType: pulumi.String("string"),
				},
				CustomerProfiles: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfilesArgs{
					DomainName:     pulumi.String("string"),
					ObjectTypeName: pulumi.String("string"),
				},
				EventBridge: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeArgs{
					Object: pulumi.String("string"),
					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfigArgs{
						BucketName:                  pulumi.String("string"),
						BucketPrefix:                pulumi.String("string"),
						FailOnFirstDestinationError: pulumi.Bool(false),
					},
				},
				Honeycode: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeArgs{
					Object: pulumi.String("string"),
					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfigArgs{
						BucketName:                  pulumi.String("string"),
						BucketPrefix:                pulumi.String("string"),
						FailOnFirstDestinationError: pulumi.Bool(false),
					},
				},
				LookoutMetrics: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesLookoutMetricsArgs{},
				Marketo: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoArgs{
					Object: pulumi.String("string"),
					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfigArgs{
						BucketName:                  pulumi.String("string"),
						BucketPrefix:                pulumi.String("string"),
						FailOnFirstDestinationError: pulumi.Bool(false),
					},
				},
				Redshift: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftArgs{
					IntermediateBucketName: pulumi.String("string"),
					Object:                 pulumi.String("string"),
					BucketPrefix:           pulumi.String("string"),
					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfigArgs{
						BucketName:                  pulumi.String("string"),
						BucketPrefix:                pulumi.String("string"),
						FailOnFirstDestinationError: pulumi.Bool(false),
					},
				},
				S3: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args{
					BucketName:   pulumi.String("string"),
					BucketPrefix: pulumi.String("string"),
					S3OutputFormatConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs{
						AggregationConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfigArgs{
							AggregationType: pulumi.String("string"),
							TargetFileSize:  pulumi.Int(0),
						},
						FileType: pulumi.String("string"),
						PrefixConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs{
							PrefixFormat: pulumi.String("string"),
							PrefixHierarchies: pulumi.StringArray{
								pulumi.String("string"),
							},
							PrefixType: pulumi.String("string"),
						},
						PreserveSourceDataTyping: pulumi.Bool(false),
					},
				},
				Salesforce: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceArgs{
					Object: pulumi.String("string"),
					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfigArgs{
						BucketName:                  pulumi.String("string"),
						BucketPrefix:                pulumi.String("string"),
						FailOnFirstDestinationError: pulumi.Bool(false),
					},
					IdFieldNames: pulumi.StringArray{
						pulumi.String("string"),
					},
					WriteOperationType: pulumi.String("string"),
				},
				SapoData: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataArgs{
					ObjectPath: pulumi.String("string"),
					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfigArgs{
						BucketName:                  pulumi.String("string"),
						BucketPrefix:                pulumi.String("string"),
						FailOnFirstDestinationError: pulumi.Bool(false),
					},
					IdFieldNames: pulumi.StringArray{
						pulumi.String("string"),
					},
					SuccessResponseHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfigArgs{
						BucketName:   pulumi.String("string"),
						BucketPrefix: pulumi.String("string"),
					},
					WriteOperationType: pulumi.String("string"),
				},
				Snowflake: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeArgs{
					IntermediateBucketName: pulumi.String("string"),
					Object:                 pulumi.String("string"),
					BucketPrefix:           pulumi.String("string"),
					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfigArgs{
						BucketName:                  pulumi.String("string"),
						BucketPrefix:                pulumi.String("string"),
						FailOnFirstDestinationError: pulumi.Bool(false),
					},
				},
				Upsolver: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverArgs{
					BucketName: pulumi.String("string"),
					S3OutputFormatConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigArgs{
						PrefixConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfigArgs{
							PrefixType:   pulumi.String("string"),
							PrefixFormat: pulumi.String("string"),
							PrefixHierarchies: pulumi.StringArray{
								pulumi.String("string"),
							},
						},
						AggregationConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfigArgs{
							AggregationType: pulumi.String("string"),
						},
						FileType: pulumi.String("string"),
					},
					BucketPrefix: pulumi.String("string"),
				},
				Zendesk: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskArgs{
					Object: pulumi.String("string"),
					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfigArgs{
						BucketName:                  pulumi.String("string"),
						BucketPrefix:                pulumi.String("string"),
						FailOnFirstDestinationError: pulumi.Bool(false),
					},
					IdFieldNames: pulumi.StringArray{
						pulumi.String("string"),
					},
					WriteOperationType: pulumi.String("string"),
				},
			},
			ApiVersion:           pulumi.String("string"),
			ConnectorProfileName: pulumi.String("string"),
		},
	},
	SourceFlowConfig: &appflow.FlowSourceFlowConfigArgs{
		ConnectorType: pulumi.String("string"),
		SourceConnectorProperties: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesArgs{
			Amplitude: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesAmplitudeArgs{
				Object: pulumi.String("string"),
			},
			CustomConnector: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesCustomConnectorArgs{
				EntityName: pulumi.String("string"),
				CustomProperties: pulumi.StringMap{
					"string": pulumi.String("string"),
				},
			},
			Datadog: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesDatadogArgs{
				Object: pulumi.String("string"),
			},
			Dynatrace: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesDynatraceArgs{
				Object: pulumi.String("string"),
			},
			GoogleAnalytics: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalyticsArgs{
				Object: pulumi.String("string"),
			},
			InforNexus: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesInforNexusArgs{
				Object: pulumi.String("string"),
			},
			Marketo: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesMarketoArgs{
				Object: pulumi.String("string"),
			},
			S3: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesS3Args{
				BucketName:   pulumi.String("string"),
				BucketPrefix: pulumi.String("string"),
				S3InputFormatConfig: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfigArgs{
					S3InputFileType: pulumi.String("string"),
				},
			},
			Salesforce: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesSalesforceArgs{
				Object:                   pulumi.String("string"),
				DataTransferApi:          pulumi.String("string"),
				EnableDynamicFieldUpdate: pulumi.Bool(false),
				IncludeDeletedRecords:    pulumi.Bool(false),
			},
			SapoData: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesSapoDataArgs{
				ObjectPath: pulumi.String("string"),
				PaginationConfig: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesSapoDataPaginationConfigArgs{
					MaxPageSize: pulumi.Int(0),
				},
				ParallelismConfig: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesSapoDataParallelismConfigArgs{
					MaxPageSize: pulumi.Int(0),
				},
			},
			ServiceNow: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesServiceNowArgs{
				Object: pulumi.String("string"),
			},
			Singular: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesSingularArgs{
				Object: pulumi.String("string"),
			},
			Slack: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesSlackArgs{
				Object: pulumi.String("string"),
			},
			Trendmicro: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesTrendmicroArgs{
				Object: pulumi.String("string"),
			},
			Veeva: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesVeevaArgs{
				Object:             pulumi.String("string"),
				DocumentType:       pulumi.String("string"),
				IncludeAllVersions: pulumi.Bool(false),
				IncludeRenditions:  pulumi.Bool(false),
				IncludeSourceFiles: pulumi.Bool(false),
			},
			Zendesk: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesZendeskArgs{
				Object: pulumi.String("string"),
			},
		},
		ApiVersion:           pulumi.String("string"),
		ConnectorProfileName: pulumi.String("string"),
		IncrementalPullConfig: &appflow.FlowSourceFlowConfigIncrementalPullConfigArgs{
			DatetimeTypeFieldName: pulumi.String("string"),
		},
	},
	Tasks: appflow.FlowTaskArray{
		&appflow.FlowTaskArgs{
			TaskType: pulumi.String("string"),
			ConnectorOperators: appflow.FlowTaskConnectorOperatorArray{
				&appflow.FlowTaskConnectorOperatorArgs{
					Amplitude:       pulumi.String("string"),
					CustomConnector: pulumi.String("string"),
					Datadog:         pulumi.String("string"),
					Dynatrace:       pulumi.String("string"),
					GoogleAnalytics: pulumi.String("string"),
					InforNexus:      pulumi.String("string"),
					Marketo:         pulumi.String("string"),
					S3:              pulumi.String("string"),
					Salesforce:      pulumi.String("string"),
					SapoData:        pulumi.String("string"),
					ServiceNow:      pulumi.String("string"),
					Singular:        pulumi.String("string"),
					Slack:           pulumi.String("string"),
					Trendmicro:      pulumi.String("string"),
					Veeva:           pulumi.String("string"),
					Zendesk:         pulumi.String("string"),
				},
			},
			DestinationField: pulumi.String("string"),
			SourceFields: pulumi.StringArray{
				pulumi.String("string"),
			},
			TaskProperties: pulumi.StringMap{
				"string": pulumi.String("string"),
			},
		},
	},
	TriggerConfig: &appflow.FlowTriggerConfigArgs{
		TriggerType: pulumi.String("string"),
		TriggerProperties: &appflow.FlowTriggerConfigTriggerPropertiesArgs{
			Scheduled: &appflow.FlowTriggerConfigTriggerPropertiesScheduledArgs{
				ScheduleExpression: pulumi.String("string"),
				DataPullMode:       pulumi.String("string"),
				FirstExecutionFrom: pulumi.String("string"),
				ScheduleEndTime:    pulumi.String("string"),
				ScheduleOffset:     pulumi.Int(0),
				ScheduleStartTime:  pulumi.String("string"),
				Timezone:           pulumi.String("string"),
			},
		},
	},
	Description: pulumi.String("string"),
	KmsArn:      pulumi.String("string"),
	MetadataCatalogConfig: &appflow.FlowMetadataCatalogConfigArgs{
		GlueDataCatalog: &appflow.FlowMetadataCatalogConfigGlueDataCatalogArgs{
			DatabaseName: pulumi.String("string"),
			RoleArn:      pulumi.String("string"),
			TablePrefix:  pulumi.String("string"),
		},
	},
	Name: pulumi.String("string"),
	Tags: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
})
var flowResource = new Flow("flowResource", FlowArgs.builder()
    .destinationFlowConfigs(FlowDestinationFlowConfigArgs.builder()
        .connectorType("string")
        .destinationConnectorProperties(FlowDestinationFlowConfigDestinationConnectorPropertiesArgs.builder()
            .customConnector(FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorArgs.builder()
                .entityName("string")
                .customProperties(Map.of("string", "string"))
                .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfigArgs.builder()
                    .bucketName("string")
                    .bucketPrefix("string")
                    .failOnFirstDestinationError(false)
                    .build())
                .idFieldNames("string")
                .writeOperationType("string")
                .build())
            .customerProfiles(FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfilesArgs.builder()
                .domainName("string")
                .objectTypeName("string")
                .build())
            .eventBridge(FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeArgs.builder()
                .object("string")
                .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfigArgs.builder()
                    .bucketName("string")
                    .bucketPrefix("string")
                    .failOnFirstDestinationError(false)
                    .build())
                .build())
            .honeycode(FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeArgs.builder()
                .object("string")
                .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfigArgs.builder()
                    .bucketName("string")
                    .bucketPrefix("string")
                    .failOnFirstDestinationError(false)
                    .build())
                .build())
            .lookoutMetrics()
            .marketo(FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoArgs.builder()
                .object("string")
                .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfigArgs.builder()
                    .bucketName("string")
                    .bucketPrefix("string")
                    .failOnFirstDestinationError(false)
                    .build())
                .build())
            .redshift(FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftArgs.builder()
                .intermediateBucketName("string")
                .object("string")
                .bucketPrefix("string")
                .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfigArgs.builder()
                    .bucketName("string")
                    .bucketPrefix("string")
                    .failOnFirstDestinationError(false)
                    .build())
                .build())
            .s3(FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args.builder()
                .bucketName("string")
                .bucketPrefix("string")
                .s3OutputFormatConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs.builder()
                    .aggregationConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfigArgs.builder()
                        .aggregationType("string")
                        .targetFileSize(0)
                        .build())
                    .fileType("string")
                    .prefixConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs.builder()
                        .prefixFormat("string")
                        .prefixHierarchies("string")
                        .prefixType("string")
                        .build())
                    .preserveSourceDataTyping(false)
                    .build())
                .build())
            .salesforce(FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceArgs.builder()
                .object("string")
                .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfigArgs.builder()
                    .bucketName("string")
                    .bucketPrefix("string")
                    .failOnFirstDestinationError(false)
                    .build())
                .idFieldNames("string")
                .writeOperationType("string")
                .build())
            .sapoData(FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataArgs.builder()
                .objectPath("string")
                .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfigArgs.builder()
                    .bucketName("string")
                    .bucketPrefix("string")
                    .failOnFirstDestinationError(false)
                    .build())
                .idFieldNames("string")
                .successResponseHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfigArgs.builder()
                    .bucketName("string")
                    .bucketPrefix("string")
                    .build())
                .writeOperationType("string")
                .build())
            .snowflake(FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeArgs.builder()
                .intermediateBucketName("string")
                .object("string")
                .bucketPrefix("string")
                .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfigArgs.builder()
                    .bucketName("string")
                    .bucketPrefix("string")
                    .failOnFirstDestinationError(false)
                    .build())
                .build())
            .upsolver(FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverArgs.builder()
                .bucketName("string")
                .s3OutputFormatConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigArgs.builder()
                    .prefixConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfigArgs.builder()
                        .prefixType("string")
                        .prefixFormat("string")
                        .prefixHierarchies("string")
                        .build())
                    .aggregationConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfigArgs.builder()
                        .aggregationType("string")
                        .build())
                    .fileType("string")
                    .build())
                .bucketPrefix("string")
                .build())
            .zendesk(FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskArgs.builder()
                .object("string")
                .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfigArgs.builder()
                    .bucketName("string")
                    .bucketPrefix("string")
                    .failOnFirstDestinationError(false)
                    .build())
                .idFieldNames("string")
                .writeOperationType("string")
                .build())
            .build())
        .apiVersion("string")
        .connectorProfileName("string")
        .build())
    .sourceFlowConfig(FlowSourceFlowConfigArgs.builder()
        .connectorType("string")
        .sourceConnectorProperties(FlowSourceFlowConfigSourceConnectorPropertiesArgs.builder()
            .amplitude(FlowSourceFlowConfigSourceConnectorPropertiesAmplitudeArgs.builder()
                .object("string")
                .build())
            .customConnector(FlowSourceFlowConfigSourceConnectorPropertiesCustomConnectorArgs.builder()
                .entityName("string")
                .customProperties(Map.of("string", "string"))
                .build())
            .datadog(FlowSourceFlowConfigSourceConnectorPropertiesDatadogArgs.builder()
                .object("string")
                .build())
            .dynatrace(FlowSourceFlowConfigSourceConnectorPropertiesDynatraceArgs.builder()
                .object("string")
                .build())
            .googleAnalytics(FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalyticsArgs.builder()
                .object("string")
                .build())
            .inforNexus(FlowSourceFlowConfigSourceConnectorPropertiesInforNexusArgs.builder()
                .object("string")
                .build())
            .marketo(FlowSourceFlowConfigSourceConnectorPropertiesMarketoArgs.builder()
                .object("string")
                .build())
            .s3(FlowSourceFlowConfigSourceConnectorPropertiesS3Args.builder()
                .bucketName("string")
                .bucketPrefix("string")
                .s3InputFormatConfig(FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfigArgs.builder()
                    .s3InputFileType("string")
                    .build())
                .build())
            .salesforce(FlowSourceFlowConfigSourceConnectorPropertiesSalesforceArgs.builder()
                .object("string")
                .dataTransferApi("string")
                .enableDynamicFieldUpdate(false)
                .includeDeletedRecords(false)
                .build())
            .sapoData(FlowSourceFlowConfigSourceConnectorPropertiesSapoDataArgs.builder()
                .objectPath("string")
                .paginationConfig(FlowSourceFlowConfigSourceConnectorPropertiesSapoDataPaginationConfigArgs.builder()
                    .maxPageSize(0)
                    .build())
                .parallelismConfig(FlowSourceFlowConfigSourceConnectorPropertiesSapoDataParallelismConfigArgs.builder()
                    .maxPageSize(0)
                    .build())
                .build())
            .serviceNow(FlowSourceFlowConfigSourceConnectorPropertiesServiceNowArgs.builder()
                .object("string")
                .build())
            .singular(FlowSourceFlowConfigSourceConnectorPropertiesSingularArgs.builder()
                .object("string")
                .build())
            .slack(FlowSourceFlowConfigSourceConnectorPropertiesSlackArgs.builder()
                .object("string")
                .build())
            .trendmicro(FlowSourceFlowConfigSourceConnectorPropertiesTrendmicroArgs.builder()
                .object("string")
                .build())
            .veeva(FlowSourceFlowConfigSourceConnectorPropertiesVeevaArgs.builder()
                .object("string")
                .documentType("string")
                .includeAllVersions(false)
                .includeRenditions(false)
                .includeSourceFiles(false)
                .build())
            .zendesk(FlowSourceFlowConfigSourceConnectorPropertiesZendeskArgs.builder()
                .object("string")
                .build())
            .build())
        .apiVersion("string")
        .connectorProfileName("string")
        .incrementalPullConfig(FlowSourceFlowConfigIncrementalPullConfigArgs.builder()
            .datetimeTypeFieldName("string")
            .build())
        .build())
    .tasks(FlowTaskArgs.builder()
        .taskType("string")
        .connectorOperators(FlowTaskConnectorOperatorArgs.builder()
            .amplitude("string")
            .customConnector("string")
            .datadog("string")
            .dynatrace("string")
            .googleAnalytics("string")
            .inforNexus("string")
            .marketo("string")
            .s3("string")
            .salesforce("string")
            .sapoData("string")
            .serviceNow("string")
            .singular("string")
            .slack("string")
            .trendmicro("string")
            .veeva("string")
            .zendesk("string")
            .build())
        .destinationField("string")
        .sourceFields("string")
        .taskProperties(Map.of("string", "string"))
        .build())
    .triggerConfig(FlowTriggerConfigArgs.builder()
        .triggerType("string")
        .triggerProperties(FlowTriggerConfigTriggerPropertiesArgs.builder()
            .scheduled(FlowTriggerConfigTriggerPropertiesScheduledArgs.builder()
                .scheduleExpression("string")
                .dataPullMode("string")
                .firstExecutionFrom("string")
                .scheduleEndTime("string")
                .scheduleOffset(0)
                .scheduleStartTime("string")
                .timezone("string")
                .build())
            .build())
        .build())
    .description("string")
    .kmsArn("string")
    .metadataCatalogConfig(FlowMetadataCatalogConfigArgs.builder()
        .glueDataCatalog(FlowMetadataCatalogConfigGlueDataCatalogArgs.builder()
            .databaseName("string")
            .roleArn("string")
            .tablePrefix("string")
            .build())
        .build())
    .name("string")
    .tags(Map.of("string", "string"))
    .build());
flow_resource = aws.appflow.Flow("flowResource",
    destination_flow_configs=[{
        "connector_type": "string",
        "destination_connector_properties": {
            "custom_connector": {
                "entity_name": "string",
                "custom_properties": {
                    "string": "string",
                },
                "error_handling_config": {
                    "bucket_name": "string",
                    "bucket_prefix": "string",
                    "fail_on_first_destination_error": False,
                },
                "id_field_names": ["string"],
                "write_operation_type": "string",
            },
            "customer_profiles": {
                "domain_name": "string",
                "object_type_name": "string",
            },
            "event_bridge": {
                "object": "string",
                "error_handling_config": {
                    "bucket_name": "string",
                    "bucket_prefix": "string",
                    "fail_on_first_destination_error": False,
                },
            },
            "honeycode": {
                "object": "string",
                "error_handling_config": {
                    "bucket_name": "string",
                    "bucket_prefix": "string",
                    "fail_on_first_destination_error": False,
                },
            },
            "lookout_metrics": {},
            "marketo": {
                "object": "string",
                "error_handling_config": {
                    "bucket_name": "string",
                    "bucket_prefix": "string",
                    "fail_on_first_destination_error": False,
                },
            },
            "redshift": {
                "intermediate_bucket_name": "string",
                "object": "string",
                "bucket_prefix": "string",
                "error_handling_config": {
                    "bucket_name": "string",
                    "bucket_prefix": "string",
                    "fail_on_first_destination_error": False,
                },
            },
            "s3": {
                "bucket_name": "string",
                "bucket_prefix": "string",
                "s3_output_format_config": {
                    "aggregation_config": {
                        "aggregation_type": "string",
                        "target_file_size": 0,
                    },
                    "file_type": "string",
                    "prefix_config": {
                        "prefix_format": "string",
                        "prefix_hierarchies": ["string"],
                        "prefix_type": "string",
                    },
                    "preserve_source_data_typing": False,
                },
            },
            "salesforce": {
                "object": "string",
                "error_handling_config": {
                    "bucket_name": "string",
                    "bucket_prefix": "string",
                    "fail_on_first_destination_error": False,
                },
                "id_field_names": ["string"],
                "write_operation_type": "string",
            },
            "sapo_data": {
                "object_path": "string",
                "error_handling_config": {
                    "bucket_name": "string",
                    "bucket_prefix": "string",
                    "fail_on_first_destination_error": False,
                },
                "id_field_names": ["string"],
                "success_response_handling_config": {
                    "bucket_name": "string",
                    "bucket_prefix": "string",
                },
                "write_operation_type": "string",
            },
            "snowflake": {
                "intermediate_bucket_name": "string",
                "object": "string",
                "bucket_prefix": "string",
                "error_handling_config": {
                    "bucket_name": "string",
                    "bucket_prefix": "string",
                    "fail_on_first_destination_error": False,
                },
            },
            "upsolver": {
                "bucket_name": "string",
                "s3_output_format_config": {
                    "prefix_config": {
                        "prefix_type": "string",
                        "prefix_format": "string",
                        "prefix_hierarchies": ["string"],
                    },
                    "aggregation_config": {
                        "aggregation_type": "string",
                    },
                    "file_type": "string",
                },
                "bucket_prefix": "string",
            },
            "zendesk": {
                "object": "string",
                "error_handling_config": {
                    "bucket_name": "string",
                    "bucket_prefix": "string",
                    "fail_on_first_destination_error": False,
                },
                "id_field_names": ["string"],
                "write_operation_type": "string",
            },
        },
        "api_version": "string",
        "connector_profile_name": "string",
    }],
    source_flow_config={
        "connector_type": "string",
        "source_connector_properties": {
            "amplitude": {
                "object": "string",
            },
            "custom_connector": {
                "entity_name": "string",
                "custom_properties": {
                    "string": "string",
                },
            },
            "datadog": {
                "object": "string",
            },
            "dynatrace": {
                "object": "string",
            },
            "google_analytics": {
                "object": "string",
            },
            "infor_nexus": {
                "object": "string",
            },
            "marketo": {
                "object": "string",
            },
            "s3": {
                "bucket_name": "string",
                "bucket_prefix": "string",
                "s3_input_format_config": {
                    "s3_input_file_type": "string",
                },
            },
            "salesforce": {
                "object": "string",
                "data_transfer_api": "string",
                "enable_dynamic_field_update": False,
                "include_deleted_records": False,
            },
            "sapo_data": {
                "object_path": "string",
                "pagination_config": {
                    "max_page_size": 0,
                },
                "parallelism_config": {
                    "max_page_size": 0,
                },
            },
            "service_now": {
                "object": "string",
            },
            "singular": {
                "object": "string",
            },
            "slack": {
                "object": "string",
            },
            "trendmicro": {
                "object": "string",
            },
            "veeva": {
                "object": "string",
                "document_type": "string",
                "include_all_versions": False,
                "include_renditions": False,
                "include_source_files": False,
            },
            "zendesk": {
                "object": "string",
            },
        },
        "api_version": "string",
        "connector_profile_name": "string",
        "incremental_pull_config": {
            "datetime_type_field_name": "string",
        },
    },
    tasks=[{
        "task_type": "string",
        "connector_operators": [{
            "amplitude": "string",
            "custom_connector": "string",
            "datadog": "string",
            "dynatrace": "string",
            "google_analytics": "string",
            "infor_nexus": "string",
            "marketo": "string",
            "s3": "string",
            "salesforce": "string",
            "sapo_data": "string",
            "service_now": "string",
            "singular": "string",
            "slack": "string",
            "trendmicro": "string",
            "veeva": "string",
            "zendesk": "string",
        }],
        "destination_field": "string",
        "source_fields": ["string"],
        "task_properties": {
            "string": "string",
        },
    }],
    trigger_config={
        "trigger_type": "string",
        "trigger_properties": {
            "scheduled": {
                "schedule_expression": "string",
                "data_pull_mode": "string",
                "first_execution_from": "string",
                "schedule_end_time": "string",
                "schedule_offset": 0,
                "schedule_start_time": "string",
                "timezone": "string",
            },
        },
    },
    description="string",
    kms_arn="string",
    metadata_catalog_config={
        "glue_data_catalog": {
            "database_name": "string",
            "role_arn": "string",
            "table_prefix": "string",
        },
    },
    name="string",
    tags={
        "string": "string",
    })
const flowResource = new aws.appflow.Flow("flowResource", {
    destinationFlowConfigs: [{
        connectorType: "string",
        destinationConnectorProperties: {
            customConnector: {
                entityName: "string",
                customProperties: {
                    string: "string",
                },
                errorHandlingConfig: {
                    bucketName: "string",
                    bucketPrefix: "string",
                    failOnFirstDestinationError: false,
                },
                idFieldNames: ["string"],
                writeOperationType: "string",
            },
            customerProfiles: {
                domainName: "string",
                objectTypeName: "string",
            },
            eventBridge: {
                object: "string",
                errorHandlingConfig: {
                    bucketName: "string",
                    bucketPrefix: "string",
                    failOnFirstDestinationError: false,
                },
            },
            honeycode: {
                object: "string",
                errorHandlingConfig: {
                    bucketName: "string",
                    bucketPrefix: "string",
                    failOnFirstDestinationError: false,
                },
            },
            lookoutMetrics: {},
            marketo: {
                object: "string",
                errorHandlingConfig: {
                    bucketName: "string",
                    bucketPrefix: "string",
                    failOnFirstDestinationError: false,
                },
            },
            redshift: {
                intermediateBucketName: "string",
                object: "string",
                bucketPrefix: "string",
                errorHandlingConfig: {
                    bucketName: "string",
                    bucketPrefix: "string",
                    failOnFirstDestinationError: false,
                },
            },
            s3: {
                bucketName: "string",
                bucketPrefix: "string",
                s3OutputFormatConfig: {
                    aggregationConfig: {
                        aggregationType: "string",
                        targetFileSize: 0,
                    },
                    fileType: "string",
                    prefixConfig: {
                        prefixFormat: "string",
                        prefixHierarchies: ["string"],
                        prefixType: "string",
                    },
                    preserveSourceDataTyping: false,
                },
            },
            salesforce: {
                object: "string",
                errorHandlingConfig: {
                    bucketName: "string",
                    bucketPrefix: "string",
                    failOnFirstDestinationError: false,
                },
                idFieldNames: ["string"],
                writeOperationType: "string",
            },
            sapoData: {
                objectPath: "string",
                errorHandlingConfig: {
                    bucketName: "string",
                    bucketPrefix: "string",
                    failOnFirstDestinationError: false,
                },
                idFieldNames: ["string"],
                successResponseHandlingConfig: {
                    bucketName: "string",
                    bucketPrefix: "string",
                },
                writeOperationType: "string",
            },
            snowflake: {
                intermediateBucketName: "string",
                object: "string",
                bucketPrefix: "string",
                errorHandlingConfig: {
                    bucketName: "string",
                    bucketPrefix: "string",
                    failOnFirstDestinationError: false,
                },
            },
            upsolver: {
                bucketName: "string",
                s3OutputFormatConfig: {
                    prefixConfig: {
                        prefixType: "string",
                        prefixFormat: "string",
                        prefixHierarchies: ["string"],
                    },
                    aggregationConfig: {
                        aggregationType: "string",
                    },
                    fileType: "string",
                },
                bucketPrefix: "string",
            },
            zendesk: {
                object: "string",
                errorHandlingConfig: {
                    bucketName: "string",
                    bucketPrefix: "string",
                    failOnFirstDestinationError: false,
                },
                idFieldNames: ["string"],
                writeOperationType: "string",
            },
        },
        apiVersion: "string",
        connectorProfileName: "string",
    }],
    sourceFlowConfig: {
        connectorType: "string",
        sourceConnectorProperties: {
            amplitude: {
                object: "string",
            },
            customConnector: {
                entityName: "string",
                customProperties: {
                    string: "string",
                },
            },
            datadog: {
                object: "string",
            },
            dynatrace: {
                object: "string",
            },
            googleAnalytics: {
                object: "string",
            },
            inforNexus: {
                object: "string",
            },
            marketo: {
                object: "string",
            },
            s3: {
                bucketName: "string",
                bucketPrefix: "string",
                s3InputFormatConfig: {
                    s3InputFileType: "string",
                },
            },
            salesforce: {
                object: "string",
                dataTransferApi: "string",
                enableDynamicFieldUpdate: false,
                includeDeletedRecords: false,
            },
            sapoData: {
                objectPath: "string",
                paginationConfig: {
                    maxPageSize: 0,
                },
                parallelismConfig: {
                    maxPageSize: 0,
                },
            },
            serviceNow: {
                object: "string",
            },
            singular: {
                object: "string",
            },
            slack: {
                object: "string",
            },
            trendmicro: {
                object: "string",
            },
            veeva: {
                object: "string",
                documentType: "string",
                includeAllVersions: false,
                includeRenditions: false,
                includeSourceFiles: false,
            },
            zendesk: {
                object: "string",
            },
        },
        apiVersion: "string",
        connectorProfileName: "string",
        incrementalPullConfig: {
            datetimeTypeFieldName: "string",
        },
    },
    tasks: [{
        taskType: "string",
        connectorOperators: [{
            amplitude: "string",
            customConnector: "string",
            datadog: "string",
            dynatrace: "string",
            googleAnalytics: "string",
            inforNexus: "string",
            marketo: "string",
            s3: "string",
            salesforce: "string",
            sapoData: "string",
            serviceNow: "string",
            singular: "string",
            slack: "string",
            trendmicro: "string",
            veeva: "string",
            zendesk: "string",
        }],
        destinationField: "string",
        sourceFields: ["string"],
        taskProperties: {
            string: "string",
        },
    }],
    triggerConfig: {
        triggerType: "string",
        triggerProperties: {
            scheduled: {
                scheduleExpression: "string",
                dataPullMode: "string",
                firstExecutionFrom: "string",
                scheduleEndTime: "string",
                scheduleOffset: 0,
                scheduleStartTime: "string",
                timezone: "string",
            },
        },
    },
    description: "string",
    kmsArn: "string",
    metadataCatalogConfig: {
        glueDataCatalog: {
            databaseName: "string",
            roleArn: "string",
            tablePrefix: "string",
        },
    },
    name: "string",
    tags: {
        string: "string",
    },
});
type: aws:appflow:Flow
properties:
    description: string
    destinationFlowConfigs:
        - apiVersion: string
          connectorProfileName: string
          connectorType: string
          destinationConnectorProperties:
            customConnector:
                customProperties:
                    string: string
                entityName: string
                errorHandlingConfig:
                    bucketName: string
                    bucketPrefix: string
                    failOnFirstDestinationError: false
                idFieldNames:
                    - string
                writeOperationType: string
            customerProfiles:
                domainName: string
                objectTypeName: string
            eventBridge:
                errorHandlingConfig:
                    bucketName: string
                    bucketPrefix: string
                    failOnFirstDestinationError: false
                object: string
            honeycode:
                errorHandlingConfig:
                    bucketName: string
                    bucketPrefix: string
                    failOnFirstDestinationError: false
                object: string
            lookoutMetrics: {}
            marketo:
                errorHandlingConfig:
                    bucketName: string
                    bucketPrefix: string
                    failOnFirstDestinationError: false
                object: string
            redshift:
                bucketPrefix: string
                errorHandlingConfig:
                    bucketName: string
                    bucketPrefix: string
                    failOnFirstDestinationError: false
                intermediateBucketName: string
                object: string
            s3:
                bucketName: string
                bucketPrefix: string
                s3OutputFormatConfig:
                    aggregationConfig:
                        aggregationType: string
                        targetFileSize: 0
                    fileType: string
                    prefixConfig:
                        prefixFormat: string
                        prefixHierarchies:
                            - string
                        prefixType: string
                    preserveSourceDataTyping: false
            salesforce:
                errorHandlingConfig:
                    bucketName: string
                    bucketPrefix: string
                    failOnFirstDestinationError: false
                idFieldNames:
                    - string
                object: string
                writeOperationType: string
            sapoData:
                errorHandlingConfig:
                    bucketName: string
                    bucketPrefix: string
                    failOnFirstDestinationError: false
                idFieldNames:
                    - string
                objectPath: string
                successResponseHandlingConfig:
                    bucketName: string
                    bucketPrefix: string
                writeOperationType: string
            snowflake:
                bucketPrefix: string
                errorHandlingConfig:
                    bucketName: string
                    bucketPrefix: string
                    failOnFirstDestinationError: false
                intermediateBucketName: string
                object: string
            upsolver:
                bucketName: string
                bucketPrefix: string
                s3OutputFormatConfig:
                    aggregationConfig:
                        aggregationType: string
                    fileType: string
                    prefixConfig:
                        prefixFormat: string
                        prefixHierarchies:
                            - string
                        prefixType: string
            zendesk:
                errorHandlingConfig:
                    bucketName: string
                    bucketPrefix: string
                    failOnFirstDestinationError: false
                idFieldNames:
                    - string
                object: string
                writeOperationType: string
    kmsArn: string
    metadataCatalogConfig:
        glueDataCatalog:
            databaseName: string
            roleArn: string
            tablePrefix: string
    name: string
    sourceFlowConfig:
        apiVersion: string
        connectorProfileName: string
        connectorType: string
        incrementalPullConfig:
            datetimeTypeFieldName: string
        sourceConnectorProperties:
            amplitude:
                object: string
            customConnector:
                customProperties:
                    string: string
                entityName: string
            datadog:
                object: string
            dynatrace:
                object: string
            googleAnalytics:
                object: string
            inforNexus:
                object: string
            marketo:
                object: string
            s3:
                bucketName: string
                bucketPrefix: string
                s3InputFormatConfig:
                    s3InputFileType: string
            salesforce:
                dataTransferApi: string
                enableDynamicFieldUpdate: false
                includeDeletedRecords: false
                object: string
            sapoData:
                objectPath: string
                paginationConfig:
                    maxPageSize: 0
                parallelismConfig:
                    maxPageSize: 0
            serviceNow:
                object: string
            singular:
                object: string
            slack:
                object: string
            trendmicro:
                object: string
            veeva:
                documentType: string
                includeAllVersions: false
                includeRenditions: false
                includeSourceFiles: false
                object: string
            zendesk:
                object: string
    tags:
        string: string
    tasks:
        - connectorOperators:
            - amplitude: string
              customConnector: string
              datadog: string
              dynatrace: string
              googleAnalytics: string
              inforNexus: string
              marketo: string
              s3: string
              salesforce: string
              sapoData: string
              serviceNow: string
              singular: string
              slack: string
              trendmicro: string
              veeva: string
              zendesk: string
          destinationField: string
          sourceFields:
            - string
          taskProperties:
            string: string
          taskType: string
    triggerConfig:
        triggerProperties:
            scheduled:
                dataPullMode: string
                firstExecutionFrom: string
                scheduleEndTime: string
                scheduleExpression: string
                scheduleOffset: 0
                scheduleStartTime: string
                timezone: string
        triggerType: string
Flow Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The Flow resource accepts the following input properties:
- DestinationFlowConfigs List<FlowDestinationFlowConfig> 
- A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
- SourceFlowConfig FlowSourceFlowConfig 
- The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
- Tasks List<FlowTask> 
- A Task that Amazon AppFlow performs while transferring the data in the flow run.
- TriggerConfig FlowTriggerConfig 
- A Trigger that determines how and when the flow runs.
- Description string
- Description of the flow you want to create.
- KmsArn string
- ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
- MetadataCatalogConfig FlowMetadataCatalogConfig 
- A Catalog that determines the configuration that Amazon AppFlow uses when it catalogs the data that’s transferred by the associated flow. When Amazon AppFlow catalogs the data from a flow, it stores metadata in a data catalog.
- Name string
- Name of the flow.
- Tags Dictionary<string, string>
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- DestinationFlowConfigs []FlowDestinationFlowConfigArgs 
- A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
- SourceFlowConfig FlowSourceFlowConfigArgs 
- The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
- Tasks []FlowTaskArgs 
- A Task that Amazon AppFlow performs while transferring the data in the flow run.
- TriggerConfig FlowTriggerConfigArgs 
- A Trigger that determines how and when the flow runs.
- Description string
- Description of the flow you want to create.
- KmsArn string
- ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
- MetadataCatalogConfig FlowMetadataCatalogConfigArgs 
- A Catalog that determines the configuration that Amazon AppFlow uses when it catalogs the data that’s transferred by the associated flow. When Amazon AppFlow catalogs the data from a flow, it stores metadata in a data catalog.
- Name string
- Name of the flow.
- Tags map[string]string
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- destinationFlowConfigs List<FlowDestinationFlowConfig> 
- A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
- sourceFlowConfig FlowSourceFlowConfig 
- The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
- tasks List<FlowTask> 
- A Task that Amazon AppFlow performs while transferring the data in the flow run.
- triggerConfig FlowTriggerConfig 
- A Trigger that determines how and when the flow runs.
- description String
- Description of the flow you want to create.
- kmsArn String
- ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
- metadataCatalogConfig FlowMetadataCatalogConfig 
- A Catalog that determines the configuration that Amazon AppFlow uses when it catalogs the data that’s transferred by the associated flow. When Amazon AppFlow catalogs the data from a flow, it stores metadata in a data catalog.
- name String
- Name of the flow.
- tags Map<String,String>
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- destinationFlowConfigs FlowDestinationFlowConfig[] 
- A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
- sourceFlowConfig FlowSourceFlowConfig 
- The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
- tasks FlowTask[] 
- A Task that Amazon AppFlow performs while transferring the data in the flow run.
- triggerConfig FlowTriggerConfig 
- A Trigger that determines how and when the flow runs.
- description string
- Description of the flow you want to create.
- kmsArn string
- ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
- metadataCatalogConfig FlowMetadataCatalogConfig 
- A Catalog that determines the configuration that Amazon AppFlow uses when it catalogs the data that’s transferred by the associated flow. When Amazon AppFlow catalogs the data from a flow, it stores metadata in a data catalog.
- name string
- Name of the flow.
- tags {[key: string]: string}
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- destination_flow_configs Sequence[FlowDestinationFlowConfigArgs] 
- A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
- source_flow_config FlowSourceFlowConfigArgs 
- The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
- tasks Sequence[FlowTaskArgs] 
- A Task that Amazon AppFlow performs while transferring the data in the flow run.
- trigger_config FlowTriggerConfigArgs 
- A Trigger that determines how and when the flow runs.
- description str
- Description of the flow you want to create.
- kms_arn str
- ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
- metadata_catalog_config FlowMetadataCatalogConfigArgs 
- A Catalog that determines the configuration that Amazon AppFlow uses when it catalogs the data that’s transferred by the associated flow. When Amazon AppFlow catalogs the data from a flow, it stores metadata in a data catalog.
- name str
- Name of the flow.
- tags Mapping[str, str]
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- destinationFlowConfigs List<Property Map> 
- A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
- sourceFlowConfig Property Map 
- The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
- tasks List<Property Map>
- A Task that Amazon AppFlow performs while transferring the data in the flow run.
- triggerConfig Property Map
- A Trigger that determines how and when the flow runs.
- description String
- Description of the flow you want to create.
- kmsArn String
- ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
- metadataCatalogConfig Property Map 
- A Catalog that determines the configuration that Amazon AppFlow uses when it catalogs the data that’s transferred by the associated flow. When Amazon AppFlow catalogs the data from a flow, it stores metadata in a data catalog.
- name String
- Name of the flow.
- tags Map<String>
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
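As noted above, Python object inputs such as trigger_config can be passed either as argument classes or as dictionary literals. A minimal, illustrative sketch of the two equivalent forms (the schedule values below are placeholders, not recommendations):
import pulumi_aws as aws

# Argument-class form: nested objects use the generated *Args classes.
trigger_as_args = aws.appflow.FlowTriggerConfigArgs(
    trigger_type="Scheduled",
    trigger_properties=aws.appflow.FlowTriggerConfigTriggerPropertiesArgs(
        scheduled=aws.appflow.FlowTriggerConfigTriggerPropertiesScheduledArgs(
            schedule_expression="rate(1hours)",
            data_pull_mode="Incremental",
        ),
    ),
)

# Dictionary-literal form: the same structure expressed with plain dicts.
trigger_as_dict = {
    "trigger_type": "Scheduled",
    "trigger_properties": {
        "scheduled": {
            "schedule_expression": "rate(1hours)",
            "data_pull_mode": "Incremental",
        },
    },
}
Either value can be supplied as the trigger_config argument when constructing aws.appflow.Flow.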
Outputs
All input properties are implicitly available as output properties. Additionally, the Flow resource produces the following output properties:
- Arn string
- Flow's ARN.
- FlowStatus string
- The current status of the flow.
- Id string
- The provider-assigned unique ID for this managed resource.
- TagsAll Dictionary<string, string>
- Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- Arn string
- Flow's ARN.
- FlowStatus string
- The current status of the flow.
- Id string
- The provider-assigned unique ID for this managed resource.
- TagsAll map[string]string
- Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- arn String
- Flow's ARN.
- flowStatus String
- The current status of the flow.
- id String
- The provider-assigned unique ID for this managed resource.
- tagsAll Map<String,String>
- Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- arn string
- Flow's ARN.
- flowStatus string
- The current status of the flow.
- id string
- The provider-assigned unique ID for this managed resource.
- tagsAll {[key: string]: string}
- Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- arn str
- Flow's ARN.
- flow_status str
- The current status of the flow.
- id str
- The provider-assigned unique ID for this managed resource.
- tags_all Mapping[str, str]
- Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- arn String
- Flow's ARN.
- flowStatus String
- The current status of the flow.
- id String
- The provider-assigned unique ID for this managed resource.
- tagsAll Map<String>
- Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
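These outputs can be consumed like any other Pulumi outputs. As a sketch, assuming a Flow named example_flow has been created earlier in a Python program, its ARN and current status could be exported as stack outputs:
import pulumi

# example_flow is assumed to be an aws.appflow.Flow defined elsewhere in the program;
# arn and flow_status are the output properties documented above.
pulumi.export("flow_arn", example_flow.arn)
pulumi.export("flow_status", example_flow.flow_status)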
Look up Existing Flow Resource
Get an existing Flow resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: FlowState, opts?: CustomResourceOptions): Flow
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        arn: Optional[str] = None,
        description: Optional[str] = None,
        destination_flow_configs: Optional[Sequence[FlowDestinationFlowConfigArgs]] = None,
        flow_status: Optional[str] = None,
        kms_arn: Optional[str] = None,
        metadata_catalog_config: Optional[FlowMetadataCatalogConfigArgs] = None,
        name: Optional[str] = None,
        source_flow_config: Optional[FlowSourceFlowConfigArgs] = None,
        tags: Optional[Mapping[str, str]] = None,
        tags_all: Optional[Mapping[str, str]] = None,
        tasks: Optional[Sequence[FlowTaskArgs]] = None,
        trigger_config: Optional[FlowTriggerConfigArgs] = None) -> Flow
func GetFlow(ctx *Context, name string, id IDInput, state *FlowState, opts ...ResourceOption) (*Flow, error)
public static Flow Get(string name, Input<string> id, FlowState? state, CustomResourceOptions? opts = null)
public static Flow get(String name, Output<String> id, FlowState state, CustomResourceOptions options)
resources:
  _:
    type: aws:appflow:Flow
    get:
      id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
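For instance, a sketch of looking up an already-provisioned flow in Python (the logical name "existing" and the ID "existing-flow-id" are placeholders):
import pulumi
import pulumi_aws as aws

# Look up an existing flow by its provider-assigned ID; no new resource is created.
existing = aws.appflow.Flow.get("existing", "existing-flow-id")

# Every documented property is then available as an output on the returned resource.
pulumi.export("existing_flow_arn", existing.arn)
The state properties that can be supplied to such a lookup are listed below.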
- Arn string
- Flow's ARN.
- Description string
- Description of the flow you want to create.
- DestinationFlowConfigs List<FlowDestinationFlowConfig> 
- A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
- FlowStatus string
- The current status of the flow.
- KmsArn string
- ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
- MetadataCatalogConfig FlowMetadataCatalogConfig 
- A Catalog that determines the configuration that Amazon AppFlow uses when it catalogs the data that’s transferred by the associated flow. When Amazon AppFlow catalogs the data from a flow, it stores metadata in a data catalog.
- Name string
- Name of the flow.
- SourceFlowConfig FlowSourceFlowConfig 
- The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
- Tags Dictionary<string, string>
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- TagsAll Dictionary<string, string>
- Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- Tasks List<FlowTask> 
- A Task that Amazon AppFlow performs while transferring the data in the flow run.
- TriggerConfig FlowTriggerConfig 
- A Trigger that determines how and when the flow runs.
- Arn string
- Flow's ARN.
- Description string
- Description of the flow you want to create.
- DestinationFlowConfigs []FlowDestinationFlowConfigArgs 
- A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
- FlowStatus string
- The current status of the flow.
- KmsArn string
- ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
- MetadataCatalogConfig FlowMetadataCatalogConfigArgs 
- A Catalog that determines the configuration that Amazon AppFlow uses when it catalogs the data that’s transferred by the associated flow. When Amazon AppFlow catalogs the data from a flow, it stores metadata in a data catalog.
- Name string
- Name of the flow.
- SourceFlowConfig FlowSourceFlowConfigArgs 
- The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
- Tags map[string]string
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- TagsAll map[string]string
- Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- Tasks []FlowTaskArgs 
- A Task that Amazon AppFlow performs while transferring the data in the flow run.
- TriggerConfig FlowTriggerConfigArgs 
- A Trigger that determines how and when the flow runs.
- arn String
- Flow's ARN.
- description String
- Description of the flow you want to create.
- destinationFlowConfigs List<FlowDestinationFlowConfig> 
- A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
- flowStatus String
- The current status of the flow.
- kmsArn String
- ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
- metadataCatalogConfig FlowMetadataCatalogConfig 
- A Catalog that determines the configuration that Amazon AppFlow uses when it catalogs the data that’s transferred by the associated flow. When Amazon AppFlow catalogs the data from a flow, it stores metadata in a data catalog.
- name String
- Name of the flow.
- sourceFlowConfig FlowSourceFlowConfig 
- The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
- tags Map<String,String>
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- tagsAll Map<String,String>
- Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- tasks List<FlowTask> 
- A Task that Amazon AppFlow performs while transferring the data in the flow run.
- triggerConfig FlowTriggerConfig 
- A Trigger that determines how and when the flow runs.
- arn string
- Flow's ARN.
- description string
- Description of the flow you want to create.
- destinationFlowConfigs FlowDestinationFlowConfig[] 
- A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
- flowStatus string
- The current status of the flow.
- kmsArn string
- ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
- metadataCatalogConfig FlowMetadataCatalogConfig
- A Catalog that determines the configuration that Amazon AppFlow uses when it catalogs the data that’s transferred by the associated flow. When Amazon AppFlow catalogs the data from a flow, it stores metadata in a data catalog.
- name string
- Name of the flow.
- sourceFlowConfig FlowSourceFlowConfig
- The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
- tags {[key: string]: string}
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- tagsAll {[key: string]: string}
- Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- tasks FlowTask[]
- A Task that Amazon AppFlow performs while transferring the data in the flow run.
- triggerConfig FlowTriggerConfig
- A Trigger that determines how and when the flow runs.
- arn str
- Flow's ARN.
- description str
- Description of the flow you want to create.
- destination_flow_configs Sequence[FlowDestinationFlowConfigArgs]
- A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
- flow_status str
- The current status of the flow.
- kms_arn str
- ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
- metadata_catalog_config FlowMetadataCatalogConfigArgs
- A Catalog that determines the configuration that Amazon AppFlow uses when it catalogs the data that’s transferred by the associated flow. When Amazon AppFlow catalogs the data from a flow, it stores metadata in a data catalog.
- name str
- Name of the flow.
- source_flow_config FlowSourceFlowConfigArgs
- The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
- tags Mapping[str, str]
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- tags_all Mapping[str, str]
- Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- tasks Sequence[FlowTaskArgs]
- A Task that Amazon AppFlow performs while transferring the data in the flow run.
- trigger_config FlowTriggerConfigArgs
- A Trigger that determines how and when the flow runs.
- arn String
- Flow's ARN.
- description String
- Description of the flow you want to create.
- destinationFlowConfigs List<Property Map>
- A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
- flowStatus String
- The current status of the flow.
- kmsArn String
- ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
- metadataCatalogConfig Property Map
- A Catalog that determines the configuration that Amazon AppFlow uses when it catalogs the data that’s transferred by the associated flow. When Amazon AppFlow catalogs the data from a flow, it stores metadata in a data catalog.
- name String
- Name of the flow.
- sourceFlowConfig Property Map
- The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
- tags Map<String>
- Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- tagsAll Map<String>
- Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- tasks List<Property Map>
- A Task that Amazon AppFlow performs while transferring the data in the flow run.
- triggerConfig Property Map
- A Trigger that determines how and when the flow runs.
Supporting Types
FlowDestinationFlowConfig, FlowDestinationFlowConfigArgs        
- ConnectorType string
- Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
- DestinationConnectorProperties FlowDestinationFlowConfigDestinationConnectorProperties
- This stores the information that is required to query a particular connector. See Destination Connector Properties for more information.
- ApiVersion string
- API version that the destination connector uses.
- ConnectorProfileName string
- Name of the connector profile. This name must be unique for each connector profile in the AWS account.
- ConnectorType string
- Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
- DestinationConnectorProperties FlowDestinationFlowConfigDestinationConnectorProperties
- This stores the information that is required to query a particular connector. See Destination Connector Properties for more information.
- ApiVersion string
- API version that the destination connector uses.
- ConnectorProfileName string
- Name of the connector profile. This name must be unique for each connector profile in the AWS account.
- connectorType String
- Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
- destinationConnectorProperties FlowDestinationFlowConfigDestinationConnectorProperties
- This stores the information that is required to query a particular connector. See Destination Connector Properties for more information.
- apiVersion String
- API version that the destination connector uses.
- connectorProfileName String
- Name of the connector profile. This name must be unique for each connector profile in the AWS account.
- connectorType string
- Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
- destinationConnectorProperties FlowDestinationFlowConfigDestinationConnectorProperties
- This stores the information that is required to query a particular connector. See Destination Connector Properties for more information.
- apiVersion string
- API version that the destination connector uses.
- connectorProfileName string
- Name of the connector profile. This name must be unique for each connector profile in the AWS account.
- connector_type str
- Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
- destination_connector_properties FlowDestinationFlowConfigDestinationConnectorProperties
- This stores the information that is required to query a particular connector. See Destination Connector Properties for more information.
- api_version str
- API version that the destination connector uses.
- connector_profile_name str
- Name of the connector profile. This name must be unique for each connector profile in the AWS account.
- connectorType String
- Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
- destinationConnectorProperties Property Map
- This stores the information that is required to query a particular connector. See Destination Connector Properties for more information.
- apiVersion String
- API version that the destination connector uses.
- connectorProfileName String
- Name of the connector profile. This name must be unique for each connector profile in the AWS account.
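For orientation, here is a minimal TypeScript sketch of a single destination flow config entry. The connector profile name and the Salesforce "Account" object are placeholder values, and the aws.types.input type path is assumed from the usual @pulumi/aws SDK layout.

import * as aws from "@pulumi/aws";

// One entry for `destinationFlowConfigs`, targeting Salesforce.
// The connector profile name and the "Account" object are placeholder values.
const salesforceDestination: aws.types.input.appflow.FlowDestinationFlowConfig = {
    connectorType: "Salesforce",
    connectorProfileName: "example-salesforce-profile",
    destinationConnectorProperties: {
        salesforce: {
            object: "Account",
            idFieldNames: ["Id"],
            writeOperationType: "UPSERT",
        },
    },
};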
FlowDestinationFlowConfigDestinationConnectorProperties, FlowDestinationFlowConfigDestinationConnectorPropertiesArgs              
- CustomConnector FlowDestination Flow Config Destination Connector Properties Custom Connector 
- Properties that are required to query the custom Connector. See Custom Connector Destination Properties for more details.
- CustomerProfiles FlowDestination Flow Config Destination Connector Properties Customer Profiles 
- Properties that are required to query Amazon Connect Customer Profiles. See Customer Profiles Destination Properties for more details.
- EventBridge FlowDestination Flow Config Destination Connector Properties Event Bridge 
- Properties that are required to query Amazon EventBridge. See Generic Destination Properties for more details.
- Honeycode
FlowDestination Flow Config Destination Connector Properties Honeycode 
- Properties that are required to query Amazon Honeycode. See Generic Destination Properties for more details.
- LookoutMetrics FlowDestination Flow Config Destination Connector Properties Lookout Metrics 
- Marketo
FlowDestination Flow Config Destination Connector Properties Marketo 
- Properties that are required to query Marketo. See Generic Destination Properties for more details.
- Redshift
FlowDestination Flow Config Destination Connector Properties Redshift 
- Properties that are required to query Amazon Redshift. See Redshift Destination Properties for more details.
- S3
FlowDestination Flow Config Destination Connector Properties S3 
- Properties that are required to query Amazon S3. See S3 Destination Properties for more details.
- Salesforce
FlowDestination Flow Config Destination Connector Properties Salesforce 
- Properties that are required to query Salesforce. See Salesforce Destination Properties for more details.
- SapoData FlowDestination Flow Config Destination Connector Properties Sapo Data 
- Properties that are required to query SAPOData. See SAPOData Destination Properties for more details.
- Snowflake
FlowDestination Flow Config Destination Connector Properties Snowflake 
- Properties that are required to query Snowflake. See Snowflake Destination Properties for more details.
- Upsolver
FlowDestination Flow Config Destination Connector Properties Upsolver 
- Properties that are required to query Upsolver. See Upsolver Destination Properties for more details.
- Zendesk
FlowDestination Flow Config Destination Connector Properties Zendesk 
- Properties that are required to query Zendesk. See Zendesk Destination Properties for more details.
- CustomConnector FlowDestination Flow Config Destination Connector Properties Custom Connector 
- Properties that are required to query the custom Connector. See Custom Connector Destination Properties for more details.
- CustomerProfiles FlowDestination Flow Config Destination Connector Properties Customer Profiles 
- Properties that are required to query Amazon Connect Customer Profiles. See Customer Profiles Destination Properties for more details.
- EventBridge FlowDestination Flow Config Destination Connector Properties Event Bridge 
- Properties that are required to query Amazon EventBridge. See Generic Destination Properties for more details.
- Honeycode
FlowDestination Flow Config Destination Connector Properties Honeycode 
- Properties that are required to query Amazon Honeycode. See Generic Destination Properties for more details.
- LookoutMetrics FlowDestination Flow Config Destination Connector Properties Lookout Metrics 
- Marketo
FlowDestination Flow Config Destination Connector Properties Marketo 
- Properties that are required to query Marketo. See Generic Destination Properties for more details.
- Redshift
FlowDestination Flow Config Destination Connector Properties Redshift 
- Properties that are required to query Amazon Redshift. See Redshift Destination Properties for more details.
- S3
FlowDestination Flow Config Destination Connector Properties S3 
- Properties that are required to query Amazon S3. See S3 Destination Properties for more details.
- Salesforce
FlowDestination Flow Config Destination Connector Properties Salesforce 
- Properties that are required to query Salesforce. See Salesforce Destination Properties for more details.
- SapoData FlowDestination Flow Config Destination Connector Properties Sapo Data 
- Properties that are required to query SAPOData. See SAPOData Destination Properties for more details.
- Snowflake
FlowDestination Flow Config Destination Connector Properties Snowflake 
- Properties that are required to query Snowflake. See Snowflake Destination Properties for more details.
- Upsolver
FlowDestination Flow Config Destination Connector Properties Upsolver 
- Properties that are required to query Upsolver. See Upsolver Destination Properties for more details.
- Zendesk
FlowDestination Flow Config Destination Connector Properties Zendesk 
- Properties that are required to query Zendesk. See Zendesk Destination Properties for more details.
- customConnector FlowDestination Flow Config Destination Connector Properties Custom Connector 
- Properties that are required to query the custom Connector. See Custom Connector Destination Properties for more details.
- customerProfiles FlowDestination Flow Config Destination Connector Properties Customer Profiles 
- Properties that are required to query Amazon Connect Customer Profiles. See Customer Profiles Destination Properties for more details.
- eventBridge FlowDestination Flow Config Destination Connector Properties Event Bridge 
- Properties that are required to query Amazon EventBridge. See Generic Destination Properties for more details.
- honeycode
FlowDestination Flow Config Destination Connector Properties Honeycode 
- Properties that are required to query Amazon Honeycode. See Generic Destination Properties for more details.
- lookoutMetrics FlowDestination Flow Config Destination Connector Properties Lookout Metrics 
- marketo
FlowDestination Flow Config Destination Connector Properties Marketo 
- Properties that are required to query Marketo. See Generic Destination Properties for more details.
- redshift
FlowDestination Flow Config Destination Connector Properties Redshift 
- Properties that are required to query Amazon Redshift. See Redshift Destination Properties for more details.
- s3
FlowDestination Flow Config Destination Connector Properties S3 
- Properties that are required to query Amazon S3. See S3 Destination Properties for more details.
- salesforce
FlowDestination Flow Config Destination Connector Properties Salesforce 
- Properties that are required to query Salesforce. See Salesforce Destination Properties for more details.
- sapoData FlowDestination Flow Config Destination Connector Properties Sapo Data 
- Properties that are required to query SAPOData. See SAPOData Destination Properties for more details.
- snowflake
FlowDestination Flow Config Destination Connector Properties Snowflake 
- Properties that are required to query Snowflake. See Snowflake Destination Properties for more details.
- upsolver
FlowDestination Flow Config Destination Connector Properties Upsolver 
- Properties that are required to query Upsolver. See Upsolver Destination Properties for more details.
- zendesk
FlowDestination Flow Config Destination Connector Properties Zendesk 
- Properties that are required to query Zendesk. See Zendesk Destination Properties for more details.
- customConnector FlowDestination Flow Config Destination Connector Properties Custom Connector 
- Properties that are required to query the custom Connector. See Custom Connector Destination Properties for more details.
- customerProfiles FlowDestination Flow Config Destination Connector Properties Customer Profiles 
- Properties that are required to query Amazon Connect Customer Profiles. See Customer Profiles Destination Properties for more details.
- eventBridge FlowDestination Flow Config Destination Connector Properties Event Bridge 
- Properties that are required to query Amazon EventBridge. See Generic Destination Properties for more details.
- honeycode
FlowDestination Flow Config Destination Connector Properties Honeycode 
- Properties that are required to query Amazon Honeycode. See Generic Destination Properties for more details.
- lookoutMetrics FlowDestination Flow Config Destination Connector Properties Lookout Metrics 
- marketo
FlowDestination Flow Config Destination Connector Properties Marketo 
- Properties that are required to query Marketo. See Generic Destination Properties for more details.
- redshift
FlowDestination Flow Config Destination Connector Properties Redshift 
- Properties that are required to query Amazon Redshift. See Redshift Destination Properties for more details.
- s3
FlowDestination Flow Config Destination Connector Properties S3 
- Properties that are required to query Amazon S3. See S3 Destination Properties for more details.
- salesforce
FlowDestination Flow Config Destination Connector Properties Salesforce 
- Properties that are required to query Salesforce. See Salesforce Destination Properties for more details.
- sapoData FlowDestination Flow Config Destination Connector Properties Sapo Data 
- Properties that are required to query SAPOData. See SAPOData Destination Properties for more details.
- snowflake
FlowDestination Flow Config Destination Connector Properties Snowflake 
- Properties that are required to query Snowflake. See Snowflake Destination Properties for more details.
- upsolver
FlowDestination Flow Config Destination Connector Properties Upsolver 
- Properties that are required to query Upsolver. See Upsolver Destination Properties for more details.
- zendesk
FlowDestination Flow Config Destination Connector Properties Zendesk 
- Properties that are required to query Zendesk. See Zendesk Destination Properties for more details.
- custom_connector FlowDestination Flow Config Destination Connector Properties Custom Connector 
- Properties that are required to query the custom Connector. See Custom Connector Destination Properties for more details.
- customer_profiles FlowDestination Flow Config Destination Connector Properties Customer Profiles 
- Properties that are required to query Amazon Connect Customer Profiles. See Customer Profiles Destination Properties for more details.
- event_bridge FlowDestination Flow Config Destination Connector Properties Event Bridge 
- Properties that are required to query Amazon EventBridge. See Generic Destination Properties for more details.
- honeycode
FlowDestination Flow Config Destination Connector Properties Honeycode 
- Properties that are required to query Amazon Honeycode. See Generic Destination Properties for more details.
- lookout_metrics FlowDestination Flow Config Destination Connector Properties Lookout Metrics 
- marketo
FlowDestination Flow Config Destination Connector Properties Marketo 
- Properties that are required to query Marketo. See Generic Destination Properties for more details.
- redshift
FlowDestination Flow Config Destination Connector Properties Redshift 
- Properties that are required to query Amazon Redshift. See Redshift Destination Properties for more details.
- s3
FlowDestination Flow Config Destination Connector Properties S3 
- Properties that are required to query Amazon S3. See S3 Destination Properties for more details.
- salesforce
FlowDestination Flow Config Destination Connector Properties Salesforce 
- Properties that are required to query Salesforce. See Salesforce Destination Properties for more details.
- sapo_data FlowDestination Flow Config Destination Connector Properties Sapo Data 
- Properties that are required to query SAPOData. See SAPOData Destination Properties for more details.
- snowflake
FlowDestination Flow Config Destination Connector Properties Snowflake 
- Properties that are required to query Snowflake. See Snowflake Destination Properties for more details.
- upsolver
FlowDestination Flow Config Destination Connector Properties Upsolver 
- Properties that are required to query Upsolver. See Upsolver Destination Properties for more details.
- zendesk
FlowDestination Flow Config Destination Connector Properties Zendesk 
- Properties that are required to query Zendesk. See Zendesk Destination Properties for more details.
- customConnector Property Map
- Properties that are required to query the custom Connector. See Custom Connector Destination Properties for more details.
- customerProfiles Property Map
- Properties that are required to query Amazon Connect Customer Profiles. See Customer Profiles Destination Properties for more details.
- eventBridge Property Map
- Properties that are required to query Amazon EventBridge. See Generic Destination Properties for more details.
- honeycode Property Map
- Properties that are required to query Amazon Honeycode. See Generic Destination Properties for more details.
- lookoutMetrics Property Map
- marketo Property Map
- Properties that are required to query Marketo. See Generic Destination Properties for more details.
- redshift Property Map
- Properties that are required to query Amazon Redshift. See Redshift Destination Properties for more details.
- s3 Property Map
- Properties that are required to query Amazon S3. See S3 Destination Properties for more details.
- salesforce Property Map
- Properties that are required to query Salesforce. See Salesforce Destination Properties for more details.
- sapoData Property Map
- Properties that are required to query SAPOData. See SAPOData Destination Properties for more details.
- snowflake Property Map
- Properties that are required to query Snowflake. See Snowflake Destination Properties for more details.
- upsolver Property Map
- Properties that are required to query Upsolver. See Upsolver Destination Properties for more details.
- zendesk Property Map
- Properties that are required to query Zendesk. See Zendesk Destination Properties for more details.
FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnector, FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorArgs                  
- EntityName string
- CustomProperties Dictionary<string, string>
- ErrorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfig
- IdFieldNames List<string>
- WriteOperationType string
- EntityName string
- CustomProperties map[string]string
- ErrorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfig
- IdFieldNames []string
- WriteOperationType string
- entityName String
- customProperties Map<String,String>
- errorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfig
- idFieldNames List<String>
- writeOperationType String
- entityName string
- customProperties {[key: string]: string}
- errorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfig
- idFieldNames string[]
- writeOperationType string
- entityName String
- customProperties Map<String>
- errorHandlingConfig Property Map
- idFieldNames List<String>
- writeOperationType String
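A hedged TypeScript sketch of these custom connector destination properties; the entity name, id field, and custom property key/value are placeholders for illustration.

import * as aws from "@pulumi/aws";

// Custom connector destination properties; entity name, id field, and custom
// properties are placeholder values.
const customConnectorDestination: aws.types.input.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnector = {
    entityName: "exampleEntity",
    idFieldNames: ["id"],
    writeOperationType: "UPSERT",
    customProperties: {
        exampleProperty: "exampleValue",
    },
};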
FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfigArgs                        
- BucketName string
- Name of the Amazon S3 bucket.
- BucketPrefix string
- Amazon S3 bucket prefix.
- FailOnFirstDestinationError bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- BucketName string
- Name of the Amazon S3 bucket.
- BucketPrefix string
- Amazon S3 bucket prefix.
- FailOnFirstDestinationError bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName String
- Name of the Amazon S3 bucket.
- bucketPrefix String
- Amazon S3 bucket prefix.
- failOnFirstDestinationError Boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName string
- Name of the Amazon S3 bucket.
- bucketPrefix string
- Amazon S3 bucket prefix.
- failOnFirstDestinationError boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket_name str
- Name of the Amazon S3 bucket.
- bucket_prefix str
- Amazon S3 bucket prefix.
- fail_on_first_destination_error bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName String
- Name of the Amazon S3 bucket.
- bucketPrefix String
- Amazon S3 bucket prefix.
- failOnFirstDestinationError Boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
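This error handling shape recurs on most destination connector properties. A small sketch with placeholder bucket values, assuming the usual aws.types.input paths:

import * as aws from "@pulumi/aws";

// Error handling for a destination: points at an S3 bucket/prefix for error output
// (placeholder names), and failOnFirstDestinationError controls whether the flow
// run stops at the first failed record.
const errorHandling: aws.types.input.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfig = {
    bucketName: "example-error-bucket",
    bucketPrefix: "appflow-errors",
    failOnFirstDestinationError: false,
};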
FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfiles, FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfilesArgs                  
- DomainName string
- Unique name of the Amazon Connect Customer Profiles domain.
- ObjectTypeName string
- Object specified in the Amazon Connect Customer Profiles flow destination.
- DomainName string
- Unique name of the Amazon Connect Customer Profiles domain.
- ObjectTypeName string
- Object specified in the Amazon Connect Customer Profiles flow destination.
- domainName String
- Unique name of the Amazon Connect Customer Profiles domain.
- objectTypeName String
- Object specified in the Amazon Connect Customer Profiles flow destination.
- domainName string
- Unique name of the Amazon Connect Customer Profiles domain.
- objectTypeName string
- Object specified in the Amazon Connect Customer Profiles flow destination.
- domain_name str
- Unique name of the Amazon Connect Customer Profiles domain.
- object_type_name str
- Object specified in the Amazon Connect Customer Profiles flow destination.
- domainName String
- Unique name of the Amazon Connect Customer Profiles domain.
- objectTypeName String
- Object specified in the Amazon Connect Customer Profiles flow destination.
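A minimal sketch of the Customer Profiles destination; both the domain name and the object type name are placeholders.

import * as aws from "@pulumi/aws";

// Amazon Connect Customer Profiles destination; both values are placeholders.
const customerProfilesDestination: aws.types.input.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfiles = {
    domainName: "example-customer-profiles-domain",
    objectTypeName: "CustomerProfile",
};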
FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridge, FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeArgs                  
FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfigArgs                        
- BucketName string
- Name of the Amazon S3 bucket.
- BucketPrefix string
- Amazon S3 bucket prefix.
- FailOnFirstDestinationError bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- BucketName string
- Name of the Amazon S3 bucket.
- BucketPrefix string
- Amazon S3 bucket prefix.
- FailOnFirstDestinationError bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName String
- Name of the Amazon S3 bucket.
- bucketPrefix String
- Amazon S3 bucket prefix.
- failOnFirstDestinationError Boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName string
- Name of the Amazon S3 bucket.
- bucketPrefix string
- Amazon S3 bucket prefix.
- failOnFirstDestinationError boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket_name str
- Name of the Amazon S3 bucket.
- bucket_prefix str
- Amazon S3 bucket prefix.
- fail_on_first_destination_error bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName String
- Name of the Amazon S3 bucket.
- bucketPrefix String
- Amazon S3 bucket prefix.
- failOnFirstDestinationError Boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycode, FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeArgs                
FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfigArgs                      
- BucketName string
- Name of the Amazon S3 bucket.
- BucketPrefix string
- Amazon S3 bucket prefix.
- FailOnFirstDestinationError bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- BucketName string
- Name of the Amazon S3 bucket.
- BucketPrefix string
- Amazon S3 bucket prefix.
- FailOnFirstDestinationError bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName String
- Name of the Amazon S3 bucket.
- bucketPrefix String
- Amazon S3 bucket prefix.
- failOnFirstDestinationError Boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName string
- Name of the Amazon S3 bucket.
- bucketPrefix string
- Amazon S3 bucket prefix.
- failOnFirstDestinationError boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket_name str
- Name of the Amazon S3 bucket.
- bucket_prefix str
- Amazon S3 bucket prefix.
- fail_on_first_destination_error bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName String
- Name of the Amazon S3 bucket.
- bucketPrefix String
- Amazon S3 bucket prefix.
- failOnFirstDestinationError Boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
FlowDestinationFlowConfigDestinationConnectorPropertiesMarketo, FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoArgs                
FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfigArgs                      
- BucketName string
- Name of the Amazon S3 bucket.
- BucketPrefix string
- Amazon S3 bucket prefix.
- FailOnFirstDestinationError bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- BucketName string
- Name of the Amazon S3 bucket.
- BucketPrefix string
- Amazon S3 bucket prefix.
- FailOnFirstDestinationError bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName String
- Name of the Amazon S3 bucket.
- bucketPrefix String
- Amazon S3 bucket prefix.
- failOnFirstDestinationError Boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName string
- Name of the Amazon S3 bucket.
- bucketPrefix string
- Amazon S3 bucket prefix.
- failOnFirstDestinationError boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket_name str
- Name of the Amazon S3 bucket.
- bucket_prefix str
- Amazon S3 bucket prefix.
- fail_on_first_destination_error bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName String
- Name of the Amazon S3 bucket.
- bucketPrefix String
- Amazon S3 bucket prefix.
- failOnFirstDestinationError Boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
FlowDestinationFlowConfigDestinationConnectorPropertiesRedshift, FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftArgs                
- intermediateBucketName String
- object String
- bucketPrefix String
- errorHandlingConfig Property Map
FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfigArgs                      
- BucketName string
- Name of the Amazon S3 bucket.
- BucketPrefix string
- Amazon S3 bucket prefix.
- FailOnFirstDestinationError bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- BucketName string
- Name of the Amazon S3 bucket.
- BucketPrefix string
- Amazon S3 bucket prefix.
- FailOnFirstDestinationError bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName String
- Name of the Amazon S3 bucket.
- bucketPrefix String
- Amazon S3 bucket prefix.
- failOnFirstDestinationError Boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName string
- Name of the Amazon S3 bucket.
- bucketPrefix string
- Amazon S3 bucket prefix.
- failOnFirstDestinationError boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket_name str
- Name of the Amazon S3 bucket.
- bucket_prefix str
- Amazon S3 bucket prefix.
- fail_on_first_destination_error bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName String
- Name of the Amazon S3 bucket.
- bucketPrefix String
- Amazon S3 bucket prefix.
- failOnFirstDestinationError Boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
FlowDestinationFlowConfigDestinationConnectorPropertiesS3, FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args                
- bucketName String
- bucketPrefix String
- s3OutputFormatConfig Property Map
FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs                    
- AggregationConfig FlowDestination Flow Config Destination Connector Properties S3S3Output Format Config Aggregation Config 
- Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
- FileType string
- File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
- PrefixConfig FlowDestination Flow Config Destination Connector Properties S3S3Output Format Config Prefix Config 
- Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
- PreserveSourceDataTyping bool
- Whether the data types from the source system need to be preserved (Only valid for Parquet file type)
- AggregationConfig FlowDestination Flow Config Destination Connector Properties S3S3Output Format Config Aggregation Config 
- Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
- FileType string
- File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
- PrefixConfig FlowDestination Flow Config Destination Connector Properties S3S3Output Format Config Prefix Config 
- Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
- PreserveSourceDataTyping bool
- Whether the data types from the source system need to be preserved (Only valid for Parquet file type)
- aggregationConfig FlowDestination Flow Config Destination Connector Properties S3S3Output Format Config Aggregation Config 
- Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
- fileType String
- File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
- prefixConfig FlowDestination Flow Config Destination Connector Properties S3S3Output Format Config Prefix Config 
- Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
- preserveSourceDataTyping Boolean
- Whether the data types from the source system need to be preserved (Only valid for Parquet file type)
- aggregationConfig FlowDestination Flow Config Destination Connector Properties S3S3Output Format Config Aggregation Config 
- Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
- fileType string
- File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
- prefixConfig FlowDestination Flow Config Destination Connector Properties S3S3Output Format Config Prefix Config 
- Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
- preserveSourceDataTyping boolean
- Whether the data types from the source system need to be preserved (Only valid for Parquet file type)
- aggregation_config FlowDestination Flow Config Destination Connector Properties S3S3Output Format Config Aggregation Config 
- Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
- file_type str
- File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
- prefix_config FlowDestination Flow Config Destination Connector Properties S3S3Output Format Config Prefix Config 
- Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
- preserve_source_data_typing bool
- Whether the data types from the source system need to be preserved (Only valid for Parquet file type)
- aggregationConfig Property Map
- Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
- fileType String
- File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
- prefixConfig Property Map
- Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
- preserveSourceDataTyping Boolean
- Whether the data types from the source system need to be preserved (Only valid for Parquet file type)
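Putting these output format options together, a TypeScript sketch with illustrative values only (file size, formats, and the aws.types.input path are assumptions for the example):

import * as aws from "@pulumi/aws";

// S3 output format: Parquet files with source data types preserved, aggregated
// toward ~128 MB files, under day-based path prefixes. All values are illustrative.
const s3OutputFormat: aws.types.input.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfig = {
    fileType: "PARQUET",
    preserveSourceDataTyping: true,
    aggregationConfig: {
        aggregationType: "SingleFile",
        targetFileSize: 128,
    },
    prefixConfig: {
        prefixType: "PATH",
        prefixFormat: "DAY",
    },
};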
FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfigArgs                        
- AggregationType string
- Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are None and SingleFile.
- TargetFileSize int
- The desired file size, in MB, for each output file that Amazon AppFlow writes to the flow destination. Integer value.
- AggregationType string
- Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are None and SingleFile.
- TargetFileSize int
- The desired file size, in MB, for each output file that Amazon AppFlow writes to the flow destination. Integer value.
- aggregationType String
- Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are None and SingleFile.
- targetFileSize Integer
- The desired file size, in MB, for each output file that Amazon AppFlow writes to the flow destination. Integer value.
- aggregationType string
- Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are None and SingleFile.
- targetFileSize number
- The desired file size, in MB, for each output file that Amazon AppFlow writes to the flow destination. Integer value.
- aggregation_type str
- Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are None and SingleFile.
- target_file_size int
- The desired file size, in MB, for each output file that Amazon AppFlow writes to the flow destination. Integer value.
- aggregationType String
- Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are None and SingleFile.
- targetFileSize Number
- The desired file size, in MB, for each output file that Amazon AppFlow writes to the flow destination. Integer value.
FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs                        
- PrefixFormat string
- Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
- PrefixHierarchies List<string>
- Determines whether the destination file path includes either or both of the selected elements. Valid values are EXECUTION_ID and SCHEMA_VERSION.
- PrefixType string
- Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
- PrefixFormat string
- Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
- PrefixHierarchies []string
- Determines whether the destination file path includes either or both of the selected elements. Valid values are EXECUTION_ID and SCHEMA_VERSION.
- PrefixType string
- Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
- prefixFormat String
- Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
- prefixHierarchies List<String>
- Determines whether the destination file path includes either or both of the selected elements. Valid values are EXECUTION_ID and SCHEMA_VERSION.
- prefixType String
- Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
- prefixFormat string
- Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
- prefixHierarchies string[]
- Determines whether the destination file path includes either or both of the selected elements. Valid values are EXECUTION_ID and SCHEMA_VERSION.
- prefixType string
- Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
- prefix_format str
- Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
- prefix_hierarchies Sequence[str]
- Determines whether the destination file path includes either or both of the selected elements. Valid values are EXECUTION_ID and SCHEMA_VERSION.
- prefix_type str
- Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
- prefixFormat String
- Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
- prefixHierarchies List<String>
- Determines whether the destination file path includes either or both of the selected elements. Valid values are EXECUTION_ID and SCHEMA_VERSION.
- prefixType String
- Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
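A short sketch of a prefix config that partitions output by day, applies the prefix to both the folder path and the file name, and includes the execution ID in the hierarchy; all values are illustrative.

import * as aws from "@pulumi/aws";

// Prefix config: day-based prefixes on both path and file name, with the flow
// execution ID added to the hierarchy. Values are illustrative.
const prefixConfig: aws.types.input.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfig = {
    prefixType: "PATH_AND_FILENAME",
    prefixFormat: "DAY",
    prefixHierarchies: ["EXECUTION_ID"],
};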
FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforce, FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceArgs                
- object String
- errorHandlingConfig Property Map
- idFieldNames List<String>
- writeOperationType String
FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfigArgs                      
- BucketName string
- Name of the Amazon S3 bucket.
- BucketPrefix string
- Amazon S3 bucket prefix.
- FailOnFirstDestinationError bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- BucketName string
- Name of the Amazon S3 bucket.
- BucketPrefix string
- Amazon S3 bucket prefix.
- FailOnFirstDestinationError bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName String
- Name of the Amazon S3 bucket.
- bucketPrefix String
- Amazon S3 bucket prefix.
- failOnFirstDestinationError Boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName string
- Name of the Amazon S3 bucket.
- bucketPrefix string
- Amazon S3 bucket prefix.
- failOnFirstDestinationError boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket_name str
- Name of the Amazon S3 bucket.
- bucket_prefix str
- Amazon S3 bucket prefix.
- fail_on_first_destination_error bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName String
- Name of the Amazon S3 bucket.
- bucketPrefix String
- Amazon S3 bucket prefix.
- failOnFirstDestinationError Boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
FlowDestinationFlowConfigDestinationConnectorPropertiesSapoData, FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataArgs                  
- ObjectPath string
- ErrorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfig
- IdFieldNames List<string>
- SuccessResponseHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfig
- Determines how Amazon AppFlow handles the success response that it gets from the connector after placing data. See Success Response Handling Config for more details.
- WriteOperationType string
- ObjectPath string
- ErrorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfig
- IdFieldNames []string
- SuccessResponseHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfig
- Determines how Amazon AppFlow handles the success response that it gets from the connector after placing data. See Success Response Handling Config for more details.
- WriteOperationType string
- objectPath String
- errorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfig
- idFieldNames List<String>
- successResponseHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfig
- Determines how Amazon AppFlow handles the success response that it gets from the connector after placing data. See Success Response Handling Config for more details.
- writeOperationType String
- objectPath string
- errorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfig
- idFieldNames string[]
- successResponseHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfig
- Determines how Amazon AppFlow handles the success response that it gets from the connector after placing data. See Success Response Handling Config for more details.
- writeOperationType string
- object_path str
- error_handling_config FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfig
- id_field_names Sequence[str]
- success_response_handling_config FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfig
- Determines how Amazon AppFlow handles the success response that it gets from the connector after placing data. See Success Response Handling Config for more details.
- write_operation_type str
- objectPath String
- errorHandlingConfig Property Map
- idFieldNames List<String>
- successResponseHandlingConfig Property Map
- Determines how Amazon AppFlow handles the success response that it gets from the connector after placing data. See Success Response Handling Config for more details.
- writeOperationType String
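A sketch of SAPOData destination properties with both success and error response handling; the OData object path, id field, and bucket names are placeholders, not values from this page.

import * as aws from "@pulumi/aws";

// SAPOData destination; object path, id field, and bucket names are placeholders.
const sapoDataDestination: aws.types.input.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoData = {
    objectPath: "/sap/opu/odata/sap/EXAMPLE_SRV/ExampleSet",
    writeOperationType: "UPSERT",
    idFieldNames: ["ExampleId"],
    successResponseHandlingConfig: {
        bucketName: "example-success-bucket",
        bucketPrefix: "sapodata-success",
    },
    errorHandlingConfig: {
        bucketName: "example-error-bucket",
        bucketPrefix: "sapodata-errors",
        failOnFirstDestinationError: true,
    },
};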
FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfigArgs                        
- BucketName string
- Name of the Amazon S3 bucket.
- BucketPrefix string
- Amazon S3 bucket prefix.
- FailOnFirstDestinationError bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- BucketName string
- Name of the Amazon S3 bucket.
- BucketPrefix string
- Amazon S3 bucket prefix.
- FailOnFirstDestinationError bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName String
- Name of the Amazon S3 bucket.
- bucketPrefix String
- Amazon S3 bucket prefix.
- failOnFirstDestinationError Boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName string
- Name of the Amazon S3 bucket.
- bucketPrefix string
- Amazon S3 bucket prefix.
- failOnFirstDestinationError boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket_name str
- Name of the Amazon S3 bucket.
- bucket_prefix str
- Amazon S3 bucket prefix.
- fail_on_first_destination_error bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName String
- Name of the Amazon S3 bucket.
- bucketPrefix String
- Amazon S3 bucket prefix.
- failOnFirstDestinationError Boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfigArgs                          
- BucketName string
- Name of the Amazon S3 bucket.
- BucketPrefix string
- Amazon S3 bucket prefix.
- BucketName string
- Name of the Amazon S3 bucket.
- BucketPrefix string
- Amazon S3 bucket prefix.
- bucketName String
- Name of the Amazon S3 bucket.
- bucketPrefix String
- Amazon S3 bucket prefix.
- bucketName string
- Name of the Amazon S3 bucket.
- bucketPrefix string
- Amazon S3 bucket prefix.
- bucket_name str
- Name of the Amazon S3 bucket.
- bucket_prefix str
- Amazon S3 bucket prefix.
- bucketName String
- Name of the Amazon S3 bucket.
- bucketPrefix String
- Amazon S3 bucket prefix.
FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflake, FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeArgs                
- intermediateBucketName String
- object String
- bucketPrefix String
- errorHandlingConfig Property Map
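A sketch of the Snowflake destination, which stages records in an intermediate S3 bucket before loading them into the named Snowflake object; all names here are placeholders.

import * as aws from "@pulumi/aws";

// Snowflake destination: data is staged in the intermediate S3 bucket before being
// loaded into the named Snowflake object. All names are placeholders.
const snowflakeDestination: aws.types.input.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflake = {
    object: "EXAMPLE_SCHEMA.EXAMPLE_TABLE",
    intermediateBucketName: "example-intermediate-bucket",
    bucketPrefix: "snowflake-staging",
    errorHandlingConfig: {
        bucketName: "example-error-bucket",
        failOnFirstDestinationError: false,
    },
};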
FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfigArgs                      
- BucketName string
- Name of the Amazon S3 bucket.
- BucketPrefix string
- Amazon S3 bucket prefix.
- FailOnFirstDestinationError bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- BucketName string
- Name of the Amazon S3 bucket.
- BucketPrefix string
- Amazon S3 bucket prefix.
- FailOnFirstDestinationError bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName String
- Name of the Amazon S3 bucket.
- bucketPrefix String
- Amazon S3 bucket prefix.
- failOnFirstDestinationError Boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName string
- Name of the Amazon S3 bucket.
- bucketPrefix string
- Amazon S3 bucket prefix.
- failOnFirstDestinationError boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket_name str
- Name of the Amazon S3 bucket.
- bucket_prefix str
- Amazon S3 bucket prefix.
- fail_on_first_destination_error bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName String
- Name of the Amazon S3 bucket.
- bucketPrefix String
- Amazon S3 bucket prefix.
- failOnFirstDestinationError Boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolver, FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverArgs                
- bucketName String
- s3OutputFormatConfig Property Map
- bucketPrefix String
FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigArgs                      
- PrefixConfig FlowDestination Flow Config Destination Connector Properties Upsolver S3Output Format Config Prefix Config 
- Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
- AggregationConfig FlowDestination Flow Config Destination Connector Properties Upsolver S3Output Format Config Aggregation Config 
- Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
- FileType string
- File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
- PrefixConfig FlowDestination Flow Config Destination Connector Properties Upsolver S3Output Format Config Prefix Config 
- Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
- AggregationConfig FlowDestination Flow Config Destination Connector Properties Upsolver S3Output Format Config Aggregation Config 
- Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
- FileType string
- File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
- prefixConfig FlowDestination Flow Config Destination Connector Properties Upsolver S3Output Format Config Prefix Config 
- Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
- aggregationConfig FlowDestination Flow Config Destination Connector Properties Upsolver S3Output Format Config Aggregation Config 
- Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
- fileType String
- File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV,JSON, andPARQUET.
- prefixConfig FlowDestination Flow Config Destination Connector Properties Upsolver S3Output Format Config Prefix Config 
- Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
- aggregationConfig FlowDestination Flow Config Destination Connector Properties Upsolver S3Output Format Config Aggregation Config 
- Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
- fileType string
- File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV,JSON, andPARQUET.
- prefix_config FlowDestination Flow Config Destination Connector Properties Upsolver S3Output Format Config Prefix Config 
- Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
- aggregation_config FlowDestination Flow Config Destination Connector Properties Upsolver S3Output Format Config Aggregation Config 
- Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
- file_type str
- File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV,JSON, andPARQUET.
- prefixConfig Property Map
- Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
- aggregationConfig Property Map
- Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
- fileType String
- File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV,JSON, andPARQUET.
FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfigArgs                          
- AggregationType string
- Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are None and SingleFile.
- AggregationType string
- Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are None and SingleFile.
- aggregationType String
- Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are None and SingleFile.
- aggregationType string
- Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are None and SingleFile.
- aggregation_type str
- Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are None and SingleFile.
- aggregationType String
- Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are None and SingleFile.
FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfigArgs                          
- PrefixType string
- Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
- PrefixFormat string
- Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
- PrefixHierarchies List<string>
- Determines whether the destination file path includes either or both of the selected elements. Valid values are EXECUTION_ID and SCHEMA_VERSION.
- PrefixType string
- Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
- PrefixFormat string
- Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
- PrefixHierarchies []string
- Determines whether the destination file path includes either or both of the selected elements. Valid values are EXECUTION_ID and SCHEMA_VERSION.
- prefixType String
- Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
- prefixFormat String
- Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
- prefixHierarchies List<String>
- Determines whether the destination file path includes either or both of the selected elements. Valid values are EXECUTION_ID and SCHEMA_VERSION.
- prefixType string
- Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
- prefixFormat string
- Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
- prefixHierarchies string[]
- Determines whether the destination file path includes either or both of the selected elements. Valid values are EXECUTION_ID and SCHEMA_VERSION.
- prefix_type str
- Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
- prefix_format str
- Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
- prefix_hierarchies Sequence[str]
- Determines whether the destination file path includes either or both of the selected elements. Valid values are EXECUTION_ID and SCHEMA_VERSION.
- prefixType String
- Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
- prefixFormat String
- Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
- prefixHierarchies List<String>
- Determines whether the destination file path includes either or both of the selected elements. Valid values are EXECUTION_ID and SCHEMA_VERSION.
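The Upsolver destination nests the prefix and aggregation settings above under s3OutputFormatConfig. A minimal TypeScript sketch of one entry for a flow's destinationFlowConfigs list follows; the bucket name and prefix are placeholders, not values taken from this page.
const upsolverDestination = {
    connectorType: "Upsolver",
    destinationConnectorProperties: {
        upsolver: {
            bucketName: "upsolver-appflow-example", // placeholder bucket name
            bucketPrefix: "example",                // placeholder prefix
            s3OutputFormatConfig: {
                fileType: "PARQUET",
                prefixConfig: {
                    prefixType: "PATH_AND_FILENAME",
                    prefixFormat: "DAY",
                },
                aggregationConfig: {
                    aggregationType: "SingleFile",
                },
            },
        },
    },
};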
FlowDestinationFlowConfigDestinationConnectorPropertiesZendesk, FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskArgs                
- object String
- errorHandlingConfig Property Map
- idFieldNames List<String>
- writeOperationType String
FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfigArgs                      
- BucketName string
- Name of the Amazon S3 bucket.
- BucketPrefix string
- Amazon S3 bucket prefix.
- FailOnFirstDestinationError bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- BucketName string
- Name of the Amazon S3 bucket.
- BucketPrefix string
- Amazon S3 bucket prefix.
- FailOnFirstDestinationError bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName String
- Name of the Amazon S3 bucket.
- bucketPrefix String
- Amazon S3 bucket prefix.
- failOnFirstDestinationError Boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName string
- Name of the Amazon S3 bucket.
- bucketPrefix string
- Amazon S3 bucket prefix.
- failOnFirstDestinationError boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucket_name str
- Name of the Amazon S3 bucket.
- bucket_prefix str
- Amazon S3 bucket prefix.
- fail_on_first_destination_error bool
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
- bucketName String
- Name of the Amazon S3 bucket.
- bucketPrefix String
- Amazon S3 bucket prefix.
- failOnFirstDestinationError Boolean
- If the flow should fail after the first instance of a failure when attempting to place data in the destination.
FlowMetadataCatalogConfig, FlowMetadataCatalogConfigArgs        
FlowMetadataCatalogConfigGlueDataCatalog, FlowMetadataCatalogConfigGlueDataCatalogArgs              
- DatabaseName string
- The name of an existing Glue database to store the metadata tables that Amazon AppFlow creates.
- RoleArn string
- The ARN of an IAM role that grants AppFlow the permissions it needs to create Data Catalog tables, databases, and partitions.
- TablePrefix string
- A naming prefix for each Data Catalog table that Amazon AppFlow creates
- DatabaseName string
- The name of an existing Glue database to store the metadata tables that Amazon AppFlow creates.
- RoleArn string
- The ARN of an IAM role that grants AppFlow the permissions it needs to create Data Catalog tables, databases, and partitions.
- TablePrefix string
- A naming prefix for each Data Catalog table that Amazon AppFlow creates
- databaseName String
- The name of an existing Glue database to store the metadata tables that Amazon AppFlow creates.
- roleArn String
- The ARN of an IAM role that grants AppFlow the permissions it needs to create Data Catalog tables, databases, and partitions.
- tablePrefix String
- A naming prefix for each Data Catalog table that Amazon AppFlow creates
- databaseName string
- The name of an existing Glue database to store the metadata tables that Amazon AppFlow creates.
- roleArn string
- The ARN of an IAM role that grants AppFlow the permissions it needs to create Data Catalog tables, databases, and partitions.
- tablePrefix string
- A naming prefix for each Data Catalog table that Amazon AppFlow creates
- database_name str
- The name of an existing Glue database to store the metadata tables that Amazon AppFlow creates.
- role_arn str
- The ARN of an IAM role that grants AppFlow the permissions it needs to create Data Catalog tables, databases, and partitions.
- table_prefix str
- A naming prefix for each Data Catalog table that Amazon AppFlow creates
- databaseName String
- The name of an existing Glue database to store the metadata tables that Amazon AppFlow creates.
- roleArn String
- The ARN of an IAM role that grants AppFlow the permissions it needs to create Data Catalog tables, databases, and partitions.
- tablePrefix String
- A naming prefix for each Data Catalog table that Amazon AppFlow creates
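As a rough sketch, the Glue Data Catalog settings above attach to a flow through its metadataCatalogConfig argument. The database name, IAM role ARN, and table prefix below are placeholders.
const metadataCatalogConfig = {
    glueDataCatalog: {
        databaseName: "example_glue_database",                          // placeholder database
        roleArn: "arn:aws:iam::123456789012:role/appflow-glue-access",  // placeholder role ARN
        tablePrefix: "appflow",                                         // placeholder table prefix
    },
};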
FlowSourceFlowConfig, FlowSourceFlowConfigArgs        
- ConnectorType string
- Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
- SourceConnectorProperties FlowSourceFlowConfigSourceConnectorProperties
- Information that is required to query a particular source connector. See Source Connector Properties for details.
- ApiVersion string
- API version that the destination connector uses.
- ConnectorProfileName string
- Name of the connector profile. This name must be unique for each connector profile in the AWS account.
- IncrementalPullConfig FlowSourceFlowConfigIncrementalPullConfig
- Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See Incremental Pull Config for more details.
- ConnectorType string
- Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
- SourceConnectorProperties FlowSourceFlowConfigSourceConnectorProperties
- Information that is required to query a particular source connector. See Source Connector Properties for details.
- ApiVersion string
- API version that the destination connector uses.
- ConnectorProfileName string
- Name of the connector profile. This name must be unique for each connector profile in the AWS account.
- IncrementalPullConfig FlowSourceFlowConfigIncrementalPullConfig
- Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See Incremental Pull Config for more details.
- connectorType String
- Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
- sourceConnectorProperties FlowSourceFlowConfigSourceConnectorProperties
- Information that is required to query a particular source connector. See Source Connector Properties for details.
- apiVersion String
- API version that the destination connector uses.
- connectorProfileName String
- Name of the connector profile. This name must be unique for each connector profile in the AWS account.
- incrementalPullConfig FlowSourceFlowConfigIncrementalPullConfig
- Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See Incremental Pull Config for more details.
- connectorType string
- Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
- sourceConnectorProperties FlowSourceFlowConfigSourceConnectorProperties
- Information that is required to query a particular source connector. See Source Connector Properties for details.
- apiVersion string
- API version that the destination connector uses.
- connectorProfileName string
- Name of the connector profile. This name must be unique for each connector profile in the AWS account.
- incrementalPullConfig FlowSourceFlowConfigIncrementalPullConfig
- Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See Incremental Pull Config for more details.
- connector_type str
- Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
- source_connector_properties FlowSourceFlowConfigSourceConnectorProperties
- Information that is required to query a particular source connector. See Source Connector Properties for details.
- api_version str
- API version that the destination connector uses.
- connector_profile_name str
- Name of the connector profile. This name must be unique for each connector profile in the AWS account.
- incremental_pull_config FlowSourceFlowConfigIncrementalPullConfig
- Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See Incremental Pull Config for more details.
- connectorType String
- Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
- sourceConnectorProperties Property Map
- Information that is required to query a particular source connector. See Source Connector Properties for details.
- apiVersion String
- API version that the destination connector uses.
- connectorProfileName String
- Name of the connector profile. This name must be unique for each connector profile in the AWS account.
- incrementalPullConfig Property Map
- Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See Incremental Pull Config for more details.
FlowSourceFlowConfigIncrementalPullConfig, FlowSourceFlowConfigIncrementalPullConfigArgs              
- DatetimeTypeFieldName string
- Field that specifies the date time or timestamp field as the criteria to use when importing incremental records from the source.
- DatetimeTypeFieldName string
- Field that specifies the date time or timestamp field as the criteria to use when importing incremental records from the source.
- datetimeTypeFieldName String
- Field that specifies the date time or timestamp field as the criteria to use when importing incremental records from the source.
- datetimeTypeFieldName string
- Field that specifies the date time or timestamp field as the criteria to use when importing incremental records from the source.
- datetime_type_field_name str
- Field that specifies the date time or timestamp field as the criteria to use when importing incremental records from the source.
- datetimeTypeFieldName String
- Field that specifies the date time or timestamp field as the criteria to use when importing incremental records from the source.
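Combining the two blocks above, the following TypeScript sketch shows a sourceFlowConfig for a scheduled, incremental Salesforce pull. The connector profile name, object, and timestamp field are placeholders.
const sourceFlowConfig = {
    connectorType: "Salesforce",
    connectorProfileName: "example-salesforce-profile", // placeholder connector profile
    incrementalPullConfig: {
        datetimeTypeFieldName: "LastModifiedDate",       // placeholder timestamp field
    },
    sourceConnectorProperties: {
        salesforce: {
            object: "Account",                           // placeholder Salesforce object
        },
    },
};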
FlowSourceFlowConfigSourceConnectorProperties, FlowSourceFlowConfigSourceConnectorPropertiesArgs              
- Amplitude FlowSourceFlowConfigSourceConnectorPropertiesAmplitude
- Information that is required for querying Amplitude. See Generic Source Properties for more details.
- CustomConnector FlowSourceFlowConfigSourceConnectorPropertiesCustomConnector
- Properties that are applied when the custom connector is being used as a source. See Custom Connector Source Properties.
- Datadog FlowSourceFlowConfigSourceConnectorPropertiesDatadog
- Information that is required for querying Datadog. See Generic Source Properties for more details.
- Dynatrace FlowSourceFlowConfigSourceConnectorPropertiesDynatrace
- Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- GoogleAnalytics FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalytics
- Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
- InforNexus FlowSourceFlowConfigSourceConnectorPropertiesInforNexus
- Information that is required for querying Infor Nexus. See Generic Source Properties for more details.
- Marketo FlowSourceFlowConfigSourceConnectorPropertiesMarketo
- Information that is required for querying Marketo. See Generic Source Properties for more details.
- S3 FlowSourceFlowConfigSourceConnectorPropertiesS3
- Information that is required for querying Amazon S3. See S3 Source Properties for more details.
- Salesforce FlowSourceFlowConfigSourceConnectorPropertiesSalesforce
- Information that is required for querying Salesforce. See Salesforce Source Properties for more details.
- SapoData FlowSourceFlowConfigSourceConnectorPropertiesSapoData
- Information that is required for querying SAPOData as a flow source. See SAPO Source Properties for more details.
- ServiceNow FlowSourceFlowConfigSourceConnectorPropertiesServiceNow
- Information that is required for querying ServiceNow. See Generic Source Properties for more details.
- Singular FlowSourceFlowConfigSourceConnectorPropertiesSingular
- Information that is required for querying Singular. See Generic Source Properties for more details.
- Slack FlowSourceFlowConfigSourceConnectorPropertiesSlack
- Information that is required for querying Slack. See Generic Source Properties for more details.
- Trendmicro FlowSourceFlowConfigSourceConnectorPropertiesTrendmicro
- Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Veeva FlowSourceFlowConfigSourceConnectorPropertiesVeeva
- Information that is required for querying Veeva. See Veeva Source Properties for more details.
- Zendesk FlowSourceFlowConfigSourceConnectorPropertiesZendesk
- Information that is required for querying Zendesk. See Generic Source Properties for more details.
- Amplitude FlowSourceFlowConfigSourceConnectorPropertiesAmplitude
- Information that is required for querying Amplitude. See Generic Source Properties for more details.
- CustomConnector FlowSourceFlowConfigSourceConnectorPropertiesCustomConnector
- Properties that are applied when the custom connector is being used as a source. See Custom Connector Source Properties.
- Datadog FlowSourceFlowConfigSourceConnectorPropertiesDatadog
- Information that is required for querying Datadog. See Generic Source Properties for more details.
- Dynatrace FlowSourceFlowConfigSourceConnectorPropertiesDynatrace
- Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- GoogleAnalytics FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalytics
- Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
- InforNexus FlowSourceFlowConfigSourceConnectorPropertiesInforNexus
- Information that is required for querying Infor Nexus. See Generic Source Properties for more details.
- Marketo FlowSourceFlowConfigSourceConnectorPropertiesMarketo
- Information that is required for querying Marketo. See Generic Source Properties for more details.
- S3 FlowSourceFlowConfigSourceConnectorPropertiesS3
- Information that is required for querying Amazon S3. See S3 Source Properties for more details.
- Salesforce FlowSourceFlowConfigSourceConnectorPropertiesSalesforce
- Information that is required for querying Salesforce. See Salesforce Source Properties for more details.
- SapoData FlowSourceFlowConfigSourceConnectorPropertiesSapoData
- Information that is required for querying SAPOData as a flow source. See SAPO Source Properties for more details.
- ServiceNow FlowSourceFlowConfigSourceConnectorPropertiesServiceNow
- Information that is required for querying ServiceNow. See Generic Source Properties for more details.
- Singular FlowSourceFlowConfigSourceConnectorPropertiesSingular
- Information that is required for querying Singular. See Generic Source Properties for more details.
- Slack FlowSourceFlowConfigSourceConnectorPropertiesSlack
- Information that is required for querying Slack. See Generic Source Properties for more details.
- Trendmicro FlowSourceFlowConfigSourceConnectorPropertiesTrendmicro
- Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Veeva FlowSourceFlowConfigSourceConnectorPropertiesVeeva
- Information that is required for querying Veeva. See Veeva Source Properties for more details.
- Zendesk FlowSourceFlowConfigSourceConnectorPropertiesZendesk
- Information that is required for querying Zendesk. See Generic Source Properties for more details.
- amplitude FlowSourceFlowConfigSourceConnectorPropertiesAmplitude
- Information that is required for querying Amplitude. See Generic Source Properties for more details.
- customConnector FlowSourceFlowConfigSourceConnectorPropertiesCustomConnector
- Properties that are applied when the custom connector is being used as a source. See Custom Connector Source Properties.
- datadog FlowSourceFlowConfigSourceConnectorPropertiesDatadog
- Information that is required for querying Datadog. See Generic Source Properties for more details.
- dynatrace FlowSourceFlowConfigSourceConnectorPropertiesDynatrace
- Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- googleAnalytics FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalytics
- Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
- inforNexus FlowSourceFlowConfigSourceConnectorPropertiesInforNexus
- Information that is required for querying Infor Nexus. See Generic Source Properties for more details.
- marketo FlowSourceFlowConfigSourceConnectorPropertiesMarketo
- Information that is required for querying Marketo. See Generic Source Properties for more details.
- s3 FlowSourceFlowConfigSourceConnectorPropertiesS3
- Information that is required for querying Amazon S3. See S3 Source Properties for more details.
- salesforce FlowSourceFlowConfigSourceConnectorPropertiesSalesforce
- Information that is required for querying Salesforce. See Salesforce Source Properties for more details.
- sapoData FlowSourceFlowConfigSourceConnectorPropertiesSapoData
- Information that is required for querying SAPOData as a flow source. See SAPO Source Properties for more details.
- serviceNow FlowSourceFlowConfigSourceConnectorPropertiesServiceNow
- Information that is required for querying ServiceNow. See Generic Source Properties for more details.
- singular FlowSourceFlowConfigSourceConnectorPropertiesSingular
- Information that is required for querying Singular. See Generic Source Properties for more details.
- slack FlowSourceFlowConfigSourceConnectorPropertiesSlack
- Information that is required for querying Slack. See Generic Source Properties for more details.
- trendmicro FlowSourceFlowConfigSourceConnectorPropertiesTrendmicro
- Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- veeva FlowSourceFlowConfigSourceConnectorPropertiesVeeva
- Information that is required for querying Veeva. See Veeva Source Properties for more details.
- zendesk FlowSourceFlowConfigSourceConnectorPropertiesZendesk
- Information that is required for querying Zendesk. See Generic Source Properties for more details.
- amplitude FlowSourceFlowConfigSourceConnectorPropertiesAmplitude
- Information that is required for querying Amplitude. See Generic Source Properties for more details.
- customConnector FlowSourceFlowConfigSourceConnectorPropertiesCustomConnector
- Properties that are applied when the custom connector is being used as a source. See Custom Connector Source Properties.
- datadog FlowSourceFlowConfigSourceConnectorPropertiesDatadog
- Information that is required for querying Datadog. See Generic Source Properties for more details.
- dynatrace FlowSourceFlowConfigSourceConnectorPropertiesDynatrace
- Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- googleAnalytics FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalytics
- Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
- inforNexus FlowSourceFlowConfigSourceConnectorPropertiesInforNexus
- Information that is required for querying Infor Nexus. See Generic Source Properties for more details.
- marketo FlowSourceFlowConfigSourceConnectorPropertiesMarketo
- Information that is required for querying Marketo. See Generic Source Properties for more details.
- s3 FlowSourceFlowConfigSourceConnectorPropertiesS3
- Information that is required for querying Amazon S3. See S3 Source Properties for more details.
- salesforce FlowSourceFlowConfigSourceConnectorPropertiesSalesforce
- Information that is required for querying Salesforce. See Salesforce Source Properties for more details.
- sapoData FlowSourceFlowConfigSourceConnectorPropertiesSapoData
- Information that is required for querying SAPOData as a flow source. See SAPO Source Properties for more details.
- serviceNow FlowSourceFlowConfigSourceConnectorPropertiesServiceNow
- Information that is required for querying ServiceNow. See Generic Source Properties for more details.
- singular FlowSourceFlowConfigSourceConnectorPropertiesSingular
- Information that is required for querying Singular. See Generic Source Properties for more details.
- slack FlowSourceFlowConfigSourceConnectorPropertiesSlack
- Information that is required for querying Slack. See Generic Source Properties for more details.
- trendmicro FlowSourceFlowConfigSourceConnectorPropertiesTrendmicro
- Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- veeva FlowSourceFlowConfigSourceConnectorPropertiesVeeva
- Information that is required for querying Veeva. See Veeva Source Properties for more details.
- zendesk FlowSourceFlowConfigSourceConnectorPropertiesZendesk
- Information that is required for querying Zendesk. See Generic Source Properties for more details.
- amplitude FlowSourceFlowConfigSourceConnectorPropertiesAmplitude
- Information that is required for querying Amplitude. See Generic Source Properties for more details.
- custom_connector FlowSourceFlowConfigSourceConnectorPropertiesCustomConnector
- Properties that are applied when the custom connector is being used as a source. See Custom Connector Source Properties.
- datadog FlowSourceFlowConfigSourceConnectorPropertiesDatadog
- Information that is required for querying Datadog. See Generic Source Properties for more details.
- dynatrace FlowSourceFlowConfigSourceConnectorPropertiesDynatrace
- Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- google_analytics FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalytics
- Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
- infor_nexus FlowSourceFlowConfigSourceConnectorPropertiesInforNexus
- Information that is required for querying Infor Nexus. See Generic Source Properties for more details.
- marketo FlowSourceFlowConfigSourceConnectorPropertiesMarketo
- Information that is required for querying Marketo. See Generic Source Properties for more details.
- s3 FlowSourceFlowConfigSourceConnectorPropertiesS3
- Information that is required for querying Amazon S3. See S3 Source Properties for more details.
- salesforce FlowSourceFlowConfigSourceConnectorPropertiesSalesforce
- Information that is required for querying Salesforce. See Salesforce Source Properties for more details.
- sapo_data FlowSourceFlowConfigSourceConnectorPropertiesSapoData
- Information that is required for querying SAPOData as a flow source. See SAPO Source Properties for more details.
- service_now FlowSourceFlowConfigSourceConnectorPropertiesServiceNow
- Information that is required for querying ServiceNow. See Generic Source Properties for more details.
- singular FlowSourceFlowConfigSourceConnectorPropertiesSingular
- Information that is required for querying Singular. See Generic Source Properties for more details.
- slack FlowSourceFlowConfigSourceConnectorPropertiesSlack
- Information that is required for querying Slack. See Generic Source Properties for more details.
- trendmicro FlowSourceFlowConfigSourceConnectorPropertiesTrendmicro
- Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- veeva FlowSourceFlowConfigSourceConnectorPropertiesVeeva
- Information that is required for querying Veeva. See Veeva Source Properties for more details.
- zendesk FlowSourceFlowConfigSourceConnectorPropertiesZendesk
- Information that is required for querying Zendesk. See Generic Source Properties for more details.
- amplitude Property Map
- Information that is required for querying Amplitude. See Generic Source Properties for more details.
- customConnector Property Map
- Properties that are applied when the custom connector is being used as a source. See Custom Connector Source Properties.
- datadog Property Map
- Information that is required for querying Datadog. See Generic Source Properties for more details.
- dynatrace Property Map
- Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- googleAnalytics Property Map
- Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
- inforNexus Property Map
- Information that is required for querying Infor Nexus. See Generic Source Properties for more details.
- marketo Property Map
- Information that is required for querying Marketo. See Generic Source Properties for more details.
- s3 Property Map
- Information that is required for querying Amazon S3. See S3 Source Properties for more details.
- salesforce Property Map
- Information that is required for querying Salesforce. See Salesforce Source Properties for more details.
- sapoData Property Map
- Information that is required for querying SAPOData as a flow source. See SAPO Source Properties for more details.
- serviceNow Property Map
- Information that is required for querying ServiceNow. See Generic Source Properties for more details.
- singular Property Map
- Information that is required for querying Singular. See Generic Source Properties for more details.
- slack Property Map
- Information that is required for querying Slack. See Generic Source Properties for more details.
- trendmicro Property Map
- Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- veeva Property Map
- Information that is required for querying Veeva. See Veeva Source Properties for more details.
- zendesk Property Map
- Information that is required for querying Zendesk. See Generic Source Properties for more details.
FlowSourceFlowConfigSourceConnectorPropertiesAmplitude, FlowSourceFlowConfigSourceConnectorPropertiesAmplitudeArgs                
- Object string
- Object string
- object String
- object string
- object str
- object String
FlowSourceFlowConfigSourceConnectorPropertiesCustomConnector, FlowSourceFlowConfigSourceConnectorPropertiesCustomConnectorArgs                  
- EntityName string
- CustomProperties Dictionary<string, string>
- EntityName string
- CustomProperties map[string]string
- entityName String
- customProperties Map<String,String>
- entityName string
- customProperties {[key: string]: string}
- entity_name str
- custom_properties Mapping[str, str]
- entityName String
- customProperties Map<String>
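For example, a sourceFlowConfig that uses a custom connector might look like the sketch below; the profile name, API version, entity name, and custom properties are all placeholders.
const customConnectorSource = {
    connectorType: "CustomConnector",
    connectorProfileName: "example-custom-profile", // placeholder connector profile
    apiVersion: "v1",                               // placeholder API version
    sourceConnectorProperties: {
        customConnector: {
            entityName: "orders",                   // placeholder entity
            customProperties: {
                region: "us-east-1",                // placeholder custom property
            },
        },
    },
};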
FlowSourceFlowConfigSourceConnectorPropertiesDatadog, FlowSourceFlowConfigSourceConnectorPropertiesDatadogArgs                
- Object string
- Object string
- object String
- object string
- object str
- object String
FlowSourceFlowConfigSourceConnectorPropertiesDynatrace, FlowSourceFlowConfigSourceConnectorPropertiesDynatraceArgs                
- Object string
- Object string
- object String
- object string
- object str
- object String
FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalytics, FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalyticsArgs                  
- Object string
- Object string
- object String
- object string
- object str
- object String
FlowSourceFlowConfigSourceConnectorPropertiesInforNexus, FlowSourceFlowConfigSourceConnectorPropertiesInforNexusArgs                  
- Object string
- Object string
- object String
- object string
- object str
- object String
FlowSourceFlowConfigSourceConnectorPropertiesMarketo, FlowSourceFlowConfigSourceConnectorPropertiesMarketoArgs                
- Object string
- Object string
- object String
- object string
- object str
- object String
FlowSourceFlowConfigSourceConnectorPropertiesS3, FlowSourceFlowConfigSourceConnectorPropertiesS3Args                
- BucketName string
- BucketPrefix string
- S3InputFormatConfig FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfig
- When you use Amazon S3 as the source, the configuration format in which you provide the flow input data. See S3 Input Format Config for details.
- BucketName string
- BucketPrefix string
- S3InputFormatConfig FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfig
- When you use Amazon S3 as the source, the configuration format in which you provide the flow input data. See S3 Input Format Config for details.
- bucketName String
- bucketPrefix String
- s3InputFormatConfig FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfig
- When you use Amazon S3 as the source, the configuration format in which you provide the flow input data. See S3 Input Format Config for details.
- bucketName string
- bucketPrefix string
- s3InputFormatConfig FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfig
- When you use Amazon S3 as the source, the configuration format in which you provide the flow input data. See S3 Input Format Config for details.
- bucket_name str
- bucket_prefix str
- s3_input_format_config FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfig
- When you use Amazon S3 as the source, the configuration format in which you provide the flow input data. See S3 Input Format Config for details.
- bucketName String
- bucketPrefix String
- s3InputFormatConfig Property Map
- When you use Amazon S3 as the source, the configuration format in which you provide the flow input data. See S3 Input Format Config for details.
FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfig, FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfigArgs                    
- S3InputFileType string
- File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are CSV and JSON.
- S3InputFileType string
- File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are CSV and JSON.
- s3InputFileType String
- File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are CSV and JSON.
- s3InputFileType string
- File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are CSV and JSON.
- s3_input_file_type str
- File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are CSV and JSON.
- s3InputFileType String
- File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are CSV and JSON.
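A short sketch of the S3 source properties and input format config above; the bucket name and prefix are placeholders.
const s3Source = {
    connectorType: "S3",
    sourceConnectorProperties: {
        s3: {
            bucketName: "example-source-bucket", // placeholder bucket
            bucketPrefix: "input",               // placeholder prefix
            s3InputFormatConfig: {
                s3InputFileType: "CSV",
            },
        },
    },
};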
FlowSourceFlowConfigSourceConnectorPropertiesSalesforce, FlowSourceFlowConfigSourceConnectorPropertiesSalesforceArgs                
- Object string
- DataTransferApi string
- Specifies which Salesforce API is used by Amazon AppFlow when your flow transfers data to Salesforce.
- EnableDynamicFieldUpdate bool
- Flag that enables dynamic fetching of new (recently added) fields in the Salesforce objects while running a flow.
- IncludeDeletedRecords bool
- Whether Amazon AppFlow includes deleted files in the flow run.
- Object string
- DataTransferApi string
- Specifies which Salesforce API is used by Amazon AppFlow when your flow transfers data to Salesforce.
- EnableDynamicFieldUpdate bool
- Flag that enables dynamic fetching of new (recently added) fields in the Salesforce objects while running a flow.
- IncludeDeletedRecords bool
- Whether Amazon AppFlow includes deleted files in the flow run.
- object String
- dataTransferApi String
- Specifies which Salesforce API is used by Amazon AppFlow when your flow transfers data to Salesforce.
- enableDynamicFieldUpdate Boolean
- Flag that enables dynamic fetching of new (recently added) fields in the Salesforce objects while running a flow.
- includeDeletedRecords Boolean
- Whether Amazon AppFlow includes deleted files in the flow run.
- object string
- dataTransferApi string
- Specifies which Salesforce API is used by Amazon AppFlow when your flow transfers data to Salesforce.
- enableDynamicFieldUpdate boolean
- Flag that enables dynamic fetching of new (recently added) fields in the Salesforce objects while running a flow.
- includeDeletedRecords boolean
- Whether Amazon AppFlow includes deleted files in the flow run.
- object str
- data_transfer_api str
- Specifies which Salesforce API is used by Amazon AppFlow when your flow transfers data to Salesforce.
- enable_dynamic_field_update bool
- Flag that enables dynamic fetching of new (recently added) fields in the Salesforce objects while running a flow.
- include_deleted_records bool
- Whether Amazon AppFlow includes deleted files in the flow run.
- object String
- dataTransferApi String
- Specifies which Salesforce API is used by Amazon AppFlow when your flow transfers data to Salesforce.
- enableDynamicFieldUpdate Boolean
- Flag that enables dynamic fetching of new (recently added) fields in the Salesforce objects while running a flow.
- includeDeletedRecords Boolean
- Whether Amazon AppFlow includes deleted files in the flow run.
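As a sketch, the Salesforce-specific flags above sit alongside the object name inside sourceConnectorProperties; the profile name and object below are placeholders.
const salesforceSource = {
    connectorType: "Salesforce",
    connectorProfileName: "example-salesforce-profile", // placeholder connector profile
    sourceConnectorProperties: {
        salesforce: {
            object: "Contact",                           // placeholder Salesforce object
            enableDynamicFieldUpdate: true,
            includeDeletedRecords: false,
        },
    },
};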
FlowSourceFlowConfigSourceConnectorPropertiesSapoData, FlowSourceFlowConfigSourceConnectorPropertiesSapoDataArgs                  
- ObjectPath string
- PaginationConfig FlowSourceFlowConfigSourceConnectorPropertiesSapoDataPaginationConfig
- Sets the page size for each concurrent process that transfers OData records from your SAP instance.
- ParallelismConfig FlowSourceFlowConfigSourceConnectorPropertiesSapoDataParallelismConfig
- Sets the number of concurrent processes that transfer OData records from your SAP instance.
- ObjectPath string
- PaginationConfig FlowSourceFlowConfigSourceConnectorPropertiesSapoDataPaginationConfig
- Sets the page size for each concurrent process that transfers OData records from your SAP instance.
- ParallelismConfig FlowSourceFlowConfigSourceConnectorPropertiesSapoDataParallelismConfig
- Sets the number of concurrent processes that transfer OData records from your SAP instance.
- objectPath String
- paginationConfig FlowSourceFlowConfigSourceConnectorPropertiesSapoDataPaginationConfig
- Sets the page size for each concurrent process that transfers OData records from your SAP instance.
- parallelismConfig FlowSourceFlowConfigSourceConnectorPropertiesSapoDataParallelismConfig
- Sets the number of concurrent processes that transfer OData records from your SAP instance.
- objectPath string
- paginationConfig FlowSourceFlowConfigSourceConnectorPropertiesSapoDataPaginationConfig
- Sets the page size for each concurrent process that transfers OData records from your SAP instance.
- parallelismConfig FlowSourceFlowConfigSourceConnectorPropertiesSapoDataParallelismConfig
- Sets the number of concurrent processes that transfer OData records from your SAP instance.
- object_path str
- pagination_config FlowSourceFlowConfigSourceConnectorPropertiesSapoDataPaginationConfig
- Sets the page size for each concurrent process that transfers OData records from your SAP instance.
- parallelism_config FlowSourceFlowConfigSourceConnectorPropertiesSapoDataParallelismConfig
- Sets the number of concurrent processes that transfer OData records from your SAP instance.
- objectPath String
- paginationConfig Property Map
- Sets the page size for each concurrent process that transfers OData records from your SAP instance.
- parallelismConfig Property Map
- Sets the number of concurrent processes that transfer OData records from your SAP instance.
FlowSourceFlowConfigSourceConnectorPropertiesSapoDataPaginationConfig, FlowSourceFlowConfigSourceConnectorPropertiesSapoDataPaginationConfigArgs                      
- MaxPageSize int
- The maximum number of records that Amazon AppFlow receives in each page of the response from your SAP application.
- MaxPageSize int
- The maximum number of records that Amazon AppFlow receives in each page of the response from your SAP application.
- maxPageSize Integer
- The maximum number of records that Amazon AppFlow receives in each page of the response from your SAP application.
- maxPageSize number
- The maximum number of records that Amazon AppFlow receives in each page of the response from your SAP application.
- max_page_size int
- The maximum number of records that Amazon AppFlow receives in each page of the response from your SAP application.
- maxPageSize Number
- The maximum number of records that Amazon AppFlow receives in each page of the response from your SAP application.
FlowSourceFlowConfigSourceConnectorPropertiesSapoDataParallelismConfig, FlowSourceFlowConfigSourceConnectorPropertiesSapoDataParallelismConfigArgs                      
- MaxPageSize int
- The maximum number of records that Amazon AppFlow receives in each page of the response from your SAP application.
- MaxPageSize int
- The maximum number of records that Amazon AppFlow receives in each page of the response from your SAP application.
- maxPageSize Integer
- The maximum number of records that Amazon AppFlow receives in each page of the response from your SAP application.
- maxPageSize number
- The maximum number of records that Amazon AppFlow receives in each page of the response from your SAP application.
- max_page_size int
- The maximum number of records that Amazon AppFlow receives in each page of the response from your SAP application.
- maxPageSize Number
- The maximum number of records that Amazon AppFlow receives in each page of the response from your SAP application.
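A hedged sketch of an SAPOData source with the pagination setting above; the profile name, OData object path, and page size are placeholders.
const sapoDataSource = {
    connectorType: "SAPOData",
    connectorProfileName: "example-sap-profile",                 // placeholder connector profile
    sourceConnectorProperties: {
        sapoData: {
            objectPath: "/sap/opu/odata/sap/EXAMPLE_SRV/Orders", // placeholder object path
            paginationConfig: {
                maxPageSize: 1000,                               // placeholder page size
            },
        },
    },
};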
FlowSourceFlowConfigSourceConnectorPropertiesServiceNow, FlowSourceFlowConfigSourceConnectorPropertiesServiceNowArgs                  
- Object string
- Object string
- object String
- object string
- object str
- object String
FlowSourceFlowConfigSourceConnectorPropertiesSingular, FlowSourceFlowConfigSourceConnectorPropertiesSingularArgs                
- Object string
- Object string
- object String
- object string
- object str
- object String
FlowSourceFlowConfigSourceConnectorPropertiesSlack, FlowSourceFlowConfigSourceConnectorPropertiesSlackArgs                
- Object string
- Object string
- object String
- object string
- object str
- object String
FlowSourceFlowConfigSourceConnectorPropertiesTrendmicro, FlowSourceFlowConfigSourceConnectorPropertiesTrendmicroArgs                
- Object string
- Object string
- object String
- object string
- object str
- object String
FlowSourceFlowConfigSourceConnectorPropertiesVeeva, FlowSourceFlowConfigSourceConnectorPropertiesVeevaArgs                
- Object string
- DocumentType string
- Document type specified in the Veeva document extract flow.
- IncludeAllVersions bool
- Boolean value to include All Versions of files in Veeva document extract flow.
- IncludeRenditions bool
- Boolean value to include file renditions in Veeva document extract flow.
- IncludeSourceFiles bool
- Boolean value to include source files in Veeva document extract flow.
- Object string
- DocumentType string
- Document type specified in the Veeva document extract flow.
- IncludeAllVersions bool
- Boolean value to include All Versions of files in Veeva document extract flow.
- IncludeRenditions bool
- Boolean value to include file renditions in Veeva document extract flow.
- IncludeSourceFiles bool
- Boolean value to include source files in Veeva document extract flow.
- object String
- documentType String
- Document type specified in the Veeva document extract flow.
- includeAllVersions Boolean
- Boolean value to include All Versions of files in Veeva document extract flow.
- includeRenditions Boolean
- Boolean value to include file renditions in Veeva document extract flow.
- includeSourceFiles Boolean
- Boolean value to include source files in Veeva document extract flow.
- object string
- documentType string
- Document type specified in the Veeva document extract flow.
- includeAllVersions boolean
- Boolean value to include All Versions of files in Veeva document extract flow.
- includeRenditions boolean
- Boolean value to include file renditions in Veeva document extract flow.
- includeSourceFiles boolean
- Boolean value to include source files in Veeva document extract flow.
- object str
- document_type str
- Document type specified in the Veeva document extract flow.
- include_all_versions bool
- Boolean value to include All Versions of files in Veeva document extract flow.
- include_renditions bool
- Boolean value to include file renditions in Veeva document extract flow.
- include_source_files bool
- Boolean value to include source files in Veeva document extract flow.
- object String
- documentType String
- Document type specified in the Veeva document extract flow.
- includeAllVersions Boolean
- Boolean value to include All Versions of files in Veeva document extract flow.
- includeRenditions Boolean
- Boolean value to include file renditions in Veeva document extract flow.
- includeSourceFiles Boolean
- Boolean value to include source files in Veeva document extract flow.
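A sketch of the Veeva document-extract options above; the profile name, object, and document type are placeholders.
const veevaSource = {
    connectorType: "Veeva",
    connectorProfileName: "example-veeva-profile", // placeholder connector profile
    sourceConnectorProperties: {
        veeva: {
            object: "documents",                   // placeholder Veeva object
            documentType: "Promotional",           // placeholder document type
            includeAllVersions: false,
            includeRenditions: true,
            includeSourceFiles: false,
        },
    },
};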
FlowSourceFlowConfigSourceConnectorPropertiesZendesk, FlowSourceFlowConfigSourceConnectorPropertiesZendeskArgs                
- Object string
- Object string
- object String
- object string
- object str
- object String
FlowTask, FlowTaskArgs    
- TaskType string
- Particular task implementation that Amazon AppFlow performs. Valid values are Arithmetic, Filter, Map, Map_all, Mask, Merge, Passthrough, Truncate, and Validate.
- ConnectorOperators List<FlowTaskConnectorOperator>
- Operation to be performed on the provided source fields. See Connector Operator for details.
- DestinationField string
- Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
- SourceFields List<string>
- Source fields to which a particular task is applied.
- TaskProperties Dictionary<string, string>
- Map used to store task-related information. The execution service looks for particular information based on the TaskType. Valid keys are VALUE, VALUES, DATA_TYPE, UPPER_BOUND, LOWER_BOUND, SOURCE_DATA_TYPE, DESTINATION_DATA_TYPE, VALIDATION_ACTION, MASK_VALUE, MASK_LENGTH, TRUNCATE_LENGTH, MATH_OPERATION_FIELDS_ORDER, CONCAT_FORMAT, SUBFIELD_CATEGORY_MAP, and EXCLUDE_SOURCE_FIELDS_LIST.
- TaskType string
- Particular task implementation that Amazon AppFlow performs. Valid values are Arithmetic, Filter, Map, Map_all, Mask, Merge, Passthrough, Truncate, and Validate.
- ConnectorOperators []FlowTaskConnectorOperator
- Operation to be performed on the provided source fields. See Connector Operator for details.
- DestinationField string
- Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
- SourceFields []string
- Source fields to which a particular task is applied.
- TaskProperties map[string]string
- Map used to store task-related information. The execution service looks for particular information based on the TaskType. Valid keys are VALUE, VALUES, DATA_TYPE, UPPER_BOUND, LOWER_BOUND, SOURCE_DATA_TYPE, DESTINATION_DATA_TYPE, VALIDATION_ACTION, MASK_VALUE, MASK_LENGTH, TRUNCATE_LENGTH, MATH_OPERATION_FIELDS_ORDER, CONCAT_FORMAT, SUBFIELD_CATEGORY_MAP, and EXCLUDE_SOURCE_FIELDS_LIST.
- taskType String
- Particular task implementation that Amazon AppFlow performs. Valid values are Arithmetic, Filter, Map, Map_all, Mask, Merge, Passthrough, Truncate, and Validate.
- connectorOperators List<FlowTaskConnectorOperator>
- Operation to be performed on the provided source fields. See Connector Operator for details.
- destinationField String
- Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
- sourceFields List<String>
- Source fields to which a particular task is applied.
- taskProperties Map<String,String>
- Map used to store task-related information. The execution service looks for particular information based on the TaskType. Valid keys are VALUE, VALUES, DATA_TYPE, UPPER_BOUND, LOWER_BOUND, SOURCE_DATA_TYPE, DESTINATION_DATA_TYPE, VALIDATION_ACTION, MASK_VALUE, MASK_LENGTH, TRUNCATE_LENGTH, MATH_OPERATION_FIELDS_ORDER, CONCAT_FORMAT, SUBFIELD_CATEGORY_MAP, and EXCLUDE_SOURCE_FIELDS_LIST.
- taskType string
- Particular task implementation that Amazon AppFlow performs. Valid values are Arithmetic, Filter, Map, Map_all, Mask, Merge, Passthrough, Truncate, and Validate.
- connectorOperators FlowTaskConnectorOperator[]
- Operation to be performed on the provided source fields. See Connector Operator for details.
- destinationField string
- Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
- sourceFields string[]
- Source fields to which a particular task is applied.
- taskProperties {[key: string]: string}
- Map used to store task-related information. The execution service looks for particular information based on the TaskType. Valid keys are VALUE, VALUES, DATA_TYPE, UPPER_BOUND, LOWER_BOUND, SOURCE_DATA_TYPE, DESTINATION_DATA_TYPE, VALIDATION_ACTION, MASK_VALUE, MASK_LENGTH, TRUNCATE_LENGTH, MATH_OPERATION_FIELDS_ORDER, CONCAT_FORMAT, SUBFIELD_CATEGORY_MAP, and EXCLUDE_SOURCE_FIELDS_LIST.
- task_type str
- Particular task implementation that Amazon AppFlow performs. Valid values are Arithmetic, Filter, Map, Map_all, Mask, Merge, Passthrough, Truncate, and Validate.
- connector_operators Sequence[FlowTaskConnectorOperator]
- Operation to be performed on the provided source fields. See Connector Operator for details.
- destination_field str
- Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
- source_fields Sequence[str]
- Source fields to which a particular task is applied.
- task_properties Mapping[str, str]
- Map used to store task-related information. The execution service looks for particular information based on the TaskType. Valid keys are VALUE, VALUES, DATA_TYPE, UPPER_BOUND, LOWER_BOUND, SOURCE_DATA_TYPE, DESTINATION_DATA_TYPE, VALIDATION_ACTION, MASK_VALUE, MASK_LENGTH, TRUNCATE_LENGTH, MATH_OPERATION_FIELDS_ORDER, CONCAT_FORMAT, SUBFIELD_CATEGORY_MAP, and EXCLUDE_SOURCE_FIELDS_LIST.
- taskType String
- Particular task implementation that Amazon AppFlow performs. Valid values are Arithmetic, Filter, Map, Map_all, Mask, Merge, Passthrough, Truncate, and Validate.
- connectorOperators List<Property Map>
- Operation to be performed on the provided source fields. See Connector Operator for details.
- destinationField String
- Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
- sourceFields List<String>
- Source fields to which a particular task is applied.
- taskProperties Map<String>
- Map used to store task-related information. The execution service looks for particular information based on the TaskType. Valid keys are VALUE, VALUES, DATA_TYPE, UPPER_BOUND, LOWER_BOUND, SOURCE_DATA_TYPE, DESTINATION_DATA_TYPE, VALIDATION_ACTION, MASK_VALUE, MASK_LENGTH, TRUNCATE_LENGTH, MATH_OPERATION_FIELDS_ORDER, CONCAT_FORMAT, SUBFIELD_CATEGORY_MAP, and EXCLUDE_SOURCE_FIELDS_LIST.
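The task schema is easier to see in code than in the field list alone. The following is a minimal TypeScript sketch, not taken from this page: the field names and the aws.types.input.appflow.FlowTask type reference are assumptions. It shows a projection task plus a Map task that passes data-type hints through taskProperties:
import * as aws from "@pulumi/aws";

// Hypothetical tasks for an S3-sourced flow; field names are illustrative only.
const tasks: aws.types.input.appflow.FlowTask[] = [
    {
        // Restrict the flow to the source fields it should read.
        taskType: "Filter",
        sourceFields: ["order_id", "ordered_at"],
        connectorOperators: [{ s3: "PROJECTION" }],
    },
    {
        // Copy one field through unchanged, with explicit type hints
        // supplied via the taskProperties keys listed above.
        taskType: "Map",
        sourceFields: ["order_id"],
        destinationField: "order_id",
        connectorOperators: [{ s3: "NO_OP" }],
        taskProperties: {
            SOURCE_DATA_TYPE: "string",
            DESTINATION_DATA_TYPE: "string",
        },
    },
];
Such an array can then be passed as the tasks argument of aws.appflow.Flow.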
FlowTaskConnectorOperator, FlowTaskConnectorOperatorArgs        
- Amplitude string
- Operation to be performed on the provided Amplitude source fields. The only valid value is BETWEEN.
- CustomConnector string
- Operators supported by the custom connector. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Datadog string
- Operation to be performed on the provided Datadog source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Dynatrace string
- Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- GoogleAnalytics string
- Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
- InforNexus string
- Operation to be performed on the provided Infor Nexus source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Marketo string
- Operation to be performed on the provided Marketo source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- S3 string
- Operation to be performed on the provided Amazon S3 source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Salesforce string
- Operation to be performed on the provided Salesforce source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- SapoData string
- Operation to be performed on the provided SAPOData source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- ServiceNow string
- Operation to be performed on the provided ServiceNow source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Singular string
- Operation to be performed on the provided Singular source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Slack string
- Operation to be performed on the provided Slack source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Trendmicro string
- Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Veeva string
- Operation to be performed on the provided Veeva source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Zendesk string
- Operation to be performed on the provided Zendesk source fields. Valid values are PROJECTION, GREATER_THAN, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Amplitude string
- Operation to be performed on the provided Amplitude source fields. The only valid value is BETWEEN.
- CustomConnector string
- Operators supported by the custom connector. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Datadog string
- Operation to be performed on the provided Datadog source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Dynatrace string
- Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- GoogleAnalytics string
- Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
- InforNexus string
- Operation to be performed on the provided Infor Nexus source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Marketo string
- Operation to be performed on the provided Marketo source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- S3 string
- Operation to be performed on the provided Amazon S3 source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Salesforce string
- Operation to be performed on the provided Salesforce source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- SapoData string
- Operation to be performed on the provided SAPOData source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- ServiceNow string
- Operation to be performed on the provided ServiceNow source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Singular string
- Operation to be performed on the provided Singular source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Slack string
- Operation to be performed on the provided Slack source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Trendmicro string
- Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Veeva string
- Operation to be performed on the provided Veeva source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Zendesk string
- Operation to be performed on the provided Zendesk source fields. Valid values are PROJECTION, GREATER_THAN, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- amplitude String
- Operation to be performed on the provided Amplitude source fields. The only valid value is BETWEEN.
- customConnector String
- Operators supported by the custom connector. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- datadog String
- Operation to be performed on the provided Datadog source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- dynatrace String
- Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- googleAnalytics String
- Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
- inforNexus String
- Operation to be performed on the provided Infor Nexus source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- marketo String
- Operation to be performed on the provided Marketo source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- s3 String
- Operation to be performed on the provided Amazon S3 source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- salesforce String
- Operation to be performed on the provided Salesforce source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- sapoData String
- Operation to be performed on the provided SAPOData source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- serviceNow String
- Operation to be performed on the provided ServiceNow source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- singular String
- Operation to be performed on the provided Singular source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- slack String
- Operation to be performed on the provided Slack source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- trendmicro String
- Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- veeva String
- Operation to be performed on the provided Veeva source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- zendesk String
- Operation to be performed on the provided Zendesk source fields. Valid values are PROJECTION, GREATER_THAN, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- amplitude string
- Operation to be performed on the provided Amplitude source fields. The only valid value is BETWEEN.
- customConnector string
- Operators supported by the custom connector. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- datadog string
- Operation to be performed on the provided Datadog source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- dynatrace string
- Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- googleAnalytics string
- Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
- inforNexus string
- Operation to be performed on the provided Infor Nexus source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- marketo string
- Operation to be performed on the provided Marketo source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- s3 string
- Operation to be performed on the provided Amazon S3 source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- salesforce string
- Operation to be performed on the provided Salesforce source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- sapoData string
- Operation to be performed on the provided SAPOData source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- serviceNow string
- Operation to be performed on the provided ServiceNow source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- singular string
- Operation to be performed on the provided Singular source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- slack string
- Operation to be performed on the provided Slack source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- trendmicro string
- Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- veeva string
- Operation to be performed on the provided Veeva source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- zendesk string
- Operation to be performed on the provided Zendesk source fields. Valid values are PROJECTION, GREATER_THAN, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- amplitude str
- Operation to be performed on the provided Amplitude source fields. The only valid value is BETWEEN.
- custom_connector str
- Operators supported by the custom connector. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- datadog str
- Operation to be performed on the provided Datadog source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- dynatrace str
- Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- google_analytics str
- Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
- infor_nexus str
- Operation to be performed on the provided Infor Nexus source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- marketo str
- Operation to be performed on the provided Marketo source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- s3 str
- Operation to be performed on the provided Amazon S3 source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- salesforce str
- Operation to be performed on the provided Salesforce source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- sapo_data str
- Operation to be performed on the provided SAPOData source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- service_now str
- Operation to be performed on the provided ServiceNow source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- singular str
- Operation to be performed on the provided Singular source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- slack str
- Operation to be performed on the provided Slack source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- trendmicro str
- Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- veeva str
- Operation to be performed on the provided Veeva source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- zendesk str
- Operation to be performed on the provided Zendesk source fields. Valid values are PROJECTION, GREATER_THAN, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- amplitude String
- Operation to be performed on the provided Amplitude source fields. The only valid value is BETWEEN.
- customConnector String
- Operators supported by the custom connector. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- datadog String
- Operation to be performed on the provided Datadog source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- dynatrace String
- Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- googleAnalytics String
- Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
- inforNexus String
- Operation to be performed on the provided Infor Nexus source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- marketo String
- Operation to be performed on the provided Marketo source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- s3 String
- Operation to be performed on the provided Amazon S3 source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- salesforce String
- Operation to be performed on the provided Salesforce source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- sapoData String
- Operation to be performed on the provided SAPOData source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- serviceNow String
- Operation to be performed on the provided ServiceNow source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- singular String
- Operation to be performed on the provided Singular source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- slack String
- Operation to be performed on the provided Slack source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- trendmicro String
- Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- veeva String
- Operation to be performed on the provided Veeva source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- zendesk String
- Operation to be performed on the provided Zendesk source fields. Valid values are PROJECTION, GREATER_THAN, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
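As a rough illustration of how these operators combine with task properties, the TypeScript sketch below filters records where a numeric field exceeds a bound. The Salesforce field name, the threshold, the choice of task property keys, and the aws.types.input.appflow.FlowTask type reference are assumptions for illustration only. The operator key (here salesforce) should match the flow's source connector type, and the operator itself must be one of the valid values listed above for that connector:
import * as aws from "@pulumi/aws";

// Hypothetical filter task for a Salesforce-sourced flow.
const filterTask: aws.types.input.appflow.FlowTask = {
    taskType: "Filter",
    sourceFields: ["Amount"],                          // illustrative field name
    connectorOperators: [{ salesforce: "GREATER_THAN" }],
    taskProperties: {
        DATA_TYPE: "number",                           // assumed property values
        VALUE: "100",
    },
};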
FlowTriggerConfig, FlowTriggerConfigArgs      
- TriggerType string
- Type of flow trigger. Valid values are Scheduled, Event, and OnDemand.
- TriggerProperties FlowTrigger Config Trigger Properties
- Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the Scheduled trigger type. See Scheduled Trigger Properties for details.
- TriggerType string
- Type of flow trigger. Valid values are Scheduled, Event, and OnDemand.
- TriggerProperties FlowTrigger Config Trigger Properties
- Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the Scheduled trigger type. See Scheduled Trigger Properties for details.
- triggerType String
- Type of flow trigger. Valid values are Scheduled, Event, and OnDemand.
- triggerProperties FlowTrigger Config Trigger Properties
- Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the Scheduled trigger type. See Scheduled Trigger Properties for details.
- triggerType string
- Type of flow trigger. Valid values are Scheduled, Event, and OnDemand.
- triggerProperties FlowTrigger Config Trigger Properties
- Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the Scheduled trigger type. See Scheduled Trigger Properties for details.
- trigger_type str
- Type of flow trigger. Valid values are Scheduled, Event, and OnDemand.
- trigger_properties FlowTrigger Config Trigger Properties
- Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the Scheduled trigger type. See Scheduled Trigger Properties for details.
- triggerType String
- Type of flow trigger. Valid values are Scheduled, Event, and OnDemand.
- triggerProperties Property Map
- Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the Scheduled trigger type. See Scheduled Trigger Properties for details.
FlowTriggerConfigTriggerProperties, FlowTriggerConfigTriggerPropertiesArgs          
FlowTriggerConfigTriggerPropertiesScheduled, FlowTriggerConfigTriggerPropertiesScheduledArgs            
- ScheduleExpression string
- Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
- DataPullMode string
- Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
- FirstExecutionFrom string
- Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
- ScheduleEndTime string
- Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- ScheduleOffset int
- Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
- ScheduleStartTime string
- Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- Timezone string
- Time zone used when referring to the date and time of a scheduled-triggered flow, such as America/New_York.
- ScheduleExpression string
- Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
- DataPullMode string
- Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
- FirstExecutionFrom string
- Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
- ScheduleEndTime string
- Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- ScheduleOffset int
- Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
- ScheduleStartTime string
- Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- Timezone string
- Time zone used when referring to the date and time of a scheduled-triggered flow, such as America/New_York.
- scheduleExpression String
- Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
- dataPullMode String
- Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
- firstExecutionFrom String
- Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
- scheduleEndTime String
- Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- scheduleOffset Integer
- Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
- scheduleStartTime String
- Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- timezone String
- Time zone used when referring to the date and time of a scheduled-triggered flow, such as America/New_York.
- scheduleExpression string
- Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
- dataPullMode string
- Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
- firstExecutionFrom string
- Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
- scheduleEndTime string
- Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- scheduleOffset number
- Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
- scheduleStartTime string
- Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- timezone string
- Time zone used when referring to the date and time of a scheduled-triggered flow, such as America/New_York.
- schedule_expression str
- Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
- data_pull_mode str
- Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
- first_execution_from str
- Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
- schedule_end_time str
- Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- schedule_offset int
- Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
- schedule_start_time str
- Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- timezone str
- Time zone used when referring to the date and time of a scheduled-triggered flow, such as America/New_York.
- scheduleExpression String
- Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
- dataPullMode String
- Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
- firstExecutionFrom String
- Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
- scheduleEndTime String
- Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- scheduleOffset Number
- Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
- scheduleStartTime String
- Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- timezone String
- Time zone used when referring to the date and time of a scheduled-triggered flow, such as America/New_York.
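For a schedule-triggered flow, the properties above fit together roughly as in the following TypeScript sketch. The schedule expression, the timestamps, and the aws.types.input.appflow.FlowTriggerConfig type reference are illustrative assumptions, not values taken from this page:
import * as aws from "@pulumi/aws";

// Hypothetical trigger configuration for an incremental, schedule-triggered flow.
const triggerConfig: aws.types.input.appflow.FlowTriggerConfig = {
    triggerType: "Scheduled",
    triggerProperties: {
        scheduled: {
            scheduleExpression: "rate(5minutes)",
            dataPullMode: "Incremental",
            scheduleStartTime: "2030-01-01T00:00:00Z",   // RFC3339 timestamps
            scheduleEndTime: "2030-12-31T00:00:00Z",
            timezone: "America/New_York",
        },
    },
};
An object like this can be passed as the triggerConfig argument of aws.appflow.Flow in place of an OnDemand trigger.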
Import
Using pulumi import, import AppFlow flows using the arn. For example:
$ pulumi import aws:appflow/flow:Flow example arn:aws:appflow:us-west-2:123456789012:flow/example-flow
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- AWS Classic pulumi/pulumi-aws
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the aws Terraform Provider.