Skip to content

Commit 5404f66

Browse files
committed
add DynamoDB
1 parent 0cac722 commit 5404f66

19 files changed

+34409
-44
lines changed

packages/@aws-cdk/aws-pipes-sources-alpha/README.md

Lines changed: 27 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -72,7 +72,33 @@ declare const sourceStream: kinesis.Stream;
7272
declare const targetQueue: sqs.Queue;
7373

7474
const pipeSource = new sources.KinesisSource(sourceStream, {
75-
startingPosition: sources.StartingPosition.LATEST,
75+
startingPosition: sources.KinesisStartingPosition.LATEST,
76+
});
77+
78+
const pipe = new pipes.Pipe(this, 'Pipe', {
79+
source: pipeSource,
80+
target: new SomeTarget(targetQueue)
81+
});
82+
```
83+
84+
### Amazon DynamoDB
85+
86+
A DynamoDB stream can be used as a source for a pipe. The stream will be polled for new messages and the messages will be sent to the pipe.
87+
88+
```ts
89+
import * as ddb from 'aws-cdk-lib/aws-dynamodb';
90+
91+
const table = new ddb.TableV2(this, 'MyTable', {
92+
partitionKey: {
93+
name: 'id',
94+
type: ddb.AttributeType.STRING,
95+
},
96+
dynamoStream: ddb.StreamViewType.NEW_IMAGE,
97+
});
98+
declare const targetQueue: sqs.Queue;
99+
100+
const pipeSource = new sources.DynamoDBSource(table, {
101+
startingPosition: sources.DynamoDBStartingPosition.LATEST,
76102
});
77103

78104
const pipe = new pipes.Pipe(this, 'Pipe', {
Lines changed: 158 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,158 @@
1+
import { IPipe, ISource, SourceConfig } from '@aws-cdk/aws-pipes-alpha';
2+
import { Duration } from 'aws-cdk-lib';
3+
import { IRole } from 'aws-cdk-lib/aws-iam';
4+
import { ITableV2 } from 'aws-cdk-lib/aws-dynamodb';
5+
import { DeadLetterConfigParameters } from './deadLetterConfig';
6+
import { DynamoDBStartingPosition, OnPartialBatchItemFailure } from './enums';
7+
8+
/**
9+
* Parameters for the DynamoDB source.
10+
*/
11+
export interface DynamoDBSourceParameters {
12+
/**
13+
* The maximum number of records to include in each batch.
14+
*
15+
* @see https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-pipes-pipe-pipesourcedynamodbstreamparameters.html#cfn-pipes-pipe-pipesourcedynamodbstreamparameters-batchsize
16+
* @default 1
17+
*/
18+
readonly batchSize?: number;
19+
20+
/**
21+
* Define the target queue to send dead-letter queue events to.
22+
*
23+
* @see https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-pipes-pipe-pipesourcedynamodbstreamparameters.html#cfn-pipes-pipe-pipesourcedynamodbstreamparameters-deadletterconfig
24+
* @default no dead letter queue
25+
*/
26+
readonly deadLetterConfig?: DeadLetterConfigParameters;
27+
28+
/**
29+
* The maximum length of a time to wait for events.
30+
*
31+
* @see https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-pipes-pipe-pipesourcedynamodbstreamparameters.html#cfn-pipes-pipe-pipesourcedynamodbstreamparameters-maximumbatchingwindowinseconds
32+
* @default no batching window
33+
*/
34+
readonly maximumBatchingWindow?: Duration;
35+
36+
/**
37+
* (Streams only) Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
38+
*
39+
* Leave undefined to set the maximum record age to infinite.
40+
*
41+
* @see https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-pipes-pipe-pipesourcedynamodbstreamparameters.html#cfn-pipes-pipe-pipesourcedynamodbstreamparameters-maximumrecordageinseconds
42+
* @default -1 (infinite)
43+
*/
44+
readonly maximumRecordAge?: Duration;
45+
46+
/**
47+
* (Streams only) Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
48+
*
49+
* @see https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-pipes-pipe-pipesourcedynamodbstreamparameters.html#cfn-pipes-pipe-pipesourcedynamodbstreamparameters-maximumretryattempts
50+
* @default -1 (infinite)
51+
*/
52+
readonly maximumRetryAttempts?: number;
53+
54+
/**
55+
* (Streams only) Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retry each half until all the records are processed or there is one failed message left in the batch.
56+
*
57+
* @see https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-pipes-pipe-pipesourcedynamodbstreamparameters.html#cfn-pipes-pipe-pipesourcedynamodbstreamparameters-onpartialbatchitemfailure
58+
* @default off
59+
*/
60+
readonly onPartialBatchItemFailure?: OnPartialBatchItemFailure;
61+
62+
/**
63+
* (Streams only) The number of batches to process concurrently from each shard. The default value is 1.
64+
*
65+
* @see https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-pipes-pipe-pipesourcedynamodbstreamparameters.html#cfn-pipes-pipe-pipesourcedynamodbstreamparameters-parallelizationfactor
66+
* @default 1
67+
*/
68+
readonly parallelizationFactor?: number;
69+
70+
/**
71+
* (Streams only) The position in a stream from which to start reading.
72+
*
73+
* @see https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-pipes-pipe-pipesourcedynamodbstreamparameters.html#cfn-pipes-pipe-pipesourcedynamodbstreamparameters-startingposition
74+
*/
75+
readonly startingPosition: DynamoDBStartingPosition;
76+
}
77+
78+
/**
79+
* A source that reads from an DynamoDB stream.
80+
*/
81+
export class DynamoDBSource implements ISource {
82+
private readonly table: ITableV2;
83+
readonly sourceArn;
84+
private sourceParameters;
85+
86+
private batchSize;
87+
private maximumBatchingWindowInSeconds;
88+
private maximumRecordAgeInSeconds;
89+
private maximumRetryAttempts;
90+
private parallelizationFactor;
91+
92+
constructor(table: ITableV2, parameters: DynamoDBSourceParameters) {
93+
this.table = table;
94+
95+
if (table.tableStreamArn === undefined) {
96+
throw new Error('Table does not have a stream defined, cannot create pipes source');
97+
}
98+
99+
this.sourceArn = table.tableStreamArn;
100+
this.sourceParameters = parameters;
101+
102+
this.batchSize = this.sourceParameters.batchSize;
103+
this.maximumBatchingWindowInSeconds = this.sourceParameters.maximumBatchingWindow?.toSeconds();
104+
this.maximumRecordAgeInSeconds = this.sourceParameters.maximumRecordAge?.toSeconds();
105+
this.maximumRetryAttempts = this.sourceParameters.maximumRetryAttempts;
106+
this.parallelizationFactor = this.sourceParameters.parallelizationFactor;
107+
108+
if (this.batchSize !== undefined) {
109+
if (this.batchSize < 1 || this.batchSize > 10000) {
110+
throw new Error(`Batch size must be between 1 and 10000, received ${this.batchSize}`);
111+
}
112+
}
113+
if (this.maximumBatchingWindowInSeconds !== undefined) {
114+
// only need to check upper bound since Duration amounts cannot be negative
115+
if (this.maximumBatchingWindowInSeconds > 300) {
116+
throw new Error(`Maximum batching window must be between 0 and 300, received ${this.maximumBatchingWindowInSeconds}`);
117+
}
118+
}
119+
if (this.maximumRecordAgeInSeconds !== undefined) {
120+
// only need to check upper bound since Duration amounts cannot be negative
121+
if (this.maximumRecordAgeInSeconds > 604800) {
122+
throw new Error(`Maximum record age in seconds must be between -1 and 604800, received ${this.maximumRecordAgeInSeconds}`);
123+
}
124+
}
125+
if (this.maximumRetryAttempts !== undefined) {
126+
if (this.maximumRetryAttempts < -1 || this.maximumRetryAttempts > 10000) {
127+
throw new Error(`Maximum retry attempts must be between -1 and 10000, received ${this.maximumRetryAttempts}`);
128+
}
129+
}
130+
if (this.parallelizationFactor !== undefined) {
131+
if (this.parallelizationFactor < 1 || this.parallelizationFactor > 10) {
132+
throw new Error(`Parallelization factor must be between 1 and 10, received ${this.parallelizationFactor}`);
133+
}
134+
}
135+
}
136+
137+
bind(_pipe: IPipe): SourceConfig {
138+
return {
139+
sourceParameters: {
140+
dynamoDbStreamParameters: {
141+
batchSize: this.batchSize,
142+
deadLetterConfig: this.sourceParameters.deadLetterConfig,
143+
maximumBatchingWindowInSeconds: this.maximumBatchingWindowInSeconds,
144+
maximumRecordAgeInSeconds: this.maximumRecordAgeInSeconds,
145+
maximumRetryAttempts: this.maximumRetryAttempts,
146+
onPartialBatchItemFailure: this.sourceParameters.onPartialBatchItemFailure,
147+
parallelizationFactor: this.sourceParameters.parallelizationFactor,
148+
startingPosition: this.sourceParameters.startingPosition,
149+
},
150+
},
151+
};
152+
}
153+
154+
grantRead(grantee: IRole): void {
155+
this.table.grantStreamRead(grantee);
156+
}
157+
}
158+
Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
/**
 * Define how to handle item process failures.
 */
export enum OnPartialBatchItemFailure {
  /**
   * EventBridge halves each batch and retries each half until all the
   * records are processed or there is one failed message left in the batch.
   */
  AUTOMATIC_BISECT = 'AUTOMATIC_BISECT',
}
10+
11+
/**
 * The position in a Kinesis stream from which to start reading.
 */
export enum KinesisStartingPosition {
  /**
   * Start reading at the oldest available (untrimmed) record in the shard.
   */
  TRIM_HORIZON = 'TRIM_HORIZON',
  /**
   * Start reading just after the most recent record, so that only records
   * added after the pipe is created are read.
   */
  LATEST = 'LATEST',
  /**
   * Start reading from the record at the given point in time
   * (see `startingPositionTimestamp` on the Kinesis source parameters).
   */
  AT_TIMESTAMP = 'AT_TIMESTAMP',
}
28+
29+
/**
 * The position in a DynamoDB stream from which to start reading.
 *
 * Note: unlike Kinesis, DynamoDB streams do not support AT_TIMESTAMP.
 */
export enum DynamoDBStartingPosition {
  /**
   * Start reading at the oldest available (untrimmed) record in the shard.
   */
  TRIM_HORIZON = 'TRIM_HORIZON',
  /**
   * Start reading just after the most recent record, so that only records
   * added after the pipe is created are read.
   */
  LATEST = 'LATEST',
}
Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
11
// Barrel file: re-exports the public API of this package — each pipe
// source implementation plus the shared enums and dead-letter config types.
export * from './sqs';
export * from './kinesis';
export * from './dynamodb';
export * from './enums';
export * from './deadLetterConfig';

packages/@aws-cdk/aws-pipes-sources-alpha/lib/kinesis.ts

Lines changed: 4 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@ import { Duration } from 'aws-cdk-lib';
33
import { IRole } from 'aws-cdk-lib/aws-iam';
44
import { IStream } from 'aws-cdk-lib/aws-kinesis';
55
import { DeadLetterConfigParameters } from './deadLetterConfig';
6+
import { KinesisStartingPosition, OnPartialBatchItemFailure } from './enums';
67

78
/**
89
* Parameters for the Kinesis source.
@@ -71,7 +72,7 @@ export interface KinesisSourceParameters {
7172
*
7273
* @see https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-pipes-pipe-pipesourcekinesisstreamparameters.html#cfn-pipes-pipe-pipesourcekinesisstreamparameters-startingposition
7374
*/
74-
readonly startingPosition: StartingPosition;
75+
readonly startingPosition: KinesisStartingPosition;
7576

7677
/**
7778
* With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
@@ -82,41 +83,14 @@ export interface KinesisSourceParameters {
8283
readonly startingPositionTimestamp?: string;
8384
}
8485

85-
/**
86-
* Define how to handle item process failures.
87-
*/
88-
export enum OnPartialBatchItemFailure {
89-
/**
90-
* AUTOMATIC_BISECT
91-
*/
92-
AUTOMATIC_BISECT = 'AUTOMATIC_BISECT',
93-
}
94-
95-
/**
96-
* The position in a stream from which to start reading.
97-
*/
98-
export enum StartingPosition {
99-
/**
100-
* TRIM_HORIZON
101-
*/
102-
TRIM_HORIZON = 'TRIM_HORIZON',
103-
/**
104-
* LATEST
105-
*/
106-
LATEST = 'LATEST',
107-
/**
108-
* AT_TIMESTAMP
109-
*/
110-
AT_TIMESTAMP = 'AT_TIMESTAMP',
111-
}
112-
11386
/**
11487
* A source that reads from Kinesis.
11588
*/
11689
export class KinesisSource implements ISource {
11790
private readonly stream: IStream;
11891
readonly sourceArn;
11992
private sourceParameters;
93+
12094
private batchSize;
12195
private maximumBatchingWindowInSeconds;
12296
private maximumRecordAgeInSeconds;
@@ -127,6 +101,7 @@ export class KinesisSource implements ISource {
127101
this.stream = stream;
128102
this.sourceArn = stream.streamArn;
129103
this.sourceParameters = parameters;
104+
130105
this.batchSize = this.sourceParameters.batchSize;
131106
this.maximumBatchingWindowInSeconds = this.sourceParameters.maximumBatchingWindow?.toSeconds();
132107
this.maximumRecordAgeInSeconds = this.sourceParameters.maximumRecordAge?.toSeconds();
Lines changed: 68 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,68 @@
1+
// Jest Snapshot v1, https://goo.gl/fbAQLP
2+
3+
exports[`dynamodb source should grant pipe role read access 1`] = `
4+
{
5+
"MyPipeRoleCBC8E9AB": {
6+
"Properties": {
7+
"AssumeRolePolicyDocument": {
8+
"Statement": [
9+
{
10+
"Action": "sts:AssumeRole",
11+
"Effect": "Allow",
12+
"Principal": {
13+
"Service": "pipes.amazonaws.com",
14+
},
15+
},
16+
],
17+
"Version": "2012-10-17",
18+
},
19+
},
20+
"Type": "AWS::IAM::Role",
21+
},
22+
}
23+
`;
24+
25+
exports[`dynamodb source should grant pipe role read access 2`] = `
26+
{
27+
"MyPipeRoleDefaultPolicy31387C20": {
28+
"Properties": {
29+
"PolicyDocument": {
30+
"Statement": [
31+
{
32+
"Action": "dynamodb:ListStreams",
33+
"Effect": "Allow",
34+
"Resource": {
35+
"Fn::GetAtt": [
36+
"MyTable794EDED1",
37+
"StreamArn",
38+
],
39+
},
40+
},
41+
{
42+
"Action": [
43+
"dynamodb:DescribeStream",
44+
"dynamodb:GetRecords",
45+
"dynamodb:GetShardIterator",
46+
],
47+
"Effect": "Allow",
48+
"Resource": {
49+
"Fn::GetAtt": [
50+
"MyTable794EDED1",
51+
"StreamArn",
52+
],
53+
},
54+
},
55+
],
56+
"Version": "2012-10-17",
57+
},
58+
"PolicyName": "MyPipeRoleDefaultPolicy31387C20",
59+
"Roles": [
60+
{
61+
"Ref": "MyPipeRoleCBC8E9AB",
62+
},
63+
],
64+
},
65+
"Type": "AWS::IAM::Policy",
66+
},
67+
}
68+
`;

0 commit comments

Comments
 (0)