-
Notifications
You must be signed in to change notification settings - Fork 102
Expand file tree
/
Copy pathindex.ts
More file actions
105 lines (83 loc) · 2.77 KB
/
index.ts
File metadata and controls
105 lines (83 loc) · 2.77 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
import type { CloudFormationCustomResourceEvent } from 'aws-lambda';
import { DynamoDB, S3 } from 'aws-sdk';
import { customResourceHelper, OnCreateHandler, ResourceHandler, ResourceHandlerReturn } from 'custom-resource-helper';
import chunk from 'lodash.chunk';
// DynamoDB has a 25 item limit in batch requests
// https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchWriteItem.html
const MAX_BATCH_CHUNK = 25;
// camelCase view of the custom resource's ResourceProperties (see getProperties).
interface DynamoDBSeederProps {
// Target DynamoDB table to seed.
tableName: string;
// Seed data source: either inline JSON, or an S3 object location.
// handleCreate prefers inlineSeeds when present and falls back to S3.
seeds: {
inlineSeeds?: string;
s3Bucket?: string;
s3Key?: string;
s3ObjectVersion?: string;
};
}
// A seed file parses to an array of plain objects, one per DynamoDB item.
type Seeds = Record<string, unknown>[];
// Module-level AWS SDK v2 clients, reused across Lambda invocations.
const dynamodb = new DynamoDB.DocumentClient();
const s3 = new S3();
/**
 * Maps the raw CloudFormation resource properties (PascalCase keys) onto
 * the typed, camelCase shape the handlers consume.
 */
const getProperties = (props: CloudFormationCustomResourceEvent['ResourceProperties']): DynamoDBSeederProps => {
  const rawSeeds = props.Seeds;
  return {
    tableName: props.TableName,
    seeds: {
      inlineSeeds: rawSeeds.InlineSeeds,
      s3Bucket: rawSeeds.S3Bucket,
      s3Key: rawSeeds.S3Key,
      s3ObjectVersion: rawSeeds.S3ObjectVersion,
    },
  };
};
/**
 * Create handler: resolves the seed data (inline JSON takes precedence,
 * otherwise the configured S3 object), writes all items to the table, and
 * uses the CloudFormation request id as the physical resource id.
 */
const handleCreate: OnCreateHandler = async (event): Promise<ResourceHandlerReturn> => {
  const { tableName, seeds: seedConfig } = getProperties(event.ResourceProperties);
  const { inlineSeeds, ...s3Location } = seedConfig;
  const seeds: Seeds = inlineSeeds
    ? (JSON.parse(inlineSeeds) as Seeds)
    : await getSeedsFromS3(s3Location);
  await writeSeeds(tableName, seeds);
  console.log(`Seed running complete for table ${tableName}`);
  return { physicalResourceId: event.RequestId };
};
/**
 * Downloads a JSON seed file from S3 and parses it into seed items.
 *
 * @param s3Location - bucket, key, and optional object version of the seed file.
 * @returns the parsed array of seed items.
 * @throws if bucket/key are missing, the object body is empty, or the body is not valid JSON.
 */
const getSeedsFromS3 = async (s3Location: { s3Bucket?: string; s3Key?: string; s3ObjectVersion?: string }): Promise<Seeds> => {
const { s3Bucket, s3Key, s3ObjectVersion } = s3Location;
if (!s3Bucket || !s3Key) {
throw new Error('Bucket configuration missing!');
}
const { Body: body } = await s3
.getObject({
Bucket: s3Bucket,
Key: s3Key,
VersionId: s3ObjectVersion,
})
.promise();
// BUG FIX: `transformToString()` is the AWS SDK *v3* streaming-blob API.
// This file uses aws-sdk v2, where `getObject().promise()` resolves Body to
// a Buffer/Uint8Array/string — decode it synchronously with toString().
const bodyContents = body?.toString('utf-8');
if (!bodyContents) {
throw new Error(
`Cannot load seeds from bucket ${s3Bucket} with key ${s3Key}`
);
}
return JSON.parse(bodyContents) as Seeds;
};
const writeSeeds = async (tableName: string, seeds: Seeds): Promise<void> => {
const seedChunks = chunk(seeds, MAX_BATCH_CHUNK);
console.log(`Sending data to dynamodb: ${seedChunks.length} chunks`);
await Promise.all(
seedChunks.map(async (seedChunk) => {
const requests = seedChunk.map((seed) => ({
PutRequest: {
Item: seed,
},
}));
return dynamodb
.batchWrite({
RequestItems: {
[tableName]: requests,
},
})
.promise();
}),
);
};
/**
 * Lambda entry point. Only an onCreate handler is supplied; update/delete
 * behavior is whatever customResourceHelper provides by default.
 */
export const handler = customResourceHelper((): ResourceHandler => ({
  onCreate: handleCreate,
}));