file-system-service.ts
import fs = require('fs');
import * as AWS from 'aws-sdk';

export interface FileSystemService {
  read(path: string): Promise<Buffer>;
  save(data: Buffer, path: string): Promise<any>;
}
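
// Hypothetical usage sketch (not part of the original file): callers can depend on the
// FileSystemService interface alone, so the storage backend can be swapped per environment.
// The copyFile helper below is illustrative only.
export async function copyFile(fsService: FileSystemService, from: string, to: string): Promise<void> {
  const contents = await fsService.read(from);
  await fsService.save(contents, to);
}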
// Each Lambda function receives 512 MB of non-persistent disk space in its own /tmp directory - see https://aws.amazon.com/lambda/faqs/.
export class LambdaFileSystemService implements FileSystemService {
  public read(path: string): Promise<Buffer> {
    return new Promise((resolve, reject) => {
      fs.readFile(path, (error, data) => {
        if (error) {
          console.log(`Read file returned error ${JSON.stringify(error, null, 4)}`);
          reject(error);
        } else {
          console.log(`Read file returned result of length ${data.length}`);
          resolve(data);
        }
      });
    });
  }

  public save(data: Buffer, path: string): Promise<any> {
    return new Promise((resolve, reject) => {
      console.log(`Writing file to ${path}`);
      fs.writeFile(path, data, error => {
        if (error) {
          console.log(`Write file returned error ${JSON.stringify(error, null, 4)}`);
          reject(error);
        } else {
          console.log(`Write file was successful`);
          resolve({});
        }
      });
    });
  }
}
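
// Hypothetical usage sketch (not part of the original file): inside a Lambda handler,
// a payload can be staged in the function's ephemeral /tmp directory and read back.
// The file name below is illustrative only.
export async function tmpRoundTripExample(): Promise<Buffer> {
  const localFileSystem: FileSystemService = new LambdaFileSystemService();
  await localFileSystem.save(Buffer.from('{"example": true}'), '/tmp/example.json');
  return localFileSystem.read('/tmp/example.json');
}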
export class S3FileSystemService implements FileSystemService {
  public read(path: string): Promise<Buffer> {
    return new Promise((resolve, reject) => {
      console.log(`Reading file from ${path}`);
      const bucket = process.env.STEP_FUNCTIONS_DATA_BUCKET;
      // Fail fast with a clear error if the bucket is not configured.
      if (!bucket) {
        reject(new Error('STEP_FUNCTIONS_DATA_BUCKET environment variable is not set'));
        return;
      }
      const s3 = new AWS.S3();
      s3.getObject({
        Bucket: bucket,
        Key: path,
      }, (err, s3data) => {
        if (err) {
          console.log('Error', err);
          reject(err);
        } else {
          const buffer = s3data.Body as Buffer;
          resolve(buffer);
        }
      });
    });
  }

  public save(data: Buffer, path: string): Promise<any> {
    return new Promise((resolve, reject) => {
      const bucket = process.env.STEP_FUNCTIONS_DATA_BUCKET;
      // Fail fast with a clear error if the bucket is not configured.
      if (!bucket) {
        reject(new Error('STEP_FUNCTIONS_DATA_BUCKET environment variable is not set'));
        return;
      }
      console.log(`Writing file to ${path}`);
      const s3 = new AWS.S3();
      s3.upload({
        Bucket: bucket,
        Key: path,
        Body: data,
      }, (err, s3data) => {
        if (err) {
          console.log('Error', err);
          reject(err);
        } else {
          console.log(`S3 save ${JSON.stringify(s3data, null, 4)}`);
          resolve(s3data);
        }
      });
    });
  }
}
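
// Hypothetical factory sketch (an assumption, not part of the original file): choose the
// S3-backed implementation when STEP_FUNCTIONS_DATA_BUCKET is configured, otherwise fall
// back to the local /tmp implementation.
export function createFileSystemService(): FileSystemService {
  return process.env.STEP_FUNCTIONS_DATA_BUCKET
    ? new S3FileSystemService()
    : new LambdaFileSystemService();
}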