// helper-functions.js
import fs from 'fs'
import path from 'path'
import vkb from 'vkbeautify'
// import sqlite3 from 'sqlite3' // re-enable before calling extractSqlite below
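// Dumps the `entry` table of a library sqlite database into a pretty-printed JSON file,
// mapping each rowid to its download count and date added (collection rows are skipped).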
function extractSqlite(dbFile, outputFile) {
    const db = new sqlite3.Database(dbFile);
    const tableName = 'entry';
    db.all(`SELECT rowid as id, * FROM ${tableName}`, [], (err, rows) => {
        if (err) {
            throw err
        }
        const idToDownloads = {};
        rows.forEach(row => {
            if (idToDownloads[row.id]) console.error(`id ${row.id} already exists`)
            if (row.type == 'coll') return
            idToDownloads[row.id] = { downloads: row.downloads, dateAdded: row.date_added }
        });
        fs.writeFileSync(outputFile, vkb.json(JSON.stringify(idToDownloads)), 'utf-8')
        console.log(`extracted ${Object.keys(idToDownloads).length} files from ${dbFile} to ${outputFile}`)
    });
    db.close();
}
//extractSqlite('library/library.db', 'library/id-to-info.json')
import { S3Handler } from './s3-hander.js'
import { getTypeInfo } from './vue-handler.js'
const sh = new S3Handler('cloud-dev')
// recursively uploads a local folder to s3, preserving path and file names
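// Every file and folder name is expected to embed a numeric id in braces, e.g. name{123};
// the S3 object key is built from extraPrefixes plus the file name.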
async function uploadFolderToS3(inputPath, extraPrefixes) {
    const files = await fs.promises.readdir(inputPath);
    for (const file of files) {
        const filePath = path.join(inputPath, file)
        if (!/\{\d+\}/.test(file)) {
            console.error(`file ${file} does not have an id. ${filePath}`)
        }
        const stats = await fs.promises.stat(filePath);
        if (stats.isFile()) {
            const uploadParams = {
                Key: [...extraPrefixes, file].join('/'),
                Body: fs.createReadStream(filePath), // await fs.promises.readFile(filePath),
                ContentType: getTypeInfo(file.split('.').slice(-1)[0])[3], // content type for the file extension
                Metadata: {
                    'last-modified': stats.mtime.toISOString(),
                },
            };
            try {
                await sh.upload(uploadParams)
                console.log(`uploaded ${filePath} to S3 bucket at ${uploadParams.Key}`);
            } catch (err) {
                console.error(`Error uploading ${filePath} to S3 bucket at ${uploadParams.Key}`, err);
            }
        } else {
            // subfolder: recurse, extending the key prefix with the folder name
            await uploadFolderToS3(filePath, [...extraPrefixes, file])
        }
    }
};
const folderPath = 'මෘදුකාංග{450}' // "මෘදුකාංග" is Sinhala for "Software"; {450} is the folder's id
//uploadFolderToS3('/datadrive/public/library/' + folderPath, folderPath.split('/'))
//uploadFolderToS3('/datadrive/public/library', [])
//uploadFolderToS3('/Users/janaka/Downloads/test{1}', ['test{1}'])
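// To run one of the helpers, uncomment the relevant call above and execute this file with node;
// since it uses ES module syntax, the package likely needs "type": "module" (or a .mjs extension).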