Skip to content

Commit

Permalink
fix(compile): make the functions with streams wait for the streams to finish
Browse files Browse the repository at this point in the history
  • Loading branch information
meszaros-lajos-gyorgy committed Dec 20, 2023
1 parent eedaa8d commit 36bcea1
Showing 1 changed file with 60 additions and 30 deletions.
90 changes: 60 additions & 30 deletions src/compile.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,17 +19,27 @@ const compileFTS = async (settings: Settings, fts: ArxFTS) => {
const repackedFts = FTS.save(fts)
const { total: ftsHeaderSize } = getHeaderSize(repackedFts, 'fts')

Readable.from(repackedFts)
.pipe(
through(
transformSplitBy(
splitAt(ftsHeaderSize),
transformIdentity(),
implode(Compression.Binary, DictionarySize.Large),
return new Promise((resolve, reject) => {
const writeStream = fs.createWriteStream(path.join(ftsPath, 'fast.fts'))
writeStream
.on('close', () => {
resolve(true)
})
.on('error', (e) => {
reject(e)
})
Readable.from(repackedFts)
.pipe(
through(
transformSplitBy(
splitAt(ftsHeaderSize),
transformIdentity(),
implode(Compression.Binary, DictionarySize.Large),
),
),
),
)
.pipe(fs.createWriteStream(path.join(ftsPath, 'fast.fts')))
)
.pipe(writeStream)
})
}

const compileLLF = async (settings: Settings, llf: ArxLLF) => {
Expand All @@ -38,17 +48,27 @@ const compileLLF = async (settings: Settings, llf: ArxLLF) => {
const repackedLlf = LLF.save(llf)
const { total: llfHeaderSize } = getHeaderSize(repackedLlf, 'llf')

Readable.from(repackedLlf)
.pipe(
through(
transformSplitBy(
splitAt(llfHeaderSize),
transformIdentity(),
implode(Compression.Binary, DictionarySize.Large),
return new Promise((resolve, reject) => {
const writeStream = fs.createWriteStream(path.join(llfPath, `level${settings.levelIdx}.llf`))
writeStream
.on('close', () => {
resolve(true)
})
.on('error', (e) => {
reject(e)
})
Readable.from(repackedLlf)
.pipe(
through(
transformSplitBy(
splitAt(llfHeaderSize),
transformIdentity(),
implode(Compression.Binary, DictionarySize.Large),
),
),
),
)
.pipe(fs.createWriteStream(path.join(llfPath, `level${settings.levelIdx}.llf`)))
)
.pipe(writeStream)
})
}

const compileDLF = async (settings: Settings, dlf: ArxDLF) => {
Expand All @@ -57,17 +77,27 @@ const compileDLF = async (settings: Settings, dlf: ArxDLF) => {
const repackedDlf = DLF.save(dlf)
const { total: dlfHeaderSize } = getHeaderSize(repackedDlf, 'dlf')

Readable.from(repackedDlf)
.pipe(
through(
transformSplitBy(
splitAt(dlfHeaderSize),
transformIdentity(),
implode(Compression.Binary, DictionarySize.Large),
return new Promise((resolve, reject) => {
const writeStream = fs.createWriteStream(path.join(dlfPath, `level${settings.levelIdx}.dlf`))
writeStream
.on('close', () => {
resolve(true)
})
.on('error', (e) => {
reject(e)
})
Readable.from(repackedDlf)
.pipe(
through(
transformSplitBy(
splitAt(dlfHeaderSize),
transformIdentity(),
implode(Compression.Binary, DictionarySize.Large),
),
),
),
)
.pipe(fs.createWriteStream(path.join(dlfPath, `level${settings.levelIdx}.dlf`)))
)
.pipe(writeStream)
})
}

const hasDotnet6OrNewer = async () => {
Expand Down

0 comments on commit 36bcea1

Please sign in to comment.