Skip to content

Commit

Permalink
Merge pull request #279 from cofacts/use-buffer
Browse files Browse the repository at this point in the history
Use buffer instead of cloned stream
  • Loading branch information
MrOrz authored May 1, 2022
2 parents 2aeb4a3 + f6cac74 commit 271bc9b
Show file tree
Hide file tree
Showing 6 changed files with 29 additions and 21 deletions.
19 changes: 9 additions & 10 deletions src/graphql/mutations/CreateMediaArticle.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import { GraphQLString, GraphQLNonNull } from 'graphql';
import { assertUser } from 'util/user';
import client from 'util/client';
import { uploadToGCS } from 'util/gcs';
import { getMediaFileHash } from 'graphql/util';
import { getMediaFileHash, MAX_FILE_SIZE } from 'graphql/util';

import { ArticleReferenceInput } from 'graphql/models/ArticleReference';
import MutationResult from 'graphql/models/MutationResult';
Expand All @@ -12,12 +12,12 @@ import ArticleTypeEnum from 'graphql/models/ArticleTypeEnum';
import fetch from 'node-fetch';

/**
* @param {NodeJS.ReadableStream} fileStream
* @param {Buffer} fileBuffer
 * @param {string} name File name
* @param {ArticleTypeEnum} type The article type
* @returns {string} url
*/
export async function uploadFile(fileStream, name, type) {
export async function uploadFile(fileBuffer, name, type) {
// final file name that combined with folder name.
let fileName;
let mimeType = '*/*';
Expand All @@ -31,7 +31,7 @@ export async function uploadFile(fileStream, name, type) {

mimeType = 'image/jpeg';
}
return await uploadToGCS(fileStream, fileName, mimeType);
return await uploadToGCS(fileBuffer, fileName, mimeType);
}

/**
Expand All @@ -57,11 +57,10 @@ async function createNewMediaArticle({
throw new Error(`Type ${articleType} is not yet supported.`);
}

const file = await fetch(mediaUrl);
const attachmentHash = await getMediaFileHash(
await file.clone().buffer(),
articleType
);
const fileBuffer = await (await fetch(mediaUrl, {
size: MAX_FILE_SIZE,
})).buffer();
const attachmentHash = await getMediaFileHash(fileBuffer, articleType);
const text = '';
const now = new Date().toISOString();
const reference = {
Expand All @@ -88,7 +87,7 @@ async function createNewMediaArticle({
}

const attachmentUrl = await uploadFile(
file.body,
fileBuffer,
attachmentHash,
articleType
);
Expand Down
4 changes: 2 additions & 2 deletions src/graphql/mutations/__tests__/CreateMediaArticle.js
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ describe('creation', () => {
Promise.resolve({
status: 200,
body: {},
clone: () => ({ buffer: jest.fn() }),
buffer: jest.fn(),
})
);
imageHash.mockImplementation((file, bits, method, callback) =>
Expand Down Expand Up @@ -302,7 +302,7 @@ describe('error', () => {
Promise.resolve({
status: 200,
body: {},
clone: () => ({ buffer: jest.fn() }),
buffer: jest.fn(),
})
);
imageHash.mockImplementation((file, bits, method, callback) =>
Expand Down
10 changes: 5 additions & 5 deletions src/graphql/queries/ListArticles.js
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ import {
createCommonListFilter,
attachCommonListFilter,
getMediaFileHash,
MAX_FILE_SIZE,
} from 'graphql/util';
import scrapUrls from 'util/scrapUrls';
import ReplyTypeEnum from 'graphql/models/ReplyTypeEnum';
Expand Down Expand Up @@ -443,12 +444,11 @@ export default {
}

if (filter.mediaUrl) {
const file = await fetch(filter.mediaUrl);
// FIXME: Use mime or binary header to get articleType instead of manual input
const attachmentHash = await getMediaFileHash(
await file.clone().buffer(),
'IMAGE'
);
const fileBuffer = await (await fetch(filter.mediaUrl, {
size: MAX_FILE_SIZE,
})).buffer();
const attachmentHash = await getMediaFileHash(fileBuffer, 'IMAGE');
filterQueries.push({
term: {
attachmentHash,
Expand Down
2 changes: 1 addition & 1 deletion src/graphql/queries/__tests__/ListArticles.js
Original file line number Diff line number Diff line change
Expand Up @@ -719,7 +719,7 @@ describe('ListArticles', () => {
Promise.resolve({
status: 200,
body: {},
clone: () => ({ buffer: jest.fn() }),
buffer: jest.fn(),
})
);
imageHash.mockImplementation((file, bits, method, callback) =>
Expand Down
3 changes: 3 additions & 0 deletions src/graphql/util.js
Original file line number Diff line number Diff line change
Expand Up @@ -430,6 +430,9 @@ export function filterByStatuses(entriesWithStatus, statuses) {
return entriesWithStatus.filter(({ status }) => statuses.includes(status));
}

/** Max downloadable file size */
export const MAX_FILE_SIZE = 5 * 1024 * 1024; // bytes

/**
* @param {Buffer} fileBuffer
* @param {ArticleTypeEnum} type The article type
Expand Down
12 changes: 9 additions & 3 deletions src/util/gcs.js
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import { Storage } from '@google-cloud/storage';
import { Readable } from 'stream';
import rollbar from '../rollbarInstance';

const storage = new Storage({
Expand All @@ -9,16 +10,21 @@ const gcsBucket = storage.bucket(process.env.GCS_BUCKET_NAME || 'default');
/**
 * Upload the given data to Google Cloud Storage under the given file name
*
* @param {ReadableStream} fileStream
* @param {ReadableStream | Buffer} data
* @param {string} fileName
* @param {string} contentType MIME type
* @returns {string} url
*/
export async function uploadToGCS(fileStream, fileName, contentType) {
export async function uploadToGCS(data, fileName, contentType) {
if (!process.env.GCS_BUCKET_NAME) {
throw new Error('GCS_BUCKET_NAME is not set, cannot upload file.');
}

// If data is a Buffer, wrap it in a readable stream so it can be piped below
if (!data.pipe) {
data = Readable.from(data);
}

const options = {
metadata: {
contentType,
Expand All @@ -27,7 +33,7 @@ export async function uploadToGCS(fileStream, fileName, contentType) {

const file = gcsBucket.file(fileName);
const url = await new Promise((resolve, reject) => {
fileStream
data
.pipe(file.createWriteStream(options))
.on('error', function(err) {
rollbar.error('GCS error', err);
Expand Down

0 comments on commit 271bc9b

Please sign in to comment.