File Storage

This section gives details on how ScaffoldHub implements file storage.

For setup, refer to Setup > File Storage.

Configuration

Each type of file upload has its own configuration, and they are all defined in:

  • frontend/src/security/storage.ts

  • backend/src/security/storage.ts

/**
 * Storage permissions.
 *
 * @id - Used to identify the rule on permissions and upload.
 * @folder - Folder where the files will be saved
 * @maxSizeInBytes - Max allowed size in bytes
 * @bypassWritingPermissions - Does not validate if the user has permission to write
 * @publicRead - The file can be publicly accessed via the URL without the need for a signed token
 */
export default class Storage {
  static get values() {
    return {
      userAvatarsProfiles: {
        id: 'userAvatarsProfiles',
        folder: 'user/avatars/profile/:userId',
        maxSizeInBytes: 10 * 1024 * 1024,
        bypassWritingPermissions: true,
        publicRead: true,
      },
      settingsLogos: {
        id: 'settingsLogos',
        folder: 'tenant/:tenantId/settings/logos',
        maxSizeInBytes: 10 * 1024 * 1024,
        publicRead: true,
      },
      settingsBackgroundImages: {
        id: 'settingsBackgroundImages',
        folder:
          'tenant/:tenantId/settings/backgroundImages',
        maxSizeInBytes: 10 * 1024 * 1024,
        publicRead: true,
      },
      productPhotos: {
        id: 'productPhotos',
        folder: 'tenant/:tenantId/product/photos',
        maxSizeInBytes: 1000000,
      },
      orderAttachments: {
        id: 'orderAttachments',
        folder: 'tenant/:tenantId/order/attachments',
        maxSizeInBytes: 1000000,
      },
    };
  }
}
  • id: Used to identify the rule on permissions and upload.

  • folder: Folder where the files will be saved. It accepts two parameters, :userId and :tenantId, which are replaced with their real values when the file is saved.

  • maxSizeInBytes: Max allowed size in bytes.

  • bypassWritingPermissions: Does not validate if the user has permission to write. This is usually used when the user id is in the path, so users can only access their own folder.

  • publicRead: The file can be publicly accessed via the URL without the need for a signed token.
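To support uploads for a new entity, a new rule is added to the object returned by Storage.values, in both the frontend and the backend storage.ts files. The entry below is a minimal sketch; taskDocuments, its folder, and its size limit are hypothetical values, not part of the scaffolded app:

// Hypothetical example: a new rule for task documents.
// Add it inside the object returned by Storage.values in both
// frontend/src/security/storage.ts and backend/src/security/storage.ts.
taskDocuments: {
  id: 'taskDocuments',
  // :tenantId is replaced with the current tenant id when the file is saved
  folder: 'tenant/:tenantId/task/documents',
  // 5 MB limit (illustrative)
  maxSizeInBytes: 5 * 1024 * 1024,
  // publicRead omitted, so downloads require a signed URL
},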

Credentials

For Amazon S3 and Google Cloud Storage, the uploaded files do not pass through the backend. The backend creates credentials that allow the frontend to upload directly to the file storage provider.

Before sending the credentials to the frontend, the backend validates that the user has all the needed permissions.

import PermissionChecker from '../../services/user/permissionChecker';
import Storage from '../../security/storage';
import FileStorage from '../../services/file/fileStorage';
import ApiResponseHandler from '../apiResponseHandler';
import Error403 from '../../errors/Error403';

export default async (req, res) => {
  try {
    const permissionChecker = new PermissionChecker(req);

    const filename = req.query.filename;
    const storageId = req.query.storageId;

    if (!req.currentUser || !req.currentUser.id) {
      throw new Error403();
    }

    if (!req.currentTenant || !req.currentTenant.id) {
      throw new Error403();
    }

    // The storage config has the information on where
    // to store the file and the max size
    const config = Storage.values[storageId];

    if (!config) {
      throw new Error403();
    }

    if (
      // Some permissions are related to the user itself,
      // not to any entity; that's why there is a bypass permissions flag
      !config.bypassWritingPermissions &&
      !permissionChecker.hasStorage(storageId)
    ) {
      throw new Error403();
    }

    // The private URL is the path relative to the bucket/file system folder
    let privateUrl = `${config.folder}/${filename}`;
    privateUrl = privateUrl.replace(
      ':tenantId',
      req.currentTenant.id,
    );
    privateUrl = privateUrl.replace(
      ':userId',
      req.currentUser.id,
    );

    const maxSizeInBytes = config.maxSizeInBytes;
    const publicRead = Boolean(config.publicRead);

    const downloadUrl = await FileStorage.downloadUrl(
      privateUrl,
      publicRead,
    );

    /**
     * Upload credentials have the URL and the fields to be sent
     * to the upload server.
     */
    const uploadCredentials = await FileStorage.uploadCredentials(
      privateUrl,
      maxSizeInBytes,
      publicRead,
    );

    await ApiResponseHandler.success(req, res, {
      privateUrl,
      downloadUrl,
      uploadCredentials,
    });
  } catch (error) {
    await ApiResponseHandler.error(req, res, error);
  }
};
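On the frontend, this endpoint is called before the actual upload. A minimal sketch is shown below; the filename and storageId query parameters mirror the handler above, but the /file/credentials path and the helper name are assumptions and may differ in your project:

import axios from 'axios';

// Hypothetical helper: asks the backend for upload credentials.
// The '/file/credentials' route is an assumption; the query parameter
// names (filename, storageId) come from the handler above.
async function fetchUploadCredentials(
  filename: string,
  storageId: string,
) {
  const response = await axios.get('/file/credentials', {
    params: { filename, storageId },
  });

  // Expected payload: { privateUrl, downloadUrl, uploadCredentials }
  return response.data;
}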

Google Cloud Storage

import { getConfig } from '../../config';
import { Storage } from '@google-cloud/storage';

const serviceAccount = JSON.parse(
  getConfig().GOOGLE_CLOUD_PLATFORM_CREDENTIALS,
);

const bucket = new Storage({
  projectId: serviceAccount.project_id,
  credentials: serviceAccount,
}).bucket(getConfig().FILE_STORAGE_BUCKET);

export default class GoogleCloudFileStorage {
  /**
   * Creates a signed upload URL that enables
   * the frontend to upload directly to GCS in a
   * secure way
   *
   * @param {*} privateUrl
   * @param {*} maxSizeInBytes
   * @param {*} publicRead
   * @param {*} tokenExpiresAt
   */
  static async uploadCredentials(
    privateUrl,
    maxSizeInBytes,
    publicRead,
    tokenExpiresAt,
  ) {
    const expires =
      tokenExpiresAt || Date.now() + 10 * 60 * 1000;

    const file = bucket.file(privateUrl);

    const conditions: Array<any> = [];
    const fields: any = {};

    if (maxSizeInBytes) {
      conditions.push([
        'content-length-range',
        0,
        maxSizeInBytes,
      ]);
    }

    let publicUrl;

    if (publicRead) {
      fields.acl = 'public-read';
      publicUrl = await this.downloadUrl(
        privateUrl,
        publicRead,
      );
    }

    const [policy] = await file.generateSignedPostPolicyV4({
      expires,
      virtualHostedStyle: true,
      conditions,
      fields,
    });

    return {
      ...policy,
      publicUrl,
    };
  }

  /**
   * Returns a signed download URL.
   *
   * @param {*} privateUrl
   * @param {*} publicRead
   * @param {*} tokenExpiresAt
   */
  static async downloadUrl(
    privateUrl,
    publicRead,
    tokenExpiresAt?,
  ) {
    if (publicRead) {
      return `https://storage.googleapis.com/${
        getConfig().FILE_STORAGE_BUCKET
      }/${privateUrl}`;
    }

    tokenExpiresAt =
      tokenExpiresAt || Date.now() + 1000 * 60 * 60;

    const response = await bucket
      .file(privateUrl)
      .getSignedUrl({
        action: 'read',
        expires: tokenExpiresAt,
        version: 'v4',
      });

    if (response && response[0]) {
      return response[0];
    }

    return response;
  }
}
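generateSignedPostPolicyV4 resolves to an object containing the POST url and the signed fields, so the value returned to the frontend has the shape { url, fields, publicUrl }. The sketch below shows how the backend might call it; the import path, the privateUrl, and the size limit are illustrative assumptions:

import GoogleCloudFileStorage from '../../services/file/googleCloudFileStorage';

// Illustrative only: the import path and the values passed in are assumptions.
async function createProductPhotoCredentials() {
  const credentials = await GoogleCloudFileStorage.uploadCredentials(
    'tenant/123/product/photos/photo.jpg', // privateUrl, already resolved
    1000000, // maxSizeInBytes, from the productPhotos rule
    false, // publicRead
    undefined, // tokenExpiresAt, defaults to 10 minutes from now
  );

  // credentials.url: the URL the browser should POST the form to
  // credentials.fields: hidden form fields that must accompany the file
  return credentials;
}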

Amazon S3

import { getConfig } from '../../config';

const aws = require('aws-sdk');

const s3 = new aws.S3({
  accessKeyId: getConfig().AWS_ACCESS_KEY_ID,
  secretAccessKey: getConfig().AWS_SECRET_ACCESS_KEY,
});

export default class AWSStorage {
  /**
   * Creates a signed upload URL that enables
   * the frontend to upload directly to S3 in a
   * secure way
   *
   * @param {*} privateUrl
   * @param {*} maxSizeInBytes
   * @param {*} publicRead
   * @param {*} tokenExpiresAt
   */
  static async uploadCredentials(
    privateUrl,
    maxSizeInBytes,
    publicRead,
    tokenExpiresAt,
  ) {
    const expires =
      tokenExpiresAt || Date.now() + 10 * 60 * 1000;

    const Conditions: Array<any> = [];

    if (maxSizeInBytes) {
      Conditions.push([
        'content-length-range',
        0,
        maxSizeInBytes,
      ]);
    }

    let publicUrl;

    const Fields: any = { key: privateUrl };

    if (publicRead) {
      Fields.acl = 'public-read';
      Conditions.push({ acl: 'public-read' });
      publicUrl = await this.downloadUrl(
        privateUrl,
        publicRead,
      );
    }

    const policy = await s3.createPresignedPost({
      Bucket: getConfig().FILE_STORAGE_BUCKET,
      Fields,
      Expires: tokenExpiresAt,
      Conditions,
    });

    return {
      ...policy,
      publicUrl,
    };
  }

  /**
   * Returns a signed download URL.
   *
   * @param {*} privateUrl
   * @param {*} publicRead
   * @param {*} tokenExpiresAt
   */
  static async downloadUrl(privateUrl, publicRead) {
    if (publicRead) {
      return `https://${
        getConfig().FILE_STORAGE_BUCKET
      }.s3.amazonaws.com/${privateUrl}`;
    }

    const params = {
      Key: privateUrl,
      Bucket: getConfig().FILE_STORAGE_BUCKET,
    };

    return await s3.getSignedUrlPromise(
      'getObject',
      params,
    );
  }
}

Localhost

Localhost storage works a bit differently from Amazon S3 and Google Cloud Storage. Instead of generating a provider token, the backend signs a token with the application's own JWT secret and passes it to another endpoint, which only validates this token and handles the upload to the local server.

import path from 'path';
import fs from 'fs';
import os from 'os';
import jwt from 'jsonwebtoken';
import { getConfig } from '../../config';

/**
 * The directory where the files should be uploaded.
 * Change this to a persisted folder.
 */
const UPLOAD_DIR = os.tmpdir();

export default class LocalFileStorage {
  /**
   * Creates a signed upload URL that enables
   * the frontend to upload directly to the server in a
   * secure way.
   *
   * @param {*} privateUrl
   * @param {*} maxSizeInBytes
   * @param {*} publicRead
   * @param {*} tokenExpiresAt
   */
  static async uploadCredentials(
    privateUrl,
    maxSizeInBytes,
    publicRead,
    tokenExpiresAt,
  ) {
    const expires =
      tokenExpiresAt || Date.now() + 10 * 60 * 1000;

    const token = jwt.sign(
      { privateUrl, maxSizeInBytes },
      getConfig().AUTH_JWT_SECRET,
      { expiresIn: expires },
    );

    return {
      url: `${
        getConfig().BACKEND_URL
      }/file/upload?token=${token}`,
    };
  }

  /**
   * Handles the upload to the server.
   */
  static async upload(fileTempUrl, privateUrl) {
    const internalUrl = path.join(UPLOAD_DIR, privateUrl);
    ensureDirectoryExistence(internalUrl);
    fs.renameSync(fileTempUrl, internalUrl);
    return this.downloadUrl(privateUrl);
  }

  /**
   * Returns the download URL of the file from this server.
   */
  static async downloadUrl(privateUrl) {
    return `${
      getConfig().BACKEND_URL
    }/file/download?privateUrl=${privateUrl}`;
  }

  /**
   * Downloads the file.
   * @param {*} privateUrl
   */
  static async download(privateUrl) {
    return path.join(UPLOAD_DIR, privateUrl);
  }
}

function ensureDirectoryExistence(filePath) {
  var dirname = path.dirname(filePath);

  if (fs.existsSync(dirname)) {
    return true;
  }

  ensureDirectoryExistence(dirname);
  fs.mkdirSync(dirname);
}
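The /file/upload endpoint referenced by uploadCredentials is not shown above. The sketch below illustrates what it could look like, assuming an Express handler and a multipart middleware such as multer that exposes the temp file path at req.file.path; the module paths and error handling are assumptions, not ScaffoldHub's actual implementation:

import jwt from 'jsonwebtoken';
import { getConfig } from '../../config';
import LocalFileStorage from '../../services/file/localhostFileStorage';
import ApiResponseHandler from '../apiResponseHandler';

// Sketch of the /file/upload handler (paths and req.file are assumptions).
export default async (req, res) => {
  try {
    // Validates the token signed by LocalFileStorage.uploadCredentials
    const { privateUrl } = jwt.verify(
      req.query.token,
      getConfig().AUTH_JWT_SECRET,
    ) as any;

    // Moves the temp file into the upload directory and returns its download URL
    const downloadUrl = await LocalFileStorage.upload(
      req.file.path,
      privateUrl,
    );

    await ApiResponseHandler.success(req, res, downloadUrl);
  } catch (error) {
    await ApiResponseHandler.error(req, res, error);
  }
};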

Frontend

The frontend builds the upload form using the credentials created by the backend.

static async uploadToServer(file, uploadCredentials) {
  try {
    const url = uploadCredentials.url;
    const formData = new FormData();

    if (uploadCredentials.fields) {
      for (const [key, value] of Object.entries(
        uploadCredentials.fields,
      )) {
        formData.append(key, value as string);
      }
    }

    formData.append('file', file);

    return axios.post(url, formData, {
      headers: {
        'Content-Type': 'multipart/form-data',
      },
    });
  } catch (error) {
    console.error(error);
    throw error;
  }
}
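Putting the pieces together, a typical upload flow on the frontend looks roughly like the sketch below. fetchUploadCredentials is the hypothetical helper sketched in the Credentials section, and FileUploader stands for the class containing the uploadToServer method above; the exact class and module names may differ in your project:

// Rough end-to-end flow (names are illustrative, not the exact ScaffoldHub API):
// 1. Ask the backend for credentials for this storage rule and filename.
// 2. POST the file directly to the provider (or to /file/upload on localhost).
// 3. Persist privateUrl/downloadUrl on the entity being saved.
async function uploadProductPhoto(file: File) {
  const {
    privateUrl,
    downloadUrl,
    uploadCredentials,
  } = await fetchUploadCredentials(file.name, 'productPhotos');

  await FileUploader.uploadToServer(file, uploadCredentials);

  // The entity form would then store these values on the record
  return { name: file.name, privateUrl, downloadUrl };
}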