Cloud Storage
Store files in the cloud with AWS S3, Cloudinary, Google Cloud Storage, and more.
Why Cloud Storage?#
Local file storage has problems:
- Server disk fills up - Files grow, servers don't
- No redundancy - Server dies, files gone
- No CDN - Slow delivery worldwide
- Multiple servers - A file saved on server A isn't there when the next request hits server B
Cloud storage solves all of these.
Option 1: AWS S3 (Industry Standard)#
The most popular choice:
bash
npm install @aws-sdk/client-s3 @aws-sdk/s3-request-presigner
Setup#
javascript
// src/config/s3.js
import { S3Client } from '@aws-sdk/client-s3';
import { config } from './index.js';
export const s3Client = new S3Client({
region: config.aws.region,
credentials: {
accessKeyId: config.aws.accessKeyId,
secretAccessKey: config.aws.secretAccessKey,
},
});
export const BUCKET_NAME = config.aws.bucketName;
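The config module imported above is assumed throughout this guide; a minimal sketch that reads the values from environment variables (the variable names are illustrative, not prescribed) might look like:
javascript
// src/config/index.js (sketch; env var names are assumptions)
export const config = {
  aws: {
    region: process.env.AWS_REGION,
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
    bucketName: process.env.S3_BUCKET_NAME,
  },
  cloudinary: {
    cloudName: process.env.CLOUDINARY_CLOUD_NAME,
    apiKey: process.env.CLOUDINARY_API_KEY,
    apiSecret: process.env.CLOUDINARY_API_SECRET,
  },
  storage: {
    provider: process.env.STORAGE_PROVIDER || 's3',
  },
};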
Upload to S3#
javascript
// src/services/storage.js
import { PutObjectCommand, GetObjectCommand, DeleteObjectCommand } from '@aws-sdk/client-s3';
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
import { s3Client, BUCKET_NAME } from '../config/s3.js';
import crypto from 'crypto';
import path from 'path';
import { config } from '../config/index.js';
// Minimal extension-to-MIME lookup; swap in the `mime-types` package for broader coverage
const MIME_TYPES = {
  '.jpg': 'image/jpeg',
  '.jpeg': 'image/jpeg',
  '.png': 'image/png',
  '.gif': 'image/gif',
  '.webp': 'image/webp',
  '.pdf': 'application/pdf',
};
export function getMimeType(ext) {
  return MIME_TYPES[ext.toLowerCase()] || 'application/octet-stream';
}
export async function uploadToS3(buffer, originalName, folder = 'uploads') {
const ext = path.extname(originalName);
const key = `${folder}/${crypto.randomUUID()}${ext}`;
await s3Client.send(new PutObjectCommand({
Bucket: BUCKET_NAME,
Key: key,
Body: buffer,
ContentType: getMimeType(ext),
}));
return {
key,
    // Region-qualified URL; the bare s3.amazonaws.com form is only reliable in us-east-1
    url: `https://${BUCKET_NAME}.s3.${config.aws.region}.amazonaws.com/${key}`,
};
}
export async function deleteFromS3(key) {
await s3Client.send(new DeleteObjectCommand({
Bucket: BUCKET_NAME,
Key: key,
}));
}
// Generate signed URL for private files
export async function getSignedDownloadUrl(key, expiresIn = 3600) {
const command = new GetObjectCommand({
Bucket: BUCKET_NAME,
Key: key,
});
return getSignedUrl(s3Client, command, { expiresIn });
}
// Generate signed URL for direct upload
export async function getSignedUploadUrl(filename, contentType, folder = 'uploads') {
const ext = path.extname(filename);
const key = `${folder}/${crypto.randomUUID()}${ext}`;
const command = new PutObjectCommand({
Bucket: BUCKET_NAME,
Key: key,
ContentType: contentType,
});
const url = await getSignedUrl(s3Client, command, { expiresIn: 3600 });
return { key, uploadUrl: url };
}
Usage in Routes#
javascript
// src/routes/uploads.js
import { Router } from 'express';
import { uploadToMemory } from '../middleware/upload.js';
import { uploadToS3, getSignedUploadUrl } from '../services/storage.js';
const router = Router();
// Server-side upload
router.post('/upload', uploadToMemory.single('file'), async (req, res) => {
  if (!req.file) {
    return res.status(400).json({ error: 'No file provided' });
  }
  const result = await uploadToS3(
req.file.buffer,
req.file.originalname,
'user-uploads'
);
res.json({ data: result });
});
// Client-side direct upload (better for large files)
router.post('/presigned-url', async (req, res) => {
  const { filename, contentType } = req.body;
  if (!filename || !contentType) {
    return res.status(400).json({ error: 'filename and contentType are required' });
  }
const result = await getSignedUploadUrl(filename, contentType);
res.json({ data: result });
});
export default router;
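On the client side, the server-side route expects multipart/form-data. A minimal sketch (the form field name must match the one passed to single()):
javascript
// Frontend sketch: post a file to the server-side upload route
const form = new FormData();
form.append('file', fileInput.files[0]); // field name matches uploadToMemory.single('file')
const res = await fetch('/upload', { method: 'POST', body: form });
const { data } = await res.json(); // { key, url }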
Option 2: Cloudinary (Media Optimized)#
Best for images and videos - automatic optimization, transformations, CDN:
bash
npm install cloudinary
Setup#
javascript
// src/config/cloudinary.js
import { v2 as cloudinary } from 'cloudinary';
import { config } from './index.js';
cloudinary.config({
cloud_name: config.cloudinary.cloudName,
api_key: config.cloudinary.apiKey,
api_secret: config.cloudinary.apiSecret,
});
export { cloudinary };
Upload to Cloudinary#
javascript
// src/services/cloudinary.js
import { cloudinary } from '../config/cloudinary.js';
export async function uploadImage(buffer, options = {}) {
return new Promise((resolve, reject) => {
const uploadOptions = {
folder: options.folder || 'uploads',
resource_type: 'auto',
...options,
};
cloudinary.uploader.upload_stream(uploadOptions, (error, result) => {
if (error) reject(error);
else resolve(result);
}).end(buffer);
});
}
export async function uploadWithTransformations(buffer, options = {}) {
return uploadImage(buffer, {
...options,
transformation: [
{ width: 1000, height: 1000, crop: 'limit' }, // Max dimensions
{ quality: 'auto' }, // Auto optimize
{ fetch_format: 'auto' }, // Best format for browser
],
});
}
export async function deleteImage(publicId) {
return cloudinary.uploader.destroy(publicId);
}
// Generate optimized URL
export function getOptimizedUrl(publicId, transformations = {}) {
return cloudinary.url(publicId, {
secure: true,
transformation: [
{ quality: 'auto', fetch_format: 'auto' },
transformations,
],
});
}
Usage#
javascript
router.post('/avatar', uploadToMemory.single('avatar'), async (req, res) => {
const result = await uploadImage(req.file.buffer, {
folder: 'avatars',
transformation: [
{ width: 200, height: 200, crop: 'fill', gravity: 'face' },
],
});
res.json({
data: {
url: result.secure_url,
publicId: result.public_id,
},
});
});
On-the-fly Transformations#
javascript
// No need to store multiple sizes - transform on request
const avatarUrl = cloudinary.url('avatars/user123', {
transformation: [
{ width: 100, height: 100, crop: 'fill' },
{ quality: 'auto' },
],
});
// https://res.cloudinary.com/demo/image/upload/w_100,h_100,c_fill,q_auto/avatars/user123
Option 3: Google Cloud Storage#
bash
npm install @google-cloud/storage
javascript
// src/services/gcs.js
import path from 'path';
import { Storage } from '@google-cloud/storage';
import { config } from '../config/index.js';
import { getMimeType } from './storage.js'; // MIME helper defined in the S3 example
const storage = new Storage({
projectId: config.gcp.projectId,
keyFilename: config.gcp.keyFilePath, // Or use credentials object
});
const bucket = storage.bucket(config.gcp.bucketName);
export async function uploadToGCS(buffer, filename, folder = 'uploads') {
const key = `${folder}/${Date.now()}-${filename}`;
const file = bucket.file(key);
await file.save(buffer, {
    contentType: getMimeType(path.extname(filename)),
resumable: false,
});
  // Make public (optional; requires fine-grained ACLs, fails under uniform bucket-level access)
  await file.makePublic();
return {
key,
url: `https://storage.googleapis.com/${config.gcp.bucketName}/${key}`,
};
}
export async function getSignedUrl(key, expiresIn = 3600) {
const [url] = await bucket.file(key).getSignedUrl({
action: 'read',
expires: Date.now() + expiresIn * 1000,
});
return url;
}
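For parity with the S3 service, deleting a file is a one-liner; a sketch:
javascript
// Remove an object from the bucket
export async function deleteFromGCS(key) {
  await bucket.file(key).delete();
}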
Option 4: DigitalOcean Spaces (S3 Compatible)#
Uses the S3 SDK with a different endpoint:
javascript
import { S3Client } from '@aws-sdk/client-s3';
const s3Client = new S3Client({
endpoint: 'https://nyc3.digitaloceanspaces.com',
region: 'us-east-1', // Required but ignored
credentials: {
accessKeyId: config.do.spacesKey,
secretAccessKey: config.do.spacesSecret,
},
});
// Use same S3 upload functions
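One difference worth remembering is the public URL shape. A sketch, assuming a bucket in the nyc3 region (config.do.bucketName is an assumed config key):
javascript
// Same upload code as the S3 example; only the endpoint and resulting URL differ
const url = `https://${config.do.bucketName}.nyc3.digitaloceanspaces.com/${key}`;
// Spaces also ships a per-bucket CDN endpoint:
const cdnUrl = `https://${config.do.bucketName}.nyc3.cdn.digitaloceanspaces.com/${key}`;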
Option 5: Supabase Storage#
bash
npm install @supabase/supabase-js
javascript
import path from 'path';
import { createClient } from '@supabase/supabase-js';
import { getMimeType } from './storage.js'; // MIME helper defined in the S3 example
const supabase = createClient(
config.supabase.url,
config.supabase.serviceKey
);
export async function uploadToSupabase(buffer, filename, bucket = 'uploads') {
const { data, error } = await supabase.storage
.from(bucket)
.upload(`${Date.now()}-${filename}`, buffer, {
      contentType: getMimeType(path.extname(filename)),
});
if (error) throw error;
const { data: urlData } = supabase.storage
.from(bucket)
.getPublicUrl(data.path);
return {
path: data.path,
url: urlData.publicUrl,
};
}
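For private buckets, Supabase can mint time-limited signed URLs instead of public ones; a sketch:
javascript
// Signed URL for a private bucket (expiresIn is in seconds)
export async function getSupabaseSignedUrl(filePath, bucket = 'uploads', expiresIn = 3600) {
  const { data, error } = await supabase.storage
    .from(bucket)
    .createSignedUrl(filePath, expiresIn);
  if (error) throw error;
  return data.signedUrl;
}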
Comparison#
| Feature | S3 | Cloudinary | GCS | DO Spaces |
|---|---|---|---|---|
| Price | $$ | $-$$$ | $$ | $ |
| CDN | Extra | Built-in | Extra | Built-in |
| Image transforms | No | Excellent | No | No |
| Video processing | Extra | Built-in | No | No |
| S3 compatible | Yes | No | No | Yes |
| Free tier | Yes | Yes | Yes | No |
Recommendations
- Images/videos with transformations: Cloudinary
- General files, enterprise: AWS S3
- Budget friendly, S3 compatible: DigitalOcean Spaces
- Full-stack with Supabase: Supabase Storage
Storage Service Abstraction#
Create an abstraction to switch providers easily:
javascript
// src/services/storage/index.js
import { config } from '../../config/index.js';
import * as s3Storage from './s3.js';
import * as cloudinaryStorage from './cloudinary.js';
const providers = {
s3: s3Storage,
cloudinary: cloudinaryStorage,
};
const storage = providers[config.storage.provider];
export async function upload(buffer, filename, options = {}) {
return storage.upload(buffer, filename, options);
}
export async function remove(key) {
return storage.remove(key);
}
export async function getUrl(key, options = {}) {
return storage.getUrl(key, options);
}
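Each provider module only has to expose the same three functions. A sketch of the S3 adapter, wrapping the service functions from earlier (the import path and option mapping are assumptions):
javascript
// src/services/storage/s3.js (sketch)
import { uploadToS3, deleteFromS3, getSignedDownloadUrl } from '../s3-storage.js'; // path is an assumption
export async function upload(buffer, filename, options = {}) {
  return uploadToS3(buffer, filename, options.folder);
}
export async function remove(key) {
  return deleteFromS3(key);
}
export async function getUrl(key, options = {}) {
  return getSignedDownloadUrl(key, options.expiresIn);
}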
Client-Side Direct Upload#
Better for large files - upload directly to cloud, skip your server:
javascript
// Backend: Generate presigned URL
router.post('/upload/presign', authenticate, async (req, res) => {
const { filename, contentType } = req.body;
const { key, uploadUrl } = await getSignedUploadUrl(filename, contentType);
// Store pending upload in database
await Upload.create({
userId: req.user.id,
key,
status: 'pending',
});
res.json({ data: { key, uploadUrl } });
});
// Backend: Confirm upload completed
router.post('/upload/confirm', authenticate, async (req, res) => {
const { key } = req.body;
const upload = await Upload.findOneAndUpdate(
{ key, userId: req.user.id, status: 'pending' },
{ status: 'completed' },
{ new: true }
);
if (!upload) {
return res.status(404).json({ error: 'Upload not found' });
}
res.json({ data: upload });
});
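The Upload model used above is assumed; with Mongoose it might look like:
javascript
// src/models/upload.js (sketch, assuming Mongoose)
import mongoose from 'mongoose';
const uploadSchema = new mongoose.Schema({
  userId: { type: mongoose.Schema.Types.ObjectId, ref: 'User', required: true },
  key: { type: String, required: true, unique: true },
  status: { type: String, enum: ['pending', 'completed'], default: 'pending' },
}, { timestamps: true });
export const Upload = mongoose.model('Upload', uploadSchema);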
javascript
// Frontend
async function uploadFile(file) {
// 1. Get presigned URL
const { data } = await api.post('/upload/presign', {
filename: file.name,
contentType: file.type,
});
// 2. Upload directly to cloud
await fetch(data.uploadUrl, {
method: 'PUT',
body: file,
headers: { 'Content-Type': file.type },
});
// 3. Confirm upload
await api.post('/upload/confirm', { key: data.key });
}
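fetch() does not expose upload progress. If you want a progress bar for large files, XMLHttpRequest does; a sketch:
javascript
// Upload to the presigned URL while reporting progress (0..1)
function uploadWithProgress(uploadUrl, file, onProgress) {
  return new Promise((resolve, reject) => {
    const xhr = new XMLHttpRequest();
    xhr.open('PUT', uploadUrl);
    xhr.setRequestHeader('Content-Type', file.type);
    xhr.upload.onprogress = (e) => {
      if (e.lengthComputable) onProgress(e.loaded / e.total);
    };
    xhr.onload = () => (xhr.status < 300 ? resolve() : reject(new Error(`Upload failed: ${xhr.status}`)));
    xhr.onerror = () => reject(new Error('Network error during upload'));
    xhr.send(file);
  });
}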
Key Takeaways#
- Use cloud storage - Don't store files on application servers
- Cloudinary for images - Automatic optimization, transformations
- S3 for everything else - Industry standard, lots of tooling
- Direct uploads for large files - Skip your server
- Abstract your provider - Easy to switch later
Best Practice
Store file metadata (key, url, userId, type) in your database. Store actual files in cloud storage. This gives you the best of both worlds - queryable metadata and scalable file storage.