Storage Best Practices
File Organization
Bucket Strategy
┌─────────────────────────────────────────────┐
│ BUCKET ORGANIZATION                          │
├─────────────────────────────────────────────┤
│                                              │
│ By Access Level:                             │
│ ├── public-assets/  (logos, hero images)     │
│ ├── user-uploads/   (private user files)     │
│ └── admin-files/    (internal documents)     │
│                                              │
│ By Content Type:                             │
│ ├── avatars/        (profile pictures)       │
│ ├── documents/      (PDFs, docs)             │
│ ├── images/         (general images)         │
│ └── videos/         (video content)          │
│                                              │
│ By Feature:                                  │
│ ├── project-files/  (project attachments)    │
│ ├── chat-uploads/   (chat attachments)       │
│ └── exports/        (generated reports)      │
│                                              │
└─────────────────────────────────────────────┘
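Whichever scheme you pick, the buckets themselves can be created up front with matching constraints. A minimal sketch using supabase-js from a trusted server context; the bucket names, size limit, and MIME allow-list are illustrative assumptions, and SUPABASE_URL / SUPABASE_SERVICE_ROLE_KEY stand in for your project credentials:
import { createClient } from '@supabase/supabase-js';
// Bucket creation is an admin operation: use the service-role key, server-side only.
const supabaseAdmin = createClient(SUPABASE_URL, SUPABASE_SERVICE_ROLE_KEY);
// Public bucket for static assets (logos, hero images)
await supabaseAdmin.storage.createBucket('public-assets', { public: true });
// Private bucket for user uploads, with per-bucket guardrails
await supabaseAdmin.storage.createBucket('user-uploads', {
  public: false,
  fileSizeLimit: 5 * 1024 * 1024, // 5MB (assumed limit)
  allowedMimeTypes: ['image/jpeg', 'image/png', 'image/webp', 'application/pdf'], // assumed allow-list
});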
File Path Convention
// ✅ GOOD: Organized, predictable paths
// Pattern: {entity_type}/{entity_id}/{file_type}/{filename}
// User files
`users/${userId}/avatar/profile.jpg`
`users/${userId}/documents/${documentId}.pdf`
// Project files
`projects/${projectId}/attachments/${attachmentId}.png`
`projects/${projectId}/exports/${exportId}.xlsx`
// ❌ BAD: Flat structure, no organization
`file123.jpg`
`document456.pdf`
`random-name.png`
Unique Filenames
// Generate unique filename to prevent conflicts
function generateFilePath(
  folder: string,
  originalName: string
): string {
  const extension = originalName.split('.').pop();
  const uniqueId = crypto.randomUUID();
  const timestamp = Date.now();
  return `${folder}/${timestamp}-${uniqueId}.${extension}`;
}

// Usage
const path = generateFilePath(
  `users/${userId}/documents`,
  file.name
);
// Result: users/123/documents/1705123456789-abc123.pdf
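When uploading to a generated path, passing `upsert: false` makes an accidental path collision fail loudly instead of silently overwriting an existing file. A small sketch, assuming a private `user-files` bucket:
const filePath = generateFilePath(`users/${userId}/documents`, file.name);

const { data, error } = await supabase.storage
  .from('user-files')
  .upload(filePath, file, {
    contentType: file.type, // store the real MIME type
    upsert: false,          // fail if the path already exists
  });

if (error) throw error;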
Security Best Practices
RLS Policies
-- 1. Always enable RLS
ALTER TABLE storage.objects ENABLE ROW LEVEL SECURITY;

-- 2. Deny by default (no policy = no access)

-- 3. Specific policies per bucket
CREATE POLICY "Users upload to own folder"
ON storage.objects FOR INSERT
WITH CHECK (
  bucket_id = 'user-files'
  AND (storage.foldername(name))[1] = auth.uid()::text
);

-- 4. Validate file types
CREATE POLICY "Only images in avatars"
ON storage.objects FOR INSERT
WITH CHECK (
  bucket_id = 'avatars'
  AND storage.extension(name) IN ('jpg', 'jpeg', 'png', 'webp', 'gif')
);
File Validation
// Client-side validation
function validateFile(file: File): string | null {
  // Check file size (5MB max)
  const maxSize = 5 * 1024 * 1024;
  if (file.size > maxSize) {
    return 'File size must be less than 5MB';
  }

  // Check file type
  const allowedTypes = ['image/jpeg', 'image/png', 'image/webp'];
  if (!allowedTypes.includes(file.type)) {
    return 'Only JPEG, PNG, and WebP images are allowed';
  }

  // Check filename
  const dangerousChars = /[<>:"/\\|?*]/;
  if (dangerousChars.test(file.name)) {
    return 'Filename contains invalid characters';
  }

  return null; // Valid
}
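Client-side validation is for user experience only and is easy to bypass, so the same limits should also be enforced server-side (bucket size limits, allowed MIME types, and the RLS policies above). A sketch of wiring the validator into an upload handler; the handler name, bucket, and showError helper are assumptions:
async function handleFileSelected(file: File, userId: string) {
  // Fail fast with a friendly message before any network call
  const validationError = validateFile(file);
  if (validationError) {
    showError(validationError); // assumed UI helper
    return;
  }

  const path = generateFilePath(`users/${userId}/images`, file.name);
  const { error } = await supabase.storage.from('images').upload(path, file);
  if (error) showError('Upload failed. Please try again.');
}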
Signed URL Best Practices
// ✅ GOOD: Short expiry for sensitive files
const { data } = await supabase.storage
  .from('private-docs')
  .createSignedUrl('sensitive.pdf', 300); // 5 minutes

// ✅ GOOD: Longer expiry for download links
const { data } = await supabase.storage
  .from('downloads')
  .createSignedUrl('software.zip', 86400); // 24 hours

// ❌ BAD: Extremely long expiry
createSignedUrl('file.pdf', 31536000); // 1 year - too long!

// ✅ GOOD: Generate on-demand, not stored
async function getDownloadUrl(path: string) {
  const { data } = await supabase.storage
    .from('documents')
    .createSignedUrl(path, 3600);
  return data?.signedUrl;
}
Upload Optimization
// 1. Compress images before upload
async function compressImage(file: File, maxWidth = 1920): Promise<Blob> {
  return new Promise((resolve) => {
    const canvas = document.createElement('canvas');
    const ctx = canvas.getContext('2d')!;
    const img = new Image();

    img.onload = () => {
      // Calculate new dimensions
      const ratio = Math.min(maxWidth / img.width, 1);
      canvas.width = img.width * ratio;
      canvas.height = img.height * ratio;

      // Draw and compress
      ctx.drawImage(img, 0, 0, canvas.width, canvas.height);
      canvas.toBlob(
        (blob) => resolve(blob!),
        'image/jpeg',
        0.85 // Quality
      );
    };

    img.src = URL.createObjectURL(file);
  });
}

// 2. Upload compressed version
const compressed = await compressImage(file);
await supabase.storage.from('images').upload(path, compressed);
Download Optimization
// 1. Use image transformations for thumbnails
const { data } = supabase.storage
  .from('images')
  .getPublicUrl('photo.jpg', {
    transform: { width: 200, quality: 75 },
  });

// 2. Lazy load images
<img src={url} loading="lazy" />

// 3. Use appropriate formats
transform: { format: 'webp' } // 30% smaller than JPEG
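Putting transformations and lazy loading together, a list view can request small thumbnails while the originals stay untouched. A sketch of a thumbnail component; the component shape and dimensions are assumptions:
function Thumbnail({ path }: { path: string }) {
  // getPublicUrl is synchronous; the transform is applied when the URL is fetched
  const { data } = supabase.storage.from('images').getPublicUrl(path, {
    transform: { width: 200, height: 200, quality: 75 },
  });

  return (
    <img
      src={data.publicUrl}
      width={200}
      height={200}
      loading="lazy"
      alt=""
    />
  );
}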
Caching Strategy
// Set cache headers on upload
await supabase.storage
  .from('static-assets')
  .upload('logo.png', file, {
    cacheControl: '31536000', // 1 year for static assets
  });

await supabase.storage
  .from('user-uploads')
  .upload(path, file, {
    cacheControl: '3600', // 1 hour for dynamic content
  });

// Cache control values
// - Static assets: 31536000 (1 year)
// - Dynamic content: 3600-86400 (1 hour - 1 day)
// - Frequently updated: 0 (no cache)
Error Handling
Comprehensive Error Handling
async function uploadFile(file: File, path: string) {
  // Validate first
  const validationError = validateFile(file);
  if (validationError) {
    throw new Error(validationError);
  }

  try {
    const { data, error } = await supabase.storage
      .from('uploads')
      .upload(path, file);

    if (error) {
      // Handle specific errors
      if (error.message.includes('already exists')) {
        throw new Error('File already exists. Use a different name.');
      }
      if (error.message.includes('size')) {
        throw new Error('File is too large.');
      }
      if (error.message.includes('type')) {
        throw new Error('File type not allowed.');
      }
      throw new Error(`Upload failed: ${error.message}`);
    }

    return data;
  } catch (err) {
    // Network errors
    if (err instanceof TypeError) {
      throw new Error('Network error. Please check your connection.');
    }
    throw err;
  }
}
Retry Logic
async function uploadWithRetry(
  file: File,
  path: string,
  maxRetries = 3
): Promise<any> {
  let lastError: Error | undefined;

  for (let attempt = 1; attempt <= maxRetries; attempt++) {
    try {
      const { data, error } = await supabase.storage
        .from('uploads')
        .upload(path, file);

      if (error) throw new Error(error.message);
      return data;
    } catch (err) {
      lastError = err as Error;
      console.log(`Upload attempt ${attempt} failed:`, err);

      if (attempt < maxRetries) {
        // Exponential backoff: 1s, 2s, 4s, ...
        await new Promise(r => setTimeout(r, 1000 * 2 ** (attempt - 1)));
      }
    }
  }

  throw new Error(`Upload failed after ${maxRetries} attempts: ${lastError?.message}`);
}
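Usage looks the same as a plain upload; only the final result (or final error) reaches the caller. The path below is an assumption:
const data = await uploadWithRetry(file, `users/${userId}/documents/${crypto.randomUUID()}.pdf`);
console.log('Upload succeeded:', data);
One refinement worth considering is skipping retries for errors that cannot succeed on a repeat attempt, such as "already exists".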
Cleanup & Maintenance
Delete Unused Files
// Cleanup orphaned files
async function cleanupOrphanedFiles() {
  // Get all files in storage
  const { data: files } = await supabaseAdmin.storage
    .from('uploads')
    .list();

  // Get all file references from database
  const { data: references } = await supabaseAdmin
    .from('attachments')
    .select('file_path');

  const referencedPaths = new Set(references?.map(r => r.file_path));

  // Find orphaned files
  const orphaned = files?.filter(
    f => !referencedPaths.has(f.name)
  ) ?? [];

  // Delete orphaned files
  if (orphaned.length > 0) {
    await supabaseAdmin.storage
      .from('uploads')
      .remove(orphaned.map(f => f.name));
  }

  return orphaned.length;
}
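Note that list() returns one page of results for a single folder level, so on larger buckets the comparison above should walk all pages (and subfolders) first. A hedged sketch of paging through one folder; the page size is arbitrary:
async function listAllFiles(bucket: string, folder = ''): Promise<string[]> {
  const pageSize = 100;
  const names: string[] = [];

  for (let offset = 0; ; offset += pageSize) {
    const { data, error } = await supabaseAdmin.storage
      .from(bucket)
      .list(folder, { limit: pageSize, offset });

    if (error) throw error;
    names.push(...(data ?? []).map(f => (folder ? `${folder}/${f.name}` : f.name)));
    if (!data || data.length < pageSize) break; // last page reached
  }

  return names;
}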
Database-Storage Consistency
// Keep storage and the database in sync when deleting
async function deleteDocument(docId: string) {
  // 1. Get file path from database
  const { data: doc, error } = await supabase
    .from('documents')
    .select('file_path')
    .eq('id', docId)
    .single();

  if (error || !doc) {
    throw new Error('Document not found.');
  }

  // 2. Delete from storage
  await supabase.storage
    .from('documents')
    .remove([doc.file_path]);

  // 3. Delete from database
  await supabase
    .from('documents')
    .delete()
    .eq('id', docId);
}
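The two deletes are not atomic, so one can succeed while the other fails. Deleting the database row first is often the safer order: a failed storage delete then leaves only an orphaned file, which cleanupOrphanedFiles() can reclaim later, rather than a row pointing at a missing file. A hedged variant:
async function deleteDocumentSafely(docId: string) {
  const { data: doc, error: fetchError } = await supabase
    .from('documents')
    .select('file_path')
    .eq('id', docId)
    .single();
  if (fetchError || !doc) throw new Error('Document not found.');

  // Remove the database row first so no reference to a missing file survives
  const { error: dbError } = await supabase
    .from('documents')
    .delete()
    .eq('id', docId);
  if (dbError) throw dbError;

  // If this fails, the file becomes an orphan and the cleanup job reclaims it
  const { error: storageError } = await supabase.storage
    .from('documents')
    .remove([doc.file_path]);
  if (storageError) {
    console.warn('Storage delete failed; file left for cleanup:', storageError.message);
  }
}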
Summary
Checklist
Security:
✅ Enable RLS on storage.objects
✅ Validate file type and size
✅ Use short signed URL expiry
✅ Organize files by user/entity ID
Performance:
✅ Compress images before upload
✅ Use image transformations
✅ Set appropriate cache headers
✅ Lazy load images
Maintenance:
✅ Unique filenames
✅ Cleanup orphaned files
✅ Database-storage consistency
✅ Error handling with retries
Quick Reference
| Bucket Type    | Cache    | Security         |
|----------------|----------|------------------|
| Public assets  | 1 year   | Public URL       |
| User uploads   | 1 hour   | Signed URL       |
| Sensitive docs | No cache | Short signed URL |
Q&A
- Do we have a cleanup strategy for storage yet?
- What upload size limit do we need?
- What issues exist with the current storage setup?