Implement backend API and database services in Docker setup
- Added a new `api` service for the NestJS backend, including health checks and dependencies on PostgreSQL, Redis, and MinIO. - Introduced PostgreSQL and Redis services with health checks and configurations for data persistence. - Added MinIO for S3-compatible object storage and a one-shot service to initialize required buckets. - Updated the Nginx configuration to proxy requests to the new backend API and MinIO storage. - Enhanced the Dockerfile to support the new API environment variables and configurations. - Updated the `package.json` and `package-lock.json` to include new dependencies for QR code generation and other utilities. Co-authored-by: Cursor <cursoragent@cursor.com>
This commit is contained in:
186
src/composables/useUpload.ts
Normal file
186
src/composables/useUpload.ts
Normal file
@@ -0,0 +1,186 @@
|
||||
/**
|
||||
* useUpload composable
|
||||
*
|
||||
* Chunked multipart upload with progress tracking.
|
||||
* Ported from the original indeehub-frontend uploader library.
|
||||
* Works with both the original API and our self-hosted MinIO backend.
|
||||
*/
|
||||
|
||||
import { ref, computed } from 'vue'
|
||||
import { filmmakerService } from '../services/filmmaker.service'
|
||||
import axios from 'axios'
|
||||
|
||||
// Size of each multipart chunk. 20 MB keeps the part count low for large
// files while staying well within S3/MinIO multipart part-size limits.
const CHUNK_SIZE = 20 * 1024 * 1024 // 20 MB
// Maximum number of chunk PUT requests in flight at once for a single file.
const MAX_PARALLEL_UPLOADS = 6
// Attempts per chunk (with exponential backoff) before the upload fails.
const MAX_RETRIES = 3
|
||||
|
||||
/**
 * A single file queued for chunked multipart upload.
 */
export interface UploadItem {
  // Unique client-side identifier (timestamp + random base36 suffix).
  id: string
  // The browser File being uploaded.
  file: File
  // Object key (path) under which the file is stored in the bucket.
  key: string
  // Target storage bucket name.
  bucket: string
  // Whole-file progress, 0-100, updated at chunk granularity.
  progress: number
  // Lifecycle state of this upload.
  status: 'pending' | 'uploading' | 'completed' | 'failed'
  // Human-readable failure reason; set only when status === 'failed'.
  error?: string
}
|
||||
|
||||
// Shared upload queue (module-level singleton, shared across all components
// that call useUpload()).
const uploadQueue = ref<UploadItem[]>([])
// True while at least one queued item is still pending or uploading.
const isUploading = ref(false)
|
||||
|
||||
export function useUpload() {
|
||||
const totalProgress = computed(() => {
|
||||
if (uploadQueue.value.length === 0) return 0
|
||||
const total = uploadQueue.value.reduce((sum, item) => sum + item.progress, 0)
|
||||
return Math.round(total / uploadQueue.value.length)
|
||||
})
|
||||
|
||||
const activeUploads = computed(() =>
|
||||
uploadQueue.value.filter((u) => u.status === 'uploading')
|
||||
)
|
||||
|
||||
const completedUploads = computed(() =>
|
||||
uploadQueue.value.filter((u) => u.status === 'completed')
|
||||
)
|
||||
|
||||
/**
|
||||
* Add a file to the upload queue and start uploading
|
||||
*/
|
||||
async function addUpload(file: File, key: string, bucket: string = 'indeedhub-private'): Promise<string | null> {
|
||||
const item: UploadItem = {
|
||||
id: `upload-${Date.now()}-${Math.random().toString(36).slice(2)}`,
|
||||
file,
|
||||
key,
|
||||
bucket,
|
||||
progress: 0,
|
||||
status: 'pending',
|
||||
}
|
||||
|
||||
uploadQueue.value.push(item)
|
||||
return processUpload(item)
|
||||
}
|
||||
|
||||
/**
|
||||
* Process a single upload: initialize, chunk, upload, finalize
|
||||
*/
|
||||
async function processUpload(item: UploadItem): Promise<string | null> {
|
||||
try {
|
||||
item.status = 'uploading'
|
||||
isUploading.value = true
|
||||
|
||||
// Step 1: Initialize multipart upload
|
||||
const { UploadId, Key } = await filmmakerService.initializeUpload(
|
||||
item.key,
|
||||
item.bucket,
|
||||
item.file.type
|
||||
)
|
||||
|
||||
// Step 2: Calculate chunks
|
||||
const totalChunks = Math.ceil(item.file.size / CHUNK_SIZE)
|
||||
|
||||
// Step 3: Get presigned URLs for all chunks
|
||||
const { parts: presignedParts } = await filmmakerService.getPresignedUrls(
|
||||
UploadId,
|
||||
Key,
|
||||
item.bucket,
|
||||
totalChunks
|
||||
)
|
||||
|
||||
// Step 4: Upload chunks in parallel with progress tracking
|
||||
const completedParts: Array<{ PartNumber: number; ETag: string }> = []
|
||||
let uploadedChunks = 0
|
||||
|
||||
// Process chunks in batches of MAX_PARALLEL_UPLOADS
|
||||
for (let batchStart = 0; batchStart < presignedParts.length; batchStart += MAX_PARALLEL_UPLOADS) {
|
||||
const batch = presignedParts.slice(batchStart, batchStart + MAX_PARALLEL_UPLOADS)
|
||||
|
||||
const batchResults = await Promise.all(
|
||||
batch.map(async (part) => {
|
||||
const start = (part.PartNumber - 1) * CHUNK_SIZE
|
||||
const end = Math.min(start + CHUNK_SIZE, item.file.size)
|
||||
const chunk = item.file.slice(start, end)
|
||||
|
||||
// Upload with retries
|
||||
let lastError: Error | null = null
|
||||
for (let attempt = 0; attempt < MAX_RETRIES; attempt++) {
|
||||
try {
|
||||
const response = await axios.put(part.signedUrl, chunk, {
|
||||
headers: { 'Content-Type': item.file.type },
|
||||
onUploadProgress: () => {
|
||||
// Progress is tracked at the chunk level
|
||||
},
|
||||
})
|
||||
|
||||
uploadedChunks++
|
||||
item.progress = Math.round((uploadedChunks / totalChunks) * 100)
|
||||
|
||||
const etag = response.headers.etag || response.headers.ETag
|
||||
return {
|
||||
PartNumber: part.PartNumber,
|
||||
ETag: etag?.replace(/"/g, '') || '',
|
||||
}
|
||||
} catch (err: any) {
|
||||
lastError = err
|
||||
// Exponential backoff
|
||||
await new Promise((resolve) =>
|
||||
setTimeout(resolve, Math.pow(2, attempt) * 1000)
|
||||
)
|
||||
}
|
||||
}
|
||||
throw lastError || new Error(`Failed to upload part ${part.PartNumber}`)
|
||||
})
|
||||
)
|
||||
|
||||
completedParts.push(...batchResults)
|
||||
}
|
||||
|
||||
// Step 5: Finalize
|
||||
await filmmakerService.finalizeUpload(UploadId, Key, item.bucket, completedParts)
|
||||
|
||||
item.status = 'completed'
|
||||
item.progress = 100
|
||||
|
||||
// Check if all uploads done
|
||||
if (uploadQueue.value.every((u) => u.status !== 'uploading' && u.status !== 'pending')) {
|
||||
isUploading.value = false
|
||||
}
|
||||
|
||||
return Key
|
||||
} catch (err: any) {
|
||||
item.status = 'failed'
|
||||
item.error = err.message || 'Upload failed'
|
||||
console.error('Upload failed:', err)
|
||||
|
||||
if (uploadQueue.value.every((u) => u.status !== 'uploading' && u.status !== 'pending')) {
|
||||
isUploading.value = false
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove completed or failed upload from queue
|
||||
*/
|
||||
function removeUpload(id: string) {
|
||||
uploadQueue.value = uploadQueue.value.filter((u) => u.id !== id)
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear all completed uploads
|
||||
*/
|
||||
function clearCompleted() {
|
||||
uploadQueue.value = uploadQueue.value.filter((u) => u.status !== 'completed')
|
||||
}
|
||||
|
||||
return {
|
||||
uploadQueue,
|
||||
isUploading,
|
||||
totalProgress,
|
||||
activeUploads,
|
||||
completedUploads,
|
||||
addUpload,
|
||||
removeUpload,
|
||||
clearCompleted,
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user