@statezero/core
The type-safe frontend client for StateZero - connect directly to your backend models with zero boilerplate
import axios from "axios";
import { configInstance } from "../../config.js";
import PQueue from "p-queue";
/**
* FileObject - A file wrapper that handles uploads to StateZero backend
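*
* A minimal usage sketch (illustrative only, not part of the module; it assumes
* a File picked from an <input type="file"> element and a backend configured
* through configInstance):
*
* @example
* const picker = document.querySelector('input[type="file"]');
* const fileObject = new FileObject(picker.files[0], {
*   onProgress: (pct) => console.log(`upload ${pct}%`),
* });
* await fileObject.waitForUpload();
* console.log(fileObject.status, fileObject.filePath);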
*/
export class FileObject {
// Accepts either stored file metadata returned by the API or a browser File to upload
constructor(file, options = {}) {
// Handle stored file data (from API)
if (file &&
typeof file === "object" &&
file.file_path &&
!(file instanceof File)) {
// This is stored file data from the backend
this.name = file.file_name;
this.size = file.size;
this.type = file.mime_type; // MIME type as reported by the backend
this.lastModified = null;
// Mark as already uploaded
this.uploaded = true;
this.uploading = false;
this.uploadResult = file; // Store the entire response
this.uploadError = null;
this.fileData = null;
// No upload is needed for stored files; keep the upload bookkeeping at inert defaults
this.uploadType = null;
this.uploadId = null;
this.totalChunks = 0;
this.completedChunks = 0;
this.chunkSize = null;
this.maxConcurrency = null;
this.uploadPromise = Promise.resolve(this.uploadResult);
return;
}
// Handle File objects (for upload)
if (!file || !(file instanceof File)) {
throw new Error("FileObject requires a File object or stored file data");
}
// Store file metadata directly
this.name = file.name;
this.size = file.size;
this.type = file.type;
this.lastModified = file.lastModified;
// Initialize state properties
this.uploaded = false;
this.uploading = false;
this.uploadResult = null;
this.uploadError = null;
this.fileData = null;
// Multipart upload properties
this.uploadType = null; // 'single' or 'multipart'
this.uploadId = null;
this.totalChunks = 0;
this.completedChunks = 0;
this.chunkSize = options.chunkSize || 5 * 1024 * 1024; // 5MB default
if (this.chunkSize < this.constructor.MIN_CHUNK_SIZE) {
throw new Error(`Chunk size must be at least ${this.constructor.MIN_CHUNK_SIZE / (1024 * 1024)}MB for multipart uploads. ` +
`Provided: ${this.chunkSize / (1024 * 1024)}MB`);
}
this.maxConcurrency = options.maxConcurrency || 3;
this.uploadPromise = this._initializeAndStartUpload(file, options);
}
get isStoredFile() {
return this.uploaded && !this.fileData && this.uploadResult?.file_path;
}
get status() {
if (this.uploadError)
return "failed";
if (this.uploading)
return "uploading";
if (this.uploaded)
return "uploaded";
return "pending";
}
get filePath() {
return this.uploadResult?.file_path;
}
get fileUrl() {
if (!this.uploadResult?.file_url) {
return null;
}
return configInstance.buildFileUrl(this.uploadResult.file_url, this.constructor.configKey);
}
async _initializeAndStartUpload(file, options) {
const config = configInstance.getConfig();
const backend = config.backendConfigs?.[this.constructor.configKey];
if (!backend) {
throw new Error(`No backend configuration found for key: ${this.constructor.configKey}`);
}
// Check if fast uploads are enabled
if (backend.fileUploadMode === "s3") {
return this._fastUpload(file, options);
}
else {
// Read file data for direct upload
await this._readFileData(file);
return this._directUpload(options);
}
}
/**
* Fast upload using S3 presigned URLs with multipart support
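*
* Flow: POST {API_URL}/files/fast-upload/ with action "initiate" to obtain
* presigned URL(s), PUT the file (or its chunks) directly to S3, then POST
* action "complete" to the same endpoint to register the file. Files larger
* than chunkSize are requested as a multipart upload.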
*/
async _fastUpload(file, options = {}) {
if (this.uploading)
return this.uploadPromise;
if (this.uploaded)
return Promise.resolve(this.uploadResult);
this.uploading = true;
this.uploadError = null;
try {
const config = configInstance.getConfig();
const backend = config.backendConfigs[this.constructor.configKey];
const baseUrl = backend.API_URL.replace(/\/+$/, "");
const headers = backend.getAuthHeaders ? backend.getAuthHeaders() : {};
// Determine if we need multipart upload
const needsMultipart = this.size > this.chunkSize;
const numChunks = needsMultipart
? Math.ceil(this.size / this.chunkSize)
: 1;
this.totalChunks = numChunks;
this.uploadType = needsMultipart ? "multipart" : "single";
// Step 1: Initiate fast upload
const initiateResponse = await axios.post(`${baseUrl}/files/fast-upload/`, {
action: "initiate",
filename: this.name,
content_type: this.type,
file_size: this.size,
num_chunks: numChunks,
}, { headers });
const uploadData = initiateResponse.data;
if (uploadData.upload_type === "single") {
// Single file upload
return await this._singleUpload(file, uploadData, options);
}
else {
// Multipart upload
this.uploadId = uploadData.upload_id;
return await this._multipartUpload(file, uploadData, options);
}
}
catch (error) {
this.uploading = false;
this.uploadError =
error.response?.data?.error || error.message || "Fast upload failed";
const uploadFailedError = new Error(`Fast upload failed: ${this.uploadError}`);
uploadFailedError.originalError = error;
throw uploadFailedError;
}
}
/**
* Handle single file upload
*/
async _singleUpload(file, uploadData, options) {
const { upload_url, content_type, file_path } = uploadData;
// Upload directly to S3 using PUT with raw file
await axios.put(upload_url, file, {
headers: {
"Content-Type": content_type,
},
...(options.onProgress && {
onUploadProgress: (progressEvent) => {
const total = progressEvent.total > 0 ? progressEvent.total : 0;
const percentage = total > 0 ? Math.round((progressEvent.loaded / total) * 100) : 0;
if (options.onProgress) {
options.onProgress(percentage);
}
},
}),
});
// Complete the upload
return await this._completeUpload(file_path, this.name);
}
/**
* Handle multipart upload with concurrency using p-queue
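*
* Each chunk is PUT to its presigned URL (upload_urls is keyed by 1-based
* part number) and the ETag returned for each part is collected so the
* completion request can assemble the parts in order. At most maxConcurrency
* chunk uploads run at once.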
*/
async _multipartUpload(file, uploadData, options) {
const { upload_urls, file_path } = uploadData;
const parts = [];
const chunks = this._createFileChunks(file);
// Create p-queue instance with concurrency control
const queue = new PQueue({
concurrency: this.maxConcurrency,
});
// Create upload tasks for each chunk
const uploadTasks = chunks.map((chunk, index) => {
const partNumber = index + 1;
const uploadUrl = upload_urls[partNumber];
return queue.add(async () => {
try {
const response = await axios.put(uploadUrl, chunk, {
headers: {
"Content-Type": "application/octet-stream",
},
});
const etag = response.headers.etag?.replace(/"/g, "");
parts[index] = {
PartNumber: partNumber,
ETag: etag,
};
this.completedChunks++;
// Report progress
if (options.onProgress) {
const progress = Math.round((this.completedChunks / this.totalChunks) * 100);
options.onProgress(progress);
}
return parts[index];
}
catch (error) {
console.error(`Failed to upload chunk ${partNumber}:`, error);
throw error;
}
});
});
// Wait for all uploads to complete
await Promise.all(uploadTasks);
// Complete multipart upload
return await this._completeUpload(file_path, this.name, this.uploadId, parts);
}
/**
* Create file chunks for multipart upload
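*
* Slices the File into chunkSize pieces, with the final chunk holding the
* remainder. For example, a 12 MB file with the default 5 MB chunk size
* produces three chunks of 5 MB, 5 MB and 2 MB.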
*/
_createFileChunks(file) {
const chunks = [];
let offset = 0;
while (offset < file.size) {
const chunkSize = Math.min(this.chunkSize, file.size - offset);
const chunk = file.slice(offset, offset + chunkSize);
chunks.push(chunk);
offset += chunkSize;
}
return chunks;
}
/**
* Complete the upload (both single and multipart)
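*
* Posts action "complete" with the file path and original name; for multipart
* uploads the upload_id and collected parts are included so the backend can
* finalize the S3 multipart upload. The response is stored as uploadResult.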
*/
async _completeUpload(filePath, originalName, uploadId = null, parts = null) {
const config = configInstance.getConfig();
const backend = config.backendConfigs[this.constructor.configKey];
const baseUrl = backend.API_URL.replace(/\/+$/, "");
const headers = backend.getAuthHeaders ? backend.getAuthHeaders() : {};
const completeData = {
action: "complete",
file_path: filePath,
original_name: originalName,
};
if (uploadId && parts) {
completeData.upload_id = uploadId;
completeData.parts = parts;
}
const completeResponse = await axios.post(`${baseUrl}/files/fast-upload/`, completeData, { headers });
this.uploadResult = {
...completeResponse.data,
uploadedAt: new Date(),
};
this.uploaded = true;
this.uploading = false;
return this.uploadResult;
}
/**
* Direct upload to the Django backend
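*
* Used when fileUploadMode is not "s3": the file is read into memory, wrapped
* in FormData and POSTed to {API_URL}/files/upload/ with the backend's auth
* headers.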
*/
async _directUpload(options = {}) {
if (this.uploading)
return this.uploadPromise;
if (this.uploaded)
return Promise.resolve(this.uploadResult);
if (this.uploadError && !this.uploading && !this.uploaded) {
return Promise.reject(new Error(`Cannot upload: file processing failed earlier - ${this.uploadError}`));
}
this.uploading = true;
this.uploadError = null;
try {
if (!this.fileData) {
throw new Error("File data is not available. Upload cannot proceed.");
}
const config = configInstance.getConfig();
const backend = config.backendConfigs[this.constructor.configKey];
if (!backend.API_URL) {
throw new Error(`API_URL is not defined in backend configuration for key: ${this.constructor.configKey}`);
}
const formData = new FormData();
const fileBlob = this.getBlob();
const reconstructedFile = new File([fileBlob], this.name, {
type: this.type,
lastModified: this.lastModified,
});
formData.append("file", reconstructedFile);
if (options.additionalFields) {
Object.entries(options.additionalFields).forEach(([key, value]) => {
formData.append(key, value);
});
}
const baseUrl = backend.API_URL.replace(/\/+$/, "");
const uploadUrl = `${baseUrl}/files/upload/`;
const headers = backend.getAuthHeaders ? backend.getAuthHeaders() : {};
const response = await axios.post(uploadUrl, formData, {
headers: {
...headers,
"Content-Type": "multipart/form-data",
},
...(options.onProgress && {
onUploadProgress: (progressEvent) => {
const total = progressEvent.total > 0 ? progressEvent.total : 0;
const percentage = total > 0 ? Math.round((progressEvent.loaded / total) * 100) : 0;
if (options.onProgress) {
options.onProgress(percentage);
}
},
}),
});
this.uploadResult = {
...response.data,
uploadedAt: new Date(),
};
this.uploaded = true;
this.uploading = false;
return this.uploadResult;
}
catch (error) {
this.uploading = false;
this.uploadError =
error.response?.data?.error || error.message || "Unknown upload error";
const uploadFailedError = new Error(`Upload failed: ${this.uploadError}`);
uploadFailedError.originalError = error;
throw uploadFailedError;
}
}
/**
* Reads the file content into an ArrayBuffer (for direct uploads only)
*/
async _readFileData(file) {
try {
this.fileData = await file.arrayBuffer();
}
catch (error) {
console.error("Failed to read file data:", error);
throw new Error(`Failed to read file data: ${error.message}`);
}
}
/**
* Gets the file data as a Blob (for direct uploads only)
*/
getBlob() {
if (!this.fileData) {
throw new Error("File data not yet loaded or failed to load.");
}
return new Blob([this.fileData], { type: this.type });
}
async waitForUpload() {
return this.uploadPromise;
}
}
FileObject.configKey = "default";
FileObject.MIN_CHUNK_SIZE = 5 * 1024 * 1024; // 5MB minimum for S3 multipart
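
Because the class looks up its backend settings through this.constructor.configKey rather than a hard-coded key, a subclass can target a different entry in backendConfigs. A minimal sketch, for illustration only; the "reports" key below is an assumption, not part of the package:

// Hypothetical subclass bound to a non-default backend configuration
export class ReportFileObject extends FileObject {
}
// Instances now resolve API_URL, auth headers and fileUploadMode from
// backendConfigs.reports instead of backendConfigs.default
ReportFileObject.configKey = "reports";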