magically-sdk
Official SDK for Magically - Build mobile apps with AI
421 lines (420 loc) • 17.6 kB
JavaScript
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MagicallyFiles = void 0;
const Logger_1 = require("./Logger");
const APIClient_1 = require("./APIClient");
const utils_1 = require("./utils");
// Chunking configuration
const CHUNK_SIZE = 3 * 1024 * 1024; // 3MB chunks (safe under Vercel's 4.5MB limit)
const CHUNK_THRESHOLD = 4 * 1024 * 1024; // Use chunking for files >= 4MB
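// Illustrative example (values assumed, not from the API): a 10 MB file is >= CHUNK_THRESHOLD,
// so upload() delegates to uploadChunked(), which splits it into Math.ceil(10 MB / 3 MB) = 4 chunks.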
class MagicallyFiles {
constructor(config, auth) {
this.config = config;
this.auth = auth;
this.logger = new Logger_1.Logger(config.debug || false, 'MagicallyFiles');
this.apiClient = new APIClient_1.APIClient(config, 'MagicallyFiles');
}
/**
* Convert a blob/file to base64 string for chunked upload
* Works in all environments (browser, React Native, Node.js, Cloudflare Workers)
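*
* @example
* // Illustrative sketch: encode the first 16 bytes of an existing File/Blob `file`
* // (`magically` is assumed to be an initialized SDK instance)
* const base64 = await magically.files.blobToBase64(file.slice(0, 16));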
*/
async blobToBase64(blob) {
// Check if we're in Cloudflare Workers/Edge environment
if (typeof FileReader === 'undefined') {
// In Cloudflare Workers, use arrayBuffer and convert to base64
// Note: Since chunks are max 3MB, this is safe for memory
const arrayBuffer = await blob.arrayBuffer();
const bytes = new Uint8Array(arrayBuffer);
// For chunks up to 3MB, we can safely use btoa with array manipulation
// Using Array.from and map is more efficient than string concatenation
const binary = Array.from(bytes, byte => String.fromCharCode(byte)).join('');
return btoa(binary);
}
// For browser/React Native environments, use FileReader
return new Promise((resolve, reject) => {
const reader = new FileReader();
reader.readAsDataURL(blob);
reader.onload = () => {
const result = reader.result;
// Remove data URL prefix (e.g., "data:image/png;base64,")
const base64 = result.split(',')[1];
resolve(base64);
};
reader.onerror = error => reject(error);
});
}
/**
* Internal method to handle chunked upload
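* (start → upload chunks → complete). Called automatically by upload() for files
* at or above CHUNK_THRESHOLD.
*
* @example
* // Illustrative request bodies POSTed to .../data/files/chunk, mirroring the
* // payloads built below (field names taken from this method, not an API spec):
* // { action: 'start', filename, fileSize, mimeType, totalChunks }
* // { action: 'upload', uploadId, chunkIndex, data }   // data = base64-encoded chunk
* // { action: 'complete', uploadId, tags, metadata }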
*/
async uploadChunked(file, options) {
const fileName = file instanceof File ? file.name : 'blob';
const totalChunks = Math.ceil(file.size / CHUNK_SIZE);
this.logger.debug('Using chunked upload', {
fileName,
fileSize: file.size,
totalChunks,
chunkSize: CHUNK_SIZE
});
const token = await (0, utils_1.getAuthToken)(this.apiClient, this.auth);
const baseUrl = `${this.config.apiUrl || 'https://trymagically.com'}/api/project/${this.config.projectId}/data/files/chunk`;
// Build headers
const headers = {
'Content-Type': 'application/json',
};
if (token) {
headers['Authorization'] = `Bearer ${token}`;
}
else if (this.apiClient.isEdgeEnvironment() && this.config.apiKey) {
headers['Authorization'] = `Bearer ${this.config.apiKey}`;
}
// Step 1: Start upload session
const startResponse = await fetch(baseUrl, {
method: 'POST',
headers,
body: JSON.stringify({
action: 'start',
filename: fileName,
fileSize: file.size,
mimeType: file.type || 'application/octet-stream',
totalChunks
})
});
if (!startResponse.ok) {
const error = await startResponse.json();
throw new Error(error.error_description || error.error || 'Failed to start upload session');
}
const { uploadId } = await startResponse.json();
// Step 2: Upload chunks
for (let i = 0; i < totalChunks; i++) {
const start = i * CHUNK_SIZE;
const end = Math.min(start + CHUNK_SIZE, file.size);
const chunk = file.slice(start, end);
// Convert chunk to base64
const base64Data = await this.blobToBase64(chunk);
const chunkResponse = await fetch(baseUrl, {
method: 'POST',
headers,
body: JSON.stringify({
action: 'upload',
uploadId,
chunkIndex: i,
data: base64Data
})
});
if (!chunkResponse.ok) {
const error = await chunkResponse.json();
throw new Error(error.error_description || `Failed to upload chunk ${i + 1}/${totalChunks}`);
}
const chunkResult = await chunkResponse.json();
// Report progress if callback provided
if (options?.onUploadProgress) {
options.onUploadProgress(chunkResult.progress);
}
this.logger.debug(`Uploaded chunk ${i + 1}/${totalChunks}`, {
progress: chunkResult.progress
});
}
// Step 3: Complete upload
const completeResponse = await fetch(baseUrl, {
method: 'POST',
headers,
body: JSON.stringify({
action: 'complete',
uploadId,
tags: options?.tags,
metadata: options?.metadata
})
});
if (!completeResponse.ok) {
const error = await completeResponse.json();
throw new Error(error.error_description || 'Failed to complete upload');
}
const result = await completeResponse.json();
return result.file;
}
/**
* Convert a URI (from camera, image picker, etc.) to a proper File object
*
* This is the CRITICAL step that LLMs often miss. All Expo camera/picker results
* give you URIs, but the upload API needs proper File objects.
*
* Steps performed:
* 1. Fetch the URI to get the actual binary data
* 2. Convert response to blob
* 3. Create proper File object with name and MIME type
*
* @param uri - URI from camera/picker (asset.uri or photo.uri)
* @param fileName - Name for the file (include extension!)
* @param mimeType - MIME type (e.g., 'image/jpeg', 'image/png')
* @returns Proper File object ready for upload
*
* @example
* // From camera
* const photo = await cameraRef.current.takePictureAsync();
* const file = await magically.files.convertUriToFile(
* photo.uri,
* 'photo.jpg',
* 'image/jpeg'
* );
*
* @example
* // From image picker
* const result = await ImagePicker.launchImageLibraryAsync();
* const asset = result.assets[0];
* const file = await magically.files.convertUriToFile(
* asset.uri,
* asset.fileName || 'image.jpg',
* asset.mimeType || 'image/jpeg'
* );
*/
async convertUriToFile(uri, fileName, mimeType = 'image/jpeg') {
try {
this.logger.debug('Converting URI to File', { uri, fileName, mimeType });
// Step 1: Fetch the URI to get binary data
const response = await fetch(uri);
if (!response.ok) {
throw new Error(`Failed to fetch URI: ${response.statusText}`);
}
// Step 2: Convert to blob and validate
const blob = await response.blob();
// Validate blob has content
if (blob.size === 0) {
throw new Error('File is empty or could not be read from URI');
}
this.logger.debug('Blob created from URI', {
blobSize: blob.size,
blobType: blob.type
});
// Step 3: Create proper File object
const file = new File([blob], fileName, { type: mimeType });
// Validate File object was created properly
if (file.size === 0) {
throw new Error('Failed to create valid File object - size is 0');
}
if (file.size !== blob.size) {
this.logger.warn('File size mismatch after creation', {
blobSize: blob.size,
fileSize: file.size
});
}
this.logger.debug('URI conversion successful', {
originalUri: uri,
fileName: file.name,
fileSize: file.size,
fileType: file.type,
conversionValid: file.size > 0
});
return file;
}
catch (error) {
throw new Error(`Failed to convert URI to File: ${error instanceof Error ? error.message : 'Unknown error'}`);
}
}
/**
* Upload a file to Vercel Blob storage and save metadata to MongoDB
* @param file - File object to upload (from file input or camera)
* @param options - Upload options (tags, metadata)
* @returns Uploaded file metadata
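*
* @example
* // Illustrative flow: convert a picker URI, then upload with progress reporting
* // (`magically` is an initialized SDK instance; tag/metadata values are placeholders)
* const file = await magically.files.convertUriToFile(asset.uri, 'photo.jpg', 'image/jpeg');
* const uploaded = await magically.files.upload(file, {
*   tags: ['profile'],
*   metadata: { source: 'image-picker' },
*   onUploadProgress: (progress) => console.log(`Upload ${progress}%`)
* });
* console.log(uploaded._id, uploaded.url);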
*/
async upload(file, options) {
try {
this.logger.debug('Starting file upload', {
fileName: file instanceof File ? file.name : 'blob',
fileSize: file.size,
fileType: file.type,
options
});
// Automatically use chunked upload for files >= 4MB
if (file.size >= CHUNK_THRESHOLD) {
this.logger.debug(`File size ${(file.size / 1024 / 1024).toFixed(2)}MB exceeds threshold, using chunked upload`);
return await this.uploadChunked(file, options);
}
// Use direct upload for small files (< 4MB)
this.logger.debug(`File size ${(file.size / 1024 / 1024).toFixed(2)}MB below threshold, using direct upload`);
const token = await (0, utils_1.getAuthToken)(this.apiClient, this.auth);
// Convert to blob for raw upload (like Convex pattern)
const blob = file instanceof File ? new Blob([file], { type: file.type }) : file;
const fileName = file instanceof File ? file.name : 'blob';
// Build query parameters for metadata
const params = new URLSearchParams();
params.set('fileName', fileName);
params.set('fileSize', file.size.toString());
params.set('mimeType', file.type);
if (options?.tags) {
params.set('tags', JSON.stringify(options.tags));
}
if (options?.metadata) {
params.set('metadata', JSON.stringify(options.metadata));
}
// Build headers based on token
const headers = {
'Content-Type': file.type,
'Content-Length': file.size.toString(),
};
// Add Authorization header with either JWT token or API key
if (token) {
// JWT token from browser/Node environment
headers['Authorization'] = `Bearer ${token}`;
}
else if (this.apiClient.isEdgeEnvironment() && this.config.apiKey) {
// API key in edge environment
headers['Authorization'] = `Bearer ${this.config.apiKey}`;
}
// Log the request before sending it so the measured duration covers the network call
const requestId = this.logger.networkRequest('POST', `/api/project/${this.config.projectId}/data/files`, {
headers: { 'Content-Type': file.type, 'Content-Length': file.size.toString() },
body: { fileName, fileSize: file.size, mimeType: file.type, ...options },
operation: 'upload:file'
});
const startTime = Date.now();
// Raw blob upload goes through fetch directly instead of APIClient
const response = await fetch(`${this.config.apiUrl || 'https://trymagically.com'}/api/project/${this.config.projectId}/data/files?${params.toString()}`, {
method: 'POST',
headers,
body: blob,
});
const responseData = await response.json();
const duration = Date.now() - startTime;
if (!response.ok) {
this.logger.networkError(requestId, responseData, { duration, operation: 'upload:file' });
throw new Error(responseData.error_description || `File upload failed: ${response.statusText}`);
}
this.logger.networkResponse(requestId, {
status: response.status,
statusText: response.statusText,
duration,
data: responseData,
operation: 'upload:file'
});
this.logger.success('File upload completed', {
fileId: responseData.file._id,
fileName: responseData.file.originalName,
fileSize: responseData.file.size,
url: responseData.file.url
});
// Report 100% progress for direct upload if callback provided
if (options?.onUploadProgress) {
options.onUploadProgress(100);
}
return responseData.file;
}
catch (error) {
throw error;
}
}
/**
* List uploaded files with filtering and pagination
* @param options - List options (limit, skip, tags, mimeType)
* @returns List of files with pagination info
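*
* @example
* // Illustrative: page through image files 20 at a time (`magically` assumed initialized)
* const page = await magically.files.list({ limit: 20, skip: 0, mimeType: 'image/' });
* console.log(page.data.length, 'of', page.total);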
*/
async list(options) {
try {
this.logger.debug('Listing files', { options });
const token = await (0, utils_1.getAuthToken)(this.apiClient, this.auth);
// Build query parameters
const params = new URLSearchParams();
if (options?.limit)
params.append('limit', options.limit.toString());
if (options?.skip)
params.append('skip', options.skip.toString());
if (options?.tags)
params.append('tags', options.tags.join(','));
if (options?.mimeType)
params.append('mimeType', options.mimeType);
const endpoint = `/api/project/${this.config.projectId}/data/files${params.toString() ? `?${params.toString()}` : ''}`;
const result = await this.apiClient.request(endpoint, {
method: 'GET',
operation: 'list:files'
}, token);
this.logger.success('Files listed', {
count: result.data.length,
total: result.total,
limit: result.limit,
skip: result.skip
});
return result;
}
catch (error) {
throw error;
}
}
/**
* Delete a file from both Vercel Blob and MongoDB
* @param fileId - MongoDB document ID of the file to delete
* @returns Success confirmation
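*
* @example
* // Illustrative: delete a previously uploaded file by its MongoDB _id
* await magically.files.delete(uploaded._id);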
*/
async delete(fileId) {
try {
this.logger.debug('Deleting file', { fileId });
const token = await (0, utils_1.getAuthToken)(this.apiClient, this.auth);
const result = await this.apiClient.request(`/api/project/${this.config.projectId}/data/files?fileId=${fileId}`, {
method: 'DELETE',
operation: `delete:file:${fileId}`
}, token);
this.logger.success('File deleted', { fileId });
return result;
}
catch (error) {
throw error;
}
}
/**
* Upload multiple files in parallel
* @param files - Array of files to upload
* @param options - Upload options applied to all files
* @returns Array of uploaded file metadata
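*
* @example
* // Illustrative: upload every selected picker asset with a shared tag
* // (assumes an ImagePicker `result` as in the convertUriToFile examples above)
* const files = await Promise.all(result.assets.map(a =>
*   magically.files.convertUriToFile(a.uri, a.fileName || 'image.jpg', a.mimeType || 'image/jpeg')
* ));
* const uploaded = await magically.files.uploadMultiple(files, { tags: ['gallery'] });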
*/
async uploadMultiple(files, options) {
try {
this.logger.debug('Starting multiple file upload', { fileCount: files.length });
const uploadPromises = files.map(file => this.upload(file, options));
const results = await Promise.all(uploadPromises);
this.logger.success('Multiple file upload completed', {
uploadedCount: results.length,
totalSize: results.reduce((sum, file) => sum + file.size, 0)
});
return results;
}
catch (error) {
throw error;
}
}
/**
* Get files by tags
* @param tags - Array of tags to filter by
* @param options - Additional list options
* @returns Files matching the tags
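*
* @example
* // Illustrative: fetch the ten most recent files tagged 'avatar'
* const avatars = await magically.files.getByTags(['avatar'], { limit: 10 });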
*/
async getByTags(tags, options) {
return this.list({ ...options, tags });
}
/**
* Get files by MIME type
* @param mimeType - MIME type to filter by (supports partial matching)
* @param options - Additional list options
* @returns Files matching the MIME type
*/
async getByMimeType(mimeType, options) {
return this.list({ ...options, mimeType });
}
/**
* Get image files only
* @param options - List options
* @returns Image files only
*/
async getImages(options) {
return this.getByMimeType('image/', options);
}
/**
* Get document files only (PDF, DOC, etc.)
* @param options - List options
* @returns Document files only
*/
async getDocuments(options) {
// The list API filters on a single MIME type per call, so this currently returns PDFs only.
// Word formats (application/msword, application/vnd.openxmlformats-officedocument) would
// require separate calls or an API enhancement.
return this.getByMimeType('application/pdf', options);
}
}
exports.MagicallyFiles = MagicallyFiles;