uploadthing
Version: 7.4.4
Learn more: [docs.uploadthing.com](https://docs.uploadthing.com)
364 lines (354 loc) • 15.7 kB
JavaScript
Object.defineProperty(exports, '__esModule', { value: true });
var Arr = require('effect/Array');
var Micro = require('effect/Micro');
var shared = require('@uploadthing/shared');
var Function = require('effect/Function');
// Wraps a CJS module so it behaves like an ESM namespace object
// (`import * as ns`): null prototype, live getters for each key,
// and the original module exposed as `ns.default`.
function _interopNamespace(e) {
    // Already an ESM namespace — use it as-is.
    if (e && e.__esModule) return e;
    const ns = Object.create(null);
    if (e) {
        for (const key of Object.keys(e)) {
            if (key === 'default') continue;
            const desc = Object.getOwnPropertyDescriptor(e, key);
            // Preserve accessor properties; plain values get a live getter
            // so later mutations of the source module stay visible.
            Object.defineProperty(ns, key, desc.get ? desc : {
                enumerable: true,
                get: function () { return e[key]; }
            });
        }
    }
    ns.default = e;
    return ns;
}
// Namespace-wrapped views of the CJS requires above, so the transpiled code
// can keep the original `import * as Ns` member-access style.
var Arr__namespace = /*#__PURE__*/_interopNamespace(Arr);
var Micro__namespace = /*#__PURE__*/_interopNamespace(Micro);
// Package version baked in at build time; sent in request headers below.
var version$1 = "7.4.4";
// Builds a "deferred": a promise that can be settled from the outside,
// bundled with an AbortController used to cancel the work it tracks.
const createDeferred = ()=>{
    const deferred = {
        ac: new AbortController()
    };
    deferred.promise = new Promise((res, rej)=>{
        // Capture the settle functions so callers can resolve/reject later.
        deferred.resolve = res;
        deferred.reject = rej;
    });
    return deferred;
};
// Builds the URL for an uploadthing API action, appending `actionType`
// and `slug` while preserving any query params already on the base URL.
const createAPIRequestUrl = (config)=>{
    const url = new URL(config.url);
    url.searchParams.set("actionType", config.actionType);
    url.searchParams.set("slug", config.slug);
    return url;
};
/**
 * Creates a "client" for reporting events to the UploadThing server via the
 * user's API endpoint. Events are handled in "./handler.ts starting at L112".
 *
 * Returns a curried reporter: `(type, payload)` POSTs the JSON payload to
 * `<cfg.url>?actionType=<type>&slug=<cfg.endpoint>` and yields the parsed
 * JSON response, mapping transport/HTTP/parse failures to UploadThingError.
 */ const createUTReporter = (cfg)=>(type, payload)=>Micro__namespace.gen(function*() {
        const url = createAPIRequestUrl({
            url: cfg.url,
            slug: cfg.endpoint,
            actionType: type
        });
        // cfg.headers may be a static object or a (possibly async) factory;
        // resolve either form before building the Headers instance.
        const headers = new Headers((yield* Micro__namespace.promise(async ()=>typeof cfg.headers === "function" ? await cfg.headers() : cfg.headers)));
        // Identify the calling package and client version to the server.
        headers.set("x-uploadthing-package", cfg.package);
        headers.set("x-uploadthing-version", version$1);
        headers.set("Content-Type", "application/json");
        const response = yield* shared.fetchEff(url, {
            method: "POST",
            body: JSON.stringify(payload),
            headers
        }).pipe(Micro__namespace.andThen(shared.parseResponseJson), /**
     * We don't _need_ to validate the response here, just cast it for now.
     * As of now, @effect/schema includes quite a few bytes we cut out by this...
     * We have "strong typing" on the backend that ensures the shape should match.
     */ Micro__namespace.map(Function.unsafeCoerce), // Network-level failure (offline, DNS, CORS, ...).
        Micro__namespace.catchTag("FetchError", (e)=>Micro__namespace.fail(new shared.UploadThingError({
            code: "INTERNAL_CLIENT_ERROR",
            message: `Failed to report event "${type}" to UploadThing server`,
            cause: e
        }))), // Non-2xx HTTP response: derive the error code from the status.
        Micro__namespace.catchTag("BadRequestError", (e)=>Micro__namespace.fail(new shared.UploadThingError({
            code: shared.getErrorTypeFromStatusCode(e.status),
            message: e.getMessage(),
            cause: e.json
        }))), // 2xx response whose body was not valid JSON.
        Micro__namespace.catchTag("InvalidJson", (e)=>Micro__namespace.fail(new shared.UploadThingError({
            code: "INTERNAL_CLIENT_ERROR",
            message: "Failed to parse response from UploadThing server",
            cause: e
        }))));
        return response;
    });
/**
 * PUTs `file` to the presigned URL via XMLHttpRequest (used instead of fetch
 * so upload progress events are available), resuming from byte `rangeStart`
 * when the server already holds a partial upload.
 *
 * @param file File/Blob to send
 * @param rangeStart first byte the server still needs (0 for a fresh upload)
 * @param presigned object whose `url` is the presigned PUT target
 * @param onUploadProgress optional callback receiving { loaded, delta }
 * @returns Micro effect succeeding with the server's JSON response; fails
 *          with UploadThingError({ code: "UPLOAD_FAILED" }) on network error
 */ const uploadWithProgress = (file, rangeStart, presigned, onUploadProgress)=>Micro__namespace.async((resume)=>{
        const xhr = new XMLHttpRequest();
        xhr.open("PUT", presigned.url, true);
        xhr.setRequestHeader("Range", `bytes=${rangeStart}-`);
        xhr.setRequestHeader("x-uploadthing-version", version$1);
        xhr.responseType = "json";
        let previousLoaded = 0;
        xhr.upload.addEventListener("progress", ({ loaded })=>{
            // Progress events report a cumulative byte count; derive the increment.
            const delta = loaded - previousLoaded;
            onUploadProgress?.({
                loaded,
                delta
            });
            previousLoaded = loaded;
        });
        xhr.addEventListener("load", ()=>{
            // Non-2xx here is treated as a defect (die) rather than a typed failure.
            resume(xhr.status >= 200 && xhr.status < 300 ? Micro__namespace.succeed(xhr.response) : Micro__namespace.die(`XHR failed ${xhr.status} ${xhr.statusText} - ${JSON.stringify(xhr.response)}`));
        });
        // Is there a case when the client would throw and
        // ingest server not knowing about it? idts?
        xhr.addEventListener("error", ()=>{
            // FIX: `resume` expects a Micro effect, but the raw UploadThingError
            // was previously passed directly (compare the "load" handler, which
            // wraps in Micro.succeed/Micro.die). Wrap it in Micro.fail so the
            // network error surfaces as a typed failure instead of corrupting
            // the effect runtime.
            resume(Micro__namespace.fail(new shared.UploadThingError({
                code: "UPLOAD_FAILED"
            })));
        });
        const formData = new FormData();
        // When resuming, only send the byte range the server is missing.
        formData.append("file", rangeStart > 0 ? file.slice(rangeStart) : file);
        xhr.send(formData);
        // Interruption cleanup: abort the in-flight request.
        return Micro__namespace.sync(()=>xhr.abort());
    });
const uploadFile = (file, presigned, opts)=>shared.fetchEff(presigned.url, {
method: "HEAD"
}).pipe(Micro__namespace.map(({ headers })=>parseInt(headers.get("x-ut-range-start") ?? "0", 10)), Micro__namespace.tap((start)=>opts.onUploadProgress?.({
delta: start,
loaded: start
})), Micro__namespace.flatMap((start)=>uploadWithProgress(file, start, presigned, (progressEvent)=>opts.onUploadProgress?.({
delta: progressEvent.delta,
loaded: progressEvent.loaded + start
}))), Micro__namespace.map(Function.unsafeCoerce), Micro__namespace.map((uploadResponse)=>({
name: file.name,
size: file.size,
key: presigned.key,
lastModified: file.lastModified,
serverData: uploadResponse.serverData,
url: uploadResponse.url,
appUrl: uploadResponse.appUrl,
customId: presigned.customId,
type: file.type,
fileHash: uploadResponse.fileHash
})));
/**
 * Requests presigned URLs for all files from the user's API endpoint, then
 * uploads them with a concurrency limit of 6, aggregating per-file progress
 * into whole-batch totals for `opts.onUploadProgress`.
 */ const uploadFilesInternal = (endpoint, opts)=>{
    // classic service right here
    const reportEventToUT = createUTReporter({
        endpoint: String(endpoint),
        package: opts.package,
        url: opts.url,
        headers: opts.headers
    });
    const totalSize = opts.files.reduce((acc, f)=>acc + f.size, 0);
    let totalLoaded = 0;
    // The "upload" event returns one presigned entry per file; the index `i`
    // below assumes the response preserves the order the files were sent in.
    return reportEventToUT("upload", {
        input: "input" in opts ? opts.input : null,
        files: opts.files.map((f)=>({
            name: f.name,
            size: f.size,
            type: f.type,
            lastModified: f.lastModified
        }))
    }).pipe(Micro__namespace.flatMap((presigneds)=>Micro__namespace.forEach(presigneds, (presigned, i)=>Micro__namespace.flatMap(Micro__namespace.sync(()=>opts.onUploadBegin?.({
            file: opts.files[i].name
        })), ()=>uploadFile(opts.files[i], presigned, {
            onUploadProgress: (ev)=>{
                // Fold this file's increment into the batch-wide counters.
                totalLoaded += ev.delta;
                opts.onUploadProgress?.({
                    file: opts.files[i],
                    progress: Math.round(ev.loaded / opts.files[i].size * 100),
                    loaded: ev.loaded,
                    delta: ev.delta,
                    totalLoaded,
                    totalProgress: Math.round(totalLoaded / totalSize * 100)
                });
            }
        })), {
        concurrency: 6
    })), // NOTE(review): `window.fetch` is passed unbound; browsers throw
    // "Illegal invocation" if it is later called without `window` as `this`.
    // Verify shared.fetchEff binds it, or pass `window.fetch.bind(window)`.
    Micro__namespace.provideService(shared.FetchContext, window.fetch));
};
// Public re-export of the bundled package version string ("7.4.4").
const version = version$1;
/**
 * Validate that a file is of a valid type given a route config
 * @public
 */ const isValidFileType = (file, routeConfig)=>Micro__namespace.runSync(
    shared.matchFileType(file, shared.objectKeys(routeConfig)).pipe(
        // matchFileType resolves the route-config key matching this file's MIME type
        Micro__namespace.map((matchedType)=>file.type.includes(matchedType)),
        // Treat any matching failure as "not valid" instead of throwing
        Micro__namespace.orElseSucceed(()=>false)
    )
);
/**
 * Validate that a file is of a valid size given a route config
 * @public
 */ const isValidFileSize = (file, routeConfig)=>Micro__namespace.runSync(
    shared.matchFileType(file, shared.objectKeys(routeConfig)).pipe(
        // Look up the configured max size for the matched type and convert to bytes
        Micro__namespace.flatMap((matchedType)=>shared.fileSizeToBytes(routeConfig[matchedType].maxFileSize)),
        Micro__namespace.map((limitBytes)=>file.size <= limitBytes),
        // Treat any matching/parsing failure as "not valid" instead of throwing
        Micro__namespace.orElseSucceed(()=>false)
    )
);
/**
 * Generate a typed uploader for a given FileRouter
 * @public
 *
 * Returns:
 * - `uploadFiles`: one-shot upload (presign + upload, abortable via a signal)
 * - `createUpload`: controllable upload with pause/resume/done
 * - `routeRegistry`: identity proxy enabling "Go to definition" route refs
 */ const genUploader = (initOpts)=>{
    // Proxy whose property access yields the property name itself, so routes
    // can be referenced as `(routes) => routes.myRoute` instead of strings.
    const routeRegistry = shared.createIdentityProxy();
    // Controllable upload: presigns immediately, starts every file's upload,
    // and exposes pause/resume/done keyed by the File object.
    const controllableUpload = async (slug, opts)=>{
        // Map<File, { deferred, presigned }> tracking each in-flight upload.
        const uploads = new Map();
        const endpoint = typeof slug === "function" ? slug(routeRegistry) : slug;
        const utReporter = createUTReporter({
            endpoint: String(endpoint),
            package: initOpts.package,
            url: shared.resolveMaybeUrlArg(initOpts?.url),
            headers: opts.headers
        });
        // Request one presigned URL per file (assumed same order as sent).
        const presigneds = await Micro__namespace.runPromise(utReporter("upload", {
            // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
            input: "input" in opts ? opts.input : null,
            files: opts.files.map((f)=>({
                name: f.name,
                size: f.size,
                type: f.type,
                lastModified: f.lastModified
            }))
        }).pipe(Micro__namespace.provideService(shared.FetchContext, window.fetch)));
        const totalSize = opts.files.reduce((acc, f)=>acc + f.size, 0);
        let totalLoaded = 0;
        // Effect uploading one file, translating per-file progress into batch
        // totals. Shared by the initial kick-off loop and by resumeUpload.
        const uploadEffect = (file, presigned)=>uploadFile(file, presigned, {
            onUploadProgress: (progressEvent)=>{
                totalLoaded += progressEvent.delta;
                opts.onUploadProgress?.({
                    ...progressEvent,
                    file,
                    progress: Math.round(progressEvent.loaded / file.size * 100),
                    totalLoaded,
                    totalProgress: Math.round(totalLoaded / totalSize * 100)
                });
            }
        }).pipe(Micro__namespace.provideService(shared.FetchContext, window.fetch));
        // Start every upload immediately; each outcome settles its deferred.
        for (const [i, p] of presigneds.entries()){
            const file = opts.files[i];
            const deferred = createDeferred();
            uploads.set(file, {
                deferred,
                presigned: p
            });
            void Micro__namespace.runPromiseExit(uploadEffect(file, p), {
                signal: deferred.ac.signal
            }).then((result)=>{
                if (result._tag === "Success") {
                    return deferred.resolve(result.value);
                } else if (result.cause._tag === "Interrupt") {
                    // Interrupt means pauseUpload aborted the signal; the catch
                    // below swallows it so the deferred stays pending for resume.
                    throw new shared.UploadPausedError();
                }
                throw Micro__namespace.causeSquash(result.cause);
            }).catch((err)=>{
                if (err instanceof shared.UploadPausedError) return;
                deferred.reject(err);
            });
        }
        /**
         * Pause an ongoing upload
         * @param file The file upload you want to pause. Can be omitted to pause all files
         */ const pauseUpload = (file)=>{
            const files = Arr__namespace.ensure(file ?? opts.files);
            for (const file of files){
                const upload = uploads.get(file);
                // NOTE(review): silently returns (not continues) on an unknown
                // file, while resumeUpload throws — confirm asymmetry is intended.
                if (!upload) return;
                if (upload.deferred.ac.signal.aborted) {
                    // Cancel the upload if it's already been paused
                    throw new shared.UploadAbortedError();
                }
                upload.deferred.ac.abort();
            }
        };
        /**
         * Resume a paused upload
         * @param file The file upload you want to resume. Can be omitted to resume all files
         */ const resumeUpload = (file)=>{
            const files = Arr__namespace.ensure(file ?? opts.files);
            for (const file of files){
                const upload = uploads.get(file);
                // NOTE(review): throws a bare string; an Error subclass would
                // preserve stack traces for callers.
                if (!upload) throw "No upload found";
                // Fresh controller: the previous one was aborted by pauseUpload.
                upload.deferred.ac = new AbortController();
                // Re-run the upload; uploadFile's HEAD request gets the resume
                // offset from the server so already-sent bytes are skipped.
                void Micro__namespace.runPromiseExit(uploadEffect(file, upload.presigned), {
                    signal: upload.deferred.ac.signal
                }).then((result)=>{
                    if (result._tag === "Success") {
                        return upload.deferred.resolve(result.value);
                    } else if (result.cause._tag === "Interrupt") {
                        throw new shared.UploadPausedError();
                    }
                    throw Micro__namespace.causeSquash(result.cause);
                }).catch((err)=>{
                    if (err instanceof shared.UploadPausedError) return;
                    upload.deferred.reject(err);
                });
            }
        };
        /**
         * Wait for an upload to complete
         * @param file The file upload you want to wait for. Can be omitted to wait for all files
         */ const done = async (file)=>{
            const promises = [];
            const files = Arr__namespace.ensure(file ?? opts.files);
            for (const file of files){
                const upload = uploads.get(file);
                if (!upload) throw "No upload found";
                promises.push(upload.deferred.promise);
            }
            const results = await Promise.all(promises);
            // A single-file call resolves to that one result, not an array.
            return file ? results[0] : results;
        };
        return {
            pauseUpload,
            resumeUpload,
            done
        };
    };
    /**
     * One step upload function that both requests presigned URLs
     * and then uploads the files to UploadThing
     */ const typedUploadFiles = (slug, opts)=>{
        const endpoint = typeof slug === "function" ? slug(routeRegistry) : slug;
        return uploadFilesInternal(endpoint, {
            ...opts,
            skipPolling: {},
            url: shared.resolveMaybeUrlArg(initOpts?.url),
            package: initOpts.package,
            // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
            input: opts.input
        }).pipe((effect)=>Micro__namespace.runPromiseExit(effect, opts.signal && {
            signal: opts.signal
        })).then((exit)=>{
            if (exit._tag === "Success") {
                return exit.value;
            } else if (exit.cause._tag === "Interrupt") {
                // The caller's abort signal interrupted the upload.
                throw new shared.UploadAbortedError();
            }
            throw Micro__namespace.causeSquash(exit.cause);
        });
    };
    return {
        uploadFiles: typedUploadFiles,
        createUpload: controllableUpload,
        /**
         * Identity object that can be used instead of raw strings
         * that allows "Go to definition" in your IDE to bring you
         * to the backend definition of a route.
         */ routeRegistry
    };
};
// Re-exports from @uploadthing/shared, exposed as live getter bindings so
// consumers can import these names directly from this package.
Object.defineProperty(exports, "UploadAbortedError", {
    enumerable: true,
    get: function () { return shared.UploadAbortedError; }
});
Object.defineProperty(exports, "UploadPausedError", {
    enumerable: true,
    get: function () { return shared.UploadPausedError; }
});
Object.defineProperty(exports, "generateClientDropzoneAccept", {
    enumerable: true,
    get: function () { return shared.generateClientDropzoneAccept; }
});
Object.defineProperty(exports, "generateMimeTypes", {
    enumerable: true,
    get: function () { return shared.generateMimeTypes; }
});
Object.defineProperty(exports, "generatePermittedFileTypes", {
    enumerable: true,
    get: function () { return shared.generatePermittedFileTypes; }
});
// Package-local public API.
exports.genUploader = genUploader;
exports.isValidFileSize = isValidFileSize;
exports.isValidFileType = isValidFileType;
exports.version = version;