benchling_typescript_sdk
Version:
Typescript SDK for Benchling API
297 lines (264 loc) • 10.4 kB
text/typescript
import { BaseClient } from "../BaseClient";
import { chunkQueries } from "../superModules/chunkQueries";
import type {
Container,
ContainersContent,
ContainersArchivalChange,
ContainerContentsList,
ContainerPathGetParamQuery,
ContainerConentPathGetParamPath,
MultipleContainersTransfer,
TransfersAsyncTask,
MultipleContainersTransfersList,
ContainerUpdate,
ContainersArchive,
Measurement,
ContainerBulkUpdateItem,
AsyncTaskLink,
BulkUpdateContainersAsyncTask,
} from "../types";
// Union of every valid query-parameter key accepted by the containers GET endpoint.
export type LimitedContainerQueries = keyof NonNullable<ContainerPathGetParamQuery>;
// Query keys whose list values must be chunked before sending: listContainersNoLimits
// passes these to chunkQueries so that no single request carries more values per key
// than the configured chunk size (default 100 — see listContainersNoLimits).
const limitedContainerSearches: LimitedContainerQueries[] = [
"names.anyOf",
"names.anyOf.caseSensitive",
"barcodes",
"ids",
"sampleOwnerIds.allOf",
"sampleOwnerIds.anyOf",
"sampleOwnerIds.noneOf",
"restrictedSamplePartyIds.allOf",
"restrictedSamplePartyIds.anyOf",
"restrictedSamplePartyIds.noneOf",
];
/**
 * Client wrapper for the Benchling Containers API: listing, bulk-updating,
 * archiving containers, managing container contents, and transferring
 * contents between containers.
 */
export class Containers {
  private client: BaseClient;

  constructor(client: BaseClient) {
    this.client = client;
  }

  /**
   * Bulk-updates containers via the `containers:bulk-update` endpoint,
   * splitting the input into chunks of at most 1000 items (the per-request
   * maximum) and yielding each completed chunk's updated containers.
   *
   * @param bulkUpdateContainers Items to update. An empty array yields nothing.
   * @throws If the input is null/undefined, a chunk fails to return a task id,
   *         the async task errors out, or a completed task has no containers.
   */
  public async *bulkUpdateContainers(
    bulkUpdateContainers: ContainerBulkUpdateItem[]
  ): AsyncGenerator<Container[]> {
    if (!bulkUpdateContainers) {
      throw new Error("bulkUpdateContainers(): No containers provided for bulk update.");
    }
    const chunkSize = 1000; // Max 1000 items per request
    const endpoint = "containers:bulk-update";
    // Process the input in order, one chunk per request.
    for (let i = 0; i < bulkUpdateContainers.length; i += chunkSize) {
      const chunk = bulkUpdateContainers.slice(i, i + chunkSize);
      // Send the chunk to the endpoint
      const taskResponse = await this.client.postData<AsyncTaskLink>(
        endpoint,
        { containers: chunk },
        {}
      );
      if (!taskResponse || !taskResponse.taskId) {
        throw new Error(
          `Failed to initiate bulk update for containers. Response: ${JSON.stringify(taskResponse)}`
        );
      }
      // Wait for the task to complete. Only awaitForTask is wrapped here so
      // that the validation error thrown below keeps its own message instead
      // of being re-wrapped as an "awaiting task" failure.
      let task: BulkUpdateContainersAsyncTask;
      try {
        task = await this.client.awaitForTask<BulkUpdateContainersAsyncTask>(
          taskResponse.taskId
        );
      } catch (error) {
        throw new Error(
          `bulkUpdateContainers(): Error while awaiting task completion. Task ID: ${taskResponse.taskId}. Error: ${error}`
        );
      }
      if (task.containers) {
        yield task.containers; // Yield the resolved containers for this chunk
      } else {
        console.error(task.errors);
        console.error(task.message);
        throw new Error(
          `bulkUpdateContainers(): Task response for bulk update does not contain containers. Task ID: ${taskResponse.taskId}`
        );
      }
    }
  }

  /**
   * Deletes a specific content (containable) from a container.
   *
   * @throws If the delete call returns a falsy response.
   */
  public async deleteContainerContent(container_id: string, containable_id: string) {
    const endpoint = `containers/${container_id}/contents/${containable_id}`;
    const response = await this.client.delete(endpoint);
    if (!response) {
      throw new Error(`Failed to delete content ${containable_id} from container ${container_id}`);
    }
    return response;
  }

  /**
   * Lists containers while working around per-key query-value limits:
   * length-limited list parameters (see limitedContainerSearches) are split
   * into chunks of at most `chunkSize` values, and listContainers is invoked
   * once per chunked query.
   *
   * @param paramQuery Query parameters; must be a non-null object.
   * @param chunkSize  Max values per limited query key (default 100).
   */
  public async *listContainersNoLimits(
    paramQuery: ContainerPathGetParamQuery,
    chunkSize: number = 100 // Default chunk size
  ): AsyncGenerator<Container[]> {
    if (!paramQuery || typeof paramQuery !== "object") {
      throw new Error("Invalid query parameters provided.");
    }
    // Generate the list of chunked queries so no limited key exceeds chunkSize values.
    const paramQueries: ContainerPathGetParamQuery[] = chunkQueries<
      NonNullable<ContainerPathGetParamQuery>
    >(paramQuery, chunkSize, limitedContainerSearches);
    for (const query of paramQueries) {
      // Delegate to listContainers and re-yield each page of results.
      for await (const containers of this.listContainers(query)) {
        yield containers;
      }
    }
  }

  /** Streams pages of containers matching the given query parameters. */
  public listContainers(parameters: ContainerPathGetParamQuery): AsyncGenerator<Container[]> {
    return this.client.fetchPagesIterator<Container>("containers", parameters);
  }

  /** Fetches the contents of a single container; returns [] when the response has none. */
  public async listContainerContents(
    parameters: ContainerConentPathGetParamPath
  ): Promise<ContainersContent[]> {
    const { container_id } = parameters;
    const endpoint = `containers/${container_id}/contents`;
    const data = await this.client.fetchData<ContainerContentsList>(endpoint);
    return data.contents ?? [];
  }

  /** Applies a partial update to a single container. */
  public async patchContainer(
    container_id: string,
    container_update: ContainerUpdate
  ): Promise<Container> {
    return this.client.patchData<Container>(`containers/${container_id}`, container_update, {});
  }

  /**
   * Patches the concentration of a specific content within a container.
   *
   * @throws If the patch call returns a falsy response.
   */
  public async patchContainerContent(
    container_id: string,
    containable_id: string,
    concentration: Measurement
  ) {
    const endpoint = `containers/${container_id}/contents/${containable_id}`;
    const response = await this.client.patchData<Container>(
      endpoint,
      { concentration: concentration },
      {}
    );
    if (!response) {
      throw new Error(`Failed to patch content ${containable_id} in container ${container_id}`);
    }
    return response;
  }

  /** Archives the given containers via the `containers:archive` endpoint. */
  public async archiveContainers(
    containerArchive: ContainersArchive
  ): Promise<ContainersArchivalChange> {
    return this.client.postData<ContainersArchivalChange>(
      "containers:archive",
      containerArchive,
      {}
    );
  }

  /**
   * Executes all transfers in sequential chunks and returns the final state
   * of every destination container (deduplicated to the most recent version).
   *
   * @param transfers     Transfers to perform.
   * @param transfersSize Chunk size; clamped to [1, 5000]. Previously a
   *                      zero/negative value caused an infinite loop.
   */
  public async waitForAllTransfers(
    transfers: MultipleContainersTransfer[],
    transfersSize = 1000 // max 5000
  ): Promise<Container[]> {
    // Clamp to the API maximum of 5000 and guard against zero/negative or
    // fractional sizes, which would otherwise stall the chunking loop below.
    const size = Math.min(Math.max(1, Math.floor(transfersSize)), 5000);
    // divide transfers into chunks of size `size`
    const chunkedTransfers: MultipleContainersTransfer[][] = [];
    for (let i = 0; i < transfers.length; i += size) {
      chunkedTransfers.push(transfers.slice(i, i + size));
    }
    const allContainers: Container[] = [];
    for (const chunk of chunkedTransfers) {
      // Wait for each chunk to resolve sequentially
      const containers = await this.waitForTransfers({ transfers: chunk });
      allContainers.push(...containers);
    }
    // Many containers are returned more than once because multiple items are
    // transferred into them; keep only the most recent version of each.
    return this.getMostRecentContainers(allContainers);
  }

  /**
   * POSTs one batch of transfers, waits for the resulting async task, and
   * returns the destination containers.
   *
   * @throws With contextual detail on HTTP failure, unparseable JSON, a
   *         missing taskId, or a task result without destination containers.
   */
  public async waitForTransfers(transfers: MultipleContainersTransfersList): Promise<Container[]> {
    const taskTimeout = 10000 + transfers.transfers.length * 5; // 10 seconds baseline plus 5 extra milliseconds per transfer
    const endpoint = "transfers";
    try {
      // Make the POST request
      const response = await fetch(`${this.client.baseUrl}${endpoint}`, {
        method: "POST",
        headers: this.client.headers,
        body: JSON.stringify(transfers),
      });
      // Read the body exactly once as text, then parse. A fetch body is
      // single-use: calling json() and then text() in the error path would
      // itself throw a "body already used" error and mask the real failure.
      const rawBody = await response.text();
      let data: { taskId?: string; error?: any };
      try {
        data = JSON.parse(rawBody) as { taskId?: string; error?: any };
      } catch (jsonError) {
        throw new Error(
          `Failed to parse JSON response from ${endpoint}. HTTP Status: ${response.status} ${response.statusText}. Response Body: ${rawBody}`
        );
      }
      // Check if the response is not OK
      if (!response.ok) {
        console.error("Error in transfer:", data);
        throw new Error(
          `\nFAILED to initiate transfer:\n${JSON.stringify(data.error || data)}\n\n${
            response.statusText
          }`
        );
      }
      // Validate that the taskId exists in the response
      if (!data.taskId) {
        throw new Error(
          `Unexpected response format from ${endpoint}. Missing "taskId". Response: ${JSON.stringify(
            data
          )}`
        );
      }
      // Wait for the task to complete
      const taskResponse = await this.client.awaitForTask<
        NonNullable<TransfersAsyncTask["response"]>
      >(data.taskId, taskTimeout);
      // Validate the task response
      if (taskResponse?.destinationContainers) {
        return taskResponse.destinationContainers;
      } else {
        throw new Error(
          "Internal Program Error: waitForTransfers(): Task response for new transfers does not contain destination containers"
        );
      }
    } catch (error) {
      // Re-throw with the offending payload for easier debugging upstream.
      throw new Error(
        `Error in waitForTransfers with transfers: ${JSON.stringify(transfers)}: ${
          error instanceof Error ? error.message : String(error)
        }`
      );
    }
  }

  /**
   * Deduplicates containers by id, keeping for each id the container with the
   * most recent modifiedAt timestamp.
   *
   * @throws If any container is missing id or modifiedAt.
   */
  public getMostRecentContainers(allContainers: Container[]): Container[] {
    const containerMap = new Map<string, Container>();
    for (const container of allContainers) {
      // Validate that id and modifiedAt are not undefined or null
      if (!container.id || !container.modifiedAt) {
        throw new Error(
          `Invalid container data: id or modifiedAt is missing. Container: ${JSON.stringify(
            container
          )}`
        );
      }
      const existingContainer = containerMap.get(container.id);
      // Keep the newcomer when it is the first seen or strictly newer.
      if (
        !existingContainer ||
        (existingContainer.modifiedAt &&
          new Date(container.modifiedAt) > new Date(existingContainer.modifiedAt))
      ) {
        containerMap.set(container.id, container);
      }
    }
    // Return the most recent containers as an array
    return Array.from(containerMap.values());
  }
}