@veltdev/sdk
Version:
Velt is an SDK that adds collaborative features to your product within minutes. Examples: comments like Figma, Frame.io, or Google Docs/Sheets; recording like Loom; huddles like Slack; and much more.
239 lines (238 loc) • 6.2 kB
TypeScript
import { RecorderFileFormat, RecorderLayoutMode, RecorderType } from "../../utils/enums";
import { Attachment } from "./attachment.model";
import { BaseMetadata } from "./base-metadata.data.model";
import { CursorPosition } from "./cursor-position.data.model";
import { Location } from "./location.model";
import { PageInfo } from "./page-info.model";
import { TargetElement } from "./target-element.data.model";
import { Transcription } from "./transcription.data.model";
import { User } from "./user.data.model";
/**
 * A recorder annotation: a single recording together with who created it,
 * where it is pinned on the page, and the media/transcription data
 * produced from it.
 */
export declare class RecorderAnnotation {
/**
 * Unique identifier for the recorder pin annotation.
 *
 * Auto generated.
 */
annotationId: string;
/**
 * Id of the connected comment annotation, when this recording is a
 * reaction to a comment.
 */
commentAnnotationId?: string;
/**
 * The user who created this recorder pin annotation.
 *
 */
from: User;
/**
 * Color used for the recorder pin annotation.
 *
 */
color?: string;
/**
 * Timestamp when the recorder annotation was last updated.
 *
 * Auto generated.
 */
lastUpdated?: any;
/**
 * Recorder annotation's position on the X axis.
 *
 * Auto generated.
 */
positionX?: number;
/**
 * Recorder annotation's position on the Y axis.
 *
 * Auto generated.
 */
positionY?: number;
/**
 * User's screen width.
 *
 * Auto generated.
 */
screenWidth?: number;
/**
 * User's screen height.
 *
 * Auto generated.
 */
screenHeight?: number;
/**
 * User's screen scroll height.
 *
 * Auto generated.
 */
screenScrollHeight?: number;
/**
 * User's screen scroll top (vertical scroll offset).
 * NOTE(review): previous comment said "scroll height", duplicated from
 * the field above.
 *
 * Auto generated.
 */
screenScrollTop?: number;
/**
 * Xpath of the element that was clicked.
 *
 * Auto generated.
 */
recorderedElementPath?: string;
/**
 * Bounding rect of the element that was clicked — presumably a
 * DOMRect-like object; TODO confirm exact shape.
 * NOTE(review): previous comment said "Xpath", copied from the field above.
 *
 * Auto generated.
 */
recorderedElementRect?: any;
/** Target DOM element the annotation is anchored to, if any. */
targetElement?: TargetElement | null;
/** Cursor position associated with the annotation, if any. */
position?: CursorPosition | null;
/**
 * Unique location id generated from provided location
 */
locationId?: number | null;
/**
 * Set location to identify user on sub document
 */
location?: Location | null;
/** Annotation type — free-form string; exact values are not visible here. */
type?: string;
/** Kind of recording (see the RecorderType enum). */
recordingType: RecorderType;
/** Layout mode used by the recorder UI (see the RecorderLayoutMode enum). */
mode: RecorderLayoutMode;
/** Whether the recording has been approved — TODO confirm workflow semantics. */
approved?: boolean;
/**
 * Attachment object of recorded media
 * @deprecated Set attachment in attachments array
 */
attachment: Attachment | null;
/**
 * List of attachments for the annotation.
 * It will be added when recording is converted to other formats.
 */
attachments: Attachment[];
/**
 * To maintain index of current annotation in available list of annotations.
 * It is 1-based, so there is no need to add 1 to it.
 */
annotationIndex?: number;
/** Info about the page this annotation was created on. */
pageInfo?: PageInfo;
/**
 * Recorded time in milliseconds and default (hh:mm:ss) display format
 */
recordedTime?: {
duration?: number;
display?: string;
} | null;
/**
 * Transcription of the recorded media
 */
transcription?: Transcription;
/**
 * Waveform data for the annotation
 */
waveformData?: number[];
/**
 * Display name for the annotation
 */
displayName?: string;
/** Arbitrary app-defined metadata attached to this annotation. */
metadata?: RecorderMetadata;
/**
 * Current version of the recording edit
 */
latestVersion?: number;
/**
 * History of all editing versions of this recording.
 * Maps version number to version data.
 */
recordingEditVersions?: {
[key: number]: RecorderAnnotationEditVersion;
};
/**
 * URLs for individual uploaded chunks, keyed by chunk index.
 */
chunkUrls?: {
[key: number]: string;
};
}
/**
 * One saved version of a recording edit. Fields mirror the corresponding
 * RecorderAnnotation fields as they were when this version was created.
 */
export interface RecorderAnnotationEditVersion {
/** User who created this edit version. */
from?: User;
/** When this edit version was created. */
createdAt?: Date;
/** Attachment of the recorded media for this version — presumably superseded by `attachments`, as on RecorderAnnotation; confirm. */
attachment?: Attachment | null;
/** Attachments of the recorded media for this version. */
attachments?: Attachment[];
/** Recorded time in milliseconds plus (hh:mm:ss) display format. */
recordedTime?: {
duration?: number;
display?: string;
} | null;
/** Transcription of this version's recorded media. */
transcription?: Transcription;
/** Waveform data for this version. */
waveformData?: number[];
/** Display name for this version. */
displayName?: string;
/** Trim ranges applied in this edit version. */
boundedTrimRanges?: RecorderBoundedTrimRange[];
/** Scale (zoom) ranges applied in this edit version. */
boundedScaleRanges?: RecorderBoundedScaleRange[];
}
/**
 * A trim range on the recording timeline.
 * NOTE(review): units are presumably milliseconds (matching
 * recordedTime.duration) — confirm.
 */
export interface RecorderBoundedTrimRange {
/** Start of the trimmed range. */
start: number;
/** End of the trimmed range. */
end: number;
}
/**
 * A scale (zoom) range on the recording timeline: zoom in, hold, then
 * zoom out, centered on a point of the frame.
 * NOTE(review): time units presumably milliseconds and center/top-left
 * coordinates presumably normalized (0..1) given the separate *Pixels
 * fields — confirm against the recorder implementation.
 */
export interface RecorderBoundedScaleRange {
/** Start of the scaled range on the timeline. */
start: number;
/** End of the scaled range on the timeline. */
end: number;
/** Duration of the zoom-in transition. */
zoomInDuration?: number;
/** Duration the zoom level is held. */
holdDuration?: number;
/** Duration of the zoom-out transition. */
zoomOutDuration?: number;
/** Magnification factor applied during the range. */
zoomFactor?: number;
/** X coordinate of the zoom center. */
centerX?: number;
/** Y coordinate of the zoom center. */
centerY?: number;
/** X coordinate of the zoomed region's top-left corner. */
topLeftX?: number;
/** Y coordinate of the zoomed region's top-left corner. */
topLeftY?: number;
/** Top-left X in pixels. */
topLeftXPixels?: number;
/** Top-left Y in pixels. */
topLeftYPixels?: number;
}
/**
 * The full set of edits applied to a recording: trim ranges plus
 * scale (zoom) ranges.
 */
export interface RecorderEditRange {
/** Ranges cut out of (or kept in) the recording. */
trimRanges: RecorderBoundedTrimRange[];
/** Zoom ranges applied to the recording. */
scaleRanges: RecorderBoundedScaleRange[];
}
export declare class RecorderMetadata extends BaseMetadata {
[key: string]: any;
}
/**
 * One time-aligned segment of a recording's transcript.
 * (Not exported — internal to RecorderDataTranscription.)
 */
declare class RecorderDataTranscriptSegment {
/** Segment start as a display string — presumably hh:mm:ss; confirm. */
startTime: string;
/** Segment end as a display string — presumably hh:mm:ss; confirm. */
endTime: string;
/** Segment start in seconds. */
startTimeInSeconds: number;
/** Segment end in seconds. */
endTimeInSeconds: number;
/** Transcribed text of the segment. */
text: string;
}
/**
 * Transcription of a recording: time-aligned segments, a WebVTT subtitle
 * file, and an overall content summary.
 */
declare class RecorderDataTranscription {
/** Time-aligned transcript segments. */
transcriptSegments?: RecorderDataTranscriptSegment[];
/** URL of the generated WebVTT subtitle file. */
vttFileUrl?: string;
/** Summary of the recording's content. */
contentSummary?: string;
}
/**
 * A single media asset (one file/format/version) produced from a recording.
 * (Not exported — internal to RecorderData.)
 */
declare class RecorderDataAsset {
/** Edit version this asset belongs to — presumably matches RecorderAnnotation.latestVersion numbering; confirm. */
version?: number;
/** URL of the media file. */
url: string;
/** MIME type of the media file. */
mimeType?: string;
/** Name of the media file. */
fileName?: string;
/** File size in bytes */
fileSizeInBytes?: number;
/**
 * The format/extension of the file
 * @example 'mp3', 'mp4', 'webm'
 */
fileFormat?: RecorderFileFormat;
/** URL of the asset's thumbnail image. */
thumbnailUrl?: string;
/** Transcription associated with this asset. */
transcription: RecorderDataTranscription;
}
/**
 * All data for one recording: its assets (current and historical
 * versions), the transcription, and metadata.
 */
export declare class RecorderData {
/** Unique id of the recording. */
recorderId: string;
/** User who created the recording, if known. */
from?: User | null;
/** App-defined metadata attached to the recording. */
metadata?: RecorderMetadata;
/** Media assets for the current version. */
assets: RecorderDataAsset[];
/** Media assets across all edit versions. */
assetsAllVersions: RecorderDataAsset[];
/** Transcription of the recording. */
transcription: RecorderDataTranscription;
}
/** Query payload for fetching recordings by id. */
export interface RecorderRequestQuery {
/** Ids of the recordings to fetch. */
recorderIds: string[];
}
/** Response wrapper for a get-recording-data request. */
export interface GetRecordingDataResponse {
/** Recording data keyed by id (presumably recorderId — confirm), or null when nothing was found. */
data: Record<string, RecorderData> | null;
}
/** Response item for a get-recordings request; identical in shape to RecorderData. */
export interface GetRecordingsResponse extends RecorderData {
}
/** Response item for a delete-recordings request; identical in shape to RecorderData. */
export interface DeleteRecordingsResponse extends RecorderData {
}
export {};