// tns-platform-declarations
// Version: (not captured in this extract)
// Platform-specific TypeScript declarations for NativeScript, used to access native iOS objects.
// 1,730 lines (911 loc) • 273 kB
// TypeScript
/**
 * Ambient declaration for AVFoundation's `AVAggregateAssetDownloadTask` as
 * projected into JavaScript by the NativeScript iOS runtime.
 * Semantics are defined by AVFoundation (see Apple docs), not by this file.
 */
declare class AVAggregateAssetDownloadTask extends NSURLSessionTask {
static alloc(): AVAggregateAssetDownloadTask; // inherited from NSObject
static new(): AVAggregateAssetDownloadTask; // inherited from NSObject
/** The asset this aggregate download task operates on. */
readonly URLAsset: AVURLAsset;
}
/**
 * Ambient declaration for AVFoundation's `AVAsset` — the model object for
 * timed audiovisual media. Property values are lazily loaded by the native
 * side; use `loadValuesAsynchronouslyForKeysCompletionHandler` /
 * `statusOfValueForKeyError` (AVAsynchronousKeyValueLoading) before reading
 * them synchronously. Member semantics per Apple's AVFoundation docs.
 */
declare class AVAsset extends NSObject implements AVAsynchronousKeyValueLoading, NSCopying {
static alloc(): AVAsset; // inherited from NSObject
/** Factory: creates an asset for the media at `URL`. */
static assetWithURL(URL: NSURL): AVAsset;
static new(): AVAsset; // inherited from NSObject
// --- Readonly inspection properties (lazily loaded natively) ---
readonly allMediaSelections: NSArray<AVMediaSelection>;
readonly availableChapterLocales: NSArray<NSLocale>;
readonly availableMediaCharacteristicsWithMediaSelectionOptions: NSArray<string>;
readonly availableMetadataFormats: NSArray<string>;
readonly canContainFragments: boolean;
readonly commonMetadata: NSArray<AVMetadataItem>;
readonly compatibleWithAirPlayVideo: boolean;
readonly compatibleWithSavedPhotosAlbum: boolean;
readonly composable: boolean;
readonly containsFragments: boolean;
readonly creationDate: AVMetadataItem;
readonly duration: CMTime;
readonly exportable: boolean;
readonly hasProtectedContent: boolean;
readonly lyrics: string;
readonly metadata: NSArray<AVMetadataItem>;
readonly minimumTimeOffsetFromLive: CMTime;
readonly naturalSize: CGSize;
readonly overallDurationHint: CMTime;
readonly playable: boolean;
readonly preferredMediaSelection: AVMediaSelection;
readonly preferredRate: number;
readonly preferredTransform: CGAffineTransform;
readonly preferredVolume: number;
readonly providesPreciseDurationAndTiming: boolean;
readonly readable: boolean;
readonly referenceRestrictions: AVAssetReferenceRestrictions;
readonly trackGroups: NSArray<AVAssetTrackGroup>;
readonly tracks: NSArray<AVAssetTrack>;
// --- Methods ---
cancelLoading(): void;
chapterMetadataGroupsBestMatchingPreferredLanguages(preferredLanguages: NSArray<string> | string[]): NSArray<AVTimedMetadataGroup>;
chapterMetadataGroupsWithTitleLocaleContainingItemsWithCommonKeys(locale: NSLocale, commonKeys: NSArray<string> | string[]): NSArray<AVTimedMetadataGroup>;
copyWithZone(zone: interop.Pointer | interop.Reference<any>): any;
/** Asynchronously loads the named property keys, then invokes `handler`. */
loadValuesAsynchronouslyForKeysCompletionHandler(keys: NSArray<string> | string[], handler: () => void): void;
mediaSelectionGroupForMediaCharacteristic(mediaCharacteristic: string): AVMediaSelectionGroup;
metadataForFormat(format: string): NSArray<AVMetadataItem>;
/** Reports the load status of `key` (native out-error is dropped by the bridge). */
statusOfValueForKeyError(key: string): AVKeyValueStatus;
trackWithTrackID(trackID: number): AVAssetTrack;
tracksWithMediaCharacteristic(mediaCharacteristic: string): NSArray<AVAssetTrack>;
tracksWithMediaType(mediaType: string): NSArray<AVAssetTrack>;
unusedTrackID(): number;
}
/**
 * Ambient declaration for `AVAssetCache`, which reports the local caching
 * state of an asset's media (see Apple's AVFoundation docs).
 */
declare class AVAssetCache extends NSObject {
static alloc(): AVAssetCache; // inherited from NSObject
static new(): AVAssetCache; // inherited from NSObject
/** True when the cached media suffices for offline playback. */
readonly playableOffline: boolean;
mediaSelectionOptionsInMediaSelectionGroup(mediaSelectionGroup: AVMediaSelectionGroup): NSArray<AVMediaSelectionOption>;
}
// Bridged NSNotification name constants posted by AVAsset (string values supplied natively).
declare var AVAssetChapterMetadataGroupsDidChangeNotification: string;
declare var AVAssetContainsFragmentsDidChangeNotification: string;
/**
 * Delegate protocol for asset download sessions. All members are optional,
 * mirroring the native `@optional` protocol methods. Implement on a JS object
 * passed as the delegate of an `AVAssetDownloadURLSession`.
 */
interface AVAssetDownloadDelegate extends NSURLSessionTaskDelegate {
URLSessionAggregateAssetDownloadTaskDidCompleteForMediaSelection?(session: NSURLSession, aggregateAssetDownloadTask: AVAggregateAssetDownloadTask, mediaSelection: AVMediaSelection): void;
URLSessionAggregateAssetDownloadTaskDidLoadTimeRangeTotalTimeRangesLoadedTimeRangeExpectedToLoadForMediaSelection?(session: NSURLSession, aggregateAssetDownloadTask: AVAggregateAssetDownloadTask, timeRange: CMTimeRange, loadedTimeRanges: NSArray<NSValue> | NSValue[], timeRangeExpectedToLoad: CMTimeRange, mediaSelection: AVMediaSelection): void;
URLSessionAggregateAssetDownloadTaskWillDownloadToURL?(session: NSURLSession, aggregateAssetDownloadTask: AVAggregateAssetDownloadTask, location: NSURL): void;
URLSessionAssetDownloadTaskDidFinishDownloadingToURL?(session: NSURLSession, assetDownloadTask: AVAssetDownloadTask, location: NSURL): void;
URLSessionAssetDownloadTaskDidLoadTimeRangeTotalTimeRangesLoadedTimeRangeExpectedToLoad?(session: NSURLSession, assetDownloadTask: AVAssetDownloadTask, timeRange: CMTimeRange, loadedTimeRanges: NSArray<NSValue> | NSValue[], timeRangeExpectedToLoad: CMTimeRange): void;
URLSessionAssetDownloadTaskDidResolveMediaSelection?(session: NSURLSession, assetDownloadTask: AVAssetDownloadTask, resolvedMediaSelection: AVMediaSelection): void;
}
/** Runtime companion object exposing the protocol's prototype (NativeScript convention). */
declare var AVAssetDownloadDelegate: {
prototype: AVAssetDownloadDelegate;
};
/**
 * Immutable policy describing how a downloaded asset may be evicted from
 * storage; mutate via its NSMutableCopying counterpart (see Apple docs).
 */
declare class AVAssetDownloadStorageManagementPolicy extends NSObject implements NSCopying, NSMutableCopying {
static alloc(): AVAssetDownloadStorageManagementPolicy; // inherited from NSObject
static new(): AVAssetDownloadStorageManagementPolicy; // inherited from NSObject
/** Date after which the downloaded asset becomes eligible for eviction. */
readonly expirationDate: Date;
/** One of the AVAssetDownloadedAssetEvictionPriority* string constants. */
readonly priority: string;
copyWithZone(zone: interop.Pointer | interop.Reference<any>): any;
mutableCopyWithZone(zone: interop.Pointer | interop.Reference<any>): any;
}
/**
 * Singleton manager for storage-management policies of downloaded assets.
 * Obtain via `sharedDownloadStorageManager()`.
 */
declare class AVAssetDownloadStorageManager extends NSObject {
static alloc(): AVAssetDownloadStorageManager; // inherited from NSObject
static new(): AVAssetDownloadStorageManager; // inherited from NSObject
static sharedDownloadStorageManager(): AVAssetDownloadStorageManager;
setStorageManagementPolicyForURL(storageManagementPolicy: AVAssetDownloadStorageManagementPolicy, downloadStorageURL: NSURL): void;
storageManagementPolicyForURL(downloadStorageURL: NSURL): AVAssetDownloadStorageManagementPolicy;
}
/**
 * URL-session task that downloads a single `AVURLAsset` for offline use
 * (see Apple's AVFoundation docs for lifecycle details).
 */
declare class AVAssetDownloadTask extends NSURLSessionTask {
static alloc(): AVAssetDownloadTask; // inherited from NSObject
static new(): AVAssetDownloadTask; // inherited from NSObject
readonly URLAsset: AVURLAsset;
readonly destinationURL: NSURL;
/** Time ranges downloaded so far, as NSValue-wrapped CMTimeRanges. */
readonly loadedTimeRanges: NSArray<NSValue>;
/** Options the task was created with (AVAssetDownloadTask*Key entries). */
readonly options: NSDictionary<string, any>;
}
// Keys for the options dictionary passed when creating asset-download tasks.
declare var AVAssetDownloadTaskMediaSelectionKey: string;
declare var AVAssetDownloadTaskMediaSelectionPrefersMultichannelKey: string;
declare var AVAssetDownloadTaskMinimumRequiredMediaBitrateKey: string;
/**
 * URL session specialized for creating asset-download tasks. Create with the
 * factory method below; the delegate receives AVAssetDownloadDelegate callbacks.
 */
declare class AVAssetDownloadURLSession extends NSURLSession {
static alloc(): AVAssetDownloadURLSession; // inherited from NSObject
static new(): AVAssetDownloadURLSession; // inherited from NSObject
static sessionWithConfigurationAssetDownloadDelegateDelegateQueue(configuration: NSURLSessionConfiguration, delegate: AVAssetDownloadDelegate, delegateQueue: NSOperationQueue): AVAssetDownloadURLSession;
/** Creates a task downloading several media selections of one asset. */
aggregateAssetDownloadTaskWithURLAssetMediaSelectionsAssetTitleAssetArtworkDataOptions(URLAsset: AVURLAsset, mediaSelections: NSArray<AVMediaSelection> | AVMediaSelection[], title: string, artworkData: NSData, options: NSDictionary<string, any>): AVAggregateAssetDownloadTask;
assetDownloadTaskWithURLAssetAssetTitleAssetArtworkDataOptions(URLAsset: AVURLAsset, title: string, artworkData: NSData, options: NSDictionary<string, any>): AVAssetDownloadTask;
assetDownloadTaskWithURLAssetDestinationURLOptions(URLAsset: AVURLAsset, destinationURL: NSURL, options: NSDictionary<string, any>): AVAssetDownloadTask;
}
// Eviction-priority values for AVAssetDownloadStorageManagementPolicy.priority.
declare var AVAssetDownloadedAssetEvictionPriorityDefault: string;
declare var AVAssetDownloadedAssetEvictionPriorityImportant: string;
// Notification posted when an asset's duration changes (fragmented assets).
declare var AVAssetDurationDidChangeNotification: string;
// Preset name constants accepted by AVAssetExportSession (resolution / codec / quality variants).
declare var AVAssetExportPreset1280x720: string;
declare var AVAssetExportPreset1920x1080: string;
declare var AVAssetExportPreset3840x2160: string;
declare var AVAssetExportPreset640x480: string;
declare var AVAssetExportPreset960x540: string;
declare var AVAssetExportPresetAppleM4A: string;
declare var AVAssetExportPresetHEVC1920x1080: string;
declare var AVAssetExportPresetHEVC1920x1080WithAlpha: string;
declare var AVAssetExportPresetHEVC3840x2160: string;
declare var AVAssetExportPresetHEVC3840x2160WithAlpha: string;
declare var AVAssetExportPresetHEVCHighestQuality: string;
declare var AVAssetExportPresetHEVCHighestQualityWithAlpha: string;
declare var AVAssetExportPresetHighestQuality: string;
declare var AVAssetExportPresetLowQuality: string;
declare var AVAssetExportPresetMediumQuality: string;
declare var AVAssetExportPresetPassthrough: string;
/**
 * Transcodes an `AVAsset` to a new file using a named preset. Configure the
 * writable properties, then call `exportAsynchronouslyWithCompletionHandler`;
 * observe `status` / `progress` / `error` for the outcome (see Apple docs).
 */
declare class AVAssetExportSession extends NSObject {
static allExportPresets(): NSArray<string>;
static alloc(): AVAssetExportSession; // inherited from NSObject
static determineCompatibilityOfExportPresetWithAssetOutputFileTypeCompletionHandler(presetName: string, asset: AVAsset, outputFileType: string, handler: (p1: boolean) => void): void;
static exportPresetsCompatibleWithAsset(asset: AVAsset): NSArray<string>;
static exportSessionWithAssetPresetName(asset: AVAsset, presetName: string): AVAssetExportSession;
static new(): AVAssetExportSession; // inherited from NSObject
readonly asset: AVAsset;
audioMix: AVAudioMix;
audioTimePitchAlgorithm: string;
canPerformMultiplePassesOverSourceMediaData: boolean;
readonly customVideoCompositor: AVVideoCompositing;
directoryForTemporaryFiles: NSURL;
/** Populated when `status` is Failed. */
readonly error: NSError;
readonly estimatedOutputFileLength: number;
fileLengthLimit: number;
readonly maxDuration: CMTime;
metadata: NSArray<AVMetadataItem>;
metadataItemFilter: AVMetadataItemFilter;
/** Must be set (with `outputURL`) before starting the export. */
outputFileType: string;
outputURL: NSURL;
readonly presetName: string;
readonly progress: number;
shouldOptimizeForNetworkUse: boolean;
readonly status: AVAssetExportSessionStatus;
readonly supportedFileTypes: NSArray<string>;
timeRange: CMTimeRange;
videoComposition: AVVideoComposition;
/** Bridged initializer: `initWithAsset:presetName:`. */
constructor(o: { asset: AVAsset; presetName: string; });
cancelExport(): void;
determineCompatibleFileTypesWithCompletionHandler(handler: (p1: NSArray<string>) => void): void;
estimateMaximumDurationWithCompletionHandler(handler: (p1: CMTime, p2: NSError) => void): void;
estimateOutputFileLengthWithCompletionHandler(handler: (p1: number, p2: NSError) => void): void;
exportAsynchronouslyWithCompletionHandler(handler: () => void): void;
initWithAssetPresetName(asset: AVAsset, presetName: string): this;
}
/** States of an AVAssetExportSession; values mirror the native enum. */
declare const enum AVAssetExportSessionStatus {
Unknown = 0,
Waiting = 1,
Exporting = 2,
Completed = 3,
Failed = 4,
Cancelled = 5
}
/**
 * Generates still images (CGImages) from an asset's video at requested times.
 * Tolerance properties trade accuracy for speed (see Apple docs).
 */
declare class AVAssetImageGenerator extends NSObject {
static alloc(): AVAssetImageGenerator; // inherited from NSObject
static assetImageGeneratorWithAsset(asset: AVAsset): AVAssetImageGenerator;
static new(): AVAssetImageGenerator; // inherited from NSObject
apertureMode: string;
appliesPreferredTrackTransform: boolean;
readonly asset: AVAsset;
readonly customVideoCompositor: AVVideoCompositing;
maximumSize: CGSize;
requestedTimeToleranceAfter: CMTime;
requestedTimeToleranceBefore: CMTime;
videoComposition: AVVideoComposition;
/** Bridged initializer: `initWithAsset:`. */
constructor(o: { asset: AVAsset; });
cancelAllCGImageGeneration(): void;
/** Returns a CGImage (typed `any` by the bridge); native out-error is dropped. */
copyCGImageAtTimeActualTimeError(requestedTime: CMTime, actualTime: interop.Pointer | interop.Reference<CMTime>): any;
generateCGImagesAsynchronouslyForTimesCompletionHandler(requestedTimes: NSArray<NSValue> | NSValue[], handler: (p1: CMTime, p2: any, p3: CMTime, p4: AVAssetImageGeneratorResult, p5: NSError) => void): void;
initWithAsset(asset: AVAsset): this;
}
// Values for AVAssetImageGenerator.apertureMode.
declare var AVAssetImageGeneratorApertureModeCleanAperture: string;
declare var AVAssetImageGeneratorApertureModeEncodedPixels: string;
declare var AVAssetImageGeneratorApertureModeProductionAperture: string;
/** Per-image outcome reported to the image-generation completion handler. */
declare const enum AVAssetImageGeneratorResult {
Succeeded = 0,
Failed = 1,
Cancelled = 2
}
// Notification posted when an asset's media-selection groups change.
declare var AVAssetMediaSelectionGroupsDidChangeNotification: string;
/**
 * Reads sample data from an asset through attached AVAssetReaderOutput
 * objects. Add outputs, `startReading()`, then pull buffers from the outputs.
 * NOTE: the bridge drops the native `NSError**` out-parameter from
 * `assetReaderWithAssetError` / `initWithAssetError`; check `error` instead.
 */
declare class AVAssetReader extends NSObject {
static alloc(): AVAssetReader; // inherited from NSObject
static assetReaderWithAssetError(asset: AVAsset): AVAssetReader;
static new(): AVAssetReader; // inherited from NSObject
readonly asset: AVAsset;
readonly error: NSError;
readonly outputs: NSArray<AVAssetReaderOutput>;
readonly status: AVAssetReaderStatus;
timeRange: CMTimeRange;
/** Bridged initializer: `initWithAsset:error:`. */
constructor(o: { asset: AVAsset; });
addOutput(output: AVAssetReaderOutput): void;
canAddOutput(output: AVAssetReaderOutput): boolean;
cancelReading(): void;
initWithAssetError(asset: AVAsset): this;
startReading(): boolean;
}
/**
 * Reader output that mixes several audio tracks into a single stream of
 * audio samples, optionally processed per `audioSettings` / `audioMix`.
 */
declare class AVAssetReaderAudioMixOutput extends AVAssetReaderOutput {
static alloc(): AVAssetReaderAudioMixOutput; // inherited from NSObject
static assetReaderAudioMixOutputWithAudioTracksAudioSettings(audioTracks: NSArray<AVAssetTrack> | AVAssetTrack[], audioSettings: NSDictionary<string, any>): AVAssetReaderAudioMixOutput;
static new(): AVAssetReaderAudioMixOutput; // inherited from NSObject
audioMix: AVAudioMix;
readonly audioSettings: NSDictionary<string, any>;
audioTimePitchAlgorithm: string;
readonly audioTracks: NSArray<AVAssetTrack>;
/** Bridged initializer: `initWithAudioTracks:audioSettings:`. */
constructor(o: { audioTracks: NSArray<AVAssetTrack> | AVAssetTrack[]; audioSettings: NSDictionary<string, any>; });
initWithAudioTracksAudioSettings(audioTracks: NSArray<AVAssetTrack> | AVAssetTrack[], audioSettings: NSDictionary<string, any>): this;
}
/**
 * Abstract base for objects that vend sample buffers from an AVAssetReader.
 * `copyNextSampleBuffer` returns a CMSampleBuffer (typed `any` by the bridge),
 * or null when no more samples are available.
 */
declare class AVAssetReaderOutput extends NSObject {
static alloc(): AVAssetReaderOutput; // inherited from NSObject
static new(): AVAssetReaderOutput; // inherited from NSObject
alwaysCopiesSampleData: boolean;
readonly mediaType: string;
supportsRandomAccess: boolean;
copyNextSampleBuffer(): any;
markConfigurationAsFinal(): void;
resetForReadingTimeRanges(timeRanges: NSArray<NSValue> | NSValue[]): void;
}
/**
 * Adapts a metadata track output into a sequence of timed-metadata groups
 * via `nextTimedMetadataGroup()` (returns null when exhausted — per Apple docs).
 */
declare class AVAssetReaderOutputMetadataAdaptor extends NSObject {
static alloc(): AVAssetReaderOutputMetadataAdaptor; // inherited from NSObject
static assetReaderOutputMetadataAdaptorWithAssetReaderTrackOutput(trackOutput: AVAssetReaderTrackOutput): AVAssetReaderOutputMetadataAdaptor;
static new(): AVAssetReaderOutputMetadataAdaptor; // inherited from NSObject
readonly assetReaderTrackOutput: AVAssetReaderTrackOutput;
/** Bridged initializer: `initWithAssetReaderTrackOutput:`. */
constructor(o: { assetReaderTrackOutput: AVAssetReaderTrackOutput; });
initWithAssetReaderTrackOutput(trackOutput: AVAssetReaderTrackOutput): this;
nextTimedMetadataGroup(): AVTimedMetadataGroup;
}
/**
 * Reader output that vends sample *references* (location/size) for a single
 * track rather than the sample data itself (see Apple docs).
 */
declare class AVAssetReaderSampleReferenceOutput extends AVAssetReaderOutput {
static alloc(): AVAssetReaderSampleReferenceOutput; // inherited from NSObject
static assetReaderSampleReferenceOutputWithTrack(track: AVAssetTrack): AVAssetReaderSampleReferenceOutput;
static new(): AVAssetReaderSampleReferenceOutput; // inherited from NSObject
readonly track: AVAssetTrack;
/** Bridged initializer: `initWithTrack:`. */
constructor(o: { track: AVAssetTrack; });
initWithTrack(track: AVAssetTrack): this;
}
/** States of an AVAssetReader; values mirror the native enum. */
declare const enum AVAssetReaderStatus {
Unknown = 0,
Reading = 1,
Completed = 2,
Failed = 3,
Cancelled = 4
}
/**
 * Reader output vending samples from a single asset track, optionally
 * decoded/converted per `outputSettings`.
 */
declare class AVAssetReaderTrackOutput extends AVAssetReaderOutput {
static alloc(): AVAssetReaderTrackOutput; // inherited from NSObject
static assetReaderTrackOutputWithTrackOutputSettings(track: AVAssetTrack, outputSettings: NSDictionary<string, any>): AVAssetReaderTrackOutput;
static new(): AVAssetReaderTrackOutput; // inherited from NSObject
audioTimePitchAlgorithm: string;
readonly outputSettings: NSDictionary<string, any>;
readonly track: AVAssetTrack;
/** Bridged initializer: `initWithTrack:outputSettings:`. */
constructor(o: { track: AVAssetTrack; outputSettings: NSDictionary<string, any>; });
initWithTrackOutputSettings(track: AVAssetTrack, outputSettings: NSDictionary<string, any>): this;
}
/**
 * Reader output that composites frames from several video tracks using a
 * `videoComposition`, producing a single video stream.
 */
declare class AVAssetReaderVideoCompositionOutput extends AVAssetReaderOutput {
static alloc(): AVAssetReaderVideoCompositionOutput; // inherited from NSObject
static assetReaderVideoCompositionOutputWithVideoTracksVideoSettings(videoTracks: NSArray<AVAssetTrack> | AVAssetTrack[], videoSettings: NSDictionary<string, any>): AVAssetReaderVideoCompositionOutput;
static new(): AVAssetReaderVideoCompositionOutput; // inherited from NSObject
readonly customVideoCompositor: AVVideoCompositing;
videoComposition: AVVideoComposition;
readonly videoSettings: NSDictionary<string, any>;
readonly videoTracks: NSArray<AVAssetTrack>;
/** Bridged initializer: `initWithVideoTracks:videoSettings:`. */
constructor(o: { videoTracks: NSArray<AVAssetTrack> | AVAssetTrack[]; videoSettings: NSDictionary<string, any>; });
initWithVideoTracksVideoSettings(videoTracks: NSArray<AVAssetTrack> | AVAssetTrack[], videoSettings: NSDictionary<string, any>): this;
}
/**
 * Bit-flag restrictions on how an asset may follow media references
 * (values combinable; mirror the native NS_OPTIONS).
 * DefaultPolicy aliases ForbidLocalReferenceToRemote (2).
 */
declare const enum AVAssetReferenceRestrictions {
ForbidNone = 0,
ForbidRemoteReferenceToLocal = 1,
ForbidLocalReferenceToRemote = 2,
ForbidCrossSiteReference = 4,
ForbidLocalReferenceToLocal = 8,
ForbidAll = 65535,
DefaultPolicy = 2
}
/**
 * Mediates custom resource loading for a URL asset; assign a delegate with
 * `setDelegateQueue` to intercept loading requests.
 * NOTE(review): `delegateQueue` is typed NSObject here where the native API
 * uses dispatch_queue_t — this is how the bridge projects GCD queues.
 */
declare class AVAssetResourceLoader extends NSObject {
static alloc(): AVAssetResourceLoader; // inherited from NSObject
static new(): AVAssetResourceLoader; // inherited from NSObject
readonly delegate: AVAssetResourceLoaderDelegate;
readonly delegateQueue: NSObject;
preloadsEligibleContentKeys: boolean;
setDelegateQueue(delegate: AVAssetResourceLoaderDelegate, delegateQueue: NSObject): void;
}
/**
 * Delegate protocol for AVAssetResourceLoader. The `shouldWait...` methods
 * return true to indicate the request will be handled (all optional,
 * mirroring the native `@optional` methods).
 */
interface AVAssetResourceLoaderDelegate extends NSObjectProtocol {
resourceLoaderDidCancelAuthenticationChallenge?(resourceLoader: AVAssetResourceLoader, authenticationChallenge: NSURLAuthenticationChallenge): void;
resourceLoaderDidCancelLoadingRequest?(resourceLoader: AVAssetResourceLoader, loadingRequest: AVAssetResourceLoadingRequest): void;
resourceLoaderShouldWaitForLoadingOfRequestedResource?(resourceLoader: AVAssetResourceLoader, loadingRequest: AVAssetResourceLoadingRequest): boolean;
resourceLoaderShouldWaitForRenewalOfRequestedResource?(resourceLoader: AVAssetResourceLoader, renewalRequest: AVAssetResourceRenewalRequest): boolean;
resourceLoaderShouldWaitForResponseToAuthenticationChallenge?(resourceLoader: AVAssetResourceLoader, authenticationChallenge: NSURLAuthenticationChallenge): boolean;
}
/** Runtime companion object exposing the protocol's prototype (NativeScript convention). */
declare var AVAssetResourceLoaderDelegate: {
prototype: AVAssetResourceLoaderDelegate;
};
/**
 * Describes the content metadata (type, length, byte-range support) that a
 * resource-loader delegate fills in when answering a loading request.
 */
declare class AVAssetResourceLoadingContentInformationRequest extends NSObject {
static alloc(): AVAssetResourceLoadingContentInformationRequest; // inherited from NSObject
static new(): AVAssetResourceLoadingContentInformationRequest; // inherited from NSObject
readonly allowedContentTypes: NSArray<string>;
byteRangeAccessSupported: boolean;
contentLength: number;
/** UTI string identifying the resource's type. */
contentType: string;
renewalDate: Date;
}
/**
 * Describes the byte range a loading request wants; the delegate supplies
 * bytes incrementally via `respondWithData`.
 */
declare class AVAssetResourceLoadingDataRequest extends NSObject {
static alloc(): AVAssetResourceLoadingDataRequest; // inherited from NSObject
static new(): AVAssetResourceLoadingDataRequest; // inherited from NSObject
/** Offset of the next byte the delegate should provide. */
readonly currentOffset: number;
readonly requestedLength: number;
readonly requestedOffset: number;
readonly requestsAllDataToEndOfResource: boolean;
respondWithData(data: NSData): void;
}
/**
 * A single resource-loading request handed to the resource-loader delegate.
 * Finish it with exactly one of the `finishLoading*` methods.
 * NOTE: the bridge drops native `NSError**` out-parameters from the
 * `...Error`-suffixed key-request methods.
 */
declare class AVAssetResourceLoadingRequest extends NSObject {
static alloc(): AVAssetResourceLoadingRequest; // inherited from NSObject
static new(): AVAssetResourceLoadingRequest; // inherited from NSObject
readonly cancelled: boolean;
readonly contentInformationRequest: AVAssetResourceLoadingContentInformationRequest;
readonly dataRequest: AVAssetResourceLoadingDataRequest;
readonly finished: boolean;
redirect: NSURLRequest;
readonly request: NSURLRequest;
readonly requestor: AVAssetResourceLoadingRequestor;
response: NSURLResponse;
finishLoading(): void;
finishLoadingWithError(error: NSError): void;
finishLoadingWithResponseDataRedirect(response: NSURLResponse, data: NSData, redirect: NSURLRequest): void;
persistentContentKeyFromKeyVendorResponseOptionsError(keyVendorResponse: NSData, options: NSDictionary<string, any>): NSData;
streamingContentKeyRequestDataForAppContentIdentifierOptionsError(appIdentifier: NSData, contentIdentifier: NSData, options: NSDictionary<string, any>): NSData;
}
// Option key for streaming-content-key requests (requests a persistable key).
declare var AVAssetResourceLoadingRequestStreamingContentKeyRequestRequiresPersistentKey: string;
/** Describes the entity that originated a resource-loading request. */
declare class AVAssetResourceLoadingRequestor extends NSObject {
static alloc(): AVAssetResourceLoadingRequestor; // inherited from NSObject
static new(): AVAssetResourceLoadingRequestor; // inherited from NSObject
readonly providesExpiredSessionReports: boolean;
}
/** Loading-request subclass used to renew a previously issued resource. */
declare class AVAssetResourceRenewalRequest extends AVAssetResourceLoadingRequest {
static alloc(): AVAssetResourceRenewalRequest; // inherited from NSObject
static new(): AVAssetResourceRenewalRequest; // inherited from NSObject
}
/**
 * A single track (audio, video, text, …) of an `AVAsset`. Like AVAsset,
 * property values are lazily loaded natively; use the
 * AVAsynchronousKeyValueLoading methods before synchronous reads.
 */
declare class AVAssetTrack extends NSObject implements AVAsynchronousKeyValueLoading, NSCopying {
static alloc(): AVAssetTrack; // inherited from NSObject
static new(): AVAssetTrack; // inherited from NSObject
/** Owning asset. */
readonly asset: AVAsset;
readonly availableMetadataFormats: NSArray<string>;
readonly availableTrackAssociationTypes: NSArray<string>;
readonly commonMetadata: NSArray<AVMetadataItem>;
readonly decodable: boolean;
readonly enabled: boolean;
readonly estimatedDataRate: number;
readonly extendedLanguageTag: string;
/** CMFormatDescription refs, projected as `any` by the bridge. */
readonly formatDescriptions: NSArray<any>;
readonly hasAudioSampleDependencies: boolean;
readonly languageCode: string;
readonly mediaType: string;
readonly metadata: NSArray<AVMetadataItem>;
readonly minFrameDuration: CMTime;
readonly naturalSize: CGSize;
readonly naturalTimeScale: number;
readonly nominalFrameRate: number;
readonly playable: boolean;
readonly preferredTransform: CGAffineTransform;
readonly preferredVolume: number;
readonly requiresFrameReordering: boolean;
readonly segments: NSArray<AVAssetTrackSegment>;
readonly selfContained: boolean;
readonly timeRange: CMTimeRange;
readonly totalSampleDataLength: number;
readonly trackID: number;
associatedTracksOfType(trackAssociationType: string): NSArray<AVAssetTrack>;
copyWithZone(zone: interop.Pointer | interop.Reference<any>): any;
hasMediaCharacteristic(mediaCharacteristic: string): boolean;
loadValuesAsynchronouslyForKeysCompletionHandler(keys: NSArray<string> | string[], handler: () => void): void;
metadataForFormat(format: string): NSArray<AVMetadataItem>;
samplePresentationTimeForTrackTime(trackTime: CMTime): CMTime;
segmentForTrackTime(trackTime: CMTime): AVAssetTrackSegment;
statusOfValueForKeyError(key: string): AVKeyValueStatus;
}
/** A group of mutually exclusive tracks, identified by their track IDs. */
declare class AVAssetTrackGroup extends NSObject implements NSCopying {
static alloc(): AVAssetTrackGroup; // inherited from NSObject
static new(): AVAssetTrackGroup; // inherited from NSObject
readonly trackIDs: NSArray<number>;
copyWithZone(zone: interop.Pointer | interop.Reference<any>): any;
}
/** A segment of a track, mapping source time onto track time. */
declare class AVAssetTrackSegment extends NSObject {
static alloc(): AVAssetTrackSegment; // inherited from NSObject
static new(): AVAssetTrackSegment; // inherited from NSObject
/** True when the segment carries no media (an empty edit). */
readonly empty: boolean;
readonly timeMapping: CMTimeMapping;
}
// Bridged NSNotification name constants for track/asset change events.
declare var AVAssetTrackSegmentsDidChangeNotification: string;
declare var AVAssetTrackTimeRangeDidChangeNotification: string;
declare var AVAssetTrackTrackAssociationsDidChangeNotification: string;
declare var AVAssetWasDefragmentedNotification: string;
/**
 * Writes media to a new file through attached AVAssetWriterInput objects:
 * add inputs, `startWriting()`, bracket samples with `startSessionAtSourceTime`
 * / `endSessionAtSourceTime`, then finish writing.
 * NOTE: the bridge drops the native `NSError**` out-parameter from the
 * `...Error`-suffixed creators; check `error` after failures.
 */
declare class AVAssetWriter extends NSObject {
static alloc(): AVAssetWriter; // inherited from NSObject
static assetWriterWithURLFileTypeError(outputURL: NSURL, outputFileType: string): AVAssetWriter;
static new(): AVAssetWriter; // inherited from NSObject
readonly availableMediaTypes: NSArray<string>;
directoryForTemporaryFiles: NSURL;
readonly error: NSError;
readonly inputGroups: NSArray<AVAssetWriterInputGroup>;
readonly inputs: NSArray<AVAssetWriterInput>;
metadata: NSArray<AVMetadataItem>;
movieFragmentInterval: CMTime;
movieTimeScale: number;
readonly outputFileType: string;
readonly outputURL: NSURL;
overallDurationHint: CMTime;
shouldOptimizeForNetworkUse: boolean;
readonly status: AVAssetWriterStatus;
/** Bridged initializer: `initWithURL:fileType:error:`. */
constructor(o: { URL: NSURL; fileType: string; });
addInput(input: AVAssetWriterInput): void;
addInputGroup(inputGroup: AVAssetWriterInputGroup): void;
canAddInput(input: AVAssetWriterInput): boolean;
canAddInputGroup(inputGroup: AVAssetWriterInputGroup): boolean;
canApplyOutputSettingsForMediaType(outputSettings: NSDictionary<string, any>, mediaType: string): boolean;
cancelWriting(): void;
endSessionAtSourceTime(endTime: CMTime): void;
finishWriting(): boolean;
finishWritingWithCompletionHandler(handler: () => void): void;
initWithURLFileTypeError(outputURL: NSURL, outputFileType: string): this;
startSessionAtSourceTime(startTime: CMTime): void;
startWriting(): boolean;
}
/**
 * Appends media samples of a single media type to an AVAssetWriter.
 * Feed CMSampleBuffers (typed `any` by the bridge) via `appendSampleBuffer`
 * when `readyForMoreMediaData` is true; call `markAsFinished` when done.
 * NOTE(review): `queue` parameters are typed NSObject where the native API
 * uses dispatch_queue_t — this is how the bridge projects GCD queues.
 */
declare class AVAssetWriterInput extends NSObject {
static alloc(): AVAssetWriterInput; // inherited from NSObject
static assetWriterInputWithMediaTypeOutputSettings(mediaType: string, outputSettings: NSDictionary<string, any>): AVAssetWriterInput;
static assetWriterInputWithMediaTypeOutputSettingsSourceFormatHint(mediaType: string, outputSettings: NSDictionary<string, any>, sourceFormatHint: any): AVAssetWriterInput;
static new(): AVAssetWriterInput; // inherited from NSObject
readonly canPerformMultiplePasses: boolean;
readonly currentPassDescription: AVAssetWriterInputPassDescription;
expectsMediaDataInRealTime: boolean;
extendedLanguageTag: string;
languageCode: string;
marksOutputTrackAsEnabled: boolean;
/** One of the AVAssetWriterInputMediaDataLocation* string constants. */
mediaDataLocation: string;
mediaTimeScale: number;
readonly mediaType: string;
metadata: NSArray<AVMetadataItem>;
naturalSize: CGSize;
readonly outputSettings: NSDictionary<string, any>;
performsMultiPassEncodingIfSupported: boolean;
preferredMediaChunkAlignment: number;
preferredMediaChunkDuration: CMTime;
preferredVolume: number;
readonly readyForMoreMediaData: boolean;
sampleReferenceBaseURL: NSURL;
/** CMFormatDescription ref, projected as `any` by the bridge. */
readonly sourceFormatHint: any;
transform: CGAffineTransform;
/** Bridged initializers: `initWithMediaType:outputSettings:[sourceFormatHint:]`. */
constructor(o: { mediaType: string; outputSettings: NSDictionary<string, any>; });
constructor(o: { mediaType: string; outputSettings: NSDictionary<string, any>; sourceFormatHint: any; });
addTrackAssociationWithTrackOfInputType(input: AVAssetWriterInput, trackAssociationType: string): void;
appendSampleBuffer(sampleBuffer: any): boolean;
canAddTrackAssociationWithTrackOfInputType(input: AVAssetWriterInput, trackAssociationType: string): boolean;
initWithMediaTypeOutputSettings(mediaType: string, outputSettings: NSDictionary<string, any>): this;
initWithMediaTypeOutputSettingsSourceFormatHint(mediaType: string, outputSettings: NSDictionary<string, any>, sourceFormatHint: any): this;
markAsFinished(): void;
markCurrentPassAsFinished(): void;
requestMediaDataWhenReadyOnQueueUsingBlock(queue: NSObject, block: () => void): void;
respondToEachPassDescriptionOnQueueUsingBlock(queue: NSObject, block: () => void): void;
}
/**
 * Groups mutually exclusive writer inputs, with one marked as default
 * (subclasses AVMediaSelectionGroup, per the native API).
 */
declare class AVAssetWriterInputGroup extends AVMediaSelectionGroup {
static alloc(): AVAssetWriterInputGroup; // inherited from NSObject
static assetWriterInputGroupWithInputsDefaultInput(inputs: NSArray<AVAssetWriterInput> | AVAssetWriterInput[], defaultInput: AVAssetWriterInput): AVAssetWriterInputGroup;
static new(): AVAssetWriterInputGroup; // inherited from NSObject
readonly defaultInput: AVAssetWriterInput;
readonly inputs: NSArray<AVAssetWriterInput>;
/** Bridged initializer: `initWithInputs:defaultInput:`. */
constructor(o: { inputs: NSArray<AVAssetWriterInput> | AVAssetWriterInput[]; defaultInput: AVAssetWriterInput; });
initWithInputsDefaultInput(inputs: NSArray<AVAssetWriterInput> | AVAssetWriterInput[], defaultInput: AVAssetWriterInput): this;
}
// Values for AVAssetWriterInput.mediaDataLocation.
declare var AVAssetWriterInputMediaDataLocationBeforeMainMediaDataNotInterleaved: string;
declare var AVAssetWriterInputMediaDataLocationInterleavedWithMainMediaData: string;
/** Writes timed-metadata groups to a writer input's metadata track. */
declare class AVAssetWriterInputMetadataAdaptor extends NSObject {
static alloc(): AVAssetWriterInputMetadataAdaptor; // inherited from NSObject
static assetWriterInputMetadataAdaptorWithAssetWriterInput(input: AVAssetWriterInput): AVAssetWriterInputMetadataAdaptor;
static new(): AVAssetWriterInputMetadataAdaptor; // inherited from NSObject
readonly assetWriterInput: AVAssetWriterInput;
/** Bridged initializer: `initWithAssetWriterInput:`. */
constructor(o: { assetWriterInput: AVAssetWriterInput; });
appendTimedMetadataGroup(timedMetadataGroup: AVTimedMetadataGroup): boolean;
initWithAssetWriterInput(input: AVAssetWriterInput): this;
}
/** Describes one multi-pass encoding pass: the source time ranges to re-append. */
declare class AVAssetWriterInputPassDescription extends NSObject {
static alloc(): AVAssetWriterInputPassDescription; // inherited from NSObject
static new(): AVAssetWriterInputPassDescription; // inherited from NSObject
/** NSValue-wrapped CMTimeRanges for this pass. */
readonly sourceTimeRanges: NSArray<NSValue>;
}
/**
 * Appends CVPixelBuffers (projected as `any` by the bridge) to a writer
 * input, tagging each with a presentation time.
 */
declare class AVAssetWriterInputPixelBufferAdaptor extends NSObject {
static alloc(): AVAssetWriterInputPixelBufferAdaptor; // inherited from NSObject
static assetWriterInputPixelBufferAdaptorWithAssetWriterInputSourcePixelBufferAttributes(input: AVAssetWriterInput, sourcePixelBufferAttributes: NSDictionary<string, any>): AVAssetWriterInputPixelBufferAdaptor;
static new(): AVAssetWriterInputPixelBufferAdaptor; // inherited from NSObject
readonly assetWriterInput: AVAssetWriterInput;
/** CVPixelBufferPool ref, projected as `any` by the bridge. */
readonly pixelBufferPool: any;
readonly sourcePixelBufferAttributes: NSDictionary<string, any>;
/** Bridged initializer: `initWithAssetWriterInput:sourcePixelBufferAttributes:`. */
constructor(o: { assetWriterInput: AVAssetWriterInput; sourcePixelBufferAttributes: NSDictionary<string, any>; });
appendPixelBufferWithPresentationTime(pixelBuffer: any, presentationTime: CMTime): boolean;
initWithAssetWriterInputSourcePixelBufferAttributes(input: AVAssetWriterInput, sourcePixelBufferAttributes: NSDictionary<string, any>): this;
}
/** States of an AVAssetWriter; values mirror the native enum. */
declare const enum AVAssetWriterStatus {
Unknown = 0,
Writing = 1,
Completed = 2,
Failed = 3,
Cancelled = 4
}
/**
 * Request object handed to a Core Image-based video compositing handler;
 * finish with `finishWithImageContext` or `finishWithError`.
 */
declare class AVAsynchronousCIImageFilteringRequest extends NSObject implements NSCopying {
static alloc(): AVAsynchronousCIImageFilteringRequest; // inherited from NSObject
static new(): AVAsynchronousCIImageFilteringRequest; // inherited from NSObject
readonly compositionTime: CMTime;
readonly renderSize: CGSize;
readonly sourceImage: CIImage;
copyWithZone(zone: interop.Pointer | interop.Reference<any>): any;
finishWithError(error: NSError): void;
finishWithImageContext(filteredImage: CIImage, context: CIContext): void;
}
/**
 * Protocol for asynchronously loading property values (adopted by AVAsset
 * and AVAssetTrack above): load keys, then query their status.
 */
interface AVAsynchronousKeyValueLoading {
loadValuesAsynchronouslyForKeysCompletionHandler(keys: NSArray<string> | string[], handler: () => void): void;
statusOfValueForKeyError(key: string): AVKeyValueStatus;
}
/** Runtime companion object exposing the protocol's prototype (NativeScript convention). */
declare var AVAsynchronousKeyValueLoading: {
prototype: AVAsynchronousKeyValueLoading;
};
/**
 * Request handed to a custom AVVideoCompositing implementation for one output
 * frame. Source frames and the composed frame are CVPixelBuffers, projected
 * as `any` by the bridge. Finish with exactly one of the `finish*` methods.
 */
declare class AVAsynchronousVideoCompositionRequest extends NSObject implements NSCopying {
static alloc(): AVAsynchronousVideoCompositionRequest; // inherited from NSObject
static new(): AVAsynchronousVideoCompositionRequest; // inherited from NSObject
readonly compositionTime: CMTime;
readonly renderContext: AVVideoCompositionRenderContext;
readonly sourceTrackIDs: NSArray<number>;
readonly videoCompositionInstruction: AVVideoCompositionInstructionProtocol;
copyWithZone(zone: interop.Pointer | interop.Reference<any>): any;
finishCancelledRequest(): void;
finishWithComposedVideoFrame(composedVideoFrame: any): void;
finishWithError(error: NSError): void;
sourceFrameByTrackID(trackID: number): any;
}
/** C struct: listener orientation as yaw/pitch/roll angles. */
interface AVAudio3DAngularOrientation {
yaw: number;
pitch: number;
roll: number;
}
/** Runtime struct-type descriptor used to construct/marshal the struct. */
declare var AVAudio3DAngularOrientation: interop.StructType<AVAudio3DAngularOrientation>;
/**
 * Protocol exposing 3D-mixing properties of audio nodes (position, rendering
 * algorithm, reverb blend, etc. — see Apple's AVAudioEngine docs).
 */
interface AVAudio3DMixing extends NSObjectProtocol {
obstruction: number;
occlusion: number;
pointSourceInHeadMode: AVAudio3DMixingPointSourceInHeadMode;
position: AVAudio3DPoint;
rate: number;
renderingAlgorithm: AVAudio3DMixingRenderingAlgorithm;
reverbBlend: number;
sourceMode: AVAudio3DMixingSourceMode;
}
/** Runtime companion object exposing the protocol's prototype (NativeScript convention). */
declare var AVAudio3DMixing: {
prototype: AVAudio3DMixing;
};
/** In-head rendering mode for point sources; values mirror the native enum. */
declare const enum AVAudio3DMixingPointSourceInHeadMode {
Mono = 0,
Bypass = 1
}
/**
 * Spatial rendering algorithms; values mirror the native enum
 * (note: 4 is intentionally absent from the native definition as projected here).
 */
declare const enum AVAudio3DMixingRenderingAlgorithm {
EqualPowerPanning = 0,
SphericalHead = 1,
HRTF = 2,
SoundField = 3,
StereoPassThrough = 5,
HRTFHQ = 6,
Auto = 7
}
/** Source spatialization modes; values mirror the native enum. */
declare const enum AVAudio3DMixingSourceMode {
SpatializeIfMono = 0,
Bypass = 1,
PointSource = 2,
AmbienceBed = 3
}
/** C struct: a point in 3D space. */
interface AVAudio3DPoint {
x: number;
y: number;
z: number;
}
/** Runtime struct-type descriptor used to construct/marshal the struct. */
declare var AVAudio3DPoint: interop.StructType<AVAudio3DPoint>;
/** C struct: orientation expressed as forward and up vectors. */
interface AVAudio3DVectorOrientation {
forward: AVAudio3DPoint;
up: AVAudio3DPoint;
}
/** Runtime struct-type descriptor used to construct/marshal the struct. */
declare var AVAudio3DVectorOrientation: interop.StructType<AVAudio3DVectorOrientation>;
// Bit-rate strategy constants for audio encoder settings dictionaries.
declare var AVAudioBitRateStrategy_Constant: string;
declare var AVAudioBitRateStrategy_LongTermAverage: string;
declare var AVAudioBitRateStrategy_Variable: string;
declare var AVAudioBitRateStrategy_VariableConstrained: string;
/**
 * Abstract base for audio buffers; exposes the underlying AudioBufferList
 * via interop pointers/references for direct native access.
 */
declare class AVAudioBuffer extends NSObject implements NSCopying, NSMutableCopying {
static alloc(): AVAudioBuffer; // inherited from NSObject
static new(): AVAudioBuffer; // inherited from NSObject
readonly audioBufferList: interop.Pointer | interop.Reference<AudioBufferList>;
readonly format: AVAudioFormat;
readonly mutableAudioBufferList: interop.Pointer | interop.Reference<AudioBufferList>;
copyWithZone(zone: interop.Pointer | interop.Reference<any>): any;
mutableCopyWithZone(zone: interop.Pointer | interop.Reference<any>): any;
}
/**
 * Mirrors the native AVAudioChannelLayout: a channel layout described either by
 * a layout tag or by a raw AudioChannelLayout struct pointer.
 * Each constructor overload corresponds one-to-one to an init* method below
 * (NativeScript exposes native initializers both ways).
 */
declare class AVAudioChannelLayout extends NSObject implements NSSecureCoding {
static alloc(): AVAudioChannelLayout; // inherited from NSObject
static layoutWithLayout(layout: interop.Pointer | interop.Reference<AudioChannelLayout>): AVAudioChannelLayout;
static layoutWithLayoutTag(layoutTag: number): AVAudioChannelLayout;
static new(): AVAudioChannelLayout; // inherited from NSObject
readonly channelCount: number;
// Pointer/reference to the underlying C AudioChannelLayout struct.
readonly layout: interop.Pointer | interop.Reference<AudioChannelLayout>;
readonly layoutTag: number;
static readonly supportsSecureCoding: boolean; // inherited from NSSecureCoding
constructor(o: { coder: NSCoder; }); // inherited from NSCoding
constructor(o: { layout: interop.Pointer | interop.Reference<AudioChannelLayout>; });
constructor(o: { layoutTag: number; });
encodeWithCoder(coder: NSCoder): void;
initWithCoder(coder: NSCoder): this;
initWithLayout(layout: interop.Pointer | interop.Reference<AudioChannelLayout>): this;
initWithLayoutTag(layoutTag: number): this;
}
/**
 * Mirrors the native AVAudioCommonFormat enum (common PCM sample formats);
 * raw values match the native constants.
 */
declare const enum AVAudioCommonFormat {
OtherFormat = 0,
PCMFormatFloat32 = 1,
PCMFormatFloat64 = 2,
PCMFormatInt16 = 3,
PCMFormatInt32 = 4
}
/**
 * Mirrors the native AVAudioCompressedBuffer: an AVAudioBuffer subclass holding
 * compressed audio packets. Constructor overloads correspond one-to-one to the
 * initWithFormat* methods below.
 */
declare class AVAudioCompressedBuffer extends AVAudioBuffer {
static alloc(): AVAudioCompressedBuffer; // inherited from NSObject
static new(): AVAudioCompressedBuffer; // inherited from NSObject
readonly byteCapacity: number;
byteLength: number;
// Raw pointer to the compressed audio data.
readonly data: interop.Pointer | interop.Reference<any>;
readonly maximumPacketSize: number;
readonly packetCapacity: number;
packetCount: number;
// Pointer to per-packet AudioStreamPacketDescription entries.
readonly packetDescriptions: interop.Pointer | interop.Reference<AudioStreamPacketDescription>;
constructor(o: { format: AVAudioFormat; packetCapacity: number; });
constructor(o: { format: AVAudioFormat; packetCapacity: number; maximumPacketSize: number; });
initWithFormatPacketCapacity(format: AVAudioFormat, packetCapacity: number): this;
initWithFormatPacketCapacityMaximumPacketSize(format: AVAudioFormat, packetCapacity: number, maximumPacketSize: number): this;
}
/**
 * Mirrors the native AVAudioConnectionPoint: an immutable (node, bus) pair
 * identifying one endpoint of an audio-engine connection.
 */
declare class AVAudioConnectionPoint extends NSObject {
static alloc(): AVAudioConnectionPoint; // inherited from NSObject
static new(): AVAudioConnectionPoint; // inherited from NSObject
readonly bus: number;
readonly node: AVAudioNode;
constructor(o: { node: AVAudioNode; bus: number; });
initWithNodeBus(node: AVAudioNode, bus: number): this;
}
/**
 * Mirrors the native AVAudioConverter: converts audio between an inputFormat
 * and an outputFormat (sample-rate conversion, (de)interleaving, and
 * encode/decode, per the native API). Created via the
 * { fromFormat; toFormat } constructor / initFromFormatToFormat.
 */
declare class AVAudioConverter extends NSObject {
static alloc(): AVAudioConverter; // inherited from NSObject
static new(): AVAudioConverter; // inherited from NSObject
readonly applicableEncodeBitRates: NSArray<number>;
readonly applicableEncodeSampleRates: NSArray<number>;
readonly availableEncodeBitRates: NSArray<number>;
readonly availableEncodeChannelLayoutTags: NSArray<number>;
readonly availableEncodeSampleRates: NSArray<number>;
bitRate: number;
// One of the AVAudioBitRateStrategy_* constants declared above.
bitRateStrategy: string;
channelMap: NSArray<number>;
dither: boolean;
downmix: boolean;
readonly inputFormat: AVAudioFormat;
magicCookie: NSData;
readonly maximumOutputPacketSize: number;
readonly outputFormat: AVAudioFormat;
primeInfo: AVAudioConverterPrimeInfo;
primeMethod: AVAudioConverterPrimeMethod;
sampleRateConverterAlgorithm: string;
sampleRateConverterQuality: number;
constructor(o: { fromFormat: AVAudioFormat; toFormat: AVAudioFormat; });
// Pull-style conversion: inputBlock supplies source buffers on demand and
// reports status via its AVAudioConverterInputStatus out-parameter.
convertToBufferErrorWithInputFromBlock(outputBuffer: AVAudioBuffer, outError: interop.Pointer | interop.Reference<NSError>, inputBlock: (p1: number, p2: interop.Pointer | interop.Reference<AVAudioConverterInputStatus>) => AVAudioBuffer): AVAudioConverterOutputStatus;
// Single-shot PCM-to-PCM conversion; returns false on failure.
convertToBufferFromBufferError(outputBuffer: AVAudioPCMBuffer, inputBuffer: AVAudioPCMBuffer): boolean;
initFromFormatToFormat(fromFormat: AVAudioFormat, toFormat: AVAudioFormat): this;
reset(): void;
}
/**
 * Status reported by an AVAudioConverter input block; raw values match the
 * native constants.
 */
declare const enum AVAudioConverterInputStatus {
HaveData = 0,
NoDataNow = 1,
EndOfStream = 2
}
/**
 * Status returned by AVAudioConverter conversion calls; raw values match the
 * native constants.
 */
declare const enum AVAudioConverterOutputStatus {
HaveData = 0,
InputRanDry = 1,
EndOfStream = 2,
Error = 3
}
/**
 * Value struct mirroring the native AVAudioConverterPrimeInfo: leading/trailing
 * frame counts used when priming a converter.
 */
interface AVAudioConverterPrimeInfo {
leadingFrames: number;
trailingFrames: number;
}
/** Runtime marshaling descriptor for the AVAudioConverterPrimeInfo struct. */
declare var AVAudioConverterPrimeInfo: interop.StructType<AVAudioConverterPrimeInfo>;
/**
 * Mirrors the native AVAudioConverterPrimeMethod enum; raw values match the
 * native constants.
 */
declare const enum AVAudioConverterPrimeMethod {
Pre = 0,
Normal = 1,
None = 2
}
/**
 * Mirrors the native AVAudioEngine: a graph of AVAudioNode objects (input,
 * output, mixer, and attached effect/player nodes) with methods for attaching,
 * connecting, starting, and rendering. Method names are the NativeScript
 * flattening of the Objective-C selectors (colons removed, argument labels
 * concatenated).
 */
declare class AVAudioEngine extends NSObject {
static alloc(): AVAudioEngine; // inherited from NSObject
static new(): AVAudioEngine; // inherited from NSObject
readonly attachedNodes: NSSet<AVAudioNode>;
autoShutdownEnabled: boolean;
readonly inputNode: AVAudioInputNode;
readonly isInManualRenderingMode: boolean;
readonly mainMixerNode: AVAudioMixerNode;
// Block used to drive rendering while in manual rendering mode.
readonly manualRenderingBlock: (p1: number, p2: interop.Pointer | interop.Reference<AudioBufferList>, p3: interop.Pointer | interop.Reference<number>) => AVAudioEngineManualRenderingStatus;
readonly manualRenderingFormat: AVAudioFormat;
readonly manualRenderingMaximumFrameCount: number;
readonly manualRenderingMode: AVAudioEngineManualRenderingMode;
readonly manualRenderingSampleTime: number;
// Opaque native MusicSequence pointer.
musicSequence: interop.Pointer | interop.Reference<any>;
readonly outputNode: AVAudioOutputNode;
readonly running: boolean;
attachNode(node: AVAudioNode): void;
connectMIDIToFormatBlock(sourceNode: AVAudioNode, destinationNode: AVAudioNode, format: AVAudioFormat, tapBlock: (p1: number, p2: number, p3: number, p4: string) => number): void;
connectMIDIToNodesFormatBlock(sourceNode: AVAudioNode, destinationNodes: NSArray<AVAudioNode> | AVAudioNode[], format: AVAudioFormat, tapBlock: (p1: number, p2: number, p3: number, p4: string) => number): void;
connectToConnectionPointsFromBusFormat(sourceNode: AVAudioNode, destNodes: NSArray<AVAudioConnectionPoint> | AVAudioConnectionPoint[], sourceBus: number, format: AVAudioFormat): void;
connectToFormat(node1: AVAudioNode, node2: AVAudioNode, format: AVAudioFormat): void;
connectToFromBusToBusFormat(node1: AVAudioNode, node2: AVAudioNode, bus1: number, bus2: number, format: AVAudioFormat): void;
detachNode(node: AVAudioNode): void;
disableManualRenderingMode(): void;
disconnectMIDIFrom(sourceNode: AVAudioNode, destinationNode: AVAudioNode): void;
disconnectMIDIFromNodes(sourceNode: AVAudioNode, destinationNodes: NSArray<AVAudioNode> | AVAudioNode[]): void;
disconnectMIDIInput(node: AVAudioNode): void;
disconnectMIDIOutput(node: AVAudioNode): void;
disconnectNodeInput(node: AVAudioNode): void;
disconnectNodeInputBus(node: AVAudioNode, bus: number): void;
disconnectNodeOutput(node: AVAudioNode): void;
disconnectNodeOutputBus(node: AVAudioNode, bus: number): void;
// Returns false (with the native error dropped) on failure, per the
// NativeScript NSError-out-parameter convention.
enableManualRenderingModeFormatMaximumFrameCountError(mode: AVAudioEngineManualRenderingMode, pcmFormat: AVAudioFormat, maximumFrameCount: number): boolean;
inputConnectionPointForNodeInputBus(node: AVAudioNode, bus: number): AVAudioConnectionPoint;
outputConnectionPointsForNodeOutputBus(node: AVAudioNode, bus: number): NSArray<AVAudioConnectionPoint>;
pause(): void;
prepare(): void;
renderOfflineToBufferError(numberOfFrames: number, buffer: AVAudioPCMBuffer): AVAudioEngineManualRenderingStatus;
reset(): void;
startAndReturnError(): boolean;
stop(): void;
}
/** Name of the native notification posted when the engine's configuration changes. */
declare var AVAudioEngineConfigurationChangeNotification: string;
/**
 * Error codes for AVAudioEngine manual rendering; raw values match the native
 * constants (negative OSStatus-style codes).
 */
declare const enum AVAudioEngineManualRenderingError {
InvalidMode = -80800,
Initialized = -80801,
NotRunning = -80802
}
/**
 * Mirrors the native AVAudioEngineManualRenderingMode enum; raw values match
 * the native constants.
 */
declare const enum AVAudioEngineManualRenderingMode {
Offline = 0,
Realtime = 1
}
/**
 * Status returned by manual-rendering calls (e.g. renderOfflineToBufferError);
 * raw values match the native constants.
 */
declare const enum AVAudioEngineManualRenderingStatus {
Error = -1,
Success = 0,
InsufficientDataFromInputNode = 1,
CannotDoInCurrentContext = 2
}
/**
 * Mirrors the native AVAudioEnvironmentDistanceAttenuationModel enum; raw
 * values match the native constants (note: they start at 1, not 0).
 */
declare const enum AVAudioEnvironmentDistanceAttenuationModel {
Exponential = 1,
Inverse = 2,
Linear = 3
}
/**
 * Mirrors the native AVAudioEnvironmentDistanceAttenuationParameters:
 * distance-based attenuation settings of an AVAudioEnvironmentNode.
 */
declare class AVAudioEnvironmentDistanceAttenuationParameters extends NSObject {
static alloc(): AVAudioEnvironmentDistanceAttenuationParameters; // inherited from NSObject
static new(): AVAudioEnvironmentDistanceAttenuationParameters; // inherited from NSObject
distanceAttenuationModel: AVAudioEnvironmentDistanceAttenuationModel;
maximumDistance: number;
referenceDistance: number;
rolloffFactor: number;
}
/**
 * Mirrors the native AVAudioEnvironmentNode: a mixer node that renders its
 * AVAudioMixing inputs in a simulated 3D environment around a listener.
 *
 * Review fix: the generated declaration contained a dangling `readonly`
 * modifier immediately before `class()`. In TypeScript, `readonly` may only
 * modify property declarations or index signatures (error TS1024 on a method),
 * so the original did not type-check; the stray modifier is removed and the
 * inherited-from comment is kept on the member itself.
 */
declare class AVAudioEnvironmentNode extends AVAudioNode implements AVAudioMixing {
static alloc(): AVAudioEnvironmentNode; // inherited from NSObject
static new(): AVAudioEnvironmentNode; // inherited from NSObject
readonly applicableRenderingAlgorithms: NSArray<number>;
readonly distanceAttenuationParameters: AVAudioEnvironmentDistanceAttenuationParameters;
listenerAngularOrientation: AVAudio3DAngularOrientation;
listenerPosition: AVAudio3DPoint;
listenerVectorOrientation: AVAudio3DVectorOrientation;
readonly nextAvailableInputBus: number;
outputType: AVAudioEnvironmentOutputType;
outputVolume: number;
readonly reverbParameters: AVAudioEnvironmentReverbParameters;
readonly debugDescription: string; // inherited from NSObjectProtocol
readonly description: string; // inherited from NSObjectProtocol
readonly hash: number; // inherited from NSObjectProtocol
readonly isProxy: boolean; // inherited from NSObjectProtocol
obstruction: number; // inherited from AVAudio3DMixing
occlusion: number; // inherited from AVAudio3DMixing
pan: number; // inherited from AVAudioStereoMixing
pointSourceInHeadMode: AVAudio3DMixingPointSourceInHeadMode; // inherited from AVAudio3DMixing
position: AVAudio3DPoint; // inherited from AVAudio3DMixing
rate: number; // inherited from AVAudio3DMixing
renderingAlgorithm: AVAudio3DMixingRenderingAlgorithm; // inherited from AVAudio3DMixing
reverbBlend: number; // inherited from AVAudio3DMixing
sourceMode: AVAudio3DMixingSourceMode; // inherited from AVAudio3DMixing
readonly superclass: typeof NSObject; // inherited from NSObjectProtocol
volume: number; // inherited from AVAudioMixing
class(): typeof NSObject; // inherited from NSObjectProtocol
conformsToProtocol(aProtocol: any /* Protocol */): boolean;
destinationForMixerBus(mixer: AVAudioNode, bus: number): AVAudioMixingDestination;
isEqual(object: any): boolean;
isKindOfClass(aClass: typeof NSObject): boolean;
isMemberOfClass(aClass: typeof NSObject): boolean;
performSelector(aSelector: string): any;
performSelectorWithObject(aSelector: string, object: any): any;
performSelectorWithObjectWithObject(aSelector: string, object1: any, object2: any): any;
respondsToSelector(aSelector: string): boolean;
retainCount(): number;
self(): this;
}
/**
 * Mirrors the native AVAudioEnvironmentOutputType enum; raw values match the
 * native constants.
 */
declare const enum AVAudioEnvironmentOutputType {
Auto = 0,
Headphones = 1,
BuiltInSpeakers = 2,
ExternalSpeakers = 3
}
/**
 * Mirrors the native AVAudioEnvironmentReverbParameters: reverb settings of an
 * AVAudioEnvironmentNode, configurable via a factory preset.
 */
declare class AVAudioEnvironmentReverbParameters extends NSObject {
static alloc(): AVAudioEnvironmentReverbParameters; // inherited from NSObject
static new(): AVAudioEnvironmentReverbParameters; // inherited from NSObject
enable: boolean;
readonly filterParameters: AVAudioUnitEQFilterParameters;
level: number;
loadFactoryReverbPreset(preset: AVAudioUnitReverbPreset): void;
}
/**
 * Mirrors the native AVAudioFile: an audio file opened for reading or writing,
 * exposing its on-disk fileFormat and in-memory processingFormat. The
 * { forReading: ... } / { forWriting: ... } constructor overloads correspond
 * one-to-one to the initForReading* / initForWriting* methods below.
 */
declare class AVAudioFile extends NSObject {
static alloc(): AVAudioFile; // inherited from NSObject
static new(): AVAudioFile; // inherited from NSObject
readonly fileFormat: AVAudioFormat;
// Current read/write position, in sample frames.
framePosition: number;
readonly length: number;
readonly processingFormat: AVAudioFormat;
readonly url: NSURL;
constructor(o: { forReading: NSURL; commonFormat: AVAudioCommonFormat; interleaved: boolean; });
constructor(o: { forReading: NSURL; });
constructor(o: { forWriting: NSURL; settings: NSDictionary<string, any>; commonFormat: AVAudioCommonFormat; interleaved: boolean; });
constructor(o: { forWriting: NSURL; settings: NSDictionary<string, any>; });
initForReadingCommonFormatInterleavedError(fileURL: NSURL, format: AVAudioCommonFormat, interleaved: boolean): this;
initForReadingError(fileURL: NSURL): this;
initForWritingSettingsCommonFormatInterleavedError(fileURL: NSURL, settings: NSDictionary<string, any>, format: AVAudioCommonFormat, interleaved: boolean): this;
initForWritingSettingsError(fileURL: NSURL, settings: NSDictionary<string, any>): this;
// The *Error read/write methods return false on failure (native NSError
// out-parameter is dropped by the bridge).
readIntoBufferError(buffer: AVAudioPCMBuffer): boolean;
readIntoBufferFrameCountError(buffer: AVAudioPCMBuffer, frames: number): boolean;
writeFromBufferError(buffer: AVAudioPCMBuffer): boolean;
}
/** Name of the native AVAudioFileTypeKey settings key; value resolved from the framework at runtime. */
declare var AVAudioFileTypeKey: string;
/**
 * Mirrors the native AVAudioFormat: an immutable description of an audio data
 * format (sample rate, channel count/layout, common PCM format, interleaving),
 * convertible to and from CoreAudio AudioStreamBasicDescription and CoreMedia
 * format descriptions. Each constructor overload corresponds one-to-one to an
 * init* method below.
 */
declare class AVAudioFormat extends NSObject implements NSSecureCoding {
static alloc(): AVAudioFormat; // inherited from NSObject
static new(): AVAudioFormat; // inherited from NSObject
readonly channelCount: number;
readonly channelLayout: AVAudioChannelLayout;
readonly commonFormat: AVAudioCommonFormat;
// Opaque CMAudioFormatDescription (typed `any` by the generator).
readonly formatDescription: any;
readonly interleaved: boolean;
magicCookie: NSData;
readonly sampleRate: number;
readonly settings: NSDictionary<string, any>;
readonly standard: boolean;
// Pointer/reference to the underlying AudioStreamBasicDescription.
readonly streamDescription: interop.Pointer | interop.Reference<AudioStreamBasicDescription>;
static readonly supportsSecureCoding: boolean; // inherited from NSSecureCoding
constructor(o: { standardFormatWithSampleRate: number; channelLayout: AVAudioChannelLayout; });
constructor(o: { standardFormatWithSampleRate: number; channels: number; });
constructor(o: { CMAudioFormatDescription: any; });
constructor(o: { coder: NSCoder; }); // inherited from NSCoding
constructor(o: { commonFormat: AVAudioCommonFormat; sampleRate: number; channels: number; interleaved: boolean; });
constructor(o: { commonFormat: AVAudioCommonFormat; sampleRate: number; interleaved: boolean; channelLayout: AVAudioChannelLayout; });
constructor(o: { settings: NSDictionary<string, any>; });
constructor(o: { streamDescription: interop.Pointer | interop.Reference<AudioStreamBasicDescription>; });
constructor(o: { streamDescription: interop.Pointer | interop.Reference<AudioStreamBasicDescription>; channelLayout: AVAudioChannelLayout; });
encodeWithCoder(coder: NSCoder): void;
initStandardFormatWithSampleRateChannelLayout(sampleRate: number, layout: AVAudioChannelLayout): this;
initStandardFormatWithSampleRateChannels(sampleRate: number, channels: number): this;
initWithCMAudioFormatDescription(formatDescription: any): this;
initWithCoder(coder: NSCoder): this;
initWithCommonFormatSampleRateChannelsInterleaved(format: AVAudioCommonFormat, sampleRate: number, channels: number, interleaved: boolean): this;
initWithCommonFormatSampleRateInterleavedChannelLayout(format: AVAudioCommonFormat, sampleRate: number, interleaved: boolean, layout: AVAudioChannelLayout): this;
initWithSettings(settings: NSDictionary<string, any>): this;
initWithStreamDescription(asbd: interop.Pointer | interop.Reference<AudioStreamBasicDescription>): this;
initWithStreamDescriptionChannelLayout(asbd: interop.Pointer | interop.Reference<AudioStreamBasicDescription>, layout: AVAudioChannelLayout): this;
}
/**
 * Mirrors the native AVAudioIONode: the common base of the engine's input and
 * output nodes, exposing the underlying audio unit and voice-processing state.
 */
declare class AVAudioIONode extends AVAudioNode {
static alloc(): AVAudioIONode; // inherited from NSObject
static new(): AVAudioIONode; // inherited from NSObject
// Opaque pointer to the underlying AudioUnit.
readonly audioUnit: interop.Pointer | interop.Reference<any>;
readonly presentationLatency: number;
readonly voiceProcessingEnabled: boolean;
// Returns false on failure (native NSError out-parameter dropped by the bridge).
setVoiceProcessingEnabledError(enabled: boolean): boolean;
}
declare class AVAudioInputNode extends AVAudioIONode implements AVAudioMixing {
static alloc(): AVAudioInputNode; // inherited from NSObject
static new(): AVAudioInputNode; // inherited from NSObject
voiceProcessingAGCEnabled: boolean;
voiceProcessingBypassed: boolean;
voiceProcessingInputMuted: boolean;
readonly debugDescription: string; // inherited from NSObjectProtocol
readonly description: string; // inherited from NSObjectProtocol
readonly hash: number; // inherited from NSObjectProtocol
readonly isProxy: boolean; // inherited from NSObjectProtocol
obstruction: number; // inherited from AVAudio3DMixing
occlusion: number; // inherited from AVAudio3DMixing
pan: number; // inherited from AVAudioStereoMixing
pointSourceInHeadMode: AVAudio3DMixingPointSourceInHeadMode; // inherited from AVAudio3DMixing
position: AVAudio3DPoint; // inherited from AVAudio3DMixing
rate: number; // inherited from AVAudio3DMixing
renderingAlgorithm: AVAudio3DMixingRenderingAlgorithm; // inherited from AVAudio3DMixing
reverbBlend: number; // inherited from AVAudio3DMixing
sourceMode: AVAudio3DMixingSourceMode; // inherited from AVAudio3DMixing
readonly superclass: typeof NSObject; // inherited from NSObjectProtocol
volume: number; // inherited from AVAudioMixing
readonly // inherited from NSObjectProt