ngx-mic-recorder
Microphone recorder for Angular
import * as i0 from '@angular/core';
import { Injectable, Directive, EventEmitter, Component, ViewEncapsulation, ViewChild, ContentChild, Input, Output, NgModule } from '@angular/core';
import { BehaviorSubject, map, switchMap, from } from 'rxjs';
import { Mp3Encoder } from 'lamejstmp';
import * as i2 from '@angular/common';
import { CommonModule } from '@angular/common';
// @ts-ignore
class MP3Encoder {
    constructor(config) {
        this.dataBuffer = [];
        this.config = {
            sampleRate: 44100,
            bitRate: 128,
        };
        Object.assign(this.config, config);
        // Mono encoder: 1 channel at the configured sample rate and bit rate.
        this.mp3Encoder = new Mp3Encoder(1, this.config.sampleRate, this.config.bitRate);
        this.maxSamples = 1152;
        this.samplesMono = null;
        this.clearBuffer();
    }
    clearBuffer() {
        this.dataBuffer = [];
    }
    appendToBuffer(buffer) {
        this.dataBuffer.push(new Int8Array(buffer));
    }
    // Convert float samples in [-1, 1] to signed 16-bit PCM.
    floatTo16BitPCM(input, output) {
        for (let i = 0; i < input.length; i++) {
            const s = Math.max(-1, Math.min(1, input[i]));
            output[i] = s < 0 ? s * 0x8000 : s * 0x7fff;
        }
    }
    convertBuffer(arrayBuffer) {
        const data = new Float32Array(arrayBuffer);
        const out = new Int16Array(arrayBuffer.length);
        this.floatTo16BitPCM(data, out);
        return out;
    }
    // Encode one chunk of mono float samples, 1152 samples per MP3 frame.
    encode(arrayBuffer) {
        this.samplesMono = this.convertBuffer(arrayBuffer);
        let remaining = this.samplesMono.length;
        for (let i = 0; remaining >= 0; i += this.maxSamples) {
            const left = this.samplesMono.subarray(i, i + this.maxSamples);
            const mp3buffer = this.mp3Encoder.encodeBuffer(left);
            this.appendToBuffer(mp3buffer);
            remaining -= this.maxSamples;
        }
    }
    // Flush the encoder and return all buffered MP3 frames.
    finish() {
        this.appendToBuffer(this.mp3Encoder.flush());
        return this.dataBuffer;
    }
}
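/*
 * Usage sketch (illustrative only; MP3Encoder is module-internal and not exported
 * from this bundle). It mirrors how NgxMicRecorderService below drives the encoder:
 * feed mono Float32Array chunks, then collect the MP3 frames into a Blob. The
 * `samples` variable is an assumption for demonstration.
 *
 *   const encoder = new MP3Encoder({ sampleRate: 44100, bitRate: 128 });
 *   const samples = new Float32Array(4096);   // one chunk of mono PCM in [-1, 1]
 *   encoder.encode(samples);                   // appends encoded MP3 frames to the buffer
 *   const mp3Blob = new Blob(encoder.finish(), { type: 'audio/mp3' });
 *   encoder.clearBuffer();                     // reset before the next recording
 */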
// Shared AudioContext / AnalyserNode singleton used by the service and the visualizers.
const audioCtx = new (window.AudioContext || window['webkitAudioContext'])();
let analyser = audioCtx.createAnalyser();
const AudioContext = {
    getAudioContext() {
        return audioCtx;
    },
    startAnalyze(stream) {
        const audioCtx = AudioContext.getAudioContext();
        audioCtx.resume().then(() => {
            const analyser = AudioContext.getAnalyser();
            const sourceNode = audioCtx.createMediaStreamSource(stream);
            sourceNode.connect(analyser);
        });
    },
    pauseAnalyze() {
        const audioCtx = AudioContext.getAudioContext();
        void audioCtx.suspend();
    },
    resumeAnalyze() {
        const audioCtx = AudioContext.getAudioContext();
        void audioCtx.resume();
    },
    getAnalyser() {
        return analyser;
    },
    resetAnalyser() {
        analyser = audioCtx.createAnalyser();
    },
};
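/*
 * Usage sketch (illustrative): the AudioContext helper wires a microphone
 * MediaStream into the shared AnalyserNode; the visualizers below read that
 * analyser on every animation frame. The `stream` variable is an assumption
 * (e.g. the result of navigator.mediaDevices.getUserMedia({ audio: true })).
 *
 *   AudioContext.startAnalyze(stream);                 // connect stream -> analyser
 *   const analyserNode = AudioContext.getAnalyser();   // read time/frequency data here
 *   AudioContext.pauseAnalyze();                        // suspends the shared audio context
 *   AudioContext.resumeAnalyze();
 *   AudioContext.resetAnalyser();                       // fresh AnalyserNode for the next recording
 */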
class NgxMicRecorderService {
    constructor() {
        this._isRecordingSubject = new BehaviorSubject(false);
        this._isPausedSubject = new BehaviorSubject(false);
        this._recordingTimeSubject = new BehaviorSubject(0);
        this._recordedBlobSubject = new BehaviorSubject(null);
        this._recordingStateSubject = new BehaviorSubject('inactive');
        this._audioContext = new (window.AudioContext || window['webkitAudioContext'])();
        this._mp3Encoder = new MP3Encoder();
        this.isRecording$ = this._isRecordingSubject.asObservable();
        this.isPaused$ = this._isPausedSubject.asObservable();
        // Elapsed seconds formatted as HH:mm:ss.
        this.recordingTime$ = this._recordingTimeSubject.asObservable().pipe(map(s => new Date(s * 1000).toISOString().slice(11, 19)));
        this.recordedBlob$ = this._recordedBlobSubject.asObservable();
        this.recordedBlobAsMp3$ = this.recordedBlob$.pipe(switchMap(() => from(this._getMp3())));
        this.recordingState$ = this._recordingStateSubject.asObservable();
        this.toggleStartStop = () => {
            const isRecording = this._isRecordingSubject.getValue();
            if (isRecording) {
                this.stopRecording();
            }
            else {
                this.startRecording();
            }
        };
        this.startRecording = () => {
            if (this._timeInterval !== undefined)
                return;
            navigator.mediaDevices
                .getUserMedia({ audio: true })
                .then((stream) => {
                    this._activeStream = stream;
                    this._isRecordingSubject.next(true);
                    const recorder = new MediaRecorder(stream);
                    this._mediaRecorder = recorder;
                    recorder.start();
                    this._startTimer();
                    this._recordingStateSubject.next('recording');
                    // Route the stream through a ScriptProcessorNode so raw PCM chunks
                    // can be fed to the MP3 encoder while MediaRecorder captures the blob.
                    this._mic = this._audioContext.createMediaStreamSource(stream);
                    this._processor = this._audioContext.createScriptProcessor(0, 1, 1);
                    this._mic.connect(this._processor);
                    this._processor.connect(this._audioContext.destination);
                    this._processor.onaudioprocess = (event) => {
                        this._mp3Encoder.encode(event.inputBuffer.getChannelData(0));
                    };
                    if (this._recordingEvents?.afterStartRecording)
                        this._recordingEvents?.afterStartRecording();
                    recorder.addEventListener('dataavailable', (event) => {
                        this._recordedBlobSubject.next(event.data);
                        // if (onDataAvailable) onDataAvailable(event.data);
                        recorder.stream.getTracks().forEach((t) => t.stop());
                        this._mediaRecorder = undefined;
                    });
                    AudioContext.startAnalyze(stream);
                })
                .catch((err) => console.log(err));
        };
        this.stopRecording = () => {
            this._mediaRecorder?.stop();
            this._stopTimer();
            this._recordingTimeSubject.next(0);
            this._isRecordingSubject.next(false);
            this._isPausedSubject.next(false);
            this._recordingStateSubject.next('inactive');
            AudioContext.resetAnalyser();
            if (this._recordingEvents?.afterStopRecording)
                this._recordingEvents.afterStopRecording(this._recordedBlobSubject.getValue());
            // Tear down the processing graph and release the microphone.
            if (this._processor && this._mic) {
                this._mic.disconnect();
                this._processor.disconnect();
                if (this._audioContext && this._audioContext.state !== 'closed') {
                    this._audioContext.close();
                }
                this._processor.onaudioprocess = null;
                this._activeStream?.getAudioTracks().forEach((track) => track.stop());
            }
        };
        this.togglePauseAndResume = () => {
            const isRecording = this._isRecordingSubject.getValue();
            if (!isRecording)
                return;
            const isPaused = this._isPausedSubject.getValue();
            if (isPaused) {
                this.resume();
            }
            else {
                this.pause();
            }
        };
        this.resume = () => {
            this._isPausedSubject.next(false);
            this._mediaRecorder?.resume();
            this._recordingStateSubject.next('recording');
            AudioContext.resumeAnalyze();
            void this._audioContext.resume();
            this._startTimer();
            if (this._recordingEvents?.onResume)
                this._recordingEvents.onResume();
        };
        this.pause = () => {
            this._isPausedSubject.next(true);
            this._mediaRecorder?.pause();
            this._recordingStateSubject.next('paused');
            AudioContext.pauseAnalyze();
            void this._audioContext.suspend();
            this._stopTimer();
            if (this._recordingEvents?.onPause)
                this._recordingEvents.onPause();
        };
    }
    _startTimer() {
        this._timeInterval = setInterval(() => {
            this._recordingTimeSubject.next(this._recordingTimeSubject.getValue() + 1);
        }, 1000);
    }
    _stopTimer() {
        clearInterval(this._timeInterval);
        this._timeInterval = undefined;
    }
    // Flush the encoder and resolve with an audio/mp3 Blob (rejects when empty).
    _getMp3() {
        const finalBuffer = this._mp3Encoder.finish();
        return new Promise((resolve, reject) => {
            if (finalBuffer.length === 0) {
                reject(new Error('No buffer to send'));
            }
            else {
                resolve(new Blob(finalBuffer, { type: 'audio/mp3' }));
                this._mp3Encoder.clearBuffer();
            }
        });
    }
    setRecordingEvents(events) {
        this._recordingEvents = events;
    }
}
NgxMicRecorderService.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "13.2.7", ngImport: i0, type: NgxMicRecorderService, deps: [], target: i0.ɵɵFactoryTarget.Injectable });
NgxMicRecorderService.ɵprov = i0.ɵɵngDeclareInjectable({ minVersion: "12.0.0", version: "13.2.7", ngImport: i0, type: NgxMicRecorderService, providedIn: 'root' });
i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "13.2.7", ngImport: i0, type: NgxMicRecorderService, decorators: [{
type: Injectable,
args: [{
providedIn: 'root'
}]
}] });
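/*
 * Usage sketch (illustrative, for a consuming Angular app; `RecorderPageComponent`
 * and its template are assumptions, not part of this bundle):
 *
 *   import { Component, OnInit } from '@angular/core';
 *   import { NgxMicRecorderService } from 'ngx-mic-recorder';
 *
 *   @Component({
 *     selector: 'app-recorder-page',
 *     template: '<button (click)="recorder.toggleStartStop()">Toggle recording</button>',
 *   })
 *   export class RecorderPageComponent implements OnInit {
 *     constructor(public recorder: NgxMicRecorderService) {}
 *
 *     ngOnInit(): void {
 *       // Emits an MP3 Blob each time recordedBlob$ emits (it is a BehaviorSubject,
 *       // so an initial emission before the first recording is expected).
 *       this.recorder.recordedBlobAsMp3$.subscribe((mp3) => {
 *         const url = URL.createObjectURL(mp3);
 *         console.log('recorded mp3 available at', url);
 *       });
 *     }
 *   }
 */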
let drawVisual;
const defaultOptions = {
    width: 300,
    height: 150,
    strokeColor: '#212121',
    backgroundColor: 'white',
};
const AudioVisualizer = {
    visualizeSineWave({ canvas, backgroundColor, strokeColor, width, height, }) {
        const canvasCtx = canvas.getContext('2d');
        let analyser = AudioContext.getAnalyser();
        const bufferLength = analyser.fftSize;
        const dataArray = new Uint8Array(bufferLength);
        canvasCtx.clearRect(0, 0, width, height);
        function draw() {
            drawVisual = requestAnimationFrame(draw);
            analyser = AudioContext.getAnalyser();
            analyser.getByteTimeDomainData(dataArray);
            canvasCtx.fillStyle = backgroundColor;
            canvasCtx.fillRect(0, 0, width, height);
            canvasCtx.lineWidth = 2;
            canvasCtx.strokeStyle = strokeColor;
            canvasCtx.beginPath();
            const sliceWidth = width / bufferLength;
            let x = 0;
            for (let i = 0; i < bufferLength; i++) {
                const v = dataArray[i] / 128.0;
                const y = (v * height) / 2;
                if (i === 0) {
                    canvasCtx.moveTo(x, y);
                }
                else {
                    canvasCtx.lineTo(x, y);
                }
                x += sliceWidth;
            }
            canvasCtx.lineTo(canvas.width, canvas.height / 2);
            canvasCtx.stroke();
        }
        draw();
    },
    visualizeFrequencyBars({ canvas, backgroundColor, strokeColor, width, height, }) {
        const canvasCtx = canvas.getContext('2d');
        let analyser = AudioContext.getAnalyser();
        analyser.fftSize = 256;
        const bufferLength = analyser.frequencyBinCount;
        const dataArray = new Uint8Array(bufferLength);
        canvasCtx.clearRect(0, 0, width, height);
        const draw = () => {
            drawVisual = requestAnimationFrame(draw);
            analyser = AudioContext.getAnalyser();
            analyser.getByteFrequencyData(dataArray);
            canvasCtx.fillStyle = backgroundColor;
            canvasCtx.fillRect(0, 0, width, height);
            const barWidth = (width / bufferLength) * 2.5;
            let barHeight;
            let x = 0;
            for (let i = 0; i < bufferLength; i++) {
                barHeight = dataArray[i];
                const rgb = this.hexToRgb(strokeColor); // computed but unused in this build
                canvasCtx.fillStyle = strokeColor;
                canvasCtx.fillRect(x, height - barHeight / 2, barWidth, barHeight / 2);
                x += barWidth + 1;
            }
        };
        draw();
    },
    visualizeFrequencyCircles({ canvas, backgroundColor, strokeColor, width, height, }) {
        const canvasCtx = canvas.getContext('2d');
        let analyser = AudioContext.getAnalyser();
        analyser.fftSize = 32;
        const bufferLength = analyser.frequencyBinCount;
        const dataArray = new Uint8Array(bufferLength);
        canvasCtx.clearRect(0, 0, width, height);
        const draw = () => {
            drawVisual = requestAnimationFrame(draw);
            analyser = AudioContext.getAnalyser();
            analyser.getByteFrequencyData(dataArray);
            // Average every `reductionAmount` frequency bins into one ring radius.
            const reductionAmount = 3;
            const reducedDataArray = new Uint8Array(bufferLength / reductionAmount);
            for (let i = 0; i < bufferLength; i += reductionAmount) {
                let sum = 0;
                for (let j = 0; j < reductionAmount; j++) {
                    sum += dataArray[i + j];
                }
                reducedDataArray[i / reductionAmount] = sum / reductionAmount;
            }
            canvasCtx.clearRect(0, 0, width, height);
            canvasCtx.beginPath();
            canvasCtx.arc(width / 2, height / 2, Math.min(height, width) / 2, 0, 2 * Math.PI);
            canvasCtx.fillStyle = backgroundColor;
            canvasCtx.fill();
            const stepSize = Math.min(height, width) / 2.0 / reducedDataArray.length;
            canvasCtx.strokeStyle = strokeColor;
            for (let i = 0; i < reducedDataArray.length; i++) {
                canvasCtx.beginPath();
                const normalized = reducedDataArray[i] / 128;
                const r = stepSize * i + stepSize * normalized;
                canvasCtx.arc(width / 2, height / 2, r, 0, 2 * Math.PI);
                canvasCtx.stroke();
            }
        };
        draw();
    },
    hexToRgb(hex) {
        const result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex);
        return result
            ? {
                r: parseInt(result[1], 16),
                g: parseInt(result[2], 16),
                b: parseInt(result[3], 16),
            }
            : null;
    },
    // Dispatch to visualizeSineWave / visualizeFrequencyBars / visualizeFrequencyCircles.
    visualize(type, options) {
        this[`visualize${type || 'SineWave'}`]({
            ...defaultOptions,
            ...options
        });
    }
};
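/*
 * Usage sketch (illustrative): drawing a live sine-wave visualization onto a
 * canvas while AudioContext.startAnalyze(stream) is active. The `canvasEl`
 * lookup is an assumption for demonstration.
 *
 *   const canvasEl = document.querySelector('canvas');
 *   AudioVisualizer.visualize('SineWave', {
 *     canvas: canvasEl,
 *     width: 300,
 *     height: 150,
 *     strokeColor: '#212121',
 *     backgroundColor: 'white',
 *   });
 *   // Other supported types: 'FrequencyBars' and 'FrequencyCircles'.
 */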
class NgxMicStartStopTemplate {
    constructor(templateRef) {
        this.templateRef = templateRef;
    }
}
NgxMicStartStopTemplate.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "13.2.7", ngImport: i0, type: NgxMicStartStopTemplate, deps: [{ token: i0.TemplateRef }], target: i0.ɵɵFactoryTarget.Directive });
NgxMicStartStopTemplate.ɵdir = i0.ɵɵngDeclareDirective({ minVersion: "12.0.0", version: "13.2.7", type: NgxMicStartStopTemplate, selector: "[ngx-mic-start-and-stop]", ngImport: i0 });
i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "13.2.7", ngImport: i0, type: NgxMicStartStopTemplate, decorators: [{
type: Directive,
args: [{
selector: '[ngx-mic-start-and-stop]'
}]
}], ctorParameters: function () { return [{ type: i0.TemplateRef }]; } });
class NgxPauseResumeTemplate {
    constructor(templateRef) {
        this.templateRef = templateRef;
    }
}
NgxPauseResumeTemplate.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "13.2.7", ngImport: i0, type: NgxPauseResumeTemplate, deps: [{ token: i0.TemplateRef }], target: i0.ɵɵFactoryTarget.Directive });
NgxPauseResumeTemplate.ɵdir = i0.ɵɵngDeclareDirective({ minVersion: "12.0.0", version: "13.2.7", type: NgxPauseResumeTemplate, selector: "[ngx-mic-pause-and-resume]", ngImport: i0 });
i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "13.2.7", ngImport: i0, type: NgxPauseResumeTemplate, decorators: [{
type: Directive,
args: [{
selector: '[ngx-mic-pause-and-resume]'
}]
}], ctorParameters: function () { return [{ type: i0.TemplateRef }]; } });
class NgxMicRecorderComponent {
    constructor(ngxMicRecorderService) {
        this.ngxMicRecorderService = ngxMicRecorderService;
        this.showVisualization = true;
        this.getAsMp3 = new EventEmitter();
        this.getAsBlob = new EventEmitter();
        this.afterStartRecording = new EventEmitter();
        this.afterStopRecording = new EventEmitter();
        this.onPauseRecording = new EventEmitter();
        this.onResumeRecording = new EventEmitter();
    }
    ngOnInit() {
        this._subscription = this.ngxMicRecorderService.recordedBlobAsMp3$.subscribe((data) => {
            this.getAsMp3.emit({ data, url: URL.createObjectURL(data) });
        });
        this._subscription.add(this.ngxMicRecorderService.recordedBlob$.subscribe((data) => this.getAsBlob.emit(data)));
        this.ngxMicRecorderService.setRecordingEvents({
            afterStartRecording: () => this.afterStartRecording.emit(),
            afterStopRecording: (blob) => this.afterStopRecording.emit(blob),
            onPause: () => this.onPauseRecording.emit(),
            onResume: () => this.onResumeRecording.emit(),
        });
    }
    ngAfterViewInit() {
        AudioVisualizer.visualize(this.visualizationType, {
            canvas: this.canvas.nativeElement,
            ...this.visualizationOptions
        });
    }
    ngOnDestroy() {
        this._subscription?.unsubscribe();
    }
    getStartStopTemplateContext($implicit) {
        return {
            $implicit,
            toggle: this.ngxMicRecorderService.toggleStartStop,
            start: this.ngxMicRecorderService.startRecording,
            stop: this.ngxMicRecorderService.stopRecording
        };
    }
    getPauseResumeTemplateContext($implicit, isPaused, recordingTime, recordingState) {
        return {
            $implicit,
            isPaused,
            recordingState,
            recordingTime,
            toggle: this.ngxMicRecorderService.togglePauseAndResume,
            pause: this.ngxMicRecorderService.pause,
            resume: this.ngxMicRecorderService.resume
        };
    }
}
NgxMicRecorderComponent.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "13.2.7", ngImport: i0, type: NgxMicRecorderComponent, deps: [{ token: NgxMicRecorderService }], target: i0.ɵɵFactoryTarget.Component });
NgxMicRecorderComponent.ɵcmp = i0.ɵɵngDeclareComponent({ minVersion: "12.0.0", version: "13.2.7", type: NgxMicRecorderComponent, selector: "ngx-mic-recorder", inputs: { showVisualization: "showVisualization", visualizationType: "visualizationType", visualizationOptions: "visualizationOptions" }, outputs: { getAsMp3: "getAsMp3", getAsBlob: "getAsBlob", afterStartRecording: "afterStartRecording", afterStopRecording: "afterStopRecording", onPauseRecording: "onPauseRecording", onResumeRecording: "onResumeRecording" }, queries: [{ propertyName: "startStopTemplateRef", first: true, predicate: NgxMicStartStopTemplate, descendants: true }, { propertyName: "pauseResumeTemplateRef", first: true, predicate: NgxPauseResumeTemplate, descendants: true }], viewQueries: [{ propertyName: "canvas", first: true, predicate: ["canvas"], descendants: true }], ngImport: i0, template: "<div class=\"ngx-mic-recorder\">\r\n <ng-template\r\n [ngTemplateOutlet]=\"startStopTemplateRef?.templateRef || defaultStartStopTemplate\"\r\n [ngTemplateOutletContext]=\"getStartStopTemplateContext((ngxMicRecorderService.isRecording$ | async) ?? false)\"\r\n\r\n ></ng-template>\r\n <ng-template\r\n [ngTemplateOutlet]=\"pauseResumeTemplateRef?.templateRef || defaultPauseAndResume\"\r\n [ngTemplateOutletContext]=\"getPauseResumeTemplateContext(\r\n (ngxMicRecorderService.isRecording$ | async) ?? false,\r\n (ngxMicRecorderService.isPaused$ | async) ?? false,\r\n (ngxMicRecorderService.recordingTime$ | async) ?? '00:00:00',\r\n (ngxMicRecorderService.recordingState$ | async) ?? 'inactive'\r\n )\"\r\n\r\n ></ng-template>\r\n</div>\r\n\r\n<canvas *ngIf=\"showVisualization\" #canvas [class.hidden]=\"!(ngxMicRecorderService.isRecording$ | async)\"></canvas>\r\n\r\n\r\n<ng-template #defaultStartStopTemplate let-isRecording let-toggle=\"toggle\">\r\n <div class=\"ngx-mic-recorder__start-stop\" (click)=\"toggle()\">\r\n <div class=\"ngx-mic-recorder__state\">\r\n <span *ngIf=\"isRecording\" class=\"ngx-mic-recorder__stop\"></span>\r\n <svg *ngIf=\"!isRecording\" class=\"ngx-mic-recorder__start\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" aria-hidden=\"true\"\r\n id=\"footer-sample-full\" preserveAspectRatio=\"xMidYMid meet\" viewBox=\"0 0 24 24\">\r\n <path fill=\"currentColor\" d=\"M12 14q-1.25 0-2.125-.875T9 11V5q0-1.25.875-2.125T12 2q1.25 0 2.125.875T15 5v6q0 1.25-.875 2.125T12 14Zm-1 7v-3.075q-2.6-.35-4.3-2.325Q5 13.625 5 11h2q0 2.075 1.463 3.537Q9.925 16 12 16t3.538-1.463Q17 13.075 17 11h2q0 2.625-1.7 4.6q-1.7 1.975-4.3 2.325V21Z\"></path>\r\n </svg>\r\n </div>\r\n </div>\r\n</ng-template>\r\n\r\n<ng-template\r\n #defaultPauseAndResume\r\n let-isRecording\r\n let-toggle=\"toggle\"\r\n let-recordingTime=\"recordingTime\"\r\n let-isPaused=\"isPaused\"\r\n let-recordingState=\"recordingState\"\r\n>\r\n <ng-template [ngIf]=\"isRecording\">\r\n <div class=\"ngx-mic-recorder__recording-time\">\r\n {{ recordingTime }}\r\n </div>\r\n <div class=\"ngx-mic-recorder__toggle\" (click)=\"toggle()\">\r\n <span [class.blink]=\"!isPaused\"></span>\r\n <p>{{ recordingState }}</p>\r\n </div>\r\n </ng-template>\r\n</ng-template>\r\n", styles: 
["ngx-mic-recorder{width:400px;display:block}ngx-mic-recorder>*{box-sizing:border-box}ngx-mic-recorder>canvas{width:100%}.ngx-mic-recorder{display:flex;align-items:center;justify-content:space-between}.ngx-mic-recorder__recording-time{color:#282828;font-size:24px;font-weight:700}.ngx-mic-recorder__state{width:24px;height:24px}.ngx-mic-recorder__start-stop{padding:16px;background-color:#282828;border-radius:50px;border:4px solid #CFCFCF;cursor:pointer}.ngx-mic-recorder__start-stop .ngx-mic-recorder__stop{background-color:#cfcfcf;border-radius:4px;width:100%;height:100%;display:block}.ngx-mic-recorder__start-stop .ngx-mic-recorder__start{color:#cfcfcf;width:100%;height:100%}.ngx-mic-recorder__toggle{padding:16px 48px;background-color:#282828;border-radius:50px;border:4px solid #CFCFCF;cursor:pointer;color:#cfcfcf;display:flex;align-items:center}.ngx-mic-recorder__toggle>span{background-color:#d40100;border-radius:50%;width:24px;height:24px;display:block;margin-inline-end:8px}.ngx-mic-recorder__toggle>p{margin:0;width:60px;text-transform:capitalize}.hidden{visibility:hidden}.blink{animation-name:blink;animation-duration:1s;animation-iteration-count:infinite}@keyframes blink{0%{opacity:1}50%{opacity:.3}to{opacity:1}}\n"], directives: [{ type: i2.NgTemplateOutlet, selector: "[ngTemplateOutlet]", inputs: ["ngTemplateOutletContext", "ngTemplateOutlet"] }, { type: i2.NgIf, selector: "[ngIf]", inputs: ["ngIf", "ngIfThen", "ngIfElse"] }], pipes: { "async": i2.AsyncPipe }, encapsulation: i0.ViewEncapsulation.None });
i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "13.2.7", ngImport: i0, type: NgxMicRecorderComponent, decorators: [{
type: Component,
args: [{ selector: 'ngx-mic-recorder', encapsulation: ViewEncapsulation.None, template: "<div class=\"ngx-mic-recorder\">\r\n <ng-template\r\n [ngTemplateOutlet]=\"startStopTemplateRef?.templateRef || defaultStartStopTemplate\"\r\n [ngTemplateOutletContext]=\"getStartStopTemplateContext((ngxMicRecorderService.isRecording$ | async) ?? false)\"\r\n\r\n ></ng-template>\r\n <ng-template\r\n [ngTemplateOutlet]=\"pauseResumeTemplateRef?.templateRef || defaultPauseAndResume\"\r\n [ngTemplateOutletContext]=\"getPauseResumeTemplateContext(\r\n (ngxMicRecorderService.isRecording$ | async) ?? false,\r\n (ngxMicRecorderService.isPaused$ | async) ?? false,\r\n (ngxMicRecorderService.recordingTime$ | async) ?? '00:00:00',\r\n (ngxMicRecorderService.recordingState$ | async) ?? 'inactive'\r\n )\"\r\n\r\n ></ng-template>\r\n</div>\r\n\r\n<canvas *ngIf=\"showVisualization\" #canvas [class.hidden]=\"!(ngxMicRecorderService.isRecording$ | async)\"></canvas>\r\n\r\n\r\n<ng-template #defaultStartStopTemplate let-isRecording let-toggle=\"toggle\">\r\n <div class=\"ngx-mic-recorder__start-stop\" (click)=\"toggle()\">\r\n <div class=\"ngx-mic-recorder__state\">\r\n <span *ngIf=\"isRecording\" class=\"ngx-mic-recorder__stop\"></span>\r\n <svg *ngIf=\"!isRecording\" class=\"ngx-mic-recorder__start\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" aria-hidden=\"true\"\r\n id=\"footer-sample-full\" preserveAspectRatio=\"xMidYMid meet\" viewBox=\"0 0 24 24\">\r\n <path fill=\"currentColor\" d=\"M12 14q-1.25 0-2.125-.875T9 11V5q0-1.25.875-2.125T12 2q1.25 0 2.125.875T15 5v6q0 1.25-.875 2.125T12 14Zm-1 7v-3.075q-2.6-.35-4.3-2.325Q5 13.625 5 11h2q0 2.075 1.463 3.537Q9.925 16 12 16t3.538-1.463Q17 13.075 17 11h2q0 2.625-1.7 4.6q-1.7 1.975-4.3 2.325V21Z\"></path>\r\n </svg>\r\n </div>\r\n </div>\r\n</ng-template>\r\n\r\n<ng-template\r\n #defaultPauseAndResume\r\n let-isRecording\r\n let-toggle=\"toggle\"\r\n let-recordingTime=\"recordingTime\"\r\n let-isPaused=\"isPaused\"\r\n let-recordingState=\"recordingState\"\r\n>\r\n <ng-template [ngIf]=\"isRecording\">\r\n <div class=\"ngx-mic-recorder__recording-time\">\r\n {{ recordingTime }}\r\n </div>\r\n <div class=\"ngx-mic-recorder__toggle\" (click)=\"toggle()\">\r\n <span [class.blink]=\"!isPaused\"></span>\r\n <p>{{ recordingState }}</p>\r\n </div>\r\n </ng-template>\r\n</ng-template>\r\n", styles: ["ngx-mic-recorder{width:400px;display:block}ngx-mic-recorder>*{box-sizing:border-box}ngx-mic-recorder>canvas{width:100%}.ngx-mic-recorder{display:flex;align-items:center;justify-content:space-between}.ngx-mic-recorder__recording-time{color:#282828;font-size:24px;font-weight:700}.ngx-mic-recorder__state{width:24px;height:24px}.ngx-mic-recorder__start-stop{padding:16px;background-color:#282828;border-radius:50px;border:4px solid #CFCFCF;cursor:pointer}.ngx-mic-recorder__start-stop .ngx-mic-recorder__stop{background-color:#cfcfcf;border-radius:4px;width:100%;height:100%;display:block}.ngx-mic-recorder__start-stop .ngx-mic-recorder__start{color:#cfcfcf;width:100%;height:100%}.ngx-mic-recorder__toggle{padding:16px 48px;background-color:#282828;border-radius:50px;border:4px solid 
#CFCFCF;cursor:pointer;color:#cfcfcf;display:flex;align-items:center}.ngx-mic-recorder__toggle>span{background-color:#d40100;border-radius:50%;width:24px;height:24px;display:block;margin-inline-end:8px}.ngx-mic-recorder__toggle>p{margin:0;width:60px;text-transform:capitalize}.hidden{visibility:hidden}.blink{animation-name:blink;animation-duration:1s;animation-iteration-count:infinite}@keyframes blink{0%{opacity:1}50%{opacity:.3}to{opacity:1}}\n"] }]
}], ctorParameters: function () { return [{ type: NgxMicRecorderService }]; }, propDecorators: { canvas: [{
type: ViewChild,
args: ['canvas']
}], startStopTemplateRef: [{
type: ContentChild,
args: [NgxMicStartStopTemplate]
}], pauseResumeTemplateRef: [{
type: ContentChild,
args: [NgxPauseResumeTemplate]
}], showVisualization: [{
type: Input
}], visualizationType: [{
type: Input
}], visualizationOptions: [{
type: Input
}], getAsMp3: [{
type: Output
}], getAsBlob: [{
type: Output
}], afterStartRecording: [{
type: Output
}], afterStopRecording: [{
type: Output
}], onPauseRecording: [{
type: Output
}], onResumeRecording: [{
type: Output
}] } });
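/*
 * Template sketch (illustrative, for a consuming Angular app): using the component
 * with its outputs and the optional custom-template directives declared above.
 * The `onMp3` / `onBlob` handlers are assumed methods on the host component.
 *
 *   <ngx-mic-recorder
 *     [showVisualization]="true"
 *     visualizationType="FrequencyBars"
 *     (getAsMp3)="onMp3($event)"
 *     (getAsBlob)="onBlob($event)"
 *   >
 *     <ng-template ngx-mic-start-and-stop let-isRecording let-toggle="toggle">
 *       <button (click)="toggle()">{{ isRecording ? 'Stop' : 'Record' }}</button>
 *     </ng-template>
 *     <ng-template ngx-mic-pause-and-resume let-isRecording let-toggle="toggle"
 *                  let-recordingTime="recordingTime">
 *       <button *ngIf="isRecording" (click)="toggle()">{{ recordingTime }}</button>
 *     </ng-template>
 *   </ngx-mic-recorder>
 */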
class NgxMicRecorderModule {
}
NgxMicRecorderModule.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "13.2.7", ngImport: i0, type: NgxMicRecorderModule, deps: [], target: i0.ɵɵFactoryTarget.NgModule });
NgxMicRecorderModule.ɵmod = i0.ɵɵngDeclareNgModule({ minVersion: "12.0.0", version: "13.2.7", ngImport: i0, type: NgxMicRecorderModule, declarations: [NgxMicRecorderComponent,
NgxMicStartStopTemplate,
NgxPauseResumeTemplate], imports: [CommonModule], exports: [NgxMicRecorderComponent,
NgxMicStartStopTemplate,
NgxPauseResumeTemplate] });
NgxMicRecorderModule.ɵinj = i0.ɵɵngDeclareInjector({ minVersion: "12.0.0", version: "13.2.7", ngImport: i0, type: NgxMicRecorderModule, imports: [[
CommonModule
]] });
i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "13.2.7", ngImport: i0, type: NgxMicRecorderModule, decorators: [{
type: NgModule,
args: [{
declarations: [
NgxMicRecorderComponent,
NgxMicStartStopTemplate,
NgxPauseResumeTemplate,
],
imports: [
CommonModule
],
exports: [
NgxMicRecorderComponent,
NgxMicStartStopTemplate,
NgxPauseResumeTemplate,
]
}]
}] });
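/*
 * Module wiring sketch (illustrative, for a consuming Angular app; `AppModule`
 * and `AppComponent` are assumptions, not part of this bundle):
 *
 *   import { NgModule } from '@angular/core';
 *   import { BrowserModule } from '@angular/platform-browser';
 *   import { NgxMicRecorderModule } from 'ngx-mic-recorder';
 *
 *   @NgModule({
 *     declarations: [AppComponent],
 *     imports: [BrowserModule, NgxMicRecorderModule],
 *     bootstrap: [AppComponent],
 *   })
 *   export class AppModule {}
 */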
/*
* Public API Surface of ngx-mic-recorder
*/
/**
* Generated bundle index. Do not edit.
*/
export { NgxMicRecorderComponent, NgxMicRecorderModule, NgxMicRecorderService, NgxMicStartStopTemplate, NgxPauseResumeTemplate };
//# sourceMappingURL=ngx-mic-recorder.mjs.map