import { observe } from "mobx"; import { getEnv, getRoot, getType, isAlive, types } from "mobx-state-tree"; import { createRef } from "react"; import { customTypes } from "../../../core/CustomTypes"; import { AnnotationMixin } from "../../../mixins/AnnotationMixin"; import IsReadyMixin from "../../../mixins/IsReadyMixin"; import ProcessAttrsMixin from "../../../mixins/ProcessAttrs"; import { SyncableMixin } from "../../../mixins/Syncable"; import { AudioRegionModel } from "../../../regions/AudioRegion"; import { FF_LSDV_E_278, isFF } from "../../../utils/feature-flags"; import { isDefined } from "../../../utils/utilities"; import ObjectBase from "../Base"; import { WS_SPEED, WS_VOLUME, WS_ZOOM_X } from "./constants"; import { ff } from "@humansignal/core"; /* Feature-flag check evaluated once at module load and reused below; NOTE(review): this means flag changes after import are not picked up — confirm that is intended. */ const isSyncedBuffering = ff.isActive(ff.FF_SYNCED_BUFFERING); /** * The Audio tag plays audio and shows its waveform. Use for audio annotation tasks where you want to label regions of audio, see the waveform, and manipulate audio during annotation. * * Use with the following data types: audio * @example * * * * @example * * * * @example * * * * @example * * *