diff --git a/dotcom-rendering/src/components/AudioPlayer/AudioPlayer.tsx b/dotcom-rendering/src/components/AudioPlayer/AudioPlayer.tsx
index eb1fdf6a49..0e0127a45c 100644
--- a/dotcom-rendering/src/components/AudioPlayer/AudioPlayer.tsx
+++ b/dotcom-rendering/src/components/AudioPlayer/AudioPlayer.tsx
@@ -8,6 +8,8 @@ import { CurrentTime, Duration } from './components/time';
 import { Volume } from './components/Volume';
 import { Wrapper } from './components/Wrapper';
+// ********************* ophan stuff *********************
+
 // possible events for audio in ophan
 type AudioEvents = TAudioEventType extends `audio:content:${infer E}`
 	? E
 	: never;
@@ -21,6 +23,21 @@ type AudioProgressEvents = Extract<
 	? N
 	: never;
 
+const reportAudioEvent = (mediaId: string, eventName: AudioEvents) => {
+	const audioEvent: AudioEvent = {
+		id: mediaId,
+		eventType: `audio:content:${eventName}`,
+	};
+
+	void getOphan('Web').then((ophan) => {
+		ophan.record({
+			audio: audioEvent,
+		});
+	});
+};
+
+// ********************* Component *********************
+
 type AudioPlayerProps = {
 	/** The audio source you want to play. */
 	src: string;
@@ -47,31 +64,6 @@ export const AudioPlayer = ({
 	showVolumeControls = true,
 	mediaId,
 }: AudioPlayerProps) => {
-	// ********************* ophan stuff *********************
-
-	// we'll send listening progress reports to ophan at these percentage points
-	// through playback (100% is handled by the 'ended' event)
-	const audioProgressEvents = useRef<Set<AudioProgressEvents>>(
-		new Set([25, 50, 75]),
-	);
-
-	// wrapper to send audio events to ophan
-	const reportAudioEvent = useCallback(
-		(eventName: AudioEvents) => {
-			const audioEvent: AudioEvent = {
-				id: mediaId,
-				eventType: `audio:content:${eventName}`,
-			};
-
-			void getOphan('Web').then((ophan) => {
-				ophan.record({
-					audio: audioEvent,
-				});
-			});
-		},
-		[mediaId],
-	);
-
 	// ********************* player *********************
 
 	// state for displaying feedback to the user
@@ -89,15 +81,15 @@
 	// ref to the