diff --git a/AudioPlayer/AudioPlayer.xcodeproj/project.pbxproj b/AudioPlayer/AudioPlayer.xcodeproj/project.pbxproj index 902b4f4..b98978a 100644 --- a/AudioPlayer/AudioPlayer.xcodeproj/project.pbxproj +++ b/AudioPlayer/AudioPlayer.xcodeproj/project.pbxproj @@ -7,6 +7,7 @@ objects = { /* Begin PBXBuildFile section */ + 42BE42F52C9322AA00C0E448 /* CustomStreamSource.swift in Sources */ = {isa = PBXBuildFile; fileRef = 42BE42F42C9322AA00C0E448 /* CustomStreamSource.swift */; }; 9806E8182BC5D12500757370 /* App.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9806E8172BC5D12500757370 /* App.swift */; }; 9806E81A2BC5D12500757370 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9806E8192BC5D12500757370 /* ContentView.swift */; }; 9806E81C2BC5D12700757370 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 9806E81B2BC5D12700757370 /* Assets.xcassets */; }; @@ -47,6 +48,7 @@ /* End PBXCopyFilesBuildPhase section */ /* Begin PBXFileReference section */ + 42BE42F42C9322AA00C0E448 /* CustomStreamSource.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CustomStreamSource.swift; sourceTree = ""; }; 9806E8142BC5D12500757370 /* AudioPlayer.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = AudioPlayer.app; sourceTree = BUILT_PRODUCTS_DIR; }; 9806E8172BC5D12500757370 /* App.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = App.swift; sourceTree = ""; }; 9806E8192BC5D12500757370 /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = ""; }; @@ -195,6 +197,7 @@ 98E3921C2BD845E100B586E9 /* AudioPlayer */ = { isa = PBXGroup; children = ( + 42BE42F42C9322AA00C0E448 /* CustomStreamSource.swift */, 9806E8302BC6927D00757370 /* AudioPlayerModel.swift */, 9806E8292BC68F8700757370 /* AudioPlayerView.swift */, 98BFB41C2BCD7BB800E812C0 /* EqualizerView.swift 
*/, @@ -292,6 +295,7 @@ 9816A8BB2BC87BC200AD1299 /* AudioPlayerService.swift in Sources */, 984DE9572BDAFC7E004B427A /* AudioPlayerControlsView.swift in Sources */, 9806E8182BC5D12500757370 /* App.swift in Sources */, + 42BE42F52C9322AA00C0E448 /* CustomStreamSource.swift in Sources */, 989E08E72BF7A4E300599F17 /* PrefersTabNavigationEnvironmentKey.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; diff --git a/AudioPlayer/AudioPlayer/Content/AudioPlayer/AudioPlayerControlsView.swift b/AudioPlayer/AudioPlayer/Content/AudioPlayer/AudioPlayerControlsView.swift index 5ab51df..6bb22f7 100644 --- a/AudioPlayer/AudioPlayer/Content/AudioPlayer/AudioPlayerControlsView.swift +++ b/AudioPlayer/AudioPlayer/Content/AudioPlayer/AudioPlayerControlsView.swift @@ -4,6 +4,7 @@ import AVFoundation import SwiftUI +import AudioStreaming struct AudioPlayerControls: View { @State var model: Model @@ -247,11 +248,23 @@ extension AudioPlayerControls { func play(_ track: AudioTrack) { if track != currentTrack { currentTrack?.status = .idle - audioPlayerService.play(url: track.url) - currentTrack = track + if track.url.scheme == "custom" { + let source = createStreamSource() + let audioFormat = AVAudioFormat( + commonFormat: .pcmFormatFloat32, sampleRate: 44100, channels: 2, interleaved: false + )! 
+ audioPlayerService.play(source: source, entryId: track.url.absoluteString, format: audioFormat) + currentTrack = track + } else { + audioPlayerService.play(url: track.url) + } } } + func createStreamSource() -> CoreAudioStreamSource { + return CustomStreamAudioSource(underlyingQueue: audioPlayerService.player.sourceQueue) + } + func onTick() { let duration = audioPlayerService.duration let progress = audioPlayerService.progress diff --git a/AudioPlayer/AudioPlayer/Content/AudioPlayer/AudioPlayerModel.swift b/AudioPlayer/AudioPlayer/Content/AudioPlayer/AudioPlayerModel.swift index db958f5..3637b08 100644 --- a/AudioPlayer/AudioPlayer/Content/AudioPlayer/AudioPlayerModel.swift +++ b/AudioPlayer/AudioPlayer/Content/AudioPlayer/AudioPlayerModel.swift @@ -59,11 +59,13 @@ public class AudioPlayerModel { private let radioTracks: [AudioContent] = [.offradio, .enlefko, .pepper966, .kosmos, .kosmosJazz, .radiox] private let audioTracks: [AudioContent] = [.khruangbin, .piano, .optimized, .nonOptimized, .remoteWave, .local, .localWave, .loopBeatFlac] +private let customStreams: [AudioContent] = [.custom("custom://sinwave")] func audioTracksProvider() -> [AudioPlaylist] { [ AudioPlaylist(title: "Radio", tracks: radioTracks.map { AudioTrack.init(from: $0) }), - AudioPlaylist(title: "Tracks", tracks: audioTracks.map { AudioTrack.init(from:$0) }) + AudioPlaylist(title: "Tracks", tracks: audioTracks.map { AudioTrack.init(from:$0) }), + AudioPlaylist(title: "Generated", tracks: customStreams.map { AudioTrack.init(from:$0) }) ] } diff --git a/AudioPlayer/AudioPlayer/Content/AudioPlayer/CustomStreamSource.swift b/AudioPlayer/AudioPlayer/Content/AudioPlayer/CustomStreamSource.swift new file mode 100644 index 0000000..cdad498 --- /dev/null +++ b/AudioPlayer/AudioPlayer/Content/AudioPlayer/CustomStreamSource.swift @@ -0,0 +1,139 @@ +// +// CustomStreamSource.swift +// AudioPlayer +// +// Created by Jackson Harper on 12/9/24. 
+// + +import AVFoundation +import Foundation + +import AudioStreaming + +// This is a basic example of playing a custom audio stream. We generate +// a small amount of audio data on load and then pass it off to AudioStreaming. +final class CustomStreamAudioSource: NSObject, CoreAudioStreamSource { + weak var delegate: AudioStreamSourceDelegate? + + var underlyingQueue: DispatchQueue + + var position = 0 + var length = 0 + + var audioFileHint: AudioFileTypeID { + kAudioFileWAVEType + } + + init(underlyingQueue: DispatchQueue) { + self.underlyingQueue = underlyingQueue + } + + // no-op + func close() {} + + // no-op + func suspend() {} + + func resume() {} + + func seek(at _: Int) { + // The streaming process is started by a seek(0) call from AudioStreaming + generateData() + } + + private func generateData() { + let frequency = 440.0 + let sampleRate = 44100 + let duration = 20.0 + + let lpcmData = generateSineWave(frequency: frequency, sampleRate: sampleRate, duration: duration) + let waveFile = createWavFile(using: lpcmData) + + // We enqueue this because during startup the seek call will be made, but the player + // is not completely set up and ready to handle data yet, as it's expected to be + // generated asynchronously.
+ underlyingQueue.asyncAfter(deadline: .now().advanced(by: .milliseconds(100))) { + self.delegate?.dataAvailable(source: self, data: waveFile) + } + } +} + +// Functions for generating some sample data + +// Function to generate a sine wave as Data +func generateSineWave(frequency: Double, sampleRate: Int, duration: Double, amplitude: Double = 0.5) -> Data { + let numberOfSamples = Int(Double(sampleRate) * duration) + let twoPi = 2.0 * Double.pi + var lpcmData = Data() + + for sampleIndex in 0 ..< numberOfSamples { + let time = Double(sampleIndex) / Double(sampleRate) + let sampleValue = amplitude * sin(twoPi * frequency * time) + + let pcmValue = Int16(sampleValue * Double(Int16.max)) + withUnsafeBytes(of: pcmValue.littleEndian) { lpcmData.append(contentsOf: $0) } + } + + return lpcmData +} + +func createWavFile(using rawData: Data) -> Data { + let waveHeaderFormate = createWaveHeader(data: rawData) as Data + let waveFileData = waveHeaderFormate + rawData + return waveFileData +} + +// from: https://stackoverflow.com/questions/49399823/in-ios-how-to-create-audio-file-wav-mp3-file-from-data +private func createWaveHeader(data: Data) -> NSData { + let sampleRate: Int32 = 44100 + let chunkSize: Int32 = 36 + Int32(data.count) + let subChunkSize: Int32 = 16 + let format: Int16 = 1 + let channels: Int16 = 2 + let bitsPerSample: Int16 = 16 + let byteRate: Int32 = sampleRate * Int32(channels * bitsPerSample / 8) + let blockAlign: Int16 = channels * bitsPerSample / 8 + let dataSize = Int32(data.count) + + let header = NSMutableData() + + header.append([UInt8]("RIFF".utf8), length: 4) + header.append(intToByteArray(chunkSize), length: 4) + + // WAVE + header.append([UInt8]("WAVE".utf8), length: 4) + + // FMT + header.append([UInt8]("fmt ".utf8), length: 4) + + header.append(intToByteArray(subChunkSize), length: 4) + header.append(shortToByteArray(format), length: 2) + header.append(shortToByteArray(channels), length: 2) + header.append(intToByteArray(sampleRate), length: 4) 
+ header.append(intToByteArray(byteRate), length: 4) + header.append(shortToByteArray(blockAlign), length: 2) + header.append(shortToByteArray(bitsPerSample), length: 2) + + header.append([UInt8]("data".utf8), length: 4) + header.append(intToByteArray(dataSize), length: 4) + + return header +} + +private func intToByteArray(_ i: Int32) -> [UInt8] { + return [ + // little endian + UInt8(truncatingIfNeeded: i & 0xFF), + UInt8(truncatingIfNeeded: (i >> 8) & 0xFF), + UInt8(truncatingIfNeeded: (i >> 16) & 0xFF), + UInt8(truncatingIfNeeded: (i >> 24) & 0xFF), + ] +} + +private func shortToByteArray(_ i: Int16) -> [UInt8] { + return [ + // little endian + UInt8(truncatingIfNeeded: i & 0xFF), + UInt8(truncatingIfNeeded: (i >> 8) & 0xFF), + ] +} diff --git a/AudioPlayer/AudioPlayer/Dependencies/AudioPlayerService.swift b/AudioPlayer/AudioPlayer/Dependencies/AudioPlayerService.swift index 793894e..b61d383 100644 --- a/AudioPlayer/AudioPlayer/Dependencies/AudioPlayerService.swift +++ b/AudioPlayer/AudioPlayer/Dependencies/AudioPlayerService.swift @@ -17,7 +17,7 @@ protocol AudioPlayerServiceDelegate: AnyObject { final class AudioPlayerService { weak var delegate: AudioPlayerServiceDelegate? - private var player: AudioPlayer + var player: AudioPlayer private var audioSystemResetObserver: Any? 
var duration: Double { @@ -60,6 +60,11 @@ final class AudioPlayerService { player.play(url: url) } + func play(source: CoreAudioStreamSource, entryId: String, format: AVAudioFormat) { + activateAudioSession() + player.play(source: source, entryId: entryId, format: format) + } + func queue(url: URL) { activateAudioSession() player.queue(url: url) diff --git a/AudioStreaming/Streaming/Audio Source/AudioStreamSource.swift b/AudioStreaming/Streaming/Audio Source/AudioStreamSource.swift index 9f797e7..7c91c69 100644 --- a/AudioStreaming/Streaming/Audio Source/AudioStreamSource.swift +++ b/AudioStreaming/Streaming/Audio Source/AudioStreamSource.swift @@ -6,7 +6,7 @@ import AudioToolbox import Foundation -protocol AudioStreamSourceDelegate: AnyObject { +public protocol AudioStreamSourceDelegate: AnyObject { /// Indicates that there's data available func dataAvailable(source: CoreAudioStreamSource, data: Data) /// Indicates an error occurred @@ -17,7 +17,7 @@ protocol AudioStreamSourceDelegate: AnyObject { func metadataReceived(data: [String: String]) } -protocol CoreAudioStreamSource: AnyObject { +public protocol CoreAudioStreamSource: AnyObject { /// An `Int` that represents the position of the audio var position: Int { get } /// The length of the audio in bytes diff --git a/AudioStreaming/Streaming/Audio Source/RemoteAudioSource.swift b/AudioStreaming/Streaming/Audio Source/RemoteAudioSource.swift index 370fb67..10bbba0 100644 --- a/AudioStreaming/Streaming/Audio Source/RemoteAudioSource.swift +++ b/AudioStreaming/Streaming/Audio Source/RemoteAudioSource.swift @@ -13,13 +13,13 @@ enum RemoteAudioSourceError: Error { } public class RemoteAudioSource: AudioStreamSource { - weak var delegate: AudioStreamSourceDelegate? + public weak var delegate: AudioStreamSourceDelegate? 
- var position: Int { + public var position: Int { return seekOffset + relativePosition } - var length: Int { + public var length: Int { guard let parsedHeader = parsedHeaderOutput else { return 0 } return parsedHeader.fileLength } @@ -40,7 +40,7 @@ public class RemoteAudioSource: AudioStreamSource { private var shouldTryParsingIcycastHeaders: Bool = false private let icycastHeadersProcessor: IcycastHeadersProcessor - var audioFileHint: AudioFileTypeID { + public var audioFileHint: AudioFileTypeID { guard let output = parsedHeaderOutput, output.typeId != 0 else { return audioFileType(fileExtension: url.pathExtension) } @@ -49,7 +49,7 @@ public class RemoteAudioSource: AudioStreamSource { private let mp4Restructure: RemoteMp4Restructure - let underlyingQueue: DispatchQueue + public let underlyingQueue: DispatchQueue let streamOperationQueue: OperationQueue let netStatusService: NetStatusProvider var waitingForNetwork = false @@ -114,7 +114,7 @@ public class RemoteAudioSource: AudioStreamSource { httpHeaders: [:]) } - func close() { + public func close() { retrierTimeout.cancel() streamOperationQueue.isSuspended = false streamOperationQueue.cancelAllOperations() @@ -125,7 +125,7 @@ public class RemoteAudioSource: AudioStreamSource { streamRequest = nil } - func seek(at offset: Int) { + public func seek(at offset: Int) { close() relativePosition = 0 @@ -144,11 +144,11 @@ public class RemoteAudioSource: AudioStreamSource { performOpen(seek: offset) } - func suspend() { + public func suspend() { streamOperationQueue.isSuspended = true } - func resume() { + public func resume() { streamOperationQueue.isSuspended = false } diff --git a/AudioStreaming/Streaming/AudioPlayer/AudioPlayer.swift b/AudioStreaming/Streaming/AudioPlayer/AudioPlayer.swift index 0471cf0..a892c35 100644 --- a/AudioStreaming/Streaming/AudioPlayer/AudioPlayer.swift +++ b/AudioStreaming/Streaming/AudioPlayer/AudioPlayer.swift @@ -124,7 +124,7 @@ open class AudioPlayer { private let frameFilterProcessor: 
FrameFilterProcessor private let serializationQueue: DispatchQueue - private let sourceQueue: DispatchQueue + public let sourceQueue: DispatchQueue private let entryProvider: AudioEntryProviding @@ -190,6 +190,20 @@ open class AudioPlayer { /// - parameter headers: A `Dictionary` specifying any additional headers to be pass to the network request. public func play(url: URL, headers: [String: String]) { let audioEntry = entryProvider.provideAudioEntry(url: url, headers: headers) + play(audioEntry: audioEntry) + } + + /// Starts the audio playback for the supplied stream + /// + /// - parameter source: A `CoreAudioStreamSource` that will provide streaming data + /// - parameter entryId: A `String` that provides a unique id for this item + /// - parameter format: An `AVAudioFormat` specifying the format of this audio source + public func play(source: CoreAudioStreamSource, entryId: String, format: AVAudioFormat) { + let audioEntry = AudioEntry(source: source, entryId: AudioEntryId(id: entryId), outputAudioFormat: format) + play(audioEntry: audioEntry) + } + + private func play(audioEntry: AudioEntry) { audioEntry.delegate = self checkRenderWaitingAndNotifyIfNeeded() @@ -247,6 +261,16 @@ open class AudioPlayer { queue(url: url, headers: [:], after: afterUrl) } + /// Queues the specified audio stream + /// + /// - parameter source: A `CoreAudioStreamSource` that will provide streaming data + /// - parameter entryId: A `String` that provides a unique id for this item + /// - parameter format: An `AVAudioFormat` specifying the format of this audio source + public func queue(source: CoreAudioStreamSource, entryId: String, format: AVAudioFormat) { + let audioEntry = AudioEntry(source: source, entryId: AudioEntryId(id: entryId), outputAudioFormat: format) + queue(audioEntry: audioEntry) + } + public func removeFromQueue(url: URL) { serializationQueue.sync { if let item = entriesQueue.items(type: .upcoming).first(where: { $0.id.id == url.absoluteString }) { @@ -268,21 +292,8 @@ open class 
AudioPlayer { /// - Parameter url: A `URL` specifying the audio content to be played. /// - parameter headers: A `Dictionary` specifying any additional headers to be pass to the network request. public func queue(url: URL, headers: [String: String], after afterUrl: URL? = nil) { - serializationQueue.sync { - let audioEntry = entryProvider.provideAudioEntry(url: url, headers: headers) - audioEntry.delegate = self - if let afterUrl = afterUrl { - if let afterUrlEntry = entriesQueue.items(type: .upcoming).first(where: { $0.id.id == afterUrl.absoluteString }) { - entriesQueue.insert(item: audioEntry, type: .upcoming, after: afterUrlEntry) - } - } else { - entriesQueue.enqueue(item: audioEntry, type: .upcoming) - } - } - checkRenderWaitingAndNotifyIfNeeded() - sourceQueue.async { [weak self] in - self?.processSource() - } + let audioEntry = entryProvider.provideAudioEntry(url: url, headers: headers) + queue(audioEntry: audioEntry, after: afterUrl) } /// Queues the specified URLs @@ -303,6 +314,23 @@ open class AudioPlayer { } } + private func queue(audioEntry: AudioEntry, after afterUrl: URL? 
= nil) { + serializationQueue.sync { + audioEntry.delegate = self + if let afterUrl = afterUrl { + if let afterUrlEntry = entriesQueue.items(type: .upcoming).first(where: { $0.id.id == afterUrl.absoluteString }) { + entriesQueue.insert(item: audioEntry, type: .upcoming, after: afterUrlEntry) + } + } else { + entriesQueue.enqueue(item: audioEntry, type: .upcoming) + } + } + checkRenderWaitingAndNotifyIfNeeded() + sourceQueue.async { [weak self] in + self?.processSource() + } + } + /// Stops the audio playback public func stop(clearQueue: Bool = true) { guard playerContext.internalState != .stopped else { return } @@ -805,7 +833,7 @@ open class AudioPlayer { } extension AudioPlayer: AudioStreamSourceDelegate { - func dataAvailable(source: CoreAudioStreamSource, data: Data) { + public func dataAvailable(source: CoreAudioStreamSource, data: Data) { guard let readingEntry = playerContext.audioReadingEntry, readingEntry.has(same: source) else { return } @@ -835,12 +863,12 @@ extension AudioPlayer: AudioStreamSourceDelegate { } } - func errorOccurred(source: CoreAudioStreamSource, error: Error) { + public func errorOccurred(source: CoreAudioStreamSource, error: Error) { guard let entry = playerContext.audioReadingEntry, entry.has(same: source) else { return } raiseUnexpected(error: .networkError(.failure(error))) } - func endOfFileOccurred(source: CoreAudioStreamSource) { + public func endOfFileOccurred(source: CoreAudioStreamSource) { let hasSameSource = playerContext.audioReadingEntry?.has(same: source) ?? false guard playerContext.audioReadingEntry == nil || hasSameSource else { source.delegate = nil @@ -877,7 +905,7 @@ extension AudioPlayer: AudioStreamSourceDelegate { } } - func metadataReceived(data: [String: String]) { + public func metadataReceived(data: [String: String]) { asyncOnMain { [weak self] in guard let self = self else { return } self.delegate?.audioPlayerDidReadMetadata(player: self, metadata: data)