Skip to content

Commit

Permalink
Allow playing custom streams (#94)
Browse files Browse the repository at this point in the history
* Allow playing custom streams

This lets users implement custom streams that can be played. For
example, I have a websocket interface that I fetch data from. I
can wrap that stream into a CoreAudioStreamSource and add that to
the player.

* Add example of using a custom stream

* Add ability to queue custom streams
  • Loading branch information
jacksonh authored Sep 19, 2024
1 parent b89d3d9 commit 624e575
Show file tree
Hide file tree
Showing 8 changed files with 226 additions and 35 deletions.
4 changes: 4 additions & 0 deletions AudioPlayer/AudioPlayer.xcodeproj/project.pbxproj
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
objects = {

/* Begin PBXBuildFile section */
42BE42F52C9322AA00C0E448 /* CustomStreamSource.swift in Sources */ = {isa = PBXBuildFile; fileRef = 42BE42F42C9322AA00C0E448 /* CustomStreamSource.swift */; };
9806E8182BC5D12500757370 /* App.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9806E8172BC5D12500757370 /* App.swift */; };
9806E81A2BC5D12500757370 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9806E8192BC5D12500757370 /* ContentView.swift */; };
9806E81C2BC5D12700757370 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 9806E81B2BC5D12700757370 /* Assets.xcassets */; };
Expand Down Expand Up @@ -47,6 +48,7 @@
/* End PBXCopyFilesBuildPhase section */

/* Begin PBXFileReference section */
42BE42F42C9322AA00C0E448 /* CustomStreamSource.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CustomStreamSource.swift; sourceTree = "<group>"; };
9806E8142BC5D12500757370 /* AudioPlayer.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = AudioPlayer.app; sourceTree = BUILT_PRODUCTS_DIR; };
9806E8172BC5D12500757370 /* App.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = App.swift; sourceTree = "<group>"; };
9806E8192BC5D12500757370 /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = "<group>"; };
Expand Down Expand Up @@ -195,6 +197,7 @@
98E3921C2BD845E100B586E9 /* AudioPlayer */ = {
isa = PBXGroup;
children = (
42BE42F42C9322AA00C0E448 /* CustomStreamSource.swift */,
9806E8302BC6927D00757370 /* AudioPlayerModel.swift */,
9806E8292BC68F8700757370 /* AudioPlayerView.swift */,
98BFB41C2BCD7BB800E812C0 /* EqualizerView.swift */,
Expand Down Expand Up @@ -292,6 +295,7 @@
9816A8BB2BC87BC200AD1299 /* AudioPlayerService.swift in Sources */,
984DE9572BDAFC7E004B427A /* AudioPlayerControlsView.swift in Sources */,
9806E8182BC5D12500757370 /* App.swift in Sources */,
42BE42F52C9322AA00C0E448 /* CustomStreamSource.swift in Sources */,
989E08E72BF7A4E300599F17 /* PrefersTabNavigationEnvironmentKey.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

import AVFoundation
import SwiftUI
import AudioStreaming

struct AudioPlayerControls: View {
@State var model: Model
Expand Down Expand Up @@ -247,11 +248,23 @@ extension AudioPlayerControls {
func play(_ track: AudioTrack) {
if track != currentTrack {
currentTrack?.status = .idle
audioPlayerService.play(url: track.url)
currentTrack = track
if track.url.scheme == "custom" {
let source = createStreamSource()
let audioFormat = AVAudioFormat(
commonFormat: .pcmFormatFloat32, sampleRate: 44100, channels: 2, interleaved: false
)!
audioPlayerService.play(source: source, entryId: track.url.absoluteString, format: audioFormat)
currentTrack = track
} else {
audioPlayerService.play(url: track.url)
}
}
}

/// Builds the demo custom stream source, bound to the player's `sourceQueue`
/// so that delegate callbacks are delivered on the queue the player expects.
func createStreamSource() -> CoreAudioStreamSource {
    return CustomStreamAudioSource(underlyingQueue: audioPlayerService.player.sourceQueue)
}

func onTick() {
let duration = audioPlayerService.duration
let progress = audioPlayerService.progress
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,11 +59,13 @@ public class AudioPlayerModel {

private let radioTracks: [AudioContent] = [.offradio, .enlefko, .pepper966, .kosmos, .kosmosJazz, .radiox]
private let audioTracks: [AudioContent] = [.khruangbin, .piano, .optimized, .nonOptimized, .remoteWave, .local, .localWave, .loopBeatFlac]
private let customStreams: [AudioContent] = [.custom("custom://sinwave")]

func audioTracksProvider() -> [AudioPlaylist] {
[
AudioPlaylist(title: "Radio", tracks: radioTracks.map { AudioTrack.init(from: $0) }),
AudioPlaylist(title: "Tracks", tracks: audioTracks.map { AudioTrack.init(from:$0) })
AudioPlaylist(title: "Tracks", tracks: audioTracks.map { AudioTrack.init(from:$0) }),
AudioPlaylist(title: "Generated", tracks: customStreams.map { AudioTrack.init(from:$0) })
]
}

Expand Down
139 changes: 139 additions & 0 deletions AudioPlayer/AudioPlayer/Content/AudioPlayer/CustomStreamSource.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,139 @@
//
// CustomStreamSource.swift
// AudioPlayer
//
// Created by Jackson Harper on 12/9/24.
//

import AVFoundation
import Foundation

import AudioStreaming

// This is a basic example of playing a custom audio stream. We generate
// a small amount of audio data on load and then pass it off to AudioStreaming.
/// A demo `CoreAudioStreamSource` that synthesizes a sine-wave WAV file in
/// memory and hands it to the player through the delegate callback.
final class CustomStreamAudioSource: NSObject, CoreAudioStreamSource {
    /// Receives the generated audio bytes; weak to avoid a retain cycle with
    /// the player that owns this source.
    weak var delegate: AudioStreamSourceDelegate?

    /// Queue on which the delegate callback is delivered.
    var underlyingQueue: DispatchQueue

    // The whole stream is produced in one shot, so these never advance.
    var position = 0
    var length = 0

    /// The generated payload is a WAV container.
    var audioFileHint: AudioFileTypeID {
        kAudioFileWAVEType
    }

    init(underlyingQueue: DispatchQueue) {
        self.underlyingQueue = underlyingQueue
    }

    // no-op: nothing to tear down for an in-memory stream
    func close() {}

    // no-op
    func suspend() {}

    func resume() {}

    func seek(at _: Int) {
        // The streaming process is started by a seek(0) call from AudioStreaming
        generateData()
    }

    private func generateData() {
        let frequency = 440.0 // A4, in Hz
        let sampleRate = 44100
        let duration = 20.0 // seconds

        let lpcmData = generateSineWave(frequency: frequency, sampleRate: sampleRate, duration: duration)
        let waveFile = createWavFile(using: lpcmData)

        // We enqueue this because during startup the seek call will be made, but the player
        // is not completely set up and ready to handle data yet, as it is expected to be
        // generated asynchronously. Capture self weakly so a source that was
        // discarded before the deadline fires is not kept alive just to
        // deliver data nobody will receive.
        underlyingQueue.asyncAfter(deadline: .now().advanced(by: .milliseconds(100))) { [weak self] in
            guard let self else { return }
            self.delegate?.dataAvailable(source: self, data: waveFile)
        }
    }
}

// Functions for generating some sample data

// Function to generate a sine wave as Data
/// Generates `duration` seconds of a mono sine wave as 16-bit little-endian
/// LPCM data.
///
/// - Parameters:
///   - frequency: Tone frequency in Hz.
///   - sampleRate: Samples per second (e.g. 44100).
///   - duration: Length of the tone in seconds.
///   - amplitude: Linear gain; values are clamped to [-1, 1] before the
///     Int16 conversion so an amplitude > 1.0 cannot trap at runtime.
/// - Returns: One little-endian `Int16` per sample (single channel).
func generateSineWave(frequency: Double, sampleRate: Int, duration: Double, amplitude: Double = 0.5) -> Data {
    let numberOfSamples = Int(Double(sampleRate) * duration)
    let twoPi = 2.0 * Double.pi
    var lpcmData = Data()
    // The final size is known up front; avoid repeated reallocations.
    lpcmData.reserveCapacity(numberOfSamples * MemoryLayout<Int16>.size)

    for sampleIndex in 0 ..< numberOfSamples {
        let time = Double(sampleIndex) / Double(sampleRate)
        let sampleValue = amplitude * sin(twoPi * frequency * time)

        // Clamp before converting: Int16(_: Double) traps on out-of-range
        // values, which amplitudes above 1.0 would otherwise produce.
        let clamped = min(max(sampleValue, -1.0), 1.0)
        let pcmValue = Int16(clamped * Double(Int16.max))
        withUnsafeBytes(of: pcmValue.littleEndian) { lpcmData.append(contentsOf: $0) }
    }

    return lpcmData
}

/// Wraps raw LPCM bytes in a WAV container by prepending a RIFF header.
/// - Parameter rawData: The LPCM payload to package.
/// - Returns: A complete WAV file (header followed by the payload).
func createWavFile(using rawData: Data) -> Data {
    let header = createWaveHeader(data: rawData) as Data
    return header + rawData
}

// from: https://stackoverflow.com/questions/49399823/in-ios-how-to-create-audio-file-wav-mp3-file-from-data
/// Builds the canonical 44-byte RIFF/WAVE header describing `data`.
///
/// The payload produced by `generateSineWave` is mono — one `Int16` per
/// sample — so the header declares a single channel. The previous version
/// declared 2 channels, which made decoders interpret the mono samples as
/// interleaved stereo frames.
/// - Parameter data: The raw LPCM payload the header will describe.
/// - Returns: The header bytes; all multi-byte fields are little-endian.
private func createWaveHeader(data: Data) -> NSData {
    let sampleRate: Int32 = 44100
    let chunkSize: Int32 = 36 + Int32(data.count)
    let subChunkSize: Int32 = 16
    let format: Int16 = 1 // 1 = uncompressed PCM
    // Mono: the generator emits one Int16 per sample frame.
    let channels: Int16 = 1
    let bitsPerSample: Int16 = 16
    let byteRate: Int32 = sampleRate * Int32(channels * bitsPerSample / 8)
    let blockAlign: Int16 = channels * bitsPerSample / 8
    let dataSize = Int32(data.count)

    let header = NSMutableData()

    header.append([UInt8]("RIFF".utf8), length: 4)
    header.append(intToByteArray(chunkSize), length: 4)

    // WAVE
    header.append([UInt8]("WAVE".utf8), length: 4)

    // "fmt " sub-chunk describing the sample format
    header.append([UInt8]("fmt ".utf8), length: 4)

    header.append(intToByteArray(subChunkSize), length: 4)
    header.append(shortToByteArray(format), length: 2)
    header.append(shortToByteArray(channels), length: 2)
    header.append(intToByteArray(sampleRate), length: 4)
    header.append(intToByteArray(byteRate), length: 4)
    header.append(shortToByteArray(blockAlign), length: 2)
    header.append(shortToByteArray(bitsPerSample), length: 2)

    // "data" sub-chunk: the payload follows immediately after this header
    header.append([UInt8]("data".utf8), length: 4)
    header.append(intToByteArray(dataSize), length: 4)

    return header
}

/// Serializes a 32-bit integer into its four little-endian bytes.
private func intToByteArray(_ i: Int32) -> [UInt8] {
    // Let the integer's own little-endian representation do the byte split.
    withUnsafeBytes(of: i.littleEndian) { Array($0) }
}

/// Serializes a 16-bit integer into its two little-endian bytes.
private func shortToByteArray(_ i: Int16) -> [UInt8] {
    // Same trick as intToByteArray: view the little-endian value as raw bytes.
    withUnsafeBytes(of: i.littleEndian) { Array($0) }
}
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ protocol AudioPlayerServiceDelegate: AnyObject {
final class AudioPlayerService {
weak var delegate: AudioPlayerServiceDelegate?

private var player: AudioPlayer
var player: AudioPlayer
private var audioSystemResetObserver: Any?

var duration: Double {
Expand Down Expand Up @@ -60,6 +60,11 @@ final class AudioPlayerService {
player.play(url: url)
}

/// Activates the audio session and starts playback from a caller-supplied
/// custom stream source.
/// - Parameters:
///   - source: The custom stream that will supply the audio bytes.
///   - entryId: Identifier the player associates with this entry.
///   - format: The audio format passed through to the underlying player.
func play(source: CoreAudioStreamSource, entryId: String, format: AVAudioFormat) {
    activateAudioSession()
    player.play(source: source, entryId: entryId, format: format)
}

func queue(url: URL) {
activateAudioSession()
player.queue(url: url)
Expand Down
4 changes: 2 additions & 2 deletions AudioStreaming/Streaming/Audio Source/AudioStreamSource.swift
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
import AudioToolbox
import Foundation

protocol AudioStreamSourceDelegate: AnyObject {
public protocol AudioStreamSourceDelegate: AnyObject {
/// Indicates that there's data available
func dataAvailable(source: CoreAudioStreamSource, data: Data)
/// Indicates an error occurred
Expand All @@ -17,7 +17,7 @@ protocol AudioStreamSourceDelegate: AnyObject {
func metadataReceived(data: [String: String])
}

protocol CoreAudioStreamSource: AnyObject {
public protocol CoreAudioStreamSource: AnyObject {
/// An `Int` that represents the position of the audio
var position: Int { get }
/// The length of the audio in bytes
Expand Down
18 changes: 9 additions & 9 deletions AudioStreaming/Streaming/Audio Source/RemoteAudioSource.swift
Original file line number Diff line number Diff line change
Expand Up @@ -13,13 +13,13 @@ enum RemoteAudioSourceError: Error {
}

public class RemoteAudioSource: AudioStreamSource {
weak var delegate: AudioStreamSourceDelegate?
public weak var delegate: AudioStreamSourceDelegate?

var position: Int {
public var position: Int {
return seekOffset + relativePosition
}

var length: Int {
public var length: Int {
guard let parsedHeader = parsedHeaderOutput else { return 0 }
return parsedHeader.fileLength
}
Expand All @@ -40,7 +40,7 @@ public class RemoteAudioSource: AudioStreamSource {
private var shouldTryParsingIcycastHeaders: Bool = false
private let icycastHeadersProcessor: IcycastHeadersProcessor

var audioFileHint: AudioFileTypeID {
public var audioFileHint: AudioFileTypeID {
guard let output = parsedHeaderOutput, output.typeId != 0 else {
return audioFileType(fileExtension: url.pathExtension)
}
Expand All @@ -49,7 +49,7 @@ public class RemoteAudioSource: AudioStreamSource {

private let mp4Restructure: RemoteMp4Restructure

let underlyingQueue: DispatchQueue
public let underlyingQueue: DispatchQueue
let streamOperationQueue: OperationQueue
let netStatusService: NetStatusProvider
var waitingForNetwork = false
Expand Down Expand Up @@ -114,7 +114,7 @@ public class RemoteAudioSource: AudioStreamSource {
httpHeaders: [:])
}

func close() {
public func close() {
retrierTimeout.cancel()
streamOperationQueue.isSuspended = false
streamOperationQueue.cancelAllOperations()
Expand All @@ -125,7 +125,7 @@ public class RemoteAudioSource: AudioStreamSource {
streamRequest = nil
}

func seek(at offset: Int) {
public func seek(at offset: Int) {
close()

relativePosition = 0
Expand All @@ -144,11 +144,11 @@ public class RemoteAudioSource: AudioStreamSource {
performOpen(seek: offset)
}

func suspend() {
public func suspend() {
streamOperationQueue.isSuspended = true
}

func resume() {
public func resume() {
streamOperationQueue.isSuspended = false
}

Expand Down
Loading

0 comments on commit 624e575

Please sign in to comment.