I did watch WWDC 2019 Session 716 (Streaming Audio on watchOS 6) and understand that an active audio session is the key to unlocking low‑level networking on watchOS. I’m configuring my audio session and engine as follows:
private func configureAudioSession(completion: @escaping (Bool) -> Void) {
    let audioSession = AVAudioSession.sharedInstance()
    do {
        try audioSession.setCategory(.playAndRecord, mode: .voiceChat, options: [])
        try audioSession.setActive(true, options: .notifyOthersOnDeactivation)

        // Retrieve the hardware sample rate and configure the audio format.
        let sampleRate = audioSession.sampleRate
        print("Active hardware sample rate: \(sampleRate)")
        audioFormat = AVAudioFormat(standardFormatWithSampleRate: sampleRate, channels: 1)

        // Configure the audio engine.
        audioInputNode = audioEngine.inputNode
        audioEngine.attach(audioPlayerNode)
        audioEngine.connect(audioPlayerNode, to: audioEngine.mainMixerNode, format: audioFormat)
        try audioEngine.start()

        completion(true)
    } catch {
        print("Error configuring audio session: \(error.localizedDescription)")
        completion(false)
    }
}
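For context, the capture side taps the input node and forwards each buffer to the server. This is a minimal sketch of what my sendAudioToServer path builds on, assuming the mono float format above and the NWConnection transport; the buffer‑to‑Data conversion is simplified:

private func installInputTap() {
    // Tap the hardware input; each callback delivers one buffer of mic audio.
    let input = audioEngine.inputNode
    let inputFormat = input.inputFormat(forBus: 0)
    input.installTap(onBus: 0, bufferSize: 1024, format: inputFormat) { [weak self] buffer, _ in
        guard let self = self, let channelData = buffer.floatChannelData else { return }
        // Pack the first (mono) channel's samples into a Data value.
        let byteCount = Int(buffer.frameLength) * MemoryLayout<Float>.size
        let data = Data(bytes: channelData[0], count: byteCount)
        self.connection?.send(content: data, completion: .contentProcessed { error in
            if let error = error {
                print("Send failed: \(error)")
            }
        })
    }
}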
private func setupUDPConnection() {
    let parameters = NWParameters.udp
    parameters.includePeerToPeer = true
    connection = NWConnection(host: "xxx.xxx.xxxxx.xxx", port: 0000, using: parameters)
    setupNWConnectionHandlers()
}

private func setupTCPConnection() {
    let parameters = NWParameters.tcp
    connection = NWConnection(host: "xxx.xxx.xxxxx.xxx", port: 0000, using: parameters)
    setupNWConnectionHandlers()
}
private func setupWebSocketConnection() {
    guard let url = URL(string: "ws://xxx.xxx.xxxxx.xxx:0000") else {
        print("Invalid WebSocket URL")
        return
    }
    let session = URLSession(configuration: .default)
    webSocketTask = session.webSocketTask(with: url)
    webSocketTask?.resume()
    print("WebSocket connection initiated")
    sendAudioToServer()
    receiveDataFromServer()
    sendWebSocketPing(after: 0.6)
}
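For the WebSocket variant, the receive side is the usual recursive receive loop. A minimal sketch (playReceivedAudio is a hypothetical playback helper):

private func receiveDataFromServer() {
    webSocketTask?.receive { [weak self] result in
        switch result {
        case .success(.data(let data)):
            self?.playReceivedAudio(data) // hypothetical playback helper
        case .success(.string(let text)):
            print("Received text frame: \(text)")
        case .failure(let error):
            print("WebSocket receive error: \(error)")
            return // stop re-arming after a failure
        default:
            break
        }
        // Re-arm for the next message.
        self?.receiveDataFromServer()
    }
}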
private func setupNWConnectionHandlers() {
    connection?.stateUpdateHandler = { [weak self] state in
        DispatchQueue.main.async {
            switch state {
            case .ready:
                print("Connected (NWConnection)")
                self?.isConnected = true
                self?.failToConnect = false
                self?.receiveDataFromServer()
                self?.sendAudioToServer()
            case .waiting(let error), .failed(let error):
                print("Connection error: \(error.localizedDescription)")
                // Retry after a short delay.
                DispatchQueue.main.asyncAfter(deadline: .now() + 2) {
                    self?.setupNetwork()
                }
            case .cancelled:
                print("NWConnection cancelled")
                self?.isConnected = false
            default:
                break
            }
        }
    }
    connection?.start(queue: .main)
}
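For the NWConnection paths, the receive side re-arms itself per message; receiveMessage suits the UDP case, while TCP would use receive(minimumIncompleteLength:maximumLength:completion:) instead. A sketch (again with the hypothetical playReceivedAudio helper):

private func receiveDataFromServer() {
    connection?.receiveMessage { [weak self] data, _, _, error in
        if let error = error {
            print("Receive error: \(error)")
            return
        }
        if let data = data, !data.isEmpty {
            self?.playReceivedAudio(data) // hypothetical playback helper
        }
        // Re-arm for the next datagram.
        self?.receiveDataFromServer()
    }
}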
I’m using the .playAndRecord category with .voiceChat mode because my app requires duplex streaming: capturing microphone audio to send to a server while playing back the incoming audio. Despite this setup, and with the audio engine confirmed running, my low-level connections (UDP and TCP via NWConnection, and WebSocket via URLSessionWebSocketTask) remain in the “waiting” state and eventually fail with POSIX error 50 (ENETDOWN, “Network is down”) on a real Apple Watch.
I'm not using the simple activation snippet with an empty options array because it prompts the user to pick an audio route, and I want duplex audio streaming through the default speaker and microphone.
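For reference, this is the activation style I'm avoiding; as I understand it, on watchOS it presents the route picker instead of keeping the built-in mic and speaker (sketch):

// The watchOS-specific activation that triggers the route picker:
AVAudioSession.sharedInstance().activate(options: []) { _, error in
    if let error = error {
        print("Activation failed: \(error.localizedDescription)")
    }
    // The user is asked to choose a route (e.g. Bluetooth headphones) here,
    // which is exactly what I want to avoid.
}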