I did watch WWDC 2019 Session 716 and understand that an active audio session is key to unlocking low‑level networking on watchOS. I’m configuring my audio session and engine as follows:
private func configureAudioSession(completion: @escaping (Bool) -> Void) {
    let audioSession = AVAudioSession.sharedInstance()
    do {
        try audioSession.setCategory(.playAndRecord, mode: .voiceChat, options: [])
        try audioSession.setActive(true, options: .notifyOthersOnDeactivation)

        // Retrieve the hardware sample rate and configure the audio format.
        let sampleRate = audioSession.sampleRate
        print("Active hardware sample rate: \(sampleRate)")
        audioFormat = AVAudioFormat(standardFormatWithSampleRate: sampleRate, channels: 1)

        // Configure the audio engine.
        audioInputNode = audioEngine.inputNode
        audioEngine.attach(audioPlayerNode)
        audioEngine.connect(audioPlayerNode, to: audioEngine.mainMixerNode, format: audioFormat)
        try audioEngine.start()
        completion(true)
    } catch {
        print("Error configuring audio session: \(error.localizedDescription)")
        completion(false)
    }
}
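Session 716 also covers the asynchronous activation API, activate(options:completionHandler:), which (as I understand it) is the route watchOS expects, because activation there can involve presenting the audio route picker. Here’s a minimal sketch of that variant, assuming the same category setup as above (the helper name is mine):

private func activateAudioSessionAsync(completion: @escaping (Bool) -> Void) {
    let audioSession = AVAudioSession.sharedInstance()
    do {
        try audioSession.setCategory(.playAndRecord, mode: .voiceChat, options: [])
    } catch {
        print("Error setting category: \(error.localizedDescription)")
        completion(false)
        return
    }
    // Minimal sketch: the completion handler may run on an arbitrary queue,
    // so hop back to the main queue before touching state.
    audioSession.activate(options: []) { didActivate, error in
        DispatchQueue.main.async {
            if let error {
                print("Activation error: \(error.localizedDescription)")
            }
            completion(didActivate)
        }
    }
}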
private func setupUDPConnection() {
    let parameters = NWParameters.udp
    parameters.includePeerToPeer = true
    connection = NWConnection(host: "***.***.xxxxx.***", port: 0000, using: parameters)
    setupNWConnectionHandlers()
}

private func setupTCPConnection() {
    let parameters = NWParameters.tcp
    connection = NWConnection(host: "***.***.xxxxx.***", port: 0000, using: parameters)
    setupNWConnectionHandlers()
}
private func setupWebSocketConnection() {
    guard let url = URL(string: "ws://***.***.xxxxx.***:0000") else {
        print("Invalid WebSocket URL")
        return
    }
    let session = URLSession(configuration: .default)
    webSocketTask = session.webSocketTask(with: url)
    webSocketTask?.resume()
    print("WebSocket connection initiated")
    sendAudioToServer()
    receiveDataFromServer()
    sendWebSocketPing(after: 0.6)
}
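The ping helper is a simple keep-alive loop built on URLSessionWebSocketTask.sendPing(pongReceiveHandler:); a minimal sketch, assuming webSocketTask is the task created above:

private func sendWebSocketPing(after interval: TimeInterval) {
    DispatchQueue.main.asyncAfter(deadline: .now() + interval) { [weak self] in
        self?.webSocketTask?.sendPing { error in
            if let error {
                print("Ping error: \(error.localizedDescription)")
            } else {
                // Pong received; schedule the next ping to keep the connection alive.
                self?.sendWebSocketPing(after: interval)
            }
        }
    }
}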
private func setupNWConnectionHandlers() {
    connection?.stateUpdateHandler = { [weak self] state in
        DispatchQueue.main.async {
            switch state {
            case .ready:
                print("Connected (NWConnection)")
                self?.isConnected = true
                self?.failToConnect = false
                self?.receiveDataFromServer()
                self?.sendAudioToServer()
            case .waiting(let error), .failed(let error):
                print("Connection error: \(error.localizedDescription)")
                DispatchQueue.main.asyncAfter(deadline: .now() + 2) {
                    self?.setupNetwork()
                }
            case .cancelled:
                print("NWConnection cancelled")
                self?.isConnected = false
            default:
                break
            }
        }
    }
    connection?.start(queue: .main)
}
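The sendAudioToServer() and receiveDataFromServer() helpers called above aren’t shown; for the NWConnection paths they are shaped roughly like this (a minimal sketch, where the Data payload is a stand-in for my real audio buffers):

private func sendAudioToServer() {
    // Sketch: placeholder bytes instead of the real encoded audio buffer.
    let payload = Data([0x00, 0x01, 0x02])
    connection?.send(content: payload, completion: .contentProcessed { error in
        if let error {
            print("Send error: \(error.localizedDescription)")
        }
    })
}

private func receiveDataFromServer() {
    connection?.receive(minimumIncompleteLength: 1, maximumLength: 65536) { [weak self] data, _, isComplete, error in
        if let data, !data.isEmpty {
            // Hand the received bytes to the playback path here.
            print("Received \(data.count) bytes")
        }
        if error == nil && !isComplete {
            // Keep the receive loop running.
            self?.receiveDataFromServer()
        }
    }
}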
I’m reaching out for further assistance with the trouble I’ve been having establishing UDP, TCP, and WebSocket connections on watchOS (via NWConnection and URLSessionWebSocketTask) for duplex audio streaming. Despite implementing the recommendations provided earlier, I’m still running into the same failures. Or is duplex audio streaming simply not possible on Apple Watch?
Lemme start by dropping some links:
- WWDC 2019 Session 716, Streaming Audio on watchOS 6.
I don’t know a lot about audio, but this question crops up a lot in networking circles, so I decided to create a small project to try it out. Here’s what I did:
- Using Xcode 16.3 on macOS 15.3.2, I created a new project from the watchOS > App template.
- I set the deployment target to watchOS 11.0.
- I replaced ContentView with the code at the end of this post.
- I ran it on a device running watchOS 11.3.1. Note that:
    - This device has Wi-Fi but no WWAN.
    - It’s paired to an iPhone.
    - Which is nearby.
    - And both are in range of a known Wi-Fi network.
- I tapped Connect. The status changed to “Waiting…” because there’s no audio session in place.
- I tapped Disconnect.
- I enabled the Session switch.
- This presented the audio route UI. In that, I chose my AirPods.
- Back in the app, I saw the status change to “Activated”.
- I tapped Connect. After a few seconds the status changed to “Connected”.
Please repeat these steps in your environment and let me know how you get along.
Share and Enjoy
—
Quinn “The Eskimo!” @ Developer Technical Support @ Apple
let myEmail = "eskimo" + "1" + "@" + "apple.com"
IMPORTANT The following code is not meant to be a good example of how to use Swift, SwiftUI, Network framework, or audio sessions. Rather, it’s the smallest example I could come up with to exercise the specific situation discussed in this thread.
import SwiftUI
import AVFAudio
import Network

struct ContentView: View {
    @State var status: String = "Tap something!"
    @State var sessionIsActive: Bool = false
    @State var connectionQ: NWConnection? = nil

    var body: some View {
        VStack {
            Text(status)
            Toggle("Session", isOn: $sessionIsActive)
            Button(connectionQ == nil ? "Connect" : "Disconnect") {
                if let connection = connectionQ {
                    self.connectionQ = nil
                    connection.stateUpdateHandler = nil
                    connection.cancel()
                    self.status = "Disconnected"
                } else {
                    self.status = "Connecting…"
                    let connection = NWConnection(host: "example.com", port: 80, using: .tcp)
                    self.connectionQ = connection
                    connection.stateUpdateHandler = { newState in
                        switch newState {
                        case .setup: break
                        case .waiting(_): self.status = "Waiting…"
                        case .preparing: break
                        case .ready: self.status = "Connected"
                        case .failed(_): self.status = "Failed"
                        case .cancelled: break
                        @unknown default: break
                        }
                    }
                    connection.start(queue: .main)
                }
            }
        }
        .onChange(of: sessionIsActive) { _, newValue in
            let session = AVAudioSession.sharedInstance()
            if newValue {
                do {
                    self.status = "Activating…"
                    try session.setCategory(
                        .playback,
                        mode: .default,
                        policy: .longFormAudio,
                        options: []
                    )
                    // On watchOS, activation is asynchronous and can present
                    // the audio route picker.
                    session.activate(options: []) { didActivate, error in
                        DispatchQueue.main.async {
                            if didActivate {
                                self.status = "Activated"
                            } else {
                                self.status = "Activation failed"
                            }
                        }
                    }
                } catch {
                    self.status = "Activation failed"
                }
            } else {
                try? session.setActive(false)
                self.status = "Deactivated"
            }
        }
        .padding()
    }
}

#Preview {
    ContentView()
}