// niom-webrtc/src/utils/media_manager.rs
// (extraction metadata: 228 lines, 9.6 KiB, Rust)

use wasm_bindgen::prelude::*;
use wasm_bindgen::JsCast;
use wasm_bindgen_futures::JsFuture;
use web_sys::{
MediaStream, MediaStreamConstraints,
RtcPeerConnection, RtcConfiguration, RtcIceServer,
RtcSessionDescriptionInit, RtcSdpType,
};
use js_sys::Reflect;
use crate::models::MediaState;
/// Manages browser media acquisition (microphone access via `getUserMedia`)
/// and provides WebRTC peer-connection helper functions.
pub struct MediaManager {
// Current lifecycle state of the managed media stream; variants used in this
// file: Uninitialized, Requesting, Granted(MediaStream), NotSupported.
pub state: MediaState,
}
impl MediaManager {
pub fn new() -> Self {
Self {
state: MediaState::Uninitialized,
}
}
pub fn create_peer_connection() -> Result<RtcPeerConnection, String> {
let ice_server = RtcIceServer::new();
let urls = js_sys::Array::new();
// Use centralized default STUN server constant
urls.push(&JsValue::from_str(crate::constants::DEFAULT_STUN_SERVER));
ice_server.set_urls(&urls.into());
let config = RtcConfiguration::new();
let servers = js_sys::Array::new();
servers.push(&ice_server.into());
config.set_ice_servers(&servers.into());
RtcPeerConnection::new_with_configuration(&config)
.map_err(|e| format!("PeerConnection failed: {:?}", e))
}
pub async fn create_offer(pc: &RtcPeerConnection) -> Result<String, String> {
log::info!("🔄 Creating WebRTC offer…");
// 1. Promise awaiten
let js_offer = JsFuture::from(pc.create_offer())
.await
.map_err(|e| format!("create_offer failed: {:?}", e))?;
// 2. SDP-String per JS-Reflection aus dem JS-Objekt holen
let sdp_js = Reflect::get(&js_offer, &JsValue::from_str("sdp"))
.map_err(|_| "Failed to get .sdp field".to_string())?;
let sdp = sdp_js
.as_string()
.ok_or_else(|| "SDP field was not a string".to_string())?;
// 3. Init-Objekt bauen und SDP setzen
let init = RtcSessionDescriptionInit::new(RtcSdpType::Offer);
init.set_sdp(&sdp);
// 4. Local Description setzen
JsFuture::from(pc.set_local_description(&init))
.await
.map_err(|e| format!("set_local_description failed: {:?}", e))?;
log::info!("✅ Offer SDP length: {}", sdp.len());
log::debug!("📋 SDP-Preview: {}...", &sdp[..std::cmp::min(150, sdp.len())]);
Ok(sdp)
}
pub async fn handle_offer(pc: &RtcPeerConnection, offer_sdp: &str) -> Result<String, String> {
log::info!("📨 Handling received offer…");
let remote_init = RtcSessionDescriptionInit::new(RtcSdpType::Offer);
remote_init.set_sdp(offer_sdp);
JsFuture::from(pc.set_remote_description(&remote_init))
.await
.map_err(|e| format!("set_remote_description failed: {:?}", e))?;
log::info!("🔄 Creating answer…");
let js_answer = JsFuture::from(pc.create_answer())
.await
.map_err(|e| format!("create_answer failed: {:?}", e))?;
let sdp_js = Reflect::get(&js_answer, &JsValue::from_str("sdp"))
.map_err(|_| "Failed to get .sdp field from answer".to_string())?;
let sdp = sdp_js
.as_string()
.ok_or_else(|| "Answer SDP field was not a string".to_string())?;
let answer_init = RtcSessionDescriptionInit::new(RtcSdpType::Answer);
answer_init.set_sdp(&sdp);
JsFuture::from(pc.set_local_description(&answer_init))
.await
.map_err(|e| format!("set_local_answer failed: {:?}", e))?;
log::info!("✅ Answer SDP length: {}", sdp.len());
Ok(sdp)
}
pub async fn handle_answer(pc: &RtcPeerConnection, answer_sdp: &str) -> Result<(), String> {
log::info!("📨 Handling received answer...");
// **DEBUG:** State vor Answer-Verarbeitung
// Use the signaling_state() result for debug but avoid importing the enum type locally.
let state = pc.signaling_state();
log::info!("🔍 PeerConnection state before answer: {:?}", state);
// Only proceed if in HaveLocalOffer
if state != web_sys::RtcSignalingState::HaveLocalOffer {
return Err(format!("❌ Falscher State für Answer: {:?}", state));
}
let init = RtcSessionDescriptionInit::new(RtcSdpType::Answer);
init.set_sdp(answer_sdp);
JsFuture::from(pc.set_remote_description(&init))
.await
.map_err(|e| format!("set_remote_answer desc failed: {:?}", e))?;
log::info!("✅ Handled answer, WebRTC handshake complete!");
Ok(())
}
pub fn is_webrtc_supported() -> bool {
web_sys::window()
.and_then(|w| w.navigator().media_devices().ok())
.is_some()
}
pub async fn request_microphone_access(&mut self) -> Result<MediaStream, String> {
if !Self::is_webrtc_supported() {
self.state = MediaState::NotSupported;
return Err("WebRTC not supported".into());
}
self.state = MediaState::Requesting;
let navigator = web_sys::window()
.ok_or("No window")?
.navigator();
let devices = navigator
.media_devices()
.map_err(|_| "MediaDevices not available")?;
let constraints = MediaStreamConstraints::new();
constraints.set_audio(&JsValue::from(true));
constraints.set_video(&JsValue::from(false));
let js_stream = JsFuture::from(devices.get_user_media_with_constraints(&constraints)
.map_err(|e| format!("getUserMedia error: {:?}", e))?)
.await
.map_err(|e| format!("getUserMedia promise rejected: {:?}", e))?;
let stream: MediaStream = js_stream
.dyn_into()
.map_err(|_| "Failed to cast to MediaStream")?;
self.state = MediaState::Granted(stream.clone());
Ok(stream)
}
pub fn stop_stream(&mut self) {
if let MediaState::Granted(stream) = &self.state {
let tracks = stream.get_tracks(); // js_sys::Array
for i in 0..tracks.length() {
// 1. JsValue holen
let js_val = tracks.get(i);
// 2. In MediaStreamTrack casten
let track: web_sys::MediaStreamTrack = js_val
.dyn_into()
.expect("Expected MediaStreamTrack");
// 3. Stoppen
track.stop();
log::info!("\u{1F6D1} Track gestoppt: {}", track.label());
}
self.state = MediaState::Uninitialized;
log::info!("\u{1F6D1} MediaStream gestoppt.");
}
}
/// Fügt alle Tracks eines `MediaStream` zur angegebenen `RtcPeerConnection` hinzu.
///
/// In WebRTC sollten Tracks einzeln mit `addTrack` hinzugefügt werden. Diese
/// Hilfsfunktion iteriert über alle Tracks des Streams und fügt sie hinzu.
pub fn add_stream_to_pc(pc: &RtcPeerConnection, stream: &MediaStream) -> Result<(), String> {
let tracks = stream.get_tracks();
for i in 0..tracks.length() {
let js_val = tracks.get(i);
let track: web_sys::MediaStreamTrack = js_val
.dyn_into()
.map_err(|_| "Failed to cast to MediaStreamTrack")?;
// add_track nimmt (track, stream) in JS. In web-sys gibt add_track einen
// RtcRtpSender zurück; wir ignorieren den Rückgabewert hier.
// `addTrack` ist in manchen web-sys-Versionen nicht direkt verfügbar.
// Wir rufen die JS-Funktion dynamisch auf: pc.addTrack(track, stream)
let add_fn = js_sys::Reflect::get(pc.as_ref(), &JsValue::from_str("addTrack")).map_err(|_| "Failed to get addTrack function".to_string())?;
let func: js_sys::Function = add_fn.dyn_into().map_err(|_| "addTrack is not a function".to_string())?;
let _ = func.call2(pc.as_ref(), &JsValue::from(track.clone()), &JsValue::from(stream.clone()));
log::info!("\u{2705} Track hinzugefügt: {}", track.label());
}
Ok(())
}
/// Fügt einen empfangenen ICE-Candidate zur PeerConnection hinzu.
/// `candidate_json` ist ein JSON-String mit Feldern: candidate, sdpMid, sdpMLineIndex
pub fn add_ice_candidate(pc: &RtcPeerConnection, candidate_json: &str) -> Result<(), String> {
// Parse the JSON into a JsValue
// Ignore empty candidate payloads (some browsers send a final empty candidate)
if candidate_json.trim().is_empty() {
log::info!("🔇 Ignoring empty ICE candidate payload");
return Ok(());
}
let js_val = js_sys::JSON::parse(candidate_json)
.map_err(|e| format!("Failed to parse candidate JSON: {:?}", e))?;
// Prepare a plain JS object: { candidate, sdpMid, sdpMLineIndex }
let obj = js_sys::Object::new();
if let Ok(candidate) = js_sys::Reflect::get(&js_val, &JsValue::from_str("candidate")) {
let _ = js_sys::Reflect::set(&obj, &JsValue::from_str("candidate"), &candidate);
}
if let Ok(sdp_mid) = js_sys::Reflect::get(&js_val, &JsValue::from_str("sdpMid")) {
let _ = js_sys::Reflect::set(&obj, &JsValue::from_str("sdpMid"), &sdp_mid);
}
if let Ok(idx) = js_sys::Reflect::get(&js_val, &JsValue::from_str("sdpMLineIndex")) {
let _ = js_sys::Reflect::set(&obj, &JsValue::from_str("sdpMLineIndex"), &idx);
}
// Call pc.addIceCandidate(obj) dynamically
let add_fn = js_sys::Reflect::get(pc.as_ref(), &JsValue::from_str("addIceCandidate"))
.map_err(|_| "Failed to get addIceCandidate function".to_string())?;
let func: js_sys::Function = add_fn.dyn_into().map_err(|_| "addIceCandidate is not a function".to_string())?;
let _ = func.call1(pc.as_ref(), &obj);
Ok(())
}
}