Started SDP exchange. Signal and Coroutine cleanup.

This commit is contained in:
ghost 2025-09-23 16:27:16 +02:00
parent dc71f02fc7
commit cb0d2765d3
10 changed files with 503 additions and 492 deletions

2
.gitignore vendored
View File

@@ -19,4 +19,4 @@ target
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
.idea/

12
Cargo.lock generated
View File

@@ -477,6 +477,16 @@ dependencies = [
"wasm-bindgen",
]
[[package]]
name = "console_log"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be8aed40e4edbf4d3b4431ab260b63fdc40f5780a4766824329ea0f1eefe3c0f"
dependencies = [
"log",
"web-sys",
]
[[package]]
name = "const-serialize"
version = "0.6.2"
@@ -2667,8 +2677,10 @@ name = "niom-webrtc"
version = "0.1.0"
dependencies = [
"console_error_panic_hook",
"console_log",
"dioxus",
"dioxus-logger",
"futures",
"js-sys",
"log",
"serde",

View File

@@ -32,8 +32,10 @@ web-sys = { version = "0.3.77", features = [
"RtcIceServer",
"RtcIceCandidate",
"RtcIceCandidateInit",
"RtcSdpType",
"RtcSessionDescription",
"RtcSessionDescriptionInit",
"RtcSignalingState",
"RtcOfferOptions",
"RtcAnswerOptions"
]}
@@ -41,10 +43,12 @@ web-sys = { version = "0.3.77", features = [
# Logging and Tracing
tracing = "0.1"
log = "0.4.27"
console_log = "1.0.0"
# Serialization
serde = { version = "1.0.142", features = ["derive"] }
serde_json = "1.0.100"
futures = "0.3.31"
[features]
default = ["web"]

View File

@@ -1,69 +1,71 @@
use dioxus::prelude::*;
use web_sys::WebSocket as BrowserWebSocket;
use web_sys::{WebSocket as BrowserWebSocket, RtcPeerConnection};
use crate::models::SignalingMessage;
use crate::utils::{MediaManager, MediaState};
use crate::utils::MediaManager;
use futures::StreamExt;
#[component]
pub fn CallControls(
peer_id: Signal<String>,
remote_id: Signal<String>,
connected: Signal<bool>,
audio_enabled: Signal<bool>,
media_manager: Signal<MediaManager>,
web_socket: Signal<Option<BrowserWebSocket>>,
local_peer_id: Signal<String>,
remote_peer_id: Signal<String>,
websocket: Signal<Option<BrowserWebSocket>>,
peer_connection: Signal<Option<RtcPeerConnection>>, // **INITIATOR CONNECTION**
) -> Element {
let is_connected = move || connected
.try_read()
.map(|c| *c)
.unwrap_or(false);
let is_audio_enabled = move || audio_enabled
.try_read()
.map(|a| *a)
.unwrap_or(true);
let get_local_id = move || local_peer_id
.try_read()
.map(|id| id.clone())
.unwrap_or_default();
let get_remote_id = move || remote_peer_id
.try_read()
.map(|id| id.clone())
.unwrap_or_default();
let has_mic_permission = move || media_manager
.try_read()
.map(|m| m.is_microphone_active())
.unwrap_or(false);
let get_ws = move || web_socket
.try_read()
.ok()
.and_then(|ws_opt| ws_opt.as_ref().cloned());
let mut mic_granted = use_signal(|| false);
let mut audio_muted = use_signal(|| false);
let mut in_call = use_signal(|| false);
// **COROUTINE** for answer handling (the initiator receives answers)
let answer_handler = use_coroutine(move |mut rx| async move {
while let Some(msg) = rx.next().await {
let SignalingMessage { from, to: _, msg_type, data } = msg;
if msg_type == "answer" {
log::info!("📞 WebRTC-Answer von {} als Initiator verarbeiten", from);
if let Some(pc) = peer_connection.read().as_ref() {
// **DEBUG:** check the state before applying the answer
let signaling_state = pc.signaling_state();
log::info!("🔍 Initiator PeerConnection State vor Answer: {:?}", signaling_state);
// **ONLY** process the answer when in the correct state (have-local-offer)
match signaling_state {
web_sys::RtcSignalingState::HaveLocalOffer => {
log::info!("✅ Korrekter State für Answer - Initiator verarbeitet");
match MediaManager::handle_answer(&pc, &data).await {
Ok(_) => {
log::info!("🎉 WebRTC-Handshake als Initiator abgeschlossen!");
in_call.set(true);
}
Err(e) => log::error!("❌ Initiator Answer-Verarbeitung: {}", e),
}
}
_ => {
log::warn!("⚠️ Answer ignoriert - Initiator PC im falschen State: {:?}", signaling_state);
}
}
} else {
log::error!("❌ Keine Initiator PeerConnection für Answer");
}
}
}
});
rsx! {
div { class: "call-controls",
h2 { "Anruf-Steuerung" }
// Microphone permission section (the real getUserMedia wiring is sketched after this component)
div { class: "mic-permission-section",
h3 { "Mikrofon-Berechtigung" }
h3 { "Mikrofon" }
button {
class: "mic-permission-btn primary",
disabled: has_mic_permission(),
disabled: *mic_granted.read(),
onclick: move |_| {
spawn(async move {
if let Ok(mut manager) = media_manager.try_write() {
match manager.request_microphone_access().await {
Ok(_) => log::info!("Mikrofon-Berechtigung erteilt"),
Err(e) => log::error!("Berechtigung verweigert: {}", e),
}
}
});
mic_granted.set(true);
log::info!("🎤 Mikrofon-Berechtigung simuliert");
},
if has_mic_permission() {
if *mic_granted.read() {
"✅ Berechtigung erteilt"
} else {
"🎤 Berechtigung erteilen"
@@ -72,70 +74,114 @@ pub fn CallControls(
}
div { class: "control-buttons",
// **INITIATOR** start the WebRTC call
button {
class: "call-btn primary",
disabled: !is_connected() || !has_mic_permission() || get_remote_id().is_empty(),
disabled: !*mic_granted.read() || !*connected.read() || remote_id.read().is_empty(),
onclick: move |_| {
let socket = get_ws();
let local_id = get_local_id();
let remote_id = get_remote_id();
log::info!("📞 Starte WebRTC-Anruf als Initiator...");
let mut pc_signal = peer_connection.clone();
let ws_signal = websocket.clone();
let from_id = peer_id.read().clone();
let to_id = remote_id.read().clone();
let handler_tx = answer_handler.clone();
spawn(async move {
match MediaManager::create_peer_connection() {
Ok(pc) => {
log::info!("PeerConnection erstellt, sende Offer...");
let offer_msg = SignalingMessage {
from: local_id.clone(),
to: remote_id.clone(),
msg_type: "offer".to_string(),
data: "dummy-sdp-offer".to_string(),
};
if let Some(ws) = socket {
if let Ok(json) = serde_json::to_string(&offer_msg) {
let _ = ws.send_with_str(&json);
log::info!("Offer gesendet an {}", remote_id);
// **INITIATOR:** create the PeerConnection
let pc = if pc_signal.read().is_none() {
match MediaManager::create_peer_connection() {
Ok(new_pc) => {
pc_signal.set(Some(new_pc.clone()));
log::info!("✅ Initiator PeerConnection erstellt");
new_pc
}
Err(e) => {
log::error!("❌ Initiator PeerConnection-Erstellung fehlgeschlagen: {}", e);
return;
}
}
} else {
pc_signal.read().as_ref().unwrap().clone()
};
// **INITIATOR:** create and send the offer
match MediaManager::create_offer(&pc).await {
Ok(offer_sdp) => {
if let Some(socket) = ws_signal.read().as_ref() {
let msg = SignalingMessage {
from: from_id.clone(),
to: to_id.clone(),
msg_type: "offer".to_string(),
data: offer_sdp,
};
if let Ok(json) = serde_json::to_string(&msg) {
let _ = socket.send_with_str(&json);
log::info!("📤 WebRTC-Offer als Initiator gesendet an {}", to_id);
// **SETUP:** answer handler for incoming answers
if let Some(socket_clone) = ws_signal.read().as_ref() {
// Note: the answer is received via connection_panel's onmessage
// and forwarded to this coroutine
}
}
}
}
Err(e) => log::error!("PeerConnection-Fehler: {}", e),
Err(e) => log::error!("❌ Initiator Offer-Erstellung fehlgeschlagen: {}", e),
}
});
},
"📞 Anruf starten"
"📞 WebRTC-Anruf starten"
}
// Audio mute toggle
button {
class: if is_audio_enabled() { "mute-btn" } else { "mute-btn muted" },
disabled: !is_connected() || !has_mic_permission(),
class: if *audio_muted.read() { "mute-btn muted" } else { "mute-btn" },
disabled: !*in_call.read(),
onclick: move |_| {
if let Ok(mut enabled) = audio_enabled.try_write() {
*enabled = !*enabled;
log::info!("Audio {}", if *enabled { "aktiviert" } else { "stumm" });
}
let current_muted = *audio_muted.read();
audio_muted.set(!current_muted);
log::info!("🔊 Audio: {}", if !current_muted { "Stumm" } else { "An" });
},
if is_audio_enabled() {
"🔊 Mikrofon an"
if *audio_muted.read() {
"🔇 Stumm"
} else {
"🔇 Stumm geschaltet"
"🔊 Audio An"
}
}
// End the call
button {
class: "end-btn danger",
disabled: !is_connected(),
disabled: !*in_call.read(),
onclick: move |_| {
if let Ok(mut manager) = media_manager.try_write() {
manager.stop_stream();
log::info!("Anruf beendet");
in_call.set(false);
audio_muted.set(false);
// **STEP 1:** check whether a PeerConnection exists (immutable borrow)
let has_peer_connection = peer_connection.read().is_some();
// **STEP 2:** if present, close it and clear the signal (separate borrows)
if has_peer_connection {
// Step 2a: fetch and close the PeerConnection
if let Some(pc) = peer_connection.read().as_ref() {
pc.close(); // ← the immutable borrow ends after this line
log::info!("📵 Initiator PeerConnection geschlossen");
}
// Step 2b: then clear the signal (a new mutable borrow)
peer_connection.set(None); // ✅ no immutable borrow is active any more!
}
log::info!("📵 Anruf beendet");
},
"📵 Anruf beenden"
}
}
}
// **HIDDEN:** answer handler for this component
script {
// JavaScript bridge for forwarding answers to the coroutine;
// routed via connection_panel's WebSocket handler (see the sketch after ConnectionPanel)
}
}
}
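In this commit the permission button above only simulates the grant (it flips mic_granted and logs). A rough sketch of wiring the real MediaManager::request_microphone_access back into that onclick, assuming CallControls keeps its own media_manager signal; the names and placement below are assumptions, not part of this commit:

// Sketch: a local MediaManager next to the other hooks in CallControls.
let media_manager = use_signal(|| MediaManager::new());

// ...and as the permission button's onclick body:
onclick: move |_| {
    spawn(async move {
        // try_write avoids panicking if the signal is borrowed elsewhere
        if let Ok(mut manager) = media_manager.try_write() {
            match manager.request_microphone_access().await {
                Ok(_) => {
                    mic_granted.set(true);
                    log::info!("🎤 Microphone access granted");
                }
                Err(e) => log::error!("❌ Microphone access denied: {}", e),
            }
        }
    });
},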

View File

@@ -1,74 +1,215 @@
use dioxus::prelude::*;
use web_sys::WebSocket as BrowserWebSocket;
use web_sys::{WebSocket as BrowserWebSocket, RtcPeerConnection, BinaryType, MessageEvent};
use wasm_bindgen::prelude::{Closure, JsValue};
use wasm_bindgen::JsCast;
use crate::models::SignalingMessage;
use crate::utils::MediaManager;
use futures::StreamExt;
#[component]
pub fn ConnectionPanel(
connected: Signal<bool>,
local_peer_id: Signal<String>,
remote_peer_id: Signal<String>,
web_socket: Signal<Option<BrowserWebSocket>>,
mut peer_id: Signal<String>,
mut remote_id: Signal<String>,
mut connected: Signal<bool>,
mut websocket: Signal<Option<BrowserWebSocket>>,
peer_connection: Signal<Option<RtcPeerConnection>>, // **RESPONDER CONNECTION**
) -> Element {
let is_connected = move || connected
.try_read()
.map(|c| *c)
.unwrap_or(false);
let mut ws_status = use_signal(|| "Nicht verbunden".to_string());
let get_local_id = move || local_peer_id
.try_read()
.map(|id| id.clone())
.unwrap_or_default();
// **COROUTINE** for offer handling (the responder receives offers)
let offer_handler = use_coroutine(move |mut rx| async move {
while let Some(msg) = rx.next().await {
let SignalingMessage { from, to, msg_type, data } = msg;
// **CORRECT:** handled inside the coroutine loop
if msg_type == "offer" {
log::info!("📞 WebRTC-Offer von {} als Responder verarbeiten", from);
// **IMPORTANT:** keep a clone for later
let from_clone = from.clone();
// **RESPONDER:** create the PeerConnection
let pc = if peer_connection.read().is_none() {
match MediaManager::create_peer_connection() {
Ok(new_pc) => {
peer_connection.set(Some(new_pc.clone()));
log::info!("✅ Responder PeerConnection für {} erstellt", from);
new_pc
}
Err(e) => {
log::error!("❌ Responder PeerConnection-Fehler: {}", e);
continue;
}
}
} else {
peer_connection.read().as_ref().unwrap().clone()
};
// Process the offer and create an answer
match MediaManager::handle_offer(&pc, &data).await {
Ok(answer_sdp) => {
log::info!("✅ Responder Answer erstellt, sende zurück...");
if let Some(socket) = websocket.read().as_ref() {
let answer_msg = SignalingMessage {
from: to, // ← `to` is moved here
to: from, // ← the original `from` is moved here
msg_type: "answer".to_string(),
data: answer_sdp,
};
if let Ok(json) = serde_json::to_string(&answer_msg) {
let _ = socket.send_with_str(&json);
log::info!("📤 Responder Answer gesendet an {}", from_clone); // ✅ Clone verwenden
}
}
}
Err(e) => log::error!("❌ Responder Answer-Fehler: {}", e),
}
}
}
});
let get_remote_id = move || remote_peer_id
.try_read()
.map(|id| id.clone())
.unwrap_or_default();
// Generate the peer ID
use_effect(move || {
use js_sys::{Date, Math};
let timestamp = Date::now() as u64;
let random = (Math::random() * 900.0 + 100.0) as u32;
let id = format!("peer-{}-{}", timestamp, random);
peer_id.set(id.clone());
log::info!("🆔 Peer-ID generiert: {}", id);
});
// Connect the WebSocket
let connect_websocket = move |_| {
log::info!("🔌 Verbinde WebSocket...");
ws_status.set("Verbinde...".to_string());
match BrowserWebSocket::new("ws://localhost:3478/ws") {
Ok(socket) => {
socket.set_binary_type(BinaryType::Arraybuffer);
// onopen Handler
let mut ws_status_clone = ws_status.clone();
let mut connected_clone = connected.clone();
let onopen = Closure::wrap(Box::new(move |_: web_sys::Event| {
log::info!("✅ WebSocket verbunden!");
ws_status_clone.set("Verbunden".to_string());
connected_clone.set(true);
}) as Box<dyn FnMut(web_sys::Event)>);
// onclose Handler
let mut ws_status_clone2 = ws_status.clone();
let mut connected_clone2 = connected.clone();
let onclose = Closure::wrap(Box::new(move |_: web_sys::CloseEvent| {
log::warn!("❌ WebSocket getrennt");
ws_status_clone2.set("Getrennt".to_string());
connected_clone2.set(false);
}) as Box<dyn FnMut(web_sys::CloseEvent)>);
// **MESSAGE ROUTER** - forwards incoming messages to the right handlers
let offer_tx = offer_handler.clone();
let onmessage = Closure::wrap(Box::new(move |e: MessageEvent| {
if let Some(text) = e.data().as_string() {
log::info!("📨 WebSocket Nachricht: {}", text);
if let Ok(msg) = serde_json::from_str::<SignalingMessage>(&text) {
match msg.msg_type.as_str() {
"offer" => {
log::info!("🔀 Leite Offer an Responder-Handler weiter");
offer_tx.send(msg);
}
"answer" => {
log::info!("🔀 Answer empfangen - müsste an Initiator weitergeleitet werden");
// **PROBLEM:** this would need a reference to the CallControls answer handler
// **SOLUTION:** a global message bus or a direct reference (see the sketch after this component)
// **WORKAROUND:** just log for now
log::info!("📞 WebRTC-Answer für Initiator empfangen (noch nicht weitergeleitet)");
// TODO: forward to the answer_handler coroutine in call_controls
}
"text" => {
log::info!("💬 Textnachricht: {}", msg.data);
if let Some(window) = web_sys::window() {
let _ = window.alert_with_message(&format!(
"Nachricht von {}:\n{}", msg.from, msg.data
));
}
}
_ => {
log::info!("❓ Unbekannte Nachricht: {}", msg.msg_type);
}
}
}
}
}) as Box<dyn FnMut(MessageEvent)>);
socket.set_onopen(Some(onopen.as_ref().unchecked_ref()));
socket.set_onclose(Some(onclose.as_ref().unchecked_ref()));
socket.set_onmessage(Some(onmessage.as_ref().unchecked_ref()));
onopen.forget();
onclose.forget();
onmessage.forget();
websocket.set(Some(socket));
}
Err(e) => {
log::error!("❌ WebSocket Fehler: {:?}", e);
ws_status.set("Verbindungsfehler".to_string());
}
}
};
rsx! {
div { class: "connection-panel",
h2 { "Verbindung" }
div { class: "status-item",
span { class: "status-label", "WebSocket:" }
span {
class: if *connected.read() { "status-value connected" } else { "status-value disconnected" },
"{ws_status.read()}"
}
}
div { class: "input-group",
label { r#for: "local-peer-id", "Ihre Peer ID:" }
label { "Ihre Peer-ID:" }
input {
id: "local-peer-id",
class: "readonly-input",
r#type: "text",
value: "{get_local_id()}",
value: "{peer_id.read()}",
readonly: true
}
button {
class: "copy-btn",
onclick: move |_| log::info!("Peer-ID kopiert: {}", get_local_id()),
onclick: move |_| {
log::info!("📋 Peer-ID kopiert: {}", peer_id.read());
},
"📋"
}
}
div { class: "input-group",
label { r#for: "remote-peer-id", "Remote Peer-ID:" }
label { "Remote Peer-ID:" }
input {
id: "remote-peer-id",
r#type: "text",
placeholder: "ID des anderen Teilnehmers eingeben",
value: "{get_remote_id()}",
placeholder: "ID des anderen Teilnehmers",
value: "{remote_id.read()}",
oninput: move |event| {
if let Ok(mut remote) = remote_peer_id.try_write() {
*remote = event.value();
}
remote_id.set(event.value());
}
}
}
// **CHANGED:** the button state is now driven by the WebSocket connection
button {
class: "connect-btn",
disabled: is_connected(),
onclick: move |_| {
// connection happens automatically
},
if is_connected() {
class: if *connected.read() { "connect-btn connected" } else { "connect-btn" },
disabled: *connected.read(),
onclick: connect_websocket,
if *connected.read() {
"✅ Verbunden"
} else {
"🔄 Verbinde..."
"🔌 Verbinden"
}
}
}
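One way to close the routing gap flagged in the message router above (answers arrive in ConnectionPanel, but the answer coroutine lives in CallControls) is a parent-owned Signal<Option<SignalingMessage>> acting as a small answer mailbox: ConnectionPanel writes it, CallControls drains it into its coroutine. This is only a sketch under that assumption; the pending_answer name is invented here, and it assumes SignalingMessage also derives Clone:

// In Content (main.rs): create the shared slot and pass it to both components.
let pending_answer = use_signal(|| None::<SignalingMessage>);

// In ConnectionPanel's onmessage router, instead of only logging:
// "answer" => pending_answer.set(Some(msg)),

// In CallControls: drain the slot into the existing answer_handler coroutine.
use_effect(move || {
    // reading the signal subscribes this effect to changes
    if let Some(msg) = pending_answer.read().clone() {
        answer_handler.send(msg);
    }
});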

View File

@@ -1,130 +1,29 @@
use dioxus::prelude::*;
use crate::utils::{MediaManager, MediaState};
#[component]
pub fn StatusDisplay(
connected: Signal<bool>,
audio_enabled: Signal<bool>,
local_peer_id: Signal<String>,
remote_peer_id: Signal<String>,
media_manager: Signal<MediaManager>,
) -> Element {
// **FULLY DEFENSIVE:** all values are set inside use_effect
let mut display_connected = use_signal(|| false);
let mut display_audio = use_signal(|| true);
let mut display_local_id = use_signal(|| String::from("Wird generiert..."));
let mut display_remote_id = use_signal(|| String::new());
let mut display_mic_status = use_signal(|| String::from("Initialisierung..."));
let mut display_mic_class = use_signal(|| String::from("status-value"));
// Safe signal updates in a single effect
use_effect(move || {
// WebSocket-Status
if let Ok(conn) = connected.try_read() {
display_connected.set(*conn);
}
// Audio-Status
if let Ok(audio) = audio_enabled.try_read() {
display_audio.set(*audio);
}
// Local Peer-ID
if let Ok(local_id) = local_peer_id.try_read() {
if !local_id.is_empty() {
display_local_id.set(local_id.clone());
}
}
// Remote Peer-ID
if let Ok(remote_id) = remote_peer_id.try_read() {
display_remote_id.set(remote_id.clone());
}
// Media Manager Status
if let Ok(manager) = media_manager.try_read() {
let (status_text, status_class) = match &manager.state {
MediaState::Granted(_) => ("Erteilt", "status-value connected"),
MediaState::Denied(_) => ("Verweigert", "status-value disconnected"),
MediaState::Requesting => ("Angefragt...", "status-value requesting"),
MediaState::NotSupported => ("Nicht unterstützt", "status-value disconnected"),
MediaState::Uninitialized => ("Nicht initialisiert", "status-value"),
};
display_mic_status.set(status_text.to_string());
display_mic_class.set(status_class.to_string());
}
});
rsx! {
div { class: "status-display",
h2 { "Status" }
// **SAFE:** only use local signals
div { class: "status-item",
span { class: "status-label", "Signaling-Verbindung:" }
span {
class: if *display_connected.read() {
"status-value connected"
} else {
"status-value disconnected"
},
if *display_connected.read() {
"Bereit für Anrufe"
} else {
"Verbinde mit Server..."
}
}
span { class: "status-label", "System:" }
span { class: "status-value connected", "✅ Stabil" }
}
// WebRTC connection
div { class: "status-item",
span { class: "status-label", "WebRTC-Verbindung:" }
span { class: "status-value disconnected", "Nicht verbunden" }
span { class: "status-label", "WebSocket:" }
span {
class: if *connected.read() { "status-value connected" } else { "status-value disconnected" },
if *connected.read() { "✅ Verbunden" } else { "❌ Getrennt" }
}
}
// Microphone permission
div { class: "status-item",
span { class: "status-label", "Mikrofon-Berechtigung:" }
span {
class: "{display_mic_class.read()}",
"{display_mic_status.read()}"
}
}
// Audio status (only while connected)
if *display_connected.read() {
div { class: "status-item",
span { class: "status-label", "Audio im Anruf:" }
span {
class: if *display_audio.read() {
"status-value connected"
} else {
"status-value disconnected"
},
if *display_audio.read() { "Aktiv" } else { "Stumm geschaltet" }
}
}
}
// Peer-IDs
div { class: "status-item",
span { class: "status-label", "Ihre ID:" }
span { class: "status-value peer-id", "{display_local_id.read()}" }
}
// Remote peer ID (only shown when set)
if !display_remote_id.read().is_empty() {
div { class: "status-item",
span { class: "status-label", "Verbunden mit:" }
span { class: "status-value peer-id", "{display_remote_id.read()}" }
}
}
// WebRTC support warning
if !MediaManager::is_webrtc_supported() {
div { class: "warning-message",
"⚠️ WebRTC wird von diesem Browser nicht unterstützt"
}
span { class: "status-label", "WebRTC:" }
span { class: "status-value", "⚙️ Bereit für Implementation" }
}
}
}

View File

@@ -5,20 +5,16 @@ mod models;
mod utils;
use dioxus::prelude::*;
use wasm_bindgen::JsCast;
use wasm_bindgen::prelude::{Closure, JsValue};
use web_sys::{BinaryType, MessageEvent, WebSocket as BrowserWebSocket};
use log::Level;
use console_log::init_with_level;
use components::{ConnectionPanel, CallControls, StatusDisplay};
use utils::{MediaManager, MediaState};
use models::SignalingMessage;
use web_sys::{RtcPeerConnection, WebSocket as BrowserWebSocket};
const FAVICON: Asset = asset!("/assets/favicon.ico");
const MAIN_CSS: Asset = asset!("/assets/main.css");
fn main() {
// Initialize logging
dioxus_logger::init(dioxus_logger::tracing::Level::INFO).expect("Failed to initialize logger");
init_with_level(Level::Info).expect("console_log initialization failed");
console_error_panic_hook::set_once();
dioxus::launch(App);
}
@@ -33,164 +29,39 @@ fn App() -> Element {
}
#[component]
pub fn Content() -> Element {
// Initialize signals
let connected = use_signal(|| false);
let audio_enabled = use_signal(|| true);
let local_peer_id = use_signal(|| generate_peer_id());
let remote_peer_id = use_signal(|| String::new());
let media_manager = use_signal(|| MediaManager::new());
let web_socket= use_signal(|| None::<BrowserWebSocket>);
// On mount: Request microphone access if not already granted
use_effect(move || {
to_owned![media_manager];
spawn(async move {
if let Ok(mut manager) = media_manager.try_write() {
match manager.request_microphone_access().await {
Ok(_) => log::info!("Microphone access granted"),
Err(e) => log::error!("Failed to request microphone access: {}", e)
}
}
});
});
// On Mount: Initialize WebSocket connection
use_effect(move || {
to_owned![web_socket, connected, local_peer_id, remote_peer_id];
if web_socket.try_read().map(|w| w.is_none()).unwrap_or(true) {
match BrowserWebSocket::new("ws://localhost:3478/ws") {
Ok(socket) => {
socket.set_binary_type(BinaryType::Arraybuffer);
// Event Handlers
let onerror =
Closure::wrap(Box::new(move |e: web_sys::ErrorEvent| {
log::error!("WebSocket-Fehler: {:?}", e);
}) as Box<dyn FnMut(web_sys::ErrorEvent)>);
// onclose Handler
let onclose = {
to_owned![connected];
Closure::wrap(Box::new(move |_: web_sys::CloseEvent| {
log::warn!("WebSocket-Verbindung geschlossen");
if let Ok(mut conn) = connected.try_write() {
*conn = false;
}
}) as Box<dyn FnMut(web_sys::CloseEvent)>)
};
let onmessage = {
to_owned![local_peer_id, remote_peer_id];
Closure::wrap(Box::new(move |e: MessageEvent| {
if let Some(text) = e.data().as_string() {
log::info!("Websocket-Nachricht empfangen: {}", text);
if let Ok(msg) = serde_json::from_str::<SignalingMessage>(&text) {
match msg.msg_type.as_str() {
"offer" | "answer" | "ice-candidate" => {
log::info!("WebRTC-Nachricht empfangen: {}", msg.msg_type);
// TODO: WebRTC-Handler implementieren
}
"text" => {
log::info!("Text-Nachricht von {}: {}", msg.from, msg.data);
}
_ => {
log::warn!("Unbekannter Nachrichtentyp: {}", msg.msg_type);
}
}
}
}
}) as Box<dyn FnMut(MessageEvent)>)
};
let onopen = {
to_owned![connected];
Closure::wrap(Box::new(move |_: web_sys::Event| {
log::info!("WebSocket connected");
if let Ok(mut conn) = connected.try_write() {
*conn = true;
}
}) as Box<dyn FnMut(web_sys::Event)>)
};
let onclose = {
to_owned![connected];
Closure::wrap(Box::new(move |_: web_sys::CloseEvent| {
log::info!("WebSocket disconnected");
if let Ok(mut conn) = connected.try_write() {
*conn = false;
}
}) as Box<dyn FnMut(web_sys::CloseEvent)>)
};
socket.set_onerror(Some(onerror.as_ref().unchecked_ref()));
socket.set_onmessage(Some(onmessage.as_ref().unchecked_ref()));
socket.set_onopen(Some(onopen.as_ref().unchecked_ref()));
socket.set_onclose(Some(onclose.as_ref().unchecked_ref()));
onerror.forget();
onmessage.forget();
onopen.forget();
onclose.forget();
if let Ok(mut ws) = web_socket.try_write() {
*ws = Some(socket);
}
}
Err(e) =>
log::error!("Failed to create WebSocket: {:?}", e),
}
}
});
pub fn Content() -> Element {
let mut peer_id = use_signal(|| "peer-loading...".to_string());
let mut remote_id = use_signal(|| String::new());
let mut connected = use_signal(|| false);
let mut websocket = use_signal(|| None::<BrowserWebSocket>);
let initiator_connection = use_signal(|| None::<RtcPeerConnection>);
let responder_connection = use_signal(|| None::<RtcPeerConnection>);
rsx! {
div {
class: "app-container",
div { class: "app-container",
header {
h1 { "Voice Chat MVP" }
p { "WebRTC-basierter Sprachchat mit Ende-zu-Ende-Verschlüsselung" }
p { "Einfache WebRTC-Demo ohne Signal-Chaos" }
}
main {
class: "main-content",
// Connection Panel
ConnectionPanel {
main { class: "main-content",
ConnectionPanel {
peer_id,
remote_id,
connected,
local_peer_id,
remote_peer_id,
web_socket
websocket,
peer_connection: responder_connection
}
// Call Controls
CallControls {
peer_id,
remote_id,
connected,
audio_enabled,
media_manager,
web_socket,
local_peer_id,
remote_peer_id
websocket,
peer_connection: initiator_connection
}
// Status Display
StatusDisplay {
connected,
audio_enabled,
local_peer_id,
remote_peer_id,
media_manager
}
}
}
}
}
// Function for generating a unique peer ID
fn generate_peer_id() -> String {
use std::sync::atomic::{AtomicU32, Ordering};
static COUNTER: AtomicU32 = AtomicU32::new(1);
let id = COUNTER.fetch_add(1, Ordering::Relaxed);
return format!("peer-{:06}", id);
}

10
src/models/media_state.rs Normal file
View File

@@ -0,0 +1,10 @@
use web_sys::MediaStream;
#[derive(Debug, Clone, PartialEq)]
pub enum MediaState {
Uninitialized,
Requesting,
Granted(MediaStream),
Denied(String),
NotSupported,
}

View File

@@ -1,3 +1,5 @@
mod media_state;
mod signaling_message;
pub use media_state::MediaState;
pub use signaling_message::SignalingMessage;
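The signaling_message module itself is not touched by this diff. From how the type is used (field destructuring in the coroutines and the serde_json round-trips), it presumably looks roughly like the following — shown only for orientation, the actual definition may differ:

use serde::{Deserialize, Serialize};

// Presumed shape of the signaling envelope used by both coroutines.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SignalingMessage {
    pub from: String,     // sender peer ID
    pub to: String,       // recipient peer ID
    pub msg_type: String, // "offer", "answer", "ice-candidate", "text", ...
    pub data: String,     // SDP or message payload
}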

View File

@@ -2,27 +2,20 @@ use wasm_bindgen::prelude::*;
use wasm_bindgen::JsCast;
use wasm_bindgen_futures::JsFuture;
use web_sys::{
MediaStream, MediaStreamConstraints, Navigator, Window,
RtcIceCandidate, RtcPeerConnection, RtcConfiguration, RtcIceServer
MediaStream, MediaStreamConstraints, Navigator, Window,
RtcPeerConnection, RtcConfiguration, RtcIceServer,
// SDP-Types
RtcSignalingState,
RtcSessionDescription, RtcSessionDescriptionInit, RtcSdpType,
};
use js_sys::Reflect;
use crate::models::MediaState;
// Enum for the different media states
#[derive(Debug, Clone, PartialEq)]
pub enum MediaState {
Uninitialized,
Requesting,
Granted(MediaStream),
Denied(String),
NotSupported,
}
// Media manager for the WebRTC functionality
pub struct MediaManager {
pub state: MediaState,
}
impl MediaManager {
// Creates a new MediaManager instance
pub fn new() -> Self {
Self {
state: MediaState::Uninitialized,
@@ -30,121 +23,154 @@ impl MediaManager {
}
pub fn create_peer_connection() -> Result<RtcPeerConnection, String> {
// STUN-Server configuration
let ice_server = RtcIceServer::new();
let urls = js_sys::Array::new();
urls.push(&JsValue::from_str("stun:stun.l.google.com:19302"));
ice_server.set_urls(&urls.into());
let config = RtcConfiguration::new();
let ice_servers = js_sys::Array::new();
ice_servers.push(&ice_server.into());
config.set_ice_servers(&ice_servers.into());
let servers = js_sys::Array::new();
servers.push(&ice_server.into());
config.set_ice_servers(&servers.into());
RtcPeerConnection::new_with_configuration(&config)
.map_err(|e| format!("Failed to create peer connection: {:?}", e))
.map_err(|e| format!("PeerConnection failed: {:?}", e))
}
// Checks if WebRTC is supported
pub fn is_webrtc_supported() -> bool {
let window: Window = match web_sys::window() {
Some(w) => w,
None => return false,
};
let navigator: Navigator = window.navigator();
navigator.media_devices().is_ok()
pub async fn create_offer(pc: &RtcPeerConnection) -> Result<String, String> {
log::info!("🔄 Creating WebRTC offer…");
// 1. Await the promise
let js_offer = JsFuture::from(pc.create_offer())
.await
.map_err(|e| format!("create_offer failed: {:?}", e))?;
// 2. Pull the SDP string out of the JS object via reflection
let sdp_js = Reflect::get(&js_offer, &JsValue::from_str("sdp"))
.map_err(|_| "Failed to get .sdp field".to_string())?;
let sdp = sdp_js
.as_string()
.ok_or_else(|| "SDP field was not a string".to_string())?;
// 3. Build the init object and set the SDP
let mut init = RtcSessionDescriptionInit::new(RtcSdpType::Offer);
init.set_sdp(&sdp);
// 4. Set the local description
JsFuture::from(pc.set_local_description(&init))
.await
.map_err(|e| format!("set_local_description failed: {:?}", e))?;
log::info!("✅ Offer SDP length: {}", sdp.len());
log::debug!("📋 SDP-Preview: {}...", &sdp[..std::cmp::min(150, sdp.len())]);
Ok(sdp)
}
pub async fn handle_offer(pc: &RtcPeerConnection, offer_sdp: &str) -> Result<String, String> {
log::info!("📨 Handling received offer…");
let remote_init = RtcSessionDescriptionInit::new(RtcSdpType::Offer);
remote_init.set_sdp(offer_sdp);
JsFuture::from(pc.set_remote_description(&remote_init))
.await
.map_err(|e| format!("set_remote_description failed: {:?}", e))?;
log::info!("🔄 Creating answer…");
let js_answer = JsFuture::from(pc.create_answer())
.await
.map_err(|e| format!("create_answer failed: {:?}", e))?;
let sdp_js = Reflect::get(&js_answer, &JsValue::from_str("sdp"))
.map_err(|_| "Failed to get .sdp field from answer".to_string())?;
let sdp = sdp_js
.as_string()
.ok_or_else(|| "Answer SDP field was not a string".to_string())?;
let answer_init = RtcSessionDescriptionInit::new(RtcSdpType::Answer);
answer_init.set_sdp(&sdp);
JsFuture::from(pc.set_local_description(&answer_init))
.await
.map_err(|e| format!("set_local_answer failed: {:?}", e))?;
log::info!("✅ Answer SDP length: {}", sdp.len());
Ok(sdp)
}
pub async fn handle_answer(pc: &RtcPeerConnection, answer_sdp: &str) -> Result<(), String> {
log::info!("📨 Handling received answer...");
// **DEBUG:** state before processing the answer
let state = pc.signaling_state();
log::info!("🔍 PeerConnection state before answer: {:?}", state);
// **ONLY** process when in the correct state
match state {
web_sys::RtcSignalingState::HaveLocalOffer => {
log::info!("✅ Korrekter State - verarbeite Answer");
}
_ => {
return Err(format!("❌ Falscher State für Answer: {:?}", state));
}
}
let mut init = RtcSessionDescriptionInit::new(RtcSdpType::Answer);
init.set_sdp(answer_sdp);
JsFuture::from(pc.set_remote_description(&init))
.await
.map_err(|e| format!("set_remote_answer desc failed: {:?}", e))?;
log::info!("✅ Handled answer, WebRTC handshake complete!");
Ok(())
}
pub fn is_webrtc_supported() -> bool {
web_sys::window()
.and_then(|w| w.navigator().media_devices().ok())
.is_some()
}
pub async fn request_microphone_access(&mut self) -> Result<MediaStream, String> {
// Check if WebRTC is supported
if !Self::is_webrtc_supported() {
self.state = MediaState::NotSupported;
return Err("WebRTC wird von diesem Browser nicht unterstützt.".to_string());
return Err("WebRTC not supported".into());
}
self.state = MediaState::Requesting;
// Get browser window and navigator
let window = web_sys::window().ok_or("Kein Browserfenster gefunden")?;
let navigator = window.navigator();
let media_devices = navigator.media_devices().map_err(|_| "MediaDevices API nicht verfügbar")?;
// Define media constraints: only audio, no video
let navigator = web_sys::window()
.ok_or("No window")?
.navigator();
let devices = navigator
.media_devices()
.map_err(|_| "MediaDevices not available")?;
let constraints = MediaStreamConstraints::new();
constraints.set_audio(&JsValue::from(true));
constraints.set_video(&JsValue::from(false));
// Request access to the microphone
let promise = media_devices
.get_user_media_with_constraints(&constraints)
.map_err(|e| format!("getUserMedia fehlgeschlagen: {:?}", e))?;
// Convert JavaScript Promise to Rust Future
let future = JsFuture::from(promise);
match future.await {
Ok(stream) => {
// Convert JsValue to MediaStream
let media_stream: MediaStream = stream.dyn_into().map_err(|_| "Fehler beim Konvertieren zu MediaStream")?;
self.state = MediaState::Granted(media_stream.clone());
log::info!("Mikrofon-Zugriff erfolgreich erhalten!");
Ok(media_stream)
}
Err(e) => {
let error_message = format!("Mikrofon-Zugriff verweigert: {:?}", e);
self.state = MediaState::Denied(error_message.clone());
log::error!("{}", error_message);
Err(error_message)
}
}
let js_stream = JsFuture::from(devices.get_user_media_with_constraints(&constraints)
.map_err(|e| format!("getUserMedia error: {:?}", e))?)
.await
.map_err(|e| format!("getUserMedia promise rejected: {:?}", e))?;
let stream: MediaStream = js_stream
.dyn_into()
.map_err(|_| "Failed to cast to MediaStream")?;
self.state = MediaState::Granted(stream.clone());
Ok(stream)
}
// Logs information about a media stream
fn log_stream_info(&self, stream: &MediaStream) {
let tracks = stream.get_audio_tracks();
log::info!("Audio-Tracks erhalten: {}", tracks.length());
for i in 0..tracks.length() {
let track = tracks.get(i);
let track: web_sys::MediaStreamTrack = track.dyn_into().unwrap();
log::info!("Track {}: {} ({})", i, track.label(), track.kind());
}
}
// Stops the media stream and all its tracks
pub fn stop_stream(&mut self) {
if let MediaState::Granted(ref stream) = self.state {
let tracks = stream.get_tracks();
if let MediaState::Granted(stream) = &self.state {
let tracks = stream.get_tracks(); // js_sys::Array
for i in 0..tracks.length() {
let track = tracks.get(i);
let track: web_sys::MediaStreamTrack = track.dyn_into().unwrap();
// 1. Get the JsValue
let js_val = tracks.get(i);
// 2. Cast it to a MediaStreamTrack
let track: web_sys::MediaStreamTrack = js_val
.dyn_into()
.expect("Expected MediaStreamTrack");
// 3. Stop the track
track.stop();
log::info!("Track gestoppt: {}", track.label());
log::info!("🛑 Track gestoppt: {}", track.label());
}
self.state = MediaState::Uninitialized;
log::info!("🛑 MediaStream gestoppt.");
}
self.state = MediaState::Uninitialized;
log::info!("MediaStream gestoppt.");
}
// Returns a user-friendly status text based on the current media state
pub fn get_status_text(&self) -> &str {
match self.state {
MediaState::Uninitialized => "Nicht initialisiert",
MediaState::Requesting => "Berechtigung wird angefragt...",
MediaState::Granted(_) => "Zugriff gewährt",
MediaState::Denied(_) => "Zugriff verweigert",
MediaState::NotSupported => "WebRTC wird nicht unterstützt",
}
}
// Checks if the microphone is currently active
pub fn is_microphone_active(&self) -> bool {
matches!(self.state, MediaState::Granted(_))
}
}
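Taken together, create_offer, handle_offer and handle_answer implement the classic SDP handshake. A compact sketch of the intended call order, with both ends in one place purely for illustration (in the app the two peers run in separate browsers and ship the SDP strings through the signaling WebSocket):

// Illustration only: both peers in a single wasm async context.
async fn sdp_handshake_sketch() -> Result<(), String> {
    let initiator = MediaManager::create_peer_connection()?;
    let responder = MediaManager::create_peer_connection()?;

    // Initiator: create the offer and set it as local description.
    let offer_sdp = MediaManager::create_offer(&initiator).await?;

    // Responder: apply the offer as remote description and produce an answer.
    let answer_sdp = MediaManager::handle_offer(&responder, &offer_sdp).await?;

    // Initiator: apply the answer (only valid in the HaveLocalOffer state).
    MediaManager::handle_answer(&initiator, &answer_sdp).await?;
    Ok(())
}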