diff --git a/nokhwa-core/src/format_filter.rs b/nokhwa-core/src/format_filter.rs
index f8a1279..8be64d4 100644
--- a/nokhwa-core/src/format_filter.rs
+++ b/nokhwa-core/src/format_filter.rs
@@ -146,31 +146,31 @@ fn format_fulfill(
     match filter.filter_pref {
         RequestedFormatType::AbsoluteHighestResolution => {
-            let mut sources = sources.collect::<Vec<CameraFormat>>();
+            let mut sources = sources.collect::<Vec<&CameraFormat>>();
             sources.sort_by(|a, b| a.resolution().cmp(&b.resolution()));
-            sources.last().map(|x| *x)
+            sources.last().copied().copied()
         }
         RequestedFormatType::AbsoluteHighestFrameRate => {
-            let mut sources = sources.collect::<Vec<CameraFormat>>();
+            let mut sources = sources.collect::<Vec<&CameraFormat>>();
             sources.sort_by(|a, b| a.frame_rate().cmp(&b.frame_rate()));
-            sources.last().map(|x| *x)
+            sources.last().copied().copied()
         }
         RequestedFormatType::HighestResolution(filter_fps) => {
             let mut sources = sources
                 .filter(|format| format.frame_rate() == filter_fps)
-                .collect::<Vec<CameraFormat>>();
+                .collect::<Vec<&CameraFormat>>();
             sources.sort();
-            sources.last().map(|x| *x)
+            sources.last().copied().copied()
         }
         RequestedFormatType::HighestFrameRate(filter_res) => {
             let mut sources = sources
                 .filter(|format| format.resolution() == filter_res)
-                .collect::<Vec<CameraFormat>>();
+                .collect::<Vec<&CameraFormat>>();
             sources.sort();
-            sources.last().map(|x| *x)
+            sources.last().copied().copied()
         }
         RequestedFormatType::Exact(exact) => {
-            sources.filter(|format| format == exact).last().map(|x| *x)
+            sources.filter(|format| format == &&exact).last().copied()
         }
         RequestedFormatType::Closest(closest) => {
             let mut sources = sources
@@ -180,7 +180,7 @@ fn format_fulfill(
                 })
                 .collect::<Vec<(f64, CameraFormat)>>();
             sources.sort_by(|a, b| a.0.total_cmp(&b.0));
-            sources.first().map(|x| *x)
+            sources.first().copied().map(|(_, cf)| cf)
         }
         RequestedFormatType::ClosestGreater(closest) => {
             let mut sources = sources
@@ -194,7 +194,7 @@ fn format_fulfill(
                 })
                 .collect::<Vec<(f64, CameraFormat)>>();
             sources.sort_by(|a, b| a.0.total_cmp(&b.0));
-            sources.first().map(|x| *x)
+            sources.first().copied().map(|(_, cf)| cf)
         }
         RequestedFormatType::ClosestLess(closest) => {
             let mut sources = sources
@@ -208,7 +208,7 @@ fn format_fulfill(
                 })
                 .collect::<Vec<(f64, CameraFormat)>>();
             sources.sort_by(|a, b| a.0.total_cmp(&b.0));
-            sources.first().map(|x| *x)
+            sources.first().copied().map(|(_, cf)| cf)
         }
         RequestedFormatType::None => sources.nth(0).map(|x| *x),
     }
@@ -223,5 +223,5 @@ fn distance_3d_camerafmt_relative(a: CameraFormat, b: CameraFormat) -> f64 {
     let x = res_x_diff.pow(2) as f64;
     let y = res_y_diff.pow(2) as f64;
     let z = fps_diff.pow(2) as f64;
-    (x + y + z)
+    x + y + z
 }
diff --git a/nokhwa-core/src/frame_format.rs b/nokhwa-core/src/frame_format.rs
index 404d714..50dc2b5 100644
--- a/nokhwa-core/src/frame_format.rs
+++ b/nokhwa-core/src/frame_format.rs
@@ -79,8 +79,6 @@ impl FrameFormat {
         FrameFormat::Nv12,
         FrameFormat::Nv21,
         FrameFormat::Yv12,
-        FrameFormat::Imc2,
-        FrameFormat::Imc4,
         FrameFormat::Luma8,
         FrameFormat::Rgb8,
         FrameFormat::RgbA8,
@@ -106,8 +104,6 @@ impl FrameFormat {
         FrameFormat::Nv12,
         FrameFormat::Nv21,
         FrameFormat::Yv12,
-        FrameFormat::Imc2,
-        FrameFormat::Imc4,
     ];
 
     pub const LUMA: &'static [FrameFormat] = &[FrameFormat::Luma8];
@@ -164,12 +160,6 @@ impl PartialEq<(ApiBackend, u128)> for PlatformFrameFormat {
     }
 }
 
-impl AsRef<(ApiBackend, u128)> for PlatformFrameFormat {
-    fn as_ref(&self) -> &(ApiBackend, u128) {
-        &self.as_tuple()
-    }
-}
-
 impl Display for PlatformFrameFormat {
     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
         write!(f, "{self:?}")
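Note: the Closest* arms above rank candidate formats by a squared 3D distance over (width, height, frame rate), as computed by distance_3d_camerafmt_relative. A minimal standalone sketch of that metric, using plain (width, height, fps) tuples in place of CameraFormat/Resolution (hypothetical helper, not taken from the patch):

    // Squared distance between two (width, height, fps) triples; smaller means a closer match.
    fn distance_sq(a: (u32, u32, u32), b: (u32, u32, u32)) -> f64 {
        let dx = i64::from(a.0) - i64::from(b.0);
        let dy = i64::from(a.1) - i64::from(b.1);
        let dz = i64::from(a.2) - i64::from(b.2);
        (dx.pow(2) + dy.pow(2) + dz.pow(2)) as f64
    }

For example, distance_sq((640, 480, 30), (640, 480, 60)) is 900.0, so a 640x480@30 candidate ranks behind an exact match when Closest(640x480@60) is requested.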
diff --git a/nokhwa-core/src/traits.rs b/nokhwa-core/src/traits.rs
index 335ff15..3344dd0 100644
--- a/nokhwa-core/src/traits.rs
+++ b/nokhwa-core/src/traits.rs
@@ -18,7 +18,7 @@ use crate::{
     buffer::Buffer,
     error::NokhwaError,
     format_filter::FormatFilter,
-    frame_format::{FrameFormat, SourceFrameFormat},
+    frame_format::SourceFrameFormat,
     types::{
         ApiBackend, CameraControl, CameraFormat, CameraInfo, ControlValueSetter,
         KnownCameraControl, Resolution,
@@ -126,7 +126,7 @@ pub trait CaptureTrait {
     /// This will also update the cache.
     /// # Errors
     /// If you started the stream and the camera rejects the new frame format, this will return an error.
-    fn set_frame_format(&mut self, fourcc: impl Into<SourceFrameFormat>)
+    fn set_frame_format(&mut self, fourcc: SourceFrameFormat)
         -> Result<(), NokhwaError>;
 
     /// Gets the value of [`KnownCameraControl`].
@@ -294,7 +294,7 @@ pub trait AsyncCaptureTrait: CaptureTrait {
     /// If you started the stream and the camera rejects the new frame format, this will return an error.
     async fn set_frame_format(
         &mut self,
-        fourcc: impl Into<SourceFrameFormat>,
+        fourcc: SourceFrameFormat,
     ) -> Result<(), NokhwaError>;
 
     /// Sets the control to `control` in the camera.
@@ -332,6 +332,7 @@ pub trait AsyncCaptureTrait: CaptureTrait {
     async fn stop_stream(&mut self) -> Result<(), NokhwaError>;
 }
 
+#[cfg(feature = "async")]
 impl<T> From<T> for Box<dyn AsyncCaptureTrait>
 where
     T: AsyncCaptureTrait + 'static,
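Note: with the signature change above, callers hand set_frame_format a SourceFrameFormat value rather than relying on an implicit impl Into conversion. A minimal call-site sketch, assuming the FrameFormat -> SourceFrameFormat conversion implied by the FrameFormat::Yuv422.into() usage elsewhere in this patch, and a camera value implementing CaptureTrait:

    use nokhwa_core::{error::NokhwaError, frame_format::FrameFormat, traits::CaptureTrait};

    // Request a YUYV 4:2:2 stream; the explicit .into() replaces the old implicit conversion.
    fn force_yuyv(camera: &mut impl CaptureTrait) -> Result<(), NokhwaError> {
        camera.set_frame_format(FrameFormat::Yuv422.into())
    }

One side effect of the concrete parameter type is that .into() at the call site now has an unambiguous target, whereas the old impl Into parameter could not drive that inference.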
diff --git a/nokhwa-core/src/types.rs b/nokhwa-core/src/types.rs
index 6cf4472..2b398fa 100644
--- a/nokhwa-core/src/types.rs
+++ b/nokhwa-core/src/types.rs
@@ -1200,10 +1200,10 @@ impl Display for ApiBackend {
 #[cfg(all(feature = "mjpeg", not(target_arch = "wasm")))]
 #[cfg_attr(feature = "docs-features", doc(cfg(feature = "mjpeg")))]
 #[inline]
-fn decompress(
-    data: impl AsRef<[u8]>,
+fn decompress<'a>(
+    data: &'a [u8],
     rgba: bool,
-) -> Result<Decompress, NokhwaError> {
+) -> Result<Decompress<'a>, NokhwaError> {
     use mozjpeg::Decompress;
 
     match Decompress::new_mem(data) {
@@ -1244,8 +1244,6 @@ fn decompress(
 #[cfg_attr(feature = "docs-features", doc(cfg(feature = "mjpeg")))]
 #[inline]
 pub fn mjpeg_to_rgb(data: &[u8], rgba: bool) -> Result<Vec<u8>, NokhwaError> {
-    use mozjpeg::Decompress;
-
     let mut jpeg_decompress = decompress(data, rgba)?;
 
     let scanlines_res: Option<Vec<u8>> = jpeg_decompress.read_scanlines_flat();
@@ -1282,8 +1280,6 @@ pub fn mjpeg_to_rgb(_data: &[u8], _rgba: bool) -> Result<Vec<u8>, NokhwaError> {
 #[cfg_attr(feature = "docs-features", doc(cfg(feature = "mjpeg")))]
 #[inline]
 pub fn buf_mjpeg_to_rgb(data: &[u8], dest: &mut [u8], rgba: bool) -> Result<(), NokhwaError> {
-    use mozjpeg::Decompress;
-
     let mut jpeg_decompress = decompress(data, rgba)?;
 
     // assert_eq!(dest.len(), jpeg_decompress.min_flat_buffer_size());
@@ -1347,7 +1343,7 @@ pub fn buf_yuyv422_to_rgb(data: &[u8], dest: &mut [u8], rgba: bool) -> Result<()
     let mut buf: Vec<u8> = Vec::new();
     if data.len() % 4 != 0 {
         return Err(NokhwaError::ProcessFrameError {
-            src: FrameFormat::YUV422,
+            src: FrameFormat::Yuv422.into(),
             destination: "RGB888".to_string(),
             error: "Assertion failure, the YUV stream isn't 4:2:2! (wrong number of bytes)"
                 .to_string(),
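Note: the two MJPEG decode paths touched above differ only in buffer ownership. A minimal usage sketch, assuming the mjpeg feature is enabled, that these functions are reachable at the paths below, and that frame_bytes holds one MJPEG-compressed frame (rgba = false selects RGB888 output):

    use nokhwa_core::error::NokhwaError;
    use nokhwa_core::types::{buf_mjpeg_to_rgb, mjpeg_to_rgb};

    fn decode_frame(frame_bytes: &[u8]) -> Result<Vec<u8>, NokhwaError> {
        // Allocating path: returns a freshly allocated RGB buffer.
        let rgb = mjpeg_to_rgb(frame_bytes, false)?;
        // In-place path: decodes into a caller-provided buffer of matching size.
        let mut dest = vec![0u8; rgb.len()];
        buf_mjpeg_to_rgb(frame_bytes, &mut dest, false)?;
        Ok(rgb)
    }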
diff --git a/src/backends/capture/browser_camera.rs b/src/backends/capture/browser_camera.rs
index 2a47ea2..c449d66 100644
--- a/src/backends/capture/browser_camera.rs
+++ b/src/backends/capture/browser_camera.rs
@@ -1,4 +1,5 @@
 use async_trait::async_trait;
+use js_sys::Array;
 use nokhwa_core::buffer::Buffer;
 use nokhwa_core::error::NokhwaError;
 use nokhwa_core::format_filter::FormatFilter;
@@ -8,9 +9,166 @@ use nokhwa_core::types::{
     ApiBackend, CameraControl, CameraFormat, CameraIndex, CameraInfo, ControlValueSetter,
     KnownCameraControl, Resolution,
 };
+use wasm_bindgen_futures::JsFuture;
 use std::borrow::Cow;
 use std::collections::HashMap;
-use web_sys::{CanvasRenderingContext2d, OffscreenCanvas};
+use wasm_bindgen::{JsCast, JsValue};
+use web_sys::{
+    CanvasRenderingContext2d, Document, Element, MediaDevices, Navigator, OffscreenCanvas, Window, MediaStream, MediaStreamConstraints, HtmlCanvasElement, MediaDeviceInfo, MediaDeviceKind,
+};
+
+macro_rules! jsv {
+    ($value:expr) => {{
+        JsValue::from($value)
+    }};
+}
+
+macro_rules! obj {
+    ($(($key:expr, $value:expr)),+ ) => {{
+        use js_sys::{Map, Object};
+        use wasm_bindgen::JsValue;
+
+        let map = Map::new();
+        $(
+            map.set(&jsv!($key), &jsv!($value));
+        )+
+        Object::from(map)
+    }};
+    ($object:expr, $(($key:expr, $value:expr)),+ ) => {{
+        use js_sys::{Map, Object};
+        use wasm_bindgen::JsValue;
+
+        let map = Map::new();
+        $(
+            map.set(&jsv!($key), &jsv!($value));
+        )+
+        let o = Object::from(map);
+        Object::assign(&$object, &o)
+    }};
+}
+
+fn window() -> Result<Window, NokhwaError> {
+    match web_sys::window() {
+        Some(win) => Ok(win),
+        None => Err(NokhwaError::StructureError {
+            structure: "web_sys Window".to_string(),
+            error: "None".to_string(),
+        }),
+    }
+}
+
+fn media_devices(navigator: &Navigator) -> Result<MediaDevices, NokhwaError> {
+    match navigator.media_devices() {
+        Ok(media) => Ok(media),
+        Err(why) => Err(NokhwaError::StructureError {
+            structure: "MediaDevices".to_string(),
+            error: format!("{why:?}"),
+        }),
+    }
+}
+
+fn document(window: &Window) -> Result<Document, NokhwaError> {
+    match window.document() {
+        Some(doc) => Ok(doc),
+        None => Err(NokhwaError::StructureError {
+            structure: "web_sys Document".to_string(),
+            error: "None".to_string(),
+        }),
+    }
+}
+
+fn document_select_elem(doc: &Document, element: &str) -> Result<Element, NokhwaError> {
+    match doc.get_element_by_id(element) {
+        Some(elem) => Ok(elem),
+        None => {
+            return Err(NokhwaError::StructureError {
+                structure: format!("Document {element}"),
+                error: "None".to_string(),
+            })
+        }
+    }
+}
+
+fn element_cast<T: JsCast, U: JsCast>(from: T, name: &str) -> Result<U, NokhwaError> {
+    if !from.has_type::<U>() {
+        return Err(NokhwaError::StructureError {
+            structure: name.to_string(),
+            error: "Cannot Cast - No Subtype".to_string(),
+        });
+    }
+
+    let casted = match from.dyn_into::<U>() {
+        Ok(cast) => cast,
+        Err(_) => {
+            return Err(NokhwaError::StructureError {
+                structure: name.to_string(),
+                error: "Casting Error".to_string(),
+            });
+        }
+    };
+    Ok(casted)
+}
+
+fn element_cast_ref<'a, T: JsCast, U: JsCast>(
+    from: &'a T,
+    name: &'a str,
+) -> Result<&'a U, NokhwaError> {
+    if !from.has_type::<U>() {
+        return Err(NokhwaError::StructureError {
+            structure: name.to_string(),
+            error: "Cannot Cast - No Subtype".to_string(),
+        });
+    }
+
+    match from.dyn_ref::<U>() {
+        Some(v_e) => Ok(v_e),
+        None => Err(NokhwaError::StructureError {
+            structure: name.to_string(),
+            error: "Cannot Cast".to_string(),
+        }),
+    }
+}
+
+fn create_element(doc: &Document, element: &str) -> Result<Element, NokhwaError> {
+    match Document::create_element(doc, element) {
+        // ???? thank you intellij
+        Ok(new_element) => Ok(new_element),
+        Err(why) => Err(NokhwaError::StructureError {
+            structure: "Document Video Element".to_string(),
+            error: format!("{:?}", why.as_string()),
+        }),
+    }
+}
+
+fn set_autoplay_inline(element: &Element) -> Result<(), NokhwaError> {
+    if let Err(why) = element.set_attribute("autoplay", "autoplay") {
+        return Err(NokhwaError::SetPropertyError {
+            property: "Video-autoplay".to_string(),
+            value: "autoplay".to_string(),
+            error: format!("{why:?}"),
+        });
+    }
+
+    if let Err(why) = element.set_attribute("playsinline", "playsinline") {
+        return Err(NokhwaError::SetPropertyError {
+            property: "Video-playsinline".to_string(),
+            value: "playsinline".to_string(),
+            error: format!("{why:?}"),
+        });
+    }
+
+    Ok(())
+}
 
 #[derive(Copy, Clone, Hash, Ord, PartialOrd, Eq, PartialEq)]
 enum JSCameraFacingMode {
@@ -42,6 +200,11 @@ struct CustomControls {
     pub(crate) group_id_exact: bool,
 }
 
+enum CanvasType {
+    OffScreen(OffscreenCanvas),
+    HtmlCanvas(HtmlCanvasElement),
+}
+
 /// Quirks:
 /// - Regular [`CaptureTrait`] will block. Use [``]
 /// - [REQUIRES AN UP-TO-DATE BROWSER DUE TO USE OF OFFSCREEN CANVAS.](https://caniuse.com/?search=OffscreenCanvas)
@@ -52,15 +215,45 @@ pub struct BrowserCamera {
     format: CameraFormat,
     init: bool,
     controls: CustomControls,
-    cavnas: Option<OffscreenCanvas>,
+    cavnas: Option<CanvasType>,
     context: Option<CanvasRenderingContext2d>,
 }
 
 impl BrowserCamera {
-    pub fn new(index: &CameraIndex) -> Result<Self, NokhwaError> {}
-
-    pub async fn new_async(index: &CameraIndex) -> Result<Self, NokhwaError> {
-        //
+    pub fn new(index: &CameraIndex) -> Result<Self, NokhwaError> {}
+
+    pub async fn new_async(index: &CameraIndex) -> Result<Self, NokhwaError> {
+        let window = window()?;
+        let media_devices = media_devices(&window.navigator())?;
+
+        let stream: MediaStream = match media_devices.get_user_media_with_constraints(&constraints)
+        {
+            Ok(promise) => {
+                let future = JsFuture::from(promise);
+                match future.await {
+                    Ok(stream) => {
+                        let media_stream: MediaStream = MediaStream::from(stream);
+                        media_stream
+                    }
+                    Err(why) => {
+                        return Err(NokhwaError::StructureError {
+                            structure: "MediaDevicesGetUserMediaJsFuture".to_string(),
+                            error: format!("{why:?}"),
+                        })
+                    }
+                }
+            }
+            Err(why) => {
+                return Err(NokhwaError::StructureError {
+                    structure: "MediaDevicesGetUserMedia".to_string(),
+                    error: format!("{why:?}"),
+                })
+            }
+        };
+
+
+
+
     }
 }
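Note: the helper functions above exist to turn web_sys's Option/JsValue failures into NokhwaError, and new_async chains them before awaiting getUserMedia. A condensed sketch of that flow, written as if it lived in this module; the MediaStreamConstraints construction is an assumption here, since the patch does not yet build the `constraints` value it passes:

    use wasm_bindgen::JsValue;
    use wasm_bindgen_futures::JsFuture;
    use web_sys::{MediaStream, MediaStreamConstraints};

    async fn open_default_stream() -> Result<MediaStream, NokhwaError> {
        let window = window()?;
        let media_devices = media_devices(&window.navigator())?;

        // Hypothetical constraints: just ask for any video track.
        let mut constraints = MediaStreamConstraints::new();
        constraints.video(&JsValue::from_bool(true));

        // getUserMedia returns a Promise; JsFuture bridges it into async Rust.
        let promise = media_devices
            .get_user_media_with_constraints(&constraints)
            .map_err(|why| NokhwaError::StructureError {
                structure: "MediaDevicesGetUserMedia".to_string(),
                error: format!("{why:?}"),
            })?;
        let stream = JsFuture::from(promise)
            .await
            .map_err(|why| NokhwaError::OpenStreamError(format!("{why:?}")))?;
        Ok(MediaStream::from(stream))
    }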
diff --git a/src/init.rs b/src/init.rs
index 2c4854f..6310010 100644
--- a/src/init.rs
+++ b/src/init.rs
@@ -53,14 +53,52 @@ fn status_avfoundation() -> bool {
     )
 }
 
+#[cfg(feature = "input-jscam")]
+pub async fn request_permission() -> Result<(), NokhwaError> {
+    let window: Window = window()?;
+    let navigator = window.navigator();
+    let media_devices = media_devices(&navigator)?;
+
+    match media_devices.get_user_media_with_constraints(
+        MediaStreamConstraints::new()
+            .video(&JsValue::from_bool(true))
+            .audio(&JsValue::from_bool(false)),
+    ) {
+        Ok(promise) => {
+            let js_future = JsFuture::from(promise);
+            match js_future.await {
+                Ok(stream) => {
+                    let media_stream = MediaStream::from(stream);
+                    media_stream
+                        .get_tracks()
+                        .iter()
+                        .for_each(|track| MediaStreamTrack::from(track).stop());
+                    Ok(())
+                }
+                Err(why) => Err(NokhwaError::OpenStreamError(format!("{why:?}"))),
+            }
+        }
+        Err(why) => Err(NokhwaError::StructureError {
+            structure: "UserMediaPermission".to_string(),
+            error: format!("{why:?}"),
+        }),
+    }
+}
+
+#[cfg(not(feature = "input-jscam"))]
+pub async fn request_permission() -> Result<(), NokhwaError> {
+    Ok(())
+}
+
 // todo: make this work on browser code
 /// Initialize `nokhwa`
 /// It is your responsibility to call this function before anything else, but only on `MacOS`.
 ///
 /// The `on_complete` is called after initialization (a.k.a User granted permission). The callback's argument
 /// is whether the initialization was successful or not
-pub fn nokhwa_initialize(on_complete: impl Fn(bool) + Send + Sync + 'static) {
+pub fn nokhwa_initialize_callback(on_complete: impl Fn(bool) + Send + Sync + 'static) {
     init_avfoundation(on_complete);
+    // TODO: implement initialization
 }
 
 /// Check the status if `nokhwa`