summaryrefslogtreecommitdiffstats
path: root/old
diff options
context:
space:
mode:
authorKostya Shishkov <kostya.shishkov@gmail.com>2020-10-07 16:02:57 +0200
committerKostya Shishkov <kostya.shishkov@gmail.com>2020-10-07 16:02:57 +0200
commit113ac3d202140491de9a8ad1e75dd96882a8857f (patch)
tree34425f2594d8ddfd9be88a044c32d6175c7f4fad /old
parent0036a7b08b18389e0d8683fe1422ffb861dd9327 (diff)
downloadnihav-player-113ac3d202140491de9a8ad1e75dd96882a8857f.tar.gz
move experimental code to old/
Diffstat (limited to 'old')
-rw-r--r--old/Cargo.toml14
-rw-r--r--old/README.md21
-rw-r--r--old/sdl-patches/0001-remove-not-really-needed-dependencies.patch151
-rw-r--r--old/sdl-patches/0002-remove-obsolete-directives.patch35
-rw-r--r--old/sdl-patches/0003-audio-make-audio-callback-a-trait-instead-of-a-funct.patch61
-rw-r--r--old/sdl-patches/0004-video-add-YUV-overlay-support.patch180
-rw-r--r--old/src/main.rs599
7 files changed, 1061 insertions, 0 deletions
diff --git a/old/Cargo.toml b/old/Cargo.toml
new file mode 100644
index 0000000..68777a5
--- /dev/null
+++ b/old/Cargo.toml
@@ -0,0 +1,14 @@
+[package]
+name = "nihav-player"
+version = "0.1.0"
+authors = ["Kostya Shishkov <kostya.shishkov@gmail.com>"]
+edition = "2018"
+
+[dependencies]
+nihav_core = { path="../nihav-core" }
+nihav_registry = { path="../nihav-registry" }
+nihav_allstuff = { path="../nihav-allstuff" }
+sdl = { path="./rust-sdl" }
+
+#sdl2 = "^0.32"
+#download_sdl2 = "0.0.3" \ No newline at end of file
diff --git a/old/README.md b/old/README.md
new file mode 100644
index 0000000..0f00772
--- /dev/null
+++ b/old/README.md
@@ -0,0 +1,21 @@
+# nihav-player
+
+nihav-player is an extremely simple tool to test decoding functionality of NihAV.
+
+## Getting Started
+
+This is not a stable working application and it is provided just as a demonstration and not as something you should ever use.
+
+In order to build it, put it into directory with other NihAV crates, check out `sdl-0.3.6` crate from https://crates.io/crates/sdl into `rust-sdl` subdirectory, apply patches from `sdl-patches` to it, and invoke `cargo build`.
+
+Usage: `nihav-player [-an] inputfile`. `-an` tells player to ignore audio stream.
+
+## Contributing
+
+You're not supposed to. Even I hardly do that so why should you?
+
+## License
+
+NihAV is licensed under GNU Affero General Public License - see [COPYING] for details.
+
+Parts of the project can be relicensed to other free licenses like LGPLv2 on request.
diff --git a/old/sdl-patches/0001-remove-not-really-needed-dependencies.patch b/old/sdl-patches/0001-remove-not-really-needed-dependencies.patch
new file mode 100644
index 0000000..e1cf979
--- /dev/null
+++ b/old/sdl-patches/0001-remove-not-really-needed-dependencies.patch
@@ -0,0 +1,151 @@
+From 4d9e740dfd80df976d976e21a7e33fd9f8b3c362 Mon Sep 17 00:00:00 2001
+From: Kostya Shishkov <kostya.shishkov@gmail.com>
+Date: Fri, 22 Nov 2019 17:47:54 +0100
+Subject: [PATCH 1/4] remove not really needed dependencies
+
+---
+ Cargo.toml | 2 --
+ src/sdl/event.rs | 20 +++++++-------------
+ src/sdl/lib.rs | 2 --
+ src/sdl/video.rs | 8 --------
+ 4 files changed, 7 insertions(+), 25 deletions(-)
+
+diff --git a/Cargo.toml b/Cargo.toml
+index b53c715..ddf8523 100644
+--- a/Cargo.toml
++++ b/Cargo.toml
+@@ -16,6 +16,4 @@ name = "sdl"
+ path = "src/sdl/lib.rs"
+
+ [dependencies]
+-num = "0.1.24"
+-rand = "0.3"
+ libc = "0.1"
+diff --git a/src/sdl/event.rs b/src/sdl/event.rs
+index 17e2cff..12812f8 100644
+--- a/src/sdl/event.rs
++++ b/src/sdl/event.rs
+@@ -1,7 +1,6 @@
+ use std::mem;
+ use libc::c_int;
+ use std::slice;
+-use num::FromPrimitive;
+ use std::ffi::CStr;
+ use std::str;
+
+@@ -512,7 +511,7 @@ pub enum Key {
+ Last,
+ }
+
+-impl FromPrimitive for Key {
++impl Key {
+ fn from_i64(n: i64) -> Option<Key> {
+ use self::Key::*;
+
+@@ -753,11 +752,11 @@ impl FromPrimitive for Key {
+ })
+ }
+
+- fn from_u64(n: u64) -> Option<Key> { FromPrimitive::from_i64(n as i64) }
++ fn from_u64(n: u64) -> Option<Key> { Self::from_i64(n as i64) }
+ }
+
+ fn wrap_key(i: ll::SDLKey) -> Option<Key> {
+- FromPrimitive::from_usize(i as usize)
++ Key::from_u64(i as u64)
+ }
+
+ #[derive(PartialEq, Eq, Copy, Clone)]
+@@ -829,7 +828,7 @@ pub enum Mouse {
+ WheelDown
+ }
+
+-impl FromPrimitive for Mouse {
++impl Mouse {
+ fn from_i64(n: i64) -> Option<Mouse> {
+ Some(match n {
+ 1 => Mouse::Left,
+@@ -840,12 +839,10 @@ impl FromPrimitive for Mouse {
+ _ => return None,
+ })
+ }
+-
+- fn from_u64(n: u64) -> Option<Mouse> { FromPrimitive::from_i64(n as i64) }
+ }
+
+ fn wrap_mouse(bitflags: u8) -> Option<Mouse> {
+- FromPrimitive::from_u8(bitflags)
++ Mouse::from_i64(bitflags as i64)
+ }
+
+ #[derive(PartialEq, Eq, Copy, Clone)]
+@@ -903,7 +900,7 @@ fn wrap_event(raw: ll::SDL_Event) -> Event {
+ let ty = if ty.is_null() { return Event::None; }
+ else { *ty };
+
+- let ty : EventType = match FromPrimitive::from_usize(ty as usize) {
++ let ty : EventType = match EventType::from_u64(ty as u64) {
+ Some(ty) => ty,
+ None => return Event::None
+ };
+@@ -1022,9 +1019,6 @@ pub enum EventType {
+ impl EventType {
+ pub fn get_state(&self) -> bool { get_event_state(*self) }
+ pub fn set_state(&self, state: bool) { set_event_state(*self, state) }
+-}
+-
+-impl FromPrimitive for EventType {
+ fn from_i64(n: i64) -> Option<EventType> {
+ Some(match n as ll::SDL_EventType {
+ ll::SDL_NOEVENT => EventType::None,
+@@ -1048,7 +1042,7 @@ impl FromPrimitive for EventType {
+ })
+ }
+
+- fn from_u64(n: u64) -> Option<EventType> { FromPrimitive::from_i64(n as i64) }
++ fn from_u64(n: u64) -> Option<EventType> { Self::from_i64(n as i64) }
+ }
+
+ pub fn pump_events() {
+diff --git a/src/sdl/lib.rs b/src/sdl/lib.rs
+index cf04157..0f2a910 100644
+--- a/src/sdl/lib.rs
++++ b/src/sdl/lib.rs
+@@ -1,8 +1,6 @@
+ #![allow(raw_pointer_derive)]
+
+ extern crate libc;
+-extern crate rand;
+-extern crate num;
+
+ pub use sdl::*;
+
+diff --git a/src/sdl/video.rs b/src/sdl/video.rs
+index 3f7020a..64084f8 100644
+--- a/src/sdl/video.rs
++++ b/src/sdl/video.rs
+@@ -1,7 +1,6 @@
+ use std::mem;
+ use libc::{c_int, c_float};
+ use std::ptr;
+-use rand::Rng;
+ use std::slice;
+ use std::ffi::CString;
+ use std::path::Path;
+@@ -305,13 +304,6 @@ pub enum Color {
+ RGBA(u8, u8, u8, u8)
+ }
+
+-impl ::rand::Rand for Color {
+- fn rand<R: ::rand::Rng>(rng: &mut R) -> Color {
+- if rng.gen() { RGBA(rng.gen(), rng.gen(), rng.gen(), rng.gen()) }
+- else { RGB(rng.gen(), rng.gen(), rng.gen()) }
+- }
+-}
+-
+ impl Color {
+ pub fn from_mapped(bit: u32, fmt: *const ll::SDL_PixelFormat) -> Color {
+ let mut r = 0;
+--
+1.7.9.5
+
diff --git a/old/sdl-patches/0002-remove-obsolete-directives.patch b/old/sdl-patches/0002-remove-obsolete-directives.patch
new file mode 100644
index 0000000..44c28e9
--- /dev/null
+++ b/old/sdl-patches/0002-remove-obsolete-directives.patch
@@ -0,0 +1,35 @@
+From 03cecd007d1b5688af83f0c2dfcc0d94f04044a2 Mon Sep 17 00:00:00 2001
+From: Kostya Shishkov <kostya.shishkov@gmail.com>
+Date: Fri, 22 Nov 2019 17:49:15 +0100
+Subject: [PATCH 2/4] remove obsolete directives
+
+---
+ src/sdl/lib.rs | 2 --
+ src/sdl/video.rs | 1 -
+ 2 files changed, 3 deletions(-)
+
+diff --git a/src/sdl/lib.rs b/src/sdl/lib.rs
+index 0f2a910..111ccb2 100644
+--- a/src/sdl/lib.rs
++++ b/src/sdl/lib.rs
+@@ -1,5 +1,3 @@
+-#![allow(raw_pointer_derive)]
+-
+ extern crate libc;
+
+ pub use sdl::*;
+diff --git a/src/sdl/video.rs b/src/sdl/video.rs
+index 64084f8..710a1d6 100644
+--- a/src/sdl/video.rs
++++ b/src/sdl/video.rs
+@@ -204,7 +204,6 @@ impl Drop for Surface {
+ }
+ }
+
+-#[allow(raw_pointer_derive)]
+ #[derive(PartialEq, Copy, Clone)]
+ pub struct Palette {
+ pub raw: *mut ll::SDL_Palette
+--
+1.7.9.5
+
diff --git a/old/sdl-patches/0003-audio-make-audio-callback-a-trait-instead-of-a-funct.patch b/old/sdl-patches/0003-audio-make-audio-callback-a-trait-instead-of-a-funct.patch
new file mode 100644
index 0000000..3b324b4
--- /dev/null
+++ b/old/sdl-patches/0003-audio-make-audio-callback-a-trait-instead-of-a-funct.patch
@@ -0,0 +1,61 @@
+From 733f792c6c9d6c850243e794551002358a0b74a5 Mon Sep 17 00:00:00 2001
+From: Kostya Shishkov <kostya.shishkov@gmail.com>
+Date: Wed, 27 Nov 2019 08:22:16 +0100
+Subject: [PATCH 3/4] audio: make audio callback a trait instead of a function
+
+---
+ src/sdl/audio.rs | 17 ++++++-----------
+ 1 file changed, 6 insertions(+), 11 deletions(-)
+
+diff --git a/src/sdl/audio.rs b/src/sdl/audio.rs
+index 18a140f..0c2f6cf 100644
+--- a/src/sdl/audio.rs
++++ b/src/sdl/audio.rs
+@@ -93,15 +93,16 @@ impl Channels {
+ pub fn count(self) -> c_int { match self { Channels::Mono => 1, Channels::Stereo => 2 } }
+ }
+
+-pub type AudioCallback = fn(&mut [u8]);
++pub trait AudioCallback {
++ fn callback(&mut self, out: &mut [u8]);
++}
+
+-#[derive(Copy)]
+ pub struct DesiredAudioSpec {
+ pub freq: c_int,
+ pub format: AudioFormat,
+ pub channels: Channels,
+ pub samples: u16,
+- pub callback: AudioCallback,
++ pub callback: Box<AudioCallback>,
+ }
+
+ impl DesiredAudioSpec {
+@@ -123,12 +124,6 @@ impl DesiredAudioSpec {
+ }
+ }
+
+-impl Clone for DesiredAudioSpec {
+- fn clone(&self) -> DesiredAudioSpec {
+- *self
+- }
+-}
+-
+ #[derive(Copy, Clone)]
+ pub struct ObtainedAudioSpec {
+ pub freq: c_int,
+@@ -154,9 +149,9 @@ impl ObtainedAudioSpec {
+
+ extern fn native_callback(userdata: *const c_void, stream: *mut u8, len: c_int) {
+ unsafe {
+- let callback: Box<AudioCallback> = transmute(userdata);
++ let mut callback: Box<Box<AudioCallback>> = transmute(userdata);
+ let buffer = transmute((stream, len as usize));
+- (*callback)(buffer);
++ callback.callback(buffer);
+ forget(callback); // Don't free the callback!
+ }
+ }
+--
+1.7.9.5
+
diff --git a/old/sdl-patches/0004-video-add-YUV-overlay-support.patch b/old/sdl-patches/0004-video-add-YUV-overlay-support.patch
new file mode 100644
index 0000000..74c7201
--- /dev/null
+++ b/old/sdl-patches/0004-video-add-YUV-overlay-support.patch
@@ -0,0 +1,180 @@
+From a7e89f88df1bff23df6314d592d8e375dfae4048 Mon Sep 17 00:00:00 2001
+From: Kostya Shishkov <kostya.shishkov@gmail.com>
+Date: Wed, 27 Nov 2019 08:22:47 +0100
+Subject: [PATCH 4/4] video: add YUV overlay support
+
+---
+ src/sdl/video.rs | 118 +++++++++++++++++++++++++++++++++++++++++++++++++++++-
+ 1 file changed, 116 insertions(+), 2 deletions(-)
+
+diff --git a/src/sdl/video.rs b/src/sdl/video.rs
+index 710a1d6..9acfcb6 100644
+--- a/src/sdl/video.rs
++++ b/src/sdl/video.rs
+@@ -51,6 +51,26 @@ pub mod ll {
+ pub refcount: c_int
+ }
+
++ pub const SDL_YV12_OVERLAY: uint32_t = 0x32315659;
++ pub const SDL_IYUV_OVERLAY: uint32_t = 0x56555949;
++ pub const SDL_YUY2_OVERLAY: uint32_t = 0x32595559;
++ pub const SDL_UYVY_OVERLAY: uint32_t = 0x59565955;
++ pub const SDL_YVYU_OVERLAY: uint32_t = 0x55595659;
++
++ #[repr(C)]
++ #[derive(Copy, Clone)]
++ pub struct SDL_Overlay {
++ pub format: uint32_t,
++ pub w: c_int,
++ pub h: c_int,
++ pub planes: c_int,
++ pub pitches: *const uint16_t,
++ pub pixels: *const *mut uint8_t,
++ pub hwfuncs: *mut c_void,
++ pub hwdata: *mut c_void,
++ pub flags: uint32_t,
++ }
++
+ #[repr(C)]
+ #[derive(Copy, Clone)]
+ pub struct SDL_Color {
+@@ -109,7 +129,7 @@ pub mod ll {
+ Gmask: uint32_t,
+ Bmask: uint32_t,
+ Amask: uint32_t) -> *mut SDL_Surface;
+- pub fn SDL_CreateRGBSurfaceFrom(pixels: *mut c_void,
++ pub fn SDL_CreateRGBSurfaceFrom(pixels: *const c_void,
+ width: c_int,
+ height: c_int,
+ depth: c_int,
+@@ -181,6 +201,14 @@ pub mod ll {
+ pub fn SDL_LoadBMP_RW(src: *mut SDL_RWops, freesrc: c_int) -> *mut SDL_Surface;
+ pub fn SDL_SaveBMP_RW(surface: *mut SDL_Surface, dst: *mut SDL_RWops, freedst: c_int) -> c_int;
+ pub fn SDL_GL_SwapBuffers();
++
++ pub fn SDL_CreateYUVOverlay(width: c_int, height: c_int, format: uint32_t, display: *mut SDL_Surface)
++ -> *mut SDL_Overlay;
++ pub fn SDL_LockYUVOverlay(overlay: *mut SDL_Overlay) -> c_int;
++ pub fn SDL_UnlockYUVOverlay(overlay: *mut SDL_Overlay);
++ pub fn SDL_DisplayYUVOverlay(overlay: *mut SDL_Overlay,
++ dstrect: *mut SDL_Rect) -> c_int;
++ pub fn SDL_FreeYUVOverlay(overlay: *mut SDL_Overlay);
+ }
+ }
+
+@@ -351,6 +379,7 @@ pub enum SurfaceFlag {
+ SWSurface = 0x00000000,
+ HWSurface = 0x00000001,
+ AsyncBlit = 0x00000004,
++ HWAccel = 0x00000100,
+ SrcColorKey = 0x00001000,
+ SrcAlpha = 0x00010000,
+ RLEAccel = 0x00004000
+@@ -466,6 +495,15 @@ pub fn get_video_surface() -> Result<Surface, String> {
+ else { Ok(wrap_surface(raw, false)) }
+ }
+
++#[derive(PartialEq, Eq, Copy, Clone)]
++pub enum OverlayFormat {
++ YV12,
++ IYUV,
++ YUY2,
++ UYVY,
++ YVYU,
++}
++
+ // TODO: get_video_modes, get_video_driver_name
+
+ impl Surface {
+@@ -485,6 +523,38 @@ impl Surface {
+ }
+ }
+
++ pub fn new_from(pixels: &[u8], pitch: i32, width: isize, height: isize, bpp: isize,
++ rmask: u32, gmask: u32, bmask: u32, amask: u32) -> Result<Surface, String> {
++ unsafe {
++ let raw = ll::SDL_CreateRGBSurfaceFrom(pixels.as_ptr() as *const libc::c_void, width as c_int, height as c_int, bpp as c_int, pitch as c_int,
++ rmask, gmask, bmask, amask);
++
++ if raw.is_null() {
++ Err(get_error())
++ } else {
++ Ok(Surface { raw: raw, owned: true })
++ }
++ }
++ }
++
++ pub fn create_overlay(&self, width: isize, height: isize, format: OverlayFormat) -> Result<Overlay, String> {
++ unsafe {
++ let yuv_fmt = match format {
++ OverlayFormat::YV12 => ll::SDL_YV12_OVERLAY,
++ OverlayFormat::IYUV => ll::SDL_IYUV_OVERLAY,
++ OverlayFormat::YUY2 => ll::SDL_YUY2_OVERLAY,
++ OverlayFormat::UYVY => ll::SDL_UYVY_OVERLAY,
++ OverlayFormat::YVYU => ll::SDL_YVYU_OVERLAY,
++ };
++ let raw = ll::SDL_CreateYUVOverlay(width as c_int, height as c_int, yuv_fmt, self.raw);
++ if raw.is_null() {
++ Err(get_error())
++ } else {
++ Ok(Overlay { raw: raw })
++ }
++ }
++ }
++
+ pub fn from_bmp(path: &Path) -> Result<Surface, String> {
+ let cpath = CString::new(path.to_str().unwrap()).unwrap();
+ let mode = CString::new("rb".as_bytes()).unwrap();
+@@ -742,4 +812,48 @@ pub fn swap_buffers() {
+ }
+
+
+-// TODO: YUV
++#[derive(PartialEq)]
++pub struct Overlay {
++ pub raw: *mut ll::SDL_Overlay,
++}
++
++impl Drop for Overlay {
++ fn drop(&mut self) {
++ unsafe {
++ ll::SDL_FreeYUVOverlay(self.raw);
++ }
++ }
++}
++
++impl Overlay {
++ pub fn display(&self, dest_rect: Option<Rect>) -> bool {
++ unsafe {
++ ll::SDL_DisplayYUVOverlay(self.raw, match dest_rect {
++ Some(ref rect) => mem::transmute(rect),
++ None => ptr::null_mut()
++ }) == 0
++ }
++ }
++
++ pub fn lock(&self) -> bool {
++ unsafe { ll::SDL_LockYUVOverlay(self.raw) == 0 }
++ }
++
++ pub fn unlock(&self) {
++ unsafe { ll::SDL_UnlockYUVOverlay(self.raw) }
++ }
++
++ pub unsafe fn get_pixel_ptr(&self, comp: usize) -> &mut [u8] {
++ let pitch = self.get_pitch(comp);
++ let len = if comp == 0 { pitch as usize * ((*self.raw).h as usize) } else
++ { pitch as usize * (((*self.raw).h / 2) as usize) };
++ let ptr = *((*self.raw).pixels.add(comp));
++ let pixels: &mut [u8] = mem::transmute((ptr, len));
++
++ pixels
++ }
++
++ pub fn get_pitch(&self, comp: usize) -> usize {
++ unsafe { *((*self.raw).pitches.add(comp)) as usize }
++ }
++}
+--
+1.7.9.5
+
diff --git a/old/src/main.rs b/old/src/main.rs
new file mode 100644
index 0000000..c53181d
--- /dev/null
+++ b/old/src/main.rs
@@ -0,0 +1,599 @@
+extern crate sdl;
+extern crate nihav_core;
+extern crate nihav_registry;
+extern crate nihav_allstuff;
+
+use sdl::video::*;
+use sdl::audio::{DesiredAudioSpec, Channels, AudioFormat, AudioCallback};
+use sdl::event::{Event, Key};
+
+use std::env;
+use std::fs::File;
+use std::path::Path;
+use std::time::{Duration, SystemTime};
+use std::thread;
+use std::sync::mpsc;
+use std::sync::{Arc, Mutex};
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::str::FromStr;
+
+use nihav_registry::detect;
+use nihav_core::formats::*;
+use nihav_core::frame::*;
+use nihav_core::io::byteio::{FileReader, ByteReader};
+use nihav_core::reorder::*;
+use nihav_core::codecs::*;
+use nihav_core::demuxers::*;
+use nihav_core::scale::*;
+use nihav_core::soundcvt::*;
+use nihav_allstuff::*;
+
+const AUDIO_BUF_SIZE: usize = 1024;
+
+struct AudioFIFO {
+ data: Vec<u8>,
+ max_len: usize,
+ pos: usize,
+ end: usize,
+ done: bool,
+ full: bool,
+}
+
+impl AudioFIFO {
+ fn new(len: usize, max_len: usize) -> Self {
+ Self { data: vec![0; len], max_len, pos: 0, end: 0, done: false, full: false }
+ }
+ fn add(&mut self, src: &[u8]) -> bool {
+ if self.done || self.full { return true; }
+ if self.pos > 0 {
+ for i in 0..(self.end - self.pos) {
+ self.data[i] = self.data[i + self.pos];
+ }
+ self.end -= self.pos;
+ self.pos = 0;
+ }
+ if self.end + src.len() > self.max_len {
+ self.full = true;
+ return false;
+ }
+ if self.end + src.len() > self.data.len() { self.data.resize(self.end + src.len(), 0); }
+ (&mut self.data[self.end..][..src.len()]).copy_from_slice(src);
+ self.end += src.len();
+ true
+ }
+ fn consume(&mut self, dst: &mut [u8]) -> bool {
+ if self.done { for el in dst.iter_mut() { *el = 0; } return true; }
+ let size = dst.len();
+ if self.end - self.pos < size { return false; }
+ dst.copy_from_slice(&self.data[self.pos..][..size]);
+ self.pos += size;
+ if self.pos >= self.max_len / 2 { self.full = false; }
+ true
+ }
+ fn finish(&mut self) {
+ self.done = true;
+ }
+}
+
+struct AudioConsumer {
+ afifo: Arc<Mutex<AudioFIFO>>,
+}
+
+impl AudioConsumer {
+ fn new(afifo: Arc<Mutex<AudioFIFO>>) -> Self { Self { afifo } }
+}
+
+impl AudioCallback for AudioConsumer {
+ fn callback(&mut self, out: &mut [u8]) {
+ let mut done = false;
+ while !done {
+ let ret = self.afifo.lock();
+ if let Ok(mut afifo) = ret {
+ done = afifo.consume(out);
+ }
+ if !done { thread::sleep(Duration::from_millis(400)); }
+ }
+ }
+}
+
+impl Drop for AudioConsumer {
+ fn drop(&mut self) {
+ let ret = self.afifo.lock();
+ if let Ok(mut afifo) = ret {
+ afifo.finish();
+ }
+ }
+}
+
+struct DecoderStuff {
+ dsupp: Box<NADecoderSupport>,
+ dec: Box<dyn NADecoder + Send>,
+ reord: Box<dyn FrameReorderer + Send>,
+}
+
+enum SendEvent {
+ Packet(NAPacket),
+ EOF,
+}
+
+enum DisplayEvent {
+ RGB(u64, NABufferRef<Surface>),
+ YUV(u64, NABufferRef<Overlay>),
+ Audio(NABufferType),
+}
+
+impl DisplayEvent {
+ fn get_time(&self) -> u64 {
+ match *self {
+ DisplayEvent::RGB(time, _) => time,
+ DisplayEvent::YUV(time, _) => time,
+ _ => 0,
+ }
+ }
+}
+
+struct CommonMessages {
+ receiver: mpsc::Receiver<SendEvent>,
+ esend: mpsc::SyncSender<DisplayEvent>,
+ ev_mtx: Arc<Mutex<isize>>,
+ finished: Arc<AtomicBool>,
+}
+
+fn add_audio<T:Copy>(amtx: &mut Arc<Mutex<AudioFIFO>>, data: &[T]) {
+ let len = std::mem::size_of::<T>() * data.len();
+ let mut done = false;
+ while !done {
+ let ret = amtx.try_lock();
+ if let Ok(mut afifo) = ret {
+ unsafe {
+ let u8_ptr = data.as_ptr();
+ let u8_data = std::mem::transmute((u8_ptr, len));
+ done = afifo.add(u8_data);
+ }
+ }
+ if !done { std::thread::sleep(Duration::from_millis(100)); }
+ }
+}
+
+fn open_audio(arate: u32, amtx: Arc<Mutex<AudioFIFO>>) -> NAAudioInfo {
+ let dspec = DesiredAudioSpec {
+ freq: arate as i32, format: AudioFormat::S16Lsb, channels: Channels::Stereo, samples: AUDIO_BUF_SIZE as u16,
+ callback: Box::new(AudioConsumer::new(amtx))
+ };
+ let tspec = sdl::audio::open(dspec).unwrap();
+//println!("target spec: {} Hz fmt {:X} {} ch {} samp {} size", tspec.freq, tspec.format as u32, if tspec.channels == Channels::Stereo { 2 } else { 1 }, tspec.samples, tspec.size);
+ sdl::audio::pause(true);
+ let dst_ch = if tspec.channels == Channels::Stereo { 2 } else { 1 };
+ let snd_fmt = match tspec.format {
+ AudioFormat::U8 => SND_U8_FORMAT,
+ AudioFormat::S8 => unimplemented!(),
+ AudioFormat::U16Lsb => unimplemented!(),
+ AudioFormat::S16Lsb => SND_S16_FORMAT,
+ AudioFormat::U16Msb => unimplemented!(),
+ AudioFormat::S16Msb => NASoniton { bits: 16, be: true, packed: false, planar: false, float: false, signed: true },
+ };
+ NAAudioInfo { sample_rate: tspec.freq as u32, channels: dst_ch, format: snd_fmt, block_len: 1024 }
+}
+
+fn start_audio_dec(audio_dec: Option<DecoderStuff>, dst_info: NAAudioInfo, cmsg: CommonMessages) -> thread::JoinHandle<()> {
+ thread::spawn(move || {
+ let mut audio_dec = audio_dec.unwrap();
+ let dst_chmap = if dst_info.channels == 2 {
+ NAChannelMap::from_str("L,R").unwrap()
+ } else {
+ NAChannelMap::from_str("C").unwrap()
+ };
+ loop {
+ let ret = cmsg.receiver.recv();
+ if ret.is_err() { break; }
+ if let Ok(SendEvent::EOF) = ret { break; }
+ let pkt = if let Ok(SendEvent::Packet(pkt)) = ret { pkt } else { unreachable!(); };
+ let ret = audio_dec.dec.decode(&mut audio_dec.dsupp, &pkt);
+ if let Ok(frm) = ret {
+ let buf = frm.get_buffer();
+ let out_buf = convert_audio_frame(&buf, &dst_info, &dst_chmap).unwrap();
+ cmsg.esend.send(DisplayEvent::Audio(out_buf)).unwrap();
+ let mut count = cmsg.ev_mtx.lock().unwrap();
+ *count += 1;
+ drop(count);
+ thread::yield_now();
+ } else {
+ println!("error decoding audio");
+ }
+ }
+ cmsg.finished.store(true, Ordering::Relaxed);
+ })
+}
+
+fn start_video_dec(video_dec: Option<DecoderStuff>, ifmt: Option<NAVideoInfo>, width: usize, height: usize, tb_num: u32, tb_den: u32, cmsg: CommonMessages, scr_mtx: Arc<Mutex<NABufferRef<Surface>>>) -> thread::JoinHandle<()> {
+ thread::spawn(move || {
+ let mut ifmt = ifmt.unwrap();
+ let mut video_dec = video_dec.unwrap();
+ let rgb32_fmt = NAPixelFormaton { model: ColorModel::RGB(RGBSubmodel::RGB), components: 3,
+ comp_info: [
+ Some(NAPixelChromaton { h_ss: 0, v_ss: 0, packed: true, depth: 8, shift: 0, comp_offs: 0, next_elem: 4 }),
+ Some(NAPixelChromaton { h_ss: 0, v_ss: 0, packed: true, depth: 8, shift: 0, comp_offs: 1, next_elem: 4 }),
+ Some(NAPixelChromaton { h_ss: 0, v_ss: 0, packed: true, depth: 8, shift: 0, comp_offs: 2, next_elem: 4 }),
+ None, None
+ ], elem_size: 4, be: false, alpha: false, palette: false };
+ let ofmt_rgb = ScaleInfo { width, height, fmt: rgb32_fmt };
+ let ofmt_yuv = ScaleInfo { width, height, fmt: YUV420_FORMAT };
+
+ let sc_ifmt = ScaleInfo { width: ifmt.get_width(), height: ifmt.get_height(), fmt: ifmt.get_format() };
+ let mut do_yuv = if let ColorModel::YUV(_) = ifmt.get_format().get_model() { true } else { false };
+ let ofmt = if do_yuv { ofmt_yuv } else { ofmt_rgb };
+ let mut opic = alloc_video_buffer(NAVideoInfo::new(width, height, false, ofmt.fmt), 4).unwrap();
+ let mut scaler = NAScale::new(sc_ifmt, ofmt).unwrap();
+ loop {
+ let ret = cmsg.receiver.recv();
+ if ret.is_err() { break; }
+ if let Ok(SendEvent::EOF) = ret { break; }
+ let pkt = if let Ok(SendEvent::Packet(pkt)) = ret { pkt } else { unreachable!() };
+ let ret = video_dec.dec.decode(&mut video_dec.dsupp, &pkt);
+ if let Ok(frm) = ret {
+ video_dec.reord.add_frame(frm);
+ while let Some(frm) = video_dec.reord.get_frame() {
+ let bt = frm.get_buffer();
+ if let NABufferType::None = bt { continue; }
+ let vinfo = bt.get_video_info().unwrap();
+ if ifmt.get_width() != vinfo.get_width() ||
+ ifmt.get_height() != vinfo.get_height() ||
+ ifmt.get_format() != vinfo.get_format() {
+println!("reinit scaler!");
+ ifmt = vinfo.clone();
+ let sc_ifmt = ScaleInfo { width: ifmt.get_width(), height: ifmt.get_height(), fmt: ifmt.get_format() };
+ do_yuv = if let ColorModel::YUV(_) = ifmt.get_format().get_model() { true } else { false };
+ let ofmt = if do_yuv { ofmt_yuv } else { ofmt_rgb };
+ opic = alloc_video_buffer(NAVideoInfo::new(width, height, false, ofmt.fmt), 4).unwrap();
+ scaler = NAScale::new(sc_ifmt, ofmt).unwrap();
+ }
+ let ret = scaler.convert(&bt, &mut opic);
+ if ret.is_err() { println!(" scaler error {:?}", ret.err()); continue; }
+ ret.unwrap();
+ let ts = frm.get_dts().unwrap_or(frm.get_pts().unwrap_or(0));
+ let time = NATimeInfo::ts_to_time(ts, 1000, tb_num, tb_den);
+
+ let buf = opic.get_vbuf().unwrap();
+ if !do_yuv {
+ let sstride = buf.get_stride(0);
+ let src = buf.get_data();
+ let surface = Surface::new(&[SurfaceFlag::SWSurface], width as isize, height as isize, 32, 0x000000FF, 0x0000FF00, 0x00FF0000, 0x00000000).unwrap();
+ let pitch = unsafe { (*surface.raw).pitch } as usize;
+ surface.with_lock(|x: &mut [u8]| -> bool {
+ let csize = sstride.min(pitch);
+ for (dst, src) in x.chunks_mut(pitch).zip(src.chunks(sstride)) {
+ (&mut dst[..csize]).copy_from_slice(&src[..csize]);
+ }
+ true
+ });
+ let mut count = cmsg.ev_mtx.lock().unwrap();
+ cmsg.esend.send(DisplayEvent::RGB(time, NABufferRef::new(surface))).unwrap();
+ *count += 1;
+ } else {
+ let screen = scr_mtx.lock().unwrap();
+ let overlay = screen.create_overlay(width as isize, height as isize, OverlayFormat::YV12).unwrap();
+ drop(screen);
+ while !overlay.lock() {}
+ let src = buf.get_data();
+ let ysstride = buf.get_stride(0);
+ let ysrc = &src[buf.get_offset(0)..];
+ let usstride = buf.get_stride(2);
+ let usrc = &src[buf.get_offset(2)..];
+ let vsstride = buf.get_stride(1);
+ let vsrc = &src[buf.get_offset(1)..];
+ unsafe {
+ let ydst = overlay.get_pixel_ptr(0);
+ let ydstride = overlay.get_pitch(0);
+ let udst = overlay.get_pixel_ptr(1);
+ let udstride = overlay.get_pitch(1);
+ let vdst = overlay.get_pixel_ptr(2);
+ let vdstride = overlay.get_pitch(2);
+ for (ydst, ysrc) in ydst.chunks_mut(ydstride).take(height).zip(ysrc.chunks(ysstride)) {
+ (&mut ydst[..width]).copy_from_slice(&ysrc[..width]);
+ }
+ for (udst, usrc) in udst.chunks_mut(udstride).take(height).zip(usrc.chunks(usstride)) {
+ (&mut udst[..width / 2]).copy_from_slice(&usrc[..width / 2]);
+ }
+ for (vdst, vsrc) in vdst.chunks_mut(vdstride).take(height).zip(vsrc.chunks(vsstride)) {
+ (&mut vdst[..width / 2]).copy_from_slice(&vsrc[..width / 2]);
+ }
+ }
+ overlay.unlock();
+ let mut count = cmsg.ev_mtx.lock().unwrap();
+ cmsg.esend.send(DisplayEvent::YUV(time, NABufferRef::new(overlay))).unwrap();
+ *count += 1;
+ }
+ }
+ } else {
+ println!("error decoding video");
+ }
+ }
+ cmsg.finished.store(true, Ordering::Relaxed);
+ })
+}
+
+fn play_file(args: Vec<String>) {
+
+ let mut cur_arg: usize = 1;
+ let mut decode_audio = true;
+ while (cur_arg < args.len()) && args[cur_arg].starts_with('-') {
+ match args[cur_arg].as_str() {
+ "--" => { break; },
+ "-an" => { decode_audio = false; },
+ _ => { println!("unknown option {}", args[cur_arg]); return; },
+ }
+ cur_arg += 1;
+ }
+ let name = args[cur_arg].as_str();
+
+ let path = Path::new(name);
+ let mut file = File::open(path).unwrap();
+ let dmx_fact;
+ let mut fr = FileReader::new_read(&mut file);
+ let mut br = ByteReader::new(&mut fr);
+ let res = detect::detect_format(name, &mut br);
+ if res.is_none() {
+ println!("cannot detect format for {}", name);
+ return;
+ }
+ let (dmx_name, _) = res.unwrap();
+ println!("trying demuxer {} on {}", dmx_name, name);
+
+ let mut dmx_reg = RegisteredDemuxers::new();
+ nihav_register_all_demuxers(&mut dmx_reg);
+ let mut dec_reg = RegisteredDecoders::new();
+ nihav_register_all_decoders(&mut dec_reg);
+
+ dmx_fact = dmx_reg.find_demuxer(dmx_name).unwrap();
+ br.seek(SeekFrom::Start(0)).unwrap();
+ let mut dmx = create_demuxer(dmx_fact, &mut br).unwrap();
+
+ let mut width = 640;
+ let mut height = 480;
+ let mut ifmt = None;
+ let mut tb_num = 0;
+ let mut tb_den = 0;
+ let mut arate = 0;
+ let mut video_str = 0;
+ let mut audio_str = 0;
+
+ let mut video_dec: Option<DecoderStuff> = None;
+ let mut audio_dec: Option<DecoderStuff> = None;
+
+ for i in 0..dmx.get_num_streams() {
+ let s = dmx.get_stream(i).unwrap();
+ let info = s.get_info();
+ let decfunc = dec_reg.find_decoder(info.get_name());
+ println!("stream {} - {} {}", i, s, info.get_name());
+ let str_id = s.get_id();
+ if info.is_video() {
+ if video_dec.is_none() {
+ if decfunc.is_none() {
+ println!("no video decoder for {} found!", info.get_name());
+ return;
+ }
+ let mut dec = (decfunc.unwrap())();
+ let mut dsupp = Box::new(NADecoderSupport::new());
+ let props = info.get_properties().get_video_info().unwrap();
+ if props.get_width() != 0 {
+ width = props.get_width();
+ height = props.get_height();
+ ifmt = Some(props.clone());
+ }
+ let reorder_depth = 3;
+ dsupp.pool_u8 = NAVideoBufferPool::new(reorder_depth);
+ dsupp.pool_u16 = NAVideoBufferPool::new(reorder_depth);
+ dsupp.pool_u32 = NAVideoBufferPool::new(reorder_depth);
+ dec.init(&mut dsupp, info).unwrap();
+ let reord = Box::new(IPBReorderer::new());
+ video_dec = Some(DecoderStuff{ dsupp, dec, reord });
+ video_str = str_id;
+ let (tbn, tbd) = s.get_timebase();
+ tb_num = tbn;
+ tb_den = tbd;
+ }
+ } else if info.is_audio() {
+ if audio_dec.is_none() && decode_audio {
+ if decfunc.is_none() {
+ println!("no audio decoder for {} found!", info.get_name());
+ } else {
+ let mut dec = (decfunc.unwrap())();
+ let mut dsupp = Box::new(NADecoderSupport::new());
+ let props = info.get_properties().get_audio_info().unwrap();
+ arate = props.get_sample_rate();
+ dec.init(&mut dsupp, info).unwrap();
+ let reord = Box::new(NoReorderer::new());
+ audio_dec = Some(DecoderStuff{ dsupp, dec, reord });
+ audio_str = str_id;
+ }
+ }
+ } else {
+ println!("decoder {} not found", info.get_name());
+ }
+ }
+
+ while (width <= 384) && (height <= 288) {
+ width <<= 1;
+ height <<= 1;
+ }
+
+ sdl::init(&[sdl::InitFlag::Video, sdl::InitFlag::Audio]);
+ sdl::wm::set_caption("NihAV Player", "nihav-player");
+ let screen = match sdl::video::set_video_mode(width as isize, height as isize, 32,
+ &[SurfaceFlag::HWSurface, SurfaceFlag::AsyncBlit, SurfaceFlag::HWAccel],
+ &[VideoFlag::DoubleBuf]) {
+ Ok(screen) => screen,
+ Err(err) => panic!("failed to set video mode: {}", err)
+ };
+
+ let (vsend, vrecv) = mpsc::sync_channel::<SendEvent>(0);
+ let (asend, arecv) = mpsc::sync_channel::<SendEvent>(0);
+ let (esend, erecv) = mpsc::sync_channel::<DisplayEvent>(50);
+ let events_mtx = Arc::new(Mutex::new(0isize));
+
+ let has_audio = audio_dec.is_some();
+ let mut amtx = Arc::new(Mutex::new(AudioFIFO::new((arate * 8) as usize, (arate * 80) as usize)));
+ let aud_finished = Arc::new(AtomicBool::new(!has_audio));
+ let audio_thread = if has_audio {
+ let ainfo = open_audio(arate, amtx.clone());
+ let cmsg = CommonMessages {
+ receiver: arecv,
+ esend: esend.clone(),
+ ev_mtx: events_mtx.clone(),
+ finished: aud_finished.clone(),
+ };
+ Some(start_audio_dec(audio_dec, ainfo, cmsg))
+ } else { None };
+
+ let has_video = video_dec.is_some();
+ let video_thread: Option<thread::JoinHandle<()>>;
+ let scr_mtx = Arc::new(Mutex::new(NABufferRef::new(screen)));
+ let vid_finished = Arc::new(AtomicBool::new(!has_video));
+ if has_video {
+ let cmsg = CommonMessages {
+ receiver: vrecv,
+ esend: esend,
+ ev_mtx: events_mtx.clone(),
+ finished: vid_finished.clone(),
+ };
+ video_thread = Some(start_video_dec(video_dec, ifmt, width, height, tb_num, tb_den, cmsg, scr_mtx.clone()));
+ } else {
+ video_thread = None;
+ };
+
+ let mut frame_queue: Vec<DisplayEvent> = Vec::new();
+
+ let systime = SystemTime::now();
+ let mut has_data = true;
+
+ 'main : loop {
+ 'event : loop {
+ match sdl::event::poll_event() {
+ Event::Quit => break 'main,
+ Event::None => break 'event,
+ Event::Key(k, _, _, _)
+ if k == Key::Escape || k == Key::Q
+ => break 'main,
+ Event::Key(k, _, _, _)
+ if k == Key::Space
+ => continue 'event,
+ _ => {}
+ }
+ }
+ if has_data {
+ let pktres = dmx.get_frame();
+ if let Err(DemuxerError::EOF) = pktres {
+ has_data = false;
+ if has_video {
+ vsend.send(SendEvent::EOF).unwrap();
+ }
+ if has_audio {
+ asend.send(SendEvent::EOF).unwrap();
+ }
+ } else if let Err(_) = pktres {
+ break;
+ } else if let Ok(pkt) = pktres {
+ let streamno = pkt.get_stream().get_id();
+
+ if has_video && streamno == video_str {
+ vsend.send(SendEvent::Packet(pkt)).unwrap();
+ } else if has_audio && streamno == audio_str {
+ asend.send(SendEvent::Packet(pkt)).unwrap();
+ }
+ }
+ }
+
+ let mut nevents = events_mtx.lock().unwrap();
+ while *nevents > 0 {
+ *nevents -= 1;
+ let ret = erecv.recv();
+ if ret.is_err() { break 'main; }
+ let mut disp_evt = ret.unwrap();
+ match disp_evt {
+ DisplayEvent::Audio(ref mut out_buf) => {
+ unsafe { sdl::audio::ll::SDL_LockAudio(); }
+ match out_buf {
+ NABufferType::AudioPacked(buf) => add_audio(&mut amtx, buf.get_data()),
+ NABufferType::AudioU8(buf) => add_audio(&mut amtx, buf.get_data()),
+ NABufferType::AudioI16(buf) => add_audio(&mut amtx, buf.get_data()),
+ NABufferType::AudioI32(buf) => add_audio(&mut amtx, buf.get_data()),
+ NABufferType::AudioF32(buf) => add_audio(&mut amtx, buf.get_data()),
+ _ => unreachable!(),
+ };
+ unsafe { sdl::audio::ll::SDL_UnlockAudio(); }
+ sdl::audio::pause(false);
+ },
+ _ => { frame_queue.push(disp_evt); },
+ };
+ }
+ drop(nevents);
+ if vid_finished.load(Ordering::Relaxed) &&
+ aud_finished.load(Ordering::Relaxed) && frame_queue.len() == 0 {
+ break;
+ }
+
+ if frame_queue.len() > 0 {
+ let cur_time = systime.elapsed().unwrap();
+ let disp_time = Duration::from_millis(frame_queue[0].get_time());
+
+//println!("cur time {:?} disp time {:?}", cur_time, disp_time);
+ if (disp_time <= cur_time + Duration::from_millis(10)) && disp_time + Duration::from_millis(10) >= cur_time {
+ let screen = scr_mtx.lock().unwrap();
+ let disp_evt = frame_queue.remove(0);
+ match disp_evt {
+ DisplayEvent::RGB(_, ref surf) => {
+ screen.blit(surf);
+ screen.flip();
+ },
+ DisplayEvent::YUV(_, ref ovl) => {
+ ovl.display(Some(screen.get_rect()));
+ },
+ _ => {},
+ };
+ } else if disp_time > cur_time {
+ let diff = disp_time - cur_time;
+ if diff > Duration::from_millis(20) {
+ thread::sleep(Duration::from_millis(20));
+ } else {
+ thread::sleep(diff);
+ }
+ } else {
+ frame_queue.remove(0);
+ }
+ }
+ }
+
+ if has_audio {
+ unsafe { sdl::audio::ll::SDL_LockAudio(); }
+ let mut afifo = amtx.lock().unwrap();
+ afifo.finish();
+ drop(afifo);
+ unsafe { sdl::audio::ll::SDL_UnlockAudio(); }
+ sdl::audio::pause(true);
+ }
+ drop(vsend);
+ drop(asend);
+ if let Some(vthread) = video_thread {
+ vthread.join().unwrap();
+ }
+ if let Some(athread) = audio_thread {
+ athread.join().unwrap();
+ }
+
+ if has_audio {
+ sdl::audio::close();
+ }
+}
+
+fn main() {
+ let args: Vec<String> = env::args().collect();
+
+ if args.len() == 1 {
+ println!("usage: nihav-player input");
+ return;
+ }
+
+ play_file(args);
+
+ sdl::quit();
+}