author    Kostya Shishkov <kostya.shishkov@gmail.com> 2019-11-28 18:44:08 +0100
committer Kostya Shishkov <kostya.shishkov@gmail.com> 2019-11-28 18:44:08 +0100
commit    860a2b4e79e9ad5b4ba4046998170755ff9bf45c (patch)
tree      cae66855dbb9ca8665934e1fe9888ff7bce8f3a1
download  nihav-player-860a2b4e79e9ad5b4ba4046998170755ff9bf45c.tar.gz
initial player implementation
-rw-r--r--  Cargo.toml   13
-rw-r--r--  src/main.rs 586
2 files changed, 599 insertions(+), 0 deletions(-)
diff --git a/Cargo.toml b/Cargo.toml
new file mode 100644
index 0000000..ce06b0e
--- /dev/null
+++ b/Cargo.toml
@@ -0,0 +1,13 @@
+[package]
+name = "nihav-player"
+version = "0.1.0"
+authors = ["Kostya Shishkov <kostya.shishkov@gmail.com>"]
+edition = "2018"
+
+[dependencies]
+nihav_core = { path="../nihav-core" }
+nihav_allstuff = { path="../nihav-allstuff" }
+sdl = { path="./rust-sdl" }
+
+#sdl2 = "^0.32"
+#download_sdl2 = "0.0.3"
\ No newline at end of file
diff --git a/src/main.rs b/src/main.rs
new file mode 100644
index 0000000..e0c9340
--- /dev/null
+++ b/src/main.rs
@@ -0,0 +1,586 @@
+extern crate sdl;
+extern crate nihav_core;
+extern crate nihav_allstuff;
+
+use sdl::video::*;
+use sdl::audio::{DesiredAudioSpec, Channels, AudioFormat, AudioCallback};
+use sdl::event::{Event, Key};
+
+use std::env;
+use std::fs::File;
+use std::path::Path;
+use std::time::{Duration, SystemTime};
+use std::thread;
+use std::sync::mpsc;
+use std::sync::{Arc, Mutex};
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::str::FromStr;
+
+use nihav_core::detect;
+use nihav_core::formats::*;
+use nihav_core::frame::*;
+use nihav_core::io::byteio::{FileReader, ByteReader};
+use nihav_core::reorder::*;
+use nihav_core::codecs::*;
+use nihav_core::demuxers::*;
+use nihav_core::scale::*;
+use nihav_core::soundcvt::*;
+use nihav_allstuff::*;
+
+const AUDIO_BUF_SIZE: usize = 1024;
+
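+/// Byte FIFO between the audio decoder thread and the SDL audio callback.
+/// `full` is set once more than `max_len` bytes would be queued and cleared
+/// after half of the buffer has been drained; after `finish()` the consumer
+/// is fed silence.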
+struct AudioFIFO {
+    data:    Vec<u8>,
+    max_len: usize,
+    pos:     usize,
+    end:     usize,
+    done:    bool,
+    full:    bool,
+}
+
+impl AudioFIFO {
+    fn new(len: usize, max_len: usize) -> Self {
+        Self { data: vec![0; len], max_len, pos: 0, end: 0, done: false, full: false }
+    }
+    fn add(&mut self, src: &[u8]) -> bool {
+        if self.done || self.full { return true; }
+        if self.pos > 0 {
+            self.data.copy_within(self.pos..self.end, 0);
+            self.end -= self.pos;
+            self.pos = 0;
+        }
+        if self.end + src.len() > self.max_len {
+            self.full = true;
+            return false;
+        }
+        if self.end + src.len() > self.data.len() { self.data.resize(self.end + src.len(), 0); }
+        self.data[self.end..][..src.len()].copy_from_slice(src);
+        self.end += src.len();
+        true
+    }
+    fn consume(&mut self, dst: &mut [u8]) -> bool {
+        if self.done { for el in dst.iter_mut() { *el = 0; } return true; }
+        let size = dst.len();
+        if self.end - self.pos < size { return false; }
+        dst.copy_from_slice(&self.data[self.pos..][..size]);
+        self.pos += size;
+        if self.pos >= self.max_len / 2 { self.full = false; }
+        true
+    }
+    fn finish(&mut self) {
+        self.done = true;
+    }
+}
+
+struct AudioConsumer {
+    afifo: Arc<Mutex<AudioFIFO>>,
+}
+
+impl AudioConsumer {
+    fn new(afifo: Arc<Mutex<AudioFIFO>>) -> Self { Self { afifo } }
+}
+
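+/// SDL audio callback: blocks (polling the FIFO) until it can fill `out`.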
+impl AudioCallback for AudioConsumer {
+    fn callback(&mut self, out: &mut [u8]) {
+        let mut done = false;
+        while !done {
+            if let Ok(mut afifo) = self.afifo.lock() {
+                done = afifo.consume(out);
+            }
+            if !done { thread::sleep(Duration::from_millis(400)); }
+        }
+    }
+}
+
+impl Drop for AudioConsumer {
+    fn drop(&mut self) {
+        if let Ok(mut afifo) = self.afifo.lock() {
+            afifo.finish();
+        }
+    }
+}
+
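+/// Everything needed to decode one stream: the decoder support structure
+/// (with frame pools), the decoder itself and a frame reorderer.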
+struct DecoderStuff {
+    dsupp: Box<NADecoderSupport>,
+    dec:   Box<dyn NADecoder + Send>,
+    reord: Box<dyn FrameReorderer + Send>,
+}
+
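+/// Messages passed from the demuxing loop to the decoder threads.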
+enum SendEvent {
+    Packet(NAPacket),
+    EOF,
+}
+
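+/// Decoded data sent to the main thread for presentation; video events
+/// carry their display time in milliseconds.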
+enum DisplayEvent {
+    RGB(u64, NABufferRef<Surface>),
+    YUV(u64, NABufferRef<Overlay>),
+    Audio(NABufferType),
+}
+
+impl DisplayEvent {
+    fn get_time(&self) -> u64 {
+        match *self {
+            DisplayEvent::RGB(time, _) => time,
+            DisplayEvent::YUV(time, _) => time,
+            _ => 0,
+        }
+    }
+}
+
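+/// Channels and flags shared with a decoder thread: the packet receiver,
+/// the display-event sender, the queued-events counter and the flag that
+/// signals that the stream has ended.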
+struct CommonMessages {
+    receiver: mpsc::Receiver<SendEvent>,
+    esend:    mpsc::SyncSender<DisplayEvent>,
+    ev_mtx:   Arc<Mutex<isize>>,
+    finished: Arc<AtomicBool>,
+}
+
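+// Appends decoded samples to the audio FIFO, retrying every 100 ms while
+// the FIFO is busy. The sample slice is reinterpreted as bytes since both
+// the FIFO and SDL operate on raw bytes.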
+fn add_audio<T: Copy>(amtx: &mut Arc<Mutex<AudioFIFO>>, data: &[T]) {
+    let len = std::mem::size_of::<T>() * data.len();
+    let mut done = false;
+    while !done {
+        if let Ok(mut afifo) = amtx.try_lock() {
+            let u8_data = unsafe { std::slice::from_raw_parts(data.as_ptr() as *const u8, len) };
+            done = afifo.add(u8_data);
+        }
+        if !done { std::thread::sleep(Duration::from_millis(100)); }
+    }
+}
+
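+// Opens the SDL audio output and reports the format that decoded audio has
+// to be converted into before being queued.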
+fn open_audio(arate: u32, amtx: Arc<Mutex<AudioFIFO>>) -> NAAudioInfo {
+    let dspec = DesiredAudioSpec {
+        freq: arate as i32, format: AudioFormat::S16Lsb, channels: Channels::Stereo,
+        samples: AUDIO_BUF_SIZE as u16,
+        callback: Box::new(AudioConsumer::new(amtx)),
+    };
+    let tspec = sdl::audio::open(dspec).unwrap();
+//println!("target spec: {} Hz fmt {:X} {} ch {} samp {} size", tspec.freq, tspec.format as u32, if tspec.channels == Channels::Stereo { 2 } else { 1 }, tspec.samples, tspec.size);
+    sdl::audio::pause(true);
+    let dst_ch = if tspec.channels == Channels::Stereo { 2 } else { 1 };
+    let snd_fmt = match tspec.format {
+        AudioFormat::U8     => SND_U8_FORMAT,
+        AudioFormat::S16Lsb => SND_S16_FORMAT,
+        AudioFormat::S16Msb => NASoniton { bits: 16, be: true, packed: false, planar: false, float: false, signed: true },
+        _                   => unimplemented!(),
+    };
+    NAAudioInfo { sample_rate: tspec.freq as u32, channels: dst_ch, format: snd_fmt, block_len: 1024 }
+}
+
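+// Spawns the audio decoding thread: decodes every received packet, converts
+// the output to the playback format and queues it as a display event until
+// EOF or a channel error.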
+fn start_audio_dec(audio_dec: Option<DecoderStuff>, dst_info: NAAudioInfo, cmsg: CommonMessages) -> thread::JoinHandle<()> {
+    thread::spawn(move || {
+        let mut audio_dec = audio_dec.unwrap();
+        let dst_chmap = if dst_info.channels == 2 {
+            NAChannelMap::from_str("L,R").unwrap()
+        } else {
+            NAChannelMap::from_str("C").unwrap()
+        };
+        loop {
+            let pkt = match cmsg.receiver.recv() {
+                Ok(SendEvent::Packet(pkt)) => pkt,
+                Ok(SendEvent::EOF) | Err(_) => break,
+            };
+            if let Ok(frm) = audio_dec.dec.decode(&mut audio_dec.dsupp, &pkt) {
+                let buf = frm.get_buffer();
+                let out_buf = convert_audio_frame(&buf, &dst_info, &dst_chmap).unwrap();
+                cmsg.esend.send(DisplayEvent::Audio(out_buf)).unwrap();
+                let mut count = cmsg.ev_mtx.lock().unwrap();
+                *count += 1;
+                drop(count);
+                thread::yield_now();
+            } else {
+                println!("error decoding audio");
+            }
+        }
+        cmsg.finished.store(true, Ordering::Relaxed);
+    })
+}
+
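+// Spawns the video decoding thread: decoded frames are reordered, scaled and
+// converted either to RGB32 (blitted as an SDL surface) or to YUV420 (shown
+// via a YV12 overlay), then queued along with their display time.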
+fn start_video_dec(video_dec: Option<DecoderStuff>, ifmt: Option<NAVideoInfo>, width: usize, height: usize, tb_num: u32, tb_den: u32, cmsg: CommonMessages, scr_mtx: Arc<Mutex<NABufferRef<Surface>>>) -> thread::JoinHandle<()> {
+    thread::spawn(move || {
+        let mut ifmt = ifmt.unwrap();
+        let mut video_dec = video_dec.unwrap();
+        let rgb32_fmt = NAPixelFormaton {
+            model: ColorModel::RGB(RGBSubmodel::RGB), components: 3,
+            comp_info: [
+                Some(NAPixelChromaton { h_ss: 0, v_ss: 0, packed: true, depth: 8, shift: 0, comp_offs: 0, next_elem: 4 }),
+                Some(NAPixelChromaton { h_ss: 0, v_ss: 0, packed: true, depth: 8, shift: 0, comp_offs: 1, next_elem: 4 }),
+                Some(NAPixelChromaton { h_ss: 0, v_ss: 0, packed: true, depth: 8, shift: 0, comp_offs: 2, next_elem: 4 }),
+                None, None
+            ],
+            elem_size: 4, be: false, alpha: false, palette: false
+        };
+        let ofmt_rgb = ScaleInfo { width, height, fmt: rgb32_fmt };
+        let ofmt_yuv = ScaleInfo { width, height, fmt: YUV420_FORMAT };
+
+        let sc_ifmt = ScaleInfo { width: ifmt.get_width(), height: ifmt.get_height(), fmt: ifmt.get_format() };
+        let mut do_yuv = if let ColorModel::YUV(_) = ifmt.get_format().get_model() { true } else { false };
+        let ofmt = if do_yuv { ofmt_yuv } else { ofmt_rgb };
+        let mut opic = alloc_video_buffer(NAVideoInfo::new(width, height, false, ofmt.fmt), 4).unwrap();
+        let mut scaler = NAScale::new(sc_ifmt, ofmt).unwrap();
+        loop {
+            let pkt = match cmsg.receiver.recv() {
+                Ok(SendEvent::Packet(pkt)) => pkt,
+                Ok(SendEvent::EOF) | Err(_) => break,
+            };
+            if let Ok(frm) = video_dec.dec.decode(&mut video_dec.dsupp, &pkt) {
+                video_dec.reord.add_frame(frm);
+                while let Some(frm) = video_dec.reord.get_frame() {
+                    let bt = frm.get_buffer();
+                    if let NABufferType::None = bt { continue; }
+                    let vinfo = bt.get_video_info().unwrap();
+                    if ifmt.get_width() != vinfo.get_width() ||
+                       ifmt.get_height() != vinfo.get_height() ||
+                       ifmt.get_format() != vinfo.get_format() {
+                        println!("reinit scaler!");
+                        ifmt = vinfo.clone();
+                        let sc_ifmt = ScaleInfo { width: ifmt.get_width(), height: ifmt.get_height(), fmt: ifmt.get_format() };
+                        do_yuv = if let ColorModel::YUV(_) = ifmt.get_format().get_model() { true } else { false };
+                        let ofmt = if do_yuv { ofmt_yuv } else { ofmt_rgb };
+                        opic = alloc_video_buffer(NAVideoInfo::new(width, height, false, ofmt.fmt), 4).unwrap();
+                        scaler = NAScale::new(sc_ifmt, ofmt).unwrap();
+                    }
+                    if let Err(err) = scaler.convert(&bt, &mut opic) {
+                        println!(" scaler error {:?}", err);
+                        continue;
+                    }
+                    let ts = frm.get_dts().unwrap_or(frm.get_pts().unwrap_or(0));
+                    let time = NATimeInfo::ts_to_time(ts, 1000, tb_num, tb_den);
+
+                    let buf = opic.get_vbuf().unwrap();
+                    if !do_yuv {
+                        let sstride = buf.get_stride(0);
+                        let src = buf.get_data();
+                        let surface = Surface::new(&[SurfaceFlag::SWSurface], width as isize, height as isize, 32, 0x000000FF, 0x0000FF00, 0x00FF0000, 0x00000000).unwrap();
+                        let pitch = unsafe { (*surface.raw).pitch } as usize;
+                        surface.with_lock(|x: &mut [u8]| -> bool {
+                            let csize = sstride.min(pitch);
+                            for (dst, src) in x.chunks_mut(pitch).zip(src.chunks(sstride)) {
+                                dst[..csize].copy_from_slice(&src[..csize]);
+                            }
+                            true
+                        });
+                        let mut count = cmsg.ev_mtx.lock().unwrap();
+                        cmsg.esend.send(DisplayEvent::RGB(time, NABufferRef::new(surface))).unwrap();
+                        *count += 1;
+                    } else {
+                        let screen = scr_mtx.lock().unwrap();
+                        let overlay = screen.create_overlay(width as isize, height as isize, OverlayFormat::YV12).unwrap();
+                        drop(screen);
+                        while !overlay.lock() {}
+                        let src = buf.get_data();
+                        let ysstride = buf.get_stride(0);
+                        let ysrc = &src[buf.get_offset(0)..];
+                        let usstride = buf.get_stride(2);
+                        let usrc = &src[buf.get_offset(2)..];
+                        let vsstride = buf.get_stride(1);
+                        let vsrc = &src[buf.get_offset(1)..];
+                        unsafe {
+                            let ydst = overlay.get_pixel_ptr(0);
+                            let ydstride = overlay.get_pitch(0);
+                            let udst = overlay.get_pixel_ptr(1);
+                            let udstride = overlay.get_pitch(1);
+                            let vdst = overlay.get_pixel_ptr(2);
+                            let vdstride = overlay.get_pitch(2);
+                            for (ydst, ysrc) in ydst.chunks_mut(ydstride).take(height).zip(ysrc.chunks(ysstride)) {
+                                ydst[..width].copy_from_slice(&ysrc[..width]);
+                            }
+                            for (udst, usrc) in udst.chunks_mut(udstride).take(height).zip(usrc.chunks(usstride)) {
+                                udst[..width / 2].copy_from_slice(&usrc[..width / 2]);
+                            }
+                            for (vdst, vsrc) in vdst.chunks_mut(vdstride).take(height).zip(vsrc.chunks(vsstride)) {
+                                vdst[..width / 2].copy_from_slice(&vsrc[..width / 2]);
+                            }
+                        }
+                        overlay.unlock();
+                        let mut count = cmsg.ev_mtx.lock().unwrap();
+                        cmsg.esend.send(DisplayEvent::YUV(time, NABufferRef::new(overlay))).unwrap();
+                        *count += 1;
+                    }
+                }
+            } else {
+                println!("error decoding video");
+            }
+        }
+        cmsg.finished.store(true, Ordering::Relaxed);
+    })
+}
+
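+// Opens the input, sets up the demuxer, decoders and SDL output, spawns the
+// decoder threads and runs the demuxing/presentation loop.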
+fn play_file(name: &str) {
+    let path = Path::new(name);
+    let mut file = File::open(path).unwrap();
+    let mut fr = FileReader::new_read(&mut file);
+    let mut br = ByteReader::new(&mut fr);
+    let res = detect::detect_format(name, &mut br);
+    if res.is_none() {
+        println!("cannot detect format for {}", name);
+        return;
+    }
+    let (dmx_name, _) = res.unwrap();
+    println!("trying demuxer {} on {}", dmx_name, name);
+
+    let mut dmx_reg = RegisteredDemuxers::new();
+    nihav_register_all_demuxers(&mut dmx_reg);
+    let mut dec_reg = RegisteredDecoders::new();
+    nihav_register_all_codecs(&mut dec_reg);
+
+    let dmx_fact = dmx_reg.find_demuxer(dmx_name).unwrap();
+    br.seek(SeekFrom::Start(0)).unwrap();
+    let mut dmx = create_demuxer(dmx_fact, &mut br).unwrap();
+
+    let mut width = 640;
+    let mut height = 480;
+    let mut ifmt = None;
+    let mut tb_num = 0;
+    let mut tb_den = 0;
+    let mut arate = 0;
+    let mut video_str = 0;
+    let mut audio_str = 0;
+
+    let mut video_dec: Option<DecoderStuff> = None;
+    let mut audio_dec: Option<DecoderStuff> = None;
+
+    for i in 0..dmx.get_num_streams() {
+        let s = dmx.get_stream(i).unwrap();
+        let info = s.get_info();
+        let decfunc = dec_reg.find_decoder(info.get_name());
+        println!("stream {} - {} {}", i, s, info.get_name());
+        let str_id = s.get_id();
+        if info.is_video() {
+            if video_dec.is_none() {
+                if decfunc.is_none() {
+                    println!("no video decoder for {} found!", info.get_name());
+                    return;
+                }
+                let mut dec = (decfunc.unwrap())();
+                let mut dsupp = Box::new(NADecoderSupport::new());
+                let props = info.get_properties().get_video_info().unwrap();
+                if props.get_width() != 0 {
+                    width = props.get_width();
+                    height = props.get_height();
+                    ifmt = Some(props.clone());
+                }
+                let reorder_depth = 3;
+                dsupp.pool_u8 = NAVideoBufferPool::new(reorder_depth);
+                dsupp.pool_u16 = NAVideoBufferPool::new(reorder_depth);
+                dsupp.pool_u32 = NAVideoBufferPool::new(reorder_depth);
+                dec.init(&mut dsupp, info).unwrap();
+                let reord = Box::new(IPBReorderer::new());
+                video_dec = Some(DecoderStuff { dsupp, dec, reord });
+                video_str = str_id;
+                let (tbn, tbd) = s.get_timebase();
+                tb_num = tbn;
+                tb_den = tbd;
+            }
+        } else if info.is_audio() {
+            if audio_dec.is_none() {
+                if decfunc.is_none() {
+                    println!("no audio decoder for {} found!", info.get_name());
+                } else {
+                    let mut dec = (decfunc.unwrap())();
+                    let mut dsupp = Box::new(NADecoderSupport::new());
+                    let props = info.get_properties().get_audio_info().unwrap();
+                    arate = props.get_sample_rate();
+                    dec.init(&mut dsupp, info).unwrap();
+                    let reord = Box::new(NoReorderer::new());
+                    audio_dec = Some(DecoderStuff { dsupp, dec, reord });
+                    audio_str = str_id;
+                }
+            }
+        } else {
+            println!("stream {} is neither video nor audio, ignoring", info.get_name());
+        }
+    }
+
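+    // integer-upscale small videos so the window has a reasonable size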
+    while (width <= 384) && (height <= 288) {
+        width <<= 1;
+        height <<= 1;
+    }
+
+    sdl::init(&[sdl::InitFlag::Video, sdl::InitFlag::Audio]);
+    sdl::wm::set_caption("NihAV Player", "nihav-player");
+    let screen = match sdl::video::set_video_mode(width as isize, height as isize, 32,
+                                                  &[SurfaceFlag::HWSurface, SurfaceFlag::AsyncBlit, SurfaceFlag::HWAccel],
+                                                  &[VideoFlag::DoubleBuf]) {
+        Ok(screen) => screen,
+        Err(err) => panic!("failed to set video mode: {}", err),
+    };
+
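+    // packets travel to the decoder threads over vsend/asend, decoded data
+    // comes back over esend, and events_mtx counts the queued display events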
+    let (vsend, vrecv) = mpsc::sync_channel::<SendEvent>(0);
+    let (asend, arecv) = mpsc::sync_channel::<SendEvent>(0);
+    let (esend, erecv) = mpsc::sync_channel::<DisplayEvent>(50);
+    let events_mtx = Arc::new(Mutex::new(0isize));
+
+    let has_audio = audio_dec.is_some();
+    let mut amtx = Arc::new(Mutex::new(AudioFIFO::new((arate * 8) as usize, (arate * 80) as usize)));
+    let aud_finished = Arc::new(AtomicBool::new(!has_audio));
+    let audio_thread = if has_audio {
+        let ainfo = open_audio(arate, amtx.clone());
+        let cmsg = CommonMessages {
+            receiver: arecv,
+            esend: esend.clone(),
+            ev_mtx: events_mtx.clone(),
+            finished: aud_finished.clone(),
+        };
+        Some(start_audio_dec(audio_dec, ainfo, cmsg))
+    } else { None };
+
+    let has_video = video_dec.is_some();
+    let scr_mtx = Arc::new(Mutex::new(NABufferRef::new(screen)));
+    let vid_finished = Arc::new(AtomicBool::new(!has_video));
+    let video_thread = if has_video {
+        let cmsg = CommonMessages {
+            receiver: vrecv,
+            esend,
+            ev_mtx: events_mtx.clone(),
+            finished: vid_finished.clone(),
+        };
+        Some(start_video_dec(video_dec, ifmt, width, height, tb_num, tb_den, cmsg, scr_mtx.clone()))
+    } else { None };
+
+    let mut frame_queue: Vec<DisplayEvent> = Vec::new();
+
+    let systime = SystemTime::now();
+    let mut has_data = true;
+
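+    // main loop: poll UI events, feed packets to the decoders, drain decoded
+    // events and present video frames at the right moment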
+    'main: loop {
+        'event: loop {
+            match sdl::event::poll_event() {
+                Event::Quit => break 'main,
+                Event::None => break 'event,
+                Event::Key(k, _, _, _) if k == Key::Escape || k == Key::Q => break 'main,
+                Event::Key(k, _, _, _) if k == Key::Space => continue 'event,
+                _ => {},
+            }
+        }
+        if has_data {
+            match dmx.get_frame() {
+                Err(DemuxerError::EOF) => {
+                    has_data = false;
+                    if has_video { vsend.send(SendEvent::EOF).unwrap(); }
+                    if has_audio { asend.send(SendEvent::EOF).unwrap(); }
+                },
+                Err(_) => break,
+                Ok(pkt) => {
+                    let streamno = pkt.get_stream().get_id();
+                    if has_video && streamno == video_str {
+                        vsend.send(SendEvent::Packet(pkt)).unwrap();
+                    } else if has_audio && streamno == audio_str {
+                        asend.send(SendEvent::Packet(pkt)).unwrap();
+                    }
+                },
+            }
+        }
+
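+        // drain whatever the decoder threads have produced so far: audio goes
+        // straight to the FIFO, video frames wait in the presentation queue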
+        let mut nevents = events_mtx.lock().unwrap();
+        while *nevents > 0 {
+            *nevents -= 1;
+            let ret = erecv.recv();
+            if ret.is_err() { break 'main; }
+            let mut disp_evt = ret.unwrap();
+            match disp_evt {
+                DisplayEvent::Audio(ref mut out_buf) => {
+                    unsafe { sdl::audio::ll::SDL_LockAudio(); }
+                    match out_buf {
+                        NABufferType::AudioPacked(buf) => add_audio(&mut amtx, buf.get_data()),
+                        NABufferType::AudioU8(buf)     => add_audio(&mut amtx, buf.get_data()),
+                        NABufferType::AudioI16(buf)    => add_audio(&mut amtx, buf.get_data()),
+                        NABufferType::AudioI32(buf)    => add_audio(&mut amtx, buf.get_data()),
+                        NABufferType::AudioF32(buf)    => add_audio(&mut amtx, buf.get_data()),
+                        _ => unreachable!(),
+                    };
+                    unsafe { sdl::audio::ll::SDL_UnlockAudio(); }
+                    sdl::audio::pause(false);
+                },
+                _ => frame_queue.push(disp_evt),
+            };
+        }
+        drop(nevents);
+        if vid_finished.load(Ordering::Relaxed) && aud_finished.load(Ordering::Relaxed) &&
+           frame_queue.is_empty() {
+            break;
+        }
+
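+        // show the next frame if its display time is within 10 ms of now;
+        // sleep (at most 20 ms) if it is early, drop it if it is already late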
+        if !frame_queue.is_empty() {
+            let cur_time = systime.elapsed().unwrap();
+            let disp_time = Duration::from_millis(frame_queue[0].get_time());
+
+//println!("cur time {:?} disp time {:?}", cur_time, disp_time);
+            if disp_time <= cur_time + Duration::from_millis(10) && disp_time + Duration::from_millis(10) >= cur_time {
+                let screen = scr_mtx.lock().unwrap();
+                let disp_evt = frame_queue.remove(0);
+                match disp_evt {
+                    DisplayEvent::RGB(_, ref surf) => {
+                        screen.blit(surf);
+                        screen.flip();
+                    },
+                    DisplayEvent::YUV(_, ref ovl) => {
+                        ovl.display(Some(screen.get_rect()));
+                    },
+                    _ => {},
+                };
+            } else if disp_time > cur_time {
+                thread::sleep((disp_time - cur_time).min(Duration::from_millis(20)));
+            } else {
+                frame_queue.remove(0);
+            }
+        }
+    }
+
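+    // teardown: mark the FIFO as finished so the audio callback stops
+    // waiting, close the packet channels and join the decoder threads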
+    if has_audio {
+        unsafe { sdl::audio::ll::SDL_LockAudio(); }
+        let mut afifo = amtx.lock().unwrap();
+        afifo.finish();
+        drop(afifo);
+        unsafe { sdl::audio::ll::SDL_UnlockAudio(); }
+        sdl::audio::pause(true);
+    }
+    drop(vsend);
+    drop(asend);
+    if let Some(vthread) = video_thread {
+        vthread.join().unwrap();
+    }
+    if let Some(athread) = audio_thread {
+        athread.join().unwrap();
+    }
+
+    if has_audio {
+        sdl::audio::close();
+    }
+}
+
+fn main() {
+    let args: Vec<_> = env::args().collect();
+
+    if args.len() == 1 {
+        println!("usage: nihav-player input");
+        return;
+    }
+    let name = args[1].as_str();
+
+    play_file(name);
+
+    sdl::quit();
+}