//! ferretro/ferretro_components/src/provided/ffmpeg.rs
//! (472 lines, 18 KiB, Rust — listing metadata from the original capture)

extern crate ffmpeg_next as ffmpeg;
use std::collections::VecDeque;
use std::error::Error;
use std::path::Path;
use crate::prelude::*;
use ffmpeg::{ChannelLayout, Packet, filter, format, frame, media};
use ffmpeg::util::rational::Rational;
use crate::base::ControlFlow;
/// Selects which encoder `receive_and_write_packets` should drain
/// and mux into the output context.
enum EncoderToWriteFrom {
    // Drain `video_encoder`; packets are tagged with stream index 0.
    Video,
    // Drain `audio_encoder`; packets are tagged with stream index 1.
    Audio,
}
/// Libretro component that records a core's audio/video output to a media
/// file via ffmpeg: two encoders, two filter graphs, and one output muxer.
pub struct FfmpegComponent {
    // Cached copy of the core's reported A/V parameters (geometry, timing).
    av_info: SystemAvInfo,
    // Interleaved stereo samples accumulated between frames; drained into
    // one audio frame per video frame in `post_run`.
    audio_buf: Vec<(i16, i16)>,
    // ffmpeg pixel format matching the core's `set_pixel_format` choice.
    video_pixel_format: format::Pixel,
    // Last frame received; re-queued by `video_refresh_dupe` on dupe frames.
    prev_video_frame: Option<frame::Video>,
    // Frames queued by `video_refresh`, consumed one per `post_run`.
    video_frames: VecDeque<frame::Video>,
    video_encoder: ffmpeg::encoder::Video,
    audio_encoder: ffmpeg::encoder::Audio,
    // Filter graphs converting raw core output into encoder-ready frames.
    video_filter: filter::Graph,
    audio_filter: filter::Graph,
    // Set once the game is loaded; geometry/format changes are then rejected
    // because the encoders and the muxer header are already fixed.
    frame_properties_locked: bool,
    // Output (muxer) context the encoded packets are written to.
    octx: ffmpeg::format::context::Output,
    // Monotonic frame counter, used as the PTS for queued frames.
    frame: i64,
}
/// Build the video filter graph ("buffer" source → "buffersink" sink) that
/// converts the core's raw frames into the encoder's pixel format.
///
/// The graph body is a passthrough ("null"); the actual conversion happens
/// because the sink is pinned to the encoder's pixel format.
///
/// # Panics
/// Panics if the "buffer"/"buffersink" filters are missing from the ffmpeg
/// build, or on filter-graph setup failure inside the `?`-free calls.
fn video_filter(
    video_encoder: &ffmpeg::encoder::video::Video,
    av_info: &SystemAvInfo,
    pix_fmt: PixelFormat,
) -> Result<filter::Graph, ffmpeg::Error> {
    let mut graph = filter::Graph::new();

    // Map the libretro pixel format to ffmpeg's pixel-format name, choosing
    // the byte order that matches the host for the packed 16-bit formats.
    let big_endian = cfg!(target_endian = "big");
    let source_fmt = match pix_fmt {
        PixelFormat::ARGB1555 => if big_endian { "rgb555be" } else { "rgb555le" },
        PixelFormat::ARGB8888 => "argb",
        PixelFormat::RGB565 => if big_endian { "rgb565be" } else { "rgb565le" },
    };

    let geometry = &av_info.geometry;
    // Pixel (sample) aspect ratio = display aspect / storage aspect.
    let sar = geometry.aspect_ratio / (geometry.base_width as f32 / geometry.base_height as f32);
    // Some cores report 0 fps before startup; assume 60 in that case.
    let fps = if av_info.timing.fps == 0.0 { 60.0 } else { av_info.timing.fps };

    // NOTE(review): time_base here is 1/fps while the encoder and stream use
    // a fixed 1/60 elsewhere — confirm these are meant to agree.
    let args = format!(
        "width={}:height={}:pix_fmt={}:frame_rate={}:pixel_aspect={}:time_base=1/{}",
        geometry.base_width,
        geometry.base_height,
        source_fmt,
        fps,
        sar,
        fps,
    );
    eprintln!("🎥 filter args: {}", args);

    graph.add(&filter::find("buffer").unwrap(), "in", &args)?;
    //scale?
    graph.add(&filter::find("buffersink").unwrap(), "out", "")?;
    {
        // Force the sink to deliver frames in the encoder's pixel format.
        let mut sink = graph.get("out").unwrap();
        sink.set_pixel_format(video_encoder.format());
    }

    graph.output("in", 0)?
        .input("out", 0)?
        .parse("null")?; // passthrough filter for video
    graph.validate()?;

    // human-readable filter graph
    eprintln!("{}", graph.dump());
    Ok(graph)
}
/// Build the audio filter graph ("abuffer" → "abuffersink") that converts
/// packed-s16 stereo input into the encoder's sample format/rate/layout.
///
/// # Panics
/// Panics if the "abuffer"/"abuffersink" filters are missing from the
/// ffmpeg build.
fn audio_filter(
    audio_encoder: &ffmpeg::codec::encoder::Audio,
    sample_rate: f64,
) -> Result<filter::Graph, ffmpeg::Error> {
    let mut graph = filter::Graph::new();

    // A core that hasn't reported a rate yet gets 32040 Hz as the default.
    let rate = if sample_rate == 0.0 { 32040.0 } else { sample_rate };
    let args = format!("sample_rate={}:sample_fmt=s16:channel_layout=stereo:time_base=1/60", rate);
    eprintln!("🔊 filter args: {}", args);

    graph.add(&filter::find("abuffer").unwrap(), "in", &args)?;
    //aresample?
    graph.add(&filter::find("abuffersink").unwrap(), "out", "")?;
    {
        // Pin the sink to exactly what the opened encoder expects.
        let mut sink = graph.get("out").unwrap();
        sink.set_sample_format(audio_encoder.format());
        sink.set_channel_layout(audio_encoder.channel_layout());
        sink.set_sample_rate(audio_encoder.rate());
    }

    graph.output("in", 0)?
        .input("out", 0)?
        .parse("anull")?;
    graph.validate()?;

    // human-readable filter graph
    eprintln!("{}", graph.dump());

    // Codecs that cannot handle variable frame sizes must be fed fixed-size
    // frames; make the sink chunk its output accordingly.
    if let Some(codec) = audio_encoder.codec() {
        let variable = codec
            .capabilities()
            .contains(ffmpeg::codec::capabilities::Capabilities::VARIABLE_FRAME_SIZE);
        if !variable {
            eprintln!("setting constant frame size {}", audio_encoder.frame_size());
            graph
                .get("out")
                .unwrap()
                .sink()
                .set_frame_size(audio_encoder.frame_size());
        }
    }

    Ok(graph)
}
impl RetroComponent for FfmpegComponent {
    /// Runs before each emulated frame: advances the monotonic frame counter
    /// used as the PTS for queued audio/video frames.
    fn pre_run(&mut self, _retro: &mut LibretroWrapper) -> ControlFlow {
        self.frame += 1;
        ControlFlow::Continue
    }

    /// Runs after each emulated frame: feeds the oldest queued video frame
    /// through the video filter graph into the video encoder, then packages
    /// the audio accumulated since the last frame and pushes it through the
    /// audio path. Encoded packets are muxed as soon as they appear.
    fn post_run(&mut self, _retro: &mut LibretroWrapper) -> ControlFlow {
        match self.video_frames.pop_front() {
            Some(mut vframe) => {
                vframe.set_pts(Some(self.frame));
                eprintln!("🎞 queue frame pts {:?}", vframe.pts());
                self.video_filter.get("in").unwrap().source().add(&vframe).unwrap();
                let mut filtered_vframe = frame::Video::empty();
                // Drain every frame the filter graph can currently deliver;
                // the sink returns an error (e.g. EAGAIN) when exhausted.
                loop {
                    match self.video_filter.get("out").unwrap().sink().frame(&mut filtered_vframe) {
                        Ok(..) => {
                            eprintln!("🎥 Got filtered video frame {}x{} pts {:?}", filtered_vframe.width(), filtered_vframe.height(), filtered_vframe.pts());
                            if self.video_filter.get("in").unwrap().source().failed_requests() > 0 {
                                println!("🎥 failed to put filter input frame");
                            }
                            //filtered_vframe.set_pts(Some(frame));
                            self.video_encoder.send_frame(&filtered_vframe).unwrap();
                            self.receive_and_write_packets(EncoderToWriteFrom::Video);
                        },
                        Err(e) => {
                            eprintln!("Error getting filtered video frame: {:?}", e);
                            break;
                        }
                    }
                }
                // NOTE(review): audio below is only flushed when a video frame
                // was available this iteration — samples keep buffering
                // otherwise. Confirm this pacing is intended.
                let mut aframe = frame::Audio::new(
                    format::Sample::I16(format::sample::Type::Packed),
                    self.audio_buf.len(),
                    ChannelLayout::STEREO
                );
                // An empty audio_buf yields a zero-sample frame with no
                // planes; skip the audio path entirely in that case.
                if aframe.planes() > 0 {
                    aframe.set_channels(2);
                    // NOTE(review): rate hard-coded to 44100 here, while the
                    // encoder uses av_info.timing.sample_rate — verify.
                    aframe.set_rate(44100);
                    aframe.set_pts(Some(self.frame));
                    let aplane: &mut [(i16, i16)] = aframe.plane_mut(0);
                    eprintln!("Audio buffer length {} -> {}", self.audio_buf.len(), aplane.len());
                    aplane.copy_from_slice(self.audio_buf.as_ref());
                    //eprintln!("src: {:?}, dest: {:?}", self.audio_buf, aplane);
                    self.audio_buf.clear();
                    eprintln!("frame audio: {:?}", aframe);
                    eprintln!("🎞 queue frame pts {:?}", aframe.pts());
                    self.audio_filter.get("in").unwrap().source().add(&aframe).unwrap();
                    let mut filtered_aframe = frame::Audio::empty();
                    // Same drain pattern as the video path above.
                    loop {
                        match self.audio_filter.get("out").unwrap().sink().frame(&mut filtered_aframe) {
                            Ok(..) => {
                                eprintln!("🔊 Got filtered audio frame {:?} pts {:?}", filtered_aframe, filtered_aframe.pts());
                                if self.audio_filter.get("in").unwrap().source().failed_requests() > 0 {
                                    println!("🎥 failed to put filter input frame");
                                }
                                //let faplane: &[f32] = filtered_aframe.plane(0);
                                //filtered_aframe.set_pts(Some(frame));
                                self.audio_encoder.send_frame(&filtered_aframe).unwrap();
                                self.receive_and_write_packets(EncoderToWriteFrom::Audio);
                            },
                            Err(e) => {
                                eprintln!("Error getting filtered audio frame: {:?}", e);
                                break;
                            }
                        }
                    }
                }
            },
            None => println!("Video not ready during frame {}", self.frame)
        }
        ControlFlow::Continue
    }

    /// After the ROM loads, lock geometry/pixel-format/timing changes: the
    /// encoders and the muxer header were already written with these values.
    fn post_load_game(&mut self, _retro: &mut LibretroWrapper, _rom: &Path) -> Result<(), Box<dyn Error>> {
        self.frame_properties_locked = true;
        Ok(())
    }
}
impl FfmpegComponent {
    /// Create a recording component that writes to `video_path`.
    ///
    /// Picks codecs from the container format's defaults, configures and
    /// opens both encoders, builds both filter graphs, and writes the
    /// container header.
    ///
    /// # Panics
    /// Panics (`unwrap`) if any ffmpeg object cannot be created or opened,
    /// or if a codec reports no supported formats.
    pub fn new(
        retro: &LibretroWrapper,
        video_path: impl AsRef<Path>,
    ) -> Self {
        let mut octx = format::output(&video_path).unwrap();
        let mut av_info = retro.get_system_av_info();
        let fps_int = av_info.timing.fps.round() as i32;
        // Cores may report 0 fps before startup; fall back to 60.
        let fps_int = if fps_int == 0 { 60 } else { fps_int };
        let detected_vcodec = octx.format().codec(&video_path, media::Type::Video);
        //let detected_acodec = octx.format().codec(&video_path, media::Type::Audio);
        // HACK: ask the muxer which audio codec it would choose for an
        // "out.wav" name instead of the actual output path.
        let wavname = Path::new("out.wav");
        let detected_acodec = octx.format().codec(&wavname, media::Type::Audio);
        let vcodec = ffmpeg::encoder::find(detected_vcodec).unwrap().video().unwrap();
        let acodec = ffmpeg::encoder::find(detected_acodec).unwrap().audio().unwrap();
        let mut video_output = octx.add_stream(vcodec).unwrap();
        video_output.set_time_base(Rational::new(1, 60));
        let mut video_encoder = video_output.codec().encoder().video().unwrap();
        video_encoder.set_bit_rate(2560000);
        // Use the first pixel format the codec advertises.
        video_encoder.set_format(video_encoder.codec().unwrap().video().unwrap().formats().unwrap().nth(0).unwrap());
        video_encoder.set_time_base(Rational::new(1, 60));
        video_encoder.set_frame_rate(Some(Rational::new(fps_int, 1)));
        //video_encoder.set_frame_rate(av_info.timing.fps.into());
        // Substitute placeholder geometry/rate when the core reports none.
        if av_info.geometry.base_height == 0 && av_info.geometry.base_width == 0 {
            av_info.geometry.base_width = 320;
            av_info.geometry.base_height = 224;
            av_info.geometry.aspect_ratio = 4.33;
        }
        if av_info.timing.sample_rate == 0.0 {
            av_info.timing.sample_rate = 44100.0;
        }
        video_encoder.set_width(av_info.geometry.base_width);
        video_encoder.set_height(av_info.geometry.base_height);
        //video_encoder.set_aspect_ratio(av_info.geometry.aspect_ratio as f64);
        let pix_fmt = PixelFormat::ARGB1555; // temporary until env call is made
        let video_filter = video_filter(&video_encoder, &av_info, pix_fmt).unwrap();
        let video_encoder = video_encoder.open_as(vcodec).unwrap();
        //video_output.set_parameters(&video_encoder);
        let mut audio_output = octx.add_stream(acodec).unwrap();
        let mut audio_encoder = audio_output.codec().encoder().audio().unwrap();
        //let mut video_encoder = octx.add_stream(vcodec).unwrap().codec().encoder().video().unwrap();
        /*
        let mut audio_output = octx.add_stream(acodec).unwrap();
        let mut audio_encoder = audio_output.codec().encoder().audio().unwrap();
        */
        /*
        retroarch inits
        static bool ffmpeg_init_config(struct ff_config_param *params,
        if (!ffmpeg_init_muxer_pre(handle))
        if (!ffmpeg_init_video(handle))
        av_frame_alloc
        */
        audio_encoder.set_bit_rate(640000);
        audio_encoder.set_max_bit_rate(990000);
        //audio_encoder.set_rate(44100);
        audio_encoder.set_rate(av_info.timing.sample_rate.round() as i32);
        audio_encoder.set_channels(2);
        audio_encoder.set_channel_layout(ChannelLayout::STEREO);
        // Use the first sample format the codec advertises.
        audio_encoder.set_format(audio_encoder.codec().unwrap().audio().unwrap().formats().unwrap().nth(0).unwrap());
        audio_encoder.set_time_base(Rational::new(1, 60));
        audio_output.set_time_base(Rational::new(1, 60));
        let audio_encoder = audio_encoder.open_as(acodec).unwrap();
        //audio_output.set_parameters(&audio_encoder);
        // Build the audio graph from the now-open encoder's negotiated params.
        let audio_filter = audio_filter(&audio_encoder, av_info.timing.sample_rate).unwrap();
        //audio_encoder.set_rate(av_info.timing.sample_rate.round() as i32);
        octx.write_header().unwrap();
        ffmpeg::format::context::output::dump(&octx, 0, None);
        let mut comp = FfmpegComponent {
            av_info: av_info.clone(),
            audio_buf: Default::default(),
            video_pixel_format: format::Pixel::RGB555,
            prev_video_frame: None,
            video_frames: Default::default(),
            video_encoder,
            audio_encoder,
            video_filter,
            audio_filter,
            frame_properties_locked: false,
            octx,
            frame: 0
        };
        comp.set_system_av_info(&av_info);
        comp
    }

    /// Drain every pending packet from the chosen encoder and write it
    /// interleaved into the output context. Loops until the encoder reports
    /// an error (typically EAGAIN — needs more input — or EOF).
    fn receive_and_write_packets(&mut self, encoder: EncoderToWriteFrom)
    {
        // Stream indices follow the order streams were added in `new`:
        // video first (0), audio second (1).
        let stream_index = match encoder {
            EncoderToWriteFrom::Video => 0,
            EncoderToWriteFrom::Audio => 1,
        };
        let mut encoded_packet = ffmpeg::Packet::empty();
        loop
        {
            match match encoder {
                EncoderToWriteFrom::Video => self.video_encoder.receive_packet(&mut encoded_packet),
                EncoderToWriteFrom::Audio => self.audio_encoder.receive_packet(&mut encoded_packet),
            } {
                Ok(..) => {
                    //if encoded_packet.size() > 0 {
                    encoded_packet.set_stream(stream_index);
                    eprintln!("📦 Writing packet, pts {:?} dts {:?} size {}", encoded_packet.pts(), encoded_packet.dts(), encoded_packet.size());
                    // NOTE(review): only video packets are rescaled from the
                    // 1/60 encoder time base to the stream's — confirm audio
                    // never needs the same conversion.
                    if stream_index == 0 {
                        encoded_packet.rescale_ts(Rational(1, 60), self.octx.stream(stream_index).unwrap().time_base());
                    }
                    eprintln!("📦 rescaled , pts {:?} dts {:?} size {}", encoded_packet.pts(), encoded_packet.dts(), encoded_packet.size());
                    match encoded_packet.write_interleaved(&mut self.octx) {
                        Ok(..) => eprintln!("Write OK"),
                        Err(e) => eprintln!("Error writing: {}", e),
                    }
                    //encoded_packet.write_interleaved(&mut self.octx).unwrap(); // AAA
                    //}
                    //else {
                    //eprintln!("Did not try to write 0-length packet");
                    //}
                },
                Err(e) => {
                    eprintln!("Error writing packet: {:?}", e);
                    break;
                }
            }
        }
    }

    /// Flush both encoders, drain their remaining packets, and write the
    /// container trailer, finalizing the file.
    ///
    /// # Panics
    /// Panics if flushing/EOF signalling or the trailer write fails. Also
    /// invoked from `Drop`, so calling it manually and then dropping the
    /// component will attempt the shutdown sequence twice.
    pub fn end(&mut self) {
        let mut packet = Packet::empty();
        eprintln!("flushed: {:?}", self.video_encoder.flush(&mut packet).unwrap());
        self.video_encoder.send_eof().unwrap();
        self.receive_and_write_packets(EncoderToWriteFrom::Video);
        self.audio_encoder.send_eof().unwrap();
        self.receive_and_write_packets(EncoderToWriteFrom::Audio);
        self.octx.write_trailer().unwrap();
    }
}
impl Drop for FfmpegComponent {
    /// Finalize the recording when the component is dropped.
    ///
    /// `end()` unwraps on encoder/muxer errors, and it panics outright if it
    /// was already called manually (double EOF/trailer). A panic escaping a
    /// `Drop` that runs during unwinding aborts the whole process, so:
    /// skip finalization while already panicking, and contain any panic
    /// raised by `end()` otherwise.
    fn drop(&mut self) {
        if std::thread::panicking() {
            // Already unwinding: a second panic here would abort. Losing the
            // trailer on a crash beats killing the process.
            return;
        }
        // SAFETY-OF-INTENT: AssertUnwindSafe is fine here — the component is
        // discarded immediately after drop, so partially-flushed state is
        // never observed again.
        if std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| self.end())).is_err() {
            eprintln!("error finalizing ffmpeg output during drop");
        }
    }
}
impl RetroCallbacks for FfmpegComponent {
    /// Receive one raw video frame from the core and queue it for encoding.
    /// Copies the whole plane when layouts match, otherwise row by row.
    fn video_refresh(&mut self, data: &[u8], width: u32, height: u32, pitch: u32) {
        let mut vframe = frame::Video::new(self.video_pixel_format, width, height);
        let stride = vframe.stride(0);
        let pitch = pitch as usize;
        let vplane = vframe.data_mut(0);
        if data.len() == vplane.len() && pitch == stride {
            // Fast path: core pitch equals ffmpeg stride; one memcpy.
            vplane.copy_from_slice(&data);
        } else {
            // Slow path: copy only the overlapping part of each row.
            for y in 0..(height as usize) {
                let ffbegin = y * stride;
                let lrbegin = y * pitch;
                let min = usize::min(stride, pitch);
                vplane[ffbegin..(ffbegin + min)].copy_from_slice(
                    &data[lrbegin..(lrbegin + min)]
                );
            }
        }
        //vframe.set_pts(Some(self.frame as i64));
        // Keep a copy so duplicate-frame callbacks can re-queue it.
        self.prev_video_frame.replace(vframe.clone());
        self.video_frames.push_back(vframe);
    }

    /// The core signalled a duplicate frame: re-queue the previous frame,
    /// or a blank frame if none has been received yet.
    fn video_refresh_dupe(&mut self, width: u32, height: u32, _pitch: u32) {
        if let Some(frame) = &self.prev_video_frame {
            self.video_frames.push_back(frame.clone());
        } else {
            let vframe = frame::Video::new(self.video_pixel_format, width, height);
            self.video_frames.push_back(vframe);
        }
    }

    /// Buffer a single stereo sample pair.
    fn audio_sample(&mut self, left: i16, right: i16) {
        self.audio_buf.push((left, right));
    }

    /// Buffer a batch of interleaved L/R samples by de-interleaving into
    /// (left, right) pairs.
    // NOTE(review): returns stereo_pcm.len() (sample count); libretro's batch
    // callback conventionally returns *frames* consumed — confirm against the
    // wrapper's expectation.
    fn audio_sample_batch(&mut self, stereo_pcm: &[i16]) -> usize {
        let left_iter = stereo_pcm.iter().step_by(2).cloned();
        let right_iter = stereo_pcm.iter().skip(1).step_by(2).cloned();
        self.audio_buf.extend(Iterator::zip(left_iter, right_iter));
        stereo_pcm.len()
    }

    /// Core requests a pixel format change. Refused after the game is loaded;
    /// otherwise records the format and rebuilds the video filter graph.
    fn set_pixel_format(&mut self, format: PixelFormat) -> Option<bool> {
        if self.frame_properties_locked {
            return Some(false);
        }
        self.video_pixel_format = match format {
            PixelFormat::ARGB1555 => format::Pixel::RGB555,
            PixelFormat::ARGB8888 => format::Pixel::RGB32,
            PixelFormat::RGB565 => format::Pixel::RGB565,
        };
        self.video_filter = video_filter(&self.video_encoder, &self.av_info, format).unwrap();
        Some(true)
    }

    /// Answer core option queries; only the parallel-n64 GPU plugin option
    /// is handled here.
    fn get_variable(&mut self, key: &str) -> Option<String> {
        match key {
            "parallel-n64-gfxplugin" => Some("angrylion".to_string()),
            _ => None,
        }
    }

    /// Core reports new A/V parameters. Refused after the game is loaded;
    /// otherwise updates the audio rate, cached timing, and geometry.
    fn set_system_av_info(&mut self, system_av_info: &SystemAvInfo) -> Option<bool> {
        if self.frame_properties_locked {
            return Some(false);
        }
        //self.video_encoder.set_frame_rate(system_av_info.timing.fps.into());
        //self.video_encoder.set_time_base(Rational::new(1, 60));
        //self.video_encoder.set_frame_rate(Some(Rational::new(1, 60)));
        if system_av_info.timing.sample_rate.round() as i32 > 0 {
            self.audio_encoder.set_rate(system_av_info.timing.sample_rate.round() as i32);
        }
        self.av_info.timing = system_av_info.timing.clone();
        self.set_geometry(&system_av_info.geometry);
        Some(true)
    }

    /// Core reports new video geometry. Refused after the game is loaded;
    /// otherwise resizes the encoder and rebuilds the video filter graph.
    fn set_geometry(&mut self, geometry: &GameGeometry) -> Option<bool> {
        if self.frame_properties_locked {
            return Some(false);
        }
        self.video_encoder.set_width(geometry.base_width);
        self.video_encoder.set_height(geometry.base_height);
        //self.video_encoder.set_aspect_ratio(geometry.aspect_ratio as f64);
        self.av_info.geometry = geometry.clone();
        // Map the stored ffmpeg format back to the libretro enum so the
        // filter can be rebuilt with the current source format.
        let pixel_format = match self.video_pixel_format {
            format::Pixel::RGB555 => PixelFormat::ARGB1555,
            format::Pixel::RGB32 => PixelFormat::ARGB8888,
            format::Pixel::RGB565 => PixelFormat::RGB565,
            _ => unimplemented!(),
        };
        self.video_filter = video_filter(&self.video_encoder, &self.av_info, pixel_format).unwrap();
        Some(true)
    }
}