fix audio sync when played back by QuickTime
The problem was on the audio side: I was setting the pts (presentation timestamp) after the frame had been filtered by ffmpeg, which can happen several frames later and in batches. Setting the pts before filtering instead made things worse at first, because for some reason the audio filter remaps the pts I use (frame count, time base 1/60) to the 1/44100 time base. The video filter does not do this, which is why I remap the video timestamps manually later, so I had to make that later remap step skip the audio stream.
parent 7c03aac388
commit 3beb1a8519
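In short: the audio frame now gets its pts in the 1/60 frame clock before it enters the filter graph (the audio filter rescales it to 1/44100 on its own), and the manual 1/60 -> stream time base rescale is applied to the video stream only. Below is a minimal sketch of those two pieces, assuming the ffmpeg-next Rust bindings and the recorder state used in this diff; the function names and the VIDEO_STREAM_INDEX constant are illustrative, with stream 0 taken to be video as in the "if stream_index == 0" check further down.

// Sketch only: assumes the ffmpeg-next crate (imported here as `ffmpeg`); the
// function names and VIDEO_STREAM_INDEX are illustrative, not the actual code.
use ffmpeg::{format, frame, Packet, Rational};

const VIDEO_STREAM_INDEX: usize = 0; // assumption: stream 0 = video, as in the diff below

// Stamp the audio frame with its pts in the 1/60 frame clock *before* it enters the
// filter graph; the audio filter rescales that pts to the 1/44100 sample clock itself.
fn queue_audio_frame(graph: &mut ffmpeg::filter::Graph, aframe: &mut frame::Audio, frame_no: i64) {
    aframe.set_channels(2);
    aframe.set_rate(44100);
    aframe.set_pts(Some(frame_no));
    graph.get("in").unwrap().source().add(aframe).unwrap();
}

// Rescale encoded timestamps from 1/60 to the stream time base for video only;
// audio packets already come out of the filter/encoder in the 1/44100 time base.
fn write_packet(octx: &mut format::context::Output, mut packet: Packet, stream_index: usize) {
    packet.set_stream(stream_index);
    if stream_index == VIDEO_STREAM_INDEX {
        let tb = octx.stream(stream_index).unwrap().time_base();
        packet.rescale_ts(Rational::new(1, 60), tb);
    }
    packet.write_interleaved(octx).unwrap();
}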
@@ -154,7 +154,7 @@ let wavname = Path::new("out.wav");
     video_encoder.set_format(video_encoder.codec().unwrap().video().unwrap().formats().unwrap().nth(0).unwrap());

     video_encoder.set_time_base(Rational::new(1, 60));
-    video_encoder.set_frame_rate(Some(Rational::new(60, 1)));
+    video_encoder.set_frame_rate(Some(Rational::new(fps_int, 1)));

     //video_encoder.set_frame_rate(av_info.timing.fps.into());

@@ -255,7 +255,9 @@ static bool ffmpeg_init_config(struct ff_config_param *params,
         //if encoded_packet.size() > 0 {
             encoded_packet.set_stream(stream_index);
             eprintln!("📦 Writing packet, pts {:?} dts {:?} size {}", encoded_packet.pts(), encoded_packet.dts(), encoded_packet.size());
+            if stream_index == 0 {
             encoded_packet.rescale_ts(Rational(1, 60), self.octx.stream(stream_index).unwrap().time_base());
+            }
             eprintln!("📦 rescaled , pts {:?} dts {:?} size {}", encoded_packet.pts(), encoded_packet.dts(), encoded_packet.size());

             match encoded_packet.write_interleaved(&mut self.octx) {
@@ -314,6 +316,7 @@ static bool ffmpeg_init_config(struct ff_config_param *params,
         );
         aframe.set_channels(2);
         aframe.set_rate(44100);
+        aframe.set_pts(Some(frame));
         let aplane: &mut [(i16, i16)] = aframe.plane_mut(0);
         eprintln!("Audio buffer length {} -> {}", self.audio_buf.len(), aplane.len());
         aplane.copy_from_slice(self.audio_buf.as_ref());
@@ -322,7 +325,6 @@ static bool ffmpeg_init_config(struct ff_config_param *params,

         eprintln!("frame audio: {:?}", aframe);

-        //aframe.set_pts(Some(frame));
         eprintln!("🎞 queue frame pts {:?}", aframe.pts());
         self.audio_filter.get("in").unwrap().source().add(&aframe).unwrap();

@@ -335,7 +337,7 @@ static bool ffmpeg_init_config(struct ff_config_param *params,
             println!("🎥 failed to put filter input frame");
         }
         //let faplane: &[f32] = filtered_aframe.plane(0);
-        filtered_aframe.set_pts(Some(frame));
+        //filtered_aframe.set_pts(Some(frame));

         self.audio_encoder.send_frame(&filtered_aframe).unwrap();
         self.receive_and_write_packets(EncoderToWriteFrom::Audio);
@@ -463,7 +465,7 @@ impl retro::wrapper::Handler for MyEmulator {

         self.video_encoder.set_width(geometry.base_width);
         self.video_encoder.set_height(geometry.base_height);
-        self.video_encoder.set_aspect_ratio(geometry.aspect_ratio as f64);
+        //self.video_encoder.set_aspect_ratio(geometry.aspect_ratio as f64);
         self.av_info.geometry = geometry;
         let pixel_format = match self.video_pixel_format {
             format::Pixel::RGB555 => PixelFormat::ARGB1555,