I am developing a chat application in WPF on .NET Framework 4.7.2.
I want to implement video recording using the PC's webcam.
Here is what I have so far:
I use AForge.Video and AForge.Video.DirectShow to access the webcam and grab the frames.
AForge raises the NewFrame event on a background (non-UI) thread for every frame it delivers. In the handler I save the frame to disk as an image and also pass it to the UI thread to display it:
private void Cam_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    // Handle frames coming from the camera (raised on a background thread by AForge)
    try
    {
        if (_recording)
        {
            // Clone the frame synchronously - eventArgs.Frame is owned by AForge and may be
            // reused once this handler returns - then save it on a background task so the
            // disk I/O does not block the capture thread.
            var frameToSave = (Bitmap)eventArgs.Frame.Clone();
            int frameNumber = imgNumber++;
            Task.Run(() =>
            {
                using (frameToSave)
                {
                    frameToSave.Save($@"{CurrentRecordingFolderForImages}/{frameNumber}-{guidName}.png", ImageFormat.Png);
                }
            });
        }

        // Convert the Bitmap to a BitmapImage so it can be shown on the UI
        BitmapImage bi;
        CurrentFrame = new Bitmap(eventArgs.Frame);
        using (var bitmap = (Bitmap)eventArgs.Frame.Clone())
        using (var ms = new MemoryStream())
        {
            bitmap.Save(ms, ImageFormat.Bmp);
            ms.Position = 0;
            bi = new BitmapImage();
            bi.BeginInit();
            bi.StreamSource = ms;
            bi.CacheOption = BitmapCacheOption.OnLoad; // load fully so the stream can be disposed
            bi.EndInit();
        }
        bi.Freeze(); // make it accessible from the UI thread

        Dispatcher.BeginInvoke(new Action(() =>
        {
            imageFrames.Source = bi;
        }));
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
    }
}
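For completeness, the camera itself is started along these lines (a minimal sketch, not the exact code from the sample project; the StartCamera/StopCamera methods, the videoSource field and the choice of device and capability are assumptions):

private VideoCaptureDevice videoSource;

private void StartCamera()
{
    // Pick the first available capture device (assumes at least one webcam is present)
    var videoDevices = new FilterInfoCollection(FilterCategory.VideoInputDevice);
    videoSource = new VideoCaptureDevice(videoDevices[0].MonikerString);

    // Optionally select an explicit resolution/frame-rate capability;
    // otherwise the driver default is used
    if (videoSource.VideoCapabilities.Length > 0)
        videoSource.VideoResolution = videoSource.VideoCapabilities[0];

    videoSource.NewFrame += Cam_NewFrame; // raised on a background thread for every captured frame
    videoSource.Start();
}

private void StopCamera()
{
    videoSource.SignalToStop();
    videoSource.WaitForStop();
    videoSource.NewFrame -= Cam_NewFrame;
}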
When the recording finishes, I take the images and build the video with ffmpeg:
public static void ImagesToVideo(string ffmpegpath, string guid, string CurrentRecordingFolderForImages, string outputPath, int frameRate, int quality, int avgFrameRate)
{
    var process = new Process
    {
        StartInfo = new ProcessStartInfo
        {
            FileName = ffmpegpath,
            // -r = frame rate, -vcodec = video codec, -crf = quality (0-51, lower is better);
            // paths are quoted in case they contain spaces
            Arguments = $@" -r {frameRate} -i ""{CurrentRecordingFolderForImages}\%d-{guid}.png"" -r {avgFrameRate} -vcodec libx264 -crf {quality} -pix_fmt yuv420p ""{outputPath}""",
            UseShellExecute = false,
            RedirectStandardOutput = true,
            CreateNoWindow = true,
            RedirectStandardError = true
        },
        EnableRaisingEvents = true,
    };
    process.Exited += ExeProcess_Exited;
    process.Start();

    // ffmpeg writes its progress/log output to stderr
    string processOutput;
    while ((processOutput = process.StandardError.ReadLine()) != null)
    {
        // TO-DO handle errors
        Debug.WriteLine(processOutput);
    }
}
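For reference, a call to this method would look roughly like the following; the ffmpeg path, output path and numeric values are placeholders, not the values the app actually uses:

// Hypothetical call once recording stops; all paths and values are placeholders
ImagesToVideo(
    ffmpegpath: @"C:\tools\ffmpeg\ffmpeg.exe",
    guid: guidName,
    CurrentRecordingFolderForImages: CurrentRecordingFolderForImages,
    outputPath: @"C:\temp\recording-video-only.mp4",
    frameRate: 16,      // rate at which the numbered PNGs are read in
    quality: 23,        // -crf value: lower = better quality, bigger file
    avgFrameRate: 30);  // frame rate of the produced video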
For the sound, I use NAudio to record and save it:
waveSource = new WaveIn();
// Set the format before creating the writer, and start recording only after everything is wired up
waveSource.WaveFormat = new WaveFormat(8000, 1); // 8 kHz, mono
waveSource.DataAvailable += new EventHandler<WaveInEventArgs>(waveSource_DataAvailable);
waveSource.RecordingStopped += new EventHandler<StoppedEventArgs>(waveSource_RecordingStopped);
waveFile = new WaveFileWriter(AudioFilePath, waveSource.WaveFormat);
waveSource.StartRecording();
private void waveSource_DataAvailable(object sender, WaveInEventArgs e)
{
    if (waveFile != null)
    {
        waveFile.Write(e.Buffer, 0, e.BytesRecorded);
        waveFile.Flush();
    }
}
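Recording is stopped with waveSource.StopRecording(), which raises RecordingStopped. That handler is not shown above; a minimal sketch of the usual cleanup (the exact code in the sample project may differ) is:

private void waveSource_RecordingStopped(object sender, StoppedEventArgs e)
{
    if (waveSource != null)
    {
        waveSource.Dispose();
        waveSource = null;
    }
    if (waveFile != null)
    {
        // Disposing the writer finalizes the WAV header; without it the file can be unreadable
        waveFile.Dispose();
        waveFile = null;
    }
    if (e.Exception != null)
    {
        Debug.WriteLine(e.Exception.Message);
    }
}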
and then ffmpeg again to merge the video with the sound:
public static void AddAudioToVideo(string ffmpegpath, string VideoPath, string AudioPath, string outputPath)
{
    _videoPath = VideoPath;
    _audioPath = AudioPath;
    var process = new Process
    {
        StartInfo = new ProcessStartInfo
        {
            FileName = ffmpegpath,
            // -map 0:v -map 1:a = video from the first input, audio from the second;
            // -c:v copy = copy the video stream without re-encoding; -shortest = stop at the shorter stream;
            // -y (overwrite output) must come before the output file; paths are quoted for spaces
            Arguments = $@" -i ""{VideoPath}"" -i ""{AudioPath}"" -map 0:v -map 1:a -c:v copy -shortest -y ""{outputPath}""",
            UseShellExecute = false,
            RedirectStandardOutput = true,
            CreateNoWindow = true,
            RedirectStandardError = true
        },
        EnableRaisingEvents = true,
    };
    process.Exited += ExeProcess_Exited;
    process.Start();

    // ffmpeg writes its progress/log output to stderr
    string processOutput;
    while ((processOutput = process.StandardError.ReadLine()) != null)
    {
        Debug.WriteLine(processOutput);
    }
}
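Putting it together, the post-processing after a recording stops is roughly the following sequence (paths and numeric values are placeholders). Since each method reads ffmpeg's stderr until the process exits, the two calls effectively run one after the other:

// Hypothetical post-processing sequence; all paths and values below are placeholders
string ffmpegPath      = @"C:\tools\ffmpeg\ffmpeg.exe";
string silentVideoPath = @"C:\temp\recording-video-only.mp4";
string finalVideoPath  = @"C:\temp\recording-final.mp4";

// 1. Turn the numbered PNG frames into a silent H.264 video
ImagesToVideo(ffmpegPath, guidName, CurrentRecordingFolderForImages,
    silentVideoPath, frameRate: 16, quality: 23, avgFrameRate: 30);

// 2. Mux the recorded WAV file into that video (the video stream is copied, not re-encoded)
AddAudioToVideo(ffmpegPath, silentVideoPath, AudioFilePath, finalVideoPath);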
Questions:
- Is there a better approach to achieve what I'm trying to do?
- My camera is capable of 30 fps, but I only receive about 16 fps. How can this happen?
- Sometimes the video and the audio are not synchronized.
I created a sample project: github.com/dinos19/WPFVideoRecorder