
I've got a program that uses the AForge libraries to keep a rolling 60 seconds of webcam images in a memory buffer. When movement is detected it records the next 30 seconds into the same buffer, overwriting the oldest 30 seconds. The result is 30 seconds of video either side of any movement that isn't already being recorded, giving a total of 60 seconds.

The problem is that 60 seconds of bitmap images from AForge, COMPRESSED, takes around 3GB of RAM. To top it off, the resulting AVI file is around 3MB. That's quite a difference!

Can anyone see where I might be going wrong? At this rate, it would be more beneficial to just record videos straight to disk for an hour each time and manually cycle through them for any events!

The system consists of the following three components:

CameraController.cs - Handles the initialisation of each connected webcam. I've left in the commented-out sections to give an idea of previous settings used.

public class CameraController : ServiceBase
    {

        // Called by the service control manager when the service starts
        protected override void OnStart(string[] args)
        {
            Start(60, 0.4f);
        }


        private FilterInfoCollection _VideoCaptureDevices;
        private MotionDetector _MotionDetector;
        private Dictionary<string, Recording> _Streams = new Dictionary<string, Recording>();
        private Dictionary<int, VideoCaptureDevice> _Devices = new Dictionary<int, VideoCaptureDevice>();
        private int _Framerate;


        private int _MaxVideoLength;
        private float _MotionSensitivity;


        public void Start(int maxVideoLength, float motionSensitivity){

            _MaxVideoLength = maxVideoLength;
            _MotionSensitivity = motionSensitivity;

            Init();

        }

        public void Init()
        {
            try
            {
                _MotionDetector = GetDefaultMotionDetector();

                _VideoCaptureDevices = new FilterInfoCollection(FilterCategory.VideoInputDevice);

                int counter = 0;
                foreach (FilterInfo device in _VideoCaptureDevices)
                {
                    var videoDevice = new VideoCaptureDevice(device.MonikerString);

                    //_Framerate = videoDevice.VideoCapabilities[0].AverageFrameRate == 0
                    //    ? 25
                    //    : videoDevice.VideoCapabilities[0].AverageFrameRate;

                    _Framerate = 15;

                    _Streams.Add(videoDevice.Source, new Recording(counter, device.Name, videoDevice.Source, _MaxVideoLength, _Framerate));

                    videoDevice.NewFrame += new NewFrameEventHandler(NewFrame);
                    videoDevice.Start();

                    _Devices.Add(counter++, videoDevice);
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
            }
        }


        public void NewFrame(object sender, NewFrameEventArgs eventArgs)
        {
            try
            {
                var device = (VideoCaptureDevice) sender;
                var stream = _Streams[device.Source];

                stream.AddBitmap((Bitmap) eventArgs.Frame.Clone());

                if (stream.IsRecording)
                {
                    stream.CheckRecording();

                    if (stream.SaveRequired)
                        stream.WriteToFile();
                }
                else
                {
                    var motion = _MotionDetector.ProcessFrame(stream.Bitmap);
                    if (motion > _MotionSensitivity)
                        stream.StartRecording();
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
            }
        }



        public void StopVideo(bool stopWebcams = false)
        {

            foreach (var device in _Devices)
            {
                var stream = _Streams[device.Value.Source];

                if(stream.IsRecording)
                    stream.FileWriter.Close();

                if(device.Value.IsRunning && stopWebcams)
                    device.Value.SignalToStop();
            }

        }





        public static AForge.Vision.Motion.MotionDetector GetDefaultMotionDetector()
        {
            AForge.Vision.Motion.IMotionDetector detector = null;
            AForge.Vision.Motion.IMotionProcessing processor = null;
            AForge.Vision.Motion.MotionDetector motionDetector = null;

            //detector = new AForge.Vision.Motion.TwoFramesDifferenceDetector()
            //{
            //  DifferenceThreshold = 15,
            //  SuppressNoise = true
            //};

            //detector = new AForge.Vision.Motion.CustomFrameDifferenceDetector()
            //{
            //  DifferenceThreshold = 15,
            //  KeepObjectsEdges = true,
            //  SuppressNoise = true
            //};

            detector = new AForge.Vision.Motion.SimpleBackgroundModelingDetector()
            {
                DifferenceThreshold = 10,
                FramesPerBackgroundUpdate = 10,
                KeepObjectsEdges = true,
                MillisecondsPerBackgroundUpdate = 10,
                SuppressNoise = true
            };

            //processor = new AForge.Vision.Motion.GridMotionAreaProcessing()
            //{
            //  HighlightColor = System.Drawing.Color.Red,
            //  HighlightMotionGrid = true,
            //  GridWidth = 100,
            //  GridHeight = 100,
            //  MotionAmountToHighlight = 100F
            //};

            processor = new AForge.Vision.Motion.BlobCountingObjectsProcessing()
            {
                //HighlightColor = System.Drawing.Color.Red,
                //HighlightMotionRegions = true,
                MinObjectsHeight = 10,
                MinObjectsWidth = 10
            };

            motionDetector = new AForge.Vision.Motion.MotionDetector(detector, processor);

            return motionDetector;
        }
    }
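For reference, a minimal console host along these lines can exercise the class above outside the Windows-service pipeline (only CameraController, Start and StopVideo come from the code in this post; the host itself is illustrative):

using System;

class Program
{
    static void Main()
    {
        // 60-second buffer window and 0.4 motion sensitivity, matching OnStart above
        var controller = new CameraController();
        controller.Start(60, 0.4f);

        Console.WriteLine("Monitoring webcams - press Enter to stop.");
        Console.ReadLine();

        // Close any open writers and signal the capture devices to stop
        controller.StopVideo(stopWebcams: true);
    }
}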

Then there's Recording.cs - Controls when to start/stop recordings and when to write them to file.

public class Recording
{
    public int Id { get; set; }
    public string Name { get; set; }
    public string Source { get; set; }
    public Bitmap Bitmap { get; set; }
    public bool IsRecording { get; set; }
    public bool SaveRequired { get; set; }
    public int TimeLimitSec { get; set; }
    public int FrameRate { get; set; }


    public string DirString = ConfigurationManager.AppSettings["DesinationFolder"].ToString();

    public Stopwatch Timer = new Stopwatch();

    public VideoFileWriter FileWriter = new VideoFileWriter();

    public VideoBuffer VideoBuffer;
    public int BufferPosition { get; set; }

    public Recording(int id, string name, string source, int timeLimit, int framerate)
    {
        Id = id;
        Name = name;
        Source = @source;
        IsRecording = false;
        SaveRequired = false;
        TimeLimitSec = timeLimit;
        FrameRate = framerate;
        VideoBuffer = new VideoBuffer(timeLimit, framerate);
    }

    public string FileName { get; set; }

    public void StartRecording()
    {
        IsRecording = true;
        Timer.Restart(); // Restart() resets and starts the stopwatch in one call
    }

    public void StopRecording()
    {
        IsRecording = false;
        SaveRequired = true;
        Timer.Reset(); // Reset() both stops and zeroes the stopwatch
    }

    public void WriteToFile()
    {
        try
        {
            if (!Directory.Exists(DirString))
                Directory.CreateDirectory(DirString);

            FileName = Path.Combine(DirString, "Video_" + Id + "_" + Name + "_" + DateTime.Now.ToFileTime() + ".avi");

            FileWriter.Open(FileName, Bitmap.Width, Bitmap.Height, FrameRate, VideoCodec.Default);

            for (int frame = 0; frame < VideoBuffer.BufferPosition; frame++)
            {
                FileWriter.WriteVideoFrame(Compression.Decompress<Bitmap>(VideoBuffer.Buffer[frame]));
            }

            FileWriter.Close();

            SaveRequired = false;
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }
    }


    public void AddBitmap(Bitmap bitmap)
    {
        try
        {
            this.Bitmap = bitmap;

            this.VideoBuffer.AddBitmap(bitmap);
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }
    }

    public void CheckRecording()
    {
        try
        {
            if (IsRecording && Timer.Elapsed.TotalSeconds > TimeLimitSec)
                StopRecording();
        }
        catch (Exception ex)
        {
            var msg = ex.Message;
            Console.WriteLine(ex.Message);
        }
    }

    private void SaveImage()
    {
        Bitmap.Save(@"D:\Storage\IMG_"+ Id + "_" + Name + "_" + DateTime.Now.ToFileTime() + ".jpg");
    }
}

And finally VideoBuffer.cs - Controls a running buffer of Bitmaps. Note that each Bitmap is compressed into a byte[] before being stored.

public class VideoBuffer
    {
        public int BufferLengthSeconds { get; set; }

        public byte[][] Buffer { get; set; }

        public int BufferPosition { get; set; }

        public int MaxPosition { get; set; }

        public bool Recorded { get; set; }


        public VideoBuffer(int secondsToBuffer, int framerate)
        {
            MaxPosition = secondsToBuffer * framerate * 2; // Twice the length: the buffered period before an event plus the recording period after it

            //Buffer = new Bitmap[MaxPosition + 1]; // Plus one allows us to add the latest bitmap and then clone everything but the first index
            Buffer = new byte[MaxPosition + 1][];


            BufferPosition = 0;
        }


        public void AddBitmap(Bitmap bitmap)
        {
            try
            {
                // If we haven't reached the maximum buffer size, keep adding as normal
                if (BufferPosition < MaxPosition)
                {
                    Buffer[BufferPosition++] = Compression.Compress(bitmap);
                }
                else
                {
                    // Otherwise, shift everything down one slot in place (dropping the
                    // oldest frame) and store the newest frame in the last used slot.
                    Array.Copy(Buffer, 1, Buffer, 0, MaxPosition);
                    Buffer[MaxPosition - 1] = Compression.Compress(bitmap);
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
            }
        }
    }
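The Compression helper referenced above isn't included in the post. Purely for context, here's a minimal sketch of one plausible shape that matches the call sites (GZip over a BinaryFormatter-serialised bitmap); the actual implementation may well differ:

using System.Drawing;
using System.IO;
using System.IO.Compression;
using System.Runtime.Serialization.Formatters.Binary;

public static class Compression
{
    // Serialise the bitmap and GZip the result
    public static byte[] Compress(Bitmap bitmap)
    {
        using (var output = new MemoryStream())
        {
            using (var gzip = new GZipStream(output, CompressionMode.Compress, leaveOpen: true))
            {
                new BinaryFormatter().Serialize(gzip, bitmap);
            }
            return output.ToArray();
        }
    }

    // Reverse of Compress; used as Compression.Decompress<Bitmap>(bytes) above
    public static T Decompress<T>(byte[] data) where T : class
    {
        using (var input = new MemoryStream(data))
        using (var gzip = new GZipStream(input, CompressionMode.Decompress))
        {
            return (T) new BinaryFormatter().Deserialize(gzip);
        }
    }
}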

So the question really is, how do I reduce the memory footprint of the buffer further, but still hold the last 30 seconds of video in memory at any one time?

I'm a bit burnt out this week and can't see what might be missing. Any suggestions are most welcome!

Dezzamondo
  • Would a memory mapped file do what you need? – mjwills Dec 01 '17 at 21:35
  • So why again you store 60 seconds in form of frames? – Evk Dec 01 '17 at 21:37
  • A memory mapped file might put too much load on the disk. But 6GB of RAM is not much these days; how much RAM do you have available for this? Obviously you don't have enough RAM, so you need to compress each frame for storage and decompress it again to work with it. https://stackoverflow.com/questions/3517965/convert-bmp-to-png-in-memory-for-clipboard-pasting-in-net Or go and buy more RAM for 50 dollars. – Harry Dec 02 '17 at 08:24

1 Answer


Some quick math: HD video at 1920x1080 with 24-bit color at 15 fps for 60 seconds is about 5.3 GB of raw bitmap data (1920 × 1080 × 3 bytes per frame, times 15 fps × 60 s = 900 frames). So your per-frame compression is only getting that down to about 3 GB.

The VideoFileWriter (why isn't that variable local to the function?) is using the default AVI video codec, which also compresses between frames. Since the frames are presumably mostly static, that saves a lot of space.

I would suggest finding a way to save your in-memory video as a compressed video stream.
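One way to act on that suggestion without restructuring the buffer (a sketch only; FrameCodec and its methods are illustrative names, not from AForge or the post) is to re-encode each frame as JPEG at reduced quality before it goes into the byte[][] buffer. That's still only per-frame compression, not a true video stream, but it usually shrinks raw frames by an order of magnitude or more:

using System.Drawing;
using System.Drawing.Imaging;
using System.IO;
using System.Linq;

public static class FrameCodec
{
    // JPEG-encode a frame at the given quality (0-100)
    public static byte[] Encode(Bitmap frame, long quality = 75L)
    {
        var jpeg = ImageCodecInfo.GetImageEncoders()
                                 .First(c => c.FormatID == ImageFormat.Jpeg.Guid);

        using (var parameters = new EncoderParameters(1))
        using (var ms = new MemoryStream())
        {
            parameters.Param[0] = new EncoderParameter(Encoder.Quality, quality);
            frame.Save(ms, jpeg, parameters);
            return ms.ToArray();
        }
    }

    // Decode back to a Bitmap when flushing the buffer to the VideoFileWriter
    public static Bitmap Decode(byte[] data)
    {
        using (var ms = new MemoryStream(data))
        using (var decoded = new Bitmap(ms))
        {
            // Copy so the returned Bitmap doesn't depend on the stream staying open
            return new Bitmap(decoded);
        }
    }
}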

NetMage
  • The `VideoFileWriter` was a hangover from refactoring an earlier version :) I think you're right with the compressed video stream suggestion. Also, I switched the Bitmap compression to convert the image into `.png` first, before then compressing it. Whilst it didn't improve the compression by much, it certainly improved the video quality, which was a bit of a surprise! – Dezzamondo Dec 02 '17 at 15:22