I want to get accurate transfer rate and time left while copying a large list of files (also big ones). This should be visualized via a main progress bar and defined labels for speed and time left.
At start of the routine, a backgroundworker iterate through a given directory and builds a dictionary with source and target path.
Mechanism found here: Calculating Time Remaining on File Copy. (Answer from Steven Liekens)
The problem is, that 100% progress is not never reached, it finished sometime with 96%-98%, but never with 100%. Maybe there is difference to the calculated totalBytes of the (recursive) directory calculation with a different method.
Next point is, that the transfer rate shown is even correct, if it was multiplied with 10 .... So the shown transfer rate with my ssd machine shows 30 MB/s, which is more like 300 MB/s. The shown rate is definitely wrong.
So 2 problems here: 1.) wrong calculation of the progress 2.) wrong transfer rate calculation (the speed is quite ok)
Can anyone point out why?
private void CopyFileList(DoWorkEventArgs eWorker)
{
if (fileWorker.CancellationPending)
{
eWorker.Cancel = true;
}
else
{
var totalBytes = GetTotalBytes();
var currentBytesTransferred = 0L;
var totalBytesTransferred = 0L;
var snapshots = new Queue<long>(30);
var timer = new System.Timers.Timer(100D);
foreach (var file in FilesToCopy)
{
var sourcePath = file.Key;
var destinationPath = file.Value;
var sourceFile = new FileInfo(sourcePath);
//var fileSize = sourceFile.Length;
timer.Elapsed += (sender, e) =>
{
if (fileWorker.CancellationPending)
{
eWorker.Cancel = true;
}
else
{
// Remember only the last 30 snapshots; discard older snapshots
if (snapshots.Count == 30)
{
snapshots.Dequeue();
}
snapshots.Enqueue(Interlocked.Exchange(ref currentBytesTransferred, 0L));
var averageSpeed = snapshots.Average();
var bytesLeft = totalBytes - totalBytesTransferred;
var speedText = string.Format("{0:#} MB/s", averageSpeed / (1024 * 1024));
var timeLeftText = string.Empty;
if (averageSpeed > 0)
{
var timeLeft = TimeSpan.FromSeconds(bytesLeft / averageSpeed);
var timeLeftRounded = TimeSpan.FromSeconds(Math.Round(timeLeft.TotalSeconds));
timeLeftText = string.Format("{0}", timeLeftRounded);
}
else
{
timeLeftText = ("-infinite-");
}
Console.WriteLine(speedText);
Console.WriteLine("Time Left: " + timeLeftText);
fileWorker.ReportProgress((currentBytesTransferred / maximum) * 100));
}
};
using (var inputStream = sourceFile.OpenRead())
using (var outputStream = File.OpenWrite(destinationPath))
{
timer.Start();
var buffer = new byte[4096 * 8];
var numBytes = default(int);
var numBytesMax = buffer.Length;
var timeout = TimeSpan.FromMinutes(10D);
do
{
var mre = new ManualResetEvent(false);
inputStream.BeginRead(buffer, 0, numBytesMax, asyncReadResult =>
{
numBytes = inputStream.EndRead(asyncReadResult);
outputStream.BeginWrite(buffer, 0, numBytes, asyncWriteResult =>
{
if (fileWorker.CancellationPending)
{
eWorker.Cancel = true;
return;
}
else
{
outputStream.EndWrite(asyncWriteResult);
currentBytesTransferred = Interlocked.Add(ref currentBytesTransferred, numBytes);
Results.SizeProgressed = totalBytesTransferred;
totalBytesTransferred = Interlocked.Add(ref totalBytesTransferred, numBytes);
mre.Set();
}
}, null);
}, null);
mre.WaitOne(timeout);
} while (numBytes != 0);
timer.Stop();
}
}
}
}
An example:
Copying a directory (21 directories, 228 files, total: 326MB) to a NAS took: 12.4434796 s and reported was a average speed with 2-3 MB/s. But the copying speed was nearly 10 times higher.