
I used this code:

using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Windows.Forms;
using System.IO;

namespace WindowsApplication1 {
  public partial class Form1 : Form {
    // Class to report progress
    private class UIProgress {
      public UIProgress(string name_, long bytes_, long maxbytes_) {
        name = name_; bytes = bytes_; maxbytes = maxbytes_;
      }
      public string name;
      public long bytes;
      public long maxbytes;
    }
    // Class to report exceptions
    private class UIError {
      public UIError(Exception ex, string path_) {
        msg = ex.Message; path = path_; result = DialogResult.Cancel;
      }
      public string msg;
      public string path;
      public DialogResult result;
    }
    private BackgroundWorker mCopier;
    private delegate void ProgressChanged(UIProgress info);
    private delegate void CopyError(UIError err);
    private ProgressChanged OnChange;
    private CopyError OnError;

    public Form1() {
      InitializeComponent();
      mCopier = new BackgroundWorker();
      mCopier.DoWork += Copier_DoWork;
      mCopier.RunWorkerCompleted += Copier_RunWorkerCompleted;
      mCopier.WorkerSupportsCancellation = true;
      OnChange += Copier_ProgressChanged;
      OnError += Copier_Error;
      button1.Click += button1_Click;
      ChangeUI(false);
    }

    private void Copier_DoWork(object sender, DoWorkEventArgs e) {
      // Create list of files to copy
      string[] theExtensions = { "*.jpg", "*.jpeg", "*.bmp", "*.png", "*.gif" };
      List<FileInfo> files = new List<FileInfo>();
      string path = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments);
      DirectoryInfo dir = new DirectoryInfo(path);
      long maxbytes = 0;
      foreach (string ext in theExtensions) {
        FileInfo[] folder = dir.GetFiles(ext, SearchOption.AllDirectories);
        foreach (FileInfo file in folder) {
          if ((file.Attributes & FileAttributes.Directory) != 0) continue;
          files.Add(file);
          maxbytes += file.Length;
        }
      }
      // Copy files
      long bytes = 0;
      foreach (FileInfo file in files) {
        try {
          this.BeginInvoke(OnChange, new object[] { new UIProgress(file.Name, bytes, maxbytes) });
          File.Copy(file.FullName, @"c:\temp\" + file.Name, true);
        }
        catch (Exception ex) {
          UIError err = new UIError(ex, file.FullName); 
          this.Invoke(OnError, new object[] { err });
          if (err.result == DialogResult.Cancel) break;
        }
        bytes += file.Length;
      }
    }
    private void Copier_ProgressChanged(UIProgress info) {
      // Update progress
      progressBar1.Value = (int)(100.0 * info.bytes / info.maxbytes);
      label1.Text = "Copying " + info.name;
    }
    private void Copier_Error(UIError err) {
      // Error handler
      string msg = string.Format("Error copying file {0}\n{1}\nClick OK to continue copying files", err.path, err.msg);
      err.result = MessageBox.Show(msg, "Copy error", MessageBoxButtons.OKCancel, MessageBoxIcon.Exclamation);
    }
    private void Copier_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e) {
      // Operation completed, update UI
      ChangeUI(false);
    }
    private void ChangeUI(bool docopy) {
      label1.Visible = docopy;
      progressBar1.Visible = docopy;
      button1.Text = docopy ? "Cancel" : "Copy";
      label1.Text = "Starting copy...";
      progressBar1.Value = 0;
    }
    private void button1_Click(object sender, EventArgs e) {
      bool docopy = button1.Text == "Copy";
      ChangeUI(docopy);
      if (docopy) mCopier.RunWorkerAsync();
      else mCopier.CancelAsync();
    }
  }
}

posted here (the one that nobugz posted) for copying files and displaying the status in a progress bar.

I want the value of the progress bar to increment continuously while copying, especially for large files. What happens with this sample code is that the progress bar value freezes while each file is being copied; only when the copy moves on to the next file does it jump ahead by the size of the file just copied. I want it to work like CopyFileEx in Windows, where the progress bar increments continuously during the copy (I can't use CopyFileEx because I need my own implementation).

asked by patlimosnero (edited by Kromster)
  • Your question is not very clear to me. The code you provided uses the `File.Copy` function, which is a managed wrapper for the `CopyFile` WinAPI function. Do you want to avoid using any WinAPI function in the file copy process? – Anton Semenov May 18 '11 at 12:36
  • Indeed, why create your own implementation? CopyFileEx would do exactly what you want. – Polity May 18 '11 at 12:54
  • Yeah, you're right, why create one if I can use an existing one? The problem is, this is what's in the app specifications. – patlimosnero May 18 '11 at 13:11
  • http://www.informit.com/guides/content.aspx?g=dotnet&seqNum=827 might be of some use. – Jim Mischel May 18 '11 at 14:30

6 Answers

55

You need something like this:

using System;
using System.IO;

public delegate void ProgressChangeDelegate(double Percentage, ref bool Cancel);
public delegate void Completedelegate();

class CustomFileCopier
{
    public CustomFileCopier(string Source, string Dest)
    {
        this.SourceFilePath = Source;
        this.DestFilePath = Dest;

        OnProgressChanged += delegate { };
        OnComplete += delegate { };
    }

    public void Copy()
    {
        byte[] buffer = new byte[1024 * 1024]; // 1MB buffer
        bool cancelFlag = false;

        using (FileStream source = new FileStream(SourceFilePath, FileMode.Open, FileAccess.Read))
        {
            long fileLength = source.Length;
            using (FileStream dest = new FileStream(DestFilePath, FileMode.CreateNew, FileAccess.Write))
            {
                long totalBytes = 0;
                int currentBlockSize = 0;

                while ((currentBlockSize = source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    totalBytes += currentBlockSize;
                    double percentage = (double)totalBytes * 100.0 / fileLength;

                    dest.Write(buffer, 0, currentBlockSize);

                    cancelFlag = false;
                    OnProgressChanged(percentage, ref cancelFlag);

                    if (cancelFlag == true)
                    {
                        // Delete dest file here
                        break;
                    }
                }
            }
        }

        OnComplete();
    }

    public string SourceFilePath { get; set; }
    public string DestFilePath { get; set; }

    public event ProgressChangeDelegate OnProgressChanged;
    public event Completedelegate OnComplete;
}

Just run it in a separate thread and subscribe to the OnProgressChanged event.
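
For example, here is a minimal sketch of the wiring inside a WinForms form. The StartCopy method, the progressBar1 and label1 controls, and the file paths are hypothetical names for illustration, not part of the class above:

private void StartCopy()
{
    // Hypothetical paths; use your own source and destination.
    var copier = new CustomFileCopier(@"c:\source\big.bin", @"c:\temp\big.bin");

    copier.OnProgressChanged += delegate(double percentage, ref bool cancel)
    {
        // Raised on the worker thread, so marshal back to the UI thread.
        Invoke((MethodInvoker)delegate { progressBar1.Value = (int)percentage; });
        // Set cancel = true here (e.g. from a flag toggled by a Cancel button) to abort.
    };

    copier.OnComplete += delegate
    {
        Invoke((MethodInvoker)delegate { label1.Text = "Copy finished"; });
    };

    // Run the copy off the UI thread.
    new System.Threading.Thread(copier.Copy) { IsBackground = true }.Start();
}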

answered by Anton Semenov (edited by Wai Ha Lee)
  • Did someone adapt this to copy folders as well? – MemphiZ Mar 14 '13 at 13:44
  • can you show how to use the delegate? The "OnComplete += delegate { };" looks a bit disturbing – Offler Jun 21 '13 at 06:26
  • That is an empty delegate stub; it does nothing. I introduced it only to simplify the code; otherwise `OnProgressChanged(percentage, ref cancelFlag);` would have to be written as `if (OnProgressChanged != null) OnProgressChanged(percentage, ref cancelFlag);`. You can read about delegates here: http://msdn.microsoft.com/en-us/library/900fyy8e(v=vs.71).aspx – Anton Semenov Jun 24 '13 at 07:13
  • Well, tbh you should follow the event pattern a bit more closely by creating CancelEventArgs for the progress event instead of using ref for this. Besides that, thanks for the code snippet. Helped a lot! – Grisgram Feb 04 '19 at 14:09
27

I like this solution, because:

The copy engine is in the framework

using System;
using System.Net;

public delegate void IntDelegate(int Int);

public static event IntDelegate FileCopyProgress;
public static void CopyFileWithProgress(string source, string destination)
{
    var webClient = new WebClient();
    webClient.DownloadProgressChanged += DownloadProgress;
    webClient.DownloadFileAsync(new Uri(source), destination);
}

private static void DownloadProgress(object sender, DownloadProgressChangedEventArgs e)
{
    if(FileCopyProgress != null)
        FileCopyProgress(e.ProgressPercentage);
}
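
As a rough usage sketch (the BeginCopy method, progressBar1, and the paths are placeholders): WebClient raises DownloadProgressChanged through the SynchronizationContext captured when the download starts, so if you start the copy from the UI thread the handler runs on the UI thread.

private void BeginCopy()
{
    FileCopyProgress += percent => progressBar1.Value = percent;
    CopyFileWithProgress(@"\\server\share\big.iso", @"C:\temp\big.iso");

    // DownloadFileAsync returns immediately. To know when the copy has finished,
    // hook webClient.DownloadFileCompleted inside CopyFileWithProgress, or use
    // webClient.DownloadFileTaskAsync(...) and await the returned Task instead.
}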

UNC Paths

This should work with UNC paths as long as the permissions are set up. If not, you will get the error below, in which case I vote for the authenticated request user route.

System.UnauthorizedAccessException: Access to the path '\\testws01\c$\foo' is denied.

ASP.NET is not authorized to access the requested resource. Consider granting access rights to the resource to the ASP.NET request identity. ASP.NET has a base process identity (typically {MACHINE}\ASPNET on IIS 5 or Network Service on IIS 6 and IIS 7, and the configured application pool identity on IIS 7.5) that is used if the application is not impersonating. If the application is impersonating via <identity impersonate="true"/>, the identity will be the anonymous user (typically IUSR_MACHINENAME) or the authenticated request user.

answered by toddmo
  • I totally agree, this is by far the easiest solution. – Maestro Oct 13 '14 at 13:04
  • This is very nice (and quick), but my only concern is the inability to capture proper IO Exceptions. `DownloadFileAsync` throws `ArgumentNullException`, `WebException` and `InvalidOperationException` none of which are much use for when file handling goes wrong. Still, if all you need is a quick way to copy files with progress and you know all will be well, this is a great way. – TEK Mar 21 '16 at 10:33
  • @TEK, I would be surprised if the IOException was not in the inner exception property of the Exception thrown. If you can point me to a url for the mono code for this class, I'll see what it is actually doing. – toddmo Mar 21 '16 at 14:52
  • @toddmo I can do one better, I can link you directly to Microsoft's repo: http://referencesource.microsoft.com/#System/net/System/Net/webclient.cs,6a7f5544614801fd Thank you for taking a look. – TEK Mar 21 '16 at 20:38
  • @TEK, ok yes, it wraps every other exception inside of a WebException but keeps the inner exception. So I'd simulate an IO Error (lock the file, whatever) to verify it, but I bet the IO exception is there. I would run the experiment in .net fiddle but it requires disk access. – toddmo Mar 21 '16 at 21:01
  • @codea, Yes, if allowed: `System.UnauthorizedAccessException: Access to the path '\\test\c$\blah' is denied. Consider granting access rights to the resource to the ASP.NET request identity. ASP.NET has a base process identity (typically {MACHINE}\ASPNET on IIS 5 or Network Service on IIS 6 and IIS 7, and the configured application pool identity on IIS 7.5) that is used if the application is not impersonating. If the application is impersonating via , the identity will be the anonymous user (typically IUSR_MACHINENAME) or the authenticated request user. ` – toddmo Oct 23 '16 at 17:37
  • Thanks a lot for chiming in :), I just tested and it works perfectly. – codea Oct 23 '16 at 18:35
  • Great solution. Also **supported by Windows 10**. Don't forget to add a try-catch for an _OperationCanceledException_. It's thrown in the case that the user clicks to _abort_. – Beauty Jan 25 '17 at 13:51
  • Note that this downloads in background, i.e. it returns before the download completes. Consider this when comparing to other methods. Replace with `DownloadFileTaskAsync` to get a Task. – mafu Apr 05 '20 at 18:01
10

Making your own file copy logic using two streams, as presented by Gal, is a viable option, but it's not recommended, simply because Windows has a deeply integrated operation named CopyFileEx that is optimized for reliability, security, and performance.

That said, the following article, File Copy Progress, Custom Thread Pools, does exactly what you want, but of course you have to use CopyFileEx.
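
If you do go that route, the interop boils down to something like the sketch below. This is not the article's code, just the standard kernel32 CopyFileEx P/Invoke signature with a progress callback; the NativeCopy class name and the Action<int> parameter are illustrative. Note that CopyFileEx is synchronous and invokes the callback on the calling thread, so run it on a worker thread and marshal progress updates to the UI.

using System;
using System.ComponentModel;
using System.Runtime.InteropServices;

static class NativeCopy
{
    // Matches the Win32 LPPROGRESS_ROUTINE callback.
    delegate uint CopyProgressRoutine(
        long totalFileSize, long totalBytesTransferred,
        long streamSize, long streamBytesTransferred,
        uint streamNumber, uint callbackReason,
        IntPtr sourceFile, IntPtr destinationFile, IntPtr data);

    const uint PROGRESS_CONTINUE = 0;  // keep copying
    const uint PROGRESS_CANCEL = 1;    // cancel and delete the destination file

    [DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode)]
    [return: MarshalAs(UnmanagedType.Bool)]
    static extern bool CopyFileEx(
        string existingFileName, string newFileName,
        CopyProgressRoutine progressRoutine, IntPtr data,
        ref int cancel, uint copyFlags);

    // Copies a file and reports 0-100 progress through the callback.
    public static void Copy(string source, string dest, Action<int> progress)
    {
        int cancel = 0;
        bool ok = CopyFileEx(source, dest,
            (total, transferred, ss, sbt, sn, reason, hs, hd, d) =>
            {
                progress(total > 0 ? (int)(transferred * 100 / total) : 100);
                return PROGRESS_CONTINUE;
            },
            IntPtr.Zero, ref cancel, 0);

        if (!ok)
            throw new Win32Exception();  // wraps Marshal.GetLastWin32Error()
    }
}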

answered by Polity (edited by Wai Ha Lee)
  • `CopyFileEx` is very broken when copying large files across the network. See http://blog.mischel.com/2008/10/14/copying-large-files-on-windows/ for details. Also, it's pretty easy to improve on the speed of `CopyFileEx` using two streams and a little bit of asynchronous coding. – Jim Mischel May 18 '11 at 14:29
  • @JimMischel Don't make such a broad statement. Streams do not support DMA. Async is not a magic solution. – mafu Apr 04 '20 at 18:45
  • @mafu Don't be too quick to judge that which you don't fully understand. The blog post (which is, unfortunately unavailable currently) documents very well how broken CopyFileEx is, or was at the time I wrote that comment. And at the time I was using a custom CopyFile method that far outperformed CopyFileEx simply by using two different threads: one for reading and one for writing. No, async isn't magic. But properly used, it *can* improve performance. – Jim Mischel Apr 04 '20 at 19:40
  • I should have picked a friendlier tone, sorry about that. I also don't know enough about this topic to argue in detail. My main point was that anything using streams afaik requires moving bytes through the CPU, which it should be possible to avoid. When using streams, however, your idea is likely very good. – mafu Apr 04 '20 at 22:47
7

Here's an optimized solution that uses .NET extension methods and a double buffer for better performance. A new CopyTo overload is added to FileInfo, taking an Action that reports progress only when it has changed.

This is a sample implementation for WPF, with a progress bar named progressBar1, that performs the copy operation in the background.

private FileInfo _source = new FileInfo(@"C:\file.bin");
private FileInfo _destination = new FileInfo(@"C:\file2.bin");

private void CopyFile()
{
  if(_destination.Exists)
    _destination.Delete();

  Task.Run(()=>{
    _source.CopyTo(_destination, x=>Dispatcher.Invoke(()=>progressBar1.Value = x));
  }).GetAwaiter().OnCompleted(() => MessageBox.Show("File Copied!"));
}

Here's an example for a Console Application

class Program
{
  static void Main(string[] args)
  {
    var _source = new FileInfo(@"C:\Temp\bigfile.rar");
    var _destination = new FileInfo(@"C:\Temp\bigfile2.rar");

    if (_destination.Exists) _destination.Delete();

    _source.CopyTo(_destination, x => Console.WriteLine($"{x}% Complete"));
    Console.WriteLine("File Copied.");
  }
}

To use, create a new file, such as FileInfoExtensions.cs and add this code:

using System;
using System.IO;
using System.Threading.Tasks;

public static class FileInfoExtensions
{
  public static void CopyTo(this FileInfo file, FileInfo destination, Action<int> progressCallback)
  {
    const int bufferSize = 1024 * 1024;  //1MB
    byte[] buffer = new byte[bufferSize], buffer2 = new byte[bufferSize];
    bool swap = false;
    int progress = 0, reportedProgress = 0, read = 0;
    long len = file.Length;
    float flen = len;
    Task writer = null;

    using (var source = file.OpenRead())
    using (var dest = destination.OpenWrite())
    {
      dest.SetLength(source.Length);
      for (long size = 0; size < len; size += read)
      {
        if ((progress = ((int)((size / flen) * 100))) != reportedProgress)
          progressCallback(reportedProgress = progress);
        read = source.Read(swap ? buffer : buffer2, 0, bufferSize);
        writer?.Wait();  // pre-C# 6: if (writer != null) writer.Wait();
        writer = dest.WriteAsync(swap ? buffer : buffer2, 0, read);
        swap = !swap;
      }
      writer?.Wait();  //Fixed - Thanks @sam-hocevar
    }
  }
}

The double buffer works by using one thread to read and one thread to write, so the max speed is dictated only by the slower of the two. Two buffers are used (a double buffer), ensuring that the read and write threads are never using the same buffer at the same time.

Example: the code reads into buffer 1; when the read completes, a write operation starts writing the contents of buffer 1. Without waiting for that write to finish, the code swaps to buffer 2 and reads data into buffer 2 while buffer 1 is still being written. Once the read into buffer 2 completes, it waits for the write of buffer 1 to complete, starts writing buffer 2, and the process repeats. Essentially, one thread is always reading and one is always writing.

WriteAsync uses overlapped I/O, which utilizes I/O completion ports, which rely on hardware to perform asynchronous operations rather than threads, making this very efficient. TLDR: I lied about there being 2 threads, but the concept is the same.

answered by Robear
  • I used your code for quite a while and randomly started having problems: I got DAMAGED FILES due to this implementation. One of the writers continued to write to the file one more time, writing the beginning of the source file to the end of the destination file and thus corrupting it. I can't recommend this, sorry. – halloweenlv Feb 24 '16 at 10:27
  • @halloweenlv this code is not very well written, but it almost works. You can fix it by replacing the `dest.Write(…)` at the end with `if (writer != null) writer.Wait();` – sam hocevar Mar 02 '16 at 12:47
  • @sam-hocevar I updated the sample code and tested it. Thanks for catching that bug. I also added code for testing it in a console app. Could you please elaborate on "this code is not very well written"? Any suggestions are welcome. – Robear Mar 07 '16 at 16:20
  • @Robear apart from the `using` directives, the code is very “C-ish” (not that there’s anything wrong with C, but in C# things are often done very differently), has a few redundancies (the `swap ? buffer : buffer2` part), also `buffer2` is used when `swap` is false, not when it’s true, the last `Read` call will ask for too many bytes, the variable name `progress2` makes it look like it has something to do with `buffer2` but it’s just the previous value of `progress`… of course there’s nothing really wrong with all that, it just doesn’t look very C#. – sam hocevar Mar 07 '16 at 19:27
  • Ah okay. Well if the main objection is that it doesn't "look" like C#, I'm fine with that. The code utilizes a double buffer, allowing a read and a write to occur simultaneously, so there's no redundancy there - it's an optimization (absence of double buffers in stream copies always irks me). I've renamed *progress2* to *reportedProgress* for clarity. If you have any other suggestions, though, specifically with regards to making it "look more C#ish", please feel free to pastebin me something and link it here. – Robear Mar 08 '16 at 01:10
  • Also note that the *count* parameter of the Read method specifies the maximum size to be read, not the desired size. It's really just to specify the upper bound for the buffer. There's no benefit to adding logic to reduce the read request size. [MSDN - Read Method](https://msdn.microsoft.com/query/dev14.query?appId=Dev14IDEF1&l=EN-US&k=k(System.IO.FileStream.Read);k(TargetFrameworkMoniker-.NETFramework,Version%3Dv4.5.2);k(DevLang-csharp)&rd=true) – Robear Mar 08 '16 at 01:17
  • Great snippet. You should add a `progressCallback(100);` before `writer?.Wait();`, otherwise the progress will only ever reach 99. – Richie86 Jan 25 '19 at 06:46
  • I recommend calling dest.Flush after the file is written, instead of calling dest.SetLength with the file length at the start; otherwise it is difficult to see whether the data was really committed. The progress bar logic is also a little complicated; I would just update every 33ms (GUI time) instead of using integer comparison. – Tib Schott Mar 18 '22 at 16:42
  • @TibSchott The reason for the `SetLength` is to prevent fragmentation and to improve performance. Removing it would only assist in debugging, in which case you could inspect `dest.Position`. The GUI updating should be handled by the application, so it could still do it every 33ms or whatever. Presumably the callback will write a value in a critical section that the GUI thread would read. Here it's just an optimization to prevent the callback from being called excessively (which can impact performance), presuming we're not looking for a high-precision value. – Robear Mar 19 '22 at 03:15
  • @Robear I needed to remove `SetLength` because if the copy fails, the result still has exactly the same file size as a completed copy. You might think the file has been copied when in reality it failed. Additionally, avoiding `writer.Flush` works against RAM and the system buffer instead of writing to the HDD immediately; sure, it's faster when the buffer is used, but on interruption it will report the wrong file length. It depends on whether the drive is internal or a network drive, on the system/HDD cache, and on the .NET procedure. – Tib Schott Mar 21 '22 at 07:28
3

You can copy each file in chunks from its stream and update the progress after each chunk. That makes the progress more continuous, and you can easily calculate the size of the chunks copied so far relative to the total stream size in order to show the correct percentage done.
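
A rough sketch of that idea (the ChunkedCopy helper, buffer size, and the Action<double> callback are illustrative choices, not from this answer): passing in the running byte count and the grand total of all files keeps the bar moving even inside a single large file.

using System;
using System.IO;

static class ChunkedCopy
{
    // Copies one file in chunks; 'alreadyCopied' and 'grandTotal' let the caller
    // report overall progress across many files.
    public static long Copy(string source, string dest,
                            long alreadyCopied, long grandTotal,
                            Action<double> report)
    {
        byte[] buffer = new byte[256 * 1024];  // arbitrary chunk size
        using (FileStream input = File.OpenRead(source))
        using (FileStream output = File.Create(dest))
        {
            int read;
            while ((read = input.Read(buffer, 0, buffer.Length)) > 0)
            {
                output.Write(buffer, 0, read);
                alreadyCopied += read;
                // Percentage of all bytes, so the bar also moves during a large file.
                report(100.0 * alreadyCopied / grandTotal);
            }
        }
        return alreadyCopied;
    }
}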

answered by Gal
0

You can use the Dispatcher to update your ProgressBar:

// WPF: the delegate matches DependencyObject.SetValue(DependencyProperty, object)
private delegate void UpdateProgressBarDelegate(System.Windows.DependencyProperty dp, object value);

UpdateProgressBarDelegate updatePbDelegate = new UpdateProgressBarDelegate(ProgressBar1.SetValue);

Dispatcher.Invoke(updatePbDelegate, System.Windows.Threading.DispatcherPriority.Background, new object[] { ProgressBar.ValueProperty, value });
answered by Akrem