-1

I have a website deployed on IIS which contains a virtual folder with subfolders and files. I am using the following code to copy files from the HTTP site, but it copies only one file at a time. Instead of copying the files one by one, I want to copy the whole directory.

 private static void DirectoryCopy(string sourceDirName, string destDirName, bool copySubDirs)
    {
        // Recursively copies every file from sourceDirName into destDirName,
        // creating the destination directory tree as needed.
        //
        // sourceDirName: existing source directory (throws if missing).
        // destDirName:   destination directory; created if it does not exist.
        // copySubDirs:   when true, subdirectories are copied recursively.
        //
        // Throws DirectoryNotFoundException when the source does not exist,
        // and IOException when a destination file already exists
        // (CopyTo is called with overwrite: false).
        DirectoryInfo dir = new DirectoryInfo(sourceDirName);

        // Validate the source BEFORE enumerating it. The original code called
        // GetDirectories() first, which would itself throw on a missing path
        // and the custom error message below would never be reached.
        if (!dir.Exists)
        {
            throw new DirectoryNotFoundException(
                "Source directory does not exist or could not be found: "
                + sourceDirName);
        }

        // If the destination directory doesn't exist, create it.
        if (!Directory.Exists(destDirName))
        {
            Directory.CreateDirectory(destDirName);
        }

        // Get the files in the directory and copy them to the new location.
        foreach (FileInfo file in dir.GetFiles())
        {
            string temppath = Path.Combine(destDirName, file.Name);
            file.CopyTo(temppath, false);
        }

        // If copying subdirectories, copy them and their contents to new location.
        if (copySubDirs)
        {
            foreach (DirectoryInfo subdir in dir.GetDirectories())
            {
                string temppath = Path.Combine(destDirName, subdir.Name);
                DirectoryCopy(subdir.FullName, temppath, copySubDirs);
            }
        }
    }
Avinash patil
  • 1,689
  • 4
  • 17
  • 39
  • 1
    May be help you http://stackoverflow.com/questions/124492/c-sharp-httpwebrequest-command-to-get-directory-listing – kaushik0033 Oct 04 '13 at 07:50
  • 1
    Please see link : http://www.codeproject.com/Articles/34415/Downloading-Multiple-Files-over-HTTP-Connection – kaushik0033 Oct 04 '13 at 07:50

3 Answers3

0

Well, you can try to make the whole operation asynchronous, but I'm not sure if the result will be satisfactory for you. I have never heard of a function that could copy everything at once. In every OS there is a queue of files waiting to be written ;)

If the operation takes too much time, simply use AJAX and notify the user of the current progress, so the website doesn't just freeze on them without any notification.

walther
  • 13,466
  • 5
  • 41
  • 67
0

This approach uses FTP to download all the files in a specific remote directory.

For being able to download all files from a FTP directory to a local folder, you will have to list all files in the remote directory and then download them one by one. You can use the following code to do the same:

    // List the remote FTP directory, then fetch each file one by one.
    // NOTE(review): GetFileList() returns null when the listing fails,
    // so this foreach should be guarded by a null check — verify.
    string[] files = GetFileList();
    foreach (string file in files)
    {
        Download(file);
    }

    public string[] GetFileList()
    {
        // Lists the names of the entries in the FTP server's root directory.
        //
        // Returns an array of entry names (empty for an empty directory), or
        // null when the listing fails — same error contract as the original,
        // so callers must null-check before iterating.
        //
        // Reads the connection settings from the ftpServerIP / ftpUserID /
        // ftpPassword fields declared elsewhere in this class.
        try
        {
            FtpWebRequest reqFTP = (FtpWebRequest)WebRequest.Create(new Uri("ftp://" + ftpServerIP + "/"));
            reqFTP.UseBinary = true;
            reqFTP.Credentials = new NetworkCredential(ftpUserID, ftpPassword);
            reqFTP.Method = WebRequestMethods.Ftp.ListDirectory;
            reqFTP.Proxy = null;
            reqFTP.KeepAlive = false;
            reqFTP.UsePassive = false;

            List<string> names = new List<string>();

            // using guarantees the response and reader are closed on every
            // path; the original only closed them in the catch block and
            // leaked both on a successful listing.
            using (WebResponse response = reqFTP.GetResponse())
            using (StreamReader reader = new StreamReader(response.GetResponseStream()))
            {
                string line;
                while ((line = reader.ReadLine()) != null)
                {
                    names.Add(line);
                }
            }

            // Collecting into a list also fixes the original's crash on an
            // empty listing: StringBuilder.Remove(LastIndexOf('\n'), 1) threw
            // ArgumentOutOfRangeException when there was no '\n' to trim.
            return names.ToArray();
        }
        catch (Exception)
        {
            // Preserve the original contract: null signals failure.
            return null;
        }
    }

    private void Download(string file)
    {
        // Downloads a single file from the FTP server into localDestnDir.
        // Shows a message box and returns on any failure (original behavior).
        //
        // Reads ftpServerIP / remoteDir / ftpUserID / ftpPassword /
        // localDestnDir from fields declared elsewhere in this class.
        try
        {
            string uri = "ftp://" + ftpServerIP + "/" + remoteDir + "/" + file;
            Uri serverUri = new Uri(uri);
            if (serverUri.Scheme != Uri.UriSchemeFtp)
            {
                return;
            }
            FtpWebRequest reqFTP = (FtpWebRequest)WebRequest.Create(serverUri);
            reqFTP.Credentials = new NetworkCredential(ftpUserID, ftpPassword);
            reqFTP.KeepAlive = false;
            reqFTP.Method = WebRequestMethods.Ftp.DownloadFile;
            reqFTP.UseBinary = true;
            reqFTP.Proxy = null;
            reqFTP.UsePassive = false;

            // Path.Combine replaces the original's  localDestnDir + "\" + file,
            // which does not compile: the unescaped backslash swallows the
            // closing quote ("\\" or @"\" was intended).
            string localPath = Path.Combine(localDestnDir, file);

            // using ensures the response and both streams are closed even when
            // the copy loop throws; the original leaked them on any exception.
            using (FtpWebResponse response = (FtpWebResponse)reqFTP.GetResponse())
            using (Stream responseStream = response.GetResponseStream())
            using (FileStream writeStream = new FileStream(localPath, FileMode.Create))
            {
                byte[] buffer = new byte[2048];
                int bytesRead;
                while ((bytesRead = responseStream.Read(buffer, 0, buffer.Length)) > 0)
                {
                    writeStream.Write(buffer, 0, bytesRead);
                }
            }
        }
        catch (Exception ex)
        {
            // WebException is an Exception, so the original's two identical
            // handlers collapse into one without changing behavior.
            MessageBox.Show(ex.Message, "Download Error");
        }
    }
kaushik0033
  • 679
  • 6
  • 12
  • i dont have the file list saved somewhere. these files are generated by my program, and its names and count always changing – Avinash patil Oct 04 '13 at 08:04
0

Copying a directory does not exist. You create a new destination directory and copy all files in the source directory. If the source directory contains directories, repeat the process for each directory in there, ad infinitum.

Your comment here indicates you're actually trying to solve another problem. What is that problem?

If your actual problem is that files disappear between dir.GetFiles() and file.CopyTo(), apply the appropriate try..catch clauses to catch errors of files not existing anymore.

If your actual problem is that files are added between dir.GetFiles() and file.CopyTo(), keep a list of the names of files that you did copy, call dir.GetFiles() again after copying all and intersect the results to see if new files were added.

Community
  • 1
  • 1
CodeCaster
  • 147,647
  • 23
  • 218
  • 272