I have written a console app that downloads files from one FTP location and then uploads them to a different FTP location. Downloading the files takes around 10 seconds, but uploading takes around 6 minutes. There are 256 files, each around 5-30 KB in size, so very small.
The upload and download code is very similar: it iterates through all the files in a directory and transfers each one. The upload side is shown below; it uploads each file from the D:\LEV\ folder to the FTP server.
EDIT: This is run on an Azure 'small' Windows virtual machine, so I assume bandwidth isn't the problem? Also, I am performing the same task on another virtual machine using the Windows ftp.exe to upload, and it is about twice as fast as my console app on the same machine.
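For reference, the ftp.exe comparison is driven by a script file along these lines, run with ftp -i -s:upload.ftp (a hypothetical sketch, with placeholder host, credentials, and folder matching the code below; the real script isn't shown here):

open ftp.domain.co.uk
username
password
binary
lcd D:\LEV
mput *.*
quit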
Any clues why it is so slow, or are there ways to improve the speed?
using System;
using System.IO;
using System.Net;

static public void Upload(string file1)
{
    string upftpServerIP = "ftp://ftp.domain.co.uk/lev/";
    string upftpUserID = "username";
    string upftpPassword = "password";

    Uri serverUri = new Uri(upftpServerIP + file1);
    if (serverUri.Scheme != Uri.UriSchemeFtp)
    {
        return;
    }

    FtpWebRequest reqFTP = (FtpWebRequest)WebRequest.Create(serverUri);
    reqFTP.Credentials = new NetworkCredential(upftpUserID, upftpPassword);
    reqFTP.KeepAlive = false;
    reqFTP.Method = WebRequestMethods.Ftp.UploadFile;
    reqFTP.UseBinary = true;
    reqFTP.Proxy = null;
    reqFTP.UsePassive = true;

    Console.WriteLine("Uploading " + file1);

    // Stream the local file into the FTP request; the using blocks ensure
    // both streams are closed even if the transfer throws.
    using (FileStream fs = File.OpenRead(@"D:\LEV\" + file1))
    using (Stream ftpstream = reqFTP.GetRequestStream())
    {
        fs.CopyTo(ftpstream);
    }
}
static public string[] GetFileListUpload()
{
    return Directory.GetFiles(@"D:\LEV\", "*.*", SearchOption.TopDirectoryOnly);
}
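For completeness, the methods above are driven by a simple loop along these lines (a minimal sketch; the download pass that runs first is analogous):

// Upload every file found in D:\LEV\, one at a time.
// Directory.GetFiles returns full paths, so strip the directory
// before handing the name to Upload, which re-prefixes D:\LEV\.
foreach (string path in GetFileListUpload())
{
    Upload(Path.GetFileName(path));
}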