I had this problem and based my solution on Jonathan's code here. There were a few problems with his code, so here is my version. If you want to upload a large file, say a 1 GB file, you have to chunk it and send it over several requests (a single request times out). First, raise the maximum request size limits on the server (the IIS request-filtering limit and the ASP.NET runtime limit):
<system.webServer>
    <security>
        <requestFiltering>
            <requestLimits maxAllowedContentLength="2147483647" />
        </requestFiltering>
    </security>
</system.webServer>
and
<system.web>
    <httpRuntime targetFramework="4.5" maxRequestLength="2147483647" />
</system.web>
Then chunk the file, send each chunk, wait for the response, and send the next one. Here are the markup, the JavaScript, and the controller code.
<div id="VideoDiv">
<label>Filename:</label>
<input type="file" id="fileInput" /><br/><br/>
<input type="button" id="btnUpload" value="Upload a presentation"/><br/><br/>
<div id="progressbar_container" style="width: 100%; height: 30px; position: relative; background-color: grey; display: none">
<div id="progressbar" style="width: 0%; height: 100%; position: absolute; background-color: green"></div>
<span id="progressbar_label" style="position: absolute; left: 35%; top: 20%">Uploading...</span>
</div>
</div>
JavaScript code to chunk the file, call the controller, and update the progress bar:
// Show, update and hide the progress bar
var progressBarStart = function() {
    $("#progressbar_container").show();
}

var progressBarUpdate = function(percentage) {
    $('#progressbar_label').html(percentage + "%");
    $("#progressbar").width(percentage + "%");
}

var progressBarComplete = function() {
    $("#progressbar_container").fadeOut(500);
}

var file;

$('#fileInput').change(function(e) {
    file = e.target.files[0];
});

// Tell the server that all chunks have been sent so it can merge them
var uploadCompleted = function() {
    var formData = new FormData();
    formData.append('fileName', file.name);
    formData.append('completed', true);

    var xhr2 = new XMLHttpRequest();
    xhr2.onload = function() {
        progressBarUpdate(100);
        progressBarComplete();
    }
    xhr2.open("POST", "/Upload/UploadComplete?fileName=" + file.name + "&complete=" + 1, true);
    xhr2.send(formData);
}

// Send one chunk, wait for the response, then recurse to send the next one
var multiUpload = function(count, counter, blob, completed, start, end, bytesPerChunk) {
    counter = counter + 1;
    if (counter <= count) {
        var chunk = blob.slice(start, end);
        var xhr = new XMLHttpRequest();
        xhr.onload = function() {
            start = end;
            end = start + bytesPerChunk;
            if (count == counter) {
                uploadCompleted();
            } else {
                var percentage = (counter / count) * 100;
                progressBarUpdate(percentage);
                multiUpload(count, counter, blob, completed, start, end, bytesPerChunk);
            }
        }
        xhr.open("POST", "/Upload/MultiUpload?id=" + counter.toString() + "&fileName=" + file.name, true);
        xhr.send(chunk);
    }
}

$("#VideoDiv").on("click", "#btnUpload", function() {
    var blob = file;
    var bytesPerChunk = 3757000;
    var size = blob.size;

    var start = 0;
    var end = bytesPerChunk;
    var completed = 0;
    // Number of chunks needed for this file
    var count = size % bytesPerChunk == 0 ? size / bytesPerChunk : Math.floor(size / bytesPerChunk) + 1;
    var counter = 0;

    progressBarStart();
    multiUpload(count, counter, blob, completed, start, end, bytesPerChunk);
});
And here is the upload controller, which stores the chunks in "App_Data/Videos/Temp" and later merges them and stores the result in "App_Data/Videos":
public class UploadController : Controller
{
    private string videoAddress = "~/App_Data/Videos";

    // Receives one chunk and writes it to App_Data/Videos/Temp as "<fileName><chunkNumber>"
    [HttpPost]
    public string MultiUpload(string id, string fileName)
    {
        var chunkNumber = id;
        string path = Server.MapPath(videoAddress + "/Temp");
        Directory.CreateDirectory(path); // make sure the Temp folder exists
        string newpath = Path.Combine(path, fileName + chunkNumber);
        using (FileStream fs = System.IO.File.Create(newpath))
        {
            byte[] bytes = new byte[3757000];
            int bytesRead;
            while ((bytesRead = Request.InputStream.Read(bytes, 0, bytes.Length)) > 0)
            {
                fs.Write(bytes, 0, bytesRead);
            }
        }
        return "done";
    }

    // Called after the last chunk: merges the chunks and moves the result to App_Data/Videos
    [HttpPost]
    public string UploadComplete(string fileName, string complete)
    {
        string tempPath = Server.MapPath(videoAddress + "/Temp");
        string videoPath = Server.MapPath(videoAddress);
        string newPath = Path.Combine(tempPath, fileName);
        if (complete == "1")
        {
            // Order the chunks by the numeric suffix appended in MultiUpload
            string[] filePaths = Directory.GetFiles(tempPath)
                .Where(p => p.Contains(fileName))
                .OrderBy(p => Int32.Parse(p.Replace(fileName, "$").Split('$')[1]))
                .ToArray();
            foreach (string filePath in filePaths)
            {
                MergeFiles(newPath, filePath);
            }
            System.IO.File.Move(newPath, Path.Combine(videoPath, fileName));
        }
        return "success";
    }

    // Appends the contents of file2 to file1, then deletes file2
    private static void MergeFiles(string file1, string file2)
    {
        FileStream fs1 = null;
        FileStream fs2 = null;
        try
        {
            fs1 = System.IO.File.Open(file1, FileMode.Append);
            fs2 = System.IO.File.Open(file2, FileMode.Open);
            // CopyTo handles the read/write loop instead of relying on a single Read call
            fs2.CopyTo(fs1);
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message + " : " + ex.StackTrace);
        }
        finally
        {
            if (fs1 != null) fs1.Close();
            if (fs2 != null) fs2.Close();
            System.IO.File.Delete(file2);
        }
    }
}
However, if two users upload files with the same name at the same time, their chunks will collide in the Temp folder, so you have to handle that case. By reading responseText in the xhr handlers you can also catch server-side errors and exceptions and deal with them.
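As a rough sketch of both points (this is not part of the code above), the client could prefix each upload with a unique identifier and check the response of every chunk before sending the next one. The uploadName variable and the failure handling below are assumptions for illustration only; the MultiUpload and UploadComplete actions would then have to be called with this prefixed name instead of file.name.

// Hypothetical: give each upload a unique name so two users uploading
// "video.mp4" at the same time do not overwrite each other's chunks.
var uploadName = Date.now() + "_" + Math.random().toString(36).substring(2, 8) + "_" + file.name;

// Inside multiUpload, inspect the response before sending the next chunk.
xhr.onload = function() {
    if (xhr.status !== 200 || xhr.responseText.indexOf("done") === -1) {
        // The server returned an error page or an exception message instead of "done".
        progressBarComplete();
        alert("Chunk " + counter + " failed: " + xhr.responseText);
        return;
    }
    // ...otherwise continue exactly as in multiUpload above.
};
xhr.open("POST", "/Upload/MultiUpload?id=" + counter + "&fileName=" + encodeURIComponent(uploadName), true);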