
I'm reading a large file through a stream, filling a 4k buffer from it and writing each chunk to an HttpWebRequest stream.

For some reason I'm getting out-of-memory exceptions, and I'm not sure why.

Am I doing something incorrectly?

My method:

public void StreamBlob(FileInfo file, Uri blobContainerSasUri, string containerName)
{
    try
    {
        var method = "PUT";
        var token = blobContainerSasUri.Query;
        var requestUri =
            new Uri($"https://{blobContainerSasUri.Host}/{containerName}/{string.Concat(file.Name, token)}");

        using (var fileStream = file.OpenRead())
        {
            var request = (HttpWebRequest) WebRequest.Create(requestUri);
            request.Method = method;
            request.ContentType = MimeMapping.GetMimeMapping(file.FullName);
            request.Headers.Add("x-ms-blob-type", "BlockBlob");
            request.ContentLength = fileStream.Length;
            request.AllowWriteStreamBuffering = false;

            using (var serverStream = request.GetRequestStream())
            {
                var buffer = new byte[4096];
                while (true)
                {
                    var bytesRead = fileStream.Read(buffer, 0, buffer.Length);
                    if (bytesRead > 0)
                        serverStream.Write(buffer, 0, bytesRead);
                    else
                        break;
                }
            }

            using (var resp = (HttpWebResponse) request.GetResponse())
            {
                try
                {
                    if (resp.StatusCode == HttpStatusCode.OK)
                        _logger.Log.Info($"Received http response {resp}");
                }
                catch (Exception ex)
                {
                    _logger.Log.Warn($"Received http response {resp}", ex);
                }
            }
        }
    }
    catch (Exception ex)
    {
        _logger.Log.Warn($"Error uploading {file.Fullname}, ex")
    }
dcrdev
  • Are you monitoring your resource consumption when running this? – Jaskier Dec 07 '18 at 14:43
  • where are you getting the exceptions? remove the `try-catch` statements for the time being, and you will be able to find where you run out of memory. – absoluteAquarian Dec 07 '18 at 14:45
  • How big is the file you're reading into memory? Sounds like you might want to ensure your buffer is the right length with var buffer = new byte[fileStream.Length]; Also, read this: https://stackoverflow.com/questions/2809514/outofmemoryexception-when-i-read-500mb-filestream – sr28 Dec 07 '18 at 14:50
  • Possible duplicate of [Send and receive large file over streams in ASP.NET Web Api C#](https://stackoverflow.com/questions/43678963/send-and-receive-large-file-over-streams-in-asp-net-web-api-c-sharp) – Mark Schultheiss Dec 07 '18 at 14:51
  • @sr28 You absolutely don't want your buffer to be the size of the file. Then you will hold the complete file in memory. – Magnus Dec 07 '18 at 14:59
  • You can simplify the transfer of the data to: `fileStream.CopyTo(serverStream)` – Magnus Dec 07 '18 at 15:02
  • How about if you set `AllowWriteStreamBuffering` to `true` and don't set `ContentLength` at all? – Magnus Dec 07 '18 at 15:14
  • @Magnus almost immediate out of memory exception – dcrdev Dec 07 '18 at 15:19
  • Where is the exception coming from in the stack trace? – Magnus Dec 07 '18 at 15:21
  • Interestingly after changing it to fileStream.CopyTo I now get " Message=The stream does not support concurrent IO read or write operations." – dcrdev Dec 07 '18 at 15:21
  • How about setting AllowWriteStreamBuffering to true before GetResponse() ? – Magnus Dec 07 '18 at 15:26
  • Operation timed out emanating from System.HttpWebRequest – dcrdev Dec 07 '18 at 15:35
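
For reference, the simplification Magnus suggests in the comments above would look roughly like the sketch below: the manual 4k loop is replaced with `Stream.CopyTo`, and everything else mirrors the question's identifiers (the `BlobUploader` class and `StreamBlobViaCopyTo` method names are made up for the sketch). This is only a sketch of the comment suggestion, not a confirmed fix; dcrdev reports a "stream does not support concurrent IO" error after trying CopyTo.

// A minimal sketch of the CopyTo suggestion from the comments; not a confirmed fix.
using System;
using System.IO;
using System.Net;
using System.Web; // MimeMapping, as used in the question

public class BlobUploader
{
    public void StreamBlobViaCopyTo(FileInfo file, Uri blobContainerSasUri, string containerName)
    {
        var token = blobContainerSasUri.Query;
        var requestUri =
            new Uri($"https://{blobContainerSasUri.Host}/{containerName}/{string.Concat(file.Name, token)}");

        var request = (HttpWebRequest) WebRequest.Create(requestUri);
        request.Method = "PUT";
        request.ContentType = MimeMapping.GetMimeMapping(file.FullName);
        request.Headers.Add("x-ms-blob-type", "BlockBlob");
        request.ContentLength = file.Length;        // length is known up front
        request.AllowWriteStreamBuffering = false;  // keep HttpWebRequest from buffering the body in memory

        using (var fileStream = file.OpenRead())
        using (var serverStream = request.GetRequestStream())
        {
            // CopyTo reads and writes in fixed-size chunks,
            // so the file is never held in memory in one piece.
            fileStream.CopyTo(serverStream);
        }

        using (var resp = (HttpWebResponse) request.GetResponse())
        {
            Console.WriteLine($"Received http response {resp.StatusCode}");
        }
    }
}

Magnus's other suggestion (AllowWriteStreamBuffering left at true with ContentLength unset) makes HttpWebRequest buffer the whole request body in order to determine its length, which would fit dcrdev's report of an almost immediate out-of-memory exception with that combination.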

0 Answers