0

I have a list of objects that I want to convert to json and upload to azure blob storage.

To do it in sequence I could use following code:

var objects = new List<object>();

foreach (var obj in objects)
{
    var blobClient = _blobContainerClient.GetBlobClient($"my_file_name.json");

    using (var ms = new MemoryStream(Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(obj))))
    {
        await blobClient.UploadAsync(ms, options: new BlobUploadOptions {...});
    }
}

I found this article which shows how to upload large amounts of random data in parallel to Azure storage

This example uses file reference, but I want to use MemoryStream. So I need to slightly modify the solution, but then I have a couple of questions on how to better implement this:

  • Should I create multiple `MemoryStream` instances in parallel?
  • Should I create multiple `MemoryStream` instances without disposing them, pass them to the upload method, and dispose them later?
  • Should I use something else instead of `MemoryStream`?
Ramūnas
  • 1,494
  • 18
  • 37
  • Please edit your question and include the code that is failing. – Gaurav Mantri Jan 07 '22 at 08:46
  • There is no failing code yet. I want to to convert my sample code to the structure provided in the article, but I am not sure of the best appoach to do it. I have edited the question and removed my hypothesis about possible exception so it would not mislead. – Ramūnas Jan 07 '22 at 09:51
  • What are your options? Dispose `MemoryStream` or not? Then dispose it - it's a standard approach and premature optimization is evil. It will be another question if you get some performance issues – AndrewSilver Jan 09 '22 at 19:56

1 Answer

0

Try the following code, which I tested on my system. I used a student class for testing purposes — substitute your own objects. For the demo, I built an array of JSON strings.

using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Azure;
using Azure.Storage;
using Azure.Storage.Blobs;
using Azure.Storage.Blobs.Models;


using Newtonsoft.Json;
using Microsoft.Azure.Storage;
using Nito.AsyncEx;
using System.IO;
using System.Diagnostics;
using System.Text;

namespace UploadJsondata
{
    // Sample DTO used as the upload payload in this demo.
    // NOTE(review): member names violate C# PascalCase conventions, but they are
    // kept as-is because JsonConvert serializes using the member names — renaming
    // them would change the JSON property names written to the blob.
    class studentInfo
    {
        public int Roll { get; set; }
        public string name { get; set; }
        public List<string> courses { get; set; }
    }
    class Program
    {
        // C# 7.1+ supports an async entry point, so the Nito.AsyncEx
        // AsyncContext shim is no longer needed.
        static async Task Main(string[] args)
        {
            await UploadFilesAsync();
        }

        /// <summary>
        /// Serializes a sample object to JSON and uploads two copies of it to
        /// Azure Blob Storage in parallel, measuring the total elapsed time.
        /// </summary>
        /// <returns>A task that completes when all uploads have finished (or failed).</returns>
        static async Task UploadFilesAsync()
        {
            studentInfo student1 = new studentInfo()
            {
                Roll = 110,
                name = "shruti",
                courses = new List<string>
                {
                    "Math230",
                    "Calculus1",
                    "CS100",
                    "ML"
                }
            };

            // Serialize exactly once: stringjson is already JSON text.
            string stringjson = JsonConvert.SerializeObject(student1);
            Console.WriteLine(stringjson);
            string[] data = { stringjson, stringjson };

            BlobServiceClient blobServiceClient = new BlobServiceClient("Connection String");

            // Container name
            BlobContainerClient containerClient = blobServiceClient.GetBlobContainerClient("test");

            // Start a timer to measure how long it takes to upload all the files.
            Stopwatch timer = Stopwatch.StartNew();

            // Keep every stream alive until ALL uploads complete, then dispose.
            // Disposing inside the loop would kill a stream while its upload
            // task is still reading from it.
            var streams = new List<MemoryStream>();

            try
            {
                int count = 0;

                // Specify the StorageTransferOptions
                BlobUploadOptions options = new BlobUploadOptions
                {
                    TransferOptions = new StorageTransferOptions
                    {
                        // Set the maximum number of workers that
                        // may be used in a parallel transfer.
                        MaximumConcurrency = 8,

                        // Set the maximum length of a transfer to 50MB.
                        MaximumTransferSize = 50 * 1024 * 1024
                    }
                };

                // Create a queue of tasks that will each upload one blob.
                var tasks = new Queue<Task<Response<BlobContentInfo>>>();

                string name = "name";
                int i = 1;

                foreach (string json in data)
                {
                    // BUG FIX: the original called JsonConvert.SerializeObject(json)
                    // here, double-serializing a string that was already JSON and
                    // uploading an escaped, quoted blob instead of the JSON document.
                    var ms = new MemoryStream(Encoding.UTF8.GetBytes(json));
                    streams.Add(ms);

                    BlobClient blob1 = containerClient.GetBlobClient(name + i);
                    i++;
                    tasks.Enqueue(blob1.UploadAsync(ms, options));
                    count++;
                }

                // Run all the tasks asynchronously.
                await Task.WhenAll(tasks);

                timer.Stop();
                Console.WriteLine($"Uploaded {count} files in {timer.Elapsed.TotalSeconds} seconds");
            }
            catch (RequestFailedException ex)
            {
                Console.WriteLine($"Azure request failed: {ex.Message}");
            }
            catch (Exception ex)
            {
                Console.WriteLine($"Exception: {ex.Message}");
            }
            finally
            {
                // All upload tasks have completed (or faulted) by now, so the
                // backing streams can be released safely.
                foreach (var ms in streams)
                {
                    ms.Dispose();
                }
            }
        }
    }
 }

OUTPUT

Both JSON payloads were uploaded to Azure Storage.

enter image description here

enter image description here

ShrutiJoshi-MT
  • 1,622
  • 1
  • 4
  • 9