Try the code below; I tested it on my system. I used a StudentInfo class for testing purposes (you can substitute your own object), serialized it to JSON, and uploaded an array of JSON strings.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Text;
using System.Threading.Tasks;
using Azure;
using Azure.Storage;
using Azure.Storage.Blobs;
using Azure.Storage.Blobs.Models;
using Newtonsoft.Json;
using Nito.AsyncEx;
namespace UploadJsondata
{
    class StudentInfo
    {
        public int Roll { get; set; }
        public string Name { get; set; }
        public List<string> Courses { get; set; }
    }

    class Program
    {
        static void Main(string[] args)
        {
            // Nito.AsyncEx lets the synchronous Main run the async upload method.
            AsyncContext.Run(() => UploadFilesAsync());
        }
        static async Task UploadFilesAsync()
        {
            StudentInfo student1 = new StudentInfo()
            {
                Roll = 110,
                Name = "shruti",
                Courses = new List<string>()
                {
                    "Math230",
                    "Calculus1",
                    "CS100",
                    "ML"
                }
            };

            // Serialize the object once and reuse the JSON string for both uploads.
            string stringjson = JsonConvert.SerializeObject(student1);
            Console.WriteLine(stringjson);

            string[] data = new string[2];
            data[0] = stringjson;
            data[1] = stringjson;

            // Replace the placeholder with your storage account connection string.
            BlobServiceClient blobServiceClient = new BlobServiceClient("<your-connection-string>");

            // "test" is the container name; it must already exist (or call CreateIfNotExistsAsync first).
            BlobContainerClient containerClient = blobServiceClient.GetBlobContainerClient("test");
            // Start a timer to measure how long it takes to upload all the files.
            Stopwatch timer = Stopwatch.StartNew();
            try
            {
                int count = 0;

                // Specify the StorageTransferOptions
                BlobUploadOptions options = new BlobUploadOptions
                {
                    TransferOptions = new StorageTransferOptions
                    {
                        // Set the maximum number of workers that
                        // may be used in a parallel transfer.
                        MaximumConcurrency = 8,

                        // Set the maximum length of a transfer to 50 MB.
                        MaximumTransferSize = 50 * 1024 * 1024
                    }
                };
                // Create a queue of tasks that will each upload one blob.
                var tasks = new Queue<Task<Response<BlobContentInfo>>>();
                string name = "name";
                int i = 1;
                foreach (string x in data)
                {
                    // x is already a JSON string, so encode it directly;
                    // calling SerializeObject on it again would double-escape the payload.
                    MemoryStream ms = new MemoryStream(Encoding.UTF8.GetBytes(x));

                    // Blobs are named name1.json, name2.json, ...
                    BlobClient blob = containerClient.GetBlobClient($"{name}{i}.json");
                    i++;
                    tasks.Enqueue(blob.UploadAsync(ms, options));
                    count++;
                }

                // Run all the upload tasks asynchronously.
                await Task.WhenAll(tasks);

                timer.Stop();
                Console.WriteLine($"Uploaded {count} blobs in {timer.Elapsed.TotalSeconds} seconds");
            }
            catch (RequestFailedException ex)
            {
                Console.WriteLine($"Azure request failed: {ex.Message}");
            }
            catch (Exception ex)
            {
                Console.WriteLine($"Exception: {ex.Message}");
            }
        }
    }
}
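The sample only needs the Azure.Storage.Blobs, Newtonsoft.Json, and Nito.AsyncEx NuGet packages. If you do not need to tune the parallel transfer options, here is a minimal sketch of a simpler variant (assuming the same containerClient, a JSON string, and a blob name you choose) that uploads the string directly through the BinaryData overload of UploadAsync:

        // Minimal sketch: upload one JSON string as a blob without custom transfer options.
        static async Task UploadJsonAsync(BlobContainerClient containerClient, string json, string blobName)
        {
            BlobClient blob = containerClient.GetBlobClient(blobName);
            // overwrite: true replaces an existing blob with the same name instead of throwing.
            await blob.UploadAsync(BinaryData.FromString(json), overwrite: true);
        }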
OUTPUT
Both JSON blobs were uploaded to the Azure Storage container.
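If you want to confirm the uploads from code, a minimal sketch (run inside an async method such as UploadFilesAsync above, using the same containerClient) lists the blob names in the container:

                // Print the name of every blob in the container to verify the uploads.
                await foreach (BlobItem item in containerClient.GetBlobsAsync())
                {
                    Console.WriteLine(item.Name);
                }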

