I have a very large JSON file; the cars array below can contain up to 100,000,000 records, and the total file size can vary from 500 MB to 10 GB. I am using Newtonsoft Json.NET.
Input
{
    "name": "John",
    "age": "30",
    "cars": [{
        "brand": "ABC",
        "models": ["Alhambra", "Altea", "AlteaXL", "Arosa", "Cordoba", "CordobaVario", "Exeo", "Ibiza", "IbizaST", "ExeoST", "Leon", "LeonST", "Inca", "Mii", "Toledo"],
        "year": "2019",
        "month": "1",
        "day": "1"
    }, {
        "brand": "XYZ",
        "models": ["Alhambra", "Altea", "AlteaXL", "Arosa", "Cordoba", "CordobaVario", "Exeo", "Ibiza", "IbizaST", "ExeoST", "Leon", "LeonST", "Inca", "Mii", "Toledo"],
        "year": "2019",
        "month": "10",
        "day": "01"
    }],
    "TestCity": "TestCityValue",
    "TestCity1": "TestCityValue1"
}
Desired output: File 1
{
    "name": "John",
    "age": "30",
    "cars": {
        "brand": "ABC",
        "models": ["Alhambra", "Altea", "AlteaXL", "Arosa", "Cordoba", "CordobaVario", "Exeo", "Ibiza", "IbizaST", "ExeoST", "Leon", "LeonST", "Inca", "Mii", "Toledo"],
        "year": "2019",
        "month": "1",
        "day": "1"
    },
    "TestCity": "TestCityValue",
    "TestCity1": "TestCityValue1"
}
File 2
{
    "name": "John",
    "age": "30",
    "cars": {
        "brand": "XYZ",
        "models": ["Alhambra", "Altea", "AlteaXL", "Arosa", "Cordoba", "CordobaVario", "Exeo", "Ibiza", "IbizaST", "ExeoST", "Leon", "LeonST", "Inca", "Mii", "Toledo"],
        "year": "2019",
        "month": "10",
        "day": "01"
    },
    "TestCity": "TestCityValue",
    "TestCity1": "TestCityValue1"
}
So I came up with the following code, which kind of works:
using System;
using System.Collections.Generic;
using System.IO;
using System.Net.Http;
using Newtonsoft.Json;

public static void SplitJson(Uri objUri, string splitbyProperty)
{
    HttpClient client = new HttpClient();
    using (Stream stream = client.GetStreamAsync(objUri).Result)
    using (StreamReader streamReader = new StreamReader(stream))
    using (JsonTextReader reader = new JsonTextReader(streamReader))
    {
        Node objnode = new Node();
        while (reader.Read())
        {
            // Capture the scalar properties that appear before the array.
            if (reader.TokenType == JsonToken.String && reader.Path.Contains("name") && !reader.Value.ToString().Equals(reader.Path))
            {
                objnode.name = reader.Value.ToString();
            }
            // "age" is a JSON string in the sample input, so a JsonToken.Integer
            // check never matches; test for a string token instead.
            if (reader.TokenType == JsonToken.String && reader.Path.Contains("age") && !reader.Value.ToString().Equals(reader.Path))
            {
                objnode.age = reader.Value.ToString();
            }
            if (reader.Path.Contains(splitbyProperty) && reader.TokenType == JsonToken.StartArray)
            {
                int counter = 0;
                while (reader.Read())
                {
                    if (reader.TokenType == JsonToken.StartObject)
                    {
                        // Deserialize one array element at a time so the whole
                        // array is never materialized in memory.
                        counter++;
                        var item = JsonSerializer.Create().Deserialize<Car>(reader);
                        objnode.cars = new List<Car> { item }; // one car per output file
                        insertIntoFileSystem(objnode, counter);
                    }
                    if (reader.TokenType == JsonToken.EndArray)
                        break;
                }
            }
        }
    }
}
public static void insertIntoFileSystem(Node objNode, int counter)
{
    // One file per array element: output_<name>_<age>_<counter>.json
    string fileName = @"C:\Temp\output_" + objNode.name + "_" + objNode.age + "_" + counter + ".json";
    var serialiser = new JsonSerializer();
    using (TextWriter tw = new StreamWriter(fileName))
    {
        // Serialize straight to the file; no intermediate StringWriter is needed.
        serialiser.Serialize(tw, objNode);
    }
}
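For completeness, Node and Car are plain holder classes shaped roughly like this (reconstructed here from the sample JSON and from how SplitJson uses them, so treat the exact definitions as an approximation):

// Sketch of the Node and Car types assumed by the code above; property
// names are inferred from the sample JSON, where every value is a string.
public class Car
{
    public string brand { get; set; }
    public List<string> models { get; set; }
    public string year { get; set; }
    public string month { get; set; }
    public string day { get; set; }
}

public class Node
{
    public string name { get; set; }
    public string age { get; set; }
    public List<Car> cars { get; set; }
    public string TestCity { get; set; }
    public string TestCity1 { get; set; }
}

And I call it like this (placeholder URL):

SplitJson(new Uri("https://example.com/bigfile.json"), "cars");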
ISSUE
Any field that appears after the array is not captured when the file is large, because each output file is written while the array is still being read. Is there a way to skip over the array, or to process the large array in parallel while reading? In short, I am not able to capture the part below with my code:
"TestCity": "TestCityValue", "TestCity1": "TestCityValue1"}