Thanks everyone for the tips. In dealing with JSON and .NET Datatables, I have had to overcome numerous different challenges.
- Tables that return 0 rows don't serialize, so you end up with a DataTable with 0 columns.
- JSON loses the DataType of the Columns
- JSON loses the RowState of the records.
- JSON loses the Original/Proposed values of a Modified row, so it won't save.
- JSON converts the DataType Byte[] to a Base64 string, but doesn't convert it back.
As a result, I have found a solution that seems to overcome all of these in one shot. To summarize the solution:
- Use the Built in XML feature of the DataTable to restore the Schema
- Save off the Original RowState in order to restore it back
- Save off the Original Values for Modified records to restore it back.
Since we all like code...
The Class that is going to be serialized (there are more properties but these are the focus)
//The actual table payload. Exposed publicly so non-.NET clients (e.g. JavaScript)
//can still read the serialized data directly.
public System.Data.DataTable Data { get; set; }
//XML schema of Data (produced by WriteXmlSchema); serialized so the receiver can
//restore the exact column DataTypes that JSON drops.
[Newtonsoft.Json.JsonProperty]
private string _DataTableSchema { get; set; }
//One entry per row of Data: the RowState and (for Modified rows) the Original
//values, captured before serialization and re-applied after deserialization.
[Newtonsoft.Json.JsonProperty]
private List<DeconstructedDataRow> _DeconstructedDataRows { get; set; }
//Carries the per-row metadata that JSON serialization of a DataTable drops:
//the RowState, plus the pre-edit field values for Modified rows.
private class DeconstructedDataRow
{
    //Row state captured before serialization; defaults to Unchanged.
    public System.Data.DataRowState RowState { get; set; } = System.Data.DataRowState.Unchanged;
    //Original (pre-modification) field values for a Modified row; empty otherwise.
    public List<object> RowOriginalValues { get; set; } = new List<object>();
}
On the sending side, BEFORE serializing the table, you call .Data_Deconstruct()...
//Captures everything JSON serialization of a DataTable loses — the schema,
//each row's RowState, and the Original values of Modified rows — into
//_DataTableSchema / _DeconstructedDataRows so Data_Reconstruct can restore it.
//NOTE: for Deleted rows this intentionally calls RejectChanges on this.Data
//itself (not the copy), because a Deleted row cannot be serialized at all;
//Data_Reconstruct re-applies the deletion on the receiving side.
public void Data_Deconstruct()
{
    //Reset the transport fields; tolerate a null list (e.g. after this object
    //itself was deserialized without that property present).
    _DataTableSchema = String.Empty;
    if (_DeconstructedDataRows == null)
    {
        _DeconstructedDataRows = new List<DeconstructedDataRow>();
    }
    _DeconstructedDataRows.Clear();
    if (this.Data == null || this.Data.Columns.Count == 0)
    {
        return;
    }
    //Work against a copy so reading Original values doesn't disturb the caller's
    //table (except for the Deleted-row case noted above). The using blocks make
    //disposal deterministic even if an exception is thrown mid-way.
    using (System.Data.DataTable myWorkingData = this.Data.Copy())
    {
        //WriteXmlSchema requires the table to have a name.
        if (String.IsNullOrWhiteSpace(myWorkingData.TableName) == true)
        {
            myWorkingData.TableName = System.Guid.NewGuid().ToString();
        }
        //JSON doesn't carry over the schema of the table, so we lose the specific
        //DataType of each column. Persist the schema as an XML string using the
        //DataTable's built-in writer.
        using (System.IO.StringWriter myStringWriter = new System.IO.StringWriter())
        {
            myWorkingData.WriteXmlSchema(myStringWriter, true);
            _DataTableSchema = myStringWriter.ToString();
        }
        //Serializing and deserializing also drops each record's RowState, loses the
        //Original values of Modified records, and cannot represent Deleted records.
        //Record enough information per row to rebuild all of that on the other side.
        for (Int32 intRowIndex = 0; intRowIndex < myWorkingData.Rows.Count; intRowIndex++)
        {
            DeconstructedDataRow myDeconstructedDataRow = new DeconstructedDataRow();
            //Save the current RowState so it can be re-applied after deserialization.
            myDeconstructedDataRow.RowState = myWorkingData.Rows[intRowIndex].RowState;
            //A Deleted row will not serialize, so un-delete it in the ORIGINAL table
            //for transport; Data_Reconstruct restores the Deleted state afterwards.
            if (myDeconstructedDataRow.RowState == System.Data.DataRowState.Deleted)
            {
                this.Data.Rows[intRowIndex].RejectChanges();
            }
            //For Modified rows, capture the Original values (read off the copy so the
            //caller's pending edits survive). Without the Original values the record
            //won't update, and forcing it produces 'concurrency' errors.
            if (myDeconstructedDataRow.RowState == System.Data.DataRowState.Modified)
            {
                myWorkingData.Rows[intRowIndex].RejectChanges();
                myDeconstructedDataRow.RowOriginalValues.AddRange(myWorkingData.Rows[intRowIndex].ItemArray);
            }
            //And don't forget to add it to our list.
            this._DeconstructedDataRows.Add(myDeconstructedDataRow);
        }
    }
}
On the receiving side, AFTER deserializing, you call .Data_Reconstruct()...
//Rebuilds the strongly-typed DataTable after JSON deserialization: re-applies
//the saved XML schema, decodes Byte[] columns that arrived as Base64 strings,
//and restores each row's RowState (plus Original values for Modified rows)
//as captured by Data_Deconstruct.
public void Data_Reconstruct()
{
    //Nothing to do without data or a saved schema.
    if (this.Data == null || String.IsNullOrWhiteSpace(_DataTableSchema) == true)
    {
        return;
    }
    //First build an empty table that carries the correct column DataTypes.
    System.Data.DataTable myWorkingData = new System.Data.DataTable();
    using (System.IO.StringReader myStringReader = new System.IO.StringReader(_DataTableSchema))
    {
        myWorkingData.ReadXmlSchema(myStringReader);
    }
    //Now transfer the deserialized values 'as-is' into the schema-correct table.
    foreach (System.Data.DataRow myRow in this.Data.Rows)
    {
        //myWorkingData.ImportRow(myRow) would be simpler, BUT JSON converts some
        //data types on the way out (e.g. Byte[] -> Base64 string) and does not
        //convert them back on deserialization, so each field is copied by hand
        //(at a performance cost).
        System.Data.DataRow myNewRecord = myWorkingData.NewRow();
        foreach (System.Data.DataColumn myField in myWorkingData.Columns)
        {
            object objValue = myRow[myField.ColumnName];
            if (myField.DataType.Equals(typeof(System.Byte[])) && (objValue is String))
            {
                //A Byte[] column arrives as a Base64 string; decode it back to bytes.
                //Decoding ONLY when the value is a string means DBNull stays DBNull
                //(the old Convert.ToString path turned DBNull into an empty array)
                //and values that are already Byte[] pass through untouched.
                myNewRecord[myField.ColumnName] = Convert.FromBase64String((String)objValue);
            }
            else
            {
                myNewRecord[myField.ColumnName] = objValue;
            }
        }
        myWorkingData.Rows.Add(myNewRecord);
    }
    //Every transferred row is currently marked "Added" (JSON does this as well);
    //baseline them to Unchanged before restoring the real states.
    myWorkingData.AcceptChanges();
    //Restore the RowStates saved by Data_Deconstruct. Tolerate a missing list,
    //e.g. when the payload came from a non-.NET client that never deconstructed.
    if (this._DeconstructedDataRows != null)
    {
        for (Int32 intRowIndex = 0; intRowIndex < myWorkingData.Rows.Count; intRowIndex++)
        {
            switch (_DeconstructedDataRows[intRowIndex].RowState)
            {
                case System.Data.DataRowState.Added:
                    myWorkingData.Rows[intRowIndex].SetAdded();
                    break;
                case System.Data.DataRowState.Deleted:
                    //Re-apply the deletion that Data_Deconstruct rejected for transport.
                    myWorkingData.Rows[intRowIndex].Delete();
                    break;
                case System.Data.DataRowState.Modified:
                    //To make the row genuinely Modified (so an adapter UPDATE fires and
                    //concurrency checks pass) the Original values must be seeded first.
                    //1) Remember the values that are in the record NOW (the new values).
                    object[] objNewValues = myWorkingData.Rows[intRowIndex].ItemArray;
                    //2) Load the saved Original values (JSON turns DBNull into null on
                    //   the wire; map it back before assignment).
                    for (Int32 intFieldIndex = 0; intFieldIndex < this._DeconstructedDataRows[intRowIndex].RowOriginalValues.Count; intFieldIndex++)
                    {
                        if (this._DeconstructedDataRows[intRowIndex].RowOriginalValues[intFieldIndex] == null)
                        {
                            this._DeconstructedDataRows[intRowIndex].RowOriginalValues[intFieldIndex] = DBNull.Value;
                        }
                    }
                    myWorkingData.Rows[intRowIndex].ItemArray = this._DeconstructedDataRows[intRowIndex].RowOriginalValues.ToArray();
                    myWorkingData.Rows[intRowIndex].AcceptChanges();
                    //3) Re-apply the new values, which both sets Original/Current
                    //   correctly and flips the RowState to Modified.
                    myWorkingData.Rows[intRowIndex].ItemArray = objNewValues;
                    break;
                default:
                    //Unchanged rows need no further work.
                    break;
            }
        }
    }
    //And finally, replace the deserialized table with our fixed one.
    this.Data = myWorkingData;
}
To test this I created 2 records in my database
Rec1_Delete
Rec2_Modify
Then I loaded the table using the methods above, flagged the Delete record for deletion, modified a field in the Modify record, and added a brand-new record.
I then sent the datatable back to the API and in the database, the Deleted record got deleted, the modified record got modified, and the new record was added.
As a side note, I expose the actual DataTable because, if you happen to call this from something other than .NET (like JavaScript in an HTML file), you can still use the DataTable directly to read and display data. The whole Deconstruct and Reconstruct was for .NET clients, while still allowing limited access for those others.