One of the requirements that came in was to split a large DataTable into smaller ones defined by a batch size (say 2000). The sample code below shows how to achieve this easily.
The following method creates a large amount of sample data:
/// <summary>
/// Builds a DataTable of sample data with an integer "ID" column (1..rowCount)
/// and a string "Name" column ("Name 1" .. "Name {rowCount}").
/// </summary>
/// <param name="rowCount">Number of sample rows to generate; defaults to 20,000 to match the original example.</param>
/// <returns>The populated sample table.</returns>
static DataTable GetTonsOfData(int rowCount = 20000)
{
    DataTable dt = new DataTable();
    dt.Columns.Add(new DataColumn("ID", typeof(Int32)));
    dt.Columns.Add(new DataColumn("Name", typeof(string)));
    for (int i = 1; i <= rowCount; i++)
    {
        DataRow row = dt.NewRow();
        row["ID"] = i;
        row["Name"] = "Name " + i.ToString();
        dt.Rows.Add(row);
    }
    return dt;
}
The SplitTable method below splits the original DataTable into smaller DataTables and inserts them into a list.
/// <summary>
/// Splits <paramref name="originalTable"/> into smaller DataTables containing
/// at most <paramref name="batchSize"/> rows each, copying the rows into
/// tables named "Table_1", "Table_2", and so on.
/// </summary>
/// <param name="originalTable">The source table whose rows are copied into batches.</param>
/// <param name="batchSize">Maximum number of rows per resulting table; must be positive.</param>
/// <returns>A list of schema-identical tables that together contain copies of every source row.</returns>
/// <exception cref="ArgumentNullException"><paramref name="originalTable"/> is null.</exception>
/// <exception cref="ArgumentOutOfRangeException"><paramref name="batchSize"/> is not positive.</exception>
private static List<DataTable> SplitTable(DataTable originalTable, int batchSize)
{
    if (originalTable == null)
    {
        throw new ArgumentNullException(nameof(originalTable));
    }
    if (batchSize <= 0)
    {
        throw new ArgumentOutOfRangeException(nameof(batchSize), "Batch size must be positive.");
    }

    List<DataTable> tables = new List<DataTable>();
    int rowsInBatch = 0;
    int batchNumber = 1;
    // Clone() copies the schema only (no rows), so no Clear() is needed.
    DataTable newDt = originalTable.Clone();
    newDt.TableName = "Table_" + batchNumber;

    foreach (DataRow row in originalTable.Rows)
    {
        DataRow newRow = newDt.NewRow();
        newRow.ItemArray = row.ItemArray;
        newDt.Rows.Add(newRow);
        rowsInBatch++;
        if (rowsInBatch == batchSize)
        {
            tables.Add(newDt);
            batchNumber++;
            newDt = originalTable.Clone();
            newDt.TableName = "Table_" + batchNumber;
            rowsInBatch = 0;
        }
    }

    // BUG FIX: the original version dropped the final partial batch whenever the
    // row count was not an exact multiple of batchSize. Add it here.
    if (rowsInBatch > 0)
    {
        tables.Add(newDt);
    }

    return tables;
}
Now let us see the above methods in action.
// Build the full sample table, then partition it into batches of 2,000 rows.
var dtFull = GetTonsOfData();
var splitdt = SplitTable(dtFull, 2000);
The code could be made more concise by rewriting it with LINQ.