In financial applications, especially for MCX or stock market data, you often need to process large CSV/MCX files, filter records, and insert them efficiently into a database. This article explains a step-by-step approach using C#.
1. Reading CSV/MCX File into DataTable
The method convertdatatable
reads a CSV file and converts it into a DataTable:
/// <summary>
/// Reads a comma-separated MCX data file into a DataTable with 37 string
/// columns ("column1".."column37") plus an int "Count" column that records the
/// source row index. The first line of the file (header) is skipped.
/// </summary>
/// <param name="Filepath">Full path of the CSV/MCX file to read.</param>
/// <returns>The populated DataTable; empty (columns only) if the file has no data rows or reading fails.</returns>
public DataTable convertdatatable(string Filepath)
{
    DataTable FinalTable = new DataTable();
    // Define columns: column1..column37 (string) + Count (int).
    for (int i = 1; i <= 37; i++)
        FinalTable.Columns.Add("column" + i, typeof(string));
    FinalTable.Columns.Add("Count", typeof(int));
    try
    {
        // The reader is created inside the try so that a bad path is logged
        // instead of escaping unhandled; using guarantees close/dispose.
        using (StreamReader sr = new StreamReader(Filepath))
        {
            string Fulltext = sr.ReadToEnd();
            string[] rows = Fulltext.Split('\n');
            if (rows.Length > 1)
            {
                // i starts at 1 to skip the header line; i doubles as the row number.
                for (int i = 1; i < rows.Length; i++)
                {
                    // TrimEnd('\r') keeps CRLF (Windows) files from leaving a
                    // stray carriage return in the last column's value.
                    string[] rowValues = rows[i].TrimEnd('\r').Split(',');
                    if (rowValues.Length > 1)
                    {
                        DataRow dr = FinalTable.NewRow();
                        // Guarded copy: rows shorter than 37 fields get "" for
                        // the missing columns instead of throwing.
                        for (int j = 0; j < 37; j++)
                            dr[j] = rowValues.Length > j ? rowValues[j].Trim() : "";
                        dr[37] = i; // Track row number
                        FinalTable.Rows.Add(dr);
                    }
                    else
                    {
                        // Line had no comma (blank/trailing line) - log and skip it.
                        obj.WriteFailureLogFile("No Data in File");
                    }
                }
            }
            else
            {
                obj.WriteFailureLogFile("No Data in File");
            }
        }
    }
    catch (Exception ex)
    {
        obj.WriteErrorLogFile(ex.ToString(),"Converting");
    }
    return FinalTable;
}
(or)
/// <summary>
/// Reads a comma-separated MCX data file into a DataTable with 37 string
/// columns ("column1".."column37") plus an int "Count" column that records the
/// source row index. The first line of the file (header) is skipped.
/// </summary>
/// <param name="Filepath">Full path of the CSV/MCX file to read.</param>
/// <returns>The populated DataTable; empty (columns only) if the file has no data rows or reading fails.</returns>
public DataTable convertdatatable(string Filepath)
{
    #region table
    // column1..column37 hold the raw string fields; Count stores the source row index.
    DataTable FinalTable = new DataTable();
    for (int c = 1; c <= 37; c++)
        FinalTable.Columns.Add("column" + c, typeof(String));
    FinalTable.Columns.Add("Count", typeof(int));
    #endregion
    try
    {
        // Reader created inside the try so that an unreadable path is logged
        // rather than escaping; using guarantees close/dispose.
        using (StreamReader sr = new StreamReader(Filepath))
        {
            string Fulltext = sr.ReadToEnd();
            string[] rows = Fulltext.Split('\n');
            if (rows.Length > 1)
            {
                // Start at 1 to skip the header row.
                for (int i = 1; i < rows.Length; i++)
                {
                    // TrimEnd('\r') keeps CRLF (Windows) files from leaving a
                    // stray carriage return in the last field.
                    string[] rowValues = rows[i].TrimEnd('\r').Split(',');
                    if (rowValues.Length > 1)
                    {
                        DataRow dr = FinalTable.NewRow();
                        #region adddata
                        // Guarded copy: the original assigned rowValues[0..36]
                        // unconditionally, which threw IndexOutOfRangeException
                        // for any row with fewer than 37 fields. Short rows now
                        // get "" for the missing columns.
                        for (int j = 0; j < 37; j++)
                            dr[j] = j < rowValues.Length ? rowValues[j].Trim() : "";
                        dr[37] = i;
                        #endregion
                        FinalTable.Rows.Add(dr);
                    }
                    else
                    {
                        obj.WriteFailureLogFile("No Data in File");
                    }
                }
            }
            else
            {
                obj.WriteFailureLogFile("No Data in File");
            }
        }
    }
    catch (Exception ex)
    {
        obj.WriteErrorLogFile(ex.ToString(),"Converting");
    }
    return FinalTable;
}
Key Points
StreamReader
reads the entire file.
Rows are split by the newline character (\n), and columns are split by the comma (,).
Each row is added to the DataTable
dynamically.
A Count
column tracks the row index.
2. Filtering Data
After loading the MCX data, you can filter rows using a DataView
:
// NOTE(review): this assignment copies only a reference - FilteredData and
// MCXdata are the same DataTable instance; no data is duplicated here.
DataTable FilteredData = MCXdata;
// DefaultView is the table's built-in DataView; setting its RowFilter also
// affects any other consumer of MCXdata.DefaultView.
DataView dvView = FilteredData.DefaultView;
// Keep only rows whose Count (source row index) exceeds TotalRowCount -
// presumably the number of rows already processed; TODO confirm against caller.
dvView.RowFilter = "Count > " + TotalRowCount; // Example: filter by Count column
// ToTable() materializes a NEW DataTable containing only the filtered rows.
DataTable dtFiltered = dvView.ToTable();
FinalTable = dtFiltered;
Notes: Assigning MCXdata to FilteredData copies only a reference, not the data; it is dvView.ToTable() that materializes a new DataTable containing just the filtered rows.
3. Bulk Insert into SQL Server
Using SqlBulkCopy
, large datasets can be inserted efficiently:
// Only attempt the insert when there is at least one row to copy.
if (FinalTable.Rows.Count > 0)
{
// SECURITY NOTE(review): the connection string embeds the 'sa' login and its
// password in source code - move it to protected configuration (app.config /
// secrets store) before shipping.
using (SqlConnection con = new SqlConnection("Data Source=173.47.478.2;Initial Catalog=AS78955;User ID=sa;Password=N@yyui#DB&12$%"))
{
con.Open();
// using guarantees the bulk copy (and its server resources) are released
// even if WriteToServer throws.
using (SqlBulkCopy bulkCopy = new SqlBulkCopy(con))
{
bulkCopy.BulkCopyTimeout = 1000000; // Large timeout for big files
bulkCopy.DestinationTableName = "DTUMCX";
// Map each DataTable column to the identically-named destination column;
// once any mapping is specified, unmapped columns are not copied.
for (int i = 1; i <= 37; i++)
bulkCopy.ColumnMappings.Add("column" + i, "column" + i);
bulkCopy.ColumnMappings.Add("Count", "Count");
bulkCopy.WriteToServer(FinalTable);
}
con.Close();
obj.WriteProcessLogFile("Bulk inserted Successfully. Total Rows - " + FinalTable.Rows.Count);
}
}
(or)
// Only attempt the insert when there is at least one row to copy.
if (FinalTable.Rows.Count > 0)
{
    string TotalCount = FinalTable.Rows.Count.ToString();
    // SECURITY NOTE(review): the connection string embeds the 'sa' login and
    // its password in source code - move it to protected configuration.
    // using guarantees the connection is closed/disposed even when the copy fails.
    using (SqlConnection con = new SqlConnection("Data Source=173.47.478.2;Initial Catalog=AS78955;User ID=sa;Password=N@yyui#DB&12$%"))
    {
        con.Open();
        using (SqlBulkCopy bulkCopy = new SqlBulkCopy(con))
        {
            bulkCopy.BulkCopyTimeout = 1000000; // Large timeout for big files
            bulkCopy.DestinationTableName = "DTUMCX";
            try
            {
                // Map every source column to the identically-named destination
                // column. BUG FIX: the original hand-written list mapped
                // "column1" twice and never mapped "column11", silently
                // misrouting that column's data.
                for (int i = 1; i <= 37; i++)
                    bulkCopy.ColumnMappings.Add("column" + i, "column" + i);
                // Map Count too, for consistency with the first variant -
                // once any mapping exists, unmapped columns are not copied.
                bulkCopy.ColumnMappings.Add("Count", "Count");
                bulkCopy.WriteToServer(FinalTable);
                obj.WriteProcessLogFile("Bulk inserted SuccessFully.Total Rows - " + TotalCount);
            }
            catch (Exception ex)
            {
                // The original try had no handler; log the failure instead of
                // letting the exception escape with the connection still open.
                obj.WriteErrorLogFile(ex.ToString(), "BulkInsert");
            }
        }
        con.Close();
    }
}
Key Points
SqlBulkCopy
is optimized for inserting large volumes of data.
Column mappings ensure DataTable columns match SQL table columns.
BulkCopyTimeout
can be increased for very large files.
4. Error Handling and Logging
try-catch-finally
ensures errors are logged and resources are released.
obj.WriteFailureLogFile
logs missing or malformed rows.
obj.WriteErrorLogFile
logs exceptions during conversion.
5. Advantages of this Approach
Efficiency: Handles large MCX files efficiently.
Maintainability: Adding or removing columns is straightforward.
Filtering: Easy to filter rows dynamically before insert.
Logging: Helps track processing errors and missing data.
Automation: Can be scheduled to process daily market data files automatically.
6. Example Workflow
Load the MCX CSV file using convertdatatable.
Filter rows based on a condition (e.g., Count > TotalRowCount
).
Bulk insert the filtered data into DTUMCX
SQL Server table.
Log success or failure messages for auditing.
Conclusion
This approach is ideal for financial applications dealing with large MCX or stock market datasets. By combining DataTable conversion, DataView filtering, and SqlBulkCopy, you can achieve efficient, reliable, and maintainable data processing pipelines in C#.