I need to load a large CSV file (1.5 GB, about 120,000,000 lines) into a SQL Server database. I tried inserting line by line with INSERT, but it is far too slow: 100,000 lines take about 2 minutes. I have read that SqlBulkCopy is much faster. My question: can SqlBulkCopy load a CSV split into columns? For example, my CSV has Series and Number columns separated by a comma (","), and the destination table has two matching columns. How do I map the CSV fields to the table columns, and are there any even faster ways to load the data?
// Bulk-load a large CSV ("Series,Number" per line) into dbo.ElmaBadPassport.
// Per-row INSERT is replaced with SqlBulkCopy: rows are buffered in a DataTable
// and flushed in batches, so memory stays bounded and the server receives
// minimally-logged bulk batches instead of 120M individual round trips.
const int BatchSize = 50000;

// FIX: the original string was "server;Database=..." — the "Server=" key was
// missing, so SqlConnection would throw on parse.
string conStr = "Server=server;Database=base;Trusted_Connection=True;";

using (var connection = new SqlConnection(conStr))
{
    connection.Open();

    // In-memory buffer matching the destination schema.
    var table = new DataTable();
    table.Columns.Add("Series", typeof(string));
    table.Columns.Add("Number", typeof(string));

    using (var bulk = new SqlBulkCopy(connection))
    {
        bulk.DestinationTableName = "ElmaBadPassport";
        bulk.BatchSize = BatchSize;
        bulk.BulkCopyTimeout = 0; // 0 = no timeout; this load runs for a long time

        // Explicit field mapping: source DataTable column -> destination column.
        // This answers the "how to map fields" part of the question.
        bulk.ColumnMappings.Add("Series", "Series");
        bulk.ColumnMappings.Add("Number", "Number");

        using (var file = new StreamReader(
            new BufferedStream(File.OpenRead(@"D:\bzip2\WriteLines.csv"), 10 * 1024 * 1024)))
        {
            string line;
            while ((line = file.ReadLine()) != null)
            {
                // Original filter preserved: only rows of exactly 11 characters
                // (presumably "SSSS,NNNNNN" — TODO confirm against the data).
                if (line.Length != 11)
                {
                    continue;
                }

                string[] values = line.Split(',');
                if (values.Length < 2)
                {
                    continue; // malformed row — skip instead of throwing IndexOutOfRange
                }

                table.Rows.Add(values[0], values[1]);

                if (table.Rows.Count >= BatchSize)
                {
                    bulk.WriteToServer(table);
                    table.Clear(); // reuse the buffer for the next batch
                }
            }

            // Flush the final partial batch.
            if (table.Rows.Count > 0)
            {
                bulk.WriteToServer(table);
            }
        }
    }
    // No explicit connection.Close(): Dispose() via "using" closes it.
}