At the end of the program's life, my code uploads the entire contents of six different lists into a database. The problem is that they're parallel lists with about 14,000 items each, and I currently run a separate INSERT query for every row. This takes a long time. Is there a faster way to do it? Here's a sample of the relevant code:
    public void uploadContent()
    {
        var cs = Properties.Settings.Default.Database;
        SqlConnection dataConnection = new SqlConnection(cs);
        dataConnection.Open();
        for (int i = 0; i < urlList.Count; i++)
        {
            SqlCommand dataCommand = new SqlCommand(Properties.Settings.Default.CommandString, dataConnection);
            try
            {
                dataCommand.Parameters.AddWithValue("@user", userList[i]);
                dataCommand.Parameters.AddWithValue("@computer", computerList[i]);
                dataCommand.Parameters.AddWithValue("@date", timestampList[i]);
                dataCommand.Parameters.AddWithValue("@itemName", domainList[i]);
                dataCommand.Parameters.AddWithValue("@itemDetails", urlList[i]);
                dataCommand.Parameters.AddWithValue("@timesUsed", hitsList[i]);
                dataCommand.ExecuteNonQuery();
            }
            catch (Exception e)
            {
                using (StreamWriter sw = File.AppendText("errorLog.log"))
                {
                    sw.WriteLine(e);
                }
            }
        }
        dataConnection.Close();
    }
Here is the command string the code pulls from the config file:
CommandString:
INSERT dbo.InternetUsage VALUES (@user, @computer, @date, @itemName, @itemDetails, @timesUsed)
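What I've been wondering is whether a single bulk operation would beat 14,000 individual commands. Below is a rough sketch of how I imagine SqlBulkCopy might fit here; the column names and types are just guesses based on my parameter names, and since I haven't set up any explicit column mappings, the DataTable columns would have to be in the same order as dbo.InternetUsage.

    // Rough sketch only — uses System.Data and System.Data.SqlClient, same as the existing code.
    // Column names/types are placeholders based on my parameter names; the real
    // dbo.InternetUsage schema may differ.
    public void uploadContentBulk()
    {
        var table = new DataTable();
        table.Columns.Add("User", typeof(string));
        table.Columns.Add("Computer", typeof(string));
        table.Columns.Add("Date", typeof(DateTime));
        table.Columns.Add("ItemName", typeof(string));
        table.Columns.Add("ItemDetails", typeof(string));
        table.Columns.Add("TimesUsed", typeof(int));

        // Copy the six parallel lists into rows of the in-memory table.
        for (int i = 0; i < urlList.Count; i++)
        {
            table.Rows.Add(userList[i], computerList[i], timestampList[i],
                           domainList[i], urlList[i], hitsList[i]);
        }

        using (var connection = new SqlConnection(Properties.Settings.Default.Database))
        using (var bulkCopy = new SqlBulkCopy(connection))
        {
            connection.Open();
            bulkCopy.DestinationTableName = "dbo.InternetUsage";
            // Without explicit ColumnMappings, columns map by ordinal position,
            // so the DataTable column order must match the table definition.
            bulkCopy.WriteToServer(table); // single bulk operation instead of ~14,000 inserts
        }
    }

Is something along those lines the right direction, or is there a better way to batch these inserts?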