I'm trying to write data to a MySql database via c #. The problem is that all columns together should be unique but are too long for a key. To check for duplicates, I currently have a select query to check whether the record already exists and that it will not be added via an insert query. Now the script runs once a day and checks several million records if they are already there and if not they will be added.
example: At 2 million, that would be 2 million select querys over x million rows and again insert querys for all those who are not duplicates.
Is there a better way to check for duplicates? As I said unique, only all columns are together. The Select needs longer and longer the more lines there are ...
foreach (var item in list)
{
string query = "SELECT id FROM dirs WHERE Directory = \"" + item.dir + "\" AND IdentityReference = \"" + item.IdentityReference + "\" AND AccessControlType = \"" + item.AccessControlType + "\" AND FileSystemRights = \"" + item.FileSystemRights + "\" AND IsInherited = " + item.IsInherited.ToString();
MySqlCommand commanDatabase = new MySqlCommand(query, databaseConnection);
MySqlDataReader dataReader = commanDatabase.ExecuteReader();
bool duplicate = false;
while (dataReader.Read())
{
duplicate = true;
break;
}
dataReader.Close();
if (!duplicate)
{
query = "INSERT INTO dirs (Directory, IdentityReference, AccessControlType, FileSystemRights, IsInherited) VALUES ";
query += "(\"" + item.dir + "\", \"" + item.IdentityReference + "\", \"" + item.AccessControlType + "\", \"" + item.FileSystemRights + "\", " + item.IsInherited.ToString() + ")";
commanDatabase = new MySqlCommand(query, databaseConnection);
commanDatabase.CommandTimeout = 60;
commanDatabase.ExecuteNonQuery();
}
}