There is no better way as such, but you may see a slight improvement by moving SaveChanges inside the loop so that it runs roughly every 10 items:
int i = 0;
foreach (Employees item in sequence)
{
    var t = new Employees();
    t.Text = item.Text;
    dataContext.Employees.AddObject(t);
    i++;

    // This saves at most 10 items together.
    if ((i % 10) == 0)
    {
        dataContext.SaveChanges();
        // Show some progress to the user based on the value of i.
    }
}
dataContext.SaveChanges();
Since the context stays the same during all these inserts and keeps getting bigger, you can also recreate the context object every 1,000 inserts or so: var yourContext = new YourContext(); I think this is where the big gain is. Applying these improvements to a data-import process of mine took it from 7 minutes down to 6 seconds.
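A minimal sketch of combining both ideas (batched SaveChanges plus periodic context recreation), reusing the Employees and YourContext names from the snippets above; the interval of 1000 is just the value suggested here and worth benchmarking:
var dataContext = new YourContext();
int i = 0;
foreach (Employees item in sequence)
{
    var t = new Employees();
    t.Text = item.Text;
    dataContext.Employees.AddObject(t);
    i++;

    if ((i % 1000) == 0)
    {
        dataContext.SaveChanges();

        // Throw the ever-growing change tracker away and start the next
        // 1000 inserts with a fresh, small context.
        dataContext.Dispose();
        dataContext = new YourContext();
    }
}
dataContext.SaveChanges();
dataContext.Dispose();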
With the code below you can extend the partial context class with a method that takes a collection of entity objects and bulk copies them into the database. Simply replace MyEntities with the name of your own context class and add the file to your project, in the correct namespace. After that, all you need to do is call the BulkInsertAll method, handing over the entity objects you want to insert. Do not reuse the context class; instead, create a new instance each time you use it. This is required, at least in some versions of EF, since the authentication data associated with the SqlConnection used here gets lost after the class has been used once. I don't know why.
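Usage then looks roughly like this (a sketch only: the Department entity and its Name property are hypothetical placeholders, and MyEntities stands for whatever your context class is called):
// Build the entities in memory first, then hand them over in a single call.
Department[] departments = Enumerable.Range(1, 10000)
    .Select(i => new Department { Name = "Department " + i })
    .ToArray();

// Use a fresh context instance per bulk insert and do not reuse it,
// since the SqlConnection's authentication data can be lost after one use.
using (var context = new MyEntities())
{
    context.BulkInsertAll(departments);
}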
This version is for EF 5:
// Usings needed by this class (EF 5 on .NET Framework):
using System;
using System.Collections;
using System.Collections.Generic;
using System.Data;
using System.Data.Entity;
using System.Data.Entity.Infrastructure;   // IObjectContextAdapter
using System.Data.Metadata.Edm;            // MetadataWorkspace, GlobalItem, DataSpace
using System.Data.SqlClient;
using System.Linq;
using System.Reflection;
using System.Text.RegularExpressions;

public partial class MyEntities
{
    public void BulkInsertAll<T>(T[] entities) where T : class
    {
        var conn = (SqlConnection)Database.Connection;
        conn.Open();

        Type t = typeof(T);
        Set(t).ToString();

        var objectContext = ((IObjectContextAdapter)this).ObjectContext;
        var workspace = objectContext.MetadataWorkspace;
        var mappings = GetMappings(workspace, objectContext.DefaultContainerName, typeof(T).Name);

        var tableName = GetTableName<T>();
        var bulkCopy = new SqlBulkCopy(conn) { DestinationTableName = tableName };

        // Foreign key relations show up as virtual declared
        // properties and we want to ignore these.
        var properties = t.GetProperties().Where(p => !p.GetGetMethod().IsVirtual).ToArray();
        var table = new DataTable();
        foreach (var property in properties)
        {
            Type propertyType = property.PropertyType;

            // Nullable properties need special treatment.
            if (propertyType.IsGenericType &&
                propertyType.GetGenericTypeDefinition() == typeof(Nullable<>))
            {
                propertyType = Nullable.GetUnderlyingType(propertyType);
            }

            // Since we cannot trust the CLR type properties to be in the same order as
            // the table columns we use the SqlBulkCopy column mappings.
            table.Columns.Add(new DataColumn(property.Name, propertyType));
            var clrPropertyName = property.Name;
            var tableColumnName = mappings[property.Name];
            bulkCopy.ColumnMappings.Add(new SqlBulkCopyColumnMapping(clrPropertyName, tableColumnName));
        }

        // Add all our entities to our data table.
        foreach (var entity in entities)
        {
            var e = entity;
            table.Rows.Add(properties.Select(property => GetPropertyValue(property.GetValue(e, null))).ToArray());
        }

        // Send it to the server for bulk execution.
        bulkCopy.BulkCopyTimeout = 5 * 60;
        bulkCopy.WriteToServer(table);

        conn.Close();
    }

    private string GetTableName<T>() where T : class
    {
        var dbSet = Set<T>();
        var sql = dbSet.ToString();
        var regex = new Regex(@"FROM (?<table>.*) AS");
        var match = regex.Match(sql);
        return match.Groups["table"].Value;
    }

    private object GetPropertyValue(object o)
    {
        if (o == null)
            return DBNull.Value;
        return o;
    }

    private Dictionary<string, string> GetMappings(MetadataWorkspace workspace, string containerName, string entityName)
    {
        var mappings = new Dictionary<string, string>();
        var storageMapping = workspace.GetItem<GlobalItem>(containerName, DataSpace.CSSpace);
        dynamic entitySetMaps = storageMapping.GetType().InvokeMember(
            "EntitySetMaps",
            BindingFlags.GetProperty | BindingFlags.NonPublic | BindingFlags.Instance,
            null, storageMapping, null);

        foreach (var entitySetMap in entitySetMaps)
        {
            var typeMappings = GetArrayList("TypeMappings", entitySetMap);
            dynamic typeMapping = typeMappings[0];
            dynamic types = GetArrayList("Types", typeMapping);

            if (types[0].Name == entityName)
            {
                var fragments = GetArrayList("MappingFragments", typeMapping);
                var fragment = fragments[0];
                var properties = GetArrayList("AllProperties", fragment);
                foreach (var property in properties)
                {
                    var edmProperty = GetProperty("EdmProperty", property);
                    var columnProperty = GetProperty("ColumnProperty", property);
                    mappings.Add(edmProperty.Name, columnProperty.Name);
                }
            }
        }

        return mappings;
    }

    private ArrayList GetArrayList(string property, object instance)
    {
        var type = instance.GetType();
        var objects = (IEnumerable)type.InvokeMember(property, BindingFlags.GetProperty | BindingFlags.NonPublic | BindingFlags.Instance, null, instance, null);
        var list = new ArrayList();
        foreach (var o in objects)
        {
            list.Add(o);
        }
        return list;
    }

    private dynamic GetProperty(string property, object instance)
    {
        var type = instance.GetType();
        return type.InvokeMember(property, BindingFlags.GetProperty | BindingFlags.NonPublic | BindingFlags.Instance, null, instance, null);
    }
}
This version is for EF 6:
// Usings needed by this class (EF 6):
using System;
using System.Collections;
using System.Collections.Generic;
using System.Data;
using System.Data.Entity;
using System.Data.Entity.Core.Metadata.Edm;   // MetadataWorkspace, GlobalItem, DataSpace
using System.Data.Entity.Infrastructure;      // IObjectContextAdapter
using System.Data.SqlClient;
using System.Linq;
using System.Reflection;
using System.Text.RegularExpressions;

public partial class CMLocalEntities
{
    public void BulkInsertAll<T>(T[] entities) where T : class
    {
        var conn = (SqlConnection)Database.Connection;
        conn.Open();

        Type t = typeof(T);
        Set(t).ToString();

        var objectContext = ((IObjectContextAdapter)this).ObjectContext;
        var workspace = objectContext.MetadataWorkspace;
        var mappings = GetMappings(workspace, objectContext.DefaultContainerName, typeof(T).Name);

        var tableName = GetTableName<T>();
        var bulkCopy = new SqlBulkCopy(conn) { DestinationTableName = tableName };

        // Foreign key relations show up as virtual declared
        // properties and we want to ignore these.
        var properties = t.GetProperties().Where(p => !p.GetGetMethod().IsVirtual).ToArray();
        var table = new DataTable();
        foreach (var property in properties)
        {
            Type propertyType = property.PropertyType;

            // Nullable properties need special treatment.
            if (propertyType.IsGenericType &&
                propertyType.GetGenericTypeDefinition() == typeof(Nullable<>))
            {
                propertyType = Nullable.GetUnderlyingType(propertyType);
            }

            // Since we cannot trust the CLR type properties to be in the same order as
            // the table columns we use the SqlBulkCopy column mappings.
            table.Columns.Add(new DataColumn(property.Name, propertyType));
            var clrPropertyName = property.Name;
            var tableColumnName = mappings[property.Name];
            bulkCopy.ColumnMappings.Add(new SqlBulkCopyColumnMapping(clrPropertyName, tableColumnName));
        }

        // Add all our entities to our data table.
        foreach (var entity in entities)
        {
            var e = entity;
            table.Rows.Add(properties.Select(property => GetPropertyValue(property.GetValue(e, null))).ToArray());
        }

        // Send it to the server for bulk execution.
        bulkCopy.BulkCopyTimeout = 5 * 60;
        bulkCopy.WriteToServer(table);

        conn.Close();
    }

    private string GetTableName<T>() where T : class
    {
        var dbSet = Set<T>();
        var sql = dbSet.ToString();
        var regex = new Regex(@"FROM (?<table>.*) AS");
        var match = regex.Match(sql);
        return match.Groups["table"].Value;
    }

    private object GetPropertyValue(object o)
    {
        if (o == null)
            return DBNull.Value;
        return o;
    }

    private Dictionary<string, string> GetMappings(MetadataWorkspace workspace, string containerName, string entityName)
    {
        var mappings = new Dictionary<string, string>();
        var storageMapping = workspace.GetItem<GlobalItem>(containerName, DataSpace.CSSpace);
        dynamic entitySetMaps = storageMapping.GetType().InvokeMember(
            "EntitySetMaps",
            BindingFlags.GetProperty | BindingFlags.Public | BindingFlags.Instance,
            null, storageMapping, null);

        foreach (var entitySetMap in entitySetMaps)
        {
            var typeMappings = GetArrayList("EntityTypeMappings", entitySetMap);
            dynamic typeMapping = typeMappings[0];
            dynamic types = GetArrayList("Types", typeMapping);

            if (types[0].Name == entityName)
            {
                var fragments = GetArrayList("MappingFragments", typeMapping);
                var fragment = fragments[0];
                var properties = GetArrayList("AllProperties", fragment);
                foreach (var property in properties)
                {
                    var edmProperty = GetProperty("EdmProperty", property);
                    var columnProperty = GetProperty("ColumnProperty", property);
                    mappings.Add(edmProperty.Name, columnProperty.Name);
                }
            }
        }

        return mappings;
    }

    private ArrayList GetArrayList(string property, object instance)
    {
        var type = instance.GetType();
        var objects = (IEnumerable)type.InvokeMember(
            property,
            BindingFlags.GetProperty | BindingFlags.Public | BindingFlags.Instance, null, instance, null);
        var list = new ArrayList();
        foreach (var o in objects)
        {
            list.Add(o);
        }
        return list;
    }

    private dynamic GetProperty(string property, object instance)
    {
        var type = instance.GetType();
        return type.InvokeMember(property, BindingFlags.GetProperty | BindingFlags.Public | BindingFlags.Instance, null, instance, null);
    }
}
And finally, a little something for you Linq-To-Sql lovers.
// Usings needed by this class (LINQ to SQL):
using System;
using System.Collections.Generic;
using System.Data;
using System.Data.Linq.Mapping;   // TableAttribute, AssociationAttribute
using System.Data.SqlClient;
using System.Linq;

partial class MyDataContext
{
    partial void OnCreated()
    {
        CommandTimeout = 5 * 60;
    }

    public void BulkInsertAll<T>(IEnumerable<T> entities)
    {
        entities = entities.ToArray();

        string cs = Connection.ConnectionString;
        var conn = new SqlConnection(cs);
        conn.Open();

        Type t = typeof(T);
        var tableAttribute = (TableAttribute)t.GetCustomAttributes(
            typeof(TableAttribute), false).Single();
        var bulkCopy = new SqlBulkCopy(conn) {
            DestinationTableName = tableAttribute.Name };

        // Association (foreign key) properties are filtered out; only mapped columns are copied.
        var properties = t.GetProperties().Where(EventTypeFilter).ToArray();
        var table = new DataTable();

        foreach (var property in properties)
        {
            Type propertyType = property.PropertyType;
            if (propertyType.IsGenericType &&
                propertyType.GetGenericTypeDefinition() == typeof(Nullable<>))
            {
                propertyType = Nullable.GetUnderlyingType(propertyType);
            }
            table.Columns.Add(new DataColumn(property.Name, propertyType));
        }

        foreach (var entity in entities)
        {
            table.Rows.Add(properties.Select(
                property => GetPropertyValue(
                    property.GetValue(entity, null))).ToArray());
        }

        bulkCopy.WriteToServer(table);
        conn.Close();
    }

    private bool EventTypeFilter(System.Reflection.PropertyInfo p)
    {
        var attribute = Attribute.GetCustomAttribute(p,
            typeof(AssociationAttribute)) as AssociationAttribute;

        if (attribute == null) return true;
        if (attribute.IsForeignKey == false) return true;

        return false;
    }

    private object GetPropertyValue(object o)
    {
        if (o == null)
            return DBNull.Value;
        return o;
    }
}
If you have a collection of entities, e.g. storeEntities, you can store them using SqlBulkCopy as follows (note that AsDataReader is not part of EF itself; it is assumed here to be an extension method, for example from an EntityDataReader-style helper, that exposes the collection as an IDataReader):
var bulkCopy = new SqlBulkCopy(connection);
bulkCopy.DestinationTableName = TableName;
var dataReader = storeEntities.AsDataReader();
bulkCopy.WriteToServer(dataReader);
Use: db.Set<TEntity>().AddRange(list); where TEntity is your entity type.
For reference:
TESTEntities db = new TESTEntities();

List<Person> persons = new List<Person>
{
    new Person { Name = "p1", Place = "place" },
    new Person { Name = "p2", Place = "place" },
    new Person { Name = "p3", Place = "place" },
    new Person { Name = "p4", Place = "place" },
    new Person { Name = "p5", Place = "place" }
};

db.Set<Person>().AddRange(persons);
db.SaveChanges();
Not too high, not too low. There is no single batch size that fits every scenario, since the right value depends on multiple factors such as row size, indexes, triggers, etc.
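If what is being tuned is the SqlBulkCopy batch size, a minimal sketch of making it an explicit, benchmarkable knob could look like this (the helper name, the prepared DataTable and the default of 500 are placeholders to measure against your own data):
using System.Data;
using System.Data.SqlClient;

public static class BulkCopyHelper
{
    // Writes an already prepared DataTable in batches of batchSize rows;
    // benchmark different sizes against your own rows, indexes and triggers.
    public static void BulkWrite(string connectionString, DataTable table, int batchSize = 500)
    {
        using (var bulkCopy = new SqlBulkCopy(connectionString))
        {
            bulkCopy.DestinationTableName = table.TableName; // e.g. "dbo.Person"
            bulkCopy.BatchSize = batchSize;                  // 0 means: send all rows as a single batch
            bulkCopy.BulkCopyTimeout = 5 * 60;
            bulkCopy.WriteToServer(table);
        }
    }
}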