问题描述
我们有一个端点,它从数据库加载记录并从记录创建一个 CSV,然后返回文件流。但是当记录大于 200K 时,我们会得到 OutOfMemoryException。
public async Task<IActionResult> Export()
{
var records = // get all records from the database
var memoryStream = new MemoryStream();
var streamWriter = new StreamWriter(memoryStream);
var csvWriter = new CsvWriter(streamWriter,CultureInfo.InvariantCulture);
await csvWriter.WriteRecordsAsync(records);
csvWriter.Flush();
streamWriter.Flush();
memoryStream.Flush();
string filename = $"Records_{DateTime.UtcNow.ToString("yyyy-MM-ddTHH:mm:ss")}.csv";
memoryStream.Seek(0,SeekOrigin.Begin);
return File(memoryStream,"text/csv",filename);
}
是否有更好的方法来防止 OutOfMemoryException。
解决方法
我不知道您为什么会收到此错误。但是从我下面的代码中,我能够读取大量数据,我用大约 3Gb 的数据进行了测试。您的数据大小是多少?
这是我使用 CSV 助手的代码。
private IEnumerable<Dictionary<string,EntityProperty>> ReadCSV(Stream source,IEnumerable<TableField> cols)
{
using (TextReader reader = new StreamReader(source,Encoding.UTF8))
{
var cache = new TypeConverterCache();
cache.AddConverter<float>(new CSVSingleConverter());
cache.AddConverter<double>(new CSVDoubleConverter());
var csv = new CsvReader(reader,new CsvHelper.Configuration.CsvConfiguration(global::System.Globalization.CultureInfo.InvariantCulture)
{
Delimiter = ";",HasHeaderRecord = true,CultureInfo = global::System.Globalization.CultureInfo.InvariantCulture,TypeConverterCache = cache
});
csv.Read();
csv.ReadHeader();
var map = (
from col in cols
from src in col.Sources()
let index = csv.GetFieldIndex(src,isTryGet: true)
where index != -1
select new { col.Name,Index = index,Type = col.DataType }).ToList();
while (csv.Read())
{
yield return map.ToDictionary(
col => col.Name,col => EntityProperty.CreateEntityPropertyFromObject(csv.GetField(col.Type,col.Index)));
}
}
}