C# NPOI: appending data to an Excel file in batches

The code is attached below:

using DongYang.Core.Model.Domain;
using DongYang.Core.Utils;
using NLog;
using NPOI.XSSF.UserModel;
using System;
using System.Collections.Generic;
using System.Data;
using System.Diagnostics;
using System.IO;
using System.Threading;

namespace DongYang.Core.Service
{
    /// <summary>
    /// Exports tracking data for one business day (07:30 → 07:30) into an
    /// Excel file, writing in pages to keep memory usage bounded for large
    /// result sets (thousands of rows × ~400 columns).
    /// </summary>
    public class DYTrackANODetail700013TodayNewService
    {
        private readonly Logger _logger = LogManager.GetCurrentClassLogger(); // logging component

        /// <summary>
        /// Exports the data for the business day containing <paramref name="currentTime"/>
        /// to an .xlsx file. Copies an empty template, then pages through the
        /// query results, appending each page to the workbook and flushing it
        /// back to disk before fetching the next page.
        /// </summary>
        /// <param name="anomateexcels">Column mapping: each entry maps a source field name to a target Excel column index.</param>
        /// <param name="currentTime">Moment that selects the business day and names the output file.</param>
        public void ExportToExcel(List<Anomateexcel> anomateexcels, DateTime currentTime)
        {
            Stopwatch sw = Stopwatch.StartNew();

            FileStream file = null;
            string strBeginTime = string.Empty; // query window begin
            string strEndTime = string.Empty;   // query window end

            try
            {
                // Template shipped next to the executable.
                string templateFileName = AppDomain.CurrentDomain.BaseDirectory + "\\template.xlsx";
                // Output file named after the export timestamp
                // (lowercase custom format specifiers: yyyyMMdd_HHmmss).
                string reportFileName = FileHelper.GetExportFilePath(currentTime) + $"\\{currentTime.ToString("yyyyMMdd_HHmmss")}.xlsx";
                // Work out the query window for the business day.
                this.GetTime(currentTime, out strBeginTime, out strEndTime);
                // Total row count in the window; nothing to export when empty.
                int count = this.GetDataTableCount(strBeginTime, strEndTime);
                if (count == 0) return;
                // Start from a copy of the empty template.
                File.Copy(templateFileName, reportFileName);
                // Page through the data: open the copy, append one page of rows,
                // write the workbook back, and release the file handle each time.
                var pages = Math.Ceiling(Convert.ToDouble(count) / ConfigHelper.PageSize);
                for (int pageIndex = 1; pageIndex <= pages; pageIndex++)
                {
                    var startRow = (pageIndex - 1) * ConfigHelper.PageSize + 1;
                    var endRow = pageIndex * ConfigHelper.PageSize;
                    var dt = this.GetDataTable(strBeginTime, strEndTime, startRow, endRow);

                    file = new FileStream(reportFileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
                    // Load the file into memory; all edits happen on the in-memory workbook.
                    XSSFWorkbook xssfworkbook = new XSSFWorkbook(file);
                    file.Close();
                    XSSFSheet xssfsheet = xssfworkbook.GetSheet(ConfigHelper.WorkSheetName) as XSSFSheet;

                    // Data rows start at sheet row 3 (0-based; the rows above are
                    // the template header) — presumably; confirm against template.xlsx.
                    var beginRow = 3 + startRow - 1;
                    for (var i = 0; i < dt.Rows.Count; i++)
                    {
                        var excelRow = xssfsheet.CreateRow(beginRow++);
                        foreach (var anomateexcel in anomateexcels)
                        {
                            // Anofield = source column name, Excellist = target Excel column index.
                            var excelCell = excelRow.CreateCell(anomateexcel.Excellist.ToInt());
                            excelCell.SetCellType(NPOI.SS.UserModel.CellType.String);
                            var value = dt.Rows[i][anomateexcel.Anofield].ToString();
                            excelCell.SetCellValue(value);
                        }
                    }

                    xssfsheet.ForceFormulaRecalculation = true;
                    // Flush the in-memory workbook back to the file on disk.
                    using (FileStream fs = File.OpenWrite(reportFileName))
                    {
                        xssfworkbook.Write(fs);
                        xssfworkbook.Close();
                    }

                    // Brief pause between pages to ease I/O pressure.
                    Thread.Sleep(100);
                }
            }
            catch (Exception ex)
            {
                _logger.Error($"Export data error, Message: {ex.Message}, StackTrace: {ex.StackTrace}");
            }
            finally
            {
                // Make sure the read handle is released even if a page failed mid-way.
                if (file != null) file.Close();
            }

            sw.Stop();
            _logger.Info($"Date: {currentTime}, processed in {sw.Elapsed.TotalSeconds} seconds");
        }

        #region Get begin and end time

        /// <summary>
        /// Computes the query window as formatted strings. A business day runs
        /// from 07:30 to 07:30 the next day: before 07:30 the window is
        /// [yesterday 07:30, today 07:30); otherwise [today 07:30, tomorrow 07:30).
        /// </summary>
        /// <param name="currentTime">Moment to classify into a business day.</param>
        /// <param name="strBeginTime">Window start, formatted "yyyy-MM-dd HH:mm".</param>
        /// <param name="strEndTime">Window end, formatted "yyyy-MM-dd HH:mm".</param>
        private void GetTime(DateTime currentTime, out string strBeginTime, out string strEndTime)
        {
            DateTime beginTime;
            DateTime endTime;
            if (currentTime < currentTime.Date.AddHours(7.5))
            {
                beginTime = currentTime.Date.AddDays(-1).AddHours(7.5); // 07:30 the previous day
                endTime = currentTime.Date.AddHours(7.5);               // 07:30 today
            }
            else
            {
                beginTime = currentTime.Date.AddHours(7.5);             // 07:30 today
                endTime = currentTime.Date.AddDays(1).AddHours(7.5);    // 07:30 tomorrow
            }

            strBeginTime = beginTime.ToString("yyyy-MM-dd HH:mm");
            strEndTime = endTime.ToString("yyyy-MM-dd HH:mm");
        }

        #endregion

        #region Query one page of data

        /// <summary>
        /// Fetches one page of rows from the export table, ordered by columns
        /// A, AB, AC and windowed by row_number().
        /// </summary>
        /// <param name="strBeginTime">Window start ("yyyy-MM-dd HH:mm").</param>
        /// <param name="strEndTime">Window end ("yyyy-MM-dd HH:mm").</param>
        /// <param name="startRow">First row number of the page (1-based, inclusive).</param>
        /// <param name="endRow">Last row number of the page (inclusive).</param>
        /// <returns>The page as a <see cref="DataTable"/>.</returns>
        private DataTable GetDataTable(string strBeginTime, string strEndTime, int startRow, int endRow)
        {
            // NOTE(review): values are interpolated into the SQL text. They come
            // from internal date formatting and config here, but parameterized
            // queries would be safer if any input ever becomes user-controlled.
            var sql = $@"
select * from 
(
    select row_number() over(order by A asc,AB asc,AC asc) as rownumber, * 
    from {ConfigHelper.ExcelExportTableName}
    where E between '{strBeginTime}' and '{strEndTime}'
) as t 
where rownumber between {startRow} and {endRow}
";
            DataTable dt = DapperSqlHelper.QueryDataTable(sql);
            return dt;
        }

        #endregion

        #region Query total row count

        /// <summary>
        /// Returns the total number of rows in the export table whose column E
        /// falls inside the query window.
        /// </summary>
        /// <param name="strBeginTime">Window start ("yyyy-MM-dd HH:mm").</param>
        /// <param name="strEndTime">Window end ("yyyy-MM-dd HH:mm").</param>
        /// <returns>The row count.</returns>
        private int GetDataTableCount(string strBeginTime, string strEndTime)
        {
            // NOTE(review): same interpolated-SQL caveat as GetDataTable.
            var sql = $"select count(1) from {ConfigHelper.ExcelExportTableName} where E between '{strBeginTime}' and '{strEndTime}'";

            DataTable dt = DapperSqlHelper.QueryDataTable(sql);
            var count = Convert.ToInt32(dt.Rows[0][0]);
            return count;
        }

        #endregion
    }
}

This began as a one-shot read of all matching rows. Because the dataset is large — roughly seven or eight thousand records with about 400 fields each — it was later optimized into paged reads: fetch 100 rows at a time, then append them to the Excel file.

You may also like

Origin www.cnblogs.com/subendong/p/12109568.html