// Batch-insert rows parsed from the data file into the monthly T_PROD table.
// The id column comes from the seq_rep_ogn_prod_id sequence; the other eight
// columns are bound from the semicolon-separated fields of each input line.
String sql = "insert into " + T_PROD + month
        + "(id,SERV_ID,ACC_NBR,PROD_ID,IS_PHS_TK,CREATE_DATE,CUST_ID,STATE,CITY)"
        + " values(seq_rep_ogn_prod_id.nextval,?,?,?,?,?,?,?,?)";
try {
    in = new BufferedReader(new FileReader(d.getPath() + filename));
    // Disable auto-commit so rows can be committed in batches of commitCount.
    conn.setAutoCommit(false);
    pstm = conn.prepareStatement(sql);
    String linedata = in.readLine();
    int i = 1; // 1-based line counter: used in log messages and for the commit cadence
    while (linedata != null) {
        String[] data = linedata.split("\\;");
        if (data.length != 7) {
            // Malformed line (wrong field count): log it and skip.
            log("read 第 " + i + "行" + linedata);
        } else {
            try {
                // Numeric columns are nullable: an empty field maps to SQL NULL.
                Long SERV_ID = data[0].trim().length() != 0 ? Long.valueOf(data[0].trim()) : null;
                String ACC_NBR = data[1].trim();
                Long PROD_ID = data[2].trim().length() != 0 ? Long.valueOf(data[2].trim()) : null;
                String IS_PHS_TK = data[3].trim();
                // Field 4 is a date: values of <= 8 chars are parsed with sim2,
                // longer ones with sim. NOTE(review): presumably date-only vs.
                // date+time format patterns — confirm against their declarations.
                String rawDate = data[4].trim();
                java.util.Date date = rawDate.length() <= 8 ? sim2.parse(rawDate) : sim.parse(rawDate);
                // Oracle needs the full time component, hence Timestamp.
                Timestamp CREATE_DATE = new Timestamp(date.getTime());
                Long CUST_ID = data[5].trim().length() != 0 ? Long.valueOf(data[5].trim()) : null;
                String STATE = data[6].trim();
                String CITY = city.trim();

                if (SERV_ID == null) {
                    pstm.setNull(1, java.sql.Types.BIGINT);
                } else {
                    pstm.setLong(1, SERV_ID);
                }
                pstm.setString(2, ACC_NBR);
                if (PROD_ID == null) {
                    pstm.setNull(3, java.sql.Types.BIGINT);
                } else {
                    pstm.setLong(3, PROD_ID);
                }
                pstm.setString(4, IS_PHS_TK);
                pstm.setTimestamp(5, CREATE_DATE);
                if (CUST_ID == null) {
                    pstm.setNull(6, java.sql.Types.BIGINT);
                } else {
                    pstm.setLong(6, CUST_ID);
                }
                pstm.setString(7, STATE);
                pstm.setString(8, CITY);
                pstm.addBatch();
                // Flush and commit every commitCount rows to bound memory use.
                // (i starts at 1, so the original "&& i != 0" guard was redundant.)
                if (i % commitCount == 0) {
                    pstm.executeBatch();
                    pstm.clearBatch();
                    conn.commit();
                }
            } catch (Exception e) {
                // One bad line must not abort the whole load: log and continue.
                e.printStackTrace();
                log("read 第 " + i + "行 " + linedata);
            }
        }
        i++;
        linedata = in.readLine();
    }
    // Flush whatever is left in the final, possibly partial batch.
    pstm.executeBatch();
    pstm.clearBatch();
    conn.commit();
} catch (Exception e1) {
    // BUGFIX: roll back the uncommitted tail of the batch before giving up
    // (the original left it pending on the connection).
    try {
        conn.rollback();
    } catch (Exception ignored) {
        // best effort — the connection may already be unusable
    }
    log(e1.getMessage());
} finally {
    // BUGFIX: the original closed the reader only on success and never closed
    // the statement, leaking both on any failure. Also restore auto-commit so
    // the connection is handed back in its default state.
    try {
        if (in != null) {
            in.close();
        }
    } catch (Exception ignored) {
        // nothing more we can do while cleaning up
    }
    try {
        if (pstm != null) {
            pstm.close();
        }
    } catch (Exception ignored) {
        // nothing more we can do while cleaning up
    }
    try {
        conn.setAutoCommit(true);
    } catch (Exception ignored) {
        // nothing more we can do while cleaning up
    }
}
上面的代码中最主要的就是几句而已,用的就是下面的这几句,可以把数据插入操作的效率提高上百倍。
先取消自动commit()这个方法,用下面这句:
conn.setAutoCommit(false);
预编译一下sql语句:
pstm = conn.prepareStatement(sql);
根据sql中的位置来设置相应的参数:
pstm.setLong(3, PROD_ID);
设置完一条就加到bath里面去
pstm.addBatch();
每一千条就提交一次,不然内存会受不了的,1000这个大小可以根据实际内存来设置,
下面这两个方法要在commit()之前调用:executeBatch()真正执行批中积累的SQL语句,clearBatch()清空批缓存,从而释放掉内存。
pstm.executeBatch();
pstm.clearBatch();
if (i % commitCount == 0 && i != 0) {
    // 利用executeBatch方法执行1000条SQL语句
    pstm.executeBatch();
    pstm.clearBatch();
    conn.commit();
}
循环结束后还要再执行并提交一次,把不足一批的剩余数据也写入数据库:
pstm.executeBatch();
pstm.clearBatch();
conn.commit();
// close(); //关闭资源
最后一定要关闭数据库连接,不然这个连接会耗费很多资源的。