Read an Excel file and write it to the database in one batch with executemany

'''
    Read an Excel file and write its contents to the database in one batch.
    Inserting row by row is slow; executemany batches the work and cuts the time dramatically.
    In my own test, inserting 10,000 numbers took only about 0.75 s with executemany,
    while execute needed more than 4.5 s (see the benchmark sketch after the code listing).
'''
import sqlite3
import xlrd
import os

class Batch_deal(object):

    path = 'XXX'

    def load_files(self):
        # Collect the paths of the Excel files in the directory
        files = os.listdir(self.path)   # every file name in the directory
        for file in files:
            if file.endswith('.xls'):
                yield os.path.join(self.path, file)

    def deal_file(self, file):
        # Open the workbook, walk the data rows, and collect the values into a list
        data = xlrd.open_workbook(file)     # read the Excel file
        table = data.sheets()[0]            # first sheet
        nrows = table.nrows                 # total number of rows
        param = []
        for row_ in range(1, nrows):        # skip the header row; rows are 0-indexed, so valid indices stop at nrows - 1
            param.append([table.cell(row_, 0).value, table.cell(row_, 1).value, table.cell(row_, 2).value])
        # save the whole batch in one go
        sql = self.insert_mass()
        self.batch_insert(sql, param)

    def batch_insert(self, sql, param):     # batch insert: sql is the INSERT statement, param is the list of row values
        conn = sqlite3.connect('zaofa.db')  # SQLite database file
        cursor = conn.cursor()              # create a cursor
        try:
            cursor.executemany(sql, param)  # execute the insert for every row in a single call
            conn.commit()
        except Exception as e:
            print(e)
            conn.rollback()                 # roll back: if any row fails, nothing is inserted
        finally:
            conn.close()

    def insert_mass(self):
        sql = "insert into zaofa(name, age, phone) values (?, ?, ?)"
        return sql

    def main(self):
        # iterate over every Excel file and process them one by one
        files = self.load_files()
        for file in files:
            self.deal_file(file)


if __name__ == '__main__':

    batch_deal = Batch_deal()
    batch_deal.main()
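
The INSERT statement assumes that a zaofa table with name, age and phone columns already exists in zaofa.db. A minimal setup sketch follows; the column types here are my assumption, since the original post does not show the schema.

import sqlite3

conn = sqlite3.connect('zaofa.db')
# column types are assumed; adjust them to match your actual data
conn.execute("create table if not exists zaofa(name text, age integer, phone text)")
conn.commit()
conn.close()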
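
The timing claim above (about 0.75 s with executemany versus more than 4.5 s with execute for 10,000 numbers) can be checked with a small script. This is a minimal sketch, not the original benchmark: the bench table and the in-memory database are my own choices, and the exact numbers will vary by machine.

import sqlite3
import time

def time_inserts(n=10000):
    conn = sqlite3.connect(':memory:')   # in-memory database keeps the comparison about the insert path only
    cursor = conn.cursor()
    cursor.execute("create table bench(num integer)")
    rows = [(i,) for i in range(n)]

    start = time.time()
    for row in rows:                     # one execute() call per row
        cursor.execute("insert into bench values (?)", row)
    conn.commit()
    print('execute:     %.2fs' % (time.time() - start))

    cursor.execute("delete from bench")
    conn.commit()

    start = time.time()
    cursor.executemany("insert into bench values (?)", rows)   # one batched call for all rows
    conn.commit()
    print('executemany: %.2fs' % (time.time() - start))
    conn.close()

if __name__ == '__main__':
    time_inserts()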

Reposted from blog.csdn.net/Luzaofa/article/details/81454026