Running IO-bound tasks with multiple threads

import threading
import time
import multiprocessing
# import the pymysql module
import pymysql


class dataMysql(object):
    """对数据库执行查询操作 """

    def __init__(self, ip="127.0.0.1", user="root", password="test", database='test'):
        self.conn = pymysql.connect(host=ip, user=user, password=password, database=database, charset="utf8")
        self.cursor = self.conn.cursor()
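
    # The original class never closes its connection; a small helper like this (an addition
    # for illustration, not part of the original post) makes cleanup explicit:
    def close(self):
        """Release the cursor and the underlying connection."""
        self.cursor.close()
        self.conn.close()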

    def change_data_mysql(self, sql, parameter=None, get=None):
        """
        修改数据,增,修改,删除可共用方法
        :param cursor:
        :param sql:
        :return:
        """
        result = ''
        try:
            if parameter:
                self.cursor.execute(sql, parameter)  # 使用executemany做批量处理
            else:
                self.cursor.execute(sql)
            if get:
                result = self.cursor.fetchall()
            else:
                self.conn.commit()  # 提交修改数据
            last_id = self.cursor.lastrowid  # 变动数据的id
        except Exception as e:
            self.conn.rollback()
            print(e)
        return result
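
# A minimal usage sketch for dataMysql (it assumes only what the constructor above does: a local
# MySQL server with a `test` database containing the `fail` table queried by the helpers below;
# `db` and `rows` are illustrative names):
#   db = dataMysql()
#   rows = db.change_data_mysql("select id from fail where id=%s", parameter=(1,), get=True)
#   db.change_data_mysql("delete from fail where id=%s", parameter=(99,))  # commits automatically
#   db.close()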

def get_data_from_database(test, data=[]):
    """Simulate an IO-bound task: a few queries followed by a blocking sleep."""
    MysqlObj = dataMysql()
    for i in test:
        sql = "select id from fail where id=%s"
        datas = MysqlObj.change_data_mysql(sql, parameter=(i,), get=True)  # parameterized query
        for row in datas:
            data.append(row)
    time.sleep(1)

def get_data_from_database2(test, data=[]):
    """Same IO-bound task, duplicated so each thread below gets its own function."""
    MysqlObj = dataMysql()
    for i in test:
        sql = "select id from fail where id=%s"
        datas = MysqlObj.change_data_mysql(sql, parameter=(i,), get=True)
        for row in datas:
            data.append(row)
    time.sleep(1)

def get_count_from_database(test, data=[]):
    """IO-bound task with a longer blocking sleep."""
    MysqlObj = dataMysql()
    for i in test:
        sql = "select id from fail where id=%s"
        datas = MysqlObj.change_data_mysql(sql, parameter=(i,), get=True)
        for row in datas:
            data.append(row)
    time.sleep(2)

def get_count_from_database2(test, data=[]):
    """IO-bound task with a longer blocking sleep."""
    MysqlObj = dataMysql()
    for i in test:
        sql = "select id from fail where id=%s"
        datas = MysqlObj.change_data_mysql(sql, parameter=(i,), get=True)
        for row in datas:
            data.append(row)
    time.sleep(2)


from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor  # thread pool and process pool

if __name__ == '__main__':
    print("cpu count:", multiprocessing.cpu_count(), "\n")
    table = {}
    for i in ['1','2','3','4']:
        table[i] = []

    print("========== 直接执行IO密集型任务 ==========")
    time_0 = time.time()
    get_data_from_database([1,2,3,4,5], data= table["1"])
    get_data_from_database2([1,2,3,4,5], data= table["2"])
    get_count_from_database([1,2,3,4,5], data= table["3"])
    get_count_from_database2([1,2,3,4,5], data= table["4"])
    print(table)
    print("结束:", time.time() - time_0, "\n")

    print("========== 多线程执行IO密集型任务 ==========")
    time_0 = time.time()
    table = {}
    for i in ['1','2','3','4']:
        table[i] = []
    t1 = threading.Thread(target=get_data_from_database, args=([1,2,3,4,5],), kwargs={'data': table["1"]})
    t2 = threading.Thread(target=get_data_from_database2, args=([1,2,3,4,5],), kwargs={'data': table["2"]})
    t3 = threading.Thread(target=get_count_from_database, args=([1,2,3,4,5],), kwargs={'data': table["3"]})
    t4 = threading.Thread(target=get_count_from_database2, args=([1,2,3,4,5],), kwargs={'data': table["4"]})
    t1.start()
    t2.start()
    t3.start()
    t4.start()
    t1.join()
    t2.join()
    t3.join()
    t4.join()

    # Scenario: the same function needs to run several times

    # for i in range(4):
    #     t = threading.Thread(target=get_data_from_database, args=([1, 2, 3, 4, 5],), kwargs={'data': table["1"]})
    #     t.start()

    # thread_list = [threading.Thread(target=get_data_from_database, args=([1, 2, 3, 4, 5],), kwargs={'data': table["4"]}) for i in range(2)]
    # for t in thread_list:
    #     t.start()
    # for t in thread_list:
    #     if t.is_alive():
    #         t.join()
    print(table)
    print("结束:", time.time() - time_0, "\n")

    


Reposted from blog.csdn.net/weixin_42322206/article/details/105473502