Using a decorator to measure Python function execution time, comparing serial, multi-process, and multi-threaded approaches

A decorator for timing function execution

runTime.py

import datetime
import functools


def getRunTime(f):
  """Decorator that prints the wall-clock time a call to *f* takes.

  The elapsed time is printed as ``<name> usetime: <timedelta>``.
  The wrapped function's return value is passed through unchanged.
  """
  @functools.wraps(f)  # preserve f.__name__ / __doc__ on the wrapper
  def x(*args, **kwargs):
    start = datetime.datetime.now()
    # Bug fix: the original discarded f's return value, breaking any
    # decorated function that returns a result.
    result = f(*args, **kwargs)
    end = datetime.datetime.now()
    print("{:<10} usetime: {}\n".format(f.__name__, str(end - start)))
    return result

  return x

Using the timing decorator to benchmark multiprocessing pool variants

multiprocessmap.py

from multiprocessing import Pool
import runTime as rt

# Shared workload: 10,000,000 integers fed to every benchmark below.
sumc = range(10000000)


def add(x):
  """CPU-bound dummy workload: repeatedly multiply x; the result is discarded."""
  i = 0
  while i < 10:
    x = x * i
    i += 1


@rt.getRunTime
def r1():
  """Serial baseline: run add() over every element of sumc in one process."""
  for item in sumc:
    add(item)


@rt.getRunTime
def poolMap():
  """Benchmark Pool.map_async with 3 worker processes over sumc.

  Fix: the original discarded the AsyncResult, so any exception raised
  in a worker was silently swallowed. Calling ``.get()`` blocks until
  all tasks finish and re-raises worker exceptions.
  """
  with Pool(processes=3) as pool:
    async_result = pool.map_async(add, sumc)
    pool.close()
    async_result.get()  # wait for completion; propagate worker errors
    pool.join()


@rt.getRunTime
def poolImap():
  """Benchmark Pool.imap (ordered) with 8 workers, chunksize 100000.

  Fixes two defects in the original: it called ``imap_unordered``
  despite its name (making it a duplicate of poolImap_unordered), and
  it never consumed the lazy result iterator, so result retrieval was
  excluded from the timing and worker exceptions were lost.
  """
  with Pool(processes=8) as pool:
    # Consume the iterator so every result is retrieved (and any worker
    # exception re-raised) before the timer stops.
    for _ in pool.imap(add, sumc, 100000):
      pass
    pool.close()
    pool.join()


@rt.getRunTime
def poolImap_unordered():
  """Benchmark Pool.imap_unordered with 8 workers, chunksize 100000.

  Fix: the original never consumed the lazy result iterator, so result
  retrieval was excluded from the measured time and worker exceptions
  were silently lost.
  """
  with Pool(processes=8) as pool:
    # Drain the iterator: results arrive in completion order and any
    # worker exception is re-raised here, inside the timed region.
    for _ in pool.imap_unordered(add, sumc, 100000):
      pass
    pool.close()
    pool.join()


if __name__ == "__main__":
  # Run each benchmark in turn: serial baseline first, then the pool variants.
  for benchmark in (r1, poolMap, poolImap, poolImap_unordered):
    benchmark()

Further reading

Origin blog.csdn.net/qq_43373608/article/details/108096097