Python multiprocessing: a brief introduction

Copyright notice: https://blog.csdn.net/dashoumeixi/article/details/80945720
The multiprocessing interface is similar to threading's;
   to run on Windows, add:
                   if __name__ == '__main__':

map_async and apply_async appear below; their behavior matches concurrent.futures' Executor.map and Executor.submit (see the sketch after this comparison):

Executor.map    ==> Pool.map_async    both return all results at once, in order
Executor.submit ==> Pool.apply_async  returns as each individual task completes
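For comparison, here is a minimal concurrent.futures sketch of those same two patterns (the work function is only for illustration):

import concurrent.futures, os

def work(x):
    return x * x, os.getpid()

if __name__ == '__main__':
    with concurrent.futures.ProcessPoolExecutor(max_workers=4) as ex:
        # Executor.map: results come back in submission order, like Pool.map_async
        for value, pid in ex.map(work, range(4)):
            print('map ->', value, 'from pid', pid)
        # Executor.submit: each Future completes independently, like Pool.apply_async
        futures = [ex.submit(work, i) for i in range(4)]
        for fut in concurrent.futures.as_completed(futures):
            print('submit ->', fut.result())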

The examples below do not use the subclassing style; as with threading, you simply override run (a minimal sketch follows).
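
A minimal sketch of that subclassing style (the Worker class name is illustrative, not from the original post):

import multiprocessing, os

class Worker(multiprocessing.Process):
    def run(self):                         # override run(), exactly as with threading.Thread
        print('worker pid:', os.getpid(), ', name:', self.name)

if __name__ == '__main__':
    w = Worker(name='worker-1')
    w.start()
    w.join()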

    For inter-process communication, multiprocessing.Queue and JoinableQueue are used the same way as queue.Queue;

    for example, get() blocks. If JoinableQueue.join() is used, then task_done() must be called for each item.

   

import time,queue,threading,multiprocessing,os

def get_process():
    # in order: current process id, parent process id, whether it is still running, and the process name
    #os.getpid == multiprocessing.current_process().pid
    print('pid:',os.getpid(),' is running',',ppid:',os.getppid(),
         ',isalive:', multiprocessing.current_process().is_alive(),',name:',
          multiprocessing.current_process().name)
    time.sleep(3)
    print(os.getpid(), ' finished')

if __name__ == '__main__':
    p = multiprocessing.Process(target=get_process,name='hahaha')  # same parameters as threading.Thread; to pass arguments use args=(xxx,)
    p.daemon = True       # defaults to False, i.e. the main program waits for the child to finish; set to True, the child becomes a background (daemon) process
    p.start()
    #p.terminate()        terminates the child; note that after terminating you should still call join(), otherwise is_alive() stays True
    p.join()              # waits for the child to finish, regardless of daemon; similar to Unix wait
    print('main end')

Inter-process communication:

1. multiprocessing.Queue

import multiprocessing,os,time

# multiprocessing.Queue is used the same way as queue.Queue
msg_format = 'pid:{} {} {}'

def getter(q:multiprocessing.Queue):
    while 1:
        value = q.get()                 # blocks until data is available
        print('\t\t'+msg_format.format(multiprocessing.current_process().pid,'get',value))
def setter(q:multiprocessing.Queue):
    pid = multiprocessing.current_process().pid
    for i in range(3):
        q.put(i)
        print(msg_format.format(pid,'set',i))
    q.close()                           # close() indicates the current process will not put any more data on this queue


if __name__ == '__main__':
    q = multiprocessing.Queue()
    get_process = multiprocessing.Process(target=getter,args=(q,))
    set_process = multiprocessing.Process(target=setter,args=(q,))
    get_process.start()
    set_process.start()
    pid = os.getpid()
    while 1:
        print('main thread {} . getprocess alive : {} , setprocess alive : {}'.format(
            pid,get_process.is_alive(),set_process.is_alive()
        ))
        time.sleep(5)
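
As written, getter() never returns and the main process loops forever. One common variation (a sketch, not part of the original code) is to push a sentinel value such as None so the consumer knows when to stop:

import multiprocessing

def getter(q):
    while 1:
        value = q.get()
        if value is None:                 # sentinel: the producer is finished
            break
        print('get', value)

def setter(q):
    for i in range(3):
        q.put(i)
    q.put(None)                           # tell the consumer to stop

if __name__ == '__main__':
    q = multiprocessing.Queue()
    g = multiprocessing.Process(target=getter, args=(q,))
    s = multiprocessing.Process(target=setter, args=(q,))
    g.start(); s.start()
    g.join(); s.join()                    # both children now exit on their own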

2. JoinableQueue (for processes): same usage as queue.Queue (thread-safe)

import sys,multiprocessing,os,time

"""
    JoinableQueue is used the same way as queue.Queue;
    one is for processes, the other for threads
"""
def getter(q:multiprocessing.JoinableQueue):
    while 1:
        time.sleep(1)
        value = q.get()
        print('pid:',os.getpid(),',get :', value)
        q.task_done()                   # if this line is commented out, q.join() will block forever

if __name__ =='__main__':
    q = multiprocessing.JoinableQueue()
    p1 = multiprocessing.Process(target=getter,args=(q,))
    p1.start()
    for i in range(5):
        q.put(i)
    q.join()

    while 1:
        print('main process , children:',multiprocessing.active_children())
        time.sleep(1)

3. Pipe

import sys,multiprocessing,os,time

"""
    With a Pipe, two processes can send data to each other.
    recv blocks until data arrives.
    close closes the connection.
"""
def sender(pipe:multiprocessing.Pipe):
    pipe.send({'1':1,'2':2})
    print('sender done sending!')
    pipe.close()

def recver(pipe:multiprocessing.Pipe):
    time.sleep(3)
    print('recver ready!')
    obj =  pipe.recv()
    print('recver recv:',obj)
    pipe.close()


if __name__ =='__main__':
    con1 , con2 = multiprocessing.Pipe()
    p1 = multiprocessing.Process(target=sender,args=(con1,),name='sender')
    p2 = multiprocessing.Process(target=recver,args=(con2,),name='recver')
    p2.start()
    p1.start()

    while 1:
        time.sleep(3)
        print( ' active process :' , multiprocessing.active_children())
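
The example above only sends in one direction. A Pipe is duplex by default, so the same pair of connections can also carry a reply back; a small sketch (not from the original post):

import multiprocessing

def worker(conn):
    request = conn.recv()                  # blocks until the parent sends
    conn.send(request * 2)                 # reply on the same connection
    conn.close()

if __name__ == '__main__':
    parent_conn, child_conn = multiprocessing.Pipe()   # duplex=True by default
    p = multiprocessing.Process(target=worker, args=(child_conn,))
    p.start()
    parent_conn.send(21)
    print('reply:', parent_conn.recv())                # prints: reply: 42
    p.join()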



4. Pool: map, apply, imap are all synchronous calls

"""
        map , apply 都是同步函数,区别是apply 等到返回再执行下个进程
        map 是等到所有进程返回
"""
def sub(x):
    time.sleep(2)
    print('sub process :{} value:{}'.format(multiprocessing.current_process().pid
                                            ,x))
    return x

if __name__ == '__main__':
    pool = multiprocessing.Pool(4)
    for i in  range(4):
        res = pool.apply(sub,args=(i,))
        print('main :',res)
    # res = pool.map(sub,[1,2,3,4])
    # pool.close()

    while 1:
        time.sleep(1)
        print('main thread , active :',multiprocessing.active_children())
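
The heading mentions imap as well: it hands results back lazily, one at a time in submission order, and each step of the iteration blocks until that result is ready. A minimal sketch:

import multiprocessing, time

def sub(x):
    time.sleep(1)
    return x * x

if __name__ == '__main__':
    pool = multiprocessing.Pool(4)
    # imap returns an iterator; each iteration blocks until the next in-order result is ready
    for res in pool.imap(sub, range(4)):
        print('imap ->', res)
    pool.close()
    pool.join()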

  
map_async: an asynchronous call.
import sys,multiprocessing,os,time,threading

"""
   map_async is asynchronous; it returns all results at once, i.e. it waits for every result
   optional: a callback function, called on success; otherwise error_callback is called
"""
def sub(x):
    time.sleep(2)
    print('sub process pid :{} , tid:{} value:{}'.format(multiprocessing.current_process().pid,
                                                         threading.currentThread().ident
                                                        ,x))
    return x
def call_back(v):
    print('pid:', os.getpid(),',tid:',threading.currentThread().ident,',v:',v)

if __name__ == '__main__':
    print('main pid:', os.getpid(), ',tid:', threading.currentThread().ident)
    pool = multiprocessing.Pool(4)
    results = pool.map_async(sub,range(10),callback=call_back)
    pool.close()
    pool.join()
    print(results.get())
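
The error_callback mentioned above is only invoked when a worker raises; the example never triggers it, so here is a sketch (the may_fail function is made up for illustration):

import multiprocessing

def may_fail(x):
    if x == 2:
        raise ValueError('bad value: {}'.format(x))   # one task fails on purpose
    return x

def on_error(exc):
    print('error_callback got:', exc)

if __name__ == '__main__':
    pool = multiprocessing.Pool(2)
    result = pool.map_async(may_fail, range(4), error_callback=on_error)
    pool.close()
    pool.join()
    # result.get() would re-raise the ValueError here; callback is not called because a task failed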


apply_async:

"""
   apply_async :
   The only thing to watch out for is:
   for i in range(10):
       res = pool.apply_async(sub,args=(i,),callback=call_back)
       res.get() 
   do not use it like this, otherwise execution becomes synchronous again
"""
def sub(x):
    time.sleep(2)
    print('sub process pid :{} , tid:{} value:{}'.format(multiprocessing.current_process().pid,
                                                         threading.currentThread().ident
                                                        ,x))
    return x
def call_back(v):
    print('----> callback pid:', os.getpid(),',tid:',threading.currentThread().ident,',v:',v)

if __name__ == '__main__':
    print('main pid:', os.getpid(), ',tid:', threading.currentThread().ident)
    pool = multiprocessing.Pool(4)
    results = [pool.apply_async(sub,args=(i,),callback=call_back) for i in range(10)]
    pool.close()
    pool.join()
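
Since get() is not called inside the list comprehension, all ten calls run in parallel; after pool.join() every AsyncResult in results is ready, so [res.get() for res in results] would now return immediately with the values in submission order.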
