A crawler example: concurrent page fetching with a thread pool.

import time
from multiprocessing.dummy import Pool  # thread-backed Pool (same API as the process Pool)

import requests  # third-party; used by the fetch helper below

def my_requests(url):
    """Fetch *url* and return the response body as text.

    Pool.map passes exactly one argument to the mapped callable, so this
    wrapper takes only the URL.

    NOTE(review): `headers` is read from module scope but is not defined
    anywhere in this snippet — it must be supplied before running.
    """
    return requests.get(url=url, headers=headers).text


# Time how long the concurrent fetch of all URLs takes.
start = time.time()

urls = [
    'http://127.0.0.1:5000/bobo',
    'http://127.0.0.1:5000/jay',
    'http://127.0.0.1:5000/tom',
]

# Pool.map(func, iterable):
#   - func must accept exactly one argument;
#   - each element of `iterable` is handed to `func` on a worker thread
#     (up to 3 running concurrently here);
#   - results come back as a list in the same order as `urls`.
# The `with` block closes and joins the pool when the work is done.
with Pool(3) as pool:
    page_texts = pool.map(my_requests, urls)
print(page_texts)

# Elapsed wall-clock time for the whole batch.
print(time.time() - start)

 

Originally published at: www.cnblogs.com/songzhixue/p/11303823.html