1. Multithreading
import threading
from time import ctime, sleep

def music(func):
    for i in range(2):
        print("I was listening to %s. %s" % (func, ctime()))
        sleep(1)

def move(func):
    for i in range(2):
        print("I was at the %s! %s" % (func, ctime()))
        sleep(5)

threads = []
t1 = threading.Thread(target=music, args=('爱情买卖',))
threads.append(t1)
t2 = threading.Thread(target=move, args=('阿凡达',))
threads.append(t2)

if __name__ == '__main__':
    for t in threads:
        t.daemon = True      # daemon threads are killed when the main program exits
        t.start()
    for t in threads:        # wait for both threads, not just the last one started
        t.join()
    print("all over %s" % ctime())
2. Thread Pool (Implemented by Hand)
'''
The idea of a thread pool: instead of hiring 1000 people to do 1000 jobs,
we assign only 5 people to do all of them. Those 5 people are the pool;
they keep running the whole time and only stop when the main program ends.
'''
from queue import Queue
from threading import Thread
import random
import time

def person(i, q):
    while True:                           # this worker is always ready to take a job
        q.get()
        print("Thread", i, "is doing the job")
        time.sleep(random.randint(1, 5))  # each worker takes a different amount of time per job,
                                          # so each one naturally ends up handling a different share
        q.task_done()                     # report that the job just taken is finished

q = Queue()

# hand out the 1000 jobs
for x in range(1000):
    q.put(x)

# call in the 5 workers
for i in range(5):
    worker = Thread(target=person, args=(i, q))
    worker.daemon = True
    worker.start()

q.join()  # exit once the 5 workers have finished all 1000 jobs
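The comment above claims that because each worker sleeps a different amount of time, the jobs end up unevenly distributed. Here is a minimal sketch of that claim, not part of the original: the counts Counter, the shorter sleep, and the smaller batch of 50 jobs are my additions so the demo finishes quickly and the distribution is visible.

from collections import Counter
from queue import Queue
from threading import Thread
import random
import time

counts = Counter()   # counts[i] = number of jobs worker i handled (illustration only)
q = Queue()

def person(i, q):
    while True:
        q.get()
        counts[i] += 1                  # only worker i writes counts[i], so no lock is needed
        time.sleep(random.random())     # shorter "work" than above so the demo finishes fast
        q.task_done()

for x in range(50):                     # a much smaller batch than the 1000 above
    q.put(x)

for i in range(5):
    Thread(target=person, args=(i, q), daemon=True).start()

q.join()
print(counts)   # e.g. Counter({3: 13, 0: 11, ...}) -- uneven, as the comment says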
3. Thread Pool (Using a Library)
Look at that: it only takes four lines of code, and three of them are boilerplate.
import requests
from multiprocessing.dummy import Pool as ThreadPool

urls = [
    'http://www.baidu.com',
    'http://www.163.com',
    'http://www.sina.cn',
    'http://www.live.com',
    'http://www.mozila.org',
    'http://www.sohu.com',
    'http://www.tudou.com',
    'http://www.qq.com',
    'http://www.taobao.com',
    'http://www.alibaba.com',
]

# Make the pool of workers
pool = ThreadPool(4)

# Note the map call here!
# Open the urls in their own threads and return the results
results = pool.map(requests.get, urls)

# Close the pool and wait for the work to finish
pool.close()
pool.join()
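A hypothetical follow-up, not part of the original: pool.map returns the responses in the same order as urls, so the two lists can be zipped back together for inspection.

# Assumes `urls` and `results` from the block above.
for url, resp in zip(urls, results):
    print(url, resp.status_code, len(resp.text))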
from multiprocessing import Pool

def f(x):
    return x * x

if __name__ == '__main__':     # required on platforms that spawn rather than fork
    with Pool(5) as p:
        print(p.map(f, [1, 2, 3]))
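One thing the two examples above leave implicit: multiprocessing.dummy.Pool runs its workers as threads (a good fit for I/O-bound jobs such as the requests.get calls), while multiprocessing.Pool runs them as separate processes (a better fit for CPU-bound work like f(x) = x * x). Both expose the same interface, so switching is a one-import change. A minimal sketch, with square as a made-up stand-in task:

from multiprocessing.dummy import Pool   # thread-backed pool
# from multiprocessing import Pool       # process-backed pool -- same interface

def square(x):        # stand-in task for illustration
    return x * x

if __name__ == '__main__':
    with Pool(4) as pool:
        print(pool.map(square, range(10)))   # [0, 1, 4, 9, 16, 25, 36, 49, 64, 81]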
4. Going Further (the Producer-Consumer Pattern)
Compared with the classic approach this is much simpler: it is efficient, easy to understand, and leaves few deadlock traps.
from multiprocessing import Pool, Queue
import redis
import requests

queue = Queue(20)

def consumer():
    # pull URLs out of the redis list 'pool' and feed them into the shared queue
    r = redis.Redis(host='127.0.0.1', port=6379, db=1)
    while True:
        k, url = r.blpop(['pool'])
        queue.put(url)

def worker():
    # take a URL off the shared queue and fetch it
    while True:
        url = queue.get()
        print(requests.get(url).text)

def process(ptype):
    try:
        if ptype:
            consumer()
        else:
            worker()
    except:
        pass

# Note: sharing `queue` as a module-level global relies on the child processes
# inheriting it via fork (the default start method on Linux).
pool = Pool(5)
print(pool.map(process, [1, 0, 0, 0, 0]))  # 1 consumer process, 4 worker processes
pool.close()
pool.join()
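The snippet above only drains the redis list pool; nothing in it ever pushes URLs in. A minimal seeding sketch, assuming the same redis instance and key as above (the URL list here is just an example):

import redis

r = redis.Redis(host='127.0.0.1', port=6379, db=1)
for url in ['http://www.baidu.com', 'http://www.qq.com', 'http://www.163.com']:
    r.rpush('pool', url)   # consumer() blocks on blpop('pool') until items arrive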