  • Several ways to implement Python multithreading

    Python multithreaded programming

    Commonly used methods in Python multithreaded programming:

    1. join() method: if a thread (or a function) starts another thread and should not continue until that thread has finished its work, the calling thread can call the started thread's join([timeout]) method. timeout is an optional argument giving the maximum time to wait for the thread.

    2. isAlive() method: check whether the thread is still running.

    3. getName() method: get the thread's name.

    4. setDaemon() method: if a child thread should exit together with the main thread when the main thread exits, call setDaemon(True) on the child thread before starting it. A short sketch using these four methods follows this list.
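
    A minimal Python 2 sketch of these four methods (the work() function and the thread name are made up for illustration):

    import threading
    import time

    def work():
        time.sleep(2)

    t = threading.Thread(target=work, name='worker-1')
    # Daemon threads do not keep the process alive; set this before start()
    t.setDaemon(True)
    t.start()
    print t.getName()     # 'worker-1'
    print t.isAlive()     # True while work() is still sleeping
    # Wait at most 5 seconds for the thread to finish
    t.join(5)
    print t.isAlive()     # False once the thread has finished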

    Python thread synchronization:

    (1) Simple thread synchronization with threading's Lock and RLock:

    import threading
    import time

    class mythread(threading.Thread):
        def __init__(self, threadname):
            threading.Thread.__init__(self, name=threadname)

        def run(self):
            global x
            # Only one thread at a time may enter this critical section
            lock.acquire()
            for i in range(3):
                x = x + 1
            time.sleep(1)
            print x
            # Let the next thread enter the critical section
            lock.release()

    if __name__ == '__main__':
        lock = threading.RLock()
        t1 = []
        for i in range(10):
            t = mythread(str(i))
            t1.append(t)
        x = 0
        for i in t1:
            i.start()
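
    The example above uses an RLock, but a plain Lock would work just as well here because each thread acquires the lock only once. The difference is that an RLock may be acquired again by the thread that already holds it, whereas a second acquire() on a plain Lock would block forever. A minimal sketch of that difference (the outer/inner functions are made up for illustration):

    import threading

    rlock = threading.RLock()

    def inner():
        # The same thread re-acquires the RLock without blocking;
        # with a plain threading.Lock() this second acquire would deadlock
        with rlock:
            print 'nested acquire OK'

    def outer():
        with rlock:    # the with block calls acquire()/release() automatically
            inner()

    outer()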

    (2) Thread synchronization with a condition variable:

    # coding=utf-8
    import threading

    class Producer(threading.Thread):
        def __init__(self, threadname):
            threading.Thread.__init__(self, name=threadname)

        def run(self):
            global x
            con.acquire()
            # If the counter is already "full", wait until the consumer signals us
            if x == 10000:
                con.wait()
            for i in range(10000):
                x = x + 1
            # Wake up the consumer, which may be waiting for data
            con.notify()
            print x
            con.release()

    class Consumer(threading.Thread):
        def __init__(self, threadname):
            threading.Thread.__init__(self, name=threadname)

        def run(self):
            global x
            con.acquire()
            # If there is nothing to consume yet, wait until the producer signals us
            if x == 0:
                con.wait()
            for i in range(10000):
                x = x - 1
            # Wake up the producer, which may be waiting for room
            con.notify()
            print x
            con.release()

    if __name__ == '__main__':
        con = threading.Condition()
        x = 0
        p = Producer('Producer')
        c = Consumer('Consumer')
        p.start()
        c.start()
        p.join()
        c.join()
        print x
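
    In general, wait() is best called inside a while loop that re-checks the condition after every wakeup (a notify can arrive when the state has already changed again), and acquire()/release() can be written as a with block. A minimal sketch of that pattern, using an illustrative items list rather than the counter from the example above:

    import threading

    con = threading.Condition()
    items = []

    def produce_one(item):
        with con:                      # acquires and releases the condition's lock
            items.append(item)
            con.notify()               # wake up one waiting consumer

    def consume_one():
        with con:
            while len(items) == 0:     # re-check the condition after each wakeup
                con.wait()
            return items.pop(0)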

    (3) Thread synchronization with a queue:

    # coding=utf-8
    import threading
    import Queue
    import time
    import random

    class Producer(threading.Thread):
        def __init__(self, threadname):
            threading.Thread.__init__(self, name=threadname)

        def run(self):
            global queue
            i = random.randint(1, 5)
            # put() adds an item; the queue handles the locking internally
            queue.put(i)
            print self.getName(), 'put %d to queue' % i
            time.sleep(1)

    class Consumer(threading.Thread):
        def __init__(self, threadname):
            threading.Thread.__init__(self, name=threadname)

        def run(self):
            global queue
            # get() blocks until an item is available
            item = queue.get()
            print self.getName(), 'get %d from queue' % item
            time.sleep(1)

    if __name__ == '__main__':
        queue = Queue.Queue()
        plist = []
        clist = []
        for i in range(3):
            p = Producer('Producer' + str(i))
            plist.append(p)
        for j in range(3):
            c = Consumer('Consumer' + str(j))
            clist.append(c)
        # Start all threads first, then join them, so they actually run concurrently
        for pt in plist:
            pt.start()
        for ct in clist:
            ct.start()
        for pt in plist:
            pt.join()
        for ct in clist:
            ct.join()
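
    Queue.Queue also provides task_done() and join(): the main thread can block until every item that was put into the queue has been processed, instead of joining each worker thread. A minimal sketch of that pattern (the worker function here is made up for illustration):

    import threading
    import Queue

    queue = Queue.Queue()

    def worker():
        while True:
            item = queue.get()
            # ... process item here ...
            queue.task_done()          # tell the queue this item is finished

    for i in range(3):
        t = threading.Thread(target=worker)
        t.setDaemon(True)              # daemon workers exit when the main thread exits
        t.start()

    for i in range(10):
        queue.put(i)

    queue.join()                       # blocks until task_done() was called for every item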

    Another implementation of the producer-consumer pattern:

    # coding=utf-8
    import time
    import threading
    import Queue
    
    class Consumer(threading.Thread):
        def __init__(self, queue):
            threading.Thread.__init__(self)
            self._queue = queue
    
        def run(self):
            while True:
                # queue.get() blocks the current thread until an item is retrieved.
                msg = self._queue.get()
                # Check whether the current message is the "quit" signal
                if isinstance(msg, str) and msg == 'quit':
                    # If so, exit the loop
                    break
                # "Processes" (or in our case, prints) the queue item
                print "I'm a thread, and I received %s!!" % msg
            # Always be friendly!
            print 'Bye byes!'
    
    class Producer(threading.Thread):
        def __init__(self, queue):
            threading.Thread.__init__(self)
            self._queue = queue
    
        def run(self):
            # variable to keep track of when we started
            start_time = time.time()
            # While under 5 seconds..
            while time.time() - start_time < 5:
                # "Produce" a piece of work and stick it in the queue for the Consumer to process
                self._queue.put('something at %s' % time.time())
                # Sleep a bit just to avoid an absurd number of messages
                time.sleep(1)
            # This is the "quit" message that tells the consumer thread to stop.
            self._queue.put('quit')
    
    if __name__ == '__main__':
        queue = Queue.Queue()
        consumer = Consumer(queue)
        consumer.start()
        producer1 = Producer(queue)
        producer1.start()
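
    Note that this version starts a single consumer, so one 'quit' message is enough; with several consumers you would put one 'quit' message per consumer, as the thread-pool example in the next section does.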

    An implementation using a thread pool plus a synchronized queue (Queue):

    # A more realistic thread pool example
    # coding=utf-8
    import time 
    import threading 
    import Queue 
    import urllib2 
    
    class Consumer(threading.Thread): 
        def __init__(self, queue):
            threading.Thread.__init__(self)
            self._queue = queue 
     
        def run(self):
            while True: 
                content = self._queue.get()
                if isinstance(content, str) and content == 'quit':
                    break
                # "Process" the work item: fetch the URL (the response body is not used further)
                response = urllib2.urlopen(content)
            print 'Bye byes!'
     
    def Producer():
        urls = [
            'http://www.python.org', 'http://www.yahoo.com',
            'http://www.scala.org', 'http://cn.bing.com',
            # etc.. 
        ]
        queue = Queue.Queue()
        worker_threads = build_worker_pool(queue, 4)
        start_time = time.time()
        # Add the urls to process
        for url in urls: 
            queue.put(url)  
        # Add the 'quit' message
        for worker in worker_threads:
            queue.put('quit')
        for worker in worker_threads:
            worker.join()
     
        print 'Done! Time taken: {}'.format(time.time() - start_time)
     
    def build_worker_pool(queue, size):
        workers = []
        for _ in range(size):
            worker = Consumer(queue)
            worker.start() 
            workers.append(worker)
        return workers
     
    if __name__ == '__main__':
        Producer()

    Another implementation using a thread pool plus map:

    import urllib2 
    from multiprocessing.dummy import Pool as ThreadPool 
     
    urls = [
        'http://www.python.org', 
        'http://www.python.org/about/',
        'http://www.python.org/doc/',
        'http://www.python.org/download/',
        'http://www.python.org/community/'
        ]
     
    # Make the Pool of workers
    pool = ThreadPool(4) 
    # Open the urls in their own threads
    # and return the results
    results = pool.map(urllib2.urlopen, urls)
    # Close the pool and wait for the work to finish
    pool.close() 
    pool.join()
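
    Because multiprocessing.dummy runs the pool workers as threads, this approach suits I/O-bound work such as downloading pages, and pool.map returns its results in the same order as the input list. A small follow-up sketch along the same lines (the page_size helper is made up for illustration):

    import urllib2
    from multiprocessing.dummy import Pool as ThreadPool

    def page_size(url):
        # Fetch one URL and return the length of its body
        return len(urllib2.urlopen(url).read())

    urls = ['http://www.python.org', 'http://www.python.org/about/']

    pool = ThreadPool(4)
    sizes = pool.map(page_size, urls)    # results come back in input order
    pool.close()
    pool.join()
    for url, size in zip(urls, sizes):
        print url, size
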
  • Original article: https://www.cnblogs.com/xbkp/p/5444167.html