Critique this Python code (a crawler with a thread pool)

Date: 2010-09-06 18:44:14

Tags: python multithreading web-crawler pool

How good is this Python code? It needs critique. There is a bug in it: sometimes the script prints "ALL WAIT - CAN FINISH!" and freezes (nothing further happens), but I cannot find the reason why this happens.

A website scraper using a thread pool:

import sys
from urllib import urlopen
from BeautifulSoup import BeautifulSoup, SoupStrainer
import re
from Queue import Queue, Empty
from threading import Thread

W_WAIT = 1
W_WORK = 0

class Worker(Thread):
    """Thread executing tasks from a given tasks queue"""
    def __init__(self, pool, tasks):
        Thread.__init__(self)
        self.tasks = tasks
        self.daemon = True
        self.start()
        self.pool = pool
        self.state = None

    def is_wait(self):
        return self.state == W_WAIT


    def run(self):
        while True:
            #if all workers wait - time to exit
            print "CHECK WAIT: !!! ",self.pool.is_all_wait()
            if self.pool.is_all_wait():
                print "ALL WAIT - CAN FINISH!"
                return
            try:
                func, args, kargs = self.tasks.get(timeout=3)
            except Empty:
                print "task wait timeout"
                continue

            self.state = W_WORK
            print "START !!! in thread %s" % str(self)
            #print args

            try: func(*args, **kargs)
            except Exception, e: print e
            print "!!! STOP in thread %s", str(self)
            self.tasks.task_done()
            self.state = W_WAIT
            #threads can fast empty it!
            #if self.tasks.qsize() == 0:
            #    print "QUIT!!!!!!"
            #    break

class ThreadPool:
    """Pool of threads consuming tasks from a queue"""
    def __init__(self, num_threads):
        #self.tasks = Queue(num_threads)
        self.tasks = Queue()
        self.workers = []
        for _ in range(num_threads): 
            self.workers.append(Worker(self,self.tasks))


    def add_task(self, func, *args, **kargs):
        """Add a task to the queue"""
        self.tasks.put((func, args, kargs))

    def wait_completion(self):
        """Wait for completion of all the tasks in the queue"""
        self.tasks.join()

    def is_all_wait(self):
        for w in self.workers:
            if not w.is_wait():
                return False
        return True

visited = set()
queue = Queue()
external_links_set = set()
internal_links_set = set()
external_links = 0

def process(pool,host,url):

    try:

        content = urlopen(url).read()
    except UnicodeDecodeError:
        return


    for link in BeautifulSoup(content, parseOnlyThese=SoupStrainer('a')):
        try:
            href = link['href']
        except KeyError:
            continue


        if not href.startswith('http://'):
            href = 'http://%s%s' % (host, href)
        if not href.startswith('http://%s%s' % (host, '/')):
            continue

        internal_links_set.add(href)


        if href not in visited:
            visited.add(href)
            pool.add_task(process,pool,host,href)

        else:
            pass

def start(host,charset):
    pool = ThreadPool(20)
    pool.add_task(process,pool,host,'http://%s/' % (host))
    pool.wait_completion()

start('evgenm.com','utf8') 

Thanks for the help! I have made a new implementation. What can you say about this code #2?

==================== Attempt #2 ====================

import sys
from urllib import urlopen
from BeautifulSoup import BeautifulSoup, SoupStrainer
import re
from Queue import Queue, Empty
from threading import Thread


W_STOP = 1

class Worker(Thread):
    """Thread executing tasks from a given tasks queue"""
    def __init__(self, pool, tasks):
        Thread.__init__(self)
        self.tasks = tasks
        self.daemon = True
        self.pool = pool
        self.state = None
        self.start()



    def stop(self):
        self.state = W_STOP

    def run(self):
        while True:
            if self.state == W_STOP:
                print "\ncalled stop"
                break
            try:
                func, args, kargs = self.tasks.get(timeout=3)
            except Empty:
                continue
            print "\n***START*** %s" % str(self)
            try: 
                func(*args, **kargs)
            except Exception, e: 
                print e
            print "\n***STOP*** %s", str(self)
            self.tasks.task_done()



class ThreadPool:
    """Pool of threads consuming tasks from a queue"""
    def __init__(self, num_threads):
        #self.tasks = Queue(num_threads)
        self.tasks = Queue()
        self.workers = []
        for _ in range(num_threads): 
            self.workers.append(Worker(self,self.tasks))


    def add_task(self, func, *args, **kargs):
        """Add a task to the queue"""
        self.tasks.put((func, args, kargs))

    def wait_completion(self):
        """Wait for completion of all the tasks in the queue"""
        self.tasks.join()

    def stop_threads(self):
        for w in self.workers:
            w.stop()

    def wait_stop(self):
        self.wait_completion()
        self.stop_threads()



visited = set()
queue = Queue()
external_links_set = set()
internal_links_set = set()
external_links = 0

def process(pool,host,url):

    try:

        content = urlopen(url).read()
    except UnicodeDecodeError:
        return


    for link in BeautifulSoup(content, parseOnlyThese=SoupStrainer('a')):
        try:
            href = link['href']
        except KeyError:
            continue


        if not href.startswith('http://'):
            href = 'http://%s%s' % (host, href)
        if not href.startswith('http://%s%s' % (host, '/')):
            continue

        internal_links_set.add(href)


        if href not in visited:
            visited.add(href)
            pool.add_task(process,pool,host,href)

        else:
            pass

def start(host,charset):
    pool = ThreadPool(20)
    pool.add_task(process,pool,host,'http://%s/' % (host))
    pool.wait_stop()

start('evgenm.com','utf8')

2 Answers:

Answer 0 (score: 1)

You are sharing state between threads (i.e., in is_all_wait) without any synchronization. Moreover, the fact that all threads are "waiting" is not a reliable indicator that the queue is empty (for example, they may all be in the middle of finishing a task and not yet have picked up the next one). I suspect the threads occasionally exit before the queue is truly empty. If that happens often enough, you will be left with tasks in the queue but no threads to run them, so queue.join() will wait forever.

My recommendations:

  1. Get rid of is_all_wait; it is not a reliable indicator.
  2. Get rid of the worker state; it is not necessary.
  3. Rely on queue.join() to tell you when everything has been processed.
  4. If you need to kill the threads (for example, because this is part of a larger, long-running program), do so after queue.join() returns, as sketched below.
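
A minimal sketch of that pattern (my illustration, not the original poster's code; it reuses the Queue and Thread imports from above, and the STOP sentinel object is an assumed convention, not a library feature):

from Queue import Queue
from threading import Thread

STOP = object()  # sentinel: a unique marker telling one worker to exit

class Worker(Thread):
    def __init__(self, tasks):
        Thread.__init__(self)
        self.tasks = tasks
        self.daemon = True
        self.start()

    def run(self):
        while True:
            task = self.tasks.get()  # block indefinitely; no timeout, no state flags
            try:
                if task is STOP:
                    return           # exit only when explicitly told to
                func, args, kargs = task
                try:
                    func(*args, **kargs)
                except Exception, e:
                    print e
            finally:
                self.tasks.task_done()  # every get() is matched by one task_done()

class ThreadPool:
    def __init__(self, num_threads):
        self.tasks = Queue()
        self.workers = [Worker(self.tasks) for _ in range(num_threads)]

    def add_task(self, func, *args, **kargs):
        self.tasks.put((func, args, kargs))

    def stop(self):
        self.tasks.join()            # 3. join() alone signals that all work is done
        for _ in self.workers:       # 4. only then kill the threads, one sentinel each
            self.tasks.put(STOP)
        self.tasks.join()            # wait until every sentinel has been consumed

Because the workers block on get() with no timeout, they can never exit while tasks remain; queue.join() is the single source of truth for completion.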

Answer 1 (score: 0)

I have only basic Python knowledge, but aren't threads in Python useless? I have seen many articles criticizing the global interpreter lock (GIL).