代碼一:
#!/usr/bin/python
# -*- coding: utf-8 -*-

import threading
import Queue
import sys
import urllib2
import re
import MySQLdb

#
# Database connection settings
#
DB_HOST = '127.0.0.1'
DB_USER = "XXXX"
DB_PASSWD = "XXXXXXXX"
DB_NAME = "xxxx"

#
# Worker / queue settings
#
THREAD_LIMIT = 3                 # number of spider threads to start
jobs = Queue.Queue(5)            # bounded queue of pending [id, url] jobs
singlelock = threading.Lock()    # serializes console output between threads
info = Queue.Queue()             # unbounded queue collecting [id, title] results
def workerbee(inputlist):
????for x in xrange(THREAD_LIMIT):
????????print 'Thead {0} started.'.format(x)
????????t = spider()
????????t.start()
????for i in inputlist:
????????try:
????????????jobs.put(i, block=True, timeout=5)
????????except:
????????????singlelock.acquire()
????????????print "The queue is full !"
????????????singlelock.release()
?
????# Wait for the threads to finish
????singlelock.acquire()??????? # Acquire the lock so we can print
????print "Waiting for threads to finish."
????singlelock.release()??????? # Release the lock
????jobs.join()????????????? # This command waits for all threads to finish.
????# while not jobs.empty():
????#?? print jobs.get()
?
def getTitle(url, time=10):
    """Download `url` and return the first regex match for the page title.

    url  -- address to fetch
    time -- socket timeout in seconds (default 10)
    Raises urllib2.URLError on network failure, IndexError when no title is found.
    """
    response = urllib2.urlopen(url, timeout=time)
    try:
        html = response.read()
    finally:
        # Close the connection even if read() raises (was leaked on error).
        response.close()
    # NOTE(review): the original pattern was destroyed when this file was
    # extracted (it read `reg = r'` across two lines); matching the <title>
    # element is a reconstruction -- TODO confirm against the original source.
    reg = r'<title>(.*?)</title>'
    title = re.compile(reg).findall(html)
    # title = title[0].decode('gb2312','replace').encode('utf-8')
    title = title[0]
    return title
?
class spider(threading.Thread):
    """Worker thread: pull [id, url] jobs from `jobs`, fetch the page title,
    and push [id, title] results onto `info`.

    Exits when the job queue stays empty for 1 second.
    """

    def run(self):
        while 1:
            try:
                job = jobs.get(True, 1)   # exit the loop when idle for 1s
            except Queue.Empty:
                break
            try:
                # Fetch OUTSIDE the lock so downloads run concurrently; the
                # original held `singlelock` around getTitle(), serializing all
                # network I/O, and leaked the lock whenever getTitle() raised.
                title = getTitle(job[1])
                info.put([job[0], title], block=True, timeout=5)
            except Exception:
                # Best-effort: a URL that fails to download/parse is skipped.
                pass
            finally:
                # Always mark the job done -- otherwise a single failed fetch
                # left the count unbalanced and jobs.join() hung forever.
                jobs.task_done()
?
if __name__ == '__main__':
????con = None
????urls = []
????try:
????????con = MySQLdb.connect(DB_HOST,DB_USER,DB_PASSWD,DB_NAME)
????????cur = con.cursor()
????????cur.execute('SELECT id,url FROM `table_name` WHERE `status`=0 LIMIT 10')
????????rows = cur.fetchall()
????????for row in rows:
????????????# print row
????????????urls.append([row[0],row[1]])
????????workerbee(urls)
????????while not info.empty():
????????????print info.get()
????finally:
????????if con:
????????????con.close()
代碼二:
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Filename: robot.py

import threading
import Queue
import sys
import urllib2
import re

#
# Settings
#
THREAD_LIMIT = 3                 # number of spider threads to start
jobs = Queue.Queue(5)            # bounded queue of pending URLs
singlelock = threading.Lock()    # serializes console output between threads

# Hard-coded list of article URLs to crawl.
urls = ['http://games.sina.com.cn/w/n/2013-04-28/1634703505.shtml','http://games.sina.com.cn/w/n/2013-04-28/1246703487.shtml','http://games.sina.com.cn/w/n/2013-04-28/1028703471.shtml','http://games.sina.com.cn/w/n/2013-04-27/1015703426.shtml','http://games.sina.com.cn/w/n/2013-04-26/1554703373.shtml','http://games.sina.com.cn/w/n/2013-04-26/1512703346.shtml','http://games.sina.com.cn/w/n/2013-04-26/1453703334.shtml','http://games.sina.com.cn/w/n/2013-04-26/1451703333.shtml','http://games.sina.com.cn/w/n/2013-04-26/1445703329.shtml','http://games.sina.com.cn/w/n/2013-04-26/1434703322.shtml','http://games.sina.com.cn/w/n/2013-04-26/1433703321.shtml','http://games.sina.com.cn/w/n/2013-04-26/1433703320.shtml','http://games.sina.com.cn/w/n/2013-04-26/1429703318.shtml','http://games.sina.com.cn/w/n/2013-04-26/1429703317.shtml','http://games.sina.com.cn/w/n/2013-04-26/1409703297.shtml','http://games.sina.com.cn/w/n/2013-04-26/1406703296.shtml','http://games.sina.com.cn/w/n/2013-04-26/1402703292.shtml','http://games.sina.com.cn/w/n/2013-04-26/1353703286.shtml','http://games.sina.com.cn/w/n/2013-04-26/1348703284.shtml','http://games.sina.com.cn/w/n/2013-04-26/1327703275.shtml','http://games.sina.com.cn/w/n/2013-04-26/1239703265.shtml','http://games.sina.com.cn/w/n/2013-04-26/1238703264.shtml','http://games.sina.com.cn/w/n/2013-04-26/1231703262.shtml','http://games.sina.com.cn/w/n/2013-04-26/1229703261.shtml','http://games.sina.com.cn/w/n/2013-04-26/1228703260.shtml','http://games.sina.com.cn/w/n/2013-04-26/1223703259.shtml','http://games.sina.com.cn/w/n/2013-04-26/1218703258.shtml','http://games.sina.com.cn/w/n/2013-04-26/1202703254.shtml','http://games.sina.com.cn/w/n/2013-04-26/1159703251.shtml','http://games.sina.com.cn/w/n/2013-04-26/1139703233.shtml']
def workerbee(inputlist):
for x in xrange(THREAD_LIMIT):
print 'Thead {0} started.'.format(x)
t = spider()
t.start()
for i in inputlist:
try:
jobs.put(i, block=True, timeout=5)
except:
singlelock.acquire()
print "The queue is full !"
singlelock.release()
# Wait for the threads to finish
singlelock.acquire() # Acquire the lock so we can print
print "Waiting for threads to finish."
singlelock.release() # Release the lock
jobs.join() # This command waits for all threads to finish.
# while not jobs.empty():
# print jobs.get()
def getTitle(url, time=10):
    """Download `url` and return its title re-encoded from gb2312 to utf-8.

    url  -- address to fetch
    time -- socket timeout in seconds (default 10)
    Raises urllib2.URLError on network failure, IndexError when no title is found.
    """
    response = urllib2.urlopen(url, timeout=time)
    try:
        html = response.read()
    finally:
        # Close the connection even if read() raises (was leaked on error).
        response.close()
    # NOTE(review): the original pattern was destroyed when this file was
    # extracted; matching the <title> element is a reconstruction -- TODO
    # confirm against the original source.
    reg = r'<title>(.*?)</title>'
    title = re.compile(reg).findall(html)
    # The target pages are gb2312-encoded; re-encode the title as utf-8.
    title = title[0].decode('gb2312', 'replace').encode('utf-8')
    return title
class spider(threading.Thread):
def run(self):
while 1:
try:
job = jobs.get(True,1)
singlelock.acquire()
title = getTitle(job)
print 'This {0} is {1}'.format(job,title)
singlelock.release()
jobs.task_done()
except:
break;
if __name__ == '__main__':
    # Entry point: crawl the hard-coded URL list with THREAD_LIMIT workers.
    # (The call was unindented in the paste -- a syntax error -- reconstructed.)
    workerbee(urls)
更多文章、技術交流、商務合作、聯系博主
微信掃碼或搜索:z360901061
微信掃一掃加我為好友
QQ號聯系: 360901061
您的支持是博主寫作最大的動力,如果您喜歡我的文章,感覺我的文章對您有幫助,請用微信掃描下面二維碼支持博主2元、5元、10元、20元等您想捐的金額吧,狠狠點擊下面給點支持吧,站長非常感激您!手機微信長按不能支付解決辦法:請將微信支付二維碼保存到相冊,切換到微信,然后點擊微信右上角掃一掃功能,選擇支付二維碼完成支付。
【本文對您有幫助就好】元

