This article demonstrates how to implement a web crawler in Python that scrapes novels. It is shared here for reference; the details are as follows:
# -*- coding: utf-8 -*-
from bs4 import BeautifulSoup
from urllib import request
import re
import os, time

# Fetch a URL and return the raw HTML page.
def get_html(url):
    req = request.Request(url)
    req.add_header('User-Agent', 'Mozilla/5.0')
    response = request.urlopen(req)  # pass the Request object so the User-Agent header is actually sent
    html = response.read()
    return html

# From a listing page, return a {book name: link} dict for the novels on that page.
def get_books(url):
    html = get_html(url)
    soup = BeautifulSoup(html, 'lxml')
    books = soup.find_all('div', attrs={'class': 'bbox'})
    book_dict = {}
    for book in books:
        book_name = book.h3.a.string
        book_url = book.h3.a.get('href')
        book_dict[book_name] = book_url
    return book_dict

# From a book's page, return a {chapter name: link} dict.
def get_parts(url):
    html = get_html(url)
    soup = BeautifulSoup(html, 'lxml')
    part_urls = soup.find_all('a')
    host = "http://www.xiaoshuotxt.org"
    part_dict = {}
    for p in part_urls:
        p_url = str(p.get('href'))
        if re.search(r'\d{5}\.html', p_url) and ("xiaoshuotxt" not in p_url):
            part_dict[p.string] = host + p_url
    return part_dict

# Fetch the text of a single chapter from its URL.
def get_txt(url):
    html = get_html(url)
    soup = BeautifulSoup(html, 'lxml')
    title = soup.h1.string  # chapter title (not used below)
    content = soup.find('div', attrs={'class': 'zw'})
    txt = content.get_text()  # body text of the chapter
    return txt

if __name__ == "__main__":
    root_dir = r'e:\books'
    #url = 'http://www.xiaoshuotxt.org/mingzhu/index_2.html'  # page 2 of the listing
    url = "http://www.xiaoshuotxt.org/writer/58"  # Jin Yong's novels
    books = get_books(url)
    for book_name, book_url in books.items():
        os.makedirs(os.path.join(root_dir, book_name), exist_ok=True)  # one folder per book
        part_dict = get_parts(book_url)
        print(book_name, "has", len(part_dict), "chapters")
        for part_name, part_url in part_dict.items():
            print("Saving:", part_name)
            # write each chapter as a UTF-8 text file
            f1 = open(os.path.join(root_dir, book_name, '%s.txt' % part_name), 'w', encoding='utf-8')
            part_txt = get_txt(part_url)
            f1.write(str(part_txt))
            f1.close()
            time.sleep(2)
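Two fragile points are worth hardening if you adapt this script: chapter titles scraped from the page can contain characters that are not legal in Windows file names, and get_html has no error handling, so a single timeout aborts the whole run. Below is a minimal sketch of how those two pieces could be wrapped; sanitize_filename and fetch_html are hypothetical helpers, not part of the original script.

# A minimal sketch, not part of the original script: sanitize_filename and
# fetch_html are hypothetical helpers for hardening the crawler.
import re
import time
from urllib import request
from urllib.error import URLError

def sanitize_filename(name):
    # Replace characters that are illegal in Windows file names with '_'.
    return re.sub(r'[\\/:*?"<>|]', '_', name).strip()

def fetch_html(url, retries=3, delay=2):
    # Fetch a page with a browser-like User-Agent, retrying on network errors.
    req = request.Request(url, headers={'User-Agent': 'Mozilla/5.0'})
    for attempt in range(retries):
        try:
            with request.urlopen(req, timeout=10) as response:
                return response.read()
        except URLError:
            if attempt == retries - 1:
                raise
            time.sleep(delay)  # back off briefly before retrying

# Example use inside the saving loop (paths are illustrative):
# safe_name = sanitize_filename(part_name)
# f1 = open(os.path.join(root_dir, book_name, safe_name + '.txt'), 'w', encoding='utf-8')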
Run result:
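Based on the main loop, a run prints each book's chapter count followed by a "Saving:" line per chapter, and writes one UTF-8 .txt file per chapter into a per-book folder under e:\books.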
We hope this article is helpful to readers working on Python programming.