創建項目
scrapy startproject tencent
編寫items.py
寫class TencentItem
import scrapy
class TencentItem(scrapy.Item):
    """One Tencent HR job posting scraped from the listing table."""
    # Job title
    positionname = scrapy.Field()
    # Link to the job's detail page
    positionlink = scrapy.Field()
    # Job category
    positionType = scrapy.Field()
    # Number of openings
    peopleNum = scrapy.Field()
    # Work location
    workLocation = scrapy.Field()
    # Publish date
    publishTime = scrapy.Field()
創建基礎類的爬蟲
scrapy genspider tencentPosition "tencent.com"
tencentPosition.py
# -*- coding: utf-8 -*-
import scrapy

from tencent.items import TencentItem


class TencentpositionSpider(scrapy.Spider):
    """Crawls the Tencent HR job-listing pages and yields one TencentItem per row."""

    name = "tencent"
    allowed_domains = ["tencent.com"]
    # Listing URL; pagination is driven by the `start` query parameter.
    url = "http://hr.tencent.com/position.php?&start="
    offset = 0
    start_urls = [url + str(offset)]

    def parse(self, response):
        """Extract job rows from one listing page, then schedule the next page.

        Each job row is a <tr> with class 'even' or 'odd'; its five <td>
        cells hold title/link, category, headcount, location and date.
        """
        for row in response.xpath("//tr[@class='even'] | //tr[@class='odd']"):
            item = TencentItem()
            # extract_first() returns None for an empty cell instead of
            # raising IndexError like the original extract()[0] did.
            item['positionname'] = row.xpath("./td[1]/a/text()").extract_first()
            # Detail-page link
            item['positionlink'] = row.xpath("./td[1]/a/@href").extract_first()
            # Job category
            item['positionType'] = row.xpath("./td[2]/text()").extract_first()
            # Number of openings
            item['peopleNum'] = row.xpath("./td[3]/text()").extract_first()
            # Work location
            item['workLocation'] = row.xpath("./td[4]/text()").extract_first()
            # Publish date
            item['publishTime'] = row.xpath("./td[5]/text()").extract_first()
            yield item

        # After finishing a page, request the next one: offset advances by
        # 10 per page and the same parse() handles the response.
        # NOTE(review): the 1680 upper bound is hard-coded — confirm it
        # still matches the site's last page.
        if self.offset < 1680:
            self.offset += 10
            yield scrapy.Request(self.url + str(self.offset), callback=self.parse)
管道文件
pipelines.py
import json


class TencentPipeline(object):
    """Writes each scraped item as one JSON record per line into tencent.json."""

    def __init__(self):
        # Open in text mode with an explicit UTF-8 encoding. The original
        # wrote encoded bytes into a "w"-mode (text) file, which raises
        # TypeError on Python 3.
        self.file = open("tencent.json", "w", encoding="utf-8")

    def process_item(self, item, spider):
        """Serialize one item to JSON and append it to the output file.

        Returns the item unchanged so later pipelines still receive it.
        """
        # ensure_ascii=False keeps the Chinese field values human-readable.
        # The original appended ",n" (a literal letter n); the intent was
        # one comma-terminated record per line, i.e. ",\n".
        line = json.dumps(dict(item), ensure_ascii=False) + ",\n"
        self.file.write(line)
        return item

    def close_spider(self, spider):
        # Flush and release the output file when the spider finishes.
        self.file.close()
在settings文件設置pipelines
# Register the JSON-export pipeline; 300 is its order in the 0-1000
# pipeline-priority range (lower runs first).
ITEM_PIPELINES = {'tencent.pipelines.TencentPipeline': 300}
添加請求報頭
DEFAULT_REQUEST_HEADERS
settings.py
# Scrapy project settings for the tencent crawler.
BOT_NAME = 'tencent'

SPIDER_MODULES = ['tencent.spiders']
NEWSPIDER_MODULE = 'tencent.spiders'

# NOTE(review): obeying robots.txt may block the crawl if the site
# disallows the listing pages — confirm this is intended.
ROBOTSTXT_OBEY = True

# Wait 2 seconds between requests to be polite to the server.
DOWNLOAD_DELAY = 2

DEFAULT_REQUEST_HEADERS = {
    # The original UA string had an unclosed parenthesis; this is the
    # well-formed IE9 user-agent value.
    "User-Agent": "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)",
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
}

ITEM_PIPELINES = {
    'tencent.pipelines.TencentPipeline': 300,
}