# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
import json

import pymongo
import requests

from pycomm.mongo import DBMongo
class SportslstPipeline(object):
    """Scrapy item pipeline that stores scraped football league/competition
    records in MongoDB.

    A MongoDB client is opened once per spider run (``open_spider``) and
    closed when the spider finishes (``close_spider``).
    """

    def open_spider(self, spider):
        """Connect to MongoDB and look up the target collections."""
        self.host = '192.168.2.200'
        self.port = 27017
        self.dbname = 'kaiyou'  # database name
        self.client = pymongo.MongoClient(host=self.host, port=self.port)
        self.tdb = self.client[self.dbname]
        # BUG FIX: the original rebound self.port (the port number) to a
        # collection handle; use dedicated attribute names instead.
        self.league_coll = self.tdb['zq_league']            # league collection
        self.competition_coll = self.tdb['zq_competition']  # competition collection

    def process_item(self, item, spider):
        """Process one scraped item.

        For items whose ``csource`` is ``'zaopan'``, deduplicate the
        ``zq_league`` entries before handling them. Always returns the item
        so that downstream pipelines still receive it.
        """
        if item['csource'] == 'zaopan':
            # set() deduplicates; entries must therefore be hashable
            # (strings/tuples) — presumably league identifiers. TODO confirm
            # against the spider that builds item['zq_league'].
            for zq_league in set(item['zq_league']):
                print(zq_league)
                # TODO: persist the records once the schema is settled, e.g.:
                # if zq_league.get('league_name'):
                #     self.league_coll.insert_one(dict(zq_league))
                # if zq_league.get('host_name'):
                #     self.competition_coll.insert_one(dict(zq_league))
                # (pymongo's Collection.insert is deprecated; use insert_one.)
        # BUG FIX: a pipeline must return the item, otherwise every later
        # pipeline sees None and the item is dropped. The original had this
        # return commented out.
        return item

    def close_spider(self, spider):
        """Release the MongoDB connection when the spider closes."""
        self.client.close()