# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
import pymongo  # only needed by the commented-out open_spider variant below

from collectSports.biz import getMongo
class SportslstPipeline(object):
    # Alternative: manage the MongoDB connection on the pipeline itself.
    # def open_spider(self, spider):
    #     self.host = '192.168.2.200'
    #     self.port = 27017
    #     self.dbname = 'kaiyou'  # database name
    #     self.client = pymongo.MongoClient(host=self.host, port=self.port)
    #     self.tdb = self.client[self.dbname]
    #     self.league_col = self.tdb['zq_league']  # collection name
    #     # competition
    #     self.competition_col = self.tdb['zq_competition']  # collection name
    def process_item(self, item, spider):
        mongo = getMongo()  # shared Mongo helper from collectSports.biz
        spider.logger.debug(mongo)
        if item['csource'] == 'zaopan':
            # Deduplicate before writing; the entries must be hashable
            # (e.g. tuples of key/value pairs) for set() to accept them
            # and for dict() to rebuild them below.
            zq_leagues = set(item['zq_league'])
            zq_matchs = set(item['zq_match'])
            for zq_league in zq_leagues:
                mongo.changeSet('zq_league').insert(dict(zq_league))
            for zq_match in zq_matchs:
                mongo.changeSet('zq_competition').insert(dict(zq_match))
        # A pipeline must return the item so later pipelines still see it.
        return item
    # def close_spider(self, spider):
    #     self.client.close()
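
As the header comment notes, the pipeline only runs once it is registered in the project settings. A minimal sketch, assuming the default Scrapy layout where this class lives in collectSports/pipelines.py:

# settings.py -- enable the pipeline (module path assumed from the
# default project layout; the number orders pipelines, lower runs first)
ITEM_PIPELINES = {
    'collectSports.pipelines.SportslstPipeline': 300,
}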
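
getMongo() and its changeSet()/insert() calls come from the project-internal collectSports.biz and pycomm.mongo modules, which this listing does not show. Purely to make those calls concrete, here is a hypothetical stand-in built on pymongo; the host, port, and database name are assumptions copied from the commented-out open_spider, and changeSet() returning a chainable handle is a guess:

# Hypothetical stand-in for the project-internal Mongo helper; the real
# collectSports.biz.getMongo is not shown in the listing above.
import pymongo

class _MongoHelper(object):
    def __init__(self, host='192.168.2.200', port=27017, dbname='kaiyou'):
        # Connection details assumed from the commented-out open_spider.
        self._db = pymongo.MongoClient(host=host, port=port)[dbname]
        self._collection = None

    def changeSet(self, name):
        # Switch the active collection and return self so that
        # mongo.changeSet('zq_league').insert(...) chains as in the pipeline.
        self._collection = self._db[name]
        return self

    def insert(self, doc):
        # Collection.insert is deprecated in pymongo 3+; insert_one is
        # the modern equivalent.
        return self._collection.insert_one(doc)

_helper = None

def getMongo():
    # Reuse a single client across items instead of reconnecting per call.
    global _helper
    if _helper is None:
        _helper = _MongoHelper()
    return _helper

Note that for the set()/dict() round-trip in process_item to succeed, the spider would also have to emit each league/match row as a hashable tuple of key/value pairs, e.g. item['zq_league'] = [(('name', 'Premier League'), ('country', 'England'))].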