# sportslst.py
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
import json
import pymongo
import requests

from collectSports.biz import getMongo
from pycomm.mongo import DBMongo
  11. class SportslstPipeline(object):
  12. # def open_spider(self, spider):
  13. # self.host = '192.168.2.200'
  14. # self.port = 27017
  15. # self.dbname = 'kaiyou' # 数据库名
  16. # self.client = pymongo.MongoClient(host=self.host, port=self.port)
  17. # self.tdb = self.client[self.dbname]
  18. # self.port = self.tdb['zq_league'] # 表名
  19. # # competition
  20. # self.port2 = self.tdb['zq_competition'] # 表名
  21. def process_item(self, item, spider):
  22. mongo = getMongo()
  23. print(mongo)
  24. zaopan = item['csource']
  25. if zaopan == 'zaopan':
  26. zq_leagues = set(item['zq_league'])
  27. zq_matchs = set(item['zq_match'])
  28. for zq_league in zq_leagues:
  29. mongo.changeSet('zq_league').insert(dict(zq_league))
  30. for zq_match in zq_matchs:
  31. mongo.changeSet('zq_competition').insert(dict(zq_match))
  32. # return item
  33. # def close_spider(self, spider):
  34. # self.client.close()