sportslst.py

# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
import pymongo


class SportslstPipeline(object):

    def open_spider(self, spider):
        self.host = '192.168.2.200'
        self.port = 27017
        self.dbname = 'kaiyou'  # database name
        self.client = pymongo.MongoClient(host=self.host, port=self.port)
        self.tdb = self.client[self.dbname]
        # collection handles (the original reused self.port for the first one,
        # clobbering the port number; use dedicated names instead)
        self.league_coll = self.tdb['zq_league']            # league collection
        self.competition_coll = self.tdb['zq_competition']  # competition collection

    def process_item(self, item, spider):
        if item['csource'] == 'zaopan':
            # Deduplicate league records: dicts are unhashable, so key on their
            # sorted items rather than building a set() of the dicts themselves.
            seen = set()
            for zq_league in item['zq_league']:
                key = tuple(sorted(zq_league.items()))
                if key in seen:
                    continue
                seen.add(key)
                league_name = zq_league.get('league_name')
                if league_name:
                    self.league_coll.insert_one(dict(zq_league))
                    # records that also carry a host_name describe a competition
                    if zq_league.get('host_name'):
                        self.competition_coll.insert_one(dict(zq_league))
        return item

    def close_spider(self, spider):
        self.client.close()
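
As the header comment notes, the pipeline only runs once it is registered in ITEM_PIPELINES. A minimal settings.py sketch follows; the dotted path and the priority value 300 are assumptions, since the actual project package name and module location are not shown here.

# Hypothetical settings.py entry; adjust the dotted path to wherever
# SportslstPipeline actually lives in your Scrapy project.
ITEM_PIPELINES = {
    'sportslst.sportslst.SportslstPipeline': 300,
}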