# jieshu.py
  1. import datetime
  2. import json
  3. # import re
  4. import logging
  5. import redis
  6. import scrapy
  7. from ..items import Hgjieshu
  8. from ..settings import R_HOST, R_PASSWORD, R_POST, R_DB
  9. class HgjieshuSpider(scrapy.Spider):
  10. name = 'jieshu'
  11. to_day = datetime.datetime.now()
  12. allowed_domains = ['hg3535z.com']
  13. custom_settings = {
  14. "ITEM_PIPELINES": {
  15. 'hg3535.pipeline.jieshu.Jieshuqiupipeline': 300,
  16. },
  17. # 'LOG_LEVEL': 'DEBUG',
  18. # 'LOG_FILE': "../hg3535/log/saiguo{}_{}_{}.log".format(to_day.year, to_day.month, to_day.day)
  19. }
  20. rls = redis.Redis(host=R_HOST, port=R_POST, db=R_DB, password=R_PASSWORD)
  21. def start_requests(self):
  22. match_ids = self.rls.smembers("hg3535.gunqiu.ids")
  23. if match_ids:
  24. for match_id in match_ids:
  25. match_id = match_id.decode()
  26. url = 'https://odata.jiushan6688.com/odds6i/d/getamodds/zh-cn/eid/{}/iip/true/ubt/am/isp/false'.format(match_id)
  27. yield scrapy.Request(url=url, callback=self.parse, dont_filter=True)
  28. def parse(self, response):
  29. logger = logging.getLogger(__name__)
  30. try:
  31. data = json.loads(response.text)
  32. status = data['i'][0]
  33. # if not status:
  34. ball = data['i'][31]
  35. match_id = data['i'][2]
  36. item = Hgjieshu()
  37. item['ball'] = ball
  38. item['match_id'] = match_id
  39. item['status'] = status
  40. yield item
  41. # else:
  42. # return
  43. except Exception as e:
  44. logger.warning(e)
  45. return