# jieshu.py
import datetime
import json
import logging
# import re

import redis
import scrapy

from ..items import Hgjieshu
from ..settings import R_HOST, R_PASSWORD, R_POST, R_DB
  9. class HgjieshuSpider(scrapy.Spider):
  10. name = 'jieshu'
  11. to_day = datetime.datetime.now()
  12. allowed_domains = ['hg3535z.com']
  13. custom_settings = {
  14. "ITEM_PIPELINES": {
  15. 'hg3535.pipeline.jieshu.Jieshuqiupipeline': 300,
  16. },
  17. # 'LOG_LEVEL': 'DEBUG',
  18. # 'LOG_FILE': "../hg3535/log/saiguo{}_{}_{}.log".format(to_day.year, to_day.month, to_day.day)
  19. }
  20. rls = redis.Redis(host=R_HOST, port=R_POST, db=R_DB, password=R_PASSWORD)
  21. def start_requests(self):
  22. match_ids = self.rls.smembers("hg3535.gunqiu.ids")
  23. if match_ids:
  24. for match_id in match_ids:
  25. match_id = match_id.decode()
  26. url = 'https://odata.jiushan6688.com/odds6i/d/getamodds/zh-cn/eid/{}/iip/true/ubt/am/isp/false'.format(match_id)
  27. yield scrapy.Request(url=url, callback=self.parse, dont_filter=True)
  28. def parse(self, response):
  29. logger = logging.getLogger(__name__)
  30. try:
  31. data = json.loads(response.text)
  32. status = data['i'][0]
  33. if not status:
  34. ball = data['i'][31]
  35. match_id = data['i'][2]
  36. self.rls.srem('hg3535.gunqiu.ids', match_id)
  37. item = Hgjieshu()
  38. item['ball'] = ball
  39. item['match_id'] = match_id
  40. # item['status'] = status
  41. yield item
  42. except Exception as e:
  43. logger.warning(e)
  44. return