zuqiu.py

# -*- coding: utf-8 -*-
import logging

import lxml.etree
import scrapy

from items import ZuqiuItem


class ZuqiuSpider(scrapy.Spider):
    """Crawls football (zuqiu) odds from m.hgg070.com."""

    name = 'zuqiu'
    allowed_domains = ['m.hgg070.com']
    custom_settings = {
        "ITEM_PIPELINES": {
            "hgg070_spider.pipelines.zuqiu.ZuqiuPipeline": 200,
        },
        # 'LOG_LEVEL': 'DEBUG',
        # 'LOG_FILE': cpath + "/log/sports_{}_{}_{}.log".format(to_day.year, to_day.month, to_day.day)
    }
    headers = {
        'Accept': '*/*',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'zh-CN,zh;q=0.9',
        'Content-Length': '130',
        'Content-type': 'application/x-www-form-urlencoded',
        'Cookie': '_ga=GA1.2.1009358217.1572056223; _gid=GA1.2.97506800.1572056223; _gat=1',
        'Host': 'm.hgg070.com',
        'Origin': 'http://m.hgg070.com',
        'Proxy-Connection': 'keep-alive',
        'Referer': 'http://m.hgg070.com/',
        'User-Agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/11.0 Mobile/15A372 Safari/604.1',
    }

    def start_requests(self):
        url = "http://m.hgg070.com/app/member/get_league_list.php"
        # One request per (showtype, isP, Content-Length) combination; the list
        # index is passed along in meta['index'] and drives the parameters used
        # later in parse / parse_match.
        h_types = [('FT', '', '130'), ('FU', 'P', '131'), ('FU', '', '130'), ('FU', 'P', '131')]
        for i, h_type in enumerate(h_types):
            show_type, isp, length = h_type
            self.headers['Content-Length'] = length
            form_data = {
                'uid': '4d6e7f8af34715653b6039ca9b43737f096ed82446e3d37e033349aba0e3e753',
                'langx': 'zh-cn',
                'ltype': '3',
                'gtype': 'FT',
                'showtype': show_type,
                'sorttype': '',
                'date': '',
                'isP': isp,
            }
            yield scrapy.FormRequest(url=url, formdata=form_data, callback=self.parse, headers=self.headers,
                                     meta={'index': i}, dont_filter=True)

    def parse(self, response):
        leagues = response.xpath('//serverresponse/game/league')
        url = 'http://m.hgg070.com/app/member/get_game_list.php'
        if leagues:
            index = response.meta['index']
            if index == 0:
                date = ''
                showtype = 'FT'
                isp = ''
                self.headers['Content-Length'] = '147'
            elif index == 2:
                date = 'all'
                showtype = 'FU'
                isp = ''
                self.headers['Content-Length'] = '150'
            else:
                date = 'all'
                showtype = 'FU'
                isp = 'P'
                self.headers['Content-Length'] = '151'
            # Request the game list of every league returned for this showtype.
            for league in leagues:
                lid = league.xpath('.//league_id/text()').extract_first()
                form_data = {
                    'uid': '4d6e7f8af34715653b6039ca9b43737f096ed82446e3d37e033349aba0e3e753',
                    'langx': 'zh-cn',
                    'ltype': '3',
                    'gtype': 'FT',
                    'showtype': showtype,
                    'lid': lid,
                    'sorttype': 'league',
                    'date': date,
                    'isP': isp,
                }
                yield scrapy.FormRequest(url=url, formdata=form_data, callback=self.parse_match, headers=self.headers,
                                         meta={'index': index}, dont_filter=True)
        else:
            self.logger.warning('No league IDs retrieved')
            return
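
    # A rough sketch of the XML the three endpoints are assumed to return,
    # reconstructed only from the XPath selectors used in this spider (element
    # order and any additional fields are unknown):
    #
    #   get_league_list.php: <serverresponse><game><league><league_id>..</league_id></league>...</game></serverresponse>
    #   get_game_list.php:   <serverresponse><game><gid>..</gid><more_count>..</more_count></game>...</serverresponse>
    #   get_game_more.php:   <serverresponse><game><gopen>Y</gopen><!-- odds fields --></game></serverresponse>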

    def parse_match(self, response):
        index = response.meta['index']
        url = 'http://m.hgg070.com/app/member/get_game_more.php'
        if index == 0:
            date = ''
            showtype = 'FT'
            isp = ''
            self.headers['Content-Length'] = '132'
        elif index == 1:
            date = 'all'
            showtype = 'FT'
            isp = 'P'
            self.headers['Content-Length'] = '136'
        elif index == 2:
            date = ''
            showtype = 'FU'
            isp = ''
            self.headers['Content-Length'] = '132'
        else:
            date = 'all'
            showtype = 'FU'
            isp = 'P'
            self.headers['Content-Length'] = '136'
        gids = response.xpath('//serverresponse/game/gid/text()').extract()
        tags = response.xpath('//serverresponse/game/more_count/text()').extract()
        if gids:
            # Request the detailed odds ("more") data for every game id.
            for i, gid in enumerate(gids):
                form_data = {
                    'uid': '4d6e7f8af34715653b6039ca9b43737f096ed82446e3d37e033349aba0e3e753',
                    'langx': 'zh-cn',
                    'ltype': '3',
                    'gtype': 'FT',
                    'showtype': showtype,
                    'date': date,
                    'isP': isp,
                    'gid': gid,
                }
                tag = tags[i]
                yield scrapy.FormRequest(url=url, formdata=form_data, callback=self.parse_odds, headers=self.headers,
                                         meta={'index': index, 'tag': tag}, dont_filter=True)

    def parse_odds(self, response):
        index = response.meta['index']
        tag = response.meta['tag']
        try:
            game = response.xpath('//serverresponse/game')[0]
        except IndexError:
            # Response contains no <game> node, nothing to parse.
            return
        logger = logging.getLogger(__name__)
        if game:
            game_odds = {}
            gopen = game.xpath('./gopen/text()').extract_first()
            if gopen == 'Y':
                # Flatten every child element of <game> into a tag -> text dict.
                game_el = lxml.etree.fromstring(game.extract())
                for child in game_el:
                    if child.text is None:
                        game_odds[child.tag] = ""
                    else:
                        game_odds[child.tag] = child.text
            else:
                logger.info('gopen == "N", detailed odds market is not open')
            item = ZuqiuItem()
            item['data'] = game_odds
            item['index'] = index
            item['tag'] = tag
            yield item
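
# For reference: ZuqiuItem is imported from the project's items module and is
# not defined in this file. A minimal sketch matching the three fields assigned
# above (data, index, tag) could look like the following -- this is an
# assumption about that module, not its actual contents:
#
#     import scrapy
#
#     class ZuqiuItem(scrapy.Item):
#         data = scrapy.Field()
#         index = scrapy.Field()
#         tag = scrapy.Field()
#
# With the ZuqiuPipeline referenced in custom_settings available in the
# project, the spider is run in the usual way:
#
#     scrapy crawl zuqiu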