# saiguo.py
  1. import datetime
  2. import re
  3. import scrapy
  4. from lxml import etree
  5. from ..items import Hgsaiguo
class HgjieshuSpider(scrapy.Spider):
    """Spider 'saiguo': scrapes finished-match results pages from hg3535
    and feeds them to Jieshuqiupipeline via Hgsaiguo items."""
    name = 'saiguo'
    # Evaluated once at class-definition time; only used by the
    # commented-out LOG_FILE path below.
    to_day = datetime.datetime.now()
    # NOTE(review): allowed_domains is 'hg3535z.com' but start_requests
    # targets 'www.hg3535.cn' — OffsiteMiddleware would normally drop those
    # requests; confirm the middleware is disabled or the domain is right.
    allowed_domains = ['hg3535z.com']
    custom_settings = {
        "ITEM_PIPELINES": {
            'hg3535.pipeline.saiguo.Jieshuqiupipeline': 300,
        },
        # 'LOG_LEVEL': 'DEBUG',
        # 'LOG_FILE': "../hg3535/log/saiguo{}_{}_{}.log".format(to_day.year, to_day.month, to_day.day)
    }
  17. def start_requests(self):
  18. for y in range(1, 5):
  19. for z in range(1, 3):
  20. url = 'https://www.hg3535.cn/zh-cn/info-centre/sportsbook-info/results/{}/normal/{}'.format(y, z)
  21. yield scrapy.Request(url=url, callback=self.parse, dont_filter=True, meta={'pt': y, "page": z})
    def parse(self, response):
        """Parse one results page and yield a Hgsaiguo item per match.

        response.meta carries:
            pt   -- sport type set in start_requests; the branches below
                    handle 1=football, 2=basketball, 3=tennis, 4=baseball.
            page -- 1 or 2; selects how far back match_date is shifted.

        Each branch walks the leagues on the page, then the matches inside
        each league block, relying on positional alignment between the
        parallel xpath result lists (team names, scores, times, ids).
        """
        if response.status == 200:
            pt = response.meta['pt']
            page = response.meta['page']
            # Derive the match date by shifting "now" back 12h (page 1) or
            # 36h (page 2). NOTE(review): presumably compensates for the
            # site's publishing timezone — confirm the offsets.
            if page == 1:
                us_datetime = datetime.datetime.now() - datetime.timedelta(hours=12)
            else:
                us_datetime = datetime.datetime.now() - datetime.timedelta(hours=36)
            match_date = us_datetime.strftime("%Y-%m-%d")
            # Football results
            if pt == 1:
                league_ids = response.xpath('//div[@class="rt-l-bar football"]/@id').extract()
                league_names = response.xpath('//div[@class="rt-l-bar football"]/span[@class="comp-txt"]/text()').extract()
                for index in range(len(league_ids)):
                    league_id = league_ids[index]
                    league_name = league_names[index]
                    # Re-parse just this league's detail block with lxml so
                    # the per-match xpaths below are scoped to one league.
                    response_data = response.xpath('//div[@id="dt-{}"]'.format(league_id)).extract_first()
                    data = etree.HTML(response_data)
                    # Team names ("Home - Away" in the title attribute)
                    team_names = data.xpath('//div[@class="rt-event"]/@title')
                    # Full-time scores
                    f_scores = data.xpath('.//div[contains(@class, "rt-ft ")]')
                    # First-half scores
                    h_scores = data.xpath('.//div[contains(@class, "rt-ht ")]')
                    # Kick-off times
                    stimes = data.xpath('//div[@class="rt-event"]/../div[1]/span/text()')
                    # Sub-market names
                    # odd_names = data.xpath('//div[@class="rt-sub rt-data-hide"]/table/tbody[2]/tr/td[2]')
                    # Sub-market results
                    # odd_plays = data.xpath('//div[@class="rt-sub rt-data-hide"]/table/tbody[2]/tr/td[3]/span')
                    match_ids = data.xpath('//div[@class="flex-wrap"]/../div[1]/@id')
                    odd_datas = data.xpath('//div[contains(@class, "rt-sub ")]/table/tbody[2]')
                    for y in range(len(odd_datas)):
                        # Strip the element-id prefixes to get bare ids.
                        match_id = match_ids[y].replace('e-', '')
                        league_id = league_id.replace('cmp-', '')
                        team_name = team_names[y].replace(' ', '').split('-')
                        # Sub-market names
                        odd_names = odd_datas[y].xpath('.//tr/td[2]')
                        # Sub-market results
                        odd_plays = odd_datas[y].xpath('.//tr/td[3]/span')
                        # Home team
                        h_name = team_name[0]
                        # Away team
                        a_name = team_name[1]
                        # First-half score
                        h_score = h_scores[y].xpath('string(.)').replace(' ', '').replace('\r\n', '')
                        # Full-time score
                        f_score = f_scores[y].xpath('string(.)').replace(' ', '').replace('\r\n', '')
                        # Regex for the HH:MM kick-off time.
                        # NOTE(review): compiled on every iteration — could
                        # be hoisted out of the loops.
                        pattern = re.compile(r"\d{1,3}:\d{1,3}")
                        match_time = pattern.findall(stimes[y])[0]
                        play_datas = []
                        if odd_names:
                            for i in range(len(odd_names)):
                                name = odd_names[i].text
                                plays = odd_plays[i].xpath('text()')
                                if len(plays) == 2:
                                    play = '{}&&{}'.format(plays[0], plays[1])
                                else:
                                    # NOTE(review): raises IndexError when
                                    # 'plays' is empty — confirm the markup
                                    # guarantees at least one text node.
                                    play = plays[0]
                                play_datas.append({'play_name': name, 'play_result': play})
                        item = Hgsaiguo()
                        item["league_id"] = league_id
                        item["league_name"] = league_name
                        item["match_id"] = match_id
                        item["match_date"] = match_date
                        item["match_time"] = match_time
                        item["home_team"] = h_name
                        item["guest_team"] = a_name
                        item["score_half"] = h_score
                        item["score_full"] = f_score
                        item["play_data"] = play_datas
                        item["pt"] = pt
                        item["page"] = page
                        yield item
            # Basketball results
            if pt == 2:
                league_ids = response.xpath('//div[@class="rt-l-bar sportHasQuater"]/@id').extract()
                league_names = response.xpath('//div[@class="rt-l-bar sportHasQuater"]/span[@class="comp-txt"]/text()').extract()
                for index in range(len(league_ids)):
                    league_id = league_ids[index]
                    league_name = league_names[index]
                    response_data = response.xpath('//div[@id="dt-{}"]'.format(league_id)).extract_first()
                    data = etree.HTML(response_data)
                    # Team names
                    team_names = data.xpath('//div[@class="rt-event"]/@title')
                    # Full-game scores
                    f_scores = data.xpath('.//div[@class="rt-qft"]')
                    # First-half scores
                    h_scores = data.xpath('.//div[@class="rt-qt1"]')
                    # Second-half scores
                    x_scores = data.xpath('.//div[@class="rt-qt2"]')
                    # Tip-off times
                    stimes = data.xpath('//div[@class="rt-event"]/../div[1]/span/text()')
                    match_ids = data.xpath('//div[@class="flex-wrap"]/../div[1]/@id')
                    odd_datas = data.xpath('//div[contains(@class, "rt-sub ")]/table/tbody[2]')
                    for y in range(len(odd_datas)):
                        match_id = match_ids[y].replace('e-', '')
                        league_id = league_id.replace('cmp-', '')
                        team_name = team_names[y].replace(' ', '').split('-')
                        # Sub-market rows; row 0 is the per-quarter score
                        # table, later rows are named sub-markets.
                        child_data = odd_datas[y].xpath('./tr')
                        # Home team
                        h_name = team_name[0]
                        # Away team
                        a_name = team_name[1]
                        # First-half score
                        h_score = h_scores[y].xpath('string(.)').replace(' ', '').replace('\r\n', '')
                        # Full-game score
                        f_score = f_scores[y].xpath('string(.)').replace(' ', '').replace('\r\n', '')
                        # Second-half score
                        x_score = x_scores[y].xpath('string(.)').replace(' ', '').replace('\r\n', '')
                        # Regex for the HH:MM start time
                        pattern = re.compile(r"\d{1,3}:\d{1,3}")
                        match_time = pattern.findall(stimes[y])[0]
                        play_datas = []
                        if child_data:
                            h_dict = {'team_name': h_name}
                            a_dict = {'team_name': a_name}
                            for i in range(len(child_data)):
                                if i == 0:
                                    # Per-quarter scores: tr[3] = home row,
                                    # tr[4] = away row.
                                    h_datas = child_data[i].xpath('.//td/table/tbody/tr[3]/td[@class="r-odds"]')
                                    a_datas = child_data[i].xpath('.//td/table/tbody/tr[4]/td[@class="r-odds"]')
                                    # Column index -> quarter key
                                    rule = {0: "sc_1th", 1: "sc_2th", 2: "sc_3th", 3: "sc_4th", 4: "sc_other"}
                                    if h_datas and a_datas:
                                        for x in range(len(h_datas)):
                                            # Home quarter score
                                            h_data = h_datas[x].text.replace(' ', '').replace('\r\n', '')
                                            h_dict[rule[x]] = h_data
                                            # Away quarter score
                                            a_data = a_datas[x].text.replace(' ', '').replace('\r\n', '')
                                            a_dict[rule[x]] = a_data
                                else:
                                    # Sub-market name
                                    child_name = child_data[i].xpath('.//td[contains(@class, "r-bt ")]/text()')[0].replace(' ', '').replace('\r\n', '')
                                    # Sub-market result
                                    child_play = child_data[i].xpath('.//td[@class="r-odds"]/span[@class="prop"]/text()')[0]
                                    play_datas.append({"play_name": child_name, "play_result": child_play})
                            play_datas.append(h_dict)
                            play_datas.append(a_dict)
                        item = Hgsaiguo()
                        item["league_id"] = league_id
                        item["league_name"] = league_name
                        item["match_id"] = match_id
                        item["match_date"] = match_date
                        item["match_time"] = match_time
                        item["home_team"] = h_name
                        item["guest_team"] = a_name
                        item["score_half"] = h_score
                        # NOTE(review): basketball/tennis use "score_result"
                        # where football/baseball use "score_full" — confirm
                        # the pipeline expects this asymmetry.
                        item["score_result"] = f_score
                        item["play_data"] = play_datas
                        item["pt"] = pt
                        item["page"] = page
                        item["score_below"] = x_score
                        yield item
            # Tennis results
            if pt == 3:
                league_ids = response.xpath('//div[@class="rt-l-bar tennis"]/@id').extract()
                league_names = response.xpath('//div[@class="rt-l-bar tennis"]/span[@class="comp-txt"]/text()').extract()
                for index in range(len(league_ids)):
                    league_id = league_ids[index]
                    league_name = league_names[index]
                    response_data = response.xpath('//div[@id="dt-{}"]'.format(league_id)).extract_first()
                    data = etree.HTML(response_data)
                    # Player/team names
                    team_names = data.xpath('//div[@class="rt-event"]/@title')
                    # Match result (sets)
                    f_scores = data.xpath('.//div[@class="rt-set"]')
                    # Start times
                    stimes = data.xpath('//div[@class="rt-event"]/../div[1]/span/text()')
                    # Match ids
                    match_ids = data.xpath('//div[@class="flex-wrap"]/../div[1]/@id')
                    # Detailed result info (set scores etc.)
                    odd_datas = data.xpath('//div[contains(@class, "rt-sub ")]/table/tbody')
                    for y in range(len(odd_datas)):
                        match_id = match_ids[y].replace('e-', '')
                        league_id = league_id.replace('cmp-', '')
                        team_name = team_names[y].replace(' ', '').split('-')
                        # Sub-market rows; row 0 is the per-set score table.
                        child_data = odd_datas[y].xpath('./tr')
                        # Home side
                        h_name = team_name[0]
                        # Away side
                        a_name = team_name[1]
                        # Match result
                        f_score = f_scores[y].xpath('string(.)').replace(' ', '').replace('\r\n', '')
                        # Regex for the HH:MM start time
                        pattern = re.compile(r"\d{1,3}:\d{1,3}")
                        match_time = pattern.findall(stimes[y])[0]
                        play_datas = []
                        if child_data:
                            # Column index -> set/aggregate key
                            rule = {0: "sc_1th", 1: "sc_2th", 2: "sc_3th", 3: "sc_4th", 4: "sc_5th", 5: "game_num", 6: "disc_num"}
                            h_dict = {'team_name': h_name}
                            a_dict = {'team_name': a_name}
                            for i in range(len(child_data)):
                                if i == 0:
                                    # Per-set scores: tr[3] = home, tr[4] = away.
                                    h_datas = child_data[i].xpath('.//tbody/tr[3]/td[contains(@class, "r-odds")]')
                                    a_datas = child_data[i].xpath('.//tbody/tr[4]/td[contains(@class, "r-odds")]')
                                    if h_datas and a_datas:
                                        for x in range(len(h_datas)):
                                            # Home set score
                                            h_data = h_datas[x].text.replace(' ', '').replace('\r\n', '')
                                            h_dict[rule[x]] = h_data
                                            # Away set score
                                            a_data = a_datas[x].text.replace(' ', '').replace('\r\n', '')
                                            a_dict[rule[x]] = a_data
                                else:
                                    # Sub-market name
                                    child_name = child_data[i].xpath('.//td[contains(@class, "r-bt ")]/text()')[0].replace(' ', '').replace('\r\n', '')
                                    # Sub-market result
                                    child_play = child_data[i].xpath('.//td[@class="r-odds"]/span[@class="prop"]')[0]
                                    play = child_play.xpath('string(.)')
                                    play_datas.append({"play_name": child_name, "play_result": play})
                            play_datas.append(h_dict)
                            play_datas.append(a_dict)
                        item = Hgsaiguo()
                        item["league_id"] = league_id
                        item["league_name"] = league_name
                        item["match_id"] = match_id
                        item["match_date"] = match_date
                        item["match_time"] = match_time
                        item["home_team"] = h_name
                        item["guest_team"] = a_name
                        item["score_result"] = f_score
                        item["play_data"] = play_datas
                        item["pt"] = pt
                        item["page"] = page
                        yield item
            # Baseball results
            if pt == 4:
                league_ids = response.xpath('//div[@class="rt-l-bar baseball"]/@id').extract()
                league_names = response.xpath('//div[@class="rt-l-bar baseball"]/span[@class="comp-txt"]/text()').extract()
                for index in range(len(league_ids)):
                    league_id = league_ids[index]
                    league_name = league_names[index]
                    response_data = response.xpath('//div[@id="dt-{}"]'.format(league_id)).extract_first()
                    data = etree.HTML(response_data)
                    # Team names
                    team_names = data.xpath('//div[@class="rt-event"]/@title')
                    # Full-game scores
                    f_scores = data.xpath('.//div[@class="rt-ft"]')
                    # First-half scores
                    h_scores = data.xpath('.//div[@class="rt-ht"]')
                    # Start times
                    stimes = data.xpath('//div[@class="rt-event"]/../div[1]/span/text()')
                    # Sub-market names
                    # odd_names = data.xpath('//div[@class="rt-sub rt-data-hide"]/table/tbody[2]/tr/td[2]')
                    # Sub-market results
                    # odd_plays = data.xpath('//div[@class="rt-sub rt-data-hide"]/table/tbody[2]/tr/td[3]/span')
                    match_ids = data.xpath('//div[@class="flex-wrap"]/../div[1]/@id')
                    # NOTE(review): selects all tbody elements here, unlike
                    # the tbody[2] used for football/basketball — confirm
                    # the baseball markup really differs.
                    odd_datas = data.xpath('//div[contains(@class, "rt-sub")]/table/tbody')
                    for y in range(len(odd_datas)):
                        match_id = match_ids[y].replace('e-', '')
                        league_id = league_id.replace('cmp-', '')
                        team_name = team_names[y].replace(' ', '').split('-')
                        # Sub-market names
                        odd_names = odd_datas[y].xpath('.//tr/td[2]')
                        # Sub-market results
                        odd_plays = odd_datas[y].xpath('.//tr/td[3]/span')
                        # Home team
                        h_name = team_name[0]
                        # Away team
                        a_name = team_name[1]
                        # First-half score
                        h_score = h_scores[y].xpath('string(.)').replace(' ', '').replace('\r\n', '')
                        # Full-game score
                        f_score = f_scores[y].xpath('string(.)').replace(' ', '').replace('\r\n', '')
                        # Regex for the HH:MM start time
                        pattern = re.compile(r"\d{1,3}:\d{1,3}")
                        match_time = pattern.findall(stimes[y])[0]
                        play_datas = []
                        if odd_names:
                            for i in range(len(odd_names)):
                                # Sub-market name
                                name = odd_names[i].text.replace(' ', '').replace('\r\n', '')
                                # Sub-market result
                                play = odd_plays[i].xpath('string(.)').replace(' ', '').replace('\r\n', '')
                                play_datas.append({"play_name": name, "play_result": play})
                        item = Hgsaiguo()
                        item["league_id"] = league_id
                        item["league_name"] = league_name
                        item["match_id"] = match_id
                        item["match_date"] = match_date
                        item["match_time"] = match_time
                        item["home_team"] = h_name
                        item["guest_team"] = a_name
                        item["score_half"] = h_score
                        item["score_full"] = f_score
                        item["play_data"] = play_datas
                        item["pt"] = pt
                        item["page"] = page
                        yield item