# saiguo.py

import datetime
import re

import scrapy
from lxml import etree

from ..items import Hgsaiguo


class HgjieshuSpider(scrapy.Spider):
    name = 'saiguo'
    to_day = datetime.datetime.now()
    allowed_domains = ['hg3535z.com']
    custom_settings = {
        "ITEM_PIPELINES": {
            'hg3535.pipeline.saiguo.Jieshuqiupipeline': 300,
        },
        # 'LOG_LEVEL': 'DEBUG',
        # 'LOG_FILE': "../hg3535/log/saiguo{}_{}_{}.log".format(to_day.year, to_day.month, to_day.day)
    }
    def start_requests(self):
        for y in range(1, 5):
            for z in range(1, 3):
                url = 'https://www.hg3535.cn/zh-cn/info-centre/sportsbook-info/results/{}/normal/{}'.format(y, z)
                yield scrapy.Request(url=url, callback=self.parse, dont_filter=True, meta={'pt': y, "page": z})
    def parse(self, response):
        if response.status == 200:
            pt = response.meta['pt']
            page = response.meta['page']
            if page == 1:
                us_datetime = datetime.datetime.now() - datetime.timedelta(hours=12)
            else:
                us_datetime = datetime.datetime.now() - datetime.timedelta(hours=36)
            match_date = us_datetime.strftime("%Y-%m-%d")
            # Football results
            if pt == 1:
                league_ids = response.xpath('//div[@class="rt-l-bar football"]/@id').extract()
                league_names = response.xpath('//div[@class="rt-l-bar football"]/span[@class="comp-txt"]/text()').extract()
                if league_ids:
                    for index in range(len(league_ids)):
                        league_id = league_ids[index]
                        league_name = league_names[index]
                        response_data = response.xpath('//div[@id="dt-{}"]'.format(league_id)).extract_first()
                        data = etree.HTML(response_data)
                        # Team names
                        # team_names = data.xpath('//div[@class="rt-event"]/@title')
                        h_names = data.xpath('//div[@class="rt-event"]/span[1]')
                        a_names = data.xpath('//div[@class="rt-event"]/span[3]')
                        # Full-time scores
                        f_scores = data.xpath('.//div[contains(@class, "rt-ft ")]')
                        # First-half scores
                        h_scores = data.xpath('.//div[contains(@class, "rt-ht ")]')
                        # Kick-off times
                        stimes = data.xpath('//div[@class="rt-event"]/../div[1]/span/text()')
                        match_ids = data.xpath('//div[@class="flex-wrap"]/../div[1]/@id')
                        odd_datas = data.xpath('//div[contains(@class, "rt-sub ")]/table/tbody[2]')
                        for y in range(len(odd_datas)):
                            match_id = match_ids[y].replace('e-', '')
                            league_id = league_id.replace('cmp-', '')
                            # Sub-market names
                            odd_names = odd_datas[y].xpath('.//tr/td[2]')
                            # Sub-market results
                            odd_plays = odd_datas[y].xpath('.//tr/td[3]/span')
                            # Home team
                            h_name = h_names[y].text
                            # Away team
                            a_name = a_names[y].text
                            # First-half score
                            h_score = h_scores[y].xpath('string(.)').replace(' ', '').replace('\r\n', '')
                            # Full-time score
                            f_score = f_scores[y].xpath('string(.)').replace(' ', '').replace('\r\n', '')
                            # Regex used to pull the HH:MM time out of the schedule string
                            pattern = re.compile(r"\d{1,3}:\d{1,3}")
                            match_time = pattern.findall(stimes[y])[0]
                            play_datas = []
                            if odd_names:
                                for i in range(len(odd_names)):
                                    name = odd_names[i].text
                                    plays = odd_plays[i].xpath('text()')
                                    if len(plays) == 2:
                                        play = '{}&&{}'.format(plays[0], plays[1])
                                    else:
                                        play = plays[0]
                                    play_datas.append({'play_name': name, 'play_result': play})
                            item = Hgsaiguo()
                            item["league_id"] = league_id
                            item["league_name"] = league_name
                            item["match_id"] = match_id
                            item["match_date"] = match_date
                            item["match_time"] = match_time
                            item["home_team"] = h_name
                            item["guest_team"] = a_name
                            item["score_half"] = h_score
                            item["score_full"] = f_score
                            item["play_data"] = play_datas
                            item["pt"] = pt
                            item["page"] = page
                            yield item
                else:
                    return
            # Basketball results
            if pt == 2:
                league_ids = response.xpath('//div[@class="rt-l-bar sportHasQuater"]/@id').extract()
                league_names = response.xpath('//div[@class="rt-l-bar sportHasQuater"]/span[@class="comp-txt"]/text()').extract()
                if league_ids:
                    for index in range(len(league_ids)):
                        league_id = league_ids[index]
                        league_name = league_names[index]
                        response_data = response.xpath('//div[@id="dt-{}"]'.format(league_id)).extract_first()
                        data = etree.HTML(response_data)
                        # Team names
                        h_names = data.xpath('//div[@class="rt-event"]/span[1]')
                        a_names = data.xpath('//div[@class="rt-event"]/span[3]')
                        # Full-game scores
                        f_scores = data.xpath('.//div[@class="rt-qft"]')
                        # First-half scores
                        h_scores = data.xpath('.//div[@class="rt-qt1"]')
                        # Second-half scores
                        x_scores = data.xpath('.//div[@class="rt-qt2"]')
                        # Tip-off times
                        stimes = data.xpath('//div[@class="rt-event"]/../div[1]/span/text()')
                        match_ids = data.xpath('//div[@class="flex-wrap"]/../div[1]/@id')
                        odd_datas = data.xpath('//div[contains(@class, "rt-sub ")]/table/tbody[2]')
                        for y in range(len(odd_datas)):
                            match_id = match_ids[y].replace('e-', '')
                            league_id = league_id.replace('cmp-', '')
                            # Sub-market rows
                            child_data = odd_datas[y].xpath('./tr')
                            # Home team
                            h_name = h_names[y].text
                            # Away team
                            a_name = a_names[y].text
                            # First-half score
                            h_score = h_scores[y].xpath('string(.)').replace(' ', '').replace('\r\n', '')
                            # Full-game score
                            f_score = f_scores[y].xpath('string(.)').replace(' ', '').replace('\r\n', '')
                            # Second-half score
                            x_score = x_scores[y].xpath('string(.)').replace(' ', '').replace('\r\n', '')
                            # Regex used to pull the HH:MM time out of the schedule string
                            pattern = re.compile(r"\d{1,3}:\d{1,3}")
                            match_time = pattern.findall(stimes[y])[0]
                            play_datas = []
                            if child_data:
                                h_dict = {'team_name': h_name}
                                a_dict = {'team_name': a_name}
                                for i in range(len(child_data)):
                                    if i == 0:
                                        h_datas = child_data[i].xpath('.//td/table/tbody/tr[3]/td[@class="r-odds"]')
                                        a_datas = child_data[i].xpath('.//td/table/tbody/tr[4]/td[@class="r-odds"]')
                                        rule = {0: "sc_1th", 1: "sc_2th", 2: "sc_3th", 3: "sc_4th", 4: "sc_other"}
                                        if h_datas and a_datas:
                                            for x in range(len(h_datas)):
                                                # Home team score for this quarter
                                                h_data = h_datas[x].text.replace(' ', '').replace('\r\n', '')
                                                h_dict[rule[x]] = h_data
                                                # Away team score for this quarter
                                                a_data = a_datas[x].text.replace(' ', '').replace('\r\n', '')
                                                a_dict[rule[x]] = a_data
                                    else:
                                        # Sub-market name
                                        child_name = child_data[i].xpath('.//td[contains(@class, "r-bt ")]/text()')[0].replace(' ', '').replace('\r\n', '')
                                        # Sub-market result
                                        child_play = child_data[i].xpath('.//td[@class="r-odds"]/span[@class="prop"]/text()')[0]
                                        play_datas.append({"play_name": child_name, "play_result": child_play})
                                play_datas.append(h_dict)
                                play_datas.append(a_dict)
                            item = Hgsaiguo()
                            item["league_id"] = league_id
                            item["league_name"] = league_name
                            item["match_id"] = match_id
                            item["match_date"] = match_date
                            item["match_time"] = match_time
                            item["home_team"] = h_name
                            item["guest_team"] = a_name
                            item["score_half"] = h_score
                            item["score_result"] = f_score
                            item["play_data"] = play_datas
                            item["pt"] = pt
                            item["page"] = page
                            item["score_below"] = x_score
                            yield item
                else:
                    return
            # Tennis results
            if pt == 3:
                league_ids = response.xpath('//div[@class="rt-l-bar tennis"]/@id').extract()
                league_names = response.xpath('//div[@class="rt-l-bar tennis"]/span[@class="comp-txt"]/text()').extract()
                if league_ids:
                    for index in range(len(league_ids)):
                        league_id = league_ids[index]
                        league_name = league_names[index]
                        response_data = response.xpath('//div[@id="dt-{}"]'.format(league_id)).extract_first()
                        data = etree.HTML(response_data)
                        # Player/team names
                        h_names = data.xpath('//div[@class="rt-event"]/span[1]')
                        a_names = data.xpath('//div[@class="rt-event"]/span[3]')
                        # Final results (sets)
                        f_scores = data.xpath('.//div[@class="rt-set"]')
                        # Start times
                        stimes = data.xpath('//div[@class="rt-event"]/../div[1]/span/text()')
                        # Match ids
                        match_ids = data.xpath('//div[@class="flex-wrap"]/../div[1]/@id')
                        # Detailed result info: set scores, etc.
                        odd_datas = data.xpath('//div[contains(@class, "rt-sub ")]/table/tbody')
                        for y in range(len(odd_datas)):
                            match_id = match_ids[y].replace('e-', '')
                            league_id = league_id.replace('cmp-', '')
                            # Sub-market rows
                            child_data = odd_datas[y].xpath('./tr')
                            # Home player/team
                            h_name = h_names[y].text
                            # Away player/team
                            a_name = a_names[y].text
                            # Final result
                            f_score = f_scores[y].xpath('string(.)').replace(' ', '').replace('\r\n', '')
                            # Regex used to pull the HH:MM time out of the schedule string
                            pattern = re.compile(r"\d{1,3}:\d{1,3}")
                            match_time = pattern.findall(stimes[y])[0]
                            play_datas = []
                            if child_data:
                                rule = {0: "sc_1th", 1: "sc_2th", 2: "sc_3th", 3: "sc_4th", 4: "sc_5th", 5: "game_num", 6: "disc_num"}
                                h_dict = {'team_name': h_name}
                                a_dict = {'team_name': a_name}
                                for i in range(len(child_data)):
                                    if i == 0:
                                        h_datas = child_data[i].xpath('.//tbody/tr[3]/td[contains(@class, "r-odds")]')
                                        a_datas = child_data[i].xpath('.//tbody/tr[4]/td[contains(@class, "r-odds")]')
                                        if h_datas and a_datas:
                                            for x in range(len(h_datas)):
                                                # Home score for this set
                                                h_data = h_datas[x].text.replace(' ', '').replace('\r\n', '')
                                                h_dict[rule[x]] = h_data
                                                # Away score for this set
                                                a_data = a_datas[x].text.replace(' ', '').replace('\r\n', '')
                                                a_dict[rule[x]] = a_data
                                    else:
                                        # Sub-market name
                                        child_name = child_data[i].xpath('.//td[contains(@class, "r-bt ")]/text()')[0].replace(' ', '').replace('\r\n', '')
                                        # Sub-market result
                                        child_play = child_data[i].xpath('.//td[@class="r-odds"]/span[@class="prop"]')[0]
                                        play = child_play.xpath('string(.)')
                                        play_datas.append({"play_name": child_name, "play_result": play})
                                play_datas.append(h_dict)
                                play_datas.append(a_dict)
                            item = Hgsaiguo()
                            item["league_id"] = league_id
                            item["league_name"] = league_name
                            item["match_id"] = match_id
                            item["match_date"] = match_date
                            item["match_time"] = match_time
                            item["home_team"] = h_name
                            item["guest_team"] = a_name
                            item["score_result"] = f_score
                            item["play_data"] = play_datas
                            item["pt"] = pt
                            item["page"] = page
                            yield item
                else:
                    return
            # Baseball results
            if pt == 4:
                league_ids = response.xpath('//div[@class="rt-l-bar baseball"]/@id').extract()
                league_names = response.xpath('//div[@class="rt-l-bar baseball"]/span[@class="comp-txt"]/text()').extract()
                if league_ids:
                    for index in range(len(league_ids)):
                        league_id = league_ids[index]
                        league_name = league_names[index]
                        response_data = response.xpath('//div[@id="dt-{}"]'.format(league_id)).extract_first()
                        data = etree.HTML(response_data)
                        # Team names
                        h_names = data.xpath('//div[@class="rt-event"]/span[1]')
                        a_names = data.xpath('//div[@class="rt-event"]/span[3]')
                        # Full-game scores
                        f_scores = data.xpath('.//div[@class="rt-ft"]')
                        # First-half scores
                        h_scores = data.xpath('.//div[@class="rt-ht"]')
                        # Start times
                        stimes = data.xpath('//div[@class="rt-event"]/../div[1]/span/text()')
                        # Sub-market names
                        # odd_names = data.xpath('//div[@class="rt-sub rt-data-hide"]/table/tbody[2]/tr/td[2]')
                        # Sub-market results
                        # odd_plays = data.xpath('//div[@class="rt-sub rt-data-hide"]/table/tbody[2]/tr/td[3]/span')
                        match_ids = data.xpath('//div[@class="flex-wrap"]/../div[1]/@id')
                        odd_datas = data.xpath('//div[contains(@class, "rt-sub")]/table/tbody')
                        for y in range(len(odd_datas)):
                            match_id = match_ids[y].replace('e-', '')
                            league_id = league_id.replace('cmp-', '')
                            # Sub-market names
                            odd_names = odd_datas[y].xpath('.//tr/td[2]')
                            # Sub-market results
                            odd_plays = odd_datas[y].xpath('.//tr/td[3]/span')
                            # Home team
                            h_name = h_names[y].text
                            # Away team
                            a_name = a_names[y].text
                            # First-half score
                            h_score = h_scores[y].xpath('string(.)').replace(' ', '').replace('\r\n', '')
                            # Full-game score
                            f_score = f_scores[y].xpath('string(.)').replace(' ', '').replace('\r\n', '')
                            # Regex used to pull the HH:MM time out of the schedule string
                            pattern = re.compile(r"\d{1,3}:\d{1,3}")
                            match_time = pattern.findall(stimes[y])[0]
                            play_datas = []
                            if odd_names:
                                for i in range(len(odd_names)):
                                    # Sub-market name
                                    name = odd_names[i].text.replace(' ', '').replace('\r\n', '')
                                    # Sub-market result
                                    play = odd_plays[i].xpath('string(.)').replace(' ', '').replace('\r\n', '')
                                    play_datas.append({"play_name": name, "play_result": play})
                            item = Hgsaiguo()
                            item["league_id"] = league_id
                            item["league_name"] = league_name
                            item["match_id"] = match_id
                            item["match_date"] = match_date
                            item["match_time"] = match_time
                            item["home_team"] = h_name
                            item["guest_team"] = a_name
                            item["score_half"] = h_score
                            item["score_full"] = f_score
                            item["play_data"] = play_datas
                            item["pt"] = pt
                            item["page"] = page
                            yield item
                else:
                    return
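
# ---------------------------------------------------------------------------
# For reference only: a minimal sketch of what the Hgsaiguo item imported from
# ../items.py might look like, inferred purely from the fields this spider
# assigns above. The real definition lives in items.py and may differ; the
# field comments are assumptions based on how each field is populated here.
# Kept commented out so it cannot shadow the imported class.
#
# import scrapy
#
# class Hgsaiguo(scrapy.Item):
#     league_id = scrapy.Field()
#     league_name = scrapy.Field()
#     match_id = scrapy.Field()
#     match_date = scrapy.Field()
#     match_time = scrapy.Field()
#     home_team = scrapy.Field()
#     guest_team = scrapy.Field()
#     score_half = scrapy.Field()    # first-half score (football/basketball/baseball)
#     score_full = scrapy.Field()    # full-time score (football/baseball)
#     score_result = scrapy.Field()  # final result (basketball/tennis)
#     score_below = scrapy.Field()   # second-half score (basketball)
#     play_data = scrapy.Field()     # list of sub-market dicts built in parse()
#     pt = scrapy.Field()            # sport type: 1 football, 2 basketball, 3 tennis, 4 baseball
#     page = scrapy.Field()          # results page index (1 or 2, see the 12h/36h offset above)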