Browse Source

新增网球爬虫

Your Name committed 6 years ago
parent commit d24c6bfe93

+ 2 - 2
hgg070_spider/main.py

@@ -8,10 +8,10 @@ sys.path.append(os.path.dirname(os.path.abspath(__file__)))
 # execute(["scrapy", "crawl", "lanqiu"])
 # execute(["scrapy", "crawl", "lq_sports"])
 # execute(["scrapy", "crawl", "guanjun"])
-execute(["scrapy", "crawl", "wangqiu"])
+# execute(["scrapy", "crawl", "wangqiu"])
 # execute(["scrapy", "crawl", "wqbodan"])
 # execute(["scrapy", "crawl", "bangqiu"])
-# execute(["scrapy", "crawl", "roll_zuqiu"]) # 滚球足球 回来要解开这个注释 其他全部解封
+execute(["scrapy", "crawl", "roll_zuqiu"]) # 滚球足球 回来要解开这个注释 其他全部解封
 # execute(["scrapy", "crawl", "roll_lanqiu"]) #滚球篮球
 # execute(["scrapy", "crawl", "roll_wangqiu"]) #滚球网球
 # execute(["scrapy", "crawl", "roll_bangqiu"])  # 滚球棒球

+ 32 - 38
hgg070_spider/pipelines/lanqiu.py

@@ -1,9 +1,11 @@
 import logging
-from twisted.internet import defer,reactor
+from twisted.internet import defer, reactor
 from ..utils.helper import Helper
-from ..settings import LEAGUE_URL,MATCH_URL
+from ..settings import LEAGUE_URL, MATCH_URL
 import pymongo
-from ..settings import M_HOST,M_USER,M_PASSWORD,M_POST,M_DB
+from ..settings import M_HOST, M_USER, M_PASSWORD, M_POST, M_DB
+
+
 class ZuqiuPipeline(object):
     def open_spider(self, spider):
         self.mongo = pymongo.MongoClient(host=M_HOST, username=M_USER, password=M_PASSWORD, port=M_POST,
@@ -11,42 +13,43 @@ class ZuqiuPipeline(object):
         self.db = self.mongo[M_DB]
 
     @defer.inlineCallbacks
-    def process_item(self,item,spider):
-        logger=logging.getLogger(__name__)
+    def process_item(self, item, spider):
+        logger = logging.getLogger(__name__)
         logger.info("进入管道")
-        out=defer.Deferred()
-        reactor.callInThread(self._do_calculation,item,out)
+        out = defer.Deferred()
+        reactor.callInThread(self._do_calculation, item, out)
         yield out
 
-
-    def _do_calculation(self,item,out):
-        #先保存联赛
+    def _do_calculation(self, item, out):
+        # 先保存联赛
         league_name = item['league']
         uuid = Helper.genearte_uuid(league_name)
-        type=item['showtype']
-        is_rollball,is_today,is_morningplate = 0,0,0
-        if type=="FT":
-            is_today=1
-        elif type=="FU":
-            is_morningplate=1
-        elif type=="RB":
-            is_rollball=1
+        type = item['showtype']
+        is_rollball, is_today, is_morningplate = 0, 0, 0
+        if type == "FT":
+            is_today = 1
+        elif type == "FU":
+            is_morningplate = 1
+        elif type == "RB":
+            is_rollball = 1
         else:
-            is_stringscene=1
-        league_key = ["name_chinese", "kind", "match_mode", "if_stop", "last_time", "lg_id", "source", "uuid","is_rollball","is_today","is_morningplate","is_stringscene"]
-        league_value = [league_name, "1", "1", "0", item['datetime'], item['id'], "hgg070", uuid,is_rollball,is_today,is_morningplate,is_stringscene]
-        #赛事
+            is_stringscene = 1
+        league_key = ["name_chinese", "kind", "match_mode", "if_stop", "last_time", "lg_id", "source", "uuid",
+                      "is_rollball", "is_today", "is_morningplate", "is_stringscene"]
+        league_value = [league_name, "1", "1", "0", item['datetime'], item['id'], "hgg070", uuid, is_rollball, is_today,
+                        is_morningplate, is_stringscene]
+        # 赛事
         childer = dict(zip(league_key, league_value))
-        #联赛
-        obj = {"game_code": "lq", "title": "league", "source": "hgg070","data":[childer]}
-        res=Helper.async_post(LEAGUE_URL,obj)
+        # 联赛
+        obj = {"game_code": "lq", "title": "league", "source": "hgg070", "data": [childer]}
+        res = Helper.async_post(LEAGUE_URL, obj)
 
         if res:
-            if res.get('status')==1:
+            if res.get('status') == 1:
                 logging.warning("联赛提交成功,{}".format(res))
-                #提交赛事
-                lres=Helper.async_post(MATCH_URL,childer)
-                if lres.get('status')==1:
+                # 提交赛事
+                lres = Helper.async_post(MATCH_URL, childer)
+                if lres.get('status') == 1:
                     logging.warning("联赛提交成功,{}".format(res))
                 else:
                     logging.warning("联赛提交失败,{}".format(res))
@@ -55,12 +58,3 @@ class ZuqiuPipeline(object):
                 logging.warning("联赛提交失败,{}".format(res))
         else:
             logging.warning("联赛提交失败,{}".format(res))
-
-
-
-
-
-
-
-
-

+ 160 - 0
hgg070_spider/pipelines/roll_zuqiu.py

@@ -0,0 +1,160 @@
+import datetime
+import json
+import time
+import logging
+import pymongo
+from twisted.internet import defer, reactor
+from ..utils.helper import Helper
+from ..settings import M_HOST, M_USER, M_PASSWORD, M_POST, M_DB, LEAGUE_URL, ODDS_URL, MATCH_URL, MATCH_RESULT
+
+
class RollPipeline(object):
    """Item pipeline for the ``roll_zuqiu`` (in-play football) spider.

    For each scraped game it:
      1. submits the league to ``LEAGUE_URL`` (once; cached in MongoDB),
      2. submits the match to ``MATCH_URL`` (once; cached in MongoDB),
      3. builds and submits the detailed odds list to ``ODDS_URL``,
      4. submits a live-result record to ``MATCH_RESULT``.
    """

    def open_spider(self, spider):
        # One Mongo client for the spider's lifetime, authenticating against M_DB.
        self.mongo = pymongo.MongoClient(host=M_HOST, username=M_USER, password=M_PASSWORD, port=M_POST,
                                         authSource=M_DB)
        self.db = self.mongo[M_DB]
        # Odds-code mapping table shipped with the project; path is relative to
        # the working directory the spider is launched from.
        with open('./conf/zuqiu.json', 'r', encoding='utf8') as zq:
            self.hgg070 = json.load(zq)

    # NOTE(review): the deferred/threaded variant below was disabled; the
    # pipeline currently runs synchronously inside the reactor thread.
    # @defer.inlineCallbacks
    # def process_item(self, item, spider):
    #     out = defer.Deferred()
    #     reactor.callInThread(self._do_calculation, item, out)
    #     yield out

    def process_item(self, item, spider):
        """Process one spider item.

        ``item['data']`` is the parsed ``<game>`` dict from the hgg070 XML
        feed; ``item['tag']`` is that game's ``more_count`` value.
        """
        logger = logging.getLogger(__name__)
        match_all = item['data']
        team_h, team_c = match_all['team_h'], match_all['team_c']
        league_name, league_id = match_all['league'], match_all['gidm']
        us_time, re_time = match_all['datetime'], match_all['re_time']
        match_id = match_all['gid']
        tag_number = item['tag']
        uuid = Helper.genearte_uuid(league_name)
        league_list = []
        pt = 4
        # Leagues are treated as valid until the end of the current year.
        last_time = '{}-12-31 23:59:59'.format(datetime.datetime.now().year)
        match_date, match_time, time3 = Helper.change_time(us_time)
        utime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())

        # --- 1) league: submit only if not seen before -------------------
        if self.db.zq_league070.find({'lg_id': league_id}).count() < 1:
            league_dict = {"game_code": "zq", "title": "league", "source": "hgg070"}
            league_key = ["name_chinese", "kind", "match_mode", "if_stop", "last_time", "lg_id", "source", "uuid"]
            league_value = [league_name, "1", "1", "0", last_time, league_id, "hgg070", uuid]
            league_data = dict(zip(league_key, league_value))
            league_list.append(league_data)
            league_dict['data'] = league_list
            res = Helper.async_post(LEAGUE_URL, league_dict)
            if res:
                if res.get('status') == 1:
                    # Cache locally so the league is not re-submitted.
                    self.db.zq_league070.insert(league_data)
                    logging.info('足球联赛提交, {}'.format(res))
            else:
                logging.warning('足球联赛接口异常, {}'.format(res))
        else:
            logging.info('{},联赛已存在, 不提交'.format(league_name))

        # --- 2) match: submit only if not seen before --------------------
        json_key = 'zq_rollball'
        match_list = []
        match_identity = Helper.genearte_uuid(team_h + team_c + match_date)
        if self.db.zq_competition070.find({'match_id': match_id, 'is_rollball': 1}).count() < 1:
            match_dict = {"game_code": "zq", "title": "match", "source": "hgg070"}
            match_kay = ["home_team", "guest_team", "lg_id", "status", "match_id", "match_date", "match_time",
                         "tag", "source", "is_rollball", "is_morningplate", "is_stringscene", "us_time", "uuid",
                         "half_match_id", "is_today", "is_horn", 'match_identity']
            match_value = [team_h, team_c, league_id, 0, match_id, match_date, match_time, tag_number,
                           "hgg070", 1, 0, 0, us_time, uuid, 0, 0, 0,
                           match_identity]
            match_data = dict(zip(match_kay, match_value))
            match_list.append(match_data)
            match_dict['data'] = match_list
            res = Helper.async_post(MATCH_URL, match_dict)
            if res:
                if res.get('status') == 1:
                    self.db.zq_competition070.insert(match_data)
                    logging.info('足球赛事提交, {}'.format(res))
                else:
                    logger.warning('足球赛事表提交失败, {}'.format(res))
            else:
                logger.warning('足球赛事接口异常提交失败, {}'.format(res))
        else:
            logger.info('足球赛事已存在,不提交')

        # --- 3) detailed odds --------------------------------------------
        data_list = []
        for i in self.hgg070[json_key]:
            r_code = i['prodds']   # feed flag telling whether this group is open
            p_code = i['plodds']   # platform group code
            if match_all.get(r_code) == 'Y':
                for y in i['items']:
                    odd = match_all.get(y['rodds'])
                    if odd:
                        code = y['lodds']
                        # `sole` identifies the market, `odds_only` the exact quote.
                        sole_str = p_code + code + '0' + str(match_id) + 'hgg070'
                        sole = Helper.genearte_MD5(sole_str, pt)
                        ratio_name = y['ratio_name']
                        if ratio_name:
                            condition = match_all[ratio_name]
                        else:
                            condition = y['latio']
                        hash_str = p_code + code + '0' + str(odd) + "hgg070" + str(match_id)
                        odds_only = Helper.genearte_MD5(hash_str, pt)
                        odd_key = ["match_id", "lg_id", "odds_code", "status", "sort", "p_code", "odds",
                                   "condition", "odds_only", "sole", "source", "type", "team"]
                        odd_value = [match_id, league_id, code, '0', '0', code, odd, condition,
                                     odds_only, sole, 'hgg070', '0', '']
                        odd_dict = dict(zip(odd_key, odd_value))
                        data_list.append(odd_dict)

        odds_key = ["game_code", "title", "match_id", "lg_id", "data", "source", "odds_only", "tag", "uuid",
                    "is_stringscene", "utime", "pt", 'match_identity']
        odds_value = ["zq", "odds", match_id, league_id, data_list, "hgg070", '', tag_number, uuid,
                      0, utime, pt, match_identity]
        odds_dict = dict(zip(odds_key, odds_value))
        if data_list:
            res = Helper.async_post(ODDS_URL, odds_dict)
            if res:
                if res.get('status') == 1:
                    logger.info('足球详细赔率提交成功, {}'.format(res))
                else:
                    logger.warning('足球详细赔率提交失败, {}'.format(res))
            else:
                logging.warning('足球详细赔率接口异常, {}'.format(res))
        else:
            logger.info('足球详细赔率列表为空')

        # --- 4) live result ----------------------------------------------
        # BUG FIX: the original referenced seven names that were never defined
        # (score_home, score_guest, all_goal, match_score, time_game, half_way,
        # number), so every item died here with NameError. The values are now
        # taken from the scraped game dict; the exact hgg070 field names are
        # assumed -- TODO(review): confirm against a live feed sample.
        score_home = match_all.get('score_h', '')
        score_guest = match_all.get('score_c', '')
        all_goal = match_all.get('score', '')          # presumably total goals -- confirm
        match_score = '{}-{}'.format(score_home, score_guest)
        time_game = re_time                            # running match clock from the feed
        half_way = match_all.get('nowsession', '')     # current period marker -- confirm
        number = tag_number

        data_list = []
        zq_rball = {"home_team": team_h, "guest_team": team_c,
                    "lg_id": league_id, "home_rate": 0,
                    "guest_rate": 0, "home_score": score_home,
                    "guest_score": score_guest, "all_goal": all_goal, "status": 1,
                    "first_score": "", "last_score": "", "match_score": match_score, "uuid": uuid,
                    "match_winer": "", "match_time": time_game, 'match_identity': match_identity,
                    "match_process": half_way, "tag": number,
                    "match_id": match_id, "p_code": ""}
        data_list.append(zq_rball)
        # NOTE(review): "hg3535" and collection name "zq_match_result35" differ
        # from the "hgg070" used everywhere else -- looks copy-pasted from the
        # hg3535 pipeline; left unchanged pending confirmation.
        r_data_dict = {
            "game_code": "zq",
            "title": "match_result_r",
            "source": "hg3535",
            "data": data_list
        }
        if data_list:
            try:
                res = Helper.async_post(MATCH_RESULT, r_data_dict)
                if res:
                    if res.get('status') == 1:
                        logger.info('足球滚球结果记录提交成功, {}'.format(res))
                        self.db.zq_match_result35.insert(zq_rball)
                    else:
                        logger.warning('足球滚球结果记录提交失败, {}'.format(res))
                else:
                    logger.warning('足球滚球结果记录接口异常,提交失败, {}'.format(res))
            except Exception as e:
                logger.warning('滚球数据接口异常,提交失败, {}'.format(e))
        # reactor.callFromThread(out.callback, item)

+ 51 - 45
hgg070_spider/spiders/lq_sports.py

@@ -3,25 +3,27 @@ import scrapy
 from ..items import LanqiuItem
 import copy
 import lxml.etree
-import re,os,json
+import re, os, json
 from ..utils.helper import Helper
 import time
 from ..items import LanqiuItem
 import xmltodict
 
+
 class LqSportsSpider(scrapy.Spider):
     name = 'lq_sports'
     allowed_domains = ['m.hgg070.com/']
     start_urls = ['http://m.hgg070.com//']
     remath = re.compile("篮球")
+
     # custom_settings={
     #     "ITEM_PIPELINES": {
     #         "hgg070_spider.pipelines.lq_sports.LqSportsPipeline": 200,
     #     },
     # }
     def start_requests(self):
-        #今日,早盘
-        h_types=[('FT'),('FU')]
+        # 今日,早盘
+        h_types = [('FT'), ('FU')]
         headers = {
             'Accept': '*/*',
             'Accept-Encoding': 'gzip, deflate',
@@ -38,7 +40,7 @@ class LqSportsSpider(scrapy.Spider):
         url = "http://m.hgg070.com/app/member/get_league_list.php"
         for item in h_types:
             showtype = item
-            data={
+            data = {
                 'uid': '3970335d20df9b8ceca8673ae9b6ea910c912492f595c0ef163623ae0ea883b6',
                 'langx': 'zh-cn',
                 'ltype': '3',
@@ -48,50 +50,51 @@ class LqSportsSpider(scrapy.Spider):
                 'date': '',
                 'isP': ''
             }
-            yield scrapy.FormRequest(url=url,formdata=data,callback=self.parse,headers=headers,
-                                      meta={"data":data}, dont_filter=True)
+            yield scrapy.FormRequest(url=url, formdata=data, callback=self.parse, headers=headers,
+                                     meta={"data": data}, dont_filter=True)
 
     def parse(self, response):
-        #获取id并判断抓取的球型
-        data=response.meta["data"]
-        fromdata=copy.deepcopy(data)
-        league=response.xpath('//league')
-        url="http://m.hgg070.com/app/member/get_game_list.php"
+        # 获取id并判断抓取的球型
+        data = response.meta["data"]
+        fromdata = copy.deepcopy(data)
+        league = response.xpath('//league')
+        url = "http://m.hgg070.com/app/member/get_game_list.php"
         for le in league:
-            name=le.xpath('./league_name/text()').extract_first()
-            if len(self.remath.findall(name))>0:
+            name = le.xpath('./league_name/text()').extract_first()
+            if len(self.remath.findall(name)) > 0:
                 lid = le.xpath('./league_id/text()').extract_first()
                 # 抓取今日
-                if data["showtype"]=="FT":
-                    data['lid'],data['sorttype'],data['date']=lid,'league',''
+                if data["showtype"] == "FT":
+                    data['lid'], data['sorttype'], data['date'] = lid, 'league', ''
                 # 抓取早盘
-                elif data["showtype"]=="FU":
+                elif data["showtype"] == "FU":
                     data['lid'], data['sorttype'], data['date'] = lid, 'league', 'all'
-                yield scrapy.FormRequest(url=url,formdata=data,callback=self.detailball,meta={"data":fromdata},dont_filter=True)
+                yield scrapy.FormRequest(url=url, formdata=data, callback=self.detailball, meta={"data": fromdata},
+                                         dont_filter=True)
 
-    def detailball(self,response):
-        data=response.meta["data"]
-        url="http://m.hgg070.com/app/member/get_game_more.php"
-        #获取联赛id gid
-        game=response.xpath("//game")
+    def detailball(self, response):
+        data = response.meta["data"]
+        url = "http://m.hgg070.com/app/member/get_game_more.php"
+        # 获取联赛id gid
+        game = response.xpath("//game")
         for g in game:
-            gid=g.xpath("./gid/text()").extract_first()
+            gid = g.xpath("./gid/text()").extract_first()
             more_count = g.xpath("./more_count/text()").extract_first()
-            data["gid"]=gid
-            yield scrapy.FormRequest(url=url,formdata=data,callback=self.getItem,meta={"more_count":more_count,"isP":data["isP"]},dont_filter=True)
-
+            data["gid"] = gid
+            yield scrapy.FormRequest(url=url, formdata=data, callback=self.getItem,
+                                     meta={"more_count": more_count, "isP": data["isP"]}, dont_filter=True)
 
-    def getItem(self,response):
+    def getItem(self, response):
         more_count = response.meta["more_count"]
         isP = response.meta["isP"]
-        data= xmltodict.parse(response.text)['serverresponse']['game']
-        game_lists=[i for i in data if i['gopen']=='Y']
+        data = xmltodict.parse(response.text)['serverresponse']['game']
+        game_lists = [i for i in data if i['gopen'] == 'Y']
 
         if game_lists:
             for gl in game_lists:
-                cpath=os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-                with open(cpath+"/conf/hgg070.json",encoding='utf8') as hg:
-                    hgg=json.load(hg)['bk']
+                cpath = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+                with open(cpath + "/conf/hgg070.json", encoding='utf8') as hg:
+                    hgg = json.load(hg)['bk']
                 datetime = gl['datetime'][:-8] + " " + gl['datetime'][-8:]
                 team_h = gl['team_h']
                 team_c = gl['team_c']
@@ -100,16 +103,17 @@ class LqSportsSpider(scrapy.Spider):
                 match_uid = Helper.genearte_uuid(team_h + team_c + datetime)
                 data = []
                 for hg in hgg:
-                    items=hg['items']
-                    if gl[hg['prodds']]=='Y':
+                    items = hg['items']
+                    if gl[hg['prodds']] == 'Y':
                         for x in items:
                             odds_code = gl[x['rodds']]
                             p_code = gl[hg['prodds']]
-                            odds=gl["ior_OUH"]
-                            #有两个条件,加两条数据
-                            if x['ratio_name']:      #大的
-                                condition_u=gl[x['ratio_name']]
-                                odds_only = hg["plodds"] + x["lodds"] + '0' + condition_u + str(odds) + "hg3535" + str(match_id)
+                            odds = gl["ior_OUH"]
+                            # 有两个条件,加两条数据
+                            if x['ratio_name']:  # 大的
+                                condition_u = gl[x['ratio_name']]
+                                odds_only = hg["plodds"] + x["lodds"] + '0' + condition_u + str(odds) + "hg3535" + str(
+                                    match_id)
                                 sole = hg["plodds"] + x["lodds"] + '0' + str(match_id) + "hg3535"
                                 tobj = {"match_id": match_id, "lg_id": league_id, "odds_code": odds_code, "status": 0,
                                         "sort": 0, "p_code": p_code,
@@ -117,23 +121,25 @@ class LqSportsSpider(scrapy.Spider):
                                         "source": "hgg070", "type": 0, "team": ""}
                                 data.append(tobj)
 
-                            if x['latio']:   #小的
+                            if x['latio']:  # 小的
                                 condition_s = gl[x['latio']]
-                                odds_only =hg["plodds"] + x["lodds"] + '0' +condition_s + str(odds) + "hg3535" + str(match_id)
+                                odds_only = hg["plodds"] + x["lodds"] + '0' + condition_s + str(odds) + "hg3535" + str(
+                                    match_id)
                                 sole = hg["plodds"] + x["lodds"] + '0' + str(match_id) + "hg3535"
                                 tobj = {"match_id": match_id, "lg_id": league_id, "odds_code": odds_code, "status": 0,
                                         "sort": 0, "p_code": p_code,
-                                        "odds": odds,"condition": condition_s, "odds_only": odds_only, "sole": sole,
+                                        "odds": odds, "condition": condition_s, "odds_only": odds_only, "sole": sole,
                                         "source": "hgg070", "type": 0, "team": ""}
                                 data.append(tobj)
 
                             if not x['latio'] and not x['ratio_name']:
                                 condition_s = ''
-                                odds_only = hg["plodds"] + x["lodds"] + '0' +condition_s + str(odds) + "hg3535" + str(match_id)
+                                odds_only = hg["plodds"] + x["lodds"] + '0' + condition_s + str(odds) + "hg3535" + str(
+                                    match_id)
                                 sole = hg["plodds"] + x["lodds"] + '0' + str(match_id) + "hg3535"
                                 tobj = {"match_id": match_id, "lg_id": league_id, "odds_code": odds_code, "status": 0,
                                         "sort": 0, "p_code": p_code,
-                                        "odds": odds,"condition": condition_s, "odds_only": odds_only, "sole": sole,
+                                        "odds": odds, "condition": condition_s, "odds_only": odds_only, "sole": sole,
                                         "source": "hgg070", "type": 0, "team": ""}
                                 data.append(tobj)
 
@@ -150,5 +156,5 @@ class LqSportsSpider(scrapy.Spider):
                     item['team_h'] = team_h
                     item['team_c'] = team_c
                     item['isP'] = isP
-                    print('wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww',item)
+                    print('wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww', item)
                     yield item

+ 109 - 0
hgg070_spider/spiders/roll_zuqiu.py

@@ -0,0 +1,109 @@
+# -*- coding: utf-8 -*-
+import logging
+# import lxml
+import scrapy
+import xmltodict
+
+from ..items import ZuqiuItem
+
+
class ZuqiuSpider(scrapy.Spider):
    """In-play ("rolling", showtype RB) football spider for m.hgg070.com.

    Request flow: league list -> game list per league -> detailed odds per
    game; open games are emitted as ``ZuqiuItem`` for ``RollPipeline``.
    """
    name = 'roll_zuqiu'
    allowed_domains = ['m.hgg070.com']
    custom_settings = {
        "ITEM_PIPELINES": {
            "hgg070_spider.pipelines.roll_zuqiu.RollPipeline": 200,
        },
        # 'LOG_LEVEL': 'DEBUG',
        # 'LOG_FILE': cpath + "/log/sports_{}_{}_{}.log".format(to_day.year, to_day.month, to_day.day)
    }

    # NOTE(review): this class-level dict is shared by all requests and mutated
    # below (Content-Length). Scrapy recomputes Content-Length itself, so the
    # manual value is redundant; kept for parity with the captured traffic.
    headers = {
        'Accept': '*/*',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'zh-CN,zh;q=0.9',
        'Content-Length': '130',
        'Content-type': 'application/x-www-form-urlencoded',
        'Cookie': '_ga=GA1.2.1009358217.1572056223; _gid=GA1.2.97506800.1572056223; _gat=1',
        'Host': 'm.hgg070.com',
        'Origin': 'http://m.hgg070.com',
        'Proxy-Connection': 'keep-alive',
        'Referer': 'http://m.hgg070.com/',
        'User-Agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/11.0 Mobile/15A372 Safari/604.1'
    }

    def start_requests(self):
        """Fetch the rolling-ball (RB) league list.

        NOTE(review): ``uid`` is a hard-coded session token and will expire --
        consider obtaining it via a login request.
        """
        url = "http://m.hgg070.com/app/member/get_league_list.php"
        from_data = {
            'uid': '9965a18b03dc6aacf12290bd5b8267fd7e38ec76eadf899b6108e82da5974cdd',
            'langx': 'zh-cn',
            'ltype': '3',
            'gtype': 'FT',
            'showtype': 'RB',
            'sorttype': '',
            'date': '',
            'isP': ''
        }
        yield scrapy.FormRequest(url=url, formdata=from_data, callback=self.parse, headers=self.headers,
                                 dont_filter=True)

    def parse(self, response):
        """For every league in the XML response, request its game list."""
        leagues = response.xpath('//serverresponse/game/league')
        url = 'http://m.hgg070.com/app/member/get_game_list.php'
        if not leagues:
            # BUG FIX: was a bare print(); use the spider logger so the
            # condition shows up in the crawl log.
            self.logger.warning('未获取到联赛id')
            return
        self.headers['Content-Length'] = '141'
        for league in leagues:
            lid = league.xpath('.//league_id/text()').extract_first()
            from_data = {
                'uid': '9965a18b03dc6aacf12290bd5b8267fd7e38ec76eadf899b6108e82da5974cdd',
                'langx': 'zh-cn',
                'ltype': '3',
                'gtype': 'FT',
                'showtype': 'RB',
                'lid': lid,
                'sorttype': '',
                'date': '',
                'isP': ''
            }
            yield scrapy.FormRequest(url=url, formdata=from_data, callback=self.parse_match,
                                     headers=self.headers, dont_filter=True)

    def parse_match(self, response):
        """For every game id in a league's game list, request detailed odds."""
        url = 'http://m.hgg070.com/app/member/get_game_more.php'
        self.headers['Content-Length'] = '132'
        gids = response.xpath('//serverresponse/game/gid/text()').extract()
        tags = response.xpath('//serverresponse/game/more_count/text()').extract()
        # BUG FIX: iterate the two node lists in lockstep instead of indexing
        # tags[i], which raised IndexError whenever a <game> lacked more_count.
        for gid, tag in zip(gids, tags):
            from_data = {
                'uid': '9965a18b03dc6aacf12290bd5b8267fd7e38ec76eadf899b6108e82da5974cdd',
                'langx': 'zh-cn',
                'ltype': '3',
                'gtype': 'FT',
                'showtype': 'RB',
                'date': '',
                'isP': '',
                'gid': gid,
            }
            yield scrapy.FormRequest(url=url, formdata=from_data, callback=self.parse_odds, headers=self.headers,
                                     meta={'tag': tag}, dont_filter=True)

    def parse_odds(self, response):
        """Parse one game's detailed odds XML and emit an item if the market is open."""
        logger = logging.getLogger(__name__)
        tag = response.meta['tag']
        game = xmltodict.parse(response.text)
        # xmltodict yields a list for repeated <game> elements but a plain dict
        # for a single one -- handle both.
        # BUG FIX: was a bare `except:` that also swallowed KeyboardInterrupt
        # and hid unrelated errors; catch only the expected shape mismatches.
        try:
            game_odds = game['serverresponse']['game'][0]
        except (KeyError, IndexError, TypeError):
            game_odds = game['serverresponse']['game']
        if game_odds['gopen'] == 'Y':
            item = ZuqiuItem()
            item['data'] = game_odds
            item['tag'] = tag
            yield item
        else:
            logger.info('gopen == "N", 详细赔率盘口未开启')
            return
+