Your Name committed 6 years ago
parent commit 70766924a1

+ 1 - 1
hgg070_spider/conf/bangqiu.json

@@ -1,5 +1,5 @@
 {
-  "bs": [
+  "bq": [
     {
         "plodds": "CO",
         "prodds": "sw_R",

+ 1 - 1
hgg070_spider/conf/lanqiu.json

@@ -1,5 +1,5 @@
 {
-  "bk": [
+  "lq": [
     {
         "plodds": "CO",
         "prodds": "sw_R",

+ 2 - 2
hgg070_spider/conf/wangqiu.json

@@ -1,5 +1,5 @@
 {
-  "tn": [
+  "wq": [
     {
             "plodds": "TN",
             "prodds": "sw_OU",
@@ -168,7 +168,7 @@
         ]
     }
     ],
-  "tn_rollball": [
+  "wq_rollball": [
     {
             "plodds": "TN",
             "prodds": "sw_ROU",

+ 5 - 5
hgg070_spider/conf/zuqiu.json

@@ -6901,7 +6901,7 @@
         "items": [
             {
                 "lodds": "tbhb",
-                "rodds": "ior_OUHO",
+                "rodds": "ior_POUHO",
                 "ratio": "0",
                 "ratio_name": "ratio_ouho",
                 "latio": "",
@@ -6912,7 +6912,7 @@
             },
             {
                 "lodds": "tbhs",
-                "rodds": "ior_OUHU",
+                "rodds": "ior_POUHU",
                 "ratio": "0",
                 "ratio_name": "ratio_ouhu",
                 "latio": "",
@@ -6925,12 +6925,12 @@
     },
     {
             "plodds": "TB",
-            "prodds": "sw_OUH",
+            "prodds": "sw_OUC",
             "enabled": 0,
             "items": [
                 {
                     "lodds": "tbgb",
-                    "rodds": "ior_OUCO",
+                    "rodds": "ior_POUCO",
                     "ratio": "0",
                     "ratio_name": "ratio_ouco",
                     "latio": "",
@@ -6941,7 +6941,7 @@
                 },
                 {
                     "lodds": "tbgs",
-                    "rodds": "ior_OUCU",
+                    "rodds": "ior_POUCU",
                     "ratio": "0",
                     "ratio_name": "ratio_oucu",
                     "latio": "",

+ 2 - 2
hgg070_spider/main.py

@@ -6,9 +6,9 @@ from scrapy.cmdline import execute
 sys.path.append(os.path.dirname(os.path.abspath(__file__)))
 # execute(["scrapy", "crawl", "zuqiu"])
 # execute(["scrapy", "crawl", "lanqiu"])
-execute(["scrapy", "crawl", "lq_sports"])
+# execute(["scrapy", "crawl", "lq_sports"])
 # execute(["scrapy", "crawl", "guanjun"])
-# execute(["scrapy", "crawl", "wangqiu"])
+execute(["scrapy", "crawl", "wangqiu"])
 # execute(["scrapy", "crawl", "wqbodan"])
 # execute(["scrapy", "crawl", "bangqiu"])
 # execute(["scrapy", "crawl", "roll_zuqiu"]) # 滚球足球 回来要解开这个注释 其他全部解封

+ 17 - 24
hgg070_spider/pipelines/lq_sports.py

@@ -1,50 +1,43 @@
 import logging
-from twisted.internet import defer,reactor
+from twisted.internet import defer, reactor
 from ..utils.helper import Helper
-from ..settings import LEAGUE_URL,MATCH_URL
+from ..settings import LEAGUE_URL, MATCH_URL
 import time
+
+
 class LqSportsPipeline(object):
     @defer.inlineCallbacks
-    def process_item(self,item,spider):
+    def process_item(self, item, spider):
         print('555555555555555555555555555555555555555555555555555555555555555555555')
-        logger=logging.getLogger(__name__)
+        logger = logging.getLogger(__name__)
         logger.info("进入管道")
-        out=defer.Deferred()
-        reactor.callInThread(self._do_calculation,item,out)
+        out = defer.Deferred()
+        reactor.callInThread(self._do_calculation, item, out)
         yield out
 
-
-    def _do_calculation(self,item,out):
-        #save the league first
+    def _do_calculation(self, item, out):
+        # save the league first
         league_name = item['league']
         uuid = Helper.genearte_uuid(league_name)
-        #whether this is a parlay (string scene)
+        # whether this is a parlay (string scene)
         if item['isP'] == 'P':
             ris_stringscene = 1
         else:
             ris_stringscene = 0
         # current time, timestamp
         utime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
-        odds_key = ["game_code", "title", "match_id", "lg_id","data", "source", "odds_only", "tag", "uuid",
+        odds_key = ["game_code", "title", "match_id", "lg_id", "data", "source", "odds_only", "tag", "uuid",
                     "is_stringscene", "utime", "pt", 'match_identity']
-        odds_value = ["lq", "odds", item['match_id'], item['league_id'], item["content"],"hgg070", [], item['more_count'], uuid,
+        odds_value = ["lq", "odds", item['match_id'], item['league_id'], item["content"], "hgg070", [],
+                      item['more_count'], uuid,
                       ris_stringscene, utime, item['isP'], item["match_identity"]]
-        #match data
+        # match data
         childer = dict(zip(odds_key, odds_value))
-        res=Helper.async_post(LEAGUE_URL,childer)
+        res = Helper.async_post(LEAGUE_URL, childer)
         if res:
-            if res.get('status')==1:
+            if res.get('status') == 1:
                 logging.warning("联赛提交成功,{}".format(res))
             else:
                 logging.warning("联赛提交失败,{}".format(res))
         else:
             logging.warning("联赛提交失败,{}".format(res))
-
-
-
-
-
-
-
-
-

+ 152 - 0
hgg070_spider/pipelines/wangqiu.py

@@ -0,0 +1,152 @@
+import datetime
+import json
+import time
+import logging
+import pymongo
+from twisted.internet import defer, reactor
+from ..utils.helper import Helper
+from ..settings import M_HOST, M_USER, M_PASSWORD, M_POST, M_DB, LEAGUE_URL, ODDS_URL, MATCH_URL
+
+
+class WangqiuPipeline(object):
+    def open_spider(self, spider):
+        self.mongo = pymongo.MongoClient(host=M_HOST, username=M_USER, password=M_PASSWORD, port=M_POST,
+                                         authSource=M_DB)
+        self.db = self.mongo[M_DB]
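+        # conf/wangqiu.json maps hgg070 response fields to odds codes (top-level sections 'wq' and 'wq_rollball')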
+        with open('./conf/wangqiu.json', 'r', encoding='utf8') as wq:
+            hgg070 = json.load(wq)
+        self.hgg070 = hgg070
+
+    # The deferred/threaded path used by the other pipelines is disabled here;
+    # tennis items are processed inline in process_item for now.
+    # @defer.inlineCallbacks
+    def process_item(self, item, spider):
+        # out = defer.Deferred()
+        # reactor.callInThread(self._do_calculation, item, out)
+        # yield out
+        logger = logging.getLogger(__name__)
+        match_all = item['data']
+        pt = str(item['index'])
+        team_h, team_c = match_all['team_h'], match_all['team_c']
+        league_name, league_id = match_all['league'], match_all['gidm']
+        us_time, re_time = match_all['datetime'], match_all['re_time']
+        match_id = match_all['gid']
+        tag_number = item['tag']
+        uuid = Helper.genearte_uuid(league_name)
+        league_list = []
+        last_time = '{}-12-31 23:59:59'.format(datetime.datetime.now().year)
+        match_date, match_time, time3 = Helper.change_time(us_time)
+        utime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
+        if self.db.wq_league070.find({'lg_id': league_id}).count() < 1:
+            league_dict = {"game_code": "wq", "title": "league", "source": "hgg070"}
+            league_key = ["name_chinese", "kind", "match_mode", "if_stop", "last_time", "lg_id", "source", "uuid"]
+            league_value = [league_name, "1", "1", "0", last_time, league_id, "hgg070", uuid]
+            league_data = dict(zip(league_key, league_value))
+            league_list.append(league_data)
+            league_dict['data'] = league_list
+            res = Helper.async_post(LEAGUE_URL, league_dict)
+            if res:
+                if res.get('status') == 1:
+                    self.db.wq_league070.insert(league_data)
+                    logging.info('tennis league submitted, {}'.format(res))
+            else:
+                logging.warning('tennis league API error, {}'.format(res))
+        else:
+            logging.info('{}, league already exists, not submitting'.format(league_name))
+        pt_dict = {'0': 'is_today', '1': 'is_morningplate', '2': 'is_stringscene', '3': 'is_rollball'}
+        pt_status = pt_dict[pt]
+        ris_stringscene = 0
+        json_key = 'wq'
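+        # tennis uses the 'wq' mapping section for every plate here; the parlay key stays commented out below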
+        if pt == '0':
+            is_rollball = 0
+            is_today = 1
+            is_morningplate = 0
+            is_stringscene = 0
+        elif pt == '2':
+            is_rollball = 0
+            is_today = 0
+            is_morningplate = 1
+            is_stringscene = 0
+        else:
+            # json_key = 'chuanchang'
+            is_today = 0
+            is_rollball = 0
+            is_morningplate = 0
+            is_stringscene = 1
+            ris_stringscene = 1
+
+        match_list = []
+        match_identity = Helper.genearte_uuid(team_h + team_c + match_date)
+        if self.db.wq_competition070.find({'match_id': match_id, pt_status: 1}).count() < 1:
+            match_dict = {"game_code": "wq", "title": "match", "source": "hgg070"}
+            match_key = ["home_team", "guest_team", "lg_id", "status", "match_id", "match_date", "match_time",
+                         "tag", "source", "is_rollball", "is_morningplate", "is_stringscene", "us_time", "uuid",
+                         "half_match_id", "is_today", 'rule', "is_horn", "match_identity"]
+            match_value = [team_h, team_c, league_id, 0, match_id, match_date, match_time, tag_number,
+                           "hgg070", is_rollball, is_morningplate, is_stringscene, us_time, uuid, 0, is_today, '', 0,
+                           match_identity]
+            match_data = dict(zip(match_key, match_value))
+            match_list.append(match_data)
+            match_dict['data'] = match_list
+            res = Helper.async_post(MATCH_URL, match_dict)
+            if res:
+                if res.get('status') == 1:
+                    self.db.wq_competition070.insert(match_data)
+                    logging.info('tennis match submitted, {}'.format(res))
+                else:
+                    logger.warning('tennis match submit failed, {}'.format(res))
+                    # logger.warning(match_dict)
+            else:
+                logger.warning('tennis match API error, submit failed, {}'.format(res))
+                # logger.warning(match_dict)
+        else:
+            logger.info('tennis match already exists, not submitting')
+
+        data_list = []
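+        # Walk the mapping: 'prodds' names the Y/N switch field for a market group in the response,
+        # 'rodds' the raw odds field, 'lodds' the normalized odds code, and 'ratio_name'/'latio'
+        # supply the handicap or total line stored as 'condition'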
+        for i in self.hgg070[json_key]:
+            r_code = i['prodds']
+            p_code = i['plodds']
+            if match_all.get(r_code) == 'Y':
+                for y in i['items']:
+                    odd = match_all.get(y['rodds'])
+                    if odd:
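+                        # 'sole' hashes the market identity without the price; 'odds_only' also folds in
+                        # the current odds value, so it changes whenever the price moves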
+                        code = y['lodds']
+                        sole_str = p_code + code + '0' + str(match_id) + 'hgg070'
+                        sole = Helper.genearte_MD5(sole_str, pt)
+                        ratio_name = y['ratio_name']
+                        if ratio_name:
+                            condition = match_all[ratio_name]
+                        else:
+                            condition = y['latio']
+                        hash_str = p_code + code + '0' + str(odd) + "hgg070" + str(match_id)
+                        odds_only = Helper.genearte_MD5(hash_str, pt)
+                        odd_key = ["match_id", "lg_id", "odds_code", "status", "sort", "p_code", "odds",
+                                   "condition", "odds_only", "sole", "source", "type", "team"]
+                        odd_value = [match_id, league_id, code, '0', '0', code, odd, condition,
+                                     odds_only, sole, 'hgg070', '0', '']
+                        odd_dict = dict(zip(odd_key, odd_value))
+                        data_list.append(odd_dict)
+
+        odds_key = ["game_code", "title", "match_id", "lg_id", "data", "source", "odds_only", "tag", "uuid",
+                    "is_stringscene", "utime", "pt", 'match_identity']
+        odds_value = ["wq", "odds", match_id, league_id, data_list, "hgg070", '', tag_number, uuid,
+                      ris_stringscene, utime, pt, match_identity]
+        odds_dict = dict(zip(odds_key, odds_value))
+        if data_list:
+            res = Helper.async_post(ODDS_URL, odds_dict)
+            if res:
+                if res.get('status') == 1:
+                    logger.info('tennis detailed odds submitted successfully, {}'.format(res))
+                    # logger.info(odds_dict)
+                else:
+                    logger.warning('tennis detailed odds submit failed, {}'.format(res))
+                    # logger.warning(odds_dict)
+            else:
+                logging.warning('tennis detailed odds API error, {}'.format(res))
+        else:
+            logger.info('tennis detailed odds list is empty')
+    #     reactor.callFromThread(out.callback, item)

+ 61 - 11
hgg070_spider/pipelines/zuqiu.py

@@ -1,4 +1,5 @@
 import datetime
+import json
 import time
 import logging
 import pymongo
@@ -12,17 +13,18 @@ class ZuqiuPipeline(object):
         self.mongo = pymongo.MongoClient(host=M_HOST, username=M_USER, password=M_PASSWORD, port=M_POST,
                                          authSource=M_DB)
         self.db = self.mongo[M_DB]
+        with open('./conf/zuqiu.json', 'r', encoding='utf8') as zq:
+            hgg070 = json.load(zq)
+        self.hgg070 = hgg070
 
-    # @defer.inlineCallbacks
-    # def process_item(self,item,spider):
-    #     out=defer.Deferred()
-    #     reactor.callInThread(self._do_calculation,item,out)
-    #     yield out
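+    # Run _do_calculation in a reactor thread so the submit calls do not block the Twisted event loop;
+    # the Deferred is resolved from reactor.callFromThread at the end of _do_calculation.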
+    @defer.inlineCallbacks
+    def process_item(self, item, spider):
+        out = defer.Deferred()
+        reactor.callInThread(self._do_calculation, item, out)
+        yield out
 
-    # def _do_calculation(self,item,out):
-    #     pass
-
-    def process_item(self, item, spider):
+    def _do_calculation(self, item, out):
+    # def process_item(self, item, spider):
         logger = logging.getLogger(__name__)
         match_all = item['data']
         pt = str(item['index'])
@@ -35,6 +37,7 @@ class ZuqiuPipeline(object):
         league_list = []
         last_time = '{}-12-31 23:59:59'.format(datetime.datetime.now().year)
         match_date, match_time, time3 = Helper.change_time(us_time)
+        utime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
         if self.db.zq_league070.find({'lg_id': league_id}).count() < 1:
             # if self.db.zq_league35.find({'uuid': uuid}).count() < 1:
             league_dict = {"game_code": "zq", "title": "league", "source": "hgg070"}
@@ -54,6 +57,8 @@ class ZuqiuPipeline(object):
             logging.info('{}, league already exists, not submitting'.format(league_name))
         pt_dict = {'0': 'is_today', '1': 'is_morningplate', '2': 'is_stringscene', '3': 'is_rollball'}
         pt_status = pt_dict[pt]
+        ris_stringscene = 0
+        json_key = 'zuqiu'
         if pt == '0':
             is_rollball = 0
             is_today = 1
@@ -65,10 +70,12 @@ class ZuqiuPipeline(object):
             is_morningplate = 1
             is_stringscene = 0
         else:
+            json_key = 'chuanchang'
             is_today = 0
             is_rollball = 0
             is_morningplate = 0
             is_stringscene = 1
+            ris_stringscene = 1
 
         match_list = []
         match_identity = Helper.genearte_uuid(team_h + team_c + match_date)
@@ -79,7 +86,7 @@ class ZuqiuPipeline(object):
                          "tag", "source", "is_rollball", "is_morningplate", "is_stringscene", "us_time", "uuid",
                          "half_match_id", "is_today", "is_horn", 'match_identity']
             match_value = [team_h, team_c, league_id, 0, match_id, match_date, match_time, tag_number,
-                           "hg3535", is_rollball, is_morningplate, is_stringscene, us_time, uuid, 0, is_today, 0,
+                           "hgg070", is_rollball, is_morningplate, is_stringscene, us_time, uuid, 0, is_today, 0,
                            match_identity]
             match_data = dict(zip(match_kay, match_value))
             match_list.append(match_data)
@@ -98,4 +105,47 @@ class ZuqiuPipeline(object):
         else:
             logger.info('football match already exists, not submitting')
 
-        # reactor.callFromThread(out.callback, item)
+        data_list = []
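+        # Same mapping walk as WangqiuPipeline, driven by the 'zuqiu' (or 'chuanchang' for parlay) section of conf/zuqiu.json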
+        for i in self.hgg070[json_key]:
+            r_code = i['prodds']
+            p_code = i['plodds']
+            if match_all.get(r_code) == 'Y':
+                for y in i['items']:
+                    odd = match_all.get(y['rodds'])
+                    if odd:
+                        code = y['lodds']
+                        sole_str = p_code + code + '0' + str(match_id) + 'hgg070'
+                        sole = Helper.genearte_MD5(sole_str, pt)
+                        ratio_name = y['ratio_name']
+                        if ratio_name:
+                            condition = match_all[ratio_name]
+                        else:
+                            condition = y['latio']
+                        hash_str = p_code + code + '0' + str(odd) + "hgg070" + str(match_id)
+                        odds_only = Helper.genearte_MD5(hash_str, pt)
+                        odd_key = ["match_id", "lg_id", "odds_code", "status", "sort", "p_code", "odds",
+                                   "condition", "odds_only", "sole", "source", "type", "team"]
+                        odd_value = [match_id, league_id, code, '0', '0', code, odd, condition,
+                                     odds_only, sole, 'hgg070', '0', '']
+                        odd_dict = dict(zip(odd_key, odd_value))
+                        data_list.append(odd_dict)
+
+        odds_key = ["game_code", "title", "match_id", "lg_id", "data", "source", "odds_only", "tag", "uuid",
+                    "is_stringscene", "utime", "pt", 'match_identity']
+        odds_value = ["zq", "odds", match_id, league_id, data_list, "hgg070", '', tag_number, uuid,
+                      ris_stringscene, utime, pt, match_identity]
+        odds_dict = dict(zip(odds_key, odds_value))
+        if data_list:
+            res = Helper.async_post(ODDS_URL, odds_dict)
+            if res:
+                if res.get('status') == 1:
+                    logger.info('football detailed odds submitted successfully, {}'.format(res))
+                    # logger.info(odds_dict)
+                else:
+                    logger.warning('football detailed odds submit failed, {}'.format(res))
+                    # logger.warning(odds_dict)
+            else:
+                logging.warning('football detailed odds API error, {}'.format(res))
+        else:
+            logger.info('football detailed odds list is empty')
+        reactor.callFromThread(out.callback, item)

+ 152 - 0
hgg070_spider/spiders/wangqiu.py

@@ -0,0 +1,152 @@
+# -*- coding: utf-8 -*-
+import logging
+# import lxml
+import scrapy
+import xmltodict
+
+from ..items import ZuqiuItem
+
+
+class WangqiuSpider(scrapy.Spider):
+    name = 'wangqiu'
+    allowed_domains = ['m.hgg070.com']
+    custom_settings = {
+        "ITEM_PIPELINES": {
+            "hgg070_spider.pipelines.wangqiu.WangqiuPipeline": 200,
+        },
+        # 'LOG_LEVEL': 'DEBUG',
+        # 'LOG_FILE': cpath + "/log/sports_{}_{}_{}.log".format(to_day.year, to_day.month, to_day.day)
+    }
+
+    headers = {
+        'Accept': '*/*',
+        'Accept-Encoding': 'gzip, deflate',
+        'Accept-Language': 'zh-CN,zh;q=0.9',
+        'Content-Length': '130',
+        'Content-type': 'application/x-www-form-urlencoded',
+        'Cookie': '_ga=GA1.2.1009358217.1572056223; _gid=GA1.2.97506800.1572056223; _gat=1',
+        'Host': 'm.hgg070.com',
+        'Origin': 'http://m.hgg070.com',
+        'Proxy-Connection': 'keep-alive',
+        'Referer': 'http://m.hgg070.com/',
+        'User-Agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/11.0 Mobile/15A372 Safari/604.1'
+    }
+
+    def start_requests(self):
+        url = "http://m.hgg070.com/app/member/get_league_list.php"
+        h_types = [('FT', '', '130'), ('FU', 'P', '131'), ('FU', "", '130'), ('FU', 'P', '131')]
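+        # Each tuple is (showtype, isP, Content-Length): 'FT' requests the today board, 'FU' the early board,
+        # and isP='P' the parlay board; the enumerate index travels through meta and becomes 'pt' in the pipeline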
+        for i, h_type in enumerate(h_types):
+            show_type, isp, length = h_type
+            self.headers['Content-Length'] = length
+            from_data = {
+                'uid': 'ca93c6189b00e4c4d974f45ccb1dfd5a1c7a7e06a0292d23025673b1ed195480',
+                'langx': 'zh-cn',
+                'ltype': '3',
+                'gtype': 'TN',
+                'showtype': show_type,
+                'sorttype': '',
+                'date': '',
+                'isP': isp
+            }
+            yield scrapy.FormRequest(url=url, formdata=from_data, callback=self.parse, headers=self.headers,
+                                     meta={'index': i}, dont_filter=True)
+
+    def parse(self, response):
+        leagues = response.xpath('//serverresponse/game/league')
+        url = 'http://m.hgg070.com/app/member/get_game_list.php'
+        if leagues:
+            index = response.meta['index']
+            if index == 0:
+                date = ''
+                showtype = 'FT'
+                isp = ''
+                self.headers['Content-Length'] = '147'
+            elif index == 2:
+                date = 'all'
+                showtype = 'FU'
+                isp = ''
+                self.headers['Content-Length'] = '150'
+            else:
+                date = 'all'
+                showtype = 'FU'
+                isp = 'P'
+                self.headers['Content-Length'] = '151'
+            for league in leagues:
+                lid = league.xpath('.//league_id/text()').extract_first()
+                from_data = {
+                    'uid': 'ca93c6189b00e4c4d974f45ccb1dfd5a1c7a7e06a0292d23025673b1ed195480',
+                    'langx': 'zh-cn',
+                    'ltype': '3',
+                    'gtype': 'TN',
+                    'showtype': showtype,
+                    'lid': lid,
+                    'sorttype': 'league',
+                    'date': date,
+                    'isP': isp
+                }
+                yield scrapy.FormRequest(url=url, formdata=from_data, callback=self.parse_match, headers=self.headers,
+                                         meta={'index': index}, dont_filter=True)
+        else:
+            print('no league ids retrieved')
+            return
+
+    def parse_match(self, response):
+        index = response.meta['index']
+        url = 'http://m.hgg070.com/app/member/get_game_more.php'
+        if index == 0:
+            date = ''
+            showtype = 'FT'
+            isp = ''
+            self.headers['Content-Length'] = '132'
+        elif index == 1:
+            date = 'all'
+            showtype = 'FT'
+            isp = 'P'
+            self.headers['Content-Length'] = '136'
+        elif index == 2:
+            date = ''
+            showtype = 'FU'
+            isp = ''
+            self.headers['Content-Length'] = '132'
+        else:
+            date = 'all'
+            showtype = 'FU'
+            isp = 'P'
+            self.headers['Content-Length'] = '136'
+        gids = response.xpath('//serverresponse/game/gid/text()').extract()
+        tags = response.xpath('//serverresponse/game/more_count/text()').extract()
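+        # one get_game_more request per match (gid); more_count rides along as the 'tag' field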
+        if gids:
+            for i, gid in enumerate(gids):
+                from_data = {
+                    'uid': 'ca93c6189b00e4c4d974f45ccb1dfd5a1c7a7e06a0292d23025673b1ed195480',
+                    'langx': 'zh-cn',
+                    'ltype': '3',
+                    'gtype': 'TN',
+                    'showtype': showtype,
+                    'date': date,
+                    'isP': isp,
+                    'gid': gid,
+                }
+                tag = tags[i]
+                yield scrapy.FormRequest(url=url, formdata=from_data, callback=self.parse_odds, headers=self.headers,
+                                         meta={'index': index, 'tag': tag}, dont_filter=True)
+
+    def parse_odds(self, response):
+        logger = logging.getLogger(__name__)
+        index = response.meta['index']
+        tag = response.meta['tag']
+        game = xmltodict.parse(response.text)
+        try:
+            game_odds = game['serverresponse']['game'][0]
+        except (KeyError, IndexError):  # a single <game> node is a dict, not a list
+            game_odds = game['serverresponse']['game']
+        if game_odds['gopen'] == 'Y':
+            item = ZuqiuItem()
+            item['data'] = game_odds
+            item['index'] = index
+            item['tag'] = tag
+            yield item
+        else:
+            logger.info('gopen == "N", 详细赔率盘口未开启')
+            return
+

+ 15 - 16
hgg070_spider/spiders/zuqiu.py

@@ -1,7 +1,6 @@
 # -*- coding: utf-8 -*-
 import logging
-import lxml
-
+# import lxml
 import scrapy
 import xmltodict
 
@@ -40,7 +39,7 @@ class ZuqiuSpider(scrapy.Spider):
             show_type, isp, length = h_type
             self.headers['Content-Length'] = length
             from_data = {
-                'uid': '4d6e7f8af34715653b6039ca9b43737f096ed82446e3d37e033349aba0e3e753',
+                'uid': '19fbb114b9503aaa806a1920203d73eb85db285f26188e36ae7172f550987364',
                 'langx': 'zh-cn',
                 'ltype': '3',
                 'gtype': 'FT',
@@ -75,7 +74,7 @@ class ZuqiuSpider(scrapy.Spider):
             for league in leagues:
                 lid = league.xpath('.//league_id/text()').extract_first()
                 from_data = {
-                    'uid': '4d6e7f8af34715653b6039ca9b43737f096ed82446e3d37e033349aba0e3e753',
+                    'uid': '19fbb114b9503aaa806a1920203d73eb85db285f26188e36ae7172f550987364',
                     'langx': 'zh-cn',
                     'ltype': '3',
                     'gtype': 'FT',
@@ -119,7 +118,7 @@ class ZuqiuSpider(scrapy.Spider):
         if gids:
             for i, gid in enumerate(gids):
                 from_data = {
-                    'uid': '4d6e7f8af34715653b6039ca9b43737f096ed82446e3d37e033349aba0e3e753',
+                    'uid': '19fbb114b9503aaa806a1920203d73eb85db285f26188e36ae7172f550987364',
                     'langx': 'zh-cn',
                     'ltype': '3',
                     'gtype': 'FT',
@@ -137,17 +136,17 @@ class ZuqiuSpider(scrapy.Spider):
         index = response.meta['index']
         tag = response.meta['tag']
         game = xmltodict.parse(response.text)
-        gopen = game['serverresponse']['game']['gopen']
-        if gopen == 'Y':
-            try:
-                game_odds = game['serverresponse']['game'][0]
-            except:
-                game_odds = game['serverresponse']['game']
+        try:
+            game_odds = game['serverresponse']['game'][0]
+        except (KeyError, IndexError):  # a single <game> node is a dict, not a list
+            game_odds = game['serverresponse']['game']
+        if game_odds['gopen'] == 'Y':
+            item = ZuqiuItem()
+            item['data'] = game_odds
+            item['index'] = index
+            item['tag'] = tag
+            yield item
         else:
             logger.info('gopen == "N", 详细赔率盘口未开启')
             return
-        item = ZuqiuItem()
-        item['data'] = game_odds
-        item['index'] = index
-        item['tag'] = tag
-        yield item
+