main.py 1.1 KB

# -*- coding: utf-8 -*-
__author__ = 'admin'
__date__ = '2018/3/7 10:06 AM'

from scrapy.cmdline import execute
import sys
import os

# print(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(os.path.dirname(os.path.abspath(__file__)))

execute(["scrapy", "crawl", "hg3535"])  # football spider
# execute(["scrapy", "crawl", "hg3535_supplement"])      # odds-supplement spider
# execute(["scrapy", "crawl", "lanqiu"])                 # basketball spider
# execute(["scrapy", "crawl", "tennis"])                 # tennis spider
# execute(["scrapy", "crawl", "ball_status"])            # football status-id scraping spider
# execute(["scrapy", "crawl", "ball_status_update"])     # football status-update spider
# execute(["scrapy", "crawl", "bangqiu"])                # baseball spider
# execute(["scrapy", "crawl", "bangqiu_status"])         # baseball status-id scraping spider
# execute(["scrapy", "crawl", "bangqiu_status_update"])  # baseball status-update spider
# execute(["scrapy", "crawl", "lanqiu_status"])          # basketball status-id scraping spider
# execute(["scrapy", "crawl", "lanqiu_status_update"])   # basketball status-update spider
# execute(["scrapy", "crawl", "wangqiu_status"])         # tennis status-id scraping spider
# execute(["scrapy", "crawl", "wangqiu_status_update"])  # tennis status-update spider