# -*- coding: utf-8 -*-
# Scrapy settings for the hg3535 project.
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
#     https://doc.scrapy.org/en/latest/topics/settings.html
#     https://doc.scrapy.org/en/latest/topics/downloader-middleware.html
#     https://doc.scrapy.org/en/latest/topics/spider-middleware.html

BOT_NAME = 'hg3535'

SPIDER_MODULES = ['hg3535.spiders']
NEWSPIDER_MODULE = 'hg3535.spiders'

# Crawl responsibly by identifying yourself (and your website) on the user-agent.
# FIX: USER_AGENT must be a plain string. The original wrapped the value in a
# set literal ({...}), so the header actually sent would have been the repr
# "{'Mozilla/5.0 ...'}" — a malformed User-Agent.
USER_AGENT = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:16.0) Gecko/20120813 Firefox/16.0'

# Obey robots.txt rules
ROBOTSTXT_OBEY = False

# Configure maximum concurrent requests performed by Scrapy (default: 16)
CONCURRENT_REQUESTS = 16

# Configure a delay for requests for the same website (default: 0)
# See https://doc.scrapy.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
#DOWNLOAD_DELAY = 3
DOWNLOAD_DELAY = 0
# The download delay setting will honor only one of:
CONCURRENT_REQUESTS_PER_DOMAIN = 16
CONCURRENT_REQUESTS_PER_IP = 16

# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False

# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
#   'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
#   'Accept-Language': 'en',
#}

# Enable or disable spider middlewares
# See https://doc.scrapy.org/en/latest/topics/spider-middleware.html
# SPIDER_MIDDLEWARES = {
#     # 'scrapy_deltafetch.DeltaFetch': 100,
#     'hg3535.middlewares.Hg3535SpiderMiddleware': 543,
# }
# DELTAFETCH_ENABLED = True

# Enable or disable downloader middlewares
# See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html
DOWNLOADER_MIDDLEWARES = {
    # 'hg3535.middlewares.Hg3535DownloaderMiddleware': 200,
    'hg3535.middlewares.Hg3535timeoutDownloaderMiddleware': 200,
    'scrapy.downloadermiddlewares.downloadtimeout.DownloadTimeoutMiddleware': 500,
    # 'scrapy.contrib.downloadermiddleware.retry.RetryMiddleware': 502,
}

# Enable or disable extensions
# See https://doc.scrapy.org/en/latest/topics/extensions.html
#EXTENSIONS = {
#   'scrapy.extensions.telnet.TelnetConsole': None,
#}

# Configure item pipelines
# See https://doc.scrapy.org/en/latest/topics/item-pipeline.html
# Only the scrapy-redis pipeline is active; items are pushed to Redis.
ITEM_PIPELINES = {
    'scrapy_redis.pipelines.RedisPipeline': 400,
    # # Basketball
    # 'hg3535.pipeline.Lanqiupipeline': 300,
    # # Outright champion
    # 'hg3535.pipeline.Guanjunpipeline': 300,
    # # League
    # 'hg3535.pipeline.Liansaipipeline': 300,
    # # Football (soccer)
    # 'hg3535.pipeline.Zuqiupipeline': 300,
    # # Tennis
    # 'hg3535.pipeline.Wangqiupipeline': 300,
    # # Tennis correct score
    # 'hg3535.pipeline.Wqbodanpipeline': 300,
    # # Baseball handicap & over/under
    # 'hg3535.pipeline.Bangqiupipeline': 300,
}

# Enable and configure the AutoThrottle extension (disabled by default)
# See https://doc.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
#AUTOTHROTTLE_START_DELAY = 5
#AUTOTHROTTLE_MAX_DELAY = 60
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
#AUTOTHROTTLE_DEBUG = False

# Enable and configure HTTP caching (disabled by default)
# See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'

REACTOR_THREADPOOL_MAXSIZE = 40
# LOG_LEVEL = 'INFO'
COOKIES_ENABLED = False
RETRY_ENABLED = False
# NOTE(review): a 1-second download timeout with retries and redirects disabled
# is very aggressive — any slow response is dropped outright. Presumably
# deliberate for live-odds polling; confirm before changing.
DOWNLOAD_TIMEOUT = 1
REDIRECT_ENABLED = False
# SCHEDULER_PERSIST: whether to keep the scheduler queue and dupefilter on
# close (True = keep, False = flush).
# SCHEDULER_PERSIST = False
SCHEDULER_FLUSH_ON_START = False
# Workaround for: AttributeError: 'TelnetConsole' object has no attribute 'port'
TELNETCONSOLE_PORT = None
# RETRY_ENABLED = True
# RETRY_TIMES = 2
# RETRY_HTTP_CODES = [500, 502, 503, 504, 400, 408]
# LOG_LEVEL = 'DEBUG'
# LOG_FILE = './log/'

# PostgreSQL credentials (currently unused — the Postgres pipelines above are
# disabled). Uncomment the set matching the deployment when re-enabling:
# POST_HOST = '172.17.0.4'
# POST_DATABASE = 'kaiyou'
# POST_USER = 'kaiyou'
# POST_PORT = '5432'
# POST_PASSWORD = 'yjkj8888'
#
# POST_HOST = '192.168.2.200'
# POST_DATABASE = 'kaiyou'
# POST_USER = 'kaiyou'
# POST_PORT = '10432'
# POST_PASSWORD = '123456'

# Redis connection values read by the project's own code.
# NOTE(review): 'R_POST' looks like a typo for 'R_PORT', but the name is kept
# unchanged because other modules may import it — verify before renaming.
R_HOST = '192.168.2.200'
R_POST = '6379'
R_PASSWORD = '123456'

# scrapy-redis: distributed scheduling and request de-duplication via Redis.
SCHEDULER = "scrapy_redis.scheduler.Scheduler"
DUPEFILTER_CLASS = "scrapy_redis.dupefilter.RFPDupeFilter"
# SCHEDULER_SERIALIZER = "scrapy_redis.picklecompat"
# Keep the redis queue across runs, allowing pause/resume of the crawl.
# SCHEDULER_PERSIST = False
# Schedule requests using a priority queue (the default).
SCHEDULER_QUEUE_CLASS = 'scrapy_redis.queue.PriorityQueue'
REDIS_HOST = '192.168.2.200'
REDIS_PORT = 6379
# FIX: pass the password as a string (consistent with R_PASSWORD above);
# the original used the bare integer 123456.
REDIS_PARAMS = {'password': '123456', 'db': 1}