Option A — connect with separate host/port/params settings:
# Redis connection: explicit host/port plus extra keyword args for redis-py.
REDIS_HOST = 'localhost'
REDIS_PORT = 6379

# Passed through to redis.Redis(**REDIS_PARAMS): auth password and DB index.
REDIS_PARAMS = {
    'password': '*******',
    'db': 1,
}
Option B — connect with a single Redis URL setting (overrides host/port):
# Full connection URL: redis://[user:password@]host:port/db
# Fix: removed stray "* *" markdown-bullet residue that made this line invalid Python.
REDIS_URL = 'redis://user:pass@hostname:9001/db'
from scrapy_redis.spiders import RedisSpider


class BookSpider(RedisSpider):
    """Distributed spider for dangdang.com book pages.

    Fix: the original was collapsed onto one line with `####` separators and
    was not valid Python; reconstructed with proper indentation.

    Instead of a hard-coded ``start_urls`` list, a RedisSpider blocks on the
    Redis list named by ``redis_key`` and pops start URLs pushed there, so
    multiple spider processes can share one crawl frontier.
    """

    name = 'book'
    allowed_domains = ['dangdang.com']
    # start_urls = ['http://book.dangdang.com/']  # replaced by redis_key below
    redis_key = 'book:start_urls'
settings.py — scrapy-redis configuration:
# Fix: the original settings were collapsed onto one line with `####`
# separators (not valid Python); reconstructed, comments translated to English.

# Deduplicate requests via scrapy_redis's fingerprint filter, shared in Redis.
DUPEFILTER_CLASS = "scrapy_redis.dupefilter.RFPDupeFilter"

# Use the scrapy_redis scheduler so the request queue lives in Redis.
SCHEDULER = "scrapy_redis.scheduler.Scheduler"

# Keep queue/fingerprint keys in Redis on shutdown; if False they are cleared
# when the spider closes.
SCHEDULER_PERSIST = True

# Optional queue implementations (priority queue is the default):
# SCHEDULER_QUEUE_CLASS = "scrapy_redis.queue.SpiderPriorityQueue"
# SCHEDULER_QUEUE_CLASS = "scrapy_redis.queue.SpiderQueue"
# SCHEDULER_QUEUE_CLASS = "scrapy_redis.queue.SpiderStack"

LOG_LEVEL = 'DEBUG'

ITEM_PIPELINES = {
    # scrapy_redis pipeline that pushes scraped items into Redis.
    'scrapy_redis.pipelines.RedisPipeline': 400,
}

# NOTE(review): host/credentials hard-coded here — prefer loading from the
# environment instead of committing them.
REDIS_HOST = '119.91.101.102'
REDIS_PORT = 6379
REDIS_PARAMS = {
    'password': '******',
    'db': 1,
}