# -*- coding: utf-8 -*-
"""Scrapy settings for the tutorial project.

Only commonly used settings are defined here; runtime secrets and
deployment-specific values are pulled from the environment (a local
``.env`` file is loaded via python-dotenv). For the full list of
available settings see:

    https://doc.scrapy.org/en/latest/topics/settings.html
    https://doc.scrapy.org/en/latest/topics/downloader-middleware.html
    https://doc.scrapy.org/en/latest/topics/spider-middleware.html
"""
import logging
import os

from dotenv import load_dotenv

# Env-var string values that this module treats as boolean "true".
# NOTE(review): os.getenv always returns str, so the int entry 1 can never
# match; it is kept only so the list's value stays backward compatible.
true_list = ['True', 'true', 'TRUE', 1, '1']

# Populate os.environ from a local .env file (no-op if the file is absent).
load_dotenv()

BOT_NAME = 'tutorial'

SPIDER_MODULES = ['spiders']
NEWSPIDER_MODULE = 'spiders'

# Crawl responsibly by identifying yourself (and your website) on the user-agent
# USER_AGENT = 'tutorial (+http://www.yourdomain.com)'

# Obey robots.txt rules
ROBOTSTXT_OBEY = True

# Configure maximum concurrent requests performed by Scrapy (default: 16)
# CONCURRENT_REQUESTS = 32

# Configure a delay for requests for the same website (default: 0)
# See https://doc.scrapy.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
# DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
# CONCURRENT_REQUESTS_PER_DOMAIN = 16
# CONCURRENT_REQUESTS_PER_IP = 16

# Disable cookies (enabled by default)
# COOKIES_ENABLED = False

# Disable Telnet Console (enabled by default)
# TELNETCONSOLE_ENABLED = False

# Override the default request headers:
# DEFAULT_REQUEST_HEADERS = {
#     'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
#     'Accept-Language': 'en',
# }

# Enable or disable spider middlewares
# See https://doc.scrapy.org/en/latest/topics/spider-middleware.html
# SPIDER_MIDDLEWARES = {
#     'tutorial.middlewares.TutorialSpiderMiddleware': 543,
# }

# Enable or disable downloader middlewares
# See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html
# DOWNLOADER_MIDDLEWARES = {
#     'tutorial.middlewares.TutorialDownloaderMiddleware': 543,
# }

# Enable or disable extensions
# See https://doc.scrapy.org/en/latest/topics/extensions.html
# EXTENSIONS = {
#     'scrapy.extensions.telnet.TelnetConsole': None,
# }

# Configure item pipelines
# See https://doc.scrapy.org/en/latest/topics/item-pipeline.html
IMAGES_STORE = './image'
ITEM_PIPELINES = {
    'pipelines.DatabaseWriter.DatabaseWriter': 101,
    'pipelines.StatsMailer.StatsMailer': 999,
    'pipelines.StatsWriter.StatsWriter': 1000,
    'scrapy.pipelines.images.ImagesPipeline': 1,
}

# Enable and configure the AutoThrottle extension (disabled by default)
# See https://doc.scrapy.org/en/latest/topics/autothrottle.html
# AUTOTHROTTLE_ENABLED = True
# AUTOTHROTTLE_START_DELAY = 5
# AUTOTHROTTLE_MAX_DELAY = 60
# AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# AUTOTHROTTLE_DEBUG = False

# Enable and configure HTTP caching (disabled by default)
# See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
# HTTPCACHE_ENABLED = True
# HTTPCACHE_EXPIRATION_SECS = 0
# HTTPCACHE_DIR = 'httpcache'
# HTTPCACHE_IGNORE_HTTP_CODES = []
# HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'

# Database connection settings (consumed by the DatabaseWriter pipeline).
DB_CONNECTION = os.getenv('DB_CONNECTION')
DB_USERNAME = os.getenv('DB_USERNAME')
DB_PASSWORD = os.getenv('DB_PASSWORD')
DB_HOST = os.getenv('DB_HOST')
# Default to the standard MySQL port so a missing DB_PORT no longer raises
# TypeError at import time (int(None) is invalid). DB_CHARSET suggests a
# MySQL-family database — TODO confirm the default fits your deployment.
DB_PORT = int(os.getenv('DB_PORT', '3306'))
DB_DATABASE = os.getenv('DB_DATABASE')
DB_CHARSET = os.getenv('DB_CHARSET')

# SQLAlchemy-style connection URL assembled from the pieces above.
CONNECTION_STRING = (
    "{drivername}://{user}:{passwd}@{host}:{port}/{db_name}"
    "?charset={db_charset}".format(
        drivername=DB_CONNECTION,
        user=DB_USERNAME,
        passwd=DB_PASSWORD,
        host=DB_HOST,
        port=DB_PORT,  # str.format stringifies ints; explicit str() was redundant
        db_name=DB_DATABASE,
        db_charset=DB_CHARSET,
    )
)

# Logger config
LOG_FILE = os.getenv('LOG_FILE', 'log.log')
LOG_ENABLED = os.getenv('LOG_ENABLED', 'True') in true_list
# Resolve the level name through the public logging API instead of the
# private logging._nameToLevel mapping. Unknown (or lowercase) names now
# fall back to INFO instead of producing an invalid None level.
LOG_LEVEL = getattr(logging, os.getenv('LOG_LEVEL', 'INFO').upper(), logging.INFO)
LOG_FORMAT = os.getenv('LOG_FORMAT', '%(asctime)s - %(levelname)s - %(name)s : \n %(message)s')
LOG_STDOUT = os.getenv('LOG_STDOUT', 'False') in true_list
LOG_SHORT_NAMES = False
# LOG_DATEFORMAT

# Mail config (consumed by the StatsMailer pipeline / Scrapy MailSender).
MAIL_FROM = os.getenv('MAIL_FROM', 'scrapy@localhost')
MAIL_HOST = os.getenv('MAIL_HOST')
MAIL_PORT = int(os.getenv('MAIL_PORT', 465))
MAIL_USER = os.getenv('MAIL_USER')
MAIL_PASS = os.getenv('MAIL_PASS')
MAIL_TLS = os.getenv('MAIL_TLS', 'False') in true_list
MAIL_SSL = os.getenv('MAIL_SSL', 'True') in true_list