Files
ppf/config.py
2021-01-24 03:52:56 +01:00

53 lines
4.0 KiB
Python

from comboparse import ComboParser
class Config(ComboParser):
    """Application configuration for ppf.

    Declares every config item (section, key, type, default, help text,
    required flag) on top of ComboParser, plus one extra CLI argument.
    After ``load()``, derived attributes are available:

    * ``self.torhosts`` -- list of tor proxy ``host:port`` strings
    * ``self.servers``  -- non-empty lines read from ``watchd.source_file``
    """

    def load(self):
        """Load config values, then derive the tor host and server lists."""
        super(Config, self).load()
        # 'common.tor_hosts' is a comma-separated string; normalize each
        # entry to a stripped str.
        self.torhosts = [str(host).strip()
                         for host in self.common.tor_hosts.split(',')]
        # Read the server/url list, keeping only non-blank lines.
        with open(self.watchd.source_file, 'r') as handle:
            self.servers = [line.strip() for line in handle if line.strip()]

    def __init__(self):
        """Register all known config items and CLI arguments.

        add_item signature: (section, key, type, default, help, required).
        """
        super(Config, self).__init__('config.ini')

        section = 'common'
        self.add_item(section, 'tor_hosts', str, '127.0.0.1:9050', 'comma-separated list of tor proxy address(es)', True)

        section = 'watchd'
        # Typo fix: 'mininum' -> 'minimum' in the help text.
        self.add_item(section, 'outage_threshold', float, 4.0, 'minimum success percentage required to not drop check results', False)
        self.add_item(section, 'max_fail', int, 5, 'number of fails after which a proxy is considered dead', False)
        self.add_item(section, 'threads', int, 10, 'number of threads watchd uses to check proxies', True)
        self.add_item(section, 'timeout', int, 15, 'timeout for blocking operations (connect/recv/...) for proxy checks in seconds', False)
        self.add_item(section, 'submit_after', int, 200, 'min. number of tested proxies for DB write', False)
        self.add_item(section, 'debug', bool, False, 'whether to print additional debug info', False)
        self.add_item(section, 'use_ssl', int, 0, 'whether to use SSL and port 6697 to connect to targets (slower)', False)
        self.add_item(section, 'checktime', int, 1800, 'base checking interval for proxies in db in seconds', False)
        self.add_item(section, 'perfail_checktime', int, 3600, 'additional checking interval for proxies in db in seconds per experienced failure', False)
        self.add_item(section, 'database', str, 'websites.sqlite', 'filename of database', True)
        self.add_item(section, 'oldies', bool, False, 're-test old proxies as well ? (default: False)', False)
        self.add_item(section, 'oldies_checktime', int, 43200, 'base checking interval for *old* proxies in seconds (default: 43200)', False)
        self.add_item(section, 'oldies_multi', int, 10, 'fetch threads*multi rows when testing oldies (default: 10)', False)
        self.add_item(section, 'source_file', str, 'servers.txt', 'server/url list to read from (default: servers.txt)', False)
        self.add_item(section, 'tor_safeguard', bool, True, 'enable tor safeguard (default: True)', False)

        section = 'httpd'
        self.add_item(section, 'listenip', str, '127.0.0.1', 'address for the httpd to listen to (default: 127.0.0.1)', True)
        self.add_item(section, 'port', int, 8081, 'port for the httpd to listen to (default: 8081)', True)
        self.add_item(section, 'enabled', bool, False, 'start httpd (default: False)', True)

        section = 'ppf'
        self.add_item(section, 'debug', bool, False, 'whether to print additional debug info', False)
        self.add_item(section, 'search', bool, True, 'whether to use searx search engine to find new proxy lists', False)
        self.add_item(section, 'timeout', float, 15, 'timeout for blocking operations (connect/recv/...) for proxy checks in seconds', False)
        self.add_item(section, 'http_retries', int, 1, 'number of retries for http connects', False)
        self.add_item(section, 'checktime', int, 3600, 'base checking interval for urls in db in seconds', False)
        self.add_item(section, 'perfail_checktime', int, 3600, 'additional checking interval for urls in db in seconds per resultless check', False)
        self.add_item(section, 'max_fail', int, 5, 'number of fails after which an url is considered dead', False)
        self.add_item(section, 'database', str, 'proxies.sqlite', 'filename of database', True)
        self.add_item(section, 'extract_samedomain', bool, False, 'extract only url from same domains? (default: False)', False)

        section = 'scraper'
        self.add_item(section, 'debug', bool, False, 'scraper: whether to print additional debug info', False)
        self.add_item(section, 'query', str, 'psw', 'build query using Proxies, Search, Websites', False)

        # Extra command-line-only option exposed through the underlying
        # argparse parser (presumably ComboParser wraps argparse -- the
        # aparser attribute comes from the base class).
        self.aparser.add_argument("--file", help="import a single file containing proxy addrs", type=str, default='', required=False)