Implement combo config parser.
Allows all options to be overridden from the command line, e.g. [watchd] threads=10 debug=false --watchd.threads=50 --debug=true
This commit is contained in:
16
ppf.py
16
ppf.py
@@ -8,10 +8,12 @@ import mysqlite
|
||||
import proxywatchd
|
||||
from misc import _log
|
||||
from soup_parser import soupify
|
||||
import config
|
||||
from config import Config
|
||||
from http2 import RsHttp, _parse_url
|
||||
import rocksock
|
||||
|
||||
config = Config()
|
||||
|
||||
base_header = {
|
||||
'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
|
||||
}
|
||||
@@ -71,7 +73,7 @@ def insert_proxies(proxies, uri, sqlite, timestamp):
|
||||
_log('+%d item(s) from %s' % (len(new), uri), 'added')
|
||||
|
||||
def proxyfind(sqlite = None):
|
||||
if not sqlite: sqlite = mysqlite.mysqlite(config.database,str)
|
||||
if not sqlite: sqlite = mysqlite.mysqlite(config.common.database,str)
|
||||
choice = random.choice(searx_instances)
|
||||
urls = []
|
||||
|
||||
@@ -157,14 +159,14 @@ if __name__ == '__main__':
|
||||
config.load()
|
||||
proxies={'http':'socks4://%s' % random.choice(config.torhosts),'https':'socks4://%s' % random.choice(config.torhosts)}
|
||||
|
||||
sqlite = mysqlite.mysqlite(config.database, str)
|
||||
sqlite = mysqlite.mysqlite(config.common.database, str)
|
||||
## create dbs if required
|
||||
sqlite.execute('CREATE TABLE IF NOT EXISTS uris (added INT, url TEXT, check_time INT, error INT, driver INT, hash TEXT)')
|
||||
sqlite.execute('CREATE TABLE IF NOT EXISTS proxylist (proxy BLOB, country BLOB, added INT, failed INT, tested INT, dronebl INT, proto TEXT, success_count INT, total_duration INT)')
|
||||
sqlite.commit()
|
||||
import_from_file('import.txt', sqlite)
|
||||
|
||||
if config._leechd.search:
|
||||
if config.ppf.search:
|
||||
## load search terms
|
||||
with open('search_terms.txt', 'r') as f:
|
||||
search_terms = [ i.strip() for i in f.read().split('\n') if len(i.strip()) ]
|
||||
@@ -175,7 +177,7 @@ if __name__ == '__main__':
|
||||
empty = [ urignore.append(i.split('/')[2]) for i in searx_instances ]
|
||||
|
||||
# start proxy watcher
|
||||
if config._watchd.threads > 0:
|
||||
if config.watchd.threads > 0:
|
||||
watcherd = proxywatchd.Proxywatchd()
|
||||
watcherd.start()
|
||||
else:
|
||||
@@ -185,11 +187,11 @@ if __name__ == '__main__':
|
||||
while True:
|
||||
try:
|
||||
## any site that needs to be checked ?
|
||||
rows = [ [i[0],i[1],i[2]] for i in sqlite.execute('SELECT url,hash,error FROM uris WHERE (check_time+?+(error*?) <?) ORDER BY RANDOM() LIMIT 25', (config._leechd.checktime, config._leechd.perfail_checktime, time.time())).fetchall() ]
|
||||
rows = [ [i[0],i[1],i[2]] for i in sqlite.execute('SELECT url,hash,error FROM uris WHERE (check_time+?+(error*?) <?) ORDER BY RANDOM() LIMIT 25', (config.ppf.checktime, config.ppf.perfail_checktime, time.time())).fetchall() ]
|
||||
|
||||
if len(rows): proxyleech(sqlite,rows)
|
||||
## search for new website during free time
|
||||
elif config._leechd.search: proxyfind(sqlite)
|
||||
elif config.ppf.search: proxyfind(sqlite)
|
||||
## sleep
|
||||
else: time.sleep(10)
|
||||
|
||||
|
||||
Reference in New Issue
Block a user