outsource configuration to external module

rofl0r
2019-01-05 03:47:03 +00:00
parent 05294186d4
commit ffbe450aee
3 changed files with 41 additions and 36 deletions

config.py (new file)

@@ -0,0 +1,21 @@
+from ConfigParser import SafeConfigParser
+_loaded = False
+def load():
+    if _loaded: return
+    global database, maxfail, search, torhosts, watchd_threads, checktime, timeout, read_timeout
+    ## read the config files
+    parser = SafeConfigParser()
+    parser.read('config.ini')
+    database = parser.get('global', 'database')
+    maxfail = parser.getint('global', 'proxy_max_fail')
+    search = parser.getboolean('proxyfind', 'search')
+    torhosts = [ str(i).strip() for i in parser.get('global', 'tor_host').split(',') ]
+    watchd_threads = parser.getint('watcherd', 'threads')
+    timeout = parser.getint('watcherd', 'timeout')
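For orientation, a config.ini matching this loader could look like the sketch below; the section and key names are exactly those read by load() (checktime and read_timeout are declared global above, so the lines of the 21-line file not shown here presumably assign them), while every value is made up:

    [global]
    database = ppf.db
    proxy_max_fail = 10
    tor_host = 127.0.0.1:9050, 127.0.0.1:9150

    [proxyfind]
    search = true

    [watcherd]
    threads = 4
    checktime = 3600
    timeout = 15
    read_timeout = 30

One detail worth noting: _loaded is absent from the global statement, so any `_loaded = True` in the unshown lines would need its own global declaration for the early return at the top of load() to ever fire.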

ppf.py

@@ -6,13 +6,13 @@ import random, time
 import re
 import urllib
 import hashlib
-from ConfigParser import SafeConfigParser
 from requests.packages.urllib3.exceptions import InsecureRequestWarning
 requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
 import mysqlite
 import proxywatchd
 from misc import _log
 from soup_parser import soupify
+import config
 base_header = {
     'Accept-Language':'en-US,en;q=0.8',
@@ -24,7 +24,6 @@ base_header = {
 searx_instances = ('https://searx.me', 'https://searx.xyz', 'https://searx.site', 'https://searx.win', 'https://searx.ru', 'https://stemy.me/searx', 'https://searx.at', 'https://listi.me', 'https://searx.dk', 'https://searx.laquadrature.net' )
 retry_messages = ('Engines cannot retrieve results', 'Rate limit exceeded')
-CONFIG = 'config.ini'
 def cleanhtml(raw_html):
     cleanr = re.compile('<.*?>')
@@ -69,7 +68,7 @@ def insert_proxies(proxies, uri, sqlite):
 def proxyfind(sqlite = None):
     #print('entering proxyfind...')
-    if not sqlite: sqlite = mysqlite.mysqlite(database,str)
+    if not sqlite: sqlite = mysqlite.mysqlite(config.database,str)
     uris = [ i[0] for i in sqlite.execute('SELECT url FROM uris WHERE error=0 and url not like "%github%" ORDER BY RANDOM() LIMIT 10').fetchall() ]
@@ -149,16 +148,11 @@ def proxyleech(sqlite, rows):
 if __name__ == '__main__':
-    ## read the config files
-    parser = SafeConfigParser()
-    parser.read(CONFIG)
+    config.load()
+    print repr(config.torhosts)
+    proxies={'http':'socks4://%s' % random.choice(config.torhosts),'https':'socks4://%s' % random.choice(config.torhosts)}
-    database = parser.get('global', 'database')
-    search = parser.getboolean('proxyfind', 'search')
-    tor_hosts = parser.get('global', 'tor_host').split(',')
-    proxies={'http':'socks4://%s' % random.choice(tor_hosts),'https':'socks4://%s' % random.choice(tor_hosts)}
-    sqlite = mysqlite.mysqlite(database, str)
+    sqlite = mysqlite.mysqlite(config.database, str)
     ## create dbs if required
     sqlite.execute('CREATE TABLE IF NOT EXISTS uris (added INT, url TEXT, check_time INT, error INT, driver INT, hash TEXT)')
@@ -178,7 +172,7 @@ if __name__ == '__main__':
     empty = [ urignore.append(i.split('/')[2]) for i in searx_instances ]
     # start proxy watcher
-    watcherd = proxywatchd.Proxywatchd(CONFIG) if parser.getboolean('watcherd', 'enabled') else None
+    watcherd = proxywatchd.Proxywatchd() if config.watchd_threads > 0 else None
     while True:
         try:
@@ -186,7 +180,7 @@ if __name__ == '__main__':
             rows = [ [i[0],i[1],i[2]] for i in sqlite.execute('SELECT url,hash,error FROM uris WHERE (check_time<? AND error<?) ORDER BY RANDOM() LIMIT 25', (time.time(), 10)).fetchall() ]
             if len(rows): proxyleech(sqlite,rows)
             ## search for new website during free time
-            elif search: proxyfind(sqlite)
+            elif config.search: proxyfind(sqlite)
             ## sleep
             else: time.sleep(10)
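After this change any entry point only needs the module; a minimal consumer sketch (Python 2, like the print statement above):

    import random
    import config

    config.load()   # returns early on repeated calls via the _loaded flag
    tor = random.choice(config.torhosts)
    proxies = {'http': 'socks4://%s' % tor, 'https': 'socks4://%s' % tor}

Unlike the line in the diff, this picks one Tor host for both schemes; the original calls random.choice twice, so http and https may end up on different hosts.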

proxywatchd.py

@@ -6,7 +6,7 @@ import socket, time, random, sys, string, re
 import requests
 #from geoip import geolite2
-from ConfigParser import SafeConfigParser
+import config
 import mysqlite
 from misc import _log
@@ -18,24 +18,14 @@ class Proxywatchd(Thread):
         _log('Requesting proxywatchd to halt (%d thread(s))' % len([item for item in self.threads if item.isAlive()]))
         self.running = 0
-    def __init__(self, config_file):
+    def __init__(self):
         Thread.__init__(self)
+        config.load()
         self.threads = []
         self.running = 1
-        self.parser = SafeConfigParser()
-        self.parser.read(config_file)
-        self.maxfail = self.parser.getint('global', 'proxy_max_fail')
-        self.maxthreads = self.parser.getint('watcherd', 'threads')
-        self.checktime = self.parser.getint('watcherd', 'checktime')
-        self.timeout = self.parser.getint('watcherd', 'timeout')
-        self.database = self.parser.get('global', 'database')
-        self.torhosts = [ str(i).strip() for i in self.parser.get('global', 'tor_host').split(',') ]
-        self.read_timeout = self.parser.getint('watcherd', 'read_timeout')
         # create table if needed
-        self.mysqlite = mysqlite.mysqlite(self.database, str)
+        self.mysqlite = mysqlite.mysqlite(config.database, str)
         self.mysqlite.execute('CREATE TABLE IF NOT EXISTS proxylist (proxy BLOB, country BLOB, added INT, failed INT, tested INT, source BLOB, dronebl INT, proto TEXT, duration INT)')
         self.mysqlite.commit()
         self.echoise = time.time() - 3600;
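The constructor is now argument-free, and the old 'enabled' flag gives way to a watchd_threads > 0 check (see the ppf.py hunk above); a sketch of the new call site:

    import config
    import proxywatchd

    config.load()
    watcherd = proxywatchd.Proxywatchd() if config.watchd_threads > 0 else None
    if watcherd: watcherd.start()   # Proxywatchd subclasses Thread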
@@ -49,11 +39,11 @@ class Proxywatchd(Thread):
         _log('Starting proxywatchd..', 'notice')
         threads = []
-        self.mysqlite = mysqlite.mysqlite(self.database, str)
+        self.mysqlite = mysqlite.mysqlite(config.database, str)
         while self.running:
-            if len(threads) < self.maxthreads:
+            if len(threads) < config.watchd_threads:
                 t = threading.Thread(target=self.daemon, args=(self.servers,))
                 t.start()
                 threads.append(t)
@@ -62,14 +52,14 @@ class Proxywatchd(Thread):
             else: time.sleep(1)
             if (time.time() - self.echoise) >= 180:
-                _log('Proxywatchd threads: %d/%d' % (len(threads), self.maxthreads))
+                _log('Proxywatchd threads: %d/%d' % (len(threads), config.watchd_threads))
                 self.echoise = time.time()
         self.mysqlite.close()
     def is_drone_bl(self, proxy):
         p = proxy.split(':')[0]
-        proxies = {'http':'socks4://%s:%s@%s' % (p,p,random.choice(self.torhosts))}
+        proxies = {'http':'socks4://%s:%s@%s' % (p,p,random.choice(config.torhosts))}
         resp = requests.get('http://dronebl.org/lookup?ip=%s' % p, proxies=proxies)
         if 'No incidents regarding' in resp.text: return 0
         else: return 1
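Worth spelling out: the %s:%s pair wedges the looked-up IP into the SOCKS4 credentials, and since Tor isolates streams by SOCKS auth by default, each lookup should land on its own circuit. The same trick as a standalone helper (hypothetical name, using requests with SOCKS support as already done here):

    import random
    import requests

    def dronebl_lookup(ip, torhosts):
        # per-IP SOCKS credentials -> likely a distinct Tor circuit per target
        proxies = {'http': 'socks4://%s:%s@%s' % (ip, ip, random.choice(torhosts))}
        resp = requests.get('http://dronebl.org/lookup?ip=%s' % ip, proxies=proxies)
        return 'No incidents regarding' not in resp.text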
@@ -78,7 +68,7 @@ class Proxywatchd(Thread):
         protos = ['http', 'socks5', 'socks4'] if proto is None else proto
         for proto in protos:
-            torhost = random.choice(self.torhosts)
+            torhost = random.choice(config.torhosts)
             duration = time.time()
             proxies = [ rocksock.RocksockProxyFromURL('socks4://%s' % torhost),
                         rocksock.RocksockProxyFromURL('%s://%s' % (proto, proxy[0])),
@@ -86,7 +76,7 @@ class Proxywatchd(Thread):
             srv = random.choice(servers).strip()
             try:
-                sock = rocksock.Rocksock(host=srv, port=6697, ssl=True, proxies=proxies, timeout=self.timeout)
+                sock = rocksock.Rocksock(host=srv, port=6697, ssl=True, proxies=proxies, timeout=config.timeout)
                 sock.connect()
                 sock.send('%s\n' % random.choice(['NICK', 'USER', 'JOIN', 'MODE', 'PART', 'INVITE', 'KNOCK', 'WHOIS', 'WHO', 'NOTICE', 'PRIVMSG', 'PING', 'QUIT']))
                 return sock, proto, duration, torhost, srv
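The check dials an IRC server through a two-hop chain, Tor first, then the proxy under test, so the scanner's own address never touches the proxy. A minimal sketch using the same rocksock calls as above (both addresses are placeholders):

    import rocksock

    proxies = [
        rocksock.RocksockProxyFromURL('socks4://127.0.0.1:9050'),   # hop 1: Tor
        rocksock.RocksockProxyFromURL('http://203.0.113.7:8080'),   # hop 2: candidate proxy
    ]
    sock = rocksock.Rocksock(host='irc.example.net', port=6697, ssl=True, proxies=proxies, timeout=15)
    sock.connect()
    sock.send('PING\n')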
@@ -96,14 +86,14 @@ class Proxywatchd(Thread):
         return None, None, None, None, None
     def daemon(self, servers):
-        sqlite = mysqlite.mysqlite(self.database, str)
+        sqlite = mysqlite.mysqlite(config.database, str)
         threadid = ''.join( [ random.choice(string.letters) for x in range(5) ] )
         q = 'SELECT proxy,failed,country,proto FROM proxylist WHERE failed<? and tested<? ORDER BY RANDOM() LIMIT ?'
         while self.running:
             sqlite_requests = []
-            rows = sqlite.execute(q, (self.maxfail, time.time(), random.randint(10,20))).fetchall()
+            rows = sqlite.execute(q, (config.maxfail, time.time(), random.randint(10,20))).fetchall()
             if not len(rows):
                 time.sleep(random.randint(10,20))
                 continue