Minor changes

Remove comments; minimal code reorganization.
This commit is contained in:
mickael
2019-01-06 01:35:18 +00:00
parent aab7ef15d1
commit 63b77043ac

24
ppf.py
View File

@@ -67,25 +67,20 @@ def insert_proxies(proxies, uri, sqlite):
time.sleep(0.1)
def proxyfind(sqlite = None):
#print('entering proxyfind...')
if not sqlite: sqlite = mysqlite.mysqlite(config.database,str)
uris = [ i[0] for i in sqlite.execute('SELECT url FROM uris WHERE error=0 and url not like "%github%" ORDER BY RANDOM() LIMIT 10').fetchall() ]
if len(uris) > 0 and random.random() < random.random():
search = urllib.quote_plus('site:%s' % random.choice(uris).split('/')[2])
else:
search = urllib.quote_plus(random.choice(search_terms))
choice = random.choice(searx_instances)
urls = []
content = fetch_contents('%s/?q=%s&pageno=%d' % (choice, search, random.randint(0,10)))
uris = [ i[0] for i in sqlite.execute('SELECT url FROM uris WHERE error=0 and url not like "%github%" ORDER BY RANDOM() LIMIT 10').fetchall() ]
if len(uris) > 0 and random.random() < random.random():
search = 'site:%s' % random.choice(uris).split('/')[2]
else:
search = random.choice(search_terms)
content = fetch_contents('%s/?q=%s&pageno=%d' % (choice, urllib.quote_plus(search), random.randint(0,10)))
if not content: return
soup = soupify(content)
soup = soupify(content)
for a in soup.body.find_all('a'):
if not 'rel' in a.attrs or not 'noreferrer' in a.attrs['rel'] or a.attrs['href'] in urls: continue
badurl = [ i for i in urignore if re.findall(i,a.attrs['href'], re.IGNORECASE) ]
@@ -109,8 +104,6 @@ def is_reserved_ipv4(ip):
return False
def proxyleech(sqlite, rows):
#print('entering proxyleech...')
for row in rows:
try: content = fetch_contents(row[0])
except KeyboardInterrupt as e: raise e
@@ -126,7 +119,6 @@ def proxyleech(sqlite, rows):
pass
hash = hashlib.md5(''.join(uniques)).hexdigest()
#print('unique; hash: %s, len: %d' % (hash, len(uniques)))
## empty list of proxies: increment error by two
if not len(uniques): row[2] = (row[2] * 2)
@@ -155,12 +147,10 @@ if __name__ == '__main__':
proxies={'http':'socks4://%s' % random.choice(config.torhosts),'https':'socks4://%s' % random.choice(config.torhosts)}
sqlite = mysqlite.mysqlite(config.database, str)
## create dbs if required
sqlite.execute('CREATE TABLE IF NOT EXISTS uris (added INT, url TEXT, check_time INT, error INT, driver INT, hash TEXT)')
sqlite.execute('CREATE TABLE IF NOT EXISTS proxylist (proxy BLOB, country BLOB, added INT, failed INT, tested INT, dronebl INT, proto TEXT, success_count INT, total_duration INT)')
sqlite.commit()
import_from_file('import.txt', sqlite)
## load search terms