#!/usr/bin/env python
## find proxy-list pages via searx searches, track them in a sqlite db
## and feed freshly extracted proxies to the proxy watcher
import dbs
import random, time
import urllib
import mysqlite
import proxywatchd
from misc import _log
from config import Config
import fetch
import sys

config = Config()

searx_instances = ('https://searx.me',
                   'https://searx.xyz',
                   'https://searx.site',
                   'https://searx.win',
                   'https://searx.ru',
                   'https://stemy.me/searx',
                   'https://searx.at',
                   'https://listi.me',
                   'https://searx.dk',
                   'https://searx.laquadrature.net')


def import_from_file(fn, sqlite):
    ## import candidate proxy-list URLs from a text file, one URL per line
    with open(fn, 'r') as f:
        for u in f.read().split('\n'):
            if not len(u):
                continue
            exists = [ i[0] for i in sqlite.execute('SELECT url FROM uris WHERE url=?', (u,)).fetchall() ]
            if exists:
                continue
            print('adding "%s"' % u)
            sqlite.execute('INSERT INTO uris (added,url,check_time,error,stale_count,proxies_added) VALUES (?,?,?,?,?,?)',
                           (int(time.time()), u, 0, 0, 0, 0))
            sqlite.commit()


def proxyfind(sqlite=None):
    ## query the searx instances for new proxy-list pages and store the hits
    if not sqlite:
        sqlite = mysqlite.mysqlite(config.ppf.database, str)
    uris = [ i[0] for i in sqlite.execute('SELECT url FROM uris WHERE error=0 and url not like "%github%" ORDER BY RANDOM() LIMIT 10').fetchall() ]
    if len(uris) > 0 and random.random() < random.random():
        ## sometimes search for more pages on a host we already know
        search = 'site:%s' % random.choice(uris).split('/')[2]
    else:
        search = random.choice(search_terms)
    search = '%s -intitle:pdf' % search
    search_args = [ 'category=general', 'time_range=day', 'q=%s' % urllib.quote_plus(search) ]
    for srx in searx_instances:
        urls = []
        random.shuffle(search_args)
        search_arg = '&'.join(search_args)
        for x in range(1, 10):
            content = fetch.fetch_contents('%s/?%s&pageno=%d' % (srx, search_arg, x))
            if content:
                urls = fetch.extract_urls(content, urls, urignore)
        if len(urls):
            insert_urls(urls, search_arg, sqlite)


def insert_urls(urls, search, sqlite):
    ## store search results, skipping URLs that are already known
    query = [ 'url=?' for u in urls ]
    known = [ i[0] for i in sqlite.execute('SELECT url FROM uris WHERE %s' % ' OR '.join(query), urls).fetchall() ]
    time_now = int(time.time())
    new = [ (time_now, i, 0, 0, 0, 0, 0) for i in urls if not i in known ]
    if not len(new):
        return
    sqlite.executemany('INSERT INTO uris (added,url,check_time,error,stale_count,retrievals,proxies_added) values(?,?,?,?,?,?,?)', new)
    sqlite.commit()
    _log('+%d item(s) from %s' % (len(new), search), 'added')


def insert_proxies(proxydb, proxies, url):
    ## insert freshly found proxies in batches of 500
    timestamp = int(time.time())
    new = []
    for p in proxies:
        new.append((timestamp, p, 3, 0, 0, 0))
    while len(new):
        proxydb.executemany('INSERT INTO proxylist (added,proxy,failed,tested,success_count,total_duration) VALUES (?,?,?,?,?,?)', new[:500])
        new = new[500:]
        proxydb.commit()
    _log('+%d item(s) from %s' % (len(proxies), url), 'added')


def proxyleech(proxydb, urldb, url, stale_count, error, retrievals, proxies_added):
    ## fetch a known proxy-list page, extract proxies and update the site's stats
    try:
        content = fetch.fetch_contents(url)
    except KeyboardInterrupt as e:
        raise e
    except:
        content = ''
    unique_count, new = fetch.extract_proxies(content, proxydb)
    if retrievals == 0:
        # new site
        if content != '' and unique_count == 0:
            # site works but has zero proxy addresses
            error = 99999
    else:
        if len(new) == 0:
            stale_count += 1
        else:
            stale_count = 0
        if content == '':
            error += 1
        else:
            retrievals += 1
            error = 0
    urldb.execute('UPDATE uris SET error=?,stale_count=?,check_time=?,retrievals=?,proxies_added=? where url=?',
                  (error, stale_count, int(time.time()), retrievals, proxies_added + len(new), url))
    urldb.commit()
    if not len(new):
        return
    insert_proxies(proxydb, new, url)


def import_proxies_from_file(proxydb, fn):
    ## import proxies directly from a local file; returns a shell-style exit code
    content = open(fn, 'r').read()
    unique_count, new = fetch.extract_proxies(content, proxydb)
    if len(new):
        insert_proxies(proxydb, new, fn)
        return 0
    return 1


if __name__ == '__main__':
    config.load()
    fetch.set_config(config)
    proxies = {'http': 'socks4://%s' % random.choice(config.torhosts),
               'https': 'socks4://%s' % random.choice(config.torhosts)}
    proxydb = mysqlite.mysqlite(config.watchd.database, str)
    dbs.create_table_if_not_exists(proxydb, 'proxylist')
    urldb = mysqlite.mysqlite(config.ppf.database, str)
    dbs.create_table_if_not_exists(urldb, 'uris')
    import_from_file('import.txt', urldb)

    if len(sys.argv) == 3 and sys.argv[1] == "--file":
        sys.exit(import_proxies_from_file(proxydb, sys.argv[2]))

    if config.ppf.search:
        ## load search terms
        with open('search_terms.txt', 'r') as f:
            search_terms = [ i.strip() for i in f.read().split('\n') if len(i.strip()) ]
        ## load bad terms
        with open('urignore.txt', 'r') as f:
            urignore = [ i.strip() for i in f.read().split('\n') if len(i.strip()) ]
        ## add searx instances as bad terms (avoid loops)
        for i in searx_instances:
            urignore.append(i.split('/')[2])

    # start proxy watcher
    if config.watchd.threads > 0:
        watcherd = proxywatchd.Proxywatchd()
        watcherd.start()
    else:
        watcherd = None

    while True:
        try:
            ## any site that needs to be checked ?
            rows = urldb.execute('SELECT url,stale_count,error,retrievals,proxies_added FROM uris WHERE error < ? and (check_time+?+((error+stale_count)*?)