#!/usr/bin/env python3
"""Build randomized search queries (from known proxies, known hosts and a
term list), run them against a pool of searx instances, and store every
URL found in the results database."""

import random
import sys
import time
import urllib.parse

import dbs
import fetch
import mysqlite
import proxywatchd
from config import Config
from misc import _log

config = Config()

## load the searx instance pool; one base URL per line
with open('searx.instances') as h:
    searx_instances = [line.strip() for line in h if line.lower().startswith('http')]

print(searx_instances)


def proxyfind(sqlite=None, urignore=None):
    ## search_terms is loaded at module scope by the __main__ block below
    search = ''
    random.shuffle(searx_instances)

    ## search by working proxy: use 1-2 known-good proxies as the query
    if 'p' in config.scraper.query:
        proxydb = mysqlite.mysqlite(config.watchd.database, str)
        proxies = [i[0] for i in proxydb.execute(
            'SELECT proxy FROM proxylist WHERE failed=0 ORDER BY RANDOM() LIMIT 10').fetchall()]
        if proxies and random.random() < random.random():  # ~50% chance
            search = ' '.join(random.sample(proxies, random.randint(1, min(2, len(proxies)))))

    ## search by related host: restrict to the domain of a known URL
    if 'w' in config.scraper.query and (not search or random.random() < random.random()):
        if not sqlite:
            sqlite = mysqlite.mysqlite(config.ppf.database, str)
        uris = [i[0] for i in sqlite.execute(
            "SELECT url FROM uris WHERE error=0 AND url NOT LIKE '%github%' "
            'ORDER BY RANDOM() LIMIT 10').fetchall()]
        if uris and random.random() < random.random():
            if search:
                search = '%s OR ' % search
            search += 'site:%s' % random.choice(uris).split('/')[2]

    ## search by random term from search_terms.txt
    if 's' in config.scraper.query and (not search or random.random() < random.random()):
        if search:
            search = '%s OR ' % search
        search += random.choice(search_terms)

    if not search:
        return

    ## assemble the query string; argument order is shuffled so requests
    ## to the instances do not all look identical
    search_args = [
        'category=general',
        'time_range=%s' % random.choice(['day', 'week', 'month', 'year']),
        'q=%s' % urllib.parse.quote_plus(search),
    ]
    random.shuffle(search_args)
    search_arg = '&'.join(search_args)
    if config.scraper.debug:
        print('search_arg: %s' % search_arg)

    ## walk the result pages of every instance until one comes back empty
    for srx in searx_instances:
        pageno = 1
        while True:
            if pageno == 1:
                content = fetch.fetch_contents('%s/?%s' % (srx, search_arg))
            else:
                content = fetch.fetch_contents('%s/?%s&pageno=%d' % (srx, search_arg, pageno))
            urls = []
            if content:
                urls = fetch.extract_urls(content, urls, urignore)
            if not urls:
                break
            dbs.insert_urls(urls, '%s/?%s (pageno: %d)' % (srx.split('/')[2], search_arg, pageno), sqlite)
            pageno += 1


def load_urignore():
    ## load bad terms
    with open('urignore.txt', 'r') as f:
        urignore = [i.strip() for i in f.read().split('\n') if i.strip()]
    ## add the searx instances themselves as bad terms (avoid loops)
    for i in searx_instances:
        urignore.append(i.split('/')[2])
    return urignore


if __name__ == '__main__':
    config.load()
    fetch.set_config(config)

    proxydb = mysqlite.mysqlite(config.watchd.database, str)
    dbs.create_table_if_not_exists(proxydb, 'proxylist')
    urldb = mysqlite.mysqlite(config.ppf.database, str)
    dbs.create_table_if_not_exists(urldb, 'uris')

    ## load search terms, one per line
    with open('search_terms.txt', 'r') as f:
        search_terms = [i.strip() for i in f.read().split('\n') if i.strip()]

    urignore = load_urignore()

    while True:
        try:
            proxyfind(urldb, urignore)
        except KeyboardInterrupt:
            break

    print('\r', end='')  # overwrite the '^C' echoed by the terminal
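
## Input files (assumed formats, inferred from the loading code above;
## the example hostname below is hypothetical):
##   searx.instances  - one searx instance base URL per line, e.g. https://searx.example.org
##   search_terms.txt - one search term per line; blank lines are skipped
##   urignore.txt     - one substring per line; URLs matching any entry are
##                      dropped by fetch.extract_urls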