diff --git a/ppf.py b/ppf.py
index d0df5ba..dbbfecf 100755
--- a/ppf.py
+++ b/ppf.py
@@ -96,8 +96,6 @@ def insert_proxies(proxies, uri, sqlite, timestamp):
 
 def proxyfind(sqlite = None):
     if not sqlite: sqlite = mysqlite.mysqlite(config.common.database,str)
-    choice = random.choice(searx_instances)
-    urls = []
     uris = [ i[0] for i in sqlite.execute('SELECT url FROM uris WHERE error=0 and url not like "%github%" ORDER BY RANDOM() LIMIT 10').fetchall() ]
 
     if len(uris) > 0 and random.random() < random.random():
@@ -105,26 +103,34 @@ def proxyfind(sqlite = None):
     else:
         search = random.choice(search_terms)
 
-    content = fetch_contents('%s/?q=%s&pageno=%d' % (choice, urllib.quote_plus(search), random.randint(0,10)))
-    if not content: return
+    search = '%s -intitle:pdf' % search
+    search_args = [ 'category=general', 'time_range=day', 'q=%s' % urllib.quote_plus(search) ]
+    for srx in searx_instances:
+        urls = []
+        random.shuffle(search_args)
+        search_arg = '&'.join(search_args)
+        for x in range(1,10):
+            content = fetch_contents('%s/?%s&pageno=%d' % (srx,search_arg,x))
+            if content: urls = extract_urls(content, urls)
+        if len(urls): insert_urls(urls, search_arg, sqlite)
 
+def extract_urls(content, urls = []):
     soup = soupify(content)
     for a in soup.body.find_all('a'):
         if not 'rel' in a.attrs or not 'noreferrer' in a.attrs['rel'] or a.attrs['href'] in urls: continue
         badurl = [ i for i in urignore if re.findall(i,a.attrs['href'], re.IGNORECASE) ]
         if not len(badurl): urls.append(a.attrs['href'])
+    return urls
 
-    if len(urls):
-        query = [ 'url=?' for u in urls ]
-        known = [ i[0] for i in sqlite.execute('SELECT url FROM uris WHERE %s' % ' OR '.join(query),urls).fetchall() ]
-        time_now = int(time.time())
-        new = [ (time_now,i,0,5,0) for i in urls if not i in known ]
-        if len(new):
-            sqlite.executemany('INSERT INTO uris (added,url,check_time,error,driver) values(?,?,?,?,?)', new)
-            sqlite.commit()
-            _log('+%d item(s) from %s' % (len(new), search), 'added')
-
+def insert_urls(urls, search, sqlite):
+    query = [ 'url=?' for u in urls ]
+    known = [ i[0] for i in sqlite.execute('SELECT url FROM uris WHERE %s' % ' OR '.join(query),urls).fetchall() ]
+    time_now = int(time.time())
+    new = [ (time_now,i,0,5,0) for i in urls if not i in known ]
+    if not len(new): return
+    sqlite.executemany('INSERT INTO uris (added,url,check_time,error,driver) values(?,?,?,?,?)', new)
     sqlite.commit()
+    _log('+%d item(s) from %s' % (len(new), search), 'added')
 
 def is_usable_proxy(proxy):
     octets = proxy.split(':')[0].split('.')
@@ -146,7 +152,7 @@ def proxyleech(sqlite, rows):
         except KeyboardInterrupt as e: raise e
         except: content = ''
 
-        matches = re.findall(r'[0-9]+(?:\.[0-9]+){3}:[0-9]+', cleanhtml(content))
+        matches = re.findall(r'([0-9]+(?:\.[0-9]+){3}:[0-9]{1,5})[\D$]', cleanhtml(content))
 
         uniques_dict = {}
         for p in matches:
@@ -159,7 +165,10 @@ def proxyleech(sqlite, rows):
         hash = hashlib.md5(''.join(uniques)).hexdigest()
 
         ## empty list of proxies: multiply error by two
-        if not len(uniques): row[2] = (row[2] * 2)
+        if not len(uniques):
+            if row[1]: row[2] = (row[2] * 2)
+            else: row[2] = 99999
+
         ## same proxy list: increment error by one
         elif hash == row[1]: row[2] = (row[2] + 1)
         ## proxylist was updated: error is zero
@@ -214,11 +223,11 @@ if __name__ == '__main__':
     while True:
         try:
             ## any site that needs to be checked ?
-            rows = [ [i[0],i[1],i[2]] for i in sqlite.execute('SELECT url,hash,error FROM uris WHERE (check_time+?+(error*?)