search: more changes
scraper.py (53 lines changed)
@@ -12,26 +12,41 @@ import sys
config = Config()

searx_instances = ('https://searx.me', 'https://searx.xyz', 'https://searx.site', 'https://searx.win', 'https://searx.ru', 'https://stemy.me/searx', 'https://searx.at', 'https://listi.me', 'https://searx.dk', 'https://searx.laquadrature.net' )

#searx_instances = ['https://searx.me', 'https://searx.xyz', 'https://searx.site', 'https://searx.win', 'https://searx.ru', 'https://stemy.me/searx', 'https://searx.at', 'https://listi.me', 'https://searx.dk', 'https://searx.laquadrature.net', 'https://searx.serneels.xyz' ]
searx_instances = ['http://searchb5a7tmimez.onion', 'http://nxhhwbbxc4khvvlw.onion','http://searx.l4qlywnpwqsluw65ts7md3khrivpirse744un3x7mlskqauz5pyuzgqd.onion', 'http://ulrn6sryqaifefld.onion']
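For reference, a minimal sketch of the instance-rotation pattern the code relies on; the URLs come from the onion list above, while the `pair`/`one` names are illustrative only:

    import random

    searx_instances = ['http://searchb5a7tmimez.onion', 'http://nxhhwbbxc4khvvlw.onion', 'http://ulrn6sryqaifefld.onion']

    # Shuffle in place so repeated runs do not always hit the same instance first.
    random.shuffle(searx_instances)

    # random.sample requires an explicit sample size k and returns a list;
    # random.choice returns a single element.
    pair = random.sample(searx_instances, 2)
    one = random.choice(searx_instances)
    print(pair, one)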
def proxyfind(sqlite = None, urignore=None):
    if not sqlite: sqlite = mysqlite.mysqlite(config.ppf.database,str)
    random.shuffle(searx_instances)
    proxydb = mysqlite.mysqlite(config.watchd.database,str)
    proxies = [ i[0] for i in proxydb.execute('SELECT proxy FROM proxylist WHERE failed=0 ORDER BY RANDOM() LIMIT 10').fetchall() ]
    search = ''
    if len(proxies) and random.random() < random.random():
        search = ' '.join( random.sample(proxies, random.randint(1,5)))
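A side note on the recurring random.random() < random.random() guard: comparing two independent uniform draws is just a fair coin flip, since P(U1 < U2) = 1/2 and ties have probability zero. A quick empirical check, not part of the commit:

    import random

    # Confirm the comparison fires roughly half the time.
    trials = 100000
    hits = sum(random.random() < random.random() for _ in range(trials))
    print(hits / float(trials))  # ~0.5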
    uris = [ i[0] for i in sqlite.execute('SELECT url FROM uris WHERE error=0 and url not like "%github%" ORDER BY RANDOM() LIMIT 10').fetchall() ]
    if len(uris) > 0 and random.random() < random.random():
        search = 'site:%s' % random.choice(uris).split('/')[2]
    else:
        search = random.choice(search_terms)
    if not len(search) or random.random() < random.random():
        if not sqlite: sqlite = mysqlite.mysqlite(config.ppf.database,str)
        uris = [ i[0] for i in sqlite.execute('SELECT url FROM uris WHERE error=0 and url not like "%github%" ORDER BY RANDOM() LIMIT 10').fetchall() ]
        if len(uris) > 0 and random.random() < random.random():
            if len(search): search = '%s ' % search
            search = search + 'site:%s' % random.choice(uris).split('/')[2]
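random.choice(uris).split('/')[2] pulls the host out of a stored URL for the site: operator. A sketch of that extraction; the urlparse variant is a suggested alternative, not something the commit uses:

    url = 'https://searx.laquadrature.net/some/page'

    # Index 2 of str.split('/') is the host, because the scheme's '//'
    # leaves an empty element at index 1:
    # ['https:', '', 'searx.laquadrature.net', 'some', 'page']
    host = url.split('/')[2]

    # A sturdier alternative that also copes with URLs lacking a path:
    from urllib.parse import urlparse  # Python 2: from urlparse import urlparse
    host2 = urlparse(url).netloc

    print('site:%s' % host)  # site:searx.laquadrature.net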
    search_args = [ 'category=general', 'time_range=day', 'q=%s' % urllib.quote_plus(search) ]
    searx = random.sample(searx_instances)
    urls = []
    if not len(search) or random.random() < random.random():
        if len(search): search = '%s ' % search
        search = search + random.choice(search_terms)

    search_args = [ 'category=general', 'time_range=%s' % random.choice(['day','week','month','year']), 'q=%s' % urllib.quote_plus(search) ]
    random.shuffle(search_args)
    search_arg = '&'.join(search_args)
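Note that the dropped random.sample(searx_instances) call would have raised a TypeError anyway, since random.sample requires a sample-size argument; the new code instead walks every instance. The query-string assembly itself follows this sketch (Python 3 import shown; in the commit's Python 2, quote_plus lives directly in urllib):

    import random
    from urllib.parse import quote_plus  # Python 2: from urllib import quote_plus

    search = 'site:searx.dk'
    search_args = [
        'category=general',
        'time_range=%s' % random.choice(['day', 'week', 'month', 'year']),
        'q=%s' % quote_plus(search),  # encodes ':' as '%3A', spaces as '+'
    ]
    random.shuffle(search_args)  # parameter order is irrelevant to the server
    search_arg = '&'.join(search_args)
    print(search_arg)  # e.g. q=site%3Asearx.dk&category=general&time_range=week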
    for x in range(1,10):
        content = fetch.fetch_contents('%s/?%s&pageno=%d' % (srx,search_arg,x))
        if content: urls = fetch.extract_urls(content, urls, urignore)
    if len(urls): dbs.insert_urls(urls, search_arg, sqlite)
    for srx in searx_instances:
        x = 0
        while 1:
            urls = []
            if x > 0: content = fetch.fetch_contents('%s/?%s&pageno=%d' % (srx,search_arg,x))
            else: content = fetch.fetch_contents('%s/?%s' % (srx,search_arg))
            if content: urls = fetch.extract_urls(content, urls, urignore)

            if not len(urls): break
            dbs.insert_urls(urls, '%s/?%s (pageno: %d)' % (srx.split('/')[2],search_arg,x) , sqlite)
            x = x + 1
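The rewritten loop pages through each instance until a page yields no URLs, rather than stopping at a fixed nine pages. A self-contained sketch of that until-empty pattern, with a hypothetical fetch_page standing in for the repo's fetch.fetch_contents plus fetch.extract_urls:

    def fetch_page(base, query, page):
        """Hypothetical stand-in: return a list of result URLs, [] when exhausted."""
        return []  # placeholder so the sketch runs

    def crawl(instances, query):
        for base in instances:
            page = 0
            while True:
                urls = fetch_page(base, query, page)
                if not urls:  # an empty page means this instance is done
                    break
                print('%s page %d: %d urls' % (base, page, len(urls)))
                page += 1

    crawl(['https://searx.example'], 'q=test')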
def load_urignore():
@@ -61,11 +76,7 @@ if __name__ == '__main__':
    urignore = load_urignore()

    while True:
        try:
            proxyfind(urldb, urignore)
        except KeyboardInterrupt:
            break
        try: proxyfind(urldb, urignore)
        except KeyboardInterrupt: break

        print '\r',
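The second hunk collapses the four-line try/except into single-line clauses; both spellings are equivalent, since Python accepts a simple statement as an inline suite. A minimal sketch with a hypothetical work() in place of proxyfind:

    def work():
        """Hypothetical stand-in for proxyfind(urldb, urignore)."""
        raise KeyboardInterrupt  # simulate Ctrl-C so the loop exits

    while True:
        # Inline suites are legal whenever the body is one simple statement;
        # this behaves exactly like the indented four-line form.
        try: work()
        except KeyboardInterrupt: break
    print('done')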