don't loop over every searx instance

randomly pick one per search instead
This commit is contained in:
Mickaël Serneels
2019-04-07 18:39:01 +02:00
parent 67aec84320
commit a2783bdfcf

View File

@@ -25,7 +25,7 @@ def proxyfind(sqlite = None, urignore=None):
search = '%s -intitle:pdf' % search
search_args = [ 'category=general', 'time_range=day', 'q=%s' % urllib.quote_plus(search) ]
for srx in random.sample(searx_instances,3):
searx = random.sample(searx_instances)
urls = []
random.shuffle(search_args)
search_arg = '&'.join(search_args)