search.py

'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with searx. If not, see <http://www.gnu.org/licenses/>.

(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''
import gc
import sys
import threading
from time import time
from uuid import uuid4

from flask_babel import gettext
import requests.exceptions

import searx.poolrequests as requests_lib
from searx.engines import (
    categories, engines, settings
)
from searx.answerers import ask
from searx.utils import gen_useragent
from searx.query import RawTextQuery, SearchQuery, VALID_LANGUAGE_CODE
from searx.results import ResultContainer
from searx import logger
from searx.plugins import plugins
from searx.exceptions import SearxParameterException

try:
    from thread import start_new_thread
except ImportError:
    from _thread import start_new_thread

if sys.version_info[0] == 3:
    unicode = str

logger = logger.getChild('search')

number_of_searches = 0

# one shared lock guarding engine stats updates across search threads
engines_stats_lock = threading.RLock()

max_request_timeout = settings.get('outgoing', {}).get('max_request_timeout', None)
if max_request_timeout is None:
    logger.info('max_request_timeout={0}'.format(max_request_timeout))
else:
    if isinstance(max_request_timeout, float):
        logger.info('max_request_timeout={0} second(s)'.format(max_request_timeout))
    else:
        logger.critical('outgoing.max_request_timeout, if defined, has to be a float')
        sys.exit(1)
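
# The corresponding settings.yml snippet would look like this
# (the value is illustrative):
#
#   outgoing:
#       max_request_timeout: 10.0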


def send_http_request(engine, request_params):
    # create a dictionary containing all information about the request
    request_args = dict(
        headers=request_params['headers'],
        cookies=request_params['cookies'],
        verify=request_params['verify']
    )

    # specific type of request (GET or POST)
    if request_params['method'] == 'GET':
        req = requests_lib.get
    else:
        req = requests_lib.post
        request_args['data'] = request_params['data']

    # send the request
    return req(request_params['url'], **request_args)


def search_one_http_request(engine, query, request_params):
    # update request parameters dependent on
    # the search engine (contained in the engines folder)
    engine.request(query, request_params)

    # ignoring empty urls
    if request_params['url'] is None:
        return None

    if not request_params['url']:
        return None

    # send request
    response = send_http_request(engine, request_params)

    # parse the response
    response.search_params = request_params
    return engine.response(response)


def search_one_offline_request(engine, query, request_params):
    return engine.search(query, request_params)
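
# For illustration, a minimal offline engine would expose a `search` hook of
# this shape (hypothetical module): instead of issuing an HTTP request, it
# returns a list of result dicts directly, which is what result_container
# receives above.
#
#   def search(query, request_params):
#       return [{'title': 'example', 'content': 'local result for ' + query}]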


def search_one_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
    if engines[engine_name].offline:
        return search_one_offline_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)  # noqa
    return search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)


def search_one_offline_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
    engine = engines[engine_name]

    try:
        search_results = search_one_offline_request(engine, query, request_params)

        if search_results:
            result_container.extend(engine_name, search_results)

            engine_time = time() - start_time
            result_container.add_timing(engine_name, engine_time, engine_time)
            with engines_stats_lock:
                engine.stats['engine_time'] += engine_time
                engine.stats['engine_time_count'] += 1

    except ValueError as e:
        record_offline_engine_stats_on_error(engine, result_container, start_time)
        logger.exception('engine {0} : invalid input : {1}'.format(engine_name, e))
    except Exception as e:
        record_offline_engine_stats_on_error(engine, result_container, start_time)

        result_container.add_unresponsive_engine((
            engine_name,
            u'{0}: {1}'.format(gettext('unexpected crash'), e),
        ))
        logger.exception('engine {0} : exception : {1}'.format(engine_name, e))


def record_offline_engine_stats_on_error(engine, result_container, start_time):
    engine_time = time() - start_time
    result_container.add_timing(engine.name, engine_time, engine_time)

    with engines_stats_lock:
        engine.stats['errors'] += 1


def search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
    # set timeout for all HTTP requests
    requests_lib.set_timeout_for_thread(timeout_limit, start_time=start_time)
    # reset the HTTP total time
    requests_lib.reset_time_for_thread()

    engine = engines[engine_name]

    # suppose everything will be alright
    requests_exception = False

    try:
        # send requests and parse the results
        search_results = search_one_http_request(engine, query, request_params)

        # check if the engine accepted the request
        if search_results is not None:
            # yes, so add results
            result_container.extend(engine_name, search_results)

            # update engine time when there is no exception
            engine_time = time() - start_time
            page_load_time = requests_lib.get_time_for_thread()
            result_container.add_timing(engine_name, engine_time, page_load_time)
            with engines_stats_lock:
                engine.stats['engine_time'] += engine_time
                engine.stats['engine_time_count'] += 1
                # update stats with the total HTTP time
                engine.stats['page_load_time'] += page_load_time
                engine.stats['page_load_count'] += 1

    except Exception as e:
        # timing
        engine_time = time() - start_time
        page_load_time = requests_lib.get_time_for_thread()
        result_container.add_timing(engine_name, engine_time, page_load_time)

        # record the errors
        with engines_stats_lock:
            engine.stats['errors'] += 1

        if isinstance(e, requests.exceptions.Timeout):
            result_container.add_unresponsive_engine((engine_name, gettext('timeout')))
            # requests timeout (connect or read)
            logger.error("engine {0} : HTTP requests timeout "
                         "(search duration : {1} s, timeout: {2} s) : {3}"
                         .format(engine_name, engine_time, timeout_limit, e.__class__.__name__))
            requests_exception = True
        elif isinstance(e, requests.exceptions.RequestException):
            result_container.add_unresponsive_engine((engine_name, gettext('request exception')))
            # other requests exception
            logger.exception("engine {0} : requests exception "
                             "(search duration : {1} s, timeout: {2} s) : {3}"
                             .format(engine_name, engine_time, timeout_limit, e))
            requests_exception = True
        else:
            result_container.add_unresponsive_engine((
                engine_name,
                u'{0}: {1}'.format(gettext('unexpected crash'), e),
            ))
            # other errors
            logger.exception('engine {0} : exception : {1}'.format(engine_name, e))

    # suspend or not the engine if there are HTTP errors
    with engines_stats_lock:
        if requests_exception:
            # update continuous_errors / suspend_end_time
            engine.continuous_errors += 1
            engine.suspend_end_time = time() + min(settings['search']['max_ban_time_on_fail'],
                                                   engine.continuous_errors * settings['search']['ban_time_on_fail'])
        else:
            # no HTTP error (perhaps an engine error)
            # anyway, reset the suspend variables
            engine.continuous_errors = 0
            engine.suspend_end_time = 0


def search_multiple_requests(requests, result_container, start_time, timeout_limit):
    search_id = str(uuid4())

    for engine_name, query, request_params in requests:
        th = threading.Thread(
            target=search_one_request_safe,
            args=(engine_name, query, request_params, result_container, start_time, timeout_limit),
            name=search_id,
        )
        th._engine_name = engine_name
        th.start()

    for th in threading.enumerate():
        if th.name == search_id:
            remaining_time = max(0.0, timeout_limit - (time() - start_time))
            th.join(remaining_time)
            if th.is_alive():
                result_container.add_unresponsive_engine((th._engine_name, gettext('timeout')))
                logger.warning('engine timeout: {0}'.format(th._engine_name))
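
# The join pattern above works because every engine thread of one search
# shares `search_id` as its thread name: threading.enumerate() then picks out
# exactly this search's threads, and each join() only waits for whatever is
# left of the shared deadline. For example (illustrative numbers), with
# timeout_limit=3.0 and 1.2 s already elapsed, th.join(1.8) is called.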


# get default request parameters
def default_request_params():
    return {
        'method': 'GET',
        'headers': {},
        'data': {},
        'url': '',
        'cookies': {},
        'verify': True
    }
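
# These defaults are handed to each engine's `request(query, params)` hook,
# which is expected to fill in at least `params['url']`. A minimal sketch of
# such a hook (hypothetical engine module, illustrative URL):
#
#   def request(query, params):
#       params['url'] = 'https://example.org/search?q={0}'.format(query)
#       return params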


# remove duplicate queries.
# FIXME: does not fix "!music !soundcloud", because the categories are 'none' and 'music'
def deduplicate_query_engines(query_engines):
    uniq_query_engines = {q["category"] + '|' + q["name"]: q for q in query_engines}
    return uniq_query_engines.values()
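
# Deduplication keys on 'category|name', so with (illustrative data)
#   [{'category': 'general', 'name': 'ddg'},
#    {'category': 'general', 'name': 'ddg'},
#    {'category': 'images',  'name': 'ddg'}]
# the two 'general' entries collapse into one, while the same engine under
# the 'images' category is kept as a separate entry.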


def get_search_query_from_webapp(preferences, form):
    # no text for the query ?
    if not form.get('q'):
        raise SearxParameterException('q', '')

    # set blocked engines
    disabled_engines = preferences.engines.get_disabled()

    # parse query, if tags are set, which change
    # the search engine or search language
    raw_text_query = RawTextQuery(form['q'], disabled_engines)
    raw_text_query.parse_query()

    # set query
    query = raw_text_query.getSearchQuery()

    # get and check page number
    pageno_param = form.get('pageno', '1')
    if not pageno_param.isdigit() or int(pageno_param) < 1:
        raise SearxParameterException('pageno', pageno_param)
    query_pageno = int(pageno_param)

    # get language
    # set specific language if set on request, query or preferences
    # TODO support search with multiple languages
    if raw_text_query.languages:
        query_lang = raw_text_query.languages[-1]
    elif 'language' in form:
        query_lang = form.get('language')
    else:
        query_lang = preferences.get_value('language')

    # check language
    if not VALID_LANGUAGE_CODE.match(query_lang):
        raise SearxParameterException('language', query_lang)

    # get safesearch
    if 'safesearch' in form:
        query_safesearch = form.get('safesearch')
        # first check safesearch
        if not query_safesearch.isdigit():
            raise SearxParameterException('safesearch', query_safesearch)
        query_safesearch = int(query_safesearch)
    else:
        query_safesearch = preferences.get_value('safesearch')

    # safesearch : second check
    if query_safesearch < 0 or query_safesearch > 2:
        raise SearxParameterException('safesearch', query_safesearch)

    # get time_range
    query_time_range = form.get('time_range')

    # check time_range
    if query_time_range not in ('None', None, '', 'day', 'week', 'month', 'year'):
        raise SearxParameterException('time_range', query_time_range)

    # query_engines
    query_engines = raw_text_query.engines

    # timeout_limit
    query_timeout = raw_text_query.timeout_limit
    if query_timeout is None and 'timeout_limit' in form:
        raw_time_limit = form.get('timeout_limit')
        if raw_time_limit in ['None', '']:
            raw_time_limit = None
        else:
            try:
                query_timeout = float(raw_time_limit)
            except ValueError:
                raise SearxParameterException('timeout_limit', raw_time_limit)

    # query_categories
    query_categories = []

    # if engines are calculated from the query,
    # set categories by using that information
    if query_engines and raw_text_query.specific:
        additional_categories = set()
        for engine in query_engines:
            if 'from_bang' in engine and engine['from_bang']:
                additional_categories.add('none')
            else:
                additional_categories.add(engine['category'])
        query_categories = list(additional_categories)
    # otherwise, use the selected categories to
    # calculate which engines should be used
    else:
        # set categories/engines
        load_default_categories = True
        for pd_name, pd in form.items():
            if pd_name == 'categories':
                query_categories.extend(categ for categ in map(unicode.strip, pd.split(',')) if categ in categories)
            elif pd_name == 'engines':
                pd_engines = [{'category': engines[engine].categories[0],
                               'name': engine}
                              for engine in map(unicode.strip, pd.split(',')) if engine in engines]
                if pd_engines:
                    query_engines.extend(pd_engines)
                    load_default_categories = False
            elif pd_name.startswith('category_'):
                category = pd_name[9:]

                # if category is not found in list, skip
                if category not in categories:
                    continue

                if pd != 'off':
                    # add category to list
                    query_categories.append(category)
                elif category in query_categories:
                    # remove category from list if property is set to 'off'
                    query_categories.remove(category)

        if not load_default_categories:
            if not query_categories:
                query_categories = list(set(engine['category']
                                            for engine in query_engines))
        else:
            # if no category is specified for this search,
            # use the user-defined default configuration
            # (which is stored in a cookie)
            if not query_categories:
                cookie_categories = preferences.get_value('categories')
                for ccateg in cookie_categories:
                    if ccateg in categories:
                        query_categories.append(ccateg)

            # if still no category is specified, use 'general'
            # as the default category
            if not query_categories:
                query_categories = ['general']

            # use all engines for that search which are
            # declared under the selected categories and not disabled
            for categ in query_categories:
                query_engines.extend({'category': categ,
                                      'name': engine.name}
                                     for engine in categories[categ]
                                     if (engine.name, categ) not in disabled_engines)

    query_engines = deduplicate_query_engines(query_engines)

    return (SearchQuery(query, query_engines, query_categories,
                        query_lang, query_safesearch, query_pageno,
                        query_time_range, query_timeout),
            raw_text_query)
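
# Example (hypothetical form data): a request carrying
#   {'q': 'searx', 'pageno': '2', 'time_range': 'month', 'category_general': 'on'}
# yields a SearchQuery for page 2 of the 'general' category, restricted to
# the last month, with language and safesearch taken from `preferences`.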


class Search(object):
    """Search information container"""

    def __init__(self, search_query):
        # init vars
        super(Search, self).__init__()
        self.search_query = search_query
        self.result_container = ResultContainer()
        self.actual_timeout = None

    # do search-request
    def search(self):
        global number_of_searches

        # start time
        start_time = time()

        # answers ?
        answerers_results = ask(self.search_query)

        if answerers_results:
            for results in answerers_results:
                self.result_container.extend('answer', results)
            return self.result_container

        # init vars
        requests = []

        # increase number of searches
        number_of_searches += 1

        # set default useragent
        # user_agent = request.headers.get('User-Agent', '')
        user_agent = gen_useragent()

        search_query = self.search_query

        # max of all selected engine timeouts
        default_timeout = 0

        # start search request for all selected engines
        for selected_engine in search_query.engines:
            if selected_engine['name'] not in engines:
                continue

            engine = engines[selected_engine['name']]

            # skip suspended engines
            if engine.suspend_end_time >= time():
                logger.debug('Engine currently suspended: %s', selected_engine['name'])
                continue

            # if paging is not supported, skip
            if search_query.pageno > 1 and not engine.paging:
                continue

            # if time_range is not supported, skip
            if search_query.time_range and not engine.time_range_support:
                continue

            # set default request parameters
            request_params = {}
            if not engine.offline:
                request_params = default_request_params()
                request_params['headers']['User-Agent'] = user_agent

                if hasattr(engine, 'language') and engine.language:
                    request_params['language'] = engine.language
                else:
                    request_params['language'] = search_query.lang

                request_params['safesearch'] = search_query.safesearch
                request_params['time_range'] = search_query.time_range

            request_params['category'] = selected_engine['category']
            request_params['pageno'] = search_query.pageno

            # append request to list
            requests.append((selected_engine['name'], search_query.query, request_params))

            # update default_timeout
            default_timeout = max(default_timeout, engine.timeout)

        # adjust timeout
        self.actual_timeout = default_timeout
        query_timeout = self.search_query.timeout_limit

        if max_request_timeout is None and query_timeout is None:
            # no max, no user query: use default_timeout
            pass
        elif max_request_timeout is None and query_timeout is not None:
            # no max, but user query: use the user query, capped at default_timeout
            self.actual_timeout = min(default_timeout, query_timeout)
        elif max_request_timeout is not None and query_timeout is None:
            # max, no user query: use default_timeout, capped at the max
            self.actual_timeout = min(default_timeout, max_request_timeout)
        elif max_request_timeout is not None and query_timeout is not None:
            # max and user query: use the user query, capped at the max
            self.actual_timeout = min(query_timeout, max_request_timeout)

        logger.debug("actual_timeout={0} (default_timeout={1}, ?timeout_limit={2}, max_request_timeout={3})"
                     .format(self.actual_timeout, default_timeout, query_timeout, max_request_timeout))
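
        # Worked examples (illustrative values), with default_timeout=3.0
        # computed from the selected engines:
        #   query_timeout=None, max_request_timeout=None  ->  actual_timeout=3.0
        #   query_timeout=5.0,  max_request_timeout=None  ->  actual_timeout=3.0
        #   query_timeout=None, max_request_timeout=2.0   ->  actual_timeout=2.0
        #   query_timeout=5.0,  max_request_timeout=2.0   ->  actual_timeout=2.0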

        # send all search requests
        if requests:
            search_multiple_requests(requests, self.result_container, start_time, self.actual_timeout)
            start_new_thread(gc.collect, tuple())

        # return results, suggestions, answers and infoboxes
        return self.result_container


class SearchWithPlugins(Search):
    """Similar to the Search class but calls the plugins."""

    def __init__(self, search_query, ordered_plugin_list, request):
        super(SearchWithPlugins, self).__init__(search_query)
        self.ordered_plugin_list = ordered_plugin_list
        self.request = request

    def search(self):
        if plugins.call(self.ordered_plugin_list, 'pre_search', self.request, self):
            super(SearchWithPlugins, self).search()

        plugins.call(self.ordered_plugin_list, 'post_search', self.request, self)

        results = self.result_container.get_ordered_results()

        for result in results:
            plugins.call(self.ordered_plugin_list, 'on_result', self.request, self, result)

        return self.result_container
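

# Typical usage from the web layer (sketch; `preferences` and `form` come
# from the incoming Flask request, roughly as searx/webapp.py wires it up):
#
#   search_query, raw_text_query = get_search_query_from_webapp(preferences, form)
#   result_container = Search(search_query).search()
#   for result in result_container.get_ordered_results():
#       print(result['url'])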