__init__.py 11 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332
  1. '''
  2. searx is free software: you can redistribute it and/or modify
  3. it under the terms of the GNU Affero General Public License as published by
  4. the Free Software Foundation, either version 3 of the License, or
  5. (at your option) any later version.
  6. searx is distributed in the hope that it will be useful,
  7. but WITHOUT ANY WARRANTY; without even the implied warranty of
  8. MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  9. GNU Affero General Public License for more details.
  10. You should have received a copy of the GNU Affero General Public License
  11. along with searx. If not, see < http://www.gnu.org/licenses/ >.
  12. (C) 2013- by Adam Tauber, <asciimoo@gmail.com>
  13. '''
import sys
import threading
from os.path import realpath, dirname
from babel.localedata import locale_identifiers
from urllib.parse import urlparse
from flask_babel import gettext
from operator import itemgetter
from searx import settings
from searx import logger
from searx.data import ENGINES_LANGUAGES
from searx.poolrequests import get, get_proxy_cycles
from searx.utils import load_module, match_language, get_engine_from_settings

logger = logger.getChild('engines')

# directory holding the engine modules loaded by load_engine()
engine_dir = dirname(realpath(__file__))

# engine name -> engine module; filled by load_engines()
engines = {}

# category name -> list of engine modules in that category
categories = {'general': []}

# babel locale identifiers normalised from 'xx_YY' to 'xx-YY' (or bare 'xx'),
# used to match engine language codes against standard language tags
babel_langs = [lang_parts[0] + '-' + lang_parts[-1] if len(lang_parts) > 1 else lang_parts[0]
               for lang_parts in (lang_code.split('_') for lang_code in locale_identifiers())]

# shortcut string -> engine name; filled by load_engine()
engine_shortcuts = {}

# defaults applied to every engine attribute not set by the module or settings
engine_default_args = {'paging': False,
                       'categories': ['general'],
                       'supported_languages': [],
                       'safesearch': False,
                       'timeout': settings['outgoing']['request_timeout'],
                       'shortcut': '-',
                       'disabled': False,
                       'suspend_end_time': 0,
                       'continuous_errors': 0,
                       'time_range_support': False,
                       'engine_type': 'online',
                       'display_error_messages': True,
                       'tokens': []}
  46. def load_engine(engine_data):
  47. engine_name = engine_data['name']
  48. if '_' in engine_name:
  49. logger.error('Engine name contains underscore: "{}"'.format(engine_name))
  50. sys.exit(1)
  51. if engine_name.lower() != engine_name:
  52. logger.warn('Engine name is not lowercase: "{}", converting to lowercase'.format(engine_name))
  53. engine_name = engine_name.lower()
  54. engine_data['name'] = engine_name
  55. engine_module = engine_data['engine']
  56. try:
  57. engine = load_module(engine_module + '.py', engine_dir)
  58. except (SyntaxError, KeyboardInterrupt, SystemExit, SystemError, ImportError, RuntimeError):
  59. logger.exception('Fatal exception in engine "{}"'.format(engine_module))
  60. sys.exit(1)
  61. except:
  62. logger.exception('Cannot load engine "{}"'.format(engine_module))
  63. return None
  64. for param_name, param_value in engine_data.items():
  65. if param_name == 'engine':
  66. pass
  67. elif param_name == 'categories':
  68. if param_value == 'none':
  69. engine.categories = []
  70. else:
  71. engine.categories = list(map(str.strip, param_value.split(',')))
  72. elif param_name == 'proxies':
  73. engine.proxies = get_proxy_cycles(param_value)
  74. else:
  75. setattr(engine, param_name, param_value)
  76. for arg_name, arg_value in engine_default_args.items():
  77. if not hasattr(engine, arg_name):
  78. setattr(engine, arg_name, arg_value)
  79. # checking required variables
  80. for engine_attr in dir(engine):
  81. if engine_attr.startswith('_'):
  82. continue
  83. if engine_attr == 'inactive' and getattr(engine, engine_attr) is True:
  84. return None
  85. if getattr(engine, engine_attr) is None:
  86. logger.error('Missing engine config attribute: "{0}.{1}"'
  87. .format(engine.name, engine_attr))
  88. sys.exit(1)
  89. # assign supported languages from json file
  90. if engine_data['name'] in ENGINES_LANGUAGES:
  91. setattr(engine, 'supported_languages', ENGINES_LANGUAGES[engine_data['name']])
  92. # find custom aliases for non standard language codes
  93. if hasattr(engine, 'supported_languages'):
  94. if hasattr(engine, 'language_aliases'):
  95. language_aliases = getattr(engine, 'language_aliases')
  96. else:
  97. language_aliases = {}
  98. for engine_lang in getattr(engine, 'supported_languages'):
  99. iso_lang = match_language(engine_lang, babel_langs, fallback=None)
  100. if iso_lang and iso_lang != engine_lang and not engine_lang.startswith(iso_lang) and \
  101. iso_lang not in getattr(engine, 'supported_languages'):
  102. language_aliases[iso_lang] = engine_lang
  103. setattr(engine, 'language_aliases', language_aliases)
  104. # language_support
  105. setattr(engine, 'language_support', len(getattr(engine, 'supported_languages', [])) > 0)
  106. # assign language fetching method if auxiliary method exists
  107. if hasattr(engine, '_fetch_supported_languages'):
  108. setattr(engine, 'fetch_supported_languages',
  109. lambda: engine._fetch_supported_languages(get(engine.supported_languages_url)))
  110. engine.stats = {
  111. 'sent_search_count': 0, # sent search
  112. 'search_count': 0, # succesful search
  113. 'result_count': 0,
  114. 'engine_time': 0,
  115. 'engine_time_count': 0,
  116. 'score_count': 0,
  117. 'errors': 0
  118. }
  119. engine_type = getattr(engine, 'engine_type', 'online')
  120. if engine_type != 'offline':
  121. engine.stats['page_load_time'] = 0
  122. engine.stats['page_load_count'] = 0
  123. # tor related settings
  124. if settings['outgoing'].get('using_tor_proxy'):
  125. # use onion url if using tor.
  126. if hasattr(engine, 'onion_url'):
  127. engine.search_url = engine.onion_url + getattr(engine, 'search_path', '')
  128. elif 'onions' in engine.categories:
  129. # exclude onion engines if not using tor.
  130. return None
  131. engine.timeout += settings['outgoing'].get('extra_proxy_timeout', 0)
  132. for category_name in engine.categories:
  133. categories.setdefault(category_name, []).append(engine)
  134. if engine.shortcut in engine_shortcuts:
  135. logger.error('Engine config error: ambigious shortcut: {0}'.format(engine.shortcut))
  136. sys.exit(1)
  137. engine_shortcuts[engine.shortcut] = engine.name
  138. return engine
  139. def to_percentage(stats, maxvalue):
  140. for engine_stat in stats:
  141. if maxvalue:
  142. engine_stat['percentage'] = int(engine_stat['avg'] / maxvalue * 100)
  143. else:
  144. engine_stat['percentage'] = 0
  145. return stats
def get_engines_stats(preferences):
    """Collect per-engine usage statistics as percentage-scaled tables.

    Only engines whose access token validates against *preferences* and that
    have answered at least one search are included.

    :param preferences: user preferences object; ``validate_token(engine)``
        filters out token-protected engines the user may not see
    :return: list of ``(localized title, sorted list of stat dicts)`` tuples,
        one per metric; each stat dict has 'name', 'avg' and 'percentage'
    """
    # TODO refactor
    pageloads = []
    engine_times = []
    results = []
    scores = []
    errors = []
    scores_per_result = []

    max_pageload = max_engine_times = max_results = max_score = max_errors = max_score_per_result = 0  # noqa
    for engine in engines.values():
        # skip engines this user may not access
        if not preferences.validate_token(engine):
            continue
        # no searches served -> no meaningful averages
        if engine.stats['search_count'] == 0:
            continue
        results_num = \
            engine.stats['result_count'] / float(engine.stats['search_count'])

        if engine.stats['engine_time_count'] != 0:
            this_engine_time = engine.stats['engine_time'] / float(engine.stats['engine_time_count'])  # noqa
        else:
            this_engine_time = 0

        if results_num:
            score = engine.stats['score_count'] / float(engine.stats['search_count'])  # noqa
            score_per_result = score / results_num
        else:
            score = score_per_result = 0.0

        # page-load timing is only tracked for online engines
        if engine.engine_type != 'offline':
            load_times = 0
            if engine.stats['page_load_count'] != 0:
                load_times = engine.stats['page_load_time'] / float(engine.stats['page_load_count'])  # noqa
            max_pageload = max(load_times, max_pageload)
            pageloads.append({'avg': load_times, 'name': engine.name})

        # track per-metric maxima for the percentage scaling below
        max_engine_times = max(this_engine_time, max_engine_times)
        max_results = max(results_num, max_results)
        max_score = max(score, max_score)
        max_score_per_result = max(score_per_result, max_score_per_result)
        max_errors = max(max_errors, engine.stats['errors'])

        engine_times.append({'avg': this_engine_time, 'name': engine.name})
        results.append({'avg': results_num, 'name': engine.name})
        scores.append({'avg': score, 'name': engine.name})
        errors.append({'avg': engine.stats['errors'], 'name': engine.name})
        scores_per_result.append({
            'avg': score_per_result,
            'name': engine.name
        })

    # scale every table against its own maximum so bars are comparable
    pageloads = to_percentage(pageloads, max_pageload)
    engine_times = to_percentage(engine_times, max_engine_times)
    results = to_percentage(results, max_results)
    scores = to_percentage(scores, max_score)
    scores_per_result = to_percentage(scores_per_result, max_score_per_result)
    errors = to_percentage(errors, max_errors)

    return [
        (
            gettext('Engine time (sec)'),
            sorted(engine_times, key=itemgetter('avg'))
        ),
        (
            gettext('Page loads (sec)'),
            sorted(pageloads, key=itemgetter('avg'))
        ),
        (
            gettext('Number of results'),
            sorted(results, key=itemgetter('avg'), reverse=True)
        ),
        (
            gettext('Scores'),
            sorted(scores, key=itemgetter('avg'), reverse=True)
        ),
        (
            gettext('Scores per result'),
            sorted(scores_per_result, key=itemgetter('avg'), reverse=True)
        ),
        (
            gettext('Errors'),
            sorted(errors, key=itemgetter('avg'), reverse=True)
        ),
    ]
  222. def load_engines(engine_list):
  223. global engines, engine_shortcuts
  224. engines.clear()
  225. engine_shortcuts.clear()
  226. for engine_data in engine_list:
  227. engine = load_engine(engine_data)
  228. if engine is not None:
  229. engines[engine.name] = engine
  230. return engines
  231. def initialize_engines(engine_list):
  232. load_engines(engine_list)
  233. def engine_init(engine_name, init_fn):
  234. try:
  235. init_fn(get_engine_from_settings(engine_name))
  236. except Exception:
  237. logger.exception('%s engine: Fail to initialize', engine_name)
  238. else:
  239. logger.debug('%s engine: Initialized', engine_name)
  240. for engine_name, engine in engines.items():
  241. if hasattr(engine, 'init'):
  242. init_fn = getattr(engine, 'init')
  243. if init_fn:
  244. logger.debug('%s engine: Starting background initialization', engine_name)
  245. threading.Thread(target=engine_init, args=(engine_name, init_fn)).start()
  246. _set_https_support_for_engine(engine)
  247. def _set_https_support_for_engine(engine):
  248. # check HTTPS support if it is not disabled
  249. if engine.engine_type != 'offline' and not hasattr(engine, 'https_support'):
  250. params = engine.request('http_test', {
  251. 'method': 'GET',
  252. 'headers': {},
  253. 'data': {},
  254. 'url': '',
  255. 'cookies': {},
  256. 'verify': True,
  257. 'auth': None,
  258. 'pageno': 1,
  259. 'time_range': None,
  260. 'language': '',
  261. 'safesearch': False,
  262. 'is_test': True,
  263. 'category': 'files',
  264. 'raise_for_status': True,
  265. })
  266. if 'url' not in params:
  267. return
  268. parsed_url = urlparse(params['url'])
  269. https_support = parsed_url.scheme == 'https'
  270. setattr(engine, 'https_support', https_support)