checks.py 42 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050
  1. # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
  2. #
  3. # Parts based on code that is
  4. # Copyright (C) 2001-2006, James Troup <james@nocrew.org>
  5. # Copyright (C) 2009-2010, Joerg Jaspert <joerg@debian.org>
  6. #
  7. # This program is free software; you can redistribute it and/or modify
  8. # it under the terms of the GNU General Public License as published by
  9. # the Free Software Foundation; either version 2 of the License, or
  10. # (at your option) any later version.
  11. #
  12. # This program is distributed in the hope that it will be useful,
  13. # but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  15. # GNU General Public License for more details.
  16. #
  17. # You should have received a copy of the GNU General Public License along
  18. # with this program; if not, write to the Free Software Foundation, Inc.,
  19. # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
  20. """module provided pre-acceptance tests
  21. Please read the documentation for the L{Check} class for the interface.
  22. """
  23. from __future__ import print_function
  24. from daklib.config import Config
  25. import daklib.daksubprocess
  26. from daklib.dbconn import *
  27. import daklib.dbconn as dbconn
  28. from daklib.regexes import *
  29. from daklib.textutils import fix_maintainer, ParseMaintError
  30. import daklib.lintian as lintian
  31. import daklib.utils as utils
  32. import daklib.upload
  33. import apt_inst
  34. import apt_pkg
  35. from apt_pkg import version_compare
  36. import datetime
  37. import os
  38. import six
  39. import subprocess
  40. import textwrap
  41. import time
  42. import yaml
  43. def check_fields_for_valid_utf8(filename, control):
  44. """Check all fields of a control file for valid UTF-8"""
  45. for field in control.keys():
  46. try:
  47. a = six.ensure_text(field)
  48. b = six.ensure_text(control[field])
  49. except UnicodeDecodeError:
  50. raise Reject('{0}: The {1} field is not valid UTF-8'.format(filename, field))
  51. class Reject(Exception):
  52. """exception raised by failing checks"""
  53. pass
  54. class RejectExternalFilesMismatch(Reject):
  55. """exception raised by failing the external hashes check"""
  56. def __str__(self):
  57. return "'%s' has mismatching %s from the external files db ('%s' [current] vs '%s' [external])" % self.args[:4]
  58. class RejectACL(Reject):
  59. """exception raise by failing ACL checks"""
  60. def __init__(self, acl, reason):
  61. self.acl = acl
  62. self.reason = reason
  63. def __str__(self):
  64. return "ACL {0}: {1}".format(self.acl.name, self.reason)
  65. class Check(object):
  66. """base class for checks
  67. checks are called by L{daklib.archive.ArchiveUpload}. Failing tests should
  68. raise a L{daklib.checks.Reject} exception including a human-readable
  69. description why the upload should be rejected.
  70. """
  71. def check(self, upload):
  72. """do checks
  73. @type upload: L{daklib.archive.ArchiveUpload}
  74. @param upload: upload to check
  75. @raise daklib.checks.Reject: upload should be rejected
  76. """
  77. raise NotImplementedError
  78. def per_suite_check(self, upload, suite):
  79. """do per-suite checks
  80. @type upload: L{daklib.archive.ArchiveUpload}
  81. @param upload: upload to check
  82. @type suite: L{daklib.dbconn.Suite}
  83. @param suite: suite to check
  84. @raise daklib.checks.Reject: upload should be rejected
  85. """
  86. raise NotImplementedError
  87. @property
  88. def forcable(self):
  89. """allow to force ignore failing test
  90. C{True} if it is acceptable to force ignoring a failing test,
  91. C{False} otherwise
  92. """
  93. return False
  94. class SignatureAndHashesCheck(Check):
  95. def check_replay(self, upload):
  96. # Use private session as we want to remember having seen the .changes
  97. # in all cases.
  98. session = upload.session
  99. history = SignatureHistory.from_signed_file(upload.changes)
  100. r = history.query(session)
  101. if r is not None:
  102. raise Reject('Signature for changes file was already seen at {0}.\nPlease refresh the signature of the changes file if you want to upload it again.'.format(r.seen))
  103. return True
  104. """Check signature of changes and dsc file (if included in upload)
  105. Make sure the signature is valid and done by a known user.
  106. """
  107. def check(self, upload):
  108. allow_source_untrusted_sig_keys = Config().value_list('Dinstall::AllowSourceUntrustedSigKeys')
  109. changes = upload.changes
  110. if not changes.valid_signature:
  111. raise Reject("Signature for .changes not valid.")
  112. self.check_replay(upload)
  113. self._check_hashes(upload, changes.filename, six.itervalues(changes.files))
  114. source = None
  115. try:
  116. source = changes.source
  117. except Exception as e:
  118. raise Reject("Invalid dsc file: {0}".format(e))
  119. if source is not None:
  120. if changes.primary_fingerprint not in allow_source_untrusted_sig_keys:
  121. if not source.valid_signature:
  122. raise Reject("Signature for .dsc not valid.")
  123. if source.primary_fingerprint != changes.primary_fingerprint:
  124. raise Reject(".changes and .dsc not signed by the same key.")
  125. self._check_hashes(upload, source.filename, six.itervalues(source.files))
  126. if upload.fingerprint is None or upload.fingerprint.uid is None:
  127. raise Reject(".changes signed by unknown key.")
  128. """Make sure hashes match existing files
  129. @type upload: L{daklib.archive.ArchiveUpload}
  130. @param upload: upload we are processing
  131. @type filename: str
  132. @param filename: name of the file the expected hash values are taken from
  133. @type files: sequence of L{daklib.upload.HashedFile}
  134. @param files: files to check the hashes for
  135. """
  136. def _check_hashes(self, upload, filename, files):
  137. try:
  138. for f in files:
  139. f.check(upload.directory)
  140. except daklib.upload.FileDoesNotExist as e:
  141. raise Reject('{0}: {1}\n'
  142. 'Perhaps you need to include the file in your upload?'
  143. .format(filename, six.text_type(e)))
  144. except daklib.upload.UploadException as e:
  145. raise Reject('{0}: {1}'.format(filename, six.text_type(e)))
  146. class WeakSignatureCheck(Check):
  147. """Check that .changes and .dsc are not signed using a weak algorithm"""
  148. def check(self, upload):
  149. changes = upload.changes
  150. if changes.weak_signature:
  151. raise Reject("The .changes was signed using a weak algorithm (such as SHA-1)")
  152. source = changes.source
  153. if source is not None:
  154. if source.weak_signature:
  155. raise Reject("The source package was signed using a weak algorithm (such as SHA-1)")
  156. return True
  157. class SignatureTimestampCheck(Check):
  158. """Check timestamp of .changes signature"""
  159. def check(self, upload):
  160. changes = upload.changes
  161. now = datetime.datetime.utcnow()
  162. timestamp = changes.signature_timestamp
  163. age = now - timestamp
  164. age_max = datetime.timedelta(days=365)
  165. age_min = datetime.timedelta(days=-7)
  166. if age > age_max:
  167. raise Reject('{0}: Signature from {1} is too old (maximum age is {2} days)'.format(changes.filename, timestamp, age_max.days))
  168. if age < age_min:
  169. raise Reject('{0}: Signature from {1} is too far in the future (tolerance is {2} days)'.format(changes.filename, timestamp, abs(age_min.days)))
  170. return True
class ChangesCheck(Check):
    """Check changes file for syntax errors."""

    def check(self, upload):
        """Validate field presence, field syntax and filename consistency.

        Raises Reject on the first violation found; returns True otherwise.
        """
        changes = upload.changes
        control = changes.changes
        fn = changes.filename

        # Mandatory fields for every upload.
        for field in ('Distribution', 'Source', 'Architecture', 'Version', 'Maintainer', 'Files', 'Changes'):
            if field not in control:
                raise Reject('{0}: misses mandatory field {1}'.format(fn, field))

        # Additional fields required only when binaries are included.
        if len(changes.binaries) > 0:
            for field in ('Binary', 'Description'):
                if field not in control:
                    raise Reject('{0}: binary upload requires {1} field'.format(fn, field))

        check_fields_for_valid_utf8(fn, control)

        source_match = re_field_source.match(control['Source'])
        if not source_match:
            raise Reject('{0}: Invalid Source field'.format(fn))
        version_match = re_field_version.match(control['Version'])
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        # The .changes filename must encode the same package and version
        # (without epoch) as the control fields.
        match = re_file_changes.match(fn)
        if not match:
            raise Reject('{0}: Does not match re_file_changes'.format(fn))
        if match.group('package') != source_match.group('package'):
            raise Reject('{0}: Filename does not match Source field'.format(fn))
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: Filename does not match Version field'.format(fn))

        for bn in changes.binary_names:
            if not re_field_package.match(bn):
                raise Reject('{0}: Invalid binary package name {1}'.format(fn, bn))

        # Architecture field and upload contents must agree about source.
        if changes.sourceful and changes.source is None:
            raise Reject("Changes has architecture source, but no source found.")
        if changes.source is not None and not changes.sourceful:
            raise Reject("Upload includes source, but changes does not say so.")

        try:
            fix_maintainer(changes.changes['Maintainer'])
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Maintainer field: {1}'.format(changes.filename, e))

        # Changed-By is optional; parse it only when present.
        try:
            changed_by = changes.changes.get('Changed-By')
            if changed_by is not None:
                fix_maintainer(changed_by)
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Changed-By field: {1}'.format(changes.filename, e))

        # Accessing byhand_files may raise if the Files section is invalid.
        try:
            changes.byhand_files
        except daklib.upload.InvalidChangesException as e:
            raise Reject('{0}'.format(e))

        if len(changes.files) == 0:
            raise Reject("Changes includes no files.")

        for bugnum in changes.closed_bugs:
            if not re_isanum.match(bugnum):
                raise Reject('{0}: "{1}" in Closes field is not a number'.format(changes.filename, bugnum))

        return True
  226. class SuffixCheck(Check):
  227. """Checks suffix of .changes and .buildinfo files.
  228. buildd uploads will include _${arch}.changes and _${arch}.buildinfo, so such endings
  229. should be reserved for uploads including binaries for ${arch} to avoid conflicts
  230. (for example in policy queues where dak stores the .changes and .buildinfo for later
  231. processing)
  232. """
  233. def check(self, upload):
  234. session = upload.session
  235. changes = upload.changes
  236. suffixes = []
  237. changes_match = re_file_changes.match(changes.filename)
  238. assert(changes_match)
  239. suffixes.append((changes.filename, changes_match.group('suffix')))
  240. for bi in changes.buildinfo_files:
  241. bi_match = re_file_buildinfo.match(bi.filename)
  242. assert(bi_match)
  243. suffixes.append((bi.filename, bi_match.group('suffix')))
  244. for fn, suffix in suffixes:
  245. if suffix in changes.architectures:
  246. continue
  247. if session.query(Architecture).filter_by(arch_string=suffix).first():
  248. raise Reject("The upload includes '{}' whose filename includes the architecture name {}, but does not include binaries for {}. It is rejected to avoid filename conflicts with later buildd uploads.".format(fn, suffix, suffix))
  249. return True
  250. class ExternalHashesCheck(Check):
  251. """Checks hashes in .changes and .dsc against an external database."""
  252. def check_single(self, session, f):
  253. q = session.execute("SELECT size, md5sum, sha1sum, sha256sum FROM external_files WHERE filename LIKE :pattern", {'pattern': '%/{}'.format(f.filename)})
  254. (ext_size, ext_md5sum, ext_sha1sum, ext_sha256sum) = q.fetchone() or (None, None, None, None)
  255. if not ext_size:
  256. return
  257. if ext_size != f.size:
  258. raise RejectExternalFilesMismatch(f.filename, 'size', f.size, ext_size)
  259. if ext_md5sum != f.md5sum:
  260. raise RejectExternalFilesMismatch(f.filename, 'md5sum', f.md5sum, ext_md5sum)
  261. if ext_sha1sum != f.sha1sum:
  262. raise RejectExternalFilesMismatch(f.filename, 'sha1sum', f.sha1sum, ext_sha1sum)
  263. if ext_sha256sum != f.sha256sum:
  264. raise RejectExternalFilesMismatch(f.filename, 'sha256sum', f.sha256sum, ext_sha256sum)
  265. def check(self, upload):
  266. cnf = Config()
  267. if not cnf.use_extfiles:
  268. return
  269. session = upload.session
  270. changes = upload.changes
  271. for f in six.itervalues(changes.files):
  272. self.check_single(session, f)
  273. source = changes.source
  274. if source is not None:
  275. for f in six.itervalues(source.files):
  276. self.check_single(session, f)
  277. class BinaryCheck(Check):
  278. """Check binary packages for syntax errors."""
  279. def check(self, upload):
  280. debug_deb_name_postfix = "-dbgsym"
  281. # XXX: Handle dynamic debug section name here
  282. self._architectures = set()
  283. for binary in upload.changes.binaries:
  284. self.check_binary(upload, binary)
  285. for arch in upload.changes.architectures:
  286. if arch == 'source':
  287. continue
  288. if arch not in self._architectures:
  289. raise Reject('{}: Architecture field includes {}, but no binary packages for {} are included in the upload'.format(upload.changes.filename, arch, arch))
  290. binaries = {binary.control['Package']: binary
  291. for binary in upload.changes.binaries}
  292. for name, binary in list(binaries.items()):
  293. if name in upload.changes.binary_names:
  294. # Package is listed in Binary field. Everything is good.
  295. pass
  296. elif daklib.utils.is_in_debug_section(binary.control):
  297. # If we have a binary package in the debug section, we
  298. # can allow it to not be present in the Binary field
  299. # in the .changes file, so long as its name (without
  300. # -dbgsym) is present in the Binary list.
  301. if not name.endswith(debug_deb_name_postfix):
  302. raise Reject('Package {0} is in the debug section, but '
  303. 'does not end in {1}.'.format(name, debug_deb_name_postfix))
  304. # Right, so, it's named properly, let's check that
  305. # the corresponding package is in the Binary list
  306. origin_package_name = name[:-len(debug_deb_name_postfix)]
  307. if origin_package_name not in upload.changes.binary_names:
  308. raise Reject(
  309. "Debug package {debug}'s corresponding binary package "
  310. "{origin} is not present in the Binary field.".format(
  311. debug=name, origin=origin_package_name))
  312. else:
  313. # Someone was a nasty little hacker and put a package
  314. # into the .changes that isn't in debian/control. Bad,
  315. # bad person.
  316. raise Reject('Package {0} is not mentioned in Binary field in changes'.format(name))
  317. return True
  318. def check_binary(self, upload, binary):
  319. fn = binary.hashed_file.filename
  320. control = binary.control
  321. for field in ('Package', 'Architecture', 'Version', 'Description', 'Section'):
  322. if field not in control:
  323. raise Reject('{0}: Missing mandatory field {1}.'.format(fn, field))
  324. check_fields_for_valid_utf8(fn, control)
  325. # check fields
  326. package = control['Package']
  327. if not re_field_package.match(package):
  328. raise Reject('{0}: Invalid Package field'.format(fn))
  329. version = control['Version']
  330. version_match = re_field_version.match(version)
  331. if not version_match:
  332. raise Reject('{0}: Invalid Version field'.format(fn))
  333. version_without_epoch = version_match.group('without_epoch')
  334. architecture = control['Architecture']
  335. if architecture not in upload.changes.architectures:
  336. raise Reject('{0}: Architecture not in Architecture field in changes file'.format(fn))
  337. if architecture == 'source':
  338. raise Reject('{0}: Architecture "source" invalid for binary packages'.format(fn))
  339. self._architectures.add(architecture)
  340. source = control.get('Source')
  341. if source is not None and not re_field_source.match(source):
  342. raise Reject('{0}: Invalid Source field'.format(fn))
  343. # check filename
  344. match = re_file_binary.match(fn)
  345. if package != match.group('package'):
  346. raise Reject('{0}: filename does not match Package field'.format(fn))
  347. if version_without_epoch != match.group('version'):
  348. raise Reject('{0}: filename does not match Version field'.format(fn))
  349. if architecture != match.group('architecture'):
  350. raise Reject('{0}: filename does not match Architecture field'.format(fn))
  351. # check dependency field syntax
  352. def check_dependency_field(
  353. field, control,
  354. dependency_parser=apt_pkg.parse_depends,
  355. allow_alternatives=True,
  356. allow_relations=('', '<', '<=', '=', '>=', '>')):
  357. value = control.get(field)
  358. if value is not None:
  359. if value.strip() == '':
  360. raise Reject('{0}: empty {1} field'.format(fn, field))
  361. try:
  362. depends = dependency_parser(value)
  363. except:
  364. raise Reject('{0}: APT could not parse {1} field'.format(fn, field))
  365. for group in depends:
  366. if not allow_alternatives and len(group) != 1:
  367. raise Reject('{0}: {1}: alternatives are not allowed'.format(fn, field))
  368. for dep_pkg, dep_ver, dep_rel in group:
  369. if dep_rel not in allow_relations:
  370. raise Reject('{}: {}: depends on {}, but only relations {} are allowed for this field'.format(fn, field, " ".join(dep_pkg, dep_rel, dep_ver), allow_relations))
  371. for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances', 'Pre-Depends',
  372. 'Recommends', 'Replaces', 'Suggests'):
  373. check_dependency_field(field, control)
  374. check_dependency_field("Provides", control,
  375. allow_alternatives=False,
  376. allow_relations=('', '='))
  377. check_dependency_field("Built-Using", control,
  378. dependency_parser=apt_pkg.parse_src_depends,
  379. allow_alternatives=False,
  380. allow_relations=('=',))
class BinaryTimestampCheck(Check):
    """check timestamps of files in binary packages

    Files in the near future cause ugly warnings and extreme time travel
    can cause errors on extraction.
    """

    def check(self, upload):
        cnf = Config()
        # Allowed window: [Jan 1 of PastCutoffYear, now + FutureTimeTravelGrace].
        future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24 * 3600)
        past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1975'), '%Y'))

        # Collector for apt_inst's tar walk; closes over the cutoffs above.
        class TarTime(object):
            def __init__(self):
                self.future_files = dict()
                self.past_files = dict()

            def callback(self, member, data):
                # member.mtime is a Unix timestamp from the tar header.
                if member.mtime > future_cutoff:
                    self.future_files[member.name] = member.mtime
                elif member.mtime < past_cutoff:
                    self.past_files[member.name] = member.mtime

        def format_reason(filename, direction, files):
            # Build a human-readable reject message listing each offender.
            reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
            for fn, ts in six.iteritems(files):
                reason += " {0} ({1})".format(fn, time.ctime(ts))
            return reason

        for binary in upload.changes.binaries:
            filename = binary.hashed_file.filename
            path = os.path.join(upload.directory, filename)
            deb = apt_inst.DebFile(path)
            tar = TarTime()
            # Walk both the control and the data member of the .deb.
            for archive in (deb.control, deb.data):
                archive.go(tar.callback)
            if tar.future_files:
                raise Reject(format_reason(filename, 'future', tar.future_files))
            if tar.past_files:
                raise Reject(format_reason(filename, 'past', tar.past_files))
class SourceCheck(Check):
    """Check source package for syntax errors."""

    def check_filename(self, control, filename, regex):
        """Check that C{filename} matches the Source/Version fields in C{control}.

        C{regex} is the fallback filename pattern used when the file is not
        an .orig tarball.
        """
        # In case we have an .orig.tar.*, we have to strip the Debian revison
        # from the version number. So handle this special case first.
        is_orig = True
        match = re_file_orig.match(filename)
        if not match:
            is_orig = False
            match = regex.match(filename)

        if not match:
            raise Reject('{0}: does not match regular expression for source filenames'.format(filename))
        if match.group('package') != control['Source']:
            raise Reject('{0}: filename does not match Source field'.format(filename))

        version = control['Version']
        if is_orig:
            # An .orig tarball carries only the upstream part of the version.
            upstream_match = re_field_version_upstream.match(version)
            if not upstream_match:
                raise Reject('{0}: Source package includes upstream tarball, but {1} has no Debian revision.'.format(filename, version))
            version = upstream_match.group('upstream')
        version_match = re_field_version.match(version)
        version_without_epoch = version_match.group('without_epoch')
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: filename does not match Version field'.format(filename))

    def check(self, upload):
        # Architecture field and presence of a source package must agree.
        if upload.changes.source is None:
            if upload.changes.sourceful:
                raise Reject("{}: Architecture field includes source, but no source package is included in the upload".format(upload.changes.filename))
            return True
        if not upload.changes.sourceful:
            raise Reject("{}: Architecture field does not include source, but a source package is included in the upload".format(upload.changes.filename))

        changes = upload.changes.changes
        source = upload.changes.source
        control = source.dsc
        dsc_fn = source._dsc_file.filename

        check_fields_for_valid_utf8(dsc_fn, control)

        # check fields: .dsc must agree with the .changes file.
        if not re_field_package.match(control['Source']):
            raise Reject('{0}: Invalid Source field'.format(dsc_fn))
        if control['Source'] != changes['Source']:
            raise Reject('{0}: Source field does not match Source field in changes'.format(dsc_fn))
        if control['Version'] != changes['Version']:
            raise Reject('{0}: Version field does not match Version field in changes'.format(dsc_fn))

        # check filenames of the .dsc itself and all referenced files.
        self.check_filename(control, dsc_fn, re_file_dsc)
        for f in six.itervalues(source.files):
            self.check_filename(control, f.filename, re_file_source)

        # check dependency field syntax
        for field in ('Build-Conflicts', 'Build-Conflicts-Indep', 'Build-Depends', 'Build-Depends-Arch', 'Build-Depends-Indep'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(dsc_fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception as e:
                    raise Reject('{0}: APT could not parse {1} field: {2}'.format(dsc_fn, field, e))

        # Delegate per-format file list checks; collect all problems at once.
        rejects = utils.check_dsc_files(dsc_fn, control, list(source.files.keys()))
        if len(rejects) > 0:
            raise Reject("\n".join(rejects))

        return True
  476. class SingleDistributionCheck(Check):
  477. """Check that the .changes targets only a single distribution."""
  478. def check(self, upload):
  479. if len(upload.changes.distributions) != 1:
  480. raise Reject("Only uploads to a single distribution are allowed.")
class ACLCheck(Check):
    """Check the uploader is allowed to upload the packages in .changes"""

    def _does_hijack(self, session, upload, suite):
        # Try to catch hijacks.
        # This doesn't work correctly. Uploads to experimental can still
        # "hijack" binaries from unstable. Also one can hijack packages
        # via buildds (but people who try this should not be DMs).
        for binary_name in upload.changes.binary_names:
            binaries = session.query(DBBinary).join(DBBinary.source) \
                .filter(DBBinary.suites.contains(suite)) \
                .filter(DBBinary.package == binary_name)
            for binary in binaries:
                # Same binary name in the suite, but built from a different
                # source package: that's a hijack.
                if binary.source.source != upload.changes.changes['Source']:
                    return True, binary.package, binary.source.source
        return False, None, None

    def _check_acl(self, session, upload, acl):
        """Evaluate one ACL against the upload.

        Returns a tri-state (result, reason) pair:
        (None, None)  = ACL does not apply to this upload,
        (False, msg)  = ACL explicitly rejects,
        (True, None)  = ACL accepts.
        """
        source_name = upload.changes.source_name

        # Applicability filters: wrong fingerprint/keyring means "no opinion".
        if acl.match_fingerprint and upload.fingerprint not in acl.fingerprints:
            return None, None
        if acl.match_keyring is not None and upload.fingerprint.keyring != acl.match_keyring:
            return None, None

        if not acl.allow_new:
            if upload.new:
                return False, "NEW uploads are not allowed"
            for f in six.itervalues(upload.changes.files):
                if f.section == 'byhand' or f.section.startswith("raw-"):
                    return False, "BYHAND uploads are not allowed"
        if not acl.allow_source and upload.changes.source is not None:
            return False, "sourceful uploads are not allowed"
        binaries = upload.changes.binaries
        if len(binaries) != 0:
            if not acl.allow_binary:
                return False, "binary uploads are not allowed"
            if upload.changes.source is None and not acl.allow_binary_only:
                return False, "binary-only uploads are not allowed"
            if not acl.allow_binary_all:
                # Restrict uploads to the architectures granted by the ACL.
                uploaded_arches = set(upload.changes.architectures)
                uploaded_arches.discard('source')
                allowed_arches = set(a.arch_string for a in acl.architectures)
                forbidden_arches = uploaded_arches - allowed_arches
                if len(forbidden_arches) != 0:
                    return False, "uploads for architecture(s) {0} are not allowed".format(", ".join(forbidden_arches))
        if not acl.allow_hijack:
            for suite in upload.final_suites:
                does_hijack, hijacked_binary, hijacked_from = self._does_hijack(session, upload, suite)
                if does_hijack:
                    return False, "hijacks are not allowed (binary={0}, other-source={1})".format(hijacked_binary, hijacked_from)

        # Per-source entries can act as an allow-list (allow_per_source) or
        # a deny-list (deny_per_source) for this fingerprint.
        acl_per_source = session.query(ACLPerSource).filter_by(acl=acl, fingerprint=upload.fingerprint, source=source_name).first()
        if acl.allow_per_source:
            if acl_per_source is None:
                return False, "not allowed to upload source package '{0}'".format(source_name)
        if acl.deny_per_source and acl_per_source is not None:
            return False, acl_per_source.reason or "forbidden to upload source package '{0}'".format(source_name)

        return True, None

    def check(self, upload):
        session = upload.session
        fingerprint = upload.fingerprint
        keyring = fingerprint.keyring

        if keyring is None:
            raise Reject('No keyring for fingerprint {0}'.format(fingerprint.fingerprint))
        if not keyring.active:
            raise Reject('Keyring {0} is not active'.format(keyring.name))

        # The fingerprint-specific ACL takes precedence over the keyring one.
        acl = fingerprint.acl or keyring.acl
        if acl is None:
            raise Reject('No ACL for fingerprint {0}'.format(fingerprint.fingerprint))
        result, reason = self._check_acl(session, upload, acl)
        # The primary ACL must accept (None = not applicable also rejects here).
        if not result:
            raise RejectACL(acl, reason)

        # Global ACLs may veto (result False), but None/True both pass here.
        for acl in session.query(ACL).filter_by(is_global=True):
            result, reason = self._check_acl(session, upload, acl)
            if result is False:
                raise RejectACL(acl, reason)

        return True

    def per_suite_check(self, upload, suite):
        acls = suite.acls
        if len(acls) != 0:
            # At least one applicable suite ACL must accept; any explicit
            # False rejects immediately.
            accept = False
            for acl in acls:
                result, reason = self._check_acl(upload.session, upload, acl)
                if result is False:
                    raise Reject(reason)
                accept = accept or result
            if not accept:
                raise Reject('Not accepted by any per-suite acl (suite={0})'.format(suite.suite_name))
        return True
class TransitionCheck(Check):
    """check for a transition"""

    def check(self, upload):
        # Only sourceful uploads can be part of a transition.
        if not upload.changes.sourceful:
            return True

        transitions = self.get_transitions()
        if transitions is None:
            return True

        session = upload.session

        control = upload.changes.changes
        source = re_field_source.match(control['Source']).group('package')

        for trans in transitions:
            t = transitions[trans]
            transition_source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(transition_source, "testing", session)
            if current is not None:
                compare = apt_pkg.version_compare(current.version, expected)

            # NOTE: `compare` is only bound when current is not None; the
            # short-circuit `or` below guarantees it is not read otherwise.
            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if source in t['packages']:
                    # The source is affected, lets reject it.
                    rejectmsg = "{0}: part of the {1} transition.\n\n".format(source, trans)

                    if current is not None:
                        currentlymsg = "at version {0}".format(current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: {0}\n\n".format(t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get {0} migrated (it is
currently {1}, we need version {2}). This transition is managed by the
Release Team, and {3} is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact {3} directly if you
need further assistance. You might want to upload to experimental until this
transition is done.""".format(transition_source, currentlymsg, expected, t["rm"])))

                    raise Reject(rejectmsg)

        return True

    def get_transitions(self):
        """Load the transitions YAML; return the parsed mapping or None
        if the file is missing, unset, or broken."""
        cnf = Config()
        path = cnf.get('Dinstall::ReleaseTransitions', '')
        if path == '' or not os.path.exists(path):
            return None

        with open(path, 'r') as fd:
            contents = fd.read()
        try:
            transitions = yaml.safe_load(contents)
            return transitions
        except yaml.YAMLError as msg:
            # Best-effort: a broken transitions file disables the check.
            utils.warn('Not checking transitions, the transitions file is broken: {0}'.format(msg))

        return None
  619. class NoSourceOnlyCheck(Check):
  620. def is_source_only_upload(self, upload):
  621. changes = upload.changes
  622. if changes.source is not None and len(changes.binaries) == 0:
  623. return True
  624. return False
  625. """Check for source-only upload
  626. Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
  627. set. Otherwise they are rejected.
  628. Source-only uploads are only accepted for source packages having a
  629. Package-List field that also lists architectures per package. This
  630. check can be disabled via
  631. Dinstall::AllowSourceOnlyUploadsWithoutPackageList.
  632. Source-only uploads to NEW are only allowed if
  633. Dinstall::AllowSourceOnlyNew is set.
  634. Uploads not including architecture-independent packages are only
  635. allowed if Dinstall::AllowNoArchIndepUploads is set.
  636. """
  637. def check(self, upload):
  638. if not self.is_source_only_upload(upload):
  639. return True
  640. allow_source_only_uploads = Config().find_b('Dinstall::AllowSourceOnlyUploads')
  641. allow_source_only_uploads_without_package_list = Config().find_b('Dinstall::AllowSourceOnlyUploadsWithoutPackageList')
  642. allow_source_only_new = Config().find_b('Dinstall::AllowSourceOnlyNew')
  643. allow_source_only_new_keys = Config().value_list('Dinstall::AllowSourceOnlyNewKeys')
  644. allow_source_only_new_sources = Config().value_list('Dinstall::AllowSourceOnlyNewSources')
  645. allow_no_arch_indep_uploads = Config().find_b('Dinstall::AllowNoArchIndepUploads', True)
  646. changes = upload.changes
  647. if not allow_source_only_uploads:
  648. raise Reject('Source-only uploads are not allowed.')
  649. if not allow_source_only_uploads_without_package_list \
  650. and changes.source.package_list.fallback:
  651. raise Reject('Source-only uploads are only allowed if a Package-List field that also list architectures is included in the source package. dpkg (>= 1.17.7) includes this information.')
  652. if not allow_source_only_new and upload.new \
  653. and changes.primary_fingerprint not in allow_source_only_new_keys \
  654. and changes.source_name not in allow_source_only_new_sources:
  655. raise Reject('Source-only uploads to NEW are not allowed.')
  656. if 'all' not in changes.architectures and changes.source.package_list.has_arch_indep_packages():
  657. if not allow_no_arch_indep_uploads:
  658. raise Reject('Uploads must include architecture-independent packages.')
  659. for suite in ('oldoldstable', 'oldoldstable-proposed-updates', 'oldoldstable-security',
  660. 'jessie', 'jessie-proposed-updates', 'jessie-security',
  661. 'oldoldstable-backports', 'oldoldstable-backports-sloppy',
  662. 'jessie-backports', 'jessie-backports-sloppy'):
  663. if suite in changes.distributions:
  664. raise Reject('Suite {} is not configured to build arch:all packages. Please include them in your upload'.format(suite))
  665. return True
  666. class NewOverrideCheck(Check):
  667. """Override NEW requirement
  668. """
  669. def check(self, upload):
  670. if not upload.new:
  671. return True
  672. new_override_keys = Config().value_list('Dinstall::NewOverrideKeys')
  673. changes = upload.changes
  674. if changes.primary_fingerprint in new_override_keys:
  675. upload.new = False
  676. return True
  677. class ArchAllBinNMUCheck(Check):
  678. """Check for arch:all binNMUs"""
  679. def check(self, upload):
  680. changes = upload.changes
  681. if 'all' in changes.architectures and changes.changes.get('Binary-Only') == 'yes':
  682. raise Reject('arch:all binNMUs are not allowed.')
  683. return True
  684. class LintianCheck(Check):
  685. """Check package using lintian"""
  686. def check(self, upload):
  687. changes = upload.changes
  688. # Only check sourceful uploads.
  689. if changes.source is None:
  690. return True
  691. # Only check uploads to unstable or experimental.
  692. if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions:
  693. return True
  694. cnf = Config()
  695. if 'Dinstall::LintianTags' not in cnf:
  696. return True
  697. tagfile = cnf['Dinstall::LintianTags']
  698. with open(tagfile, 'r') as sourcefile:
  699. sourcecontent = sourcefile.read()
  700. try:
  701. lintiantags = yaml.safe_load(sourcecontent)['lintian']
  702. except yaml.YAMLError as msg:
  703. raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))
  704. fd, temp_filename = utils.temp_filename(mode=0o644)
  705. temptagfile = os.fdopen(fd, 'w')
  706. for tags in six.itervalues(lintiantags):
  707. for tag in tags:
  708. print(tag, file=temptagfile)
  709. temptagfile.close()
  710. changespath = os.path.join(upload.directory, changes.filename)
  711. try:
  712. cmd = []
  713. result = 0
  714. user = cnf.get('Dinstall::UnprivUser') or None
  715. if user is not None:
  716. cmd.extend(['sudo', '-H', '-u', user])
  717. cmd.extend(['/usr/bin/lintian', '--show-overrides', '--tags-from-file', temp_filename, changespath])
  718. output = daklib.daksubprocess.check_output(cmd, stderr=subprocess.STDOUT)
  719. except subprocess.CalledProcessError as e:
  720. result = e.returncode
  721. output = e.output
  722. finally:
  723. os.unlink(temp_filename)
  724. if result == 2:
  725. utils.warn("lintian failed for %s [return code: %s]." %
  726. (changespath, result))
  727. utils.warn(utils.prefix_multi_line_string(output,
  728. " [possible output:] "))
  729. parsed_tags = lintian.parse_lintian_output(output)
  730. rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags))
  731. if len(rejects) != 0:
  732. raise Reject('\n'.join(rejects))
  733. return True
  734. class SourceFormatCheck(Check):
  735. """Check source format is allowed in the target suite"""
  736. def per_suite_check(self, upload, suite):
  737. source = upload.changes.source
  738. session = upload.session
  739. if source is None:
  740. return True
  741. source_format = source.dsc['Format']
  742. query = session.query(SrcFormat).filter_by(format_name=source_format).filter(SrcFormat.suites.contains(suite))
  743. if query.first() is None:
  744. raise Reject('source format {0} is not allowed in suite {1}'.format(source_format, suite.suite_name))
  745. class SuiteCheck(Check):
  746. def per_suite_check(self, upload, suite):
  747. if not suite.accept_source_uploads and upload.changes.source is not None:
  748. raise Reject('The suite "{0}" does not accept source uploads.'.format(suite.suite_name))
  749. if not suite.accept_binary_uploads and len(upload.changes.binaries) != 0:
  750. raise Reject('The suite "{0}" does not accept binary uploads.'.format(suite.suite_name))
  751. return True
  752. class SuiteArchitectureCheck(Check):
  753. def per_suite_check(self, upload, suite):
  754. session = upload.session
  755. for arch in upload.changes.architectures:
  756. query = session.query(Architecture).filter_by(arch_string=arch).filter(Architecture.suites.contains(suite))
  757. if query.first() is None:
  758. raise Reject('Architecture {0} is not allowed in suite {1}'.format(arch, suite.suite_name))
  759. return True
class VersionCheck(Check):
    """Check version constraints"""

    def _highest_source_version(self, session, source_name, suite):
        # Highest version of source package `source_name` currently in
        # `suite`, or None if the suite does not contain it at all.
        db_source = session.query(DBSource).filter_by(source=source_name) \
            .filter(DBSource.suites.contains(suite)).order_by(DBSource.version.desc()).first()
        if db_source is None:
            return None
        else:
            return db_source.version

    def _highest_binary_version(self, session, binary_name, suite, architecture):
        # Highest version of binary package `binary_name` in `suite` for
        # `architecture`; arch:all packages count for every architecture.
        # Returns None when the suite has no matching binary.
        db_binary = session.query(DBBinary).filter_by(package=binary_name) \
            .filter(DBBinary.suites.contains(suite)) \
            .join(DBBinary.architecture) \
            .filter(Architecture.arch_string.in_(['all', architecture])) \
            .order_by(DBBinary.version.desc()).first()
        if db_binary is None:
            return None
        else:
            return db_binary.version

    def _version_checks(self, upload, suite, other_suite, op, op_name):
        # Reject unless op(version_compare(uploaded, existing)) holds for
        # the source package and for every binary package, where
        # `existing` is the highest version already in `other_suite`.
        # `op_name` ("higher"/"lower") is only used in the reject message.
        session = upload.session
        if upload.changes.source is not None:
            source_name = upload.changes.source.dsc['Source']
            source_version = upload.changes.source.dsc['Version']
            v = self._highest_source_version(session, source_name, other_suite)
            if v is not None and not op(version_compare(source_version, v)):
                raise Reject("Version check failed:\n"
                             "Your upload included the source package {0}, version {1},\n"
                             "however {3} already has version {2}.\n"
                             "Uploads to {5} must have a {4} version than present in {3}."
                             .format(source_name, source_version, v, other_suite.suite_name, op_name, suite.suite_name))
        for binary in upload.changes.binaries:
            binary_name = binary.control['Package']
            binary_version = binary.control['Version']
            architecture = binary.control['Architecture']
            v = self._highest_binary_version(session, binary_name, other_suite, architecture)
            if v is not None and not op(version_compare(binary_version, v)):
                raise Reject("Version check failed:\n"
                             "Your upload included the binary package {0}, version {1}, for {2},\n"
                             "however {4} already has version {3}.\n"
                             "Uploads to {6} must have a {5} version than present in {4}."
                             .format(binary_name, binary_version, architecture, v, other_suite.suite_name, op_name, suite.suite_name))

    def per_suite_check(self, upload, suite):
        # Apply the configured version constraints for `suite`.
        # 'Enhances' is treated the same as 'MustBeNewerThan' here.
        session = upload.session
        vc_newer = session.query(dbconn.VersionCheck).filter_by(suite=suite) \
            .filter(dbconn.VersionCheck.check.in_(['MustBeNewerThan', 'Enhances']))
        must_be_newer_than = [vc.reference for vc in vc_newer]
        # Must be newer than old versions in `suite`
        must_be_newer_than.append(suite)
        for s in must_be_newer_than:
            self._version_checks(upload, suite, s, lambda result: result > 0, 'higher')
        vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan')
        must_be_older_than = [vc.reference for vc in vc_older]
        for s in must_be_older_than:
            self._version_checks(upload, suite, s, lambda result: result < 0, 'lower')
        return True

    @property
    def forcable(self):
        # Version constraint violations may be overridden by force.
        return True