utils.py 47 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440
  1. #!/usr/bin/env python
  2. # vim:set et ts=4 sw=4:
  3. """Utility functions
  4. @contact: Debian FTP Master <ftpmaster@debian.org>
  5. @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
  6. @license: GNU General Public License version 2 or later
  7. """
  8. # This program is free software; you can redistribute it and/or modify
  9. # it under the terms of the GNU General Public License as published by
  10. # the Free Software Foundation; either version 2 of the License, or
  11. # (at your option) any later version.
  12. # This program is distributed in the hope that it will be useful,
  13. # but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  15. # GNU General Public License for more details.
  16. # You should have received a copy of the GNU General Public License
  17. # along with this program; if not, write to the Free Software
  18. # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
  19. from __future__ import absolute_import, print_function
  20. import commands
  21. import codecs
  22. import datetime
  23. import os
  24. import pwd
  25. import grp
  26. import select
  27. import socket
  28. import shutil
  29. import sqlalchemy.sql as sql
  30. import sys
  31. import tempfile
  32. import traceback
  33. import stat
  34. import apt_inst
  35. import apt_pkg
  36. import time
  37. import re
  38. import email as modemail
  39. import subprocess
  40. import ldap
  41. import errno
  42. import functools
  43. import daklib.config as config
  44. import daklib.daksubprocess
  45. from .dbconn import DBConn, get_architecture, get_component, get_suite, \
  46. get_override_type, Keyring, session_wrapper, \
  47. get_active_keyring_paths, \
  48. get_suite_architectures, get_or_set_metadatakey, DBSource, \
  49. Component, Override, OverrideType
  50. from sqlalchemy import desc
  51. from .dak_exceptions import *
  52. from .gpg import SignedFile
  53. from .textutils import fix_maintainer
  54. from .regexes import re_html_escaping, html_escaping, re_single_line_field, \
  55. re_multi_line_field, re_srchasver, re_taint_free, \
  56. re_re_mark, re_whitespace_comment, re_issource, \
  57. re_build_dep_arch, re_parse_maintainer
  58. from .formats import parse_format, validate_changes_format
  59. from .srcformats import get_format_from_string
  60. from collections import defaultdict
  61. ################################################################################
default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties

alias_cache = None #: Cache for email alias checks (lazily populated elsewhere in this module)
key_uid_email_cache = {} #: Cache for email addresses from gpg key uids, keyed by fingerprint
  65. # Monkeypatch commands.getstatusoutput as it may not return the correct exit
  66. # code in lenny's Python. This also affects commands.getoutput and
  67. # commands.getstatus.
  68. def dak_getstatusoutput(cmd):
  69. pipe = daklib.daksubprocess.Popen(cmd, shell=True, universal_newlines=True,
  70. stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
  71. output = pipe.stdout.read()
  72. pipe.wait()
  73. if output[-1:] == '\n':
  74. output = output[:-1]
  75. ret = pipe.wait()
  76. if ret is None:
  77. ret = 0
  78. return ret, output
  79. commands.getstatusoutput = dak_getstatusoutput
  80. ################################################################################
  81. def html_escape(s):
  82. """ Escape html chars """
  83. return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
  84. ################################################################################
  85. def open_file(filename, mode='r'):
  86. """
  87. Open C{file}, return fileobject.
  88. @type filename: string
  89. @param filename: path/filename to open
  90. @type mode: string
  91. @param mode: open mode
  92. @rtype: fileobject
  93. @return: open fileobject
  94. @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
  95. """
  96. try:
  97. f = open(filename, mode)
  98. except IOError:
  99. raise CantOpenError(filename)
  100. return f
  101. ################################################################################
def our_raw_input(prompt=""):
    """Display *prompt* on stdout and read one line from stdin.

    Wrapper around the Python 2 builtin raw_input() that retries the
    prompt write on IOError (e.g. an interrupted write) and converts
    EOF (^D) into SystemExit instead of propagating EOFError.

    @type prompt: string
    @param prompt: text to show before reading

    @rtype: string
    @return: the line read, without trailing newline
    """
    if prompt:
        # Keep retrying until the prompt was actually written; a single
        # IOError must not abort the interactive question.
        while 1:
            try:
                sys.stdout.write(prompt)
                break
            except IOError:
                pass
    sys.stdout.flush()
    try:
        ret = raw_input()
        return ret
    except EOFError:
        sys.stderr.write("\nUser interrupt (^D).\n")
        raise SystemExit
  117. ################################################################################
  118. def extract_component_from_section(section):
  119. component = ""
  120. if section.find('/') != -1:
  121. component = section.split('/')[0]
  122. # Expand default component
  123. if component == "":
  124. component = "main"
  125. return (section, component)
  126. ################################################################################
def parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None):
    """Parse a deb822-style document (e.g. a .changes or .dsc) into a dict.

    @type armored_contents: string
    @param armored_contents: the document, possibly PGP-armored

    @type signing_rules: int
    @param signing_rules: see parse_changes(); 1 enables dpkg-source
        style strict format checking

    @type keyrings: list of strings or None
    @param keyrings: keyrings used to check the signature; when None,
        no signature is required

    @param session: unused here  # NOTE(review): accepted but never read

    @rtype: dict
    @return: lower-cased field name -> value, plus "filecontents" (the
        raw input) and possibly "source-version"

    @raise ParseChangesError: on malformed input
    @raise InvalidDscError: on strict-mode format violations
    """
    require_signature = True
    if keyrings is None:
        keyrings = []
        require_signature = False

    signed_file = SignedFile(armored_contents, keyrings=keyrings, require_signature=require_signature)
    contents = signed_file.contents

    error = ""
    changes = {}

    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)

    if len(lines) == 0:
        raise ParseChangesError("[Empty changes file]")

    # Reindex by line number so we can easily verify the format of
    # .dsc files...
    index = 0
    indexed_lines = {}
    for line in lines:
        index += 1
        indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())
    index = 0
    first = -1
    while index < num_of_lines:
        index += 1
        line = indexed_lines[index]
        if line == "" and signing_rules == 1:
            # Strict mode: a blank line is only allowed as the very last
            # line of the data section.
            if index != num_of_lines:
                raise InvalidDscError(index)
            break
        slf = re_single_line_field.match(line)
        if slf:
            field = slf.groups()[0].lower()
            changes[field] = slf.groups()[1]
            first = 1
            continue
        if line == " .":
            # Continuation marker for an empty line inside a multi-line
            # field.  NOTE(review): 'field' is unbound if this appears
            # before any single-line field - would raise at runtime.
            changes[field] += '\n'
            continue
        mlf = re_multi_line_field.match(line)
        if mlf:
            if first == -1:
                raise ParseChangesError("'%s'\n [Multi-line field continuing on from nothing?]" % (line))
            if first == 1 and changes[field] != "":
                changes[field] += '\n'
            first = 0
            changes[field] += mlf.groups()[0] + '\n'
            continue
        # Anything unrecognised accumulates into one error message,
        # raised after the whole document was scanned.
        error += line

    changes["filecontents"] = armored_contents

    if "source" in changes:
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
        if srcver:
            changes["source"] = srcver.group(1)
            changes["source-version"] = srcver.group(2)

    if error:
        raise ParseChangesError(error)

    return changes
  187. ################################################################################
def parse_changes(filename, signing_rules=0, dsc_file=0, keyrings=None):
    """
    Parses a changes file and returns a dictionary where each field is a
    key.  The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

      - If signing_rules == -1, no signature is required.
      - If signing_rules == 0 (the default), a signature is required.
      - If signing_rules == 1, it turns on the same strict format checking
        as dpkg-source.

    The rules for (signing_rules == 1)-mode are:

      - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
        followed by any PGP header data and must end with a blank line.

      - The data section must end with a blank line and must be followed by
        "-----BEGIN PGP SIGNATURE-----".

    @raise ChangesUnicodeError: if the file content is not valid UTF-8
    @raise ParseChangesError: if mandatory .changes fields are missing
        (unless dsc_file is set)
    """
    with open_file(filename) as changes_in:
        content = changes_in.read()
    try:
        # Python 2 only: unicode() is used purely to validate UTF-8.
        unicode(content, 'utf-8')
    except UnicodeError:
        raise ChangesUnicodeError("Changes file not proper utf-8")
    changes = parse_deb822(content, signing_rules, keyrings=keyrings)

    if not dsc_file:
        # Finally ensure that everything needed for .changes is there
        must_keywords = ('Format', 'Date', 'Source', 'Architecture', 'Version',
                         'Distribution', 'Maintainer', 'Changes', 'Files')

        missingfields = []
        for keyword in must_keywords:
            # parse_deb822() lower-cases field names
            if keyword.lower() not in changes:
                missingfields.append(keyword)

        if len(missingfields):
            raise ParseChangesError("Missing mandatory field(s) in changes file (policy 5.5): %s" % (missingfields))

    return changes
  222. ################################################################################
  223. def hash_key(hashname):
  224. return '%ssum' % hashname
  225. ################################################################################
def check_dsc_files(dsc_filename, dsc, dsc_files):
    """
    Verify that the files listed in the Files field of the .dsc are
    those expected given the announced Format.

    @type dsc_filename: string
    @param dsc_filename: path of .dsc file

    @type dsc: dict
    @param dsc: the content of the .dsc parsed by C{parse_changes()}

    @type dsc_files: dict
    @param dsc_files: the file list returned by C{build_file_list()}

    @rtype: list
    @return: all errors detected
    """
    rejmsg = []

    # Ensure .dsc lists proper set of source files according to the format
    # announced
    has = defaultdict(lambda: 0)

    # Ordered most-specific-first: the first matching regex wins, so
    # e.g. orig.tar.gz must be tried before the generic tar.gz.
    ftype_lookup = (
        (r'orig\.tar\.(gz|bz2|xz)\.asc', ('orig_tar_sig',)),
        (r'orig\.tar\.gz', ('orig_tar_gz', 'orig_tar')),
        (r'diff\.gz', ('debian_diff',)),
        (r'tar\.gz', ('native_tar_gz', 'native_tar')),
        (r'debian\.tar\.(gz|bz2|xz)', ('debian_tar',)),
        (r'orig\.tar\.(gz|bz2|xz)', ('orig_tar',)),
        (r'tar\.(gz|bz2|xz)', ('native_tar',)),
        (r'orig-.+\.tar\.(gz|bz2|xz)\.asc', ('more_orig_tar_sig',)),
        (r'orig-.+\.tar\.(gz|bz2|xz)', ('more_orig_tar',)),
    )

    for f in dsc_files:
        m = re_issource.match(f)
        if not m:
            rejmsg.append("%s: %s in Files field not recognised as source."
                          % (dsc_filename, f))
            continue

        # Populate 'has' dictionary by resolving keys in lookup table
        matched = False
        for regex, keys in ftype_lookup:
            if re.match(regex, m.group(3)):
                matched = True
                for key in keys:
                    has[key] += 1
                break

        # File does not match anything in lookup table; reject
        # NOTE(review): this break also stops scanning the remaining
        # files, so only the first unexpected file is reported.
        if not matched:
            rejmsg.append("%s: unexpected source file '%s'" % (dsc_filename, f))
            break

    # Check for multiple files
    for file_type in ('orig_tar', 'orig_tar_sig', 'native_tar', 'debian_tar', 'debian_diff'):
        if has[file_type] > 1:
            rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))

    # Source format specific tests
    try:
        format = get_format_from_string(dsc['format'])
        rejmsg.extend([
            '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
        ])
    except UnknownFormatError:
        # Not an error here for now
        pass

    return rejmsg
  286. ################################################################################
# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    """Turn the whitespace-separated file list in changes[field] into a dict.

    @type changes: dict
    @param changes: parsed .changes/.dsc (see C{parse_changes()})

    @type is_a_dsc: int
    @param is_a_dsc: non-zero when parsing a .dsc; .dsc entries carry no
        section/priority columns and the Format field is not validated

    @type field: string
    @param field: field to parse ("files", "checksums-sha1", ...)

    @type hashname: string
    @param hashname: key under which each file's checksum is stored

    @rtype: dict
    @return: filename -> dict(size, section, priority, component, <hashname>)

    @raise NoFilesFieldError: if changes lacks the requested field
    @raise ParseChangesError: on a malformed entry
    """
    files = {}

    # Make sure we have a Files: field to parse...
    if field not in changes:
        raise NoFilesFieldError

    # Validate .changes Format: field
    if not is_a_dsc:
        validate_changes_format(parse_format(changes['format']), field)

    # Only the "files" field of a .changes carries section/priority.
    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        if not i:
            break
        s = i.split()
        section = priority = ""
        try:
            if includes_section:
                (md5, size, section, priority, name) = s
            else:
                (md5, size, name) = s
        except ValueError:
            raise ParseChangesError(i)

        if section == "":
            section = "-"
        if priority == "":
            priority = "-"

        (section, component) = extract_component_from_section(section)

        files[name] = dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5

    return files
  319. ################################################################################
def send_mail(message, filename="", whitelists=None):
    """sendmail wrapper, takes _either_ a message string or a file as arguments

    @type whitelists: list of (str or None)
    @param whitelists: path to whitelists. C{None} or an empty list whitelists
                       everything, otherwise an address is whitelisted if it is
                       included in any of the lists.
                       In addition a global whitelist can be specified in
                       Dinstall::MailWhiteList.

    @raise SendmailFailedError: when the sendmail command exits non-zero
    """
    # Archive a copy of every outgoing mail under Dir::Mail, if configured.
    maildir = Cnf.get('Dir::Mail')
    if maildir:
        path = os.path.join(maildir, datetime.datetime.now().isoformat())
        path = find_next_free(path)
        with open(path, 'w') as fh:
            print(message, end=' ', file=fh)

    # Check whether we're supposed to be sending mail
    if "Dinstall::Options::No-Mail" in Cnf and Cnf["Dinstall::Options::No-Mail"]:
        return

    # If we've been passed a string dump it into a temporary file
    if message:
        (fd, filename) = tempfile.mkstemp()
        os.write(fd, message)
        os.close(fd)

    # A None entry disables whitelisting altogether.
    if whitelists is None or None in whitelists:
        whitelists = []
    if Cnf.get('Dinstall::MailWhiteList', ''):
        whitelists.append(Cnf['Dinstall::MailWhiteList'])
    if len(whitelists) != 0:
        with open_file(filename) as message_in:
            message_raw = modemail.message_from_file(message_in)

        # Build the list of allowed-recipient patterns: lines starting
        # with the RE marker are compiled as regexes, everything else is
        # matched literally.
        whitelist = []
        for path in whitelists:
            with open_file(path, 'r') as whitelist_in:
                for line in whitelist_in:
                    if not re_whitespace_comment.match(line):
                        if re_re_mark.match(line):
                            whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                        else:
                            whitelist.append(re.compile(re.escape(line.strip())))

        # Fields to check.
        fields = ["To", "Bcc", "Cc"]
        for field in fields:
            # Check each field
            value = message_raw.get(field, None)
            if value is not None:
                match = []
                for item in value.split(","):
                    (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
                    mail_whitelisted = 0
                    for wr in whitelist:
                        if wr.match(email):
                            mail_whitelisted = 1
                            break
                    if not mail_whitelisted:
                        print("Skipping {0} since it's not whitelisted".format(item))
                        continue
                    match.append(item)

                # Doesn't have any mail in whitelist so remove the header
                if len(match) == 0:
                    del message_raw[field]
                else:
                    message_raw.replace_header(field, ', '.join(match))

        # Change message fields in order if we don't have a To header
        if "To" not in message_raw:
            fields.reverse()
            for field in fields:
                if field in message_raw:
                    # Promote the first remaining recipient header to To.
                    message_raw[fields[-1]] = message_raw[field]
                    del message_raw[field]
                    break
            else:
                # Clean up any temporary files
                # and return, as we removed all recipients.
                if message:
                    os.unlink(filename)
                return

        # Rewrite the (temporary) file with the filtered message.
        fd = os.open(filename, os.O_RDWR | os.O_EXCL, 0o700)
        os.write(fd, message_raw.as_string(True))
        os.close(fd)

    # Invoke sendmail
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
    if (result != 0):
        raise SendmailFailedError(output)

    # Clean up any temporary files
    if message:
        os.unlink(filename)
  406. ################################################################################
  407. def poolify(source):
  408. if source[:3] == "lib":
  409. return source[:4] + '/' + source + '/'
  410. else:
  411. return source[:1] + '/' + source + '/'
  412. ################################################################################
def move(src, dest, overwrite=0, perms=0o664):
    """Move *src* to *dest*, creating the destination directory if needed.

    Implemented as copy2 + chmod + unlink.  Refuses to overwrite an
    existing target unless *overwrite* is set; bails out via fubar()
    (i.e. terminates the program) on conflicts.
    """
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = dest
    else:
        dest_dir = os.path.dirname(dest)
    if not os.path.lexists(dest_dir):
        # Create intermediate directories group-writable and setgid,
        # temporarily clearing the umask so the mode applies exactly.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 0o2775)
        os.umask(umask)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.lexists(dest):
        if not overwrite:
            fubar("Can't move %s to %s - file already exists." % (src, dest))
        else:
            if not os.access(dest, os.W_OK):
                fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
    os.unlink(src)
def copy(src, dest, overwrite=0, perms=0o664):
    """Copy *src* to *dest* (mode *perms*), creating the target directory if needed.

    Unlike move(), conflicts raise exceptions instead of aborting.

    @raise FileExistsError: target exists and overwrite is not set
        (dak_exceptions' FileExistsError -- NOTE(review): same name as the
        Python 3 builtin, verify which one is in scope here)
    @raise CantOverwriteError: target exists but is not writable
    """
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = dest
    else:
        dest_dir = os.path.dirname(dest)
    # NOTE(review): uses os.path.exists here where move() uses lexists.
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 0o2775)
        os.umask(umask)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.lexists(dest):
        if not overwrite:
            raise FileExistsError
        else:
            if not os.access(dest, os.W_OK):
                raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
  456. ################################################################################
  457. def which_conf_file():
  458. if os.getenv('DAK_CONFIG'):
  459. return os.getenv('DAK_CONFIG')
  460. res = socket.getfqdn()
  461. # In case we allow local config files per user, try if one exists
  462. if Cnf.find_b("Config::" + res + "::AllowLocalConfig"):
  463. homedir = os.getenv("HOME")
  464. confpath = os.path.join(homedir, "/etc/dak.conf")
  465. if os.path.exists(confpath):
  466. apt_pkg.read_config_file_isc(Cnf, confpath)
  467. # We are still in here, so there is no local config file or we do
  468. # not allow local files. Do the normal stuff.
  469. if Cnf.get("Config::" + res + "::DakConfig"):
  470. return Cnf["Config::" + res + "::DakConfig"]
  471. return default_config
  472. ################################################################################
  473. def TemplateSubst(subst_map, filename):
  474. """ Perform a substition of template """
  475. with open_file(filename) as templatefile:
  476. template = templatefile.read()
  477. for k, v in subst_map.iteritems():
  478. template = template.replace(k, str(v))
  479. return template
  480. ################################################################################
  481. def fubar(msg, exit_code=1):
  482. sys.stderr.write("E: %s\n" % (msg))
  483. sys.exit(exit_code)
  484. def warn(msg):
  485. sys.stderr.write("W: %s\n" % (msg))
  486. ################################################################################
  487. # Returns the user name with a laughable attempt at rfc822 conformancy
  488. # (read: removing stray periods).
  489. def whoami():
  490. return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
  491. def getusername():
  492. return pwd.getpwuid(os.getuid())[0]
  493. ################################################################################
  494. def size_type(c):
  495. t = " B"
  496. if c > 10240:
  497. c = c / 1024
  498. t = " KB"
  499. if c > 10240:
  500. c = c / 1024
  501. t = " MB"
  502. return ("%d%s" % (c, t))
  503. ################################################################################
  504. def find_next_free(dest, too_many=100):
  505. extra = 0
  506. orig_dest = dest
  507. while os.path.lexists(dest) and extra < too_many:
  508. dest = orig_dest + '.' + repr(extra)
  509. extra += 1
  510. if extra >= too_many:
  511. raise NoFreeFilenameError
  512. return dest
  513. ################################################################################
  514. def result_join(original, sep='\t'):
  515. resultlist = []
  516. for i in xrange(len(original)):
  517. if original[i] is None:
  518. resultlist.append("")
  519. else:
  520. resultlist.append(original[i])
  521. return sep.join(resultlist)
  522. ################################################################################
  523. def prefix_multi_line_string(str, prefix, include_blank_lines=0):
  524. out = ""
  525. for line in str.split('\n'):
  526. line = line.strip()
  527. if line or include_blank_lines:
  528. out += "%s%s\n" % (prefix, line)
  529. # Strip trailing new line
  530. if out:
  531. out = out[:-1]
  532. return out
  533. ################################################################################
  534. def join_with_commas_and(list):
  535. if len(list) == 0:
  536. return "nothing"
  537. if len(list) == 1:
  538. return list[0]
  539. return ", ".join(list[:-1]) + " and " + list[-1]
  540. ################################################################################
  541. def pp_deps(deps):
  542. pp_deps = []
  543. for atom in deps:
  544. (pkg, version, constraint) = atom
  545. if constraint:
  546. pp_dep = "%s (%s %s)" % (pkg, constraint, version)
  547. else:
  548. pp_dep = pkg
  549. pp_deps.append(pp_dep)
  550. return " |".join(pp_deps)
  551. ################################################################################
def get_conf():
    """Return the module-global configuration object Cnf."""
    return Cnf
  554. ################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints

    @type Options: dict-like
    @param Options: must provide "Suite", "Component" and "Architecture"
        entries (possibly empty strings)

    @rtype: tuple
    @return: (con_suites, con_architectures, con_components, check_source)
        where the first three are SQL fragments ("AND ... IN (...)" or "")
        and check_source is 1 when 'source' was among the architectures
    """
    # XXX: This should go away and everything which calls it be converted
    # to use SQLA properly. For now, we'll just fix it not to use
    # the old Pg interface though
    session = DBConn().session()

    # Process suite
    if Options["Suite"]:
        suite_ids_list = []
        for suitename in split_args(Options["Suite"]):
            suite = get_suite(suitename, session=session)
            if not suite or suite.suite_id is None:
                warn("suite '%s' not recognised." % (suite and suite.suite_name or suitename))
            else:
                suite_ids_list.append(suite.suite_id)
        if suite_ids_list:
            # Only integer ids from the database are interpolated here.
            con_suites = "AND su.id IN (%s)" % ", ".join([str(i) for i in suite_ids_list])
        else:
            fubar("No valid suite given.")
    else:
        con_suites = ""

    # Process component
    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
            else:
                component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([str(i) for i in component_ids_list])
        else:
            fubar("No valid component given.")
    else:
        con_components = ""

    # Process architecture
    con_architectures = ""
    check_source = 0
    if Options["Architecture"]:
        arch_ids_list = []
        for archname in split_args(Options["Architecture"]):
            # 'source' is not a real architecture row; flag it instead.
            if archname == "source":
                check_source = 1
            else:
                arch = get_architecture(archname, session=session)
                if arch is None:
                    warn("architecture '%s' not recognised." % (archname))
                else:
                    arch_ids_list.append(arch.arch_id)
        if arch_ids_list:
            con_architectures = "AND a.id IN (%s)" % ", ".join([str(i) for i in arch_ids_list])
        else:
            if not check_source:
                fubar("No valid architecture given.")
    else:
        # No -a at all means: include source as well.
        check_source = 1

    return (con_suites, con_architectures, con_components, check_source)
  613. ################################################################################
  614. @functools.total_ordering
  615. class ArchKey(object):
  616. """
  617. Key object for use in sorting lists of architectures.
  618. Sorts normally except that 'source' dominates all others.
  619. """
  620. __slots__ = ['arch', 'issource']
  621. def __init__(self, arch, *args):
  622. self.arch = arch
  623. self.issource = arch == 'source'
  624. def __lt__(self, other):
  625. if self.issource:
  626. return not other.issource
  627. if other.issource:
  628. return False
  629. return self.arch < other.arch
  630. def __eq__(self, other):
  631. return self.arch == other.arch
  632. ################################################################################
  633. def split_args(s, dwim=True):
  634. """
  635. Split command line arguments which can be separated by either commas
  636. or whitespace. If dwim is set, it will complain about string ending
  637. in comma since this usually means someone did 'dak ls -a i386, m68k
  638. foo' or something and the inevitable confusion resulting from 'm68k'
  639. being treated as an argument is undesirable.
  640. """
  641. if s.find(",") == -1:
  642. return s.split()
  643. else:
  644. if s[-1:] == "," and dwim:
  645. fubar("split_args: found trailing comma, spurious space maybe?")
  646. return s.split(",")
  647. ################################################################################
  648. def gpg_keyring_args(keyrings=None):
  649. if not keyrings:
  650. keyrings = get_active_keyring_paths()
  651. return " ".join(["--keyring %s" % x for x in keyrings])
  652. ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint.

    Results are memoised in key_uid_email_cache.  Addresses ending in
    @debian.org are moved to the front of the returned list.

    @type fingerprint: string
    @param fingerprint: key fingerprint to look up

    @rtype: list
    @return: email addresses (byte strings); empty when gpg fails
    """
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        return addresses
    addresses = list()
    try:
        with open(os.devnull, "wb") as devnull:
            output = daklib.daksubprocess.check_output(
                ["gpg", "--no-default-keyring"] + gpg_keyring_args().split() +
                ["--with-colons", "--list-keys", fingerprint], stderr=devnull)
    except subprocess.CalledProcessError:
        # gpg failed; fall through and cache the empty result.
        pass
    else:
        for l in output.split('\n'):
            # --with-colons output: field 0 is the record type,
            # field 1 the validity, field 9 the user id.
            parts = l.split(':')
            if parts[0] not in ("uid", "pub"):
                continue
            if parts[1] in ("i", "d", "r"):
                # Skip uid that is invalid, disabled or revoked
                continue
            try:
                uid = parts[9]
            except IndexError:
                continue
            try:
                # Do not use unicode_escape, because it is locale-specific
                uid = codecs.decode(uid, "string_escape").decode("utf-8")
            except UnicodeDecodeError:
                uid = uid.decode("latin1") # does not fail
            m = re_parse_maintainer.match(uid)
            if not m:
                continue
            address = m.group(2)
            address = address.encode("utf8") # dak still uses bytes
            if address.endswith('@debian.org'):
                # prefer @debian.org addresses
                # TODO: maybe not hardcode the domain
                addresses.insert(0, address)
            else:
                addresses.append(address)
    key_uid_email_cache[fingerprint] = addresses
    return addresses
  696. ################################################################################
  697. def get_logins_from_ldap(fingerprint='*'):
  698. """retrieve login from LDAP linked to a given fingerprint"""
  699. LDAPDn = Cnf['Import-LDAP-Fingerprints::LDAPDn']
  700. LDAPServer = Cnf['Import-LDAP-Fingerprints::LDAPServer']
  701. l = ldap.open(LDAPServer)
  702. l.simple_bind_s('', '')
  703. Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
  704. '(keyfingerprint=%s)' % fingerprint,
  705. ['uid', 'keyfingerprint'])
  706. login = {}
  707. for elem in Attrs:
  708. login[elem[1]['keyFingerPrint'][0]] = elem[1]['uid'][0]
  709. return login
  710. ################################################################################
  711. def get_users_from_ldap():
  712. """retrieve login and user names from LDAP"""
  713. LDAPDn = Cnf['Import-LDAP-Fingerprints::LDAPDn']
  714. LDAPServer = Cnf['Import-LDAP-Fingerprints::LDAPServer']
  715. l = ldap.open(LDAPServer)
  716. l.simple_bind_s('', '')
  717. Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
  718. '(uid=*)', ['uid', 'cn', 'mn', 'sn'])
  719. users = {}
  720. for elem in Attrs:
  721. elem = elem[1]
  722. name = []
  723. for k in ('cn', 'mn', 'sn'):
  724. try:
  725. if elem[k][0] != '-':
  726. name.append(elem[k][0])
  727. except KeyError:
  728. pass
  729. users[' '.join(name)] = elem['uid'][0]
  730. return users
  731. ################################################################################
  732. def clean_symlink(src, dest, root):
  733. """
  734. Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
  735. Returns fixed 'src'
  736. """
  737. src = src.replace(root, '', 1)
  738. dest = dest.replace(root, '', 1)
  739. dest = os.path.dirname(dest)
  740. new_src = '../' * len(dest.split('/'))
  741. return new_src + src
  742. ################################################################################
  743. def temp_filename(directory=None, prefix="dak", suffix="", mode=None, group=None):
  744. """
  745. Return a secure and unique filename by pre-creating it.
  746. @type directory: str
  747. @param directory: If non-null it will be the directory the file is pre-created in.
  748. @type prefix: str
  749. @param prefix: The filename will be prefixed with this string
  750. @type suffix: str
  751. @param suffix: The filename will end with this string
  752. @type mode: str
  753. @param mode: If set the file will get chmodded to those permissions
  754. @type group: str
  755. @param group: If set the file will get chgrped to the specified group.
  756. @rtype: list
  757. @return: Returns a pair (fd, name)
  758. """
  759. (tfd, tfname) = tempfile.mkstemp(suffix, prefix, directory)
  760. if mode:
  761. os.chmod(tfname, mode)
  762. if group:
  763. gid = grp.getgrnam(group).gr_gid
  764. os.chown(tfname, -1, gid)
  765. return (tfd, tfname)
  766. ################################################################################
  767. def temp_dirname(parent=None, prefix="dak", suffix="", mode=None, group=None):
  768. """
  769. Return a secure and unique directory by pre-creating it.
  770. @type parent: str
  771. @param parent: If non-null it will be the directory the directory is pre-created in.
  772. @type prefix: str
  773. @param prefix: The filename will be prefixed with this string
  774. @type suffix: str
  775. @param suffix: The filename will end with this string
  776. @type mode: str
  777. @param mode: If set the file will get chmodded to those permissions
  778. @type group: str
  779. @param group: If set the file will get chgrped to the specified group.
  780. @rtype: list
  781. @return: Returns a pair (fd, name)
  782. """
  783. tfname = tempfile.mkdtemp(suffix, prefix, parent)
  784. if mode:
  785. os.chmod(tfname, mode)
  786. if group:
  787. gid = grp.getgrnam(group).gr_gid
  788. os.chown(tfname, -1, gid)
  789. return tfname
  790. ################################################################################
  791. def is_email_alias(email):
  792. """ checks if the user part of the email is listed in the alias file """
  793. global alias_cache
  794. if alias_cache is None:
  795. aliasfn = which_alias_file()
  796. alias_cache = set()
  797. if aliasfn:
  798. for l in open(aliasfn):
  799. alias_cache.add(l.split(':')[0])
  800. uid = email.split('@')[0]
  801. return uid in alias_cache
  802. ################################################################################
  803. def get_changes_files(from_dir):
  804. """
  805. Takes a directory and lists all .changes files in it (as well as chdir'ing
  806. to the directory; this is due to broken behaviour on the part of p-u/p-a
  807. when you're not in the right place)
  808. Returns a list of filenames
  809. """
  810. try:
  811. # Much of the rest of p-u/p-a depends on being in the right place
  812. os.chdir(from_dir)
  813. changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')]
  814. except OSError as e:
  815. fubar("Failed to read list from directory %s (%s)" % (from_dir, e))
  816. return changes_files
  817. ################################################################################
# Module-level configuration handle shared by the helpers in this file.
Cnf = config.Config().Cnf
  819. ################################################################################
  820. def parse_wnpp_bug_file(file="/srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm"):
  821. """
  822. Parses the wnpp bug list available at https://qa.debian.org/data/bts/wnpp_rm
  823. Well, actually it parsed a local copy, but let's document the source
  824. somewhere ;)
  825. returns a dict associating source package name with a list of open wnpp
  826. bugs (Yes, there might be more than one)
  827. """
  828. line = []
  829. try:
  830. f = open(file)
  831. lines = f.readlines()
  832. except IOError as e:
  833. print("Warning: Couldn't open %s; don't know about WNPP bugs, so won't close any." % file)
  834. lines = []
  835. wnpp = {}
  836. for line in lines:
  837. splited_line = line.split(": ", 1)
  838. if len(splited_line) > 1:
  839. wnpp[splited_line[0]] = splited_line[1].split("|")
  840. for source in wnpp.keys():
  841. bugs = []
  842. for wnpp_bug in wnpp[source]:
  843. bug_no = re.search("(\d)+", wnpp_bug).group()
  844. if bug_no:
  845. bugs.append(bug_no)
  846. wnpp[source] = bugs
  847. return wnpp
  848. ################################################################################
  849. def deb_extract_control(fh):
  850. """extract DEBIAN/control from a binary package"""
  851. return apt_inst.DebFile(fh).control.extractdata("control")
  852. ################################################################################
def mail_addresses_for_upload(maintainer, changed_by, fingerprint):
    """mail addresses to contact for an upload

    @type maintainer: str
    @param maintainer: Maintainer field of the .changes file

    @type changed_by: str
    @param changed_by: Changed-By field of the .changes file

    @type fingerprint: str
    @param fingerprint: fingerprint of the key used to sign the upload

    @rtype: list of str
    @return: list of RFC 2047-encoded mail addresses to contact regarding
             this upload
    """
    # Recipient roles come from config; fall back to the standard three.
    recipients = Cnf.value_list('Dinstall::UploadMailRecipients')
    if not recipients:
        recipients = [
            'maintainer',
            'changed_by',
            'signer',
        ]

    # Ensure signer is last if present, so the de-duplication below can
    # suppress the signer copy when an earlier role already covers it.
    try:
        recipients.remove('signer')
        recipients.append('signer')
    except ValueError:
        pass

    # Compute the set of addresses of the recipients
    addresses = set()  # Name + email
    emails = set()  # Email only, used to avoid duplicates
    for recipient in recipients:
        if recipient.startswith('mail:'):  # Email hardcoded in config
            address = recipient[5:]
        elif recipient == 'maintainer':
            address = maintainer
        elif recipient == 'changed_by':
            address = changed_by
        elif recipient == 'signer':
            fpr_addresses = gpg_get_key_addresses(fingerprint)
            address = fpr_addresses[0] if fpr_addresses else None
            if any(x in emails for x in fpr_addresses):
                # The signer already gets a copy via another email
                address = None
        else:
            raise Exception('Unsupported entry in {0}: {1}'.format(
                'Dinstall::UploadMailRecipients', recipient))

        if address is not None:
            # fix_maintainer()[3] yields the bare email; only keep the
            # address if that email was not collected already.
            email = fix_maintainer(address)[3]
            if email not in emails:
                addresses.add(address)
                emails.add(email)

    # NOTE(review): `addresses` is a set, so the order of the returned
    # list is unspecified — presumably acceptable for mail headers.
    encoded_addresses = [fix_maintainer(e)[1] for e in addresses]
    return encoded_addresses
  904. ################################################################################
  905. def call_editor(text="", suffix=".txt"):
  906. """run editor and return the result as a string
  907. @type text: str
  908. @param text: initial text
  909. @type suffix: str
  910. @param suffix: extension for temporary file
  911. @rtype: str
  912. @return: string with the edited text
  913. """
  914. editor = os.environ.get('VISUAL', os.environ.get('EDITOR', 'vi'))
  915. tmp = tempfile.NamedTemporaryFile(suffix=suffix, delete=False)
  916. try:
  917. print(text, end=' ', file=tmp)
  918. tmp.close()
  919. daklib.daksubprocess.check_call([editor, tmp.name])
  920. return open(tmp.name, 'r').read()
  921. finally:
  922. os.unlink(tmp.name)
  923. ################################################################################
  924. def check_reverse_depends(removals, suite, arches=None, session=None, cruft=False, quiet=False, include_arch_all=True):
  925. dbsuite = get_suite(suite, session)
  926. overridesuite = dbsuite
  927. if dbsuite.overridesuite is not None:
  928. overridesuite = get_suite(dbsuite.overridesuite, session)
  929. dep_problem = 0
  930. p2c = {}
  931. all_broken = defaultdict(lambda: defaultdict(set))
  932. if arches:
  933. all_arches = set(arches)
  934. else:
  935. all_arches = set(x.arch_string for x in get_suite_architectures(suite))
  936. all_arches -= set(["source", "all"])
  937. removal_set = set(removals)
  938. metakey_d = get_or_set_metadatakey("Depends", session)
  939. metakey_p = get_or_set_metadatakey("Provides", session)
  940. params = {
  941. 'suite_id': dbsuite.suite_id,
  942. 'metakey_d_id': metakey_d.key_id,
  943. 'metakey_p_id': metakey_p.key_id,
  944. }
  945. if include_arch_all:
  946. rdep_architectures = all_arches | set(['all'])
  947. else:
  948. rdep_architectures = all_arches
  949. for architecture in rdep_architectures:
  950. deps = {}
  951. sources = {}
  952. virtual_packages = {}
  953. try:
  954. params['arch_id'] = get_architecture(architecture, session).arch_id
  955. except AttributeError:
  956. continue
  957. statement = sql.text('''
  958. SELECT b.package, s.source, c.name as component,
  959. (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends,
  960. (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides
  961. FROM binaries b
  962. JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id
  963. JOIN source s ON b.source = s.id
  964. JOIN files_archive_map af ON b.file = af.file_id
  965. JOIN component c ON af.component_id = c.id
  966. WHERE b.architecture = :arch_id''')
  967. query = session.query('package', 'source', 'component', 'depends', 'provides'). \
  968. from_statement(statement).params(params)
  969. for package, source, component, depends, provides in query:
  970. sources[package] = source
  971. p2c[package] = component
  972. if depends is not None:
  973. deps[package] = depends
  974. # Maintain a counter for each virtual package. If a
  975. # Provides: exists, set the counter to 0 and count all
  976. # provides by a package not in the list for removal.
  977. # If the counter stays 0 at the end, we know that only
  978. # the to-be-removed packages provided this virtual
  979. # package.
  980. if provides is not None:
  981. for virtual_pkg in provides.split(","):
  982. virtual_pkg = virtual_pkg.strip()
  983. if virtual_pkg == package:
  984. continue
  985. if virtual_pkg not in virtual_packages:
  986. virtual_packages[virtual_pkg] = 0
  987. if package not in removals:
  988. virtual_packages[virtual_pkg] += 1
  989. # If a virtual package is only provided by the to-be-removed
  990. # packages, treat the virtual package as to-be-removed too.
  991. removal_set.update(virtual_pkg for virtual_pkg in virtual_packages if not virtual_packages[virtual_pkg])
  992. # Check binary dependencies (Depends)
  993. for package in deps:
  994. if package in removals:
  995. continue
  996. try:
  997. parsed_dep = apt_pkg.parse_depends(deps[package])
  998. except ValueError as e:
  999. print("Error for package %s: %s" % (package, e))
  1000. parsed_dep = []
  1001. for dep in parsed_dep:
  1002. # Check for partial breakage. If a package has a ORed
  1003. # dependency, there is only a dependency problem if all
  1004. # packages in the ORed depends will be removed.
  1005. unsat = 0
  1006. for dep_package, _, _ in dep:
  1007. if dep_package in removals:
  1008. unsat += 1
  1009. if unsat == len(dep):
  1010. component = p2c[package]
  1011. source = sources[package]
  1012. if component != "main":
  1013. source = "%s/%s" % (source, component)
  1014. all_broken[source][package].add(architecture)
  1015. dep_problem = 1
  1016. if all_broken and not quiet:
  1017. if cruft:
  1018. print(" - broken Depends:")
  1019. else:
  1020. print("# Broken Depends:")
  1021. for source, bindict in sorted(all_broken.items()):
  1022. lines = []
  1023. for binary, arches in sorted(bindict.items()):
  1024. if arches == all_arches or 'all' in arches:
  1025. lines.append(binary)
  1026. else:
  1027. lines.append('%s [%s]' % (binary, ' '.join(sorted(arches))))
  1028. if cruft:
  1029. print(' %s: %s' % (source, lines[0]))
  1030. else:
  1031. print('%s: %s' % (source, lines[0]))
  1032. for line in lines[1:]:
  1033. if cruft:
  1034. print(' ' + ' ' * (len(source) + 2) + line)
  1035. else:
  1036. print(' ' * (len(source) + 2) + line)
  1037. if not cruft:
  1038. print()
  1039. # Check source dependencies (Build-Depends and Build-Depends-Indep)
  1040. all_broken = defaultdict(set)
  1041. metakey_bd = get_or_set_metadatakey("Build-Depends", session)
  1042. metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
  1043. if include_arch_all:
  1044. metakey_ids = (metakey_bd.key_id, metakey_bdi.key_id)
  1045. else:
  1046. metakey_ids = (metakey_bd.key_id,)
  1047. params = {
  1048. 'suite_id': dbsuite.suite_id,
  1049. 'metakey_ids': metakey_ids,
  1050. }
  1051. statement = sql.text('''
  1052. SELECT s.source, string_agg(sm.value, ', ') as build_dep
  1053. FROM source s
  1054. JOIN source_metadata sm ON s.id = sm.src_id
  1055. WHERE s.id in
  1056. (SELECT src FROM newest_src_association
  1057. WHERE suite = :suite_id)
  1058. AND sm.key_id in :metakey_ids
  1059. GROUP BY s.id, s.source''')
  1060. query = session.query('source', 'build_dep').from_statement(statement). \
  1061. params(params)
  1062. for source, build_dep in query:
  1063. if source in removals:
  1064. continue
  1065. parsed_dep = []
  1066. if build_dep is not None:
  1067. # Remove [arch] information since we want to see breakage on all arches
  1068. build_dep = re_build_dep_arch.sub("", build_dep)
  1069. try:
  1070. parsed_dep = apt_pkg.parse_src_depends(build_dep)
  1071. except ValueError as e:
  1072. print("Error for source %s: %s" % (source, e))
  1073. for dep in parsed_dep:
  1074. unsat = 0
  1075. for dep_package, _, _ in dep:
  1076. if dep_package in removals:
  1077. unsat += 1
  1078. if unsat == len(dep):
  1079. component, = session.query(Component.component_name) \
  1080. .join(Component.overrides) \
  1081. .filter(Override.suite == overridesuite) \
  1082. .filter(Override.package == re.sub('/(contrib|non-free)$', '', source)) \
  1083. .join(Override.overridetype).filter(OverrideType.overridetype == 'dsc') \
  1084. .first()
  1085. key = source
  1086. if component != "main":
  1087. key = "%s/%s" % (source, component)
  1088. all_broken[key].add(pp_deps(dep))
  1089. dep_problem = 1
  1090. if all_broken and not quiet:
  1091. if cruft:
  1092. print(" - broken Build-Depends:")
  1093. else:
  1094. print("# Broken Build-Depends:")
  1095. for source, bdeps in sorted(all_broken.items()):
  1096. bdeps = sorted(bdeps)
  1097. if cruft:
  1098. print(' %s: %s' % (source, bdeps[0]))
  1099. else:
  1100. print('%s: %s' % (source, bdeps[0]))
  1101. for bdep in bdeps[1:]:
  1102. if cruft:
  1103. print(' ' + ' ' * (len(source) + 2) + bdep)
  1104. else:
  1105. print(' ' * (len(source) + 2) + bdep)
  1106. if not cruft:
  1107. print()
  1108. return dep_problem
  1109. ################################################################################
  1110. def parse_built_using(control):
  1111. """source packages referenced via Built-Using
  1112. @type control: dict-like
  1113. @param control: control file to take Built-Using field from
  1114. @rtype: list of (str, str)
  1115. @return: list of (source_name, source_version) pairs
  1116. """
  1117. built_using = control.get('Built-Using', None)
  1118. if built_using is None:
  1119. return []
  1120. bu = []
  1121. for dep in apt_pkg.parse_depends(built_using):
  1122. assert len(dep) == 1, 'Alternatives are not allowed in Built-Using field'
  1123. source_name, source_version, comp = dep[0]
  1124. assert comp == '=', 'Built-Using must contain strict dependencies'
  1125. bu.append((source_name, source_version))
  1126. return bu
  1127. ################################################################################
  1128. def is_in_debug_section(control):
  1129. """binary package is a debug package
  1130. @type control: dict-like
  1131. @param control: control file of binary package
  1132. @rtype Boolean
  1133. @return: True if the binary package is a debug package
  1134. """
  1135. section = control['Section'].split('/', 1)[-1]
  1136. auto_built_package = control.get("Auto-Built-Package")
  1137. return section == "debug" and auto_built_package == "debug-symbols"
  1138. ################################################################################
  1139. def find_possibly_compressed_file(filename):
  1140. """
  1141. @type filename: string
  1142. @param filename: path to a control file (Sources, Packages, etc) to
  1143. look for
  1144. @rtype string
  1145. @return: path to the (possibly compressed) control file, or null if the
  1146. file doesn't exist
  1147. """
  1148. _compressions = ('', '.xz', '.gz', '.bz2')
  1149. for ext in _compressions:
  1150. _file = filename + ext
  1151. if os.path.exists(_file):
  1152. return _file
  1153. raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), filename)