# Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

"""module to manipulate the archive

This module provides classes to manipulate the archive.
"""

from daklib.dbconn import *
import daklib.checks as checks
from daklib.config import Config
from daklib.externalsignature import check_upload_for_external_signature_request
import daklib.upload as upload
import daklib.utils
from daklib.fstransactions import FilesystemTransaction
from daklib.regexes import re_changelog_versions, re_bin_only_nmu

import os
import shutil
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.orm import object_session
import sqlalchemy.exc
import subprocess
import traceback


class ArchiveException(Exception):
    pass


class HashMismatchException(ArchiveException):
    pass


class ArchiveTransaction:
    """manipulate the archive in a transaction
    """

    def __init__(self):
        self.fs = FilesystemTransaction()
        self.session = DBConn().session()

    def get_file(self, hashed_file, source_name, check_hashes=True):
        """Look for file C{hashed_file} in database

        @type hashed_file: L{daklib.upload.HashedFile}
        @param hashed_file: file to look for in the database
        @type source_name: str
        @param source_name: source package name
        @type check_hashes: bool
        @param check_hashes: check size and hashes match
        @raise KeyError: file was not found in the database
        @raise HashMismatchException: hash mismatch
        @rtype: L{daklib.dbconn.PoolFile}
        @return: database entry for the file
        """
        poolname = os.path.join(daklib.utils.poolify(source_name), hashed_file.filename)
        try:
            poolfile = self.session.query(PoolFile).filter_by(filename=poolname).one()
            if check_hashes and (poolfile.filesize != hashed_file.size
                                 or poolfile.md5sum != hashed_file.md5sum
                                 or poolfile.sha1sum != hashed_file.sha1sum
                                 or poolfile.sha256sum != hashed_file.sha256sum):
                raise HashMismatchException('{0}: Does not match file already existing in the pool.'.format(hashed_file.filename))
            return poolfile
        except NoResultFound:
            raise KeyError('{0} not found in database.'.format(poolname))
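
    # Note on pool naming (illustrative, exact prefix rules live in
    # daklib.utils, not here): poolify() maps a source package name to its
    # pool prefix, so a file "hello_2.10-1.dsc" belonging to source "hello"
    # is looked up under a poolname roughly like "h/hello/hello_2.10-1.dsc",
    # which _install_file() below places at
    # "<archive.path>/pool/<component>/<poolname>".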

    def _install_file(self, directory, hashed_file, archive, component, source_name):
        """Install a file

        Will not give an error when the file is already present.

        @rtype: L{daklib.dbconn.PoolFile}
        @return: database object for the new file
        """
        session = self.session

        poolname = os.path.join(daklib.utils.poolify(source_name), hashed_file.filename)
        try:
            poolfile = self.get_file(hashed_file, source_name)
        except KeyError:
            poolfile = PoolFile(filename=poolname, filesize=hashed_file.size)
            poolfile.md5sum = hashed_file.md5sum
            poolfile.sha1sum = hashed_file.sha1sum
            poolfile.sha256sum = hashed_file.sha256sum
            session.add(poolfile)
            session.flush()

        try:
            session.query(ArchiveFile).filter_by(archive=archive, component=component, file=poolfile).one()
        except NoResultFound:
            archive_file = ArchiveFile(archive, component, poolfile)
            session.add(archive_file)
            session.flush()

            path = os.path.join(archive.path, 'pool', component.component_name, poolname)
            hashed_file_path = os.path.join(directory, hashed_file.input_filename)
            self.fs.copy(hashed_file_path, path, link=False, mode=archive.mode)

        return poolfile

    def install_binary(self, directory, binary, suite, component, allow_tainted=False, fingerprint=None, source_suites=None, extra_source_archives=None):
        """Install a binary package

        @type directory: str
        @param directory: directory the binary package is located in
        @type binary: L{daklib.upload.Binary}
        @param binary: binary package to install
        @type suite: L{daklib.dbconn.Suite}
        @param suite: target suite
        @type component: L{daklib.dbconn.Component}
        @param component: target component
        @type allow_tainted: bool
        @param allow_tainted: allow copying additional files from tainted archives
        @type fingerprint: L{daklib.dbconn.Fingerprint}
        @param fingerprint: optional fingerprint
        @type source_suites: SQLAlchemy subquery for C{daklib.dbconn.Suite} or C{True}
        @param source_suites: suites to copy the source from if they are not
                              in C{suite} or C{True} to allow copying from any
                              suite.
        @type extra_source_archives: list of L{daklib.dbconn.Archive}
        @param extra_source_archives: extra archives to copy Built-Using sources from
        @rtype: L{daklib.dbconn.DBBinary}
        @return: database object for the new package
        """
        session = self.session

        control = binary.control
        maintainer = get_or_set_maintainer(control['Maintainer'], session)
        architecture = get_architecture(control['Architecture'], session)

        (source_name, source_version) = binary.source
        source_query = session.query(DBSource).filter_by(source=source_name, version=source_version)
        source = source_query.filter(DBSource.suites.contains(suite)).first()
        if source is None:
            if source_suites is not True:
                source_query = source_query.join(DBSource.suites) \
                    .filter(Suite.suite_id == source_suites.c.id)
            source = source_query.first()
            if source is None:
                raise ArchiveException('{0}: trying to install to {1}, but could not find source ({2} {3})'.
                                       format(binary.hashed_file.filename, suite.suite_name, source_name, source_version))
            self.copy_source(source, suite, source.poolfile.component)

        db_file = self._install_file(directory, binary.hashed_file, suite.archive, component, source_name)

        unique = dict(
            package=control['Package'],
            version=control['Version'],
            architecture=architecture,
        )
        rest = dict(
            source=source,
            maintainer=maintainer,
            poolfile=db_file,
            binarytype=binary.type,
        )
        # Other attributes that are ignored for purposes of equality with
        # an existing source
        rest2 = dict(
            fingerprint=fingerprint,
        )

        try:
            db_binary = session.query(DBBinary).filter_by(**unique).one()
            for key, value in rest.items():
                if getattr(db_binary, key) != value:
                    raise ArchiveException('{0}: Does not match binary in database.'.format(binary.hashed_file.filename))
        except NoResultFound:
            db_binary = DBBinary(**unique)
            for key, value in rest.items():
                setattr(db_binary, key, value)
            for key, value in rest2.items():
                setattr(db_binary, key, value)
            session.add(db_binary)
            session.flush()
            import_metadata_into_db(db_binary, session)

            self._add_built_using(db_binary, binary.hashed_file.filename, control, suite, extra_archives=extra_source_archives)

        if suite not in db_binary.suites:
            db_binary.suites.append(suite)

        session.flush()

        return db_binary
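
    # Example (illustrative sketch only): `binary` would be a
    # daklib.upload.Binary parsed from an upload, and `suite`/`component`
    # database objects looked up via daklib.dbconn; none of these names are
    # defined in this module.
    #
    #     with ArchiveTransaction() as transaction:
    #         transaction.install_binary('/path/to/upload', binary, suite,
    #                                    component, source_suites=True)
    #
    # Passing C{source_suites=True} allows the matching source package to be
    # taken from any suite instead of requiring it in the target suite.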

    def _ensure_extra_source_exists(self, filename, source, archive, extra_archives=None):
        """ensure source exists in the given archive

        This is intended to be used to check that Built-Using sources exist.

        @type filename: str
        @param filename: filename to use in error messages
        @type source: L{daklib.dbconn.DBSource}
        @param source: source to look for
        @type archive: L{daklib.dbconn.Archive}
        @param archive: archive to look in
        @type extra_archives: list of L{daklib.dbconn.Archive}
        @param extra_archives: list of archives to copy the source package from
                               if it is not yet present in C{archive}
        """
        session = self.session
        db_file = session.query(ArchiveFile).filter_by(file=source.poolfile, archive=archive).first()
        if db_file is not None:
            return True

        # Try to copy file from one extra archive
        if extra_archives is None:
            extra_archives = []
        db_file = session.query(ArchiveFile).filter_by(file=source.poolfile).filter(ArchiveFile.archive_id.in_([a.archive_id for a in extra_archives])).first()
        if db_file is None:
            raise ArchiveException('{0}: Built-Using refers to package {1} (= {2}) not in target archive {3}.'.format(filename, source.source, source.version, archive.archive_name))

        source_archive = db_file.archive
        for dsc_file in source.srcfiles:
            af = session.query(ArchiveFile).filter_by(file=dsc_file.poolfile, archive=source_archive, component=db_file.component).one()
            # We were given an explicit list of archives so it is okay to copy from tainted archives.
            self._copy_file(af.file, archive, db_file.component, allow_tainted=True)

    def _add_built_using(self, db_binary, filename, control, suite, extra_archives=None):
        """Add Built-Using sources to C{db_binary.extra_sources}
        """
        session = self.session

        for bu_source_name, bu_source_version in daklib.utils.parse_built_using(control):
            bu_source = session.query(DBSource).filter_by(source=bu_source_name, version=bu_source_version).first()
            if bu_source is None:
                raise ArchiveException('{0}: Built-Using refers to non-existing source package {1} (= {2})'.format(filename, bu_source_name, bu_source_version))

            self._ensure_extra_source_exists(filename, bu_source, suite.archive, extra_archives=extra_archives)

            db_binary.extra_sources.append(bu_source)

    def install_source_to_archive(self, directory, source, archive, component, changed_by, allow_tainted=False, fingerprint=None):
        session = self.session
        control = source.dsc
        maintainer = get_or_set_maintainer(control['Maintainer'], session)
        source_name = control['Source']

        ### Add source package to database

        # We need to install the .dsc first as the DBSource object refers to it.
        db_file_dsc = self._install_file(directory, source._dsc_file, archive, component, source_name)

        unique = dict(
            source=source_name,
            version=control['Version'],
        )
        rest = dict(
            maintainer=maintainer,
            poolfile=db_file_dsc,
            dm_upload_allowed=(control.get('DM-Upload-Allowed', 'no') == 'yes'),
        )
        # Other attributes that are ignored for purposes of equality with
        # an existing source
        rest2 = dict(
            changedby=changed_by,
            fingerprint=fingerprint,
        )

        created = False
        try:
            db_source = session.query(DBSource).filter_by(**unique).one()
            for key, value in rest.items():
                if getattr(db_source, key) != value:
                    raise ArchiveException('{0}: Does not match source in database.'.format(source._dsc_file.filename))
        except NoResultFound:
            created = True
            db_source = DBSource(**unique)
            for key, value in rest.items():
                setattr(db_source, key, value)
            for key, value in rest2.items():
                setattr(db_source, key, value)
            session.add(db_source)
            session.flush()

            # Add .dsc file. Other files will be added later.
            db_dsc_file = DSCFile()
            db_dsc_file.source = db_source
            db_dsc_file.poolfile = db_file_dsc
            session.add(db_dsc_file)
            session.flush()

        if not created:
            for f in db_source.srcfiles:
                self._copy_file(f.poolfile, archive, component, allow_tainted=allow_tainted)
            return db_source

        ### Now add remaining files and copy them to the archive.

        for hashed_file in source.files.values():
            hashed_file_path = os.path.join(directory, hashed_file.input_filename)
            if os.path.exists(hashed_file_path):
                db_file = self._install_file(directory, hashed_file, archive, component, source_name)
                session.add(db_file)
            else:
                db_file = self.get_file(hashed_file, source_name)
                self._copy_file(db_file, archive, component, allow_tainted=allow_tainted)

            db_dsc_file = DSCFile()
            db_dsc_file.source = db_source
            db_dsc_file.poolfile = db_file
            session.add(db_dsc_file)

        session.flush()

        # Importing is safe as we only arrive here when we did not find the source already installed earlier.
        import_metadata_into_db(db_source, session)

        # Uploaders are the maintainer and co-maintainers from the Uploaders field
        db_source.uploaders.append(maintainer)
        if 'Uploaders' in control:
            from daklib.textutils import split_uploaders
            for u in split_uploaders(control['Uploaders']):
                db_source.uploaders.append(get_or_set_maintainer(u, session))

        session.flush()

        return db_source

    def install_source(self, directory, source, suite, component, changed_by, allow_tainted=False, fingerprint=None):
        """Install a source package

        @type directory: str
        @param directory: directory the source package is located in
        @type source: L{daklib.upload.Source}
        @param source: source package to install
        @type suite: L{daklib.dbconn.Suite}
        @param suite: target suite
        @type component: L{daklib.dbconn.Component}
        @param component: target component
        @type changed_by: L{daklib.dbconn.Maintainer}
        @param changed_by: person who prepared this version of the package
        @type allow_tainted: bool
        @param allow_tainted: allow copying additional files from tainted archives
        @type fingerprint: L{daklib.dbconn.Fingerprint}
        @param fingerprint: optional fingerprint
        @rtype: L{daklib.dbconn.DBSource}
        @return: database object for the new source
        """
        db_source = self.install_source_to_archive(directory, source, suite.archive, component, changed_by, allow_tainted, fingerprint)

        if suite in db_source.suites:
            return db_source

        db_source.suites.append(suite)
        self.session.flush()

        return db_source

    def _copy_file(self, db_file, archive, component, allow_tainted=False):
        """Copy a file to the given archive and component

        @type db_file: L{daklib.dbconn.PoolFile}
        @param db_file: file to copy
        @type archive: L{daklib.dbconn.Archive}
        @param archive: target archive
        @type component: L{daklib.dbconn.Component}
        @param component: target component
        @type allow_tainted: bool
        @param allow_tainted: allow copying from tainted archives (such as NEW)
        """
        session = self.session

        if session.query(ArchiveFile).filter_by(archive=archive, component=component, file=db_file).first() is None:
            query = session.query(ArchiveFile).filter_by(file=db_file)
            if not allow_tainted:
                query = query.join(Archive).filter(Archive.tainted == False)  # noqa:E712

            source_af = query.first()
            if source_af is None:
                raise ArchiveException('cp: Could not find {0} in any archive.'.format(db_file.filename))
            target_af = ArchiveFile(archive, component, db_file)
            session.add(target_af)
            session.flush()
            self.fs.copy(source_af.path, target_af.path, link=False, mode=archive.mode)

    def copy_binary(self, db_binary, suite, component, allow_tainted=False, extra_archives=None):
        """Copy a binary package to the given suite and component

        @type db_binary: L{daklib.dbconn.DBBinary}
        @param db_binary: binary to copy
        @type suite: L{daklib.dbconn.Suite}
        @param suite: target suite
        @type component: L{daklib.dbconn.Component}
        @param component: target component
        @type allow_tainted: bool
        @param allow_tainted: allow copying from tainted archives (such as NEW)
        @type extra_archives: list of L{daklib.dbconn.Archive}
        @param extra_archives: extra archives to copy Built-Using sources from
        """
        session = self.session
        archive = suite.archive
        if archive.tainted:
            allow_tainted = True

        filename = db_binary.poolfile.filename

        # make sure source is present in target archive
        db_source = db_binary.source
        if session.query(ArchiveFile).filter_by(archive=archive, file=db_source.poolfile).first() is None:
            raise ArchiveException('{0}: cannot copy to {1}: source is not present in target archive'.format(filename, suite.suite_name))

        # make sure built-using packages are present in target archive
        for db_source in db_binary.extra_sources:
            self._ensure_extra_source_exists(filename, db_source, archive, extra_archives=extra_archives)

        # copy binary
        db_file = db_binary.poolfile
        self._copy_file(db_file, suite.archive, component, allow_tainted=allow_tainted)
        if suite not in db_binary.suites:
            db_binary.suites.append(suite)
        self.session.flush()

    def copy_source(self, db_source, suite, component, allow_tainted=False):
        """Copy a source package to the given suite and component

        @type db_source: L{daklib.dbconn.DBSource}
        @param db_source: source to copy
        @type suite: L{daklib.dbconn.Suite}
        @param suite: target suite
        @type component: L{daklib.dbconn.Component}
        @param component: target component
        @type allow_tainted: bool
        @param allow_tainted: allow copying from tainted archives (such as NEW)
        """
        archive = suite.archive
        if archive.tainted:
            allow_tainted = True
        for db_dsc_file in db_source.srcfiles:
            self._copy_file(db_dsc_file.poolfile, archive, component, allow_tainted=allow_tainted)
        if suite not in db_source.suites:
            db_source.suites.append(suite)
        self.session.flush()

    def remove_file(self, db_file, archive, component):
        """Remove a file from a given archive and component

        @type db_file: L{daklib.dbconn.PoolFile}
        @param db_file: file to remove
        @type archive: L{daklib.dbconn.Archive}
        @param archive: archive to remove the file from
        @type component: L{daklib.dbconn.Component}
        @param component: component to remove the file from
        """
        af = self.session.query(ArchiveFile).filter_by(file=db_file, archive=archive, component=component).one()
        self.fs.unlink(af.path)
        self.session.delete(af)

    def remove_binary(self, binary, suite):
        """Remove a binary from a given suite

        @type binary: L{daklib.dbconn.DBBinary}
        @param binary: binary to remove
        @type suite: L{daklib.dbconn.Suite}
        @param suite: suite to remove the package from
        """
        binary.suites.remove(suite)
        self.session.flush()

    def remove_source(self, source, suite):
        """Remove a source from a given suite

        @type source: L{daklib.dbconn.DBSource}
        @param source: source to remove
        @type suite: L{daklib.dbconn.Suite}
        @param suite: suite to remove the package from
        @raise ArchiveException: source package is still referenced by other
                                 binaries in the suite
        """
        session = self.session

        query = session.query(DBBinary).filter_by(source=source) \
            .filter(DBBinary.suites.contains(suite))
        if query.first() is not None:
            raise ArchiveException('src:{0} is still used by binaries in suite {1}'.format(source.source, suite.suite_name))

        source.suites.remove(suite)
        session.flush()

    def commit(self):
        """commit changes"""
        try:
            self.session.commit()
            self.fs.commit()
        finally:
            self.session.rollback()
            self.fs.rollback()

    def rollback(self):
        """rollback changes"""
        self.session.rollback()
        self.fs.rollback()

    def flush(self):
        self.session.flush()

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        if type is None:
            self.commit()
        else:
            self.rollback()
        return None
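

# Example usage (illustrative sketch only; `db_source`, `suite` and
# `component` stand for objects looked up via the usual daklib.dbconn
# helpers, they are not defined in this module):
#
#     with ArchiveTransaction() as transaction:
#         # copy an already-known source package into another suite
#         transaction.copy_source(db_source, suite, component)
#
# The transaction is committed automatically when the `with` block is left
# without an exception, and rolled back (database and filesystem) otherwise.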


def source_component_from_package_list(package_list, suite):
    """Get component for a source package

    This function will look at the Package-List field to determine the
    component the source package belongs to. This is the first component
    the source package provides binaries for (first with respect to the
    ordering of components).

    If the source package has no Package-List field, None is returned.

    @type package_list: L{daklib.packagelist.PackageList}
    @param package_list: package list of the source to get the override for
    @type suite: L{daklib.dbconn.Suite}
    @param suite: suite to consider for binaries produced
    @rtype: L{daklib.dbconn.Component} or C{None}
    @return: component for the given source or C{None}
    """
    if package_list.fallback:
        return None
    session = object_session(suite)
    packages = package_list.packages_for_suite(suite)
    components = set(p.component for p in packages)
    query = session.query(Component).order_by(Component.ordering) \
        .filter(Component.component_name.in_(components))
    return query.first()
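

# Illustration (hypothetical data): if a source package's Package-List
# declares binaries in both "contrib" and "main", and "main" sorts first by
# Component.ordering, source_component_from_package_list() returns the
# Component object for "main".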


class ArchiveUpload:
    """handle an upload

    This class can be used in a with-statement::

       with ArchiveUpload(...) as upload:
          ...

    Doing so will automatically run any required cleanup and also rollback the
    transaction if it was not committed.
    """

    def __init__(self, directory, changes, keyrings):
        self.transaction = ArchiveTransaction()
        """transaction used to handle the upload
        @type: L{daklib.archive.ArchiveTransaction}
        """

        self.session = self.transaction.session
        """database session"""

        self.original_directory = directory
        self.original_changes = changes

        self.changes = None
        """upload to process
        @type: L{daklib.upload.Changes}
        """

        self.directory = None
        """directory with temporary copy of files. set by C{prepare}
        @type: str
        """

        self.keyrings = keyrings

        self.fingerprint = self.session.query(Fingerprint).filter_by(fingerprint=changes.primary_fingerprint).one()
        """fingerprint of the key used to sign the upload
        @type: L{daklib.dbconn.Fingerprint}
        """

        self.reject_reasons = []
        """reasons why the upload cannot be accepted
        @type: list of str
        """

        self.warnings = []
        """warnings
        @note: Not used yet.
        @type: list of str
        """

        self.final_suites = None

        self.new = False
        """upload is NEW. set by C{check}
        @type: bool
        """

        self._checked = False
        """checks passed. set by C{check}
        @type: bool
        """

        self._new_queue = self.session.query(PolicyQueue).filter_by(queue_name='new').one()
        self._new = self._new_queue.suite

    def warn(self, message):
        """add a warning message

        Adds a warning message that can later be seen in C{self.warnings}

        @type message: string
        @param message: warning message
        """
        self.warnings.append(message)

    def prepare(self):
        """prepare upload for further processing

        This copies the files involved to a temporary directory. If you use
        this method directly, you have to remove the directory given by the
        C{directory} attribute later on your own.

        Instead of using the method directly, you can also use a with-statement::

           with ArchiveUpload(...) as upload:
              ...

        This will automatically handle any required cleanup.
        """
        assert self.directory is None
        assert self.original_changes.valid_signature

        cnf = Config()
        session = self.transaction.session

        group = cnf.get('Dinstall::UnprivGroup') or None
        self.directory = daklib.utils.temp_dirname(parent=cnf.get('Dir::TempPath'),
                                                   mode=0o2750, group=group)
        with FilesystemTransaction() as fs:
            src = os.path.join(self.original_directory, self.original_changes.filename)
            dst = os.path.join(self.directory, self.original_changes.filename)
            fs.copy(src, dst, mode=0o640)

            self.changes = upload.Changes(self.directory, self.original_changes.filename, self.keyrings)

            files = {}
            try:
                files = self.changes.files
            except upload.InvalidChangesException:
                # Do not raise an exception; upload will be rejected later
                # due to the missing files
                pass

            for f in files.values():
                src = os.path.join(self.original_directory, f.filename)
                dst = os.path.join(self.directory, f.filename)
                if not os.path.exists(src):
                    continue
                fs.copy(src, dst, mode=0o640)

            source = None
            try:
                source = self.changes.source
            except Exception:
                # Do not raise an exception here if the .dsc is invalid.
                pass

            if source is not None:
                for f in source.files.values():
                    src = os.path.join(self.original_directory, f.filename)
                    dst = os.path.join(self.directory, f.filename)
                    if not os.path.exists(dst):
                        try:
                            db_file = self.transaction.get_file(f, source.dsc['Source'], check_hashes=False)
                            db_archive_file = session.query(ArchiveFile).filter_by(file=db_file).first()
                            fs.copy(db_archive_file.path, dst, mode=0o640)
                        except KeyError:
                            # Ignore if get_file could not find it. Upload will
                            # probably be rejected later.
                            pass

    def unpacked_source(self):
        """Path to unpacked source

        Get path to the unpacked source. This method does unpack the source
        into a temporary directory under C{self.directory} if it has not
        been done so already.

        @rtype: str or C{None}
        @return: string giving the path to the unpacked source directory
                 or C{None} if no source was included in the upload.
        """
        assert self.directory is not None

        source = self.changes.source
        if source is None:
            return None
        dsc_path = os.path.join(self.directory, source._dsc_file.filename)

        sourcedir = os.path.join(self.directory, 'source')
        if not os.path.exists(sourcedir):
            subprocess.check_call(["dpkg-source", "--no-copy", "--no-check", "-x", dsc_path, sourcedir], shell=False, stdout=subprocess.DEVNULL)
        if not os.path.isdir(sourcedir):
            raise Exception("{0} is not a directory after extracting source package".format(sourcedir))
        return sourcedir

    def _map_suite(self, suite_name):
        suite_names = set((suite_name, ))
        for rule in Config().value_list("SuiteMappings"):
            fields = rule.split()
            rtype = fields[0]
            if rtype == "map" or rtype == "silent-map":
                (src, dst) = fields[1:3]
                if src in suite_names:
                    suite_names.remove(src)
                    suite_names.add(dst)
                    if rtype != "silent-map":
                        self.warnings.append('Mapping {0} to {1}.'.format(src, dst))
            elif rtype == "copy" or rtype == "silent-copy":
                (src, dst) = fields[1:3]
                if src in suite_names:
                    suite_names.add(dst)
                    if rtype != "silent-copy":
                        self.warnings.append('Copy {0} to {1}.'.format(src, dst))
            elif rtype == "ignore":
                ignored = fields[1]
                if ignored in suite_names:
                    suite_names.remove(ignored)
                    self.warnings.append('Ignoring target suite {0}.'.format(ignored))
            elif rtype == "reject":
                rejected = fields[1]
                if rejected in suite_names:
                    raise checks.Reject('Uploads to {0} are not accepted.'.format(rejected))
            ## XXX: propup-version and map-unreleased not yet implemented
        return suite_names
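
    # For illustration (hypothetical configuration, not defined here):
    # SuiteMappings entries are whitespace-separated rules such as
    #
    #     "map stable proposed-updates"
    #     "silent-map oldstable-security oldstable"
    #     "ignore testing"
    #     "reject experimental-security"
    #
    # With the first rule, an upload targeting "stable" would instead be
    # mapped to "proposed-updates" and a warning recorded.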

    def _mapped_suites(self):
        """Get target suites after mappings

        @rtype: list of L{daklib.dbconn.Suite}
        @return: list giving the mapped target suites of this upload
        """
        session = self.session

        suite_names = set()
        for dist in self.changes.distributions:
            suite_names.update(self._map_suite(dist))

        suites = session.query(Suite).filter(Suite.suite_name.in_(suite_names))
        return suites

    def _check_new_binary_overrides(self, suite, overridesuite):
        new = False
        source = self.changes.source

        # Check binaries listed in the source package's Package-List field:
        if source is not None and not source.package_list.fallback:
            packages = source.package_list.packages_for_suite(suite)
            binaries = [entry for entry in packages]
            for b in binaries:
                override = self._binary_override(overridesuite, b)
                if override is None:
                    self.warnings.append('binary:{0} is NEW.'.format(b.name))
                    new = True

        # Check all uploaded packages.
        # This is necessary to account for packages without a Package-List
        # field, really late binary-only uploads (where an unused override
        # was already removed), and for debug packages uploaded to a suite
        # without a debug suite (which are then considered as NEW).
        binaries = self.changes.binaries
        for b in binaries:
            if daklib.utils.is_in_debug_section(b.control) and suite.debug_suite is not None:
                continue
            override = self._binary_override(overridesuite, b)
            if override is None:
                self.warnings.append('binary:{0} is NEW.'.format(b.name))
                new = True

        return new

    def _check_new(self, suite, overridesuite):
        """Check if upload is NEW

        An upload is NEW if it has binary or source packages that do not have
        an override in C{overridesuite} OR if it references files ONLY in a
        tainted archive (eg. when it references files in NEW).

        Debug packages (*-dbgsym in Section: debug) are not considered as NEW
        if C{suite} has a separate debug suite.

        @rtype: bool
        @return: C{True} if the upload is NEW, C{False} otherwise
        """
        session = self.session
        new = False

        # Check for missing overrides
        if self._check_new_binary_overrides(suite, overridesuite):
            new = True
        if self.changes.source is not None:
            override = self._source_override(overridesuite, self.changes.source)
            if override is None:
                self.warnings.append('source:{0} is NEW.'.format(self.changes.source.dsc['Source']))
                new = True

        # Check if we reference a file only in a tainted archive
        files = list(self.changes.files.values())
        if self.changes.source is not None:
            files.extend(self.changes.source.files.values())
        for f in files:
            query = session.query(ArchiveFile).join(PoolFile).filter(PoolFile.sha1sum == f.sha1sum)
            query_untainted = query.join(Archive).filter(Archive.tainted == False)  # noqa:E712

            in_archive = (query.first() is not None)
            in_untainted_archive = (query_untainted.first() is not None)

            if in_archive and not in_untainted_archive:
                self.warnings.append('{0} is only available in NEW.'.format(f.filename))
                new = True

        return new

    def _final_suites(self):
        session = self.session

        mapped_suites = self._mapped_suites()
        final_suites = list()

        for suite in mapped_suites:
            overridesuite = suite
            if suite.overridesuite is not None:
                overridesuite = session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
            if self._check_new(suite, overridesuite):
                self.new = True
            if suite not in final_suites:
                final_suites.append(suite)

        return final_suites

    def _binary_override(self, suite, binary):
        """Get override entry for a binary

        @type suite: L{daklib.dbconn.Suite}
        @param suite: suite to get override for
        @type binary: L{daklib.upload.Binary} or L{daklib.packagelist.PackageListEntry}
        @param binary: binary to get override for
        @rtype: L{daklib.dbconn.Override} or C{None}
        @return: override for the given binary or C{None}
        """
        if suite.overridesuite is not None:
            suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()

        mapped_component = get_mapped_component(binary.component)
        if mapped_component is None:
            return None

        query = self.session.query(Override).filter_by(suite=suite, package=binary.name) \
            .join(Component).filter(Component.component_name == mapped_component.component_name) \
            .join(OverrideType).filter(OverrideType.overridetype == binary.type)

        return query.one_or_none()

    def _source_override(self, suite, source):
        """Get override entry for a source

        @type suite: L{daklib.dbconn.Suite}
        @param suite: suite to get override for
        @type source: L{daklib.upload.Source}
        @param source: source to get override for
        @rtype: L{daklib.dbconn.Override} or C{None}
        @return: override for the given source or C{None}
        """
        if suite.overridesuite is not None:
            suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()

        query = self.session.query(Override).filter_by(suite=suite, package=source.dsc['Source']) \
            .join(OverrideType).filter(OverrideType.overridetype == 'dsc')

        component = source_component_from_package_list(source.package_list, suite)
        if component is not None:
            query = query.filter(Override.component == component)

        return query.one_or_none()

    def _binary_component(self, suite, binary, only_overrides=True):
        """get component for a binary

        By default this will only look at overrides to get the right component;
        if C{only_overrides} is C{False} this method will also look at the
        Section field.

        @type suite: L{daklib.dbconn.Suite}
        @type binary: L{daklib.upload.Binary}
        @type only_overrides: bool
        @param only_overrides: only use overrides to get the right component
        @rtype: L{daklib.dbconn.Component} or C{None}
        """
        override = self._binary_override(suite, binary)
        if override is not None:
            return override.component
        if only_overrides:
            return None
        return get_mapped_component(binary.component, self.session)

    def _source_component(self, suite, source, only_overrides=True):
        """get component for a source

        By default this will only look at overrides to get the right component;
        if C{only_overrides} is C{False} this method will also look at the
        Section field.

        @type suite: L{daklib.dbconn.Suite}
        @type source: L{daklib.upload.Source}
        @type only_overrides: bool
        @param only_overrides: only use overrides to get the right component
        @rtype: L{daklib.dbconn.Component} or C{None}
        """
        override = self._source_override(suite, source)
        if override is not None:
            return override.component
        if only_overrides:
            return None
        return get_mapped_component(source.component, self.session)

    def check(self, force=False):
        """run checks against the upload

        @type force: bool
        @param force: ignore failing forcible checks
        @rtype: bool
        @return: C{True} if all checks passed, C{False} otherwise
        """
        # XXX: needs to be better structured.
        assert self.changes.valid_signature

        try:
            # Validate signatures and hashes before we do any real work:
            for chk in (
                    checks.SignatureAndHashesCheck,
                    checks.WeakSignatureCheck,
                    checks.SignatureTimestampCheck,
                    checks.ChangesCheck,
                    checks.ExternalHashesCheck,
                    checks.SourceCheck,
                    checks.BinaryCheck,
                    checks.BinaryTimestampCheck,
                    checks.SingleDistributionCheck,
                    checks.ArchAllBinNMUCheck,
                    ):
                chk().check(self)

            final_suites = self._final_suites()
            if len(final_suites) == 0:
                self.reject_reasons.append('No target suite found. Please check your target distribution and that you uploaded to the right archive.')
                return False

            self.final_suites = final_suites

            for chk in (
                    checks.TransitionCheck,
                    checks.ACLCheck,
                    checks.NewOverrideCheck,
                    checks.NoSourceOnlyCheck,
                    checks.LintianCheck,
                    ):
                chk().check(self)

            for chk in (
                    checks.SuiteCheck,
                    checks.ACLCheck,
                    checks.SourceFormatCheck,
                    checks.SuiteArchitectureCheck,
                    checks.VersionCheck,
                    ):
                for suite in final_suites:
                    chk().per_suite_check(self, suite)

            if len(self.reject_reasons) != 0:
                return False

            self._checked = True
            return True
        except checks.Reject as e:
            self.reject_reasons.append(str(e))
        except Exception as e:
            self.reject_reasons.append("Processing raised an exception: {0}.\n{1}".format(e, traceback.format_exc()))
        return False

    def _install_to_suite(self, target_suite, suite, source_component_func, binary_component_func, source_suites=None, extra_source_archives=None, policy_upload=False):
        """Install upload to the given suite

        @type target_suite: L{daklib.dbconn.Suite}
        @param target_suite: target suite (before redirection to policy queue or NEW)
        @type suite: L{daklib.dbconn.Suite}
        @param suite: suite to install the package into. This is the real suite,
                      ie. after any redirection to NEW or a policy queue
        @param source_component_func: function to get the L{daklib.dbconn.Component}
                                      for a L{daklib.upload.Source} object
        @param binary_component_func: function to get the L{daklib.dbconn.Component}
                                      for a L{daklib.upload.Binary} object
        @param source_suites: see L{daklib.archive.ArchiveTransaction.install_binary}
        @param extra_source_archives: see L{daklib.archive.ArchiveTransaction.install_binary}
        @param policy_upload: Boolean indicating upload to policy queue (including NEW)
        @return: tuple with two elements. The first is a L{daklib.dbconn.DBSource}
                 object for the installed source or C{None} if no source was
                 included. The second is a list of L{daklib.dbconn.DBBinary}
                 objects for the installed binary packages.
        """
        # XXX: move this function to ArchiveTransaction?
        control = self.changes.changes
        changed_by = get_or_set_maintainer(control.get('Changed-By', control['Maintainer']), self.session)

        if source_suites is None:
            source_suites = self.session.query(Suite).join((VersionCheck, VersionCheck.reference_id == Suite.suite_id)).filter(VersionCheck.check == 'Enhances').filter(VersionCheck.suite == suite).subquery()

        source = self.changes.source
        if source is not None:
            component = source_component_func(source)
            db_source = self.transaction.install_source(
                self.directory,
                source,
                suite,
                component,
                changed_by,
                fingerprint=self.fingerprint
            )
        else:
            db_source = None

        db_binaries = []
        for binary in sorted(self.changes.binaries, key=lambda x: x.name):
            copy_to_suite = suite
            if daklib.utils.is_in_debug_section(binary.control) and suite.debug_suite is not None:
                copy_to_suite = suite.debug_suite

            component = binary_component_func(binary)
            db_binary = self.transaction.install_binary(
                self.directory,
                binary,
                copy_to_suite,
                component,
                fingerprint=self.fingerprint,
                source_suites=source_suites,
                extra_source_archives=extra_source_archives
            )
            db_binaries.append(db_binary)

            if not policy_upload:
                check_upload_for_external_signature_request(self.session, target_suite, copy_to_suite, db_binary)

        if suite.copychanges:
            src = os.path.join(self.directory, self.changes.filename)
            dst = os.path.join(suite.archive.path, 'dists', suite.suite_name, self.changes.filename)
            self.transaction.fs.copy(src, dst, mode=suite.archive.mode)

        suite.update_last_changed()

        return (db_source, db_binaries)

    def _install_changes(self):
        assert self.changes.valid_signature
        control = self.changes.changes
        session = self.transaction.session
        config = Config()

        changelog_id = None
        # Only add changelog for sourceful uploads and binNMUs
        if self.changes.sourceful or re_bin_only_nmu.search(control['Version']):
            query = 'INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id'
            changelog_id = session.execute(query, {'changelog': control['Changes']}).scalar()
            assert changelog_id is not None

        db_changes = DBChange()
        db_changes.changesname = self.changes.filename
        db_changes.source = control['Source']
        db_changes.binaries = control.get('Binary', None)
        db_changes.architecture = control['Architecture']
        db_changes.version = control['Version']
        db_changes.distribution = control['Distribution']
        db_changes.urgency = control['Urgency']
        db_changes.maintainer = control['Maintainer']
        db_changes.changedby = control.get('Changed-By', control['Maintainer'])
        db_changes.date = control['Date']
        db_changes.fingerprint = self.fingerprint.fingerprint
        db_changes.changelog_id = changelog_id
        db_changes.closes = self.changes.closed_bugs

        try:
            self.transaction.session.add(db_changes)
            self.transaction.session.flush()
        except sqlalchemy.exc.IntegrityError:
            raise ArchiveException('{0} is already known.'.format(self.changes.filename))

        return db_changes

    def _install_policy(self, policy_queue, target_suite, db_changes, db_source, db_binaries):
        u = PolicyQueueUpload()
        u.policy_queue = policy_queue
        u.target_suite = target_suite
        u.changes = db_changes
        u.source = db_source
        u.binaries = db_binaries
        self.transaction.session.add(u)
        self.transaction.session.flush()

        queue_files = [self.changes.filename]
        queue_files.extend(f.filename for f in self.changes.buildinfo_files)
        for fn in queue_files:
            src = os.path.join(self.changes.directory, fn)
            dst = os.path.join(policy_queue.path, fn)
            self.transaction.fs.copy(src, dst, mode=policy_queue.change_perms)

        return u

    def try_autobyhand(self):
        """Try AUTOBYHAND

        Try to handle byhand packages automatically.

        @rtype: bool
        @return: C{True} if all byhand files were handled automatically (or
                 there were none), C{False} if some remain to be processed
                 manually
        """
        assert len(self.reject_reasons) == 0
        assert self.changes.valid_signature
        assert self.final_suites is not None
        assert self._checked

        byhand = self.changes.byhand_files
        if len(byhand) == 0:
            return True

        suites = list(self.final_suites)
        assert len(suites) == 1, "BYHAND uploads must be to a single suite"
        suite = suites[0]

        cnf = Config()
        control = self.changes.changes
        automatic_byhand_packages = cnf.subtree("AutomaticByHandPackages")

        remaining = []
        for f in byhand:
            if '_' in f.filename:
                parts = f.filename.split('_', 2)
                if len(parts) != 3:
                    print("W: unexpected byhand filename {0}. No automatic processing.".format(f.filename))
                    remaining.append(f)
                    continue
                package, version, archext = parts
                arch, ext = archext.split('.', 1)
            else:
                parts = f.filename.split('.')
                if len(parts) < 2:
                    print("W: unexpected byhand filename {0}. No automatic processing.".format(f.filename))
                    remaining.append(f)
                    continue
                package = parts[0]
                version = '0'
                arch = 'all'
                ext = parts[-1]

            try:
                rule = automatic_byhand_packages.subtree(package)
            except KeyError:
                remaining.append(f)
                continue

            if rule['Source'] != self.changes.source_name \
                    or rule['Section'] != f.section \
                    or ('Extension' in rule and rule['Extension'] != ext):
                remaining.append(f)
                continue

            script = rule['Script']
            retcode = subprocess.call([script, os.path.join(self.directory, f.filename), control['Version'], arch, os.path.join(self.directory, self.changes.filename), suite.suite_name], shell=False)
            if retcode != 0:
                print("W: error processing {0}.".format(f.filename))
                remaining.append(f)

        return len(remaining) == 0
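
    # Illustration (hypothetical configuration): an AutomaticByHandPackages
    # subtree is keyed by package name and provides the keys read above,
    # for example
    #
    #     AutomaticByHandPackages::debian-installer-images {
    #         Source "debian-installer";
    #         Section "raw-installer";
    #         Extension "tar.gz";
    #         Script "/srv/dak/scripts/byhand-di";
    #     };
    #
    # The named script is then invoked with the byhand file, the upload's
    # version, the architecture, the .changes file and the target suite.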

    def _install_byhand(self, policy_queue_upload, hashed_file):
        """install byhand file

        @type policy_queue_upload: L{daklib.dbconn.PolicyQueueUpload}
        @type hashed_file: L{daklib.upload.HashedFile}
        """
        fs = self.transaction.fs
        session = self.transaction.session
        policy_queue = policy_queue_upload.policy_queue

        byhand_file = PolicyQueueByhandFile()
        byhand_file.upload = policy_queue_upload
        byhand_file.filename = hashed_file.filename
        session.add(byhand_file)
        session.flush()

        src = os.path.join(self.directory, hashed_file.filename)
        dst = os.path.join(policy_queue.path, hashed_file.filename)
        fs.copy(src, dst, mode=policy_queue.change_perms)

        return byhand_file

    def _do_bts_versiontracking(self):
        cnf = Config()
        fs = self.transaction.fs

        btsdir = cnf.get('Dir::BTSVersionTrack')
        if btsdir is None or btsdir == '':
            return

        base = os.path.join(btsdir, self.changes.filename[:-8])

        # version history
        sourcedir = self.unpacked_source()
        if sourcedir is not None:
            dch_path = os.path.join(sourcedir, 'debian', 'changelog')
            with open(dch_path, 'r') as fh:
                versions = fs.create("{0}.versions".format(base), mode=0o644)
                for line in fh.readlines():
                    if re_changelog_versions.match(line):
                        versions.write(line)
                versions.close()

        # binary -> source mapping
        if self.changes.binaries:
            debinfo = fs.create("{0}.debinfo".format(base), mode=0o644)
            for binary in self.changes.binaries:
                control = binary.control
                source_package, source_version = binary.source
                line = " ".join([control['Package'], control['Version'], control['Architecture'], source_package, source_version])
                print(line, file=debinfo)
            debinfo.close()

    def _policy_queue(self, suite):
        if suite.policy_queue is not None:
            return suite.policy_queue
        return None

    def install(self):
        """install upload

        Install upload to a suite or policy queue. This method does B{not}
        handle uploads to NEW.

        You need to have called the C{check} method before calling this method.
        """
        assert len(self.reject_reasons) == 0
        assert self.changes.valid_signature
        assert self.final_suites is not None
        assert self._checked
        assert not self.new

        db_changes = self._install_changes()

        for suite in self.final_suites:
            overridesuite = suite
            if suite.overridesuite is not None:
                overridesuite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()

            policy_queue = self._policy_queue(suite)
            policy_upload = False

            redirected_suite = suite
            if policy_queue is not None:
                redirected_suite = policy_queue.suite
                policy_upload = True

            # source can be in the suite we install to or any suite we enhance
            source_suite_ids = set([suite.suite_id, redirected_suite.suite_id])
            for enhanced_suite_id, in self.session.query(VersionCheck.reference_id) \
                    .filter(VersionCheck.suite_id.in_(source_suite_ids)) \
                    .filter(VersionCheck.check == 'Enhances'):
                source_suite_ids.add(enhanced_suite_id)
            source_suites = self.session.query(Suite).filter(Suite.suite_id.in_(source_suite_ids)).subquery()

            def source_component_func(source):
                return self._source_component(overridesuite, source, only_overrides=False)

            def binary_component_func(binary):
                return self._binary_component(overridesuite, binary, only_overrides=False)

            (db_source, db_binaries) = self._install_to_suite(suite, redirected_suite, source_component_func, binary_component_func, source_suites=source_suites, extra_source_archives=[suite.archive], policy_upload=policy_upload)

            if policy_queue is not None:
                self._install_policy(policy_queue, suite, db_changes, db_source, db_binaries)

            # copy to build queues
            if policy_queue is None or policy_queue.send_to_build_queues:
                for build_queue in suite.copy_queues:
                    self._install_to_suite(suite, build_queue.suite, source_component_func, binary_component_func, source_suites=source_suites, extra_source_archives=[suite.archive])

        self._do_bts_versiontracking()

    def install_to_new(self):
        """install upload to NEW

        Install upload to NEW. This method does B{not} handle regular uploads
        to suites or policy queues.

        You need to have called the C{check} method before calling this method.
        """
        # Uploads to NEW are special as we don't have overrides.
        assert len(self.reject_reasons) == 0
        assert self.changes.valid_signature
        assert self.final_suites is not None

        source = self.changes.source
        binaries = self.changes.binaries
        byhand = self.changes.byhand_files

        # we need a suite to guess components
        suites = list(self.final_suites)
        assert len(suites) == 1, "NEW uploads must be to a single suite"
        suite = suites[0]

        # decide which NEW queue to use
        if suite.new_queue is None:
            new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='new').one()
        else:
            new_queue = suite.new_queue
        if len(byhand) > 0:
            # There is only one global BYHAND queue
            new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='byhand').one()
        new_suite = new_queue.suite

        def binary_component_func(binary):
            return self._binary_component(suite, binary, only_overrides=False)

        # guess source component
        # XXX: should be moved into an extra method
        binary_component_names = set()
        for binary in binaries:
            component = binary_component_func(binary)
            binary_component_names.add(component.component_name)
        source_component_name = None
        for c in self.session.query(Component).order_by(Component.component_id):
            guess = c.component_name
            if guess in binary_component_names:
                source_component_name = guess
                break
        if source_component_name is None:
            source_component = self.session.query(Component).order_by(Component.component_id).first()
        else:
            source_component = self.session.query(Component).filter_by(component_name=source_component_name).one()

        def source_component_func(source):
            return source_component

        db_changes = self._install_changes()
        (db_source, db_binaries) = self._install_to_suite(suite, new_suite, source_component_func, binary_component_func, source_suites=True, extra_source_archives=[suite.archive], policy_upload=True)
        policy_upload = self._install_policy(new_queue, suite, db_changes, db_source, db_binaries)

        for f in byhand:
            self._install_byhand(policy_upload, f)

        self._do_bts_versiontracking()

    def commit(self):
        """commit changes"""
        self.transaction.commit()

    def rollback(self):
        """rollback changes"""
        self.transaction.rollback()

    def __enter__(self):
        self.prepare()
        return self

    def __exit__(self, type, value, traceback):
        if self.directory is not None:
            shutil.rmtree(self.directory)
            self.directory = None
        self.changes = None
        self.transaction.rollback()
        return None
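

# Typical processing flow (illustrative sketch only; obtaining `directory`,
# `changes` and `keyrings` is the caller's job, e.g. dak's upload processing):
#
#     with ArchiveUpload(directory, changes, keyrings) as upload:
#         if not upload.check():
#             print("\n".join(upload.reject_reasons))
#         else:
#             if upload.new:
#                 upload.install_to_new()
#             else:
#                 upload.install()
#             upload.commit()
#
# Leaving the `with` block cleans up the temporary directory and rolls back
# anything that was not committed explicitly.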