123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226 |
- # Copyright (C) 2015, Ansgar Burchardt <ansgar@debian.org>
- #
- # This program is free software; you can redistribute it and/or modify
- # it under the terms of the GNU General Public License as published by
- # the Free Software Foundation; either version 2 of the License, or
- # (at your option) any later version.
- #
- # This program is distributed in the hope that it will be useful,
- # but WITHOUT ANY WARRANTY; without even the implied warranty of
- # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- # GNU General Public License for more details.
- #
- # You should have received a copy of the GNU General Public License along
- # with this program; if not, write to the Free Software Foundation, Inc.,
- # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
- import daklib.compress
- import daklib.config
- import daklib.dbconn
- import daklib.gpg
- import daklib.upload
- import daklib.regexes
- import apt_pkg
- import os
- import shutil
- import tempfile
- import urllib2
- from daklib.dbconn import DBSource, PoolFile
- from sqlalchemy.orm import object_session
# Hmm, maybe use APT directly for all of this?

# Hash fields of a Release file that parse_file_list() should read,
# in order: weakest to strongest digest.
_release_hashes_fields = ('MD5Sum', 'SHA1', 'SHA256')
class Release(object):
    """Parsed representation of a repository's (In)Release file."""

    def __init__(self, base, suite_name, data):
        """@param base: base URL or path of the repository
        @param suite_name: name used to locate the suite below dists/
        @param data: verified cleartext of the Release file
        """
        self._base = base
        self._suite_name = suite_name
        self._dict = apt_pkg.TagSection(data)
        # Map of index filename -> hashed-file record, built from the
        # MD5Sum/SHA1/SHA256 fields of the Release file.
        self._hashes = daklib.upload.parse_file_list(
            self._dict, False, daklib.regexes.re_file_safe_slash,
            _release_hashes_fields)

    def architectures(self):
        """Return the architectures listed in the Release file."""
        return self._dict['Architectures'].split()

    def components(self):
        """Return the components listed in the Release file."""
        return self._dict['Components'].split()

    def packages(self, component, architecture):
        """Return an apt_pkg.TagFile over the Packages index
        for the given component and architecture."""
        index_path = '{0}/binary-{1}/Packages'.format(component, architecture)
        index_file = obtain_release_file(self, index_path)
        return apt_pkg.TagFile(index_file.fh())

    def sources(self, component):
        """Return an apt_pkg.TagFile over the Sources index
        for the given component."""
        index_path = '{0}/source/Sources'.format(component)
        index_file = obtain_release_file(self, index_path)
        return apt_pkg.TagFile(index_file.fh())

    def suite(self):
        """Return the Suite field of the Release file."""
        return self._dict['Suite']

    def codename(self):
        """Return the Codename field of the Release file."""
        return self._dict['Codename']

    # TODO: Handle Date/Valid-Until to make sure we import
    # a newer version than before
class File(object):
    """Temporary file created in the archive's configured scratch directory.

    The underlying file is deleted automatically when the object goes away.
    """

    def __init__(self):
        scratch_dir = daklib.config.Config()['Dir::TempPath']
        self._tmp = tempfile.NamedTemporaryFile(dir=scratch_dir)

    def fh(self):
        """Return the underlying file object, rewound to the beginning."""
        self._tmp.seek(0)
        return self._tmp

    def hashes(self):
        """Return apt_pkg.Hashes computed over the file's contents."""
        return apt_pkg.Hashes(self.fh())
def obtain_file(base, path):
    """Obtain a file 'path' located below 'base'

    @param base: base URL (http:// or https://) or local directory
    @param path: relative path of the wanted file below 'base'

    Returns: daklib.import_repository.File

    Note: return type can still change
    """
    fn = '{0}/{1}'.format(base, path)
    tmp = File()
    # Bug fixes: also recognize https:// bases (previously they fell
    # through to open() and failed), open local files in binary mode
    # (indices and packages are binary data), and make sure the HTTP
    # handle is closed even if the copy fails.
    if fn.startswith(('http://', 'https://')):
        fh = urllib2.urlopen(fn, timeout=300)
        try:
            shutil.copyfileobj(fh, tmp._tmp)
        finally:
            fh.close()
    else:
        with open(fn, 'rb') as fh:
            shutil.copyfileobj(fh, tmp._tmp)
    return tmp
def obtain_release(base, suite_name, keyring, fingerprint=None):
    """Obtain release information

    Downloads dists/<suite>/InRelease below 'base', verifies its signature
    against 'keyring', and checks that the suite or codename matches.
    ('fingerprint' is currently unused here.)

    Returns: daklib.import_repository.Release
    """
    in_release = obtain_file(base, 'dists/{0}/InRelease'.format(suite_name))
    signed_data = in_release.fh().read()
    verified = daklib.gpg.SignedFile(signed_data, [keyring])
    release = Release(base, suite_name, verified.contents)
    if suite_name not in (release.suite(), release.codename()):
        raise Exception("Suite {0} doesn't match suite or codename from Release file.".format(suite_name))
    return release
# Compressed index variants to look for, in order of preference.
_compressions = ('.xz', '.gz', '.bz2')
def obtain_release_file(release, filename):
    """Obtain file referenced from Release

    A compressed version is automatically selected and decompressed if it exists.

    @param release: daklib.import_repository.Release the file is listed in
    @param filename: path of the index relative to the suite directory

    Returns: daklib.import_repository.File
    """
    if filename not in release._hashes:
        raise IOError("File {0} not referenced in Release".format(filename))

    # Prefer a compressed variant when the Release file lists one.
    target = filename
    needs_decompression = False
    for ext in _compressions:
        candidate = filename + ext
        if candidate in release._hashes:
            target = candidate
            needs_decompression = True
            break

    # Fetch the (possibly compressed) index and verify it against the
    # hashes recorded in the Release file.
    fetched = obtain_file(release._base,
                          'dists/{0}/{1}'.format(release._suite_name, target))
    release._hashes[target].check_fh(fetched.fh())

    if not needs_decompression:
        return fetched
    plain = File()
    daklib.compress.decompress(fetched.fh(), plain.fh(), target)
    return plain
def import_source_to_archive(base, entry, transaction, archive, component):
    """Import source package described by 'entry' into the given 'archive' and 'component'

    'entry' needs to be a dict-like object with at least the following
    keys as used in a Sources index: Directory, Files, Checksums-Sha1,
    Checksums-Sha256

    Return: daklib.dbconn.DBSource
    """
    # Obtain and verify files
    # Reject Directory values that could escape the repository tree.
    if not daklib.regexes.re_file_safe_slash.match(entry['Directory']):
        raise Exception("Unsafe path in Directory field")

    hashed_files = daklib.upload.parse_file_list(entry, False)
    # 'files' keeps the downloaded File objects referenced so their
    # NamedTemporaryFiles are not deleted before install below.
    files = []
    for f in hashed_files.values():
        path = os.path.join(entry['Directory'], f.filename)
        tmp = obtain_file(base, path)
        # Verify each downloaded file against the Sources index hashes.
        f.check_fh(tmp.fh())
        files.append(tmp)
        # Record the temporary filename on the hashed-file record; all
        # temporaries live in the same Dir::TempPath directory, so the
        # 'directory' left over from the last iteration applies to all.
        directory, f.input_filename = os.path.split(tmp.fh().name)

    # Inject files into archive
    source = daklib.upload.Source(directory, hashed_files.values(), [], require_signature=False)
    # TODO: ugly hack!
    # Source() re-parses the file list from the .dsc, which loses the
    # input_filename set above — copy it over for every non-.dsc file.
    for f in hashed_files.keys():
        if f.endswith('.dsc'):
            continue
        source.files[f].input_filename = hashed_files[f].input_filename

    # TODO: allow changed_by to be NULL
    # No changelog information is available here, so fall back to the
    # Maintainer field for the changed-by attribution.
    changed_by = source.dsc['Maintainer']
    db_changed_by = daklib.dbconn.get_or_set_maintainer(changed_by, transaction.session)
    db_source = transaction.install_source_to_archive(directory, source, archive, component, db_changed_by)

    return db_source
def import_package_to_suite(base, entry, transaction, suite, component):
    """Import binary package described by 'entry' into the given 'suite' and 'component'

    'entry' needs to be a dict-like object with at least the following
    keys as used in a Packages index: Filename, Size, MD5sum, SHA1,
    SHA256

    Returns: daklib.dbconn.DBBinary
    """
    # Obtain and verify file
    pool_path = entry['Filename']
    tmp = obtain_file(base, pool_path)
    tmp_dir, tmp_name = os.path.split(tmp.fh().name)

    # Build the expected-hash record from the Packages entry and check
    # the downloaded file against it.
    hashedfile = daklib.upload.HashedFile(
        os.path.basename(pool_path), long(entry['Size']),
        entry['MD5sum'], entry['SHA1'], entry['SHA256'],
        input_filename=tmp_name)
    hashedfile.check_fh(tmp.fh())

    # Inject file into archive
    binary = daklib.upload.Binary(tmp_dir, hashedfile)
    db_binary = transaction.install_binary(tmp_dir, binary, suite, component)
    transaction.flush()

    return db_binary
def import_source_to_suite(base, entry, transaction, suite, component):
    """Import source package described by 'entry' into the given 'suite' and 'component'

    'entry' needs to be a dict-like object with at least the following
    keys as used in a Sources index: Directory, Files, Checksums-Sha1,
    Checksums-Sha256

    Returns: daklib.dbconn.DBSource
    """
    # Install into the suite's archive first, then associate the new
    # source with the suite itself.
    source = import_source_to_archive(base, entry, transaction, suite.archive, component)
    source.suites.append(suite)
    transaction.flush()
    # Bug fix: the docstring promised a return value (it said DBBinary,
    # clearly a copy/paste slip for DBSource), but the function returned
    # None. Return the installed source as documented.
    return source
def source_in_archive(source, version, archive, component=None):
    """Check that source package 'source' with version 'version' exists in 'archive',
    with an optional check for the given component 'component'.

    @type source: str
    @type version: str
    @type archive: daklib.dbconn.Archive
    @type component: daklib.dbconn.Component or None
    @rtype: boolean

    Note: This should probably be moved somewhere else
    """
    session = object_session(archive)
    # Match name/version, then restrict to pool files stored in 'archive'.
    candidates = session.query(DBSource).filter_by(source=source, version=version)
    candidates = candidates.join(DBSource.poolfile) \
                           .join(PoolFile.archives) \
                           .filter_by(archive=archive)
    if component is not None:
        candidates = candidates.filter_by(component=component)
    # EXISTS subquery: cheaper than fetching the matching rows.
    return session.query(candidates.exists()).scalar()
|