123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661 |
- # -*- mode:sh -*-
# If we error out this one is called, *FOLLOWED* by cleanup in common
# Mails the failed stage's log (or a note that no log exists) to the cron
# alias, with a subject carrying the error timestamp and the stage name.
# Reads: error (flag maintained by the surrounding dinstall machinery),
#        STAGEFILE (path prefix of the current stage's log file).
function onerror() {
    ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
    subject="ATTENTION ATTENTION!"
    # NOTE(review): "${error} = false" selects "(continued)"; the flag is
    # set outside this file, so confirm the intended polarity in common.
    if [[ ${error:-false} = false ]]; then
        subject="${subject} (continued)"
    else
        subject="${subject} (interrupted)"
    fi
    subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE:-} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
    # The /nonexistant default keeps the -r test safely false when STAGEFILE
    # is unset; either branch's output is piped into mail(1).
    if [[ -r ${STAGEFILE:-/nonexistant}.log ]]; then
        cat "${STAGEFILE}.log"
    else
        echo "file ${STAGEFILE:-}.log does not exist, sorry"
    fi | mail -s "${subject}" -a "X-Debian: DAK" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
}
- ########################################################################
- # the actual functions follow #
- ########################################################################
# pushing merkels QA user, part one
# Ping the QA host over ssh so it knows a dinstall run has started.
qa1() {
    log "Telling QA user that we start dinstall"
    local -a sshopts=(-n -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90)
    ssh "${sshopts[@]}" qa@qa.debian.org sleep 1
}
# Refresh the archive's copy of mailing-lists.txt in the public doc dir.
# Fix: quote the cd target so a space/glob in ${ftpdir} cannot break it.
# ${wgetopts} stays unquoted on purpose: it is a word-split option list.
function mailingliststxt() {
    cd "${ftpdir}/doc"
    log "Updating archive version of mailing-lists.txt"
    wget ${wgetopts} https://www.debian.org/misc/mailing-lists.txt
}
# Refresh the archived pseudo-packages lists from the BTS.
# Fix: quote the cd target (unquoted expansion is word-split/globbed).
# ${wgetopts} stays unquoted on purpose: it is a word-split option list.
function pseudopackages() {
    cd "${scriptdir}/masterfiles"
    log "Updating archive version of pseudo-packages"
    for file in maintainers description; do
        wget ${wgetopts} https://bugs.debian.org/pseudopackages/pseudo-packages.${file}
    done
}
# Updating various files
# Regenerate the Bugs documentation shipped in the archive.
# Fix: quote both expansions (unquoted $configdir / $scriptsdir would be
# word-split and globbed).
function bugdoctxt() {
    log "Updating Bugs docu"
    cd "${configdir}"
    "${scriptsdir}/update-bugdoctxt"
}
# The first i18n one, syncing new descriptions
# Pull fresh translated package descriptions from the DDTP host.  The
# delivery is only imported if (a) its timestamp file is signed by our key,
# (b) the matching export dir (made by i18n2) still exists and (c) the
# sanity check script passes.  Any failure is mailed to the l10n list.
function i18n1() {
    log "Synchronizing i18n package descriptions"
    # First sync their newest data
    cd ${scriptdir}/i18nsync
    # "|| true": a down DDTP host must not abort dinstall (trap ERR is set).
    rsync -rtq --safe-links --chmod=F644,D755 --delete --delete-after ddtp-sync:/does/not/matter . || true
    # Now check if we still know about the packages for which they created the files
    # is the timestamp signed by us?
    if gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp; then
        # now read it. As its signed by us we are sure the content is what we expect, no need
        # to do more here. And we only test -d a directory on it anyway.
        TSTAMP=$(cat timestamp)
        # do we have the dir still?
        if [[ -d ${scriptdir}/i18n/${TSTAMP} ]]; then
            # Lets check!
            if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
                # Yay, worked, lets copy around
                for dir in ${extimportdists}; do
                    if [[ -d dists/${dir}/ ]]; then
                        cd dists/${dir}/main/i18n
                        rsync -aq --delete --delete-after --exclude by-hash --exclude "Translation-en.*" --exclude "Translation-*.diff/" . ${ftpdir}/dists/${dir}/main/i18n/.
                    fi
                    # Back to the sync dir before the next dist.
                    cd ${scriptdir}/i18nsync
                done
            else
                echo "ARRRR, bad guys, wrong files, ARRR"
                echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
            fi
        else
            echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
            echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
        fi
    else
        echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
        echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
    fi
}
# Syncing AppStream/DEP-11 data
# Pull AppStream (DEP-11) metadata from the external generator, validate it,
# and copy the per-dist/per-component dep11 trees into the public archive.
# A failed validation is mailed instead of publishing anything.
function dep11() {
    log "Synchronizing AppStream metadata"
    # First sync their newest data
    local dep11dir="${scriptdir}/dep11"
    mkdir -p ${dep11dir}
    cd ${dep11dir}
    rsync -rtq --safe-links --chmod=F644,D755 --delete --delete-after dep11-sync:/does/not/matter .
    # Lets check!
    if ${scriptsdir}/dep11-basic-validate.py . ${dep11dir}/; then
        # Yay, worked, lets copy around
        for dir in ${extimportdists}; do
            if [[ -d ${dir}/ ]]; then
                for comp in main contrib non-free non-free-firmware; do
                    # Not every dist carries every component upstream.
                    if ! [[ -d ${dir}/${comp} ]]; then
                        continue
                    fi
                    mkdir -p ${ftpdir}/dists/${dir}/${comp}/dep11
                    cd ${dir}/${comp}
                    rsync -rtq --delete --delete-after --exclude by-hash --exclude "./*.tmp" . ${ftpdir}/dists/${dir}/${comp}/dep11/.
                    cd ${dep11dir}
                done
            fi
        done
    else
        echo "ARRRR, bad guys, wrong files, ARRR"
        echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" mak@debian.org
    fi
}
# Report override entries that no longer match anything in the archive.
cruft() {
    log "Checking for cruft in overrides"
    dak check-overrides
}
# Drop superseded source/binary associations, then bring the various
# *-debug suites in line with their base suites.
dominate() {
    log "Removing obsolete source and binary associations"
    dak dominate
    dak manage-debug-suites unstable-debug experimental-debug stretch-backports-debug buster-backports-debug buster-backports-sloppy-debug bullseye-backports-debug bullseye-backports-sloppy-debug bookworm-backports-debug stable-backports-sloppy-debug testing-backports-debug testing-proposed-updates-debug
}
# Automatically decruft unstable, experimental and tpu.
autocruft() {
    log "Check for obsolete binary packages"
    dak auto-decruft -s unstable
    dak auto-decruft -s experimental --if-newer-version-in unstable --if-newer-version-in-rm-msg "NVIU"
    dak auto-decruft -s testing-proposed-updates --if-newer-version-in testing --if-newer-version-in-rm-msg "NVIT" --decruft-equal-versions
}
# Dump the current overrides as text files into the override dir.
# Fix: quote the cd target (unquoted $overridedir is word-split/globbed).
function overrides() {
    log "Writing overrides into text files"
    cd "${overridedir}"
    dak make-overrides
}
# Generating package / file mapping
# Emit a bzip2-compressed package<->pool-file map per public archive.
mpfm() {
    local archiveroot
    log "Generating package / file mapping"
    for archive in "${public_archives[@]}"; do
        log " archive: ${archive}"
        archiveroot="$(get_archiveroot "${archive}")"
        dak make-pkg-file-mapping "${archive}" | bzip2 -9 > "${archiveroot}/indices/package-file.map.bz2"
    done
}
# Build Packages/Sources indices and Contents files for every public archive.
packages() {
    log " Generating Packages/Sources for: ${public_archives[*]}"
    dak generate-packages-sources2 -a "${public_archives[*]}"
    log " Generating Contents for: ${public_archives[*]}"
    dak contents generate -a "${public_archives[*]}"
}
# Build the pdiff (incremental index diff) files.
pdiff() {
    log "Generating pdiff files"
    dak generate-index-diffs
}
# Write the (In)Release files for all public archives.
release() {
    log "Generating Release files"
    dak generate-releases -a "${public_archives[*]}"
}
# Deduplicate the archive: database-driven pool dedup first, then hardlink
# identical files in the non-pool metadata trees of each public archive.
function dedup() {
    log "Dedupe archive"
    dak archive-dedup-pool
    for archive in "${public_archives[@]}"; do
        archiveroot="$(get_archiveroot "${archive}")"
        cd "${archiveroot}"
        for dir in doc indices project tools; do
            if [[ -d ${dir} ]]; then
                # NOTE(review): both --hardlink and --linkhard are passed;
                # they look redundant -- confirm against the jdupes version
                # deployed on this host before touching the flags.
                jdupes --hardlink --noempty --quiet --linkhard --recurse ${dir}
            fi
        done
    done
}
# Build the Maintainers / Uploaders indices for every public archive.
# Fixes: quote the pseudo-packages path (was unquoted, so word-split), and
# use "mkdir -p" instead of test-then-mkdir (race-free, creates parents).
function mkmaintainers() {
    local archiveroot
    local mkmindices
    log 'Creating Maintainers index ... '
    for archive in "${public_archives[@]}"; do
        archiveroot="$(get_archiveroot "${archive}")"
        mkmindices="${archiveroot}/indices"
        mkdir -p "${mkmindices}"
        cd "${mkmindices}"
        rm -f Maintainers Maintainers.gz Uploaders Uploaders.gz
        dak make-maintainers -a "${archive}" "${scriptdir}/masterfiles/pseudo-packages.maintainers"
        # --no-name keeps the gzip output byte-stable for mirrors.
        gzip -9v --rsyncable --no-name <Maintainers >Maintainers.gz
        gzip -9v --rsyncable --no-name <Uploaders >Uploaders.gz
    done
}
# Publish gzipped copies of the override files into the public indices dir.
function copyoverrides() {
    log 'Copying override files into public view ...'
    # Subshell so "shopt -s nullglob" does not leak into the caller.
    (
        shopt -s nullglob
        rm -f ${indices}/override.*.gz
        # Map suite names to codenames so the brace-glob below matches the
        # files make-overrides wrote (override.<codename>.<component>...).
        TESTING=$(dak admin suite-config get-value testing codename)
        STABLE=$(dak admin suite-config get-value stable codename)
        OLDSTABLE=$(dak admin suite-config get-value oldstable codename)
        OLDOLDSTABLE=$(dak admin suite-config get-value oldoldstable codename)
        for ofile in ${overridedir}/override.{$OLDOLDSTABLE,$OLDSTABLE,$STABLE,$TESTING,sid}.{,extra.}{main,contrib,non-free}*; do
            bname=${ofile##*/}
            gzip -9cv --rsyncable --no-name ${ofile} > ${indices}/${bname}.gz
            chmod g+w ${indices}/${bname}.gz
        done
    )
}
# Build the per-architecture / per-suite file lists under indices/files/
# that mirrors use for partial mirroring.  Combines a postgres dump of the
# pool (path + architecture) with find(1) runs over the on-disk dists tree.
function mkfilesindices() {
    # Several greps below may legitimately match nothing; do not let that
    # kill the pipelines while the ERR trap is armed.
    set +o pipefail
    umask 002
    rm -f $base/ftp/indices/files/*.files
    rm -f $base/ftp/indices/files/components/*.list.gz
    cd $base/ftp/indices/files/components
    ARCHLIST=$(mktemp)
    log "Querying postgres"
    # "path|arch" lines; arch is empty for files with no binary (sources).
    local query="
        SELECT './pool/' || c.name || '/' || f.filename AS path, a.arch_string AS arch_string
        FROM files f
        JOIN files_archive_map af ON f.id = af.file_id
        JOIN component c ON af.component_id = c.id
        JOIN archive ON af.archive_id = archive.id
        LEFT OUTER JOIN
        (binaries b
        JOIN architecture a ON b.architecture = a.id)
        ON f.id = b.file
        WHERE archive.name = 'ftp-master'
        ORDER BY path, arch_string
    "
    psql -At -c "$query" >$ARCHLIST
    # Emit every input path plus each of its parent directories, once.
    # NOTE(review): includedirs is defined but not called in this function;
    # possibly historical or used by a sourced sibling -- confirm.
    includedirs () {
        perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
    }
    # Reorder a list so ./pool/ entries come before everything else.
    poolfirst () {
        perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
    }
    log "Generating sources list"
    # Source files: ARCHLIST lines with empty arch, plus dists source trees.
    (
        sed -n 's/|$//p' $ARCHLIST
        cd $base/ftp
        find ./dists -maxdepth 1 \! -type d
        find ./dists \! -type d | grep "/source/"
    ) | sort -u | gzip -9 --rsyncable --no-name > source.list.gz
    log "Generating arch lists"
    # All arches seen in the pool (amd64 forced in), minus "all".
    ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
    for a in $ARCHES; do
        # Per-arch pool files + arch-all pool files + matching dists files.
        (sed -n "s/|$a$//p" $ARCHLIST
            sed -n 's/|all$//p' $ARCHLIST
            cd $base/ftp
            find ./dists -maxdepth 1 \! -type d
            find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
        ) | sort -u | gzip -9 --rsyncable --no-name > arch-$a.list.gz
    done
    log "Generating suite lists"
    # Pool paths referenced by a suite id: sources (direct, extra-source and
    # via binaries), then the binaries themselves.
    suite_list () {
        local suite_id="$(printf %d $1)"
        local query
        query="
            SELECT DISTINCT './pool/' || c.name || '/' || f.filename
            FROM
            (SELECT sa.source AS source
            FROM src_associations sa
            WHERE sa.suite = $suite_id
            UNION
            SELECT esr.src_id
            FROM extra_src_references esr
            JOIN bin_associations ba ON esr.bin_id = ba.bin
            WHERE ba.suite = $suite_id
            UNION
            SELECT b.source AS source
            FROM bin_associations ba
            JOIN binaries b ON ba.bin = b.id WHERE ba.suite = $suite_id) s
            JOIN dsc_files df ON s.source = df.source
            JOIN files f ON df.file = f.id
            JOIN files_archive_map af ON f.id = af.file_id
            JOIN component c ON af.component_id = c.id
            JOIN archive ON af.archive_id = archive.id
            WHERE archive.name = 'ftp-master'
        "
        psql -F' ' -A -t -c "$query"
        query="
            SELECT './pool/' || c.name || '/' || f.filename
            FROM bin_associations ba
            JOIN binaries b ON ba.bin = b.id
            JOIN files f ON b.file = f.id
            JOIN files_archive_map af ON f.id = af.file_id
            JOIN component c ON af.component_id = c.id
            JOIN archive ON af.archive_id = archive.id
            WHERE ba.suite = $suite_id AND archive.name = 'ftp-master'
        "
        psql -F' ' -A -t -c "$query"
    }
    psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
    while read id suite; do
        [[ -e $base/ftp/dists/$suite ]] || continue
        (
            # Resolve suite symlinks (e.g. stable -> bookworm) and list both
            # the dist tree and any symlink aliases pointing at it.
            (cd $base/ftp
                distname=$(cd dists; readlink $suite || echo $suite)
                find ./dists/$distname \! -type d
                for distdir in ./dists/*; do
                    [[ $(readlink $distdir) != $distname ]] || echo $distdir
                done
            )
            suite_list $id
        ) | sort -u | gzip -9 --rsyncable --no-name > suite-${suite}.list.gz
    done
    log "Finding everything on the ftp site to generate sundries"
    (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
    # Sundries: on-disk files not covered by any generated list.
    rm -f sundries.list
    zcat *.list.gz | cat - *.list | sort -u |
    diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
    log "Generating files list"
    for a in $ARCHES; do
        (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
        cat - sundries.list dists.list project.list docs.list indices.list |
        sort -u | poolfirst > ../arch-$a.files
    done
    TESTING=$(dak admin suite-config get-value testing codename)
    STABLE=$(dak admin suite-config get-value stable codename)
    OLDSTABLE=$(dak admin suite-config get-value oldstable codename)
    OLDOLDSTABLE=$(dak admin suite-config get-value oldoldstable codename)
    (cd $base/ftp/
        for dist in sid $OLDOLDSTABLE $OLDSTABLE $STABLE $TESTING; do
            find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 --rsyncable --no-name > $base/ftp/indices/files/components/translation-$dist.list.gz
        done
    )
    # "typical" mirror profile: i386+amd64 plus p-u and translations.
    (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-$OLDOLDSTABLE.list.gz ; zcat translation-$OLDSTABLE.list.gz ; zcat translation-$STABLE.list.gz ; zcat translation-$TESTING.list.gz) |
    sort -u | poolfirst > ../typical.files
    rm -f $ARCHLIST
    log "Done!"
    set -o pipefail
}
# Generate the dsync file list and the legacy md5sums index per archive.
function mkchecksums() {
    local archiveroot dsynclist md5list
    for archive in "${public_archives[@]}"; do
        archiveroot="$(get_archiveroot "${archive}")"
        dsynclist=$dbdir/dsync.${archive}.list
        md5list=${archiveroot}/indices/md5sums
        log "Creating md5 / dsync index file for ${archive}... "
        cd "$archiveroot"
        # FIXME: We should get rid of md5sums, but until then, keep it running.
        # People actually use this file.
        # dsync-flist is a locally built tool; point the loader at bindir.
        LD_LIBRARY_PATH=${bindir}:${LD_LIBRARY_PATH:-} ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
        LD_LIBRARY_PATH=${bindir}:${LD_LIBRARY_PATH:-} ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9 --rsyncable --no-name > ${md5list}.gz
    done
}
# Refresh the "public" mirror copy of each archive via hardlink rsync, then
# stamp fresh project/trace files into every copy.
function mirror() {
    local archiveroot targetpath
    local -a tracepaths
    for archive in "${public_archives[@]}"; do
        archiveroot="$(get_archiveroot "${archive}")"
        targetpath="${mirrordir}/${archive}"
        # rsync aborts on a missing --exclude-from file, so guarantee one.
        if [[ ! -f ${archiveroot}/../mirror-excluded-files.list ]]; then
            log "Mirror exclude file should exist, even if it is empty"
            touch ${archiveroot}/../mirror-excluded-files.list
        fi
        log "Regenerating \"public\" mirror/${archive} hardlink fun"
        # --link-dest hardlinks unchanged files against the live archive.
        # Filter rules: keep project/trace/* out of the transfer but protect
        # the existing trace files from --delete-excluded (rsync applies
        # these rules in order, so do not reorder them).
        rsync -aH --link-dest ${archiveroot} \
            --delete --delete-after --delete-excluded \
            --exclude "Packages.*.new" --exclude "Sources.*.new" \
            --filter 'exclude /project/trace/*' \
            --filter 'protect /project/' \
            --filter 'protect /project/trace/' \
            --filter 'protect /project/trace/*' \
            --exclude-from=${archiveroot}/../mirror-excluded-files.list \
            --ignore-errors \
            ${archiveroot}/. ${targetpath}/.
        tracepaths+=("${targetpath}")
    done
    write_project_trace "${tracepaths[@]}"
}
# Expire transitions that are no longer applicable.
# Fix: quote the cd target (unquoted ${base} is word-split/globbed).
function transitionsclean() {
    log "Removing out of date transitions..."
    cd "${base}"
    dak transitions -c -a
}
# Export the Debian Maintainer per-source upload permissions.
# Fix: quote the redirect target (unquoted ${exportdir} is word-split).
function dm() {
    log "Updating DM permissions page"
    dak acl export-per-source dm > "${exportdir}/dm.txt"
}
# Let the unprivileged dak user triage new ftp.debian.org bugs.
bts() {
    log "Categorizing uncategorized bugs filed against ftp.debian.org"
    sudo -u dak-unpriv dak bts-categorize
}
# Verify each public archive copy against its InRelease files (SHA1 section:
# existence, size, checksum) and, if clean, kick off the mirror push for it.
function mirrorpush() {
    log "Checking the public archive copies..."
    local archiveroot targetpath
    for archive in "${public_archives[@]}"; do
        log "... archive: ${archive}"
        archiveroot="$(get_archiveroot "${archive}")"
        targetpath="${mirrordir}/${archive}"
        cd ${archiveroot}/dists
        broken=0
        for release in $(find . -name "InRelease"); do
            echo "Processing: ${release}"
            subdir=${release%/InRelease}
            # Input is the SHA1 block of the InRelease file (see the process
            # substitution at "done"): one "checksum size name" per line.
            while read SHASUM SIZE NAME; do
                if ! [[ -f ${subdir}/${NAME} ]]; then
                    bname=$(basename ${NAME})
                    if [[ "${bname}" =~ ^(Packages|Sources|Contents-[a-zA-Z0-9-]+|Translation-[a-zA-Z_]+|Components-[a-zA-Z0-9-]+\.yml|icons-(128x128|64x64)\.tar)$ ]]; then
                        # We don't keep unpacked files, don't check for their existance.
                        # We might want to go and check their unpacked shasum, but right now
                        # I don't care. I believe it should be enough if all the packed shasums
                        # match.
                        continue
                    fi
                    broken=$(( broken + 1 ))
                    echo "File ${subdir}/${NAME} is missing"
                    continue
                fi
                # We do have symlinks in the tree (see the contents files currently).
                # So we use "readlink -f" to check the size of the target, as thats basically
                # what gen-releases does
                fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
                if [[ ${fsize} -ne ${SIZE} ]]; then
                    broken=$(( broken + 1 ))
                    echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
                    continue
                fi
                fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
                fshasum=${fshasum%% *}
                if [[ ${fshasum} != ${SHASUM} ]]; then
                    broken=$(( broken + 1 ))
                    echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
                    continue
                fi
            done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
        done
        if [[ $broken -gt 0 ]]; then
            log_error "Trouble with the public mirror for ${archive}, found ${broken} errors"
            continue
        else
            log "Starting the mirrorpush for ${archive}"
            # ";;&" makes bash keep testing later patterns: the debug/
            # backports arms only set pusharg and then fall through to the
            # "*)" arm, which supplies fname and pname.
            case ${archive} in
                ftp-master)
                    fname="mirrorstart"
                    pusharg=""
                    pname="debian"
                    ;;
                security)
                    fname="mirrorstart"
                    pusharg="-a security"
                    pname="debian-security"
                    ;;
                debian-debug)
                    pusharg="-a debug"
                    ;;&
                debian-security-debug)
                    pusharg="-a security-debug"
                    ;;&
                backports)
                    pusharg="-a backports"
                    ;;&
                *)
                    fname="mirrorstart.${archive}"
                    pname=${archive}
                    ;;
            esac
            mkdir -p ${webdir}/${pname}/project/trace/
            ln -sf ${mirrordir}/${archive}/project/trace/master ${webdir}/${pname}/project/trace/master
            ln -sf ${mirrordir}/${archive}/project/trace/master ${webdir}/${pname}/project/trace/ftp-master.debian.org
            date -u > ${webdir}/${fname}
            echo "Using dak v1" >> ${webdir}/${fname}
            echo "Running on host $(hostname -f)" >> ${webdir}/${fname}
            # Kick the actual push in the background; output goes to a log.
            sudo -u archvsync runmirrors ${pusharg} > ~dak/runmirrors-${archive}.log 2>&1 &
        fi
    done
}
# Trigger the separately maintained backports mirror sync.
mirrorpush-backports() {
    log "Syncing backports mirror"
    sudo -u backports /home/backports/bin/update-archive
}
# Push the cdbuilder host in the background; output goes to a log file.
mirrorpush-release() {
    log "Pushing cdbuilder host"
    sudo -u archvsync runmirrors -a release > ~dak/runmirrors-release.log 2>&1 &
}
# Export per-suite package lists for the i18n (DDTP) project.  The signed
# "timestamp" file produced here is what i18n1 verifies before importing
# translations back.
function i18n2() {
    log "Exporting package data foo for i18n project"
    STAMP=$(date "+%Y%m%d%H%M")
    mkdir -p ${scriptdir}/i18n/${STAMP}
    cd ${scriptdir}/i18n/${STAMP}
    # One control-suite dump per suite, named after its codename.
    for suite in stable testing unstable; do
        codename=$(dak admin suite-config get-value ${suite} codename)
        echo "Codename is ${codename}"
        dak control-suite -l ${suite} >${codename}
    done
    echo "${STAMP}" > timestamp
    gpg --homedir /srv/ftp-master.debian.org/s3kr1t/dot-gnupg --no-options --batch --no-tty --armour --default-key 473041FA --detach-sign -o timestamp.gpg timestamp
    rm -f md5sum
    md5sum * > md5sum
    # Point the webdir "i18n" symlink at the fresh export ...
    cd ${webdir}/
    ln -sfT ${scriptdir}/i18n/${STAMP} i18n
    cd ${scriptdir}
    # ... and expire exports older than two days (never the current one).
    find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
}
# Refresh the statistics pages in the web dir.
# Fix: quote cd target and redirect paths (were unquoted expansions).
# The commented-out ftpstats/R steps are kept verbatim for reference.
function stats() {
    log "Updating stats data"
    cd "${configdir}"
    #${scriptsdir}/update-ftpstats ${base}/log/* > ${base}/misc/ftpstats.data
    #R --slave --vanilla < ${base}/misc/ftpstats.R
    dak stats arch-space > "${webdir}/arch-space"
    dak stats pkg-nums > "${webdir}/pkg-nums"
}
# Delete saved postgres transaction-id files older than ~3 months.
# Fixes: quote the cd target; give find an explicit start path "." (calling
# find without a path is a GNU extension, and explicit is clearer anyway).
function cleantransactions() {
    log "Cleanup transaction ids older than 3 months"
    cd "${base}/backup/"
    find . -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -delete
}
# Feed the given log file to the log-statistics tool, but only on the main
# ftp-master host (functionname is the host role set by the cron wrapper).
# Fixes: use [[ ]] like the rest of this file (was a lone [ ] test) and
# quote the cd target and the script path.
function logstats() {
    if [[ ${functionname} == ftp-master.debian.org ]]; then
        cd "${TMPDIR}"
        "${masterdir}/tools/logs.py" "$1"
    fi
}
# save timestamp when we start
# Record the dinstall start timestamp (${NOW}) in dbdir for later stages.
# Fix: quote ${NOW} -- unquoted, echo word-splits it and collapses any
# consecutive whitespace inside the timestamp; also quote the target path.
function savetimestamp() {
    echo "${NOW}" > "${dbdir}/dinstallstart"
}
# Mail the full log of this dinstall run to the cron alias.
maillogfile() {
    mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org < "${LOGFILE}"
}
# Export the list of source packages currently in testing.
# Fixes: grep -E replaces the deprecated egrep alias; quote the redirect
# target (unquoted ${webdir} was word-split/globbed).
function testingsourcelist() {
    dak ls -s testing -f heidi -r . | grep -E 'source$' > "${webdir}/testing.list"
}
# Function to update a "statefile" telling people what we are doing
# (more or less).
#
# This should be called with the argument(s)
# - Status name we want to show.
#
function state() {
    RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
    printf 'Dinstall start: %s\nCurrent action: %s\nAction start: %s\n' \
        "${DINSTALLBEGIN}" "$*" "${RIGHTNOW}" > "${DINSTALLSTATE}"
}
# extract changelogs and stuff
# Export changelogs from the archive, publish them in the rsync export tree
# and trigger the static-mirror component update in the background.
function changelogs() {
    log "Extracting changelogs"
    dak make-changelog -e -a ftp-master
    # Compress the filelist (xz -f overwrites an existing .xz).
    [[ -f ${exportdir}/changelogs/filelist.yaml ]] && xz -f ${exportdir}/changelogs/filelist.yaml
    mkdir -p ${exportpublic}/changelogs
    cd ${exportpublic}/changelogs
    rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
    sudo -H -u staticsync /usr/local/bin/static-update-component metadata.ftp-master.debian.org >/dev/null 2>&1 &
    # The backports variant is currently disabled; kept for reference.
    # dak make-changelog -e -a backports
    # [[ -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml ]] && xz -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml
    # mkdir -p /srv/backports-master.debian.org/rsync/export/changelogs
    # cd /srv/backports-master.debian.org/rsync/export/changelogs
    # rsync -aHW --delete --delete-after --ignore-errors /srv/backports-master.debian.org/export/changelogs/. .
}
# Generate a list of extra mirror files, sha256sum em and sign that
# For each public archive: checksum every top-level file that is not covered
# elsewhere (pool, indices, Release-covered files, ...) and publish a
# clearsigned sha256 list as ${archiveroot}/extrafiles.
function signotherfiles() {
    log "Signing extra mirror files"
    local archiveroot
    for archive in "${public_archives[@]}"; do
        log "... archive: ${archive}"
        archiveroot="$(get_archiveroot "${archive}")"
        local TMPLO=$( gettempfile )
        cd ${archiveroot}
        rm -f extrafiles
        # NOTE(review): the unquoted $(find ...) assumes no whitespace in
        # archive filenames -- true for the generated layout, but fragile.
        sha256sum $(find * -type f | egrep -v '(pool|i18n|dep11|source)/|Contents-.*\.(gz|diff)|installer|binary-|(In)?Release(.gpg)?|\.changes' | sort) > ${TMPLO}
        # NOTE(review): the option group "--no-options --batch --no-tty
        # --armour" is passed twice; harmless but redundant.
        gpg --no-options --batch --no-tty --armour --personal-digest-preferences=SHA256 --homedir /srv/ftp-master.debian.org/s3kr1t/dot-gnupg --no-options --batch --no-tty --armour --default-key 2B90D010 --clearsign --output ${archiveroot}/extrafiles ${TMPLO}
        rm -f ${TMPLO}
    done
}
# Mark the start of a cron run and arm the error handler.
startup() {
    touch "${DINSTALLSTART}"
    log_timestamp "startup"
    trap onerror ERR
}
# Tear down after the cron run: disarm the error trap, record log stats
# and mark the end of the run.
postcronscript() {
    trap - ERR
    logstats ${LOGFILE}
    touch "${DINSTALLEND}"
}
|