# dinstall.functions — function library sourced by the dak dinstall cron run.
  1. # -*- mode:sh -*-
  2. # If we error out this one is called, *FOLLOWED* by cleanup in common
  3. function onerror() {
  4. ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
  5. subject="ATTENTION ATTENTION!"
  6. if [[ ${error:-false} = false ]]; then
  7. subject="${subject} (continued)"
  8. else
  9. subject="${subject} (interrupted)"
  10. fi
  11. subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE:-} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
  12. if [[ -r ${STAGEFILE:-/nonexistant}.log ]]; then
  13. cat "${STAGEFILE}.log"
  14. else
  15. echo "file ${STAGEFILE:-}.log does not exist, sorry"
  16. fi | mail -s "${subject}" -a "X-Debian: DAK" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
  17. }
  18. ########################################################################
  19. # the actual functions follow #
  20. ########################################################################
# pushing merkels QA user, part one
function qa1() {
    log "Telling QA user that we start dinstall"
    # Non-interactive ssh "ping" so the QA host knows dinstall has begun.
    # NOTE(review): -2 (protocol 2) and SetupTimeOut are non-standard /
    # obsolete ssh options — confirm the deployed OpenSSH still accepts them.
    ssh -n -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
}
  26. function mailingliststxt() {
  27. cd ${ftpdir}/doc
  28. log "Updating archive version of mailing-lists.txt"
  29. wget ${wgetopts} https://www.debian.org/misc/mailing-lists.txt
  30. }
  31. function pseudopackages() {
  32. cd ${scriptdir}/masterfiles
  33. log "Updating archive version of pseudo-packages"
  34. for file in maintainers description; do
  35. wget ${wgetopts} https://bugs.debian.org/pseudopackages/pseudo-packages.${file}
  36. done
  37. }
  38. # Updating various files
  39. function bugdoctxt() {
  40. log "Updating Bugs docu"
  41. cd $configdir
  42. $scriptsdir/update-bugdoctxt
  43. }
# The first i18n one, syncing new descriptions
function i18n1() {
    log "Synchronizing i18n package descriptions"
    # First sync their newest data
    cd ${scriptdir}/i18nsync
    # Best effort: failures of the remote sync are tolerated (|| true).
    rsync -rtq --safe-links --chmod=F644,D755 --delete --delete-after ddtp-sync:/does/not/matter . || true

    # Now check if we still know about the packages for which they created the files
    # is the timestamp signed by us?
    if gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp; then
        # now read it. As its signed by us we are sure the content is what we expect, no need
        # to do more here. And we only test -d a directory on it anyway.
        TSTAMP=$(cat timestamp)
        # do we have the dir still?
        if [[ -d ${scriptdir}/i18n/${TSTAMP} ]]; then
            # Lets check!
            if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
                # Yay, worked, lets copy around
                for dir in ${extimportdists}; do
                    if [[ -d dists/${dir}/ ]]; then
                        cd dists/${dir}/main/i18n
                        # by-hash, English and diff files are managed elsewhere — keep them out.
                        rsync -aq --delete --delete-after --exclude by-hash --exclude "Translation-en.*" --exclude "Translation-*.diff/" . ${ftpdir}/dists/${dir}/main/i18n/.
                    fi
                    cd ${scriptdir}/i18nsync
                done
            else
                # Validation failed: complain loudly to the l10n list.
                echo "ARRRR, bad guys, wrong files, ARRR"
                echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
            fi
        else
            # The signed timestamp points at an export directory we no longer have.
            echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
            echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
        fi
    else
        # Signature check failed: refuse the whole import.
        echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
        echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
    fi
}
# Syncing AppStream/DEP-11 data
function dep11() {
    log "Synchronizing AppStream metadata"
    # First sync their newest data
    local dep11dir="${scriptdir}/dep11"
    mkdir -p ${dep11dir}
    cd ${dep11dir}
    rsync -rtq --safe-links --chmod=F644,D755 --delete --delete-after dep11-sync:/does/not/matter .

    # Lets check!
    if ${scriptsdir}/dep11-basic-validate.py . ${dep11dir}/; then
        # Yay, worked, lets copy around
        for dir in ${extimportdists}; do
            if [[ -d ${dir}/ ]]; then
                for comp in main contrib non-free; do
                    mkdir -p ${ftpdir}/dists/${dir}/${comp}/dep11
                    cd ${dir}/${comp}
                    # by-hash and temp files are managed elsewhere — keep them out.
                    rsync -rtq --delete --delete-after --exclude by-hash --exclude "./*.tmp" . ${ftpdir}/dists/${dir}/${comp}/dep11/.
                    cd ${dep11dir}
                done
            fi
        done
    else
        # Validation failed: notify the DEP-11 maintainer instead of importing.
        echo "ARRRR, bad guys, wrong files, ARRR"
        echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" mak@debian.org
    fi
}
function cruft() {
    # Report override entries that no longer match any package.
    log "Checking for cruft in overrides"
    dak check-overrides
}
function dominate() {
    # Drop obsolete source/binary suite associations, then bring the
    # listed *-debug suites in line with their base suites.
    log "Removing obsolete source and binary associations"
    dak dominate
    dak manage-debug-suites unstable-debug experimental-debug stretch-backports-debug buster-backports-debug buster-backports-sloppy-debug bullseye-backports-debug bullseye-backports-sloppy-debug bookworm-backports-debug testing-proposed-updates-debug
}
function autocruft() {
    # Automatically remove obsolete binaries; NVIU/NVIT are the removal
    # message tags used when a newer version exists in the reference suite.
    log "Check for obsolete binary packages"
    dak auto-decruft -s unstable
    dak auto-decruft -s experimental --if-newer-version-in unstable --if-newer-version-in-rm-msg "NVIU"
    dak auto-decruft -s testing-proposed-updates --if-newer-version-in testing --if-newer-version-in-rm-msg "NVIT" --decruft-equal-versions
}
  122. function overrides() {
  123. log "Writing overrides into text files"
  124. cd $overridedir
  125. dak make-overrides
  126. }
function mpfm() {
    local archiveroot

    # Write a bzip2-compressed package→file mapping into each archive's indices/.
    log "Generating package / file mapping"
    for archive in "${public_archives[@]}"; do
        log " archive: ${archive}"
        archiveroot="$(get_archiveroot "${archive}")"
        dak make-pkg-file-mapping "${archive}" | bzip2 -9 > "${archiveroot}/indices/package-file.map.bz2"
    done
}
function packages() {
    # Regenerate Packages/Sources and Contents indices for all public archives.
    log " Generating Packages/Sources for: ${public_archives[*]}"
    dak generate-packages-sources2 -a "${public_archives[*]}"
    log " Generating Contents for: ${public_archives[*]}"
    dak contents generate -a "${public_archives[*]}"
}
function pdiff() {
    # Generate the incremental index diffs (pdiffs) for apt.
    log "Generating pdiff files"
    dak generate-index-diffs
}
function release() {
    # Write (and sign) the Release/InRelease files for all public archives.
    log "Generating Release files"
    dak generate-releases -a "${public_archives[*]}"
}
  150. function dedup() {
  151. log "Dedupe archive"
  152. dak archive-dedup-pool
  153. for archive in "${public_archives[@]}"; do
  154. archiveroot="$(get_archiveroot "${archive}")"
  155. cd "${archiveroot}"
  156. for dir in doc indices project tools; do
  157. if [[ -d ${dir} ]]; then
  158. jdupes --hardlink --noempty --quiet --summarize --recurse ${dir}
  159. fi
  160. done
  161. done
  162. }
function mkmaintainers() {
    local archiveroot
    local mkmindices

    log 'Creating Maintainers index ... '

    for archive in "${public_archives[@]}"; do
        archiveroot="$(get_archiveroot "${archive}")"
        mkmindices="${archiveroot}/indices"
        if ! [[ -d ${mkmindices} ]]; then
            mkdir "${mkmindices}"
        fi
        cd "${mkmindices}"
        rm -f Maintainers Maintainers.gz Uploaders Uploaders.gz
        # make-maintainers writes Maintainers and Uploaders into the cwd,
        # folding in the pseudo-package maintainers list.
        dak make-maintainers -a "${archive}" ${scriptdir}/masterfiles/pseudo-packages.maintainers
        # --no-name keeps the gzip output reproducible (no timestamp/filename).
        gzip -9v --rsyncable --no-name <Maintainers >Maintainers.gz
        gzip -9v --rsyncable --no-name <Uploaders >Uploaders.gz
    done
}
function copyoverrides() {
    log 'Copying override files into public view ...'

    (
        # Subshell so nullglob does not leak into the caller.
        shopt -s nullglob
        rm -f ${indices}/override.*.gz
        # Resolve suite names to codenames for the brace expansion below.
        TESTING=$(dak admin suite-config get-value testing codename)
        STABLE=$(dak admin suite-config get-value stable codename)
        OLDSTABLE=$(dak admin suite-config get-value oldstable codename)
        OLDOLDSTABLE=$(dak admin suite-config get-value oldoldstable codename)
        # Brace expansion × trailing glob; with nullglob, missing files vanish.
        for ofile in ${overridedir}/override.{$OLDOLDSTABLE,$OLDSTABLE,$STABLE,$TESTING,sid}.{,extra.}{main,contrib,non-free}*; do
            bname=${ofile##*/}
            gzip -9cv --rsyncable --no-name ${ofile} > ${indices}/${bname}.gz
            chmod g+w ${indices}/${bname}.gz
        done
    )
}
function mkfilesindices() {
    # Build the per-arch / per-suite file lists under indices/files/.
    # Several pipelines below have stages that may legitimately fail,
    # so pipefail is suspended for the duration of this function.
    set +o pipefail
    umask 002
    rm -f $base/ftp/indices/files/*.files
    rm -f $base/ftp/indices/files/components/*.list.gz
    cd $base/ftp/indices/files/components

    ARCHLIST=$(mktemp)

    # One row per pool file: "path|arch" (arch empty for non-binary files).
    log "Querying postgres"
    local query="
SELECT './pool/' || c.name || '/' || f.filename AS path, a.arch_string AS arch_string
FROM files f
JOIN files_archive_map af ON f.id = af.file_id
JOIN component c ON af.component_id = c.id
JOIN archive ON af.archive_id = archive.id
LEFT OUTER JOIN
(binaries b
JOIN architecture a ON b.architecture = a.id)
ON f.id = b.file
WHERE archive.name = 'ftp-master'
ORDER BY path, arch_string
"
    psql -At -c "$query" >$ARCHLIST

    # Emit each input line plus all of its parent directories (each once).
    # NOTE(review): appears unused within this function — confirm callers.
    includedirs () {
        perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
    }
    # Reorder stdin so ./pool/ paths come before everything else.
    poolfirst () {
        perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
    }

    log "Generating sources list"
    (
        # Rows with an empty arch column are source files.
        sed -n 's/|$//p' $ARCHLIST
        cd $base/ftp
        find ./dists -maxdepth 1 \! -type d
        find ./dists \! -type d | grep "/source/"
    ) | sort -u | gzip -9 --rsyncable --no-name > source.list.gz

    log "Generating arch lists"
    # All arches seen in the pool (amd64 forced in), minus "all".
    ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
    for a in $ARCHES; do
        (sed -n "s/|$a$//p" $ARCHLIST
        sed -n 's/|all$//p' $ARCHLIST
        cd $base/ftp
        find ./dists -maxdepth 1 \! -type d
        find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
        ) | sort -u | gzip -9 --rsyncable --no-name > arch-$a.list.gz
    done

    log "Generating suite lists"

    # Print every pool path referenced by the suite with the given numeric id:
    # first source files (incl. extra source references), then binary files.
    suite_list () {
        local suite_id="$(printf %d $1)"
        local query
        query="
SELECT DISTINCT './pool/' || c.name || '/' || f.filename
FROM
(SELECT sa.source AS source
FROM src_associations sa
WHERE sa.suite = $suite_id
UNION
SELECT esr.src_id
FROM extra_src_references esr
JOIN bin_associations ba ON esr.bin_id = ba.bin
WHERE ba.suite = $suite_id
UNION
SELECT b.source AS source
FROM bin_associations ba
JOIN binaries b ON ba.bin = b.id WHERE ba.suite = $suite_id) s
JOIN dsc_files df ON s.source = df.source
JOIN files f ON df.file = f.id
JOIN files_archive_map af ON f.id = af.file_id
JOIN component c ON af.component_id = c.id
JOIN archive ON af.archive_id = archive.id
WHERE archive.name = 'ftp-master'
"
        psql -F' ' -A -t -c "$query"

        query="
SELECT './pool/' || c.name || '/' || f.filename
FROM bin_associations ba
JOIN binaries b ON ba.bin = b.id
JOIN files f ON b.file = f.id
JOIN files_archive_map af ON f.id = af.file_id
JOIN component c ON af.component_id = c.id
JOIN archive ON af.archive_id = archive.id
WHERE ba.suite = $suite_id AND archive.name = 'ftp-master'
"
        psql -F' ' -A -t -c "$query"
    }

    psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
    while read id suite; do
        [[ -e $base/ftp/dists/$suite ]] || continue
        (
            (cd $base/ftp
            # Follow a suite symlink (e.g. testing -> codename) to its target.
            distname=$(cd dists; readlink $suite || echo $suite)
            find ./dists/$distname \! -type d
            # Also list the symlink entries themselves (those NOT pointing at distname).
            for distdir in ./dists/*; do
                [[ $(readlink $distdir) != $distname ]] || echo $distdir
            done
            )
            suite_list $id
        ) | sort -u | gzip -9 --rsyncable --no-name > suite-${suite}.list.gz
    done

    log "Finding everything on the ftp site to generate sundries"
    (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST

    rm -f sundries.list
    # sundries = files on disk that appear in none of the generated lists.
    zcat *.list.gz | cat - *.list | sort -u |
    diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list

    log "Generating files list"

    for a in $ARCHES; do
        (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
        cat - sundries.list dists.list project.list docs.list indices.list |
        sort -u | poolfirst > ../arch-$a.files
    done

    TESTING=$(dak admin suite-config get-value testing codename)
    STABLE=$(dak admin suite-config get-value stable codename)
    OLDSTABLE=$(dak admin suite-config get-value oldstable codename)
    OLDOLDSTABLE=$(dak admin suite-config get-value oldoldstable codename)
    (cd $base/ftp/
    for dist in sid $OLDOLDSTABLE $OLDSTABLE $STABLE $TESTING; do
        find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 --rsyncable --no-name > $base/ftp/indices/files/components/translation-$dist.list.gz
    done
    )
    # "typical" mirror set: i386 + amd64 plus p-u and all translation lists.
    (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-$OLDOLDSTABLE.list.gz ; zcat translation-$OLDSTABLE.list.gz ; zcat translation-$STABLE.list.gz ; zcat translation-$TESTING.list.gz) |
    sort -u | poolfirst > ../typical.files

    rm -f $ARCHLIST
    log "Done!"
    set -o pipefail
}
function mkchecksums() {
    # Per-archive md5sums index plus a dsync file list for mirror tooling.
    local archiveroot dsynclist md5list

    for archive in "${public_archives[@]}"; do
        archiveroot="$(get_archiveroot "${archive}")"
        dsynclist=$dbdir/dsync.${archive}.list
        md5list=${archiveroot}/indices/md5sums

        log "Creating md5 / dsync index file for ${archive}... "
        cd "$archiveroot"

        # FIXME: We should get rid of md5sums, but until then, keep it running.
        # People actually use this file.
        LD_LIBRARY_PATH=${bindir}:${LD_LIBRARY_PATH:-} ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
        LD_LIBRARY_PATH=${bindir}:${LD_LIBRARY_PATH:-} ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9 --rsyncable --no-name > ${md5list}.gz
    done
}
function mirror() {
    local archiveroot targetpath
    local -a tracepaths

    for archive in "${public_archives[@]}"; do
        archiveroot="$(get_archiveroot "${archive}")"
        targetpath="${mirrordir}/${archive}"
        log "Regenerating \"public\" mirror/${archive} hardlink fun"
        # --link-dest hardlinks unchanged files against the master tree;
        # the filter rules keep project/trace/ under our control while the
        # rest of the tree is mirrored (and pruned) verbatim.
        rsync -aH --link-dest ${archiveroot} \
            --delete --delete-after --delete-excluded \
            --exclude "Packages.*.new" --exclude "Sources.*.new" \
            --filter 'exclude /project/trace/*' \
            --filter 'protect /project/' \
            --filter 'protect /project/trace/' \
            --filter 'protect /project/trace/*' \
            --ignore-errors \
            ${archiveroot}/. ${targetpath}/.
        tracepaths+=("${targetpath}")
    done

    write_project_trace "${tracepaths[@]}"
}
function transitionsclean() {
    # Expire finished entries from the transitions tracker.
    log "Removing out of date transitions..."
    cd ${base}
    dak transitions -c -a
}
function dm() {
    # Export the Debian Maintainer upload permissions to the export dir.
    log "Updating DM permissions page"
    dak acl export-per-source dm >${exportdir}/dm.txt
}
function bts() {
    # Runs unprivileged: bts-categorize only talks to the BTS.
    log "Categorizing uncategorized bugs filed against ftp.debian.org"
    sudo -u dak-unpriv dak bts-categorize
}
function mirrorpush() {
    # Verify every InRelease's SHA1 manifest against the on-disk tree,
    # then trigger the mirror push for each archive that checks out clean.
    log "Checking the public archive copies..."

    local archiveroot targetpath

    for archive in "${public_archives[@]}"; do
        log "... archive: ${archive}"
        archiveroot="$(get_archiveroot "${archive}")"
        targetpath="${mirrordir}/${archive}"
        cd ${archiveroot}/dists

        broken=0

        for release in $(find . -name "InRelease"); do
            echo "Processing: ${release}"
            subdir=${release%/InRelease}
            # The input (see the process substitution below) is the slice of the
            # InRelease file between the "SHA1:" and "SHA256:" section headers.
            while read SHASUM SIZE NAME; do
                if ! [[ -f ${subdir}/${NAME} ]]; then
                    bname=$(basename ${NAME})
                    if [[ "${bname}" =~ ^(Packages|Sources|Contents-[a-zA-Z0-9-]+|Translation-[a-zA-Z_]+|Components-[a-zA-Z0-9-]+\.yml|icons-(128x128|64x64)\.tar)$ ]]; then
                        # We don't keep unpacked files, don't check for their existance.
                        # We might want to go and check their unpacked shasum, but right now
                        # I don't care. I believe it should be enough if all the packed shasums
                        # match.
                        continue
                    fi
                    broken=$(( broken + 1 ))
                    echo "File ${subdir}/${NAME} is missing"
                    continue
                fi

                # We do have symlinks in the tree (see the contents files currently).
                # So we use "readlink -f" to check the size of the target, as thats basically
                # what gen-releases does
                fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
                if [[ ${fsize} -ne ${SIZE} ]]; then
                    broken=$(( broken + 1 ))
                    echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
                    continue
                fi

                fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
                fshasum=${fshasum%% *}
                if [[ ${fshasum} != ${SHASUM} ]]; then
                    broken=$(( broken + 1 ))
                    echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
                    continue
                fi
            done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
        done

        if [[ $broken -gt 0 ]]; then
            log_error "Trouble with the public mirror for ${archive}, found ${broken} errors"
            continue
        else
            log "Starting the mirrorpush for ${archive}"
            # NB: the ';;&' arms fall through to '*)', so the debug/backports
            # archives get their pusharg here AND fname/pname from '*)'.
            case ${archive} in
                ftp-master)
                    fname="mirrorstart"
                    pusharg=""
                    pname="debian"
                    ;;
                security)
                    fname="mirrorstart"
                    pusharg="-a security"
                    pname="debian-security"
                    ;;
                debian-debug)
                    pusharg="-a debug"
                    ;;&
                debian-security-debug)
                    pusharg="-a security-debug"
                    ;;&
                backports)
                    pusharg="-a backports"
                    ;;&
                *)
                    fname="mirrorstart.${archive}"
                    pname=${archive}
                    ;;
            esac
            mkdir -p ${webdir}/${pname}/project/trace/
            ln -sf ${mirrordir}/${archive}/project/trace/master ${webdir}/${pname}/project/trace/master
            ln -sf ${mirrordir}/${archive}/project/trace/master ${webdir}/${pname}/project/trace/ftp-master.debian.org
            # Publish the trigger file, then kick the push in the background.
            date -u > ${webdir}/${fname}
            echo "Using dak v1" >> ${webdir}/${fname}
            echo "Running on host $(hostname -f)" >> ${webdir}/${fname}
            sudo -u archvsync runmirrors ${pusharg} > ~dak/runmirrors-${archive}.log 2>&1 &
        fi
    done
}
function mirrorpush-backports() {
    # Delegate the backports mirror sync to the backports user's own script.
    log "Syncing backports mirror"
    sudo -u backports /home/backports/bin/update-archive
}
function mirrorpush-release() {
    # Background push to the CD-builder host; output goes to a per-run log.
    log "Pushing cdbuilder host"
    sudo -u archvsync runmirrors -a release > ~dak/runmirrors-release.log 2>&1 &
}
function i18n2() {
    log "Exporting package data foo for i18n project"
    STAMP=$(date "+%Y%m%d%H%M")
    mkdir -p ${scriptdir}/i18n/${STAMP}
    cd ${scriptdir}/i18n/${STAMP}
    # One control-suite dump per suite, named after the suite's codename.
    for suite in stable testing unstable; do
        codename=$(dak admin suite-config get-value ${suite} codename)
        echo "Codename is ${codename}"
        dak control-suite -l ${suite} >${codename}
    done
    # Sign the timestamp so i18n1 can later verify this export came from us.
    echo "${STAMP}" > timestamp
    gpg --homedir /srv/ftp-master.debian.org/s3kr1t/dot-gnupg --no-options --batch --no-tty --armour --default-key 473041FA --detach-sign -o timestamp.gpg timestamp
    rm -f md5sum
    md5sum * > md5sum
    # Publish via the webdir symlink, then prune exports older than two days.
    cd ${webdir}/
    ln -sfT ${scriptdir}/i18n/${STAMP} i18n
    cd ${scriptdir}
    find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
}
function stats() {
    log "Updating stats data"
    cd ${configdir}
    # The ftpstats/R steps are intentionally disabled, only the dak stats remain.
    #${scriptsdir}/update-ftpstats ${base}/log/* > ${base}/misc/ftpstats.data
    #R --slave --vanilla < ${base}/misc/ftpstats.R
    dak stats arch-space > ${webdir}/arch-space
    dak stats pkg-nums > ${webdir}/pkg-nums
}
  486. function cleantransactions() {
  487. log "Cleanup transaction ids older than 3 months"
  488. cd ${base}/backup/
  489. find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -delete
  490. }
  491. function logstats() {
  492. if [ "${functionname}" = ftp-master.debian.org ]; then
  493. cd ${TMPDIR}
  494. ${masterdir}/tools/logs.py "$1"
  495. fi
  496. }
# save timestamp when we start
function savetimestamp() {
    # NOW is presumably set by the calling cron wrapper — not visible here.
    echo ${NOW} > "${dbdir}/dinstallstart"
}
function maillogfile() {
    # Mail the complete dinstall log to the cron alias.
    mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org < "${LOGFILE}"
}
  504. function testingsourcelist() {
  505. dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
  506. }
# Function to update a "statefile" telling people what we are doing
# (more or less).
#
# This should be called with the argument(s)
# - Status name we want to show.
#
function state() {
    RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
    # Overwrite the statefile wholesale; readers always see a complete file.
    cat >"${DINSTALLSTATE}" <<EOF
Dinstall start: ${DINSTALLBEGIN}
Current action: $*
Action start: ${RIGHTNOW}
EOF
}
# extract changelogs and stuff
function changelogs() {
    log "Extracting changelogs"
    dak make-changelog -e -a ftp-master
    # NOTE(review): under 'set -e' this '[[ -f ]] &&' line aborts the function
    # when the file is absent — confirm that is the intended behavior.
    [[ -f ${exportdir}/changelogs/filelist.yaml ]] && xz -f ${exportdir}/changelogs/filelist.yaml
    mkdir -p ${exportpublic}/changelogs
    cd ${exportpublic}/changelogs
    rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
    # Fire-and-forget push to the static mirror component.
    sudo -H -u staticsync /usr/local/bin/static-update-component metadata.ftp-master.debian.org >/dev/null 2>&1 &

    # The backports variant is currently disabled.
    # dak make-changelog -e -a backports
    # [[ -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml ]] && xz -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml
    # mkdir -p /srv/backports-master.debian.org/rsync/export/changelogs
    # cd /srv/backports-master.debian.org/rsync/export/changelogs
    # rsync -aHW --delete --delete-after --ignore-errors /srv/backports-master.debian.org/export/changelogs/. .
}
  536. # Generate a list of extra mirror files, sha256sum em and sign that
  537. function signotherfiles() {
  538. log "Signing extra mirror files"
  539. local archiveroot
  540. for archive in "${public_archives[@]}"; do
  541. log "... archive: ${archive}"
  542. archiveroot="$(get_archiveroot "${archive}")"
  543. local TMPLO=$( gettempfile )
  544. cd ${archiveroot}
  545. rm -f extrafiles
  546. sha256sum $(find * -type f | egrep -v '(pool|i18n|dep11|source)/|Contents-.*\.(gz|diff)|installer|binary-|(In)?Release(.gpg)?|\.changes' | sort) > ${TMPLO}
  547. gpg --no-options --batch --no-tty --armour --personal-digest-preferences=SHA256 --homedir /srv/ftp-master.debian.org/s3kr1t/dot-gnupg --no-options --batch --no-tty --armour --default-key 2B90D010 --clearsign --output ${archiveroot}/extrafiles ${TMPLO}
  548. rm -f ${TMPLO}
  549. done
  550. }
function startup() {
    # Mark the start of the run and arm the error handler.
    touch "${DINSTALLSTART}"
    log_timestamp "startup"
    trap onerror ERR
}
function postcronscript() {
    # Disarm the error trap, record log statistics, mark the end of the run.
    trap - ERR
    logstats ${LOGFILE}
    touch "${DINSTALLEND}"
}