# -*- mode:sh -*-
# If we error out this one is called, *FOLLOWED* by cleanup in common
function onerror() {
    ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
    subject="ATTENTION ATTENTION!"
    if [[ ${error:-false} = false ]]; then
        subject="${subject} (continued)"
    else
        subject="${subject} (interrupted)"
    fi
    subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE:-} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
    if [[ -r ${STAGEFILE:-/nonexistant}.log ]]; then
        cat "${STAGEFILE}.log"
    else
        echo "file ${STAGEFILE:-}.log does not exist, sorry"
    fi | mail -s "${subject}" -a "X-Debian: DAK" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
}
########################################################################
# the actual functions follow                                          #
########################################################################
# pushing merkels QA user, part one
function qa1() {
    log "Telling QA user that we start dinstall"
    ssh -n -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
}
function mailingliststxt() {
    cd ${ftpdir}/doc
    log "Updating archive version of mailing-lists.txt"
    wget ${wgetopts} https://www.debian.org/misc/mailing-lists.txt
}
function pseudopackages() {
    cd ${scriptdir}/masterfiles
    log "Updating archive version of pseudo-packages"
    for file in maintainers description; do
        wget ${wgetopts} https://bugs.debian.org/pseudopackages/pseudo-packages.${file}
    done
}
# Updating various files
function bugdoctxt() {
    log "Updating Bugs docu"
    cd $configdir
    $scriptsdir/update-bugdoctxt
}
# The first i18n one, syncing new descriptions
function i18n1() {
    log "Synchronizing i18n package descriptions"
    # First sync their newest data
    cd ${scriptdir}/i18nsync
    rsync -rtq --safe-links --chmod=F644,D755 --delete --delete-after ddtp-sync:/does/not/matter . || true

    # Now check if we still know about the packages for which they created the files
    # is the timestamp signed by us?
    if gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp; then
        # now read it. As it's signed by us we are sure the content is what we expect, no need
        # to do more here. And we only test -d a directory on it anyway.
        TSTAMP=$(cat timestamp)
        # do we have the dir still?
        if [[ -d ${scriptdir}/i18n/${TSTAMP} ]]; then
            # Let's check!
            if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
                # Yay, it worked, let's copy it around
                for dir in ${extimportdists}; do
                    if [[ -d dists/${dir}/ ]]; then
                        cd dists/${dir}/main/i18n
                        rsync -aq --delete --delete-after --exclude by-hash --exclude "Translation-en.*" --exclude "Translation-*.diff/" . ${ftpdir}/dists/${dir}/main/i18n/.
                    fi
                    cd ${scriptdir}/i18nsync
                done
            else
                echo "ARRRR, bad guys, wrong files, ARRR"
                echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
            fi
        else
            echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
            echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
        fi
    else
        echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
        echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
    fi
}
# Syncing AppStream/DEP-11 data
function dep11() {
    log "Synchronizing AppStream metadata"
    # First sync their newest data
    local dep11dir="${scriptdir}/dep11"
    mkdir -p ${dep11dir}
    cd ${dep11dir}
    rsync -rtq --safe-links --chmod=F644,D755 --delete --delete-after dep11-sync:/does/not/matter .

    # Let's check!
    if ${scriptsdir}/dep11-basic-validate.py . ${dep11dir}/; then
        # Yay, it worked, let's copy it around
        for dir in ${extimportdists}; do
            if [[ -d ${dir}/ ]]; then
                for comp in main contrib non-free; do
                    mkdir -p ${ftpdir}/dists/${dir}/${comp}/dep11
                    cd ${dir}/${comp}
                    rsync -rtq --delete --delete-after --exclude by-hash --exclude "./*.tmp" . ${ftpdir}/dists/${dir}/${comp}/dep11/.
                    cd ${dep11dir}
                done
            fi
        done
    else
        echo "ARRRR, bad guys, wrong files, ARRR"
        echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" mak@debian.org
    fi
}
function cruft() {
    log "Checking for cruft in overrides"
    dak check-overrides
}
function dominate() {
    log "Removing obsolete source and binary associations"
    dak dominate
    dak manage-debug-suites unstable-debug experimental-debug stretch-backports-debug buster-backports-debug testing-proposed-updates-debug
}
function autocruft() {
    log "Check for obsolete binary packages"
    dak auto-decruft -s unstable
    dak auto-decruft -s experimental --if-newer-version-in unstable --if-newer-version-in-rm-msg "NVIU"
    dak auto-decruft -s testing-proposed-updates --if-newer-version-in testing --if-newer-version-in-rm-msg "NVIT" --decruft-equal-versions
}
function overrides() {
    log "Writing overrides into text files"
    cd $overridedir
    dak make-overrides
}
function mpfm() {
    local archiveroot
    log "Generating package / file mapping"
    for archive in "${public_archives[@]}"; do
        log " archive: ${archive}"
        archiveroot="$(get_archiveroot "${archive}")"
        dak make-pkg-file-mapping "${archive}" | bzip2 -9 > "${archiveroot}/indices/package-file.map.bz2"
    done
}
function packages() {
    log " Generating Packages/Sources for: ${public_archives[*]}"
    dak generate-packages-sources2 -a "${public_archives[*]}"
    log " Generating Contents for: ${public_archives[*]}"
    dak contents generate -a "${public_archives[*]}"
}
function pdiff() {
    log "Generating pdiff files"
    dak generate-index-diffs
}
function release() {
    log "Generating Release files"
    dak generate-releases -a "${public_archives[*]}"
}
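# Deduplicate the archive: hardlink identical files in the pool, then in a few static directories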
function dedup() {
    log "Dedupe archive"
    dak archive-dedup-pool
    for archive in "${public_archives[@]}"; do
        archiveroot="$(get_archiveroot "${archive}")"
        cd "${archiveroot}"
        for dir in doc indices project tools; do
            if [[ -d ${dir} ]]; then
                jdupes --hardlink --noempty --quiet --summarize --recurse ${dir}
            fi
        done
    done
}
function mkmaintainers() {
    local archiveroot
    local mkmindices
    log 'Creating Maintainers index ... '
    for archive in "${public_archives[@]}"; do
        archiveroot="$(get_archiveroot "${archive}")"
        mkmindices="${archiveroot}/indices"
        if ! [[ -d ${mkmindices} ]]; then
            mkdir "${mkmindices}"
        fi
        cd "${mkmindices}"
        rm -f Maintainers Maintainers.gz Uploaders Uploaders.gz
        dak make-maintainers -a "${archive}" ${scriptdir}/masterfiles/pseudo-packages.maintainers
        gzip -9v --rsyncable --no-name <Maintainers >Maintainers.gz
        gzip -9v --rsyncable --no-name <Uploaders >Uploaders.gz
    done
}
function copyoverrides() {
    log 'Copying override files into public view ...'
    (
        shopt -s nullglob
        rm -f ${indices}/override.*.gz
        TESTING=$(dak admin suite-config get-value testing codename)
        STABLE=$(dak admin suite-config get-value stable codename)
        OLDSTABLE=$(dak admin suite-config get-value oldstable codename)
        OLDOLDSTABLE=$(dak admin suite-config get-value oldoldstable codename)
        for ofile in ${overridedir}/override.{$OLDOLDSTABLE,$OLDSTABLE,$STABLE,$TESTING,sid}.{,extra.}{main,contrib,non-free}*; do
            bname=${ofile##*/}
            gzip -9cv --rsyncable --no-name ${ofile} > ${indices}/${bname}.gz
            chmod g+w ${indices}/${bname}.gz
        done
    )
}
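# Build the arch-*, suite-* and "typical" file lists under indices/files/,
# combining postgres queries with a find over the on-disk ftp tree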
function mkfilesindices() {
    set +o pipefail
    umask 002
    rm -f $base/ftp/indices/files/*.files
    rm -f $base/ftp/indices/files/components/*.list.gz

    cd $base/ftp/indices/files/components
    ARCHLIST=$(tempfile)

    log "Querying postgres"
    local query="
      SELECT './pool/' || c.name || '/' || f.filename AS path, a.arch_string AS arch_string
      FROM files f
      JOIN files_archive_map af ON f.id = af.file_id
      JOIN component c ON af.component_id = c.id
      JOIN archive ON af.archive_id = archive.id
      LEFT OUTER JOIN
        (binaries b
         JOIN architecture a ON b.architecture = a.id)
        ON f.id = b.file
      WHERE archive.name = 'ftp-master'
      ORDER BY path, arch_string
    "
    psql -At -c "$query" >$ARCHLIST

    includedirs () {
        perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
    }
    poolfirst () {
        perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
    }

    log "Generating sources list"
    (
        sed -n 's/|$//p' $ARCHLIST
        cd $base/ftp
        find ./dists -maxdepth 1 \! -type d
        find ./dists \! -type d | grep "/source/"
    ) | sort -u | gzip -9 --rsyncable --no-name > source.list.gz

    log "Generating arch lists"
    ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
    for a in $ARCHES; do
        (sed -n "s/|$a$//p" $ARCHLIST
         sed -n 's/|all$//p' $ARCHLIST
         cd $base/ftp
         find ./dists -maxdepth 1 \! -type d
         find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
        ) | sort -u | gzip -9 --rsyncable --no-name > arch-$a.list.gz
    done

    log "Generating suite lists"
    suite_list () {
        local suite_id="$(printf %d $1)"
        local query
        query="
          SELECT DISTINCT './pool/' || c.name || '/' || f.filename
          FROM
            (SELECT sa.source AS source
               FROM src_associations sa
              WHERE sa.suite = $suite_id
             UNION
             SELECT esr.src_id
               FROM extra_src_references esr
               JOIN bin_associations ba ON esr.bin_id = ba.bin
              WHERE ba.suite = $suite_id
             UNION
             SELECT b.source AS source
               FROM bin_associations ba
               JOIN binaries b ON ba.bin = b.id
              WHERE ba.suite = $suite_id) s
          JOIN dsc_files df ON s.source = df.source
          JOIN files f ON df.file = f.id
          JOIN files_archive_map af ON f.id = af.file_id
          JOIN component c ON af.component_id = c.id
          JOIN archive ON af.archive_id = archive.id
          WHERE archive.name = 'ftp-master'
        "
        psql -F' ' -A -t -c "$query"

        query="
          SELECT './pool/' || c.name || '/' || f.filename
          FROM bin_associations ba
          JOIN binaries b ON ba.bin = b.id
          JOIN files f ON b.file = f.id
          JOIN files_archive_map af ON f.id = af.file_id
          JOIN component c ON af.component_id = c.id
          JOIN archive ON af.archive_id = archive.id
          WHERE ba.suite = $suite_id AND archive.name = 'ftp-master'
        "
        psql -F' ' -A -t -c "$query"
    }

    psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
    while read id suite; do
        [[ -e $base/ftp/dists/$suite ]] || continue
        (
            (cd $base/ftp
             distname=$(cd dists; readlink $suite || echo $suite)
             find ./dists/$distname \! -type d
             for distdir in ./dists/*; do
                 [[ $(readlink $distdir) != $distname ]] || echo $distdir
             done
            )
            suite_list $id
        ) | sort -u | gzip -9 --rsyncable --no-name > suite-${suite}.list.gz
    done

    log "Finding everything on the ftp site to generate sundries"
    (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST

    rm -f sundries.list
    zcat *.list.gz | cat - *.list | sort -u |
    diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list

    log "Generating files list"
    for a in $ARCHES; do
        (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
        cat - sundries.list dists.list project.list docs.list indices.list |
        sort -u | poolfirst > ../arch-$a.files
    done

    TESTING=$(dak admin suite-config get-value testing codename)
    STABLE=$(dak admin suite-config get-value stable codename)
    OLDSTABLE=$(dak admin suite-config get-value oldstable codename)
    OLDOLDSTABLE=$(dak admin suite-config get-value oldoldstable codename)

    (cd $base/ftp/
     for dist in sid $OLDOLDSTABLE $OLDSTABLE $STABLE $TESTING; do
         find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 --rsyncable --no-name > $base/ftp/indices/files/components/translation-$dist.list.gz
     done
    )

    (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-$OLDOLDSTABLE.list.gz ; zcat translation-$OLDSTABLE.list.gz ; zcat translation-$STABLE.list.gz ; zcat translation-$TESTING.list.gz) |
    sort -u | poolfirst > ../typical.files

    rm -f $ARCHLIST
    log "Done!"
    set -o pipefail
}
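# Create the md5sums index and the dsync file list for each public archive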
function mkchecksums() {
    local archiveroot dsynclist md5list
    for archive in "${public_archives[@]}"; do
        archiveroot="$(get_archiveroot "${archive}")"
        dsynclist=$dbdir/dsync.${archive}.list
        md5list=${archiveroot}/indices/md5sums

        log "Creating md5 / dsync index file for ${archive}... "
        cd "$archiveroot"
        # FIXME: We should get rid of md5sums, but until then, keep it running.
        # People actually use this file.
        LD_LIBRARY_PATH=${bindir}:${LD_LIBRARY_PATH:-} ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
        LD_LIBRARY_PATH=${bindir}:${LD_LIBRARY_PATH:-} ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9 --rsyncable --no-name > ${md5list}.gz
    done
}
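# Regenerate the hardlinked public mirror copy of each archive and update the project/trace files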
function mirror() {
    local archiveroot targetpath
    local -a tracepaths

    for archive in "${public_archives[@]}"; do
        archiveroot="$(get_archiveroot "${archive}")"
        targetpath="${mirrordir}/${archive}"
        log "Regenerating \"public\" mirror/${archive} hardlink fun"
        rsync -aH --link-dest ${archiveroot} \
              --delete --delete-after --delete-excluded \
              --exclude "Packages.*.new" --exclude "Sources.*.new" \
              --filter 'exclude /project/trace/*' \
              --filter 'protect /project/' \
              --filter 'protect /project/trace/' \
              --filter 'protect /project/trace/*' \
              --ignore-errors \
              ${archiveroot}/. ${targetpath}/.
        tracepaths+=("${targetpath}")
    done
    write_project_trace "${tracepaths[@]}"
}
function expire() {
    log "Expiring old database dumps..."
    cd ${base}/backup
    ${scriptsdir}/expire_dumps -d . -p -f "dump_*"
}
function transitionsclean() {
    log "Removing out of date transitions..."
    cd ${base}
    dak transitions -c -a
}
function dm() {
    log "Updating DM permissions page"
    dak acl export-per-source dm >${exportdir}/dm.txt
}
function bts() {
    log "Categorizing uncategorized bugs filed against ftp.debian.org"
    sudo -u dak-unpriv dak bts-categorize
}
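# Verify the public archive copies against their InRelease files (size and SHA1 of every
# listed file) and, if an archive checks out, kick off its mirror push via runmirrors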
function mirrorpush() {
    log "Checking the public archive copies..."

    local archiveroot targetpath

    for archive in "${public_archives[@]}"; do
        log "... archive: ${archive}"
        archiveroot="$(get_archiveroot "${archive}")"
        targetpath="${mirrordir}/${archive}"
        cd ${archiveroot}/dists

        broken=0
        for release in $(find . -name "InRelease"); do
            echo "Processing: ${release}"
            subdir=${release%/InRelease}
            while read SHASUM SIZE NAME; do
                if ! [[ -f ${subdir}/${NAME} ]]; then
                    bname=$(basename ${NAME})
                    if [[ "${bname}" =~ ^(Packages|Sources|Contents-[a-zA-Z0-9-]+|Translation-[a-zA-Z_]+|Components-[a-zA-Z0-9-]+\.yml|icons-(128x128|64x64)\.tar)$ ]]; then
                        # We don't keep unpacked files, don't check for their existence.
                        # We might want to go and check their unpacked shasum, but right now
                        # I don't care. I believe it should be enough if all the packed shasums
                        # match.
                        continue
                    fi
                    broken=$(( broken + 1 ))
                    echo "File ${subdir}/${NAME} is missing"
                    continue
                fi

                # We do have symlinks in the tree (see the contents files currently).
                # So we use "readlink -f" to check the size of the target, as that's basically
                # what gen-releases does
                fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
                if [[ ${fsize} -ne ${SIZE} ]]; then
                    broken=$(( broken + 1 ))
                    echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
                    continue
                fi

                fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
                fshasum=${fshasum%% *}
                if [[ ${fshasum} != ${SHASUM} ]]; then
                    broken=$(( broken + 1 ))
                    echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
                    continue
                fi
            done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
        done

        if [[ $broken -gt 0 ]]; then
            log_error "Trouble with the public mirror for ${archive}, found ${broken} errors"
            continue
        else
            log "Starting the mirrorpush for ${archive}"
            case ${archive} in
                ftp-master)
                    fname="mirrorstart"
                    pusharg=""
                    pname="debian"
                    ;;
                security)
                    fname="mirrorstart"
                    pusharg="-a security"
                    pname="debian-security"
                    ;;
                debian-debug)
                    pusharg="-a debug"
                    ;;&
                debian-security-debug)
                    pusharg="-a security-debug"
                    ;;&
                backports)
                    pusharg="-a backports"
                    ;;&
                *)
                    fname="mirrorstart.${archive}"
                    pname=${archive}
                    ;;
            esac
            mkdir -p ${webdir}/${pname}/project/trace/
            ln -sf ${mirrordir}/${archive}/project/trace/master ${webdir}/${pname}/project/trace/master
            ln -sf ${mirrordir}/${archive}/project/trace/master ${webdir}/${pname}/project/trace/ftp-master.debian.org
            date -u > ${webdir}/${fname}
            echo "Using dak v1" >> ${webdir}/${fname}
            echo "Running on host $(hostname -f)" >> ${webdir}/${fname}
            sudo -u archvsync runmirrors ${pusharg} > ~dak/runmirrors-${archive}.log 2>&1 &
        fi
    done
}
function mirrorpush-backports() {
    log "Syncing backports mirror"
    sudo -u backports /home/backports/bin/update-archive
}
function mirrorpush-release() {
    log "Pushing cdbuilder host"
    sudo -u archvsync runmirrors -a release > ~dak/runmirrors-release.log 2>&1 &
}
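# The second i18n part: export the per-suite package lists, sign the timestamp and
# publish them for the i18n project; exports older than two days are removed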
function i18n2() {
    log "Exporting package data foo for i18n project"
    STAMP=$(date "+%Y%m%d%H%M")
    mkdir -p ${scriptdir}/i18n/${STAMP}
    cd ${scriptdir}/i18n/${STAMP}
    for suite in stable testing unstable; do
        codename=$(dak admin suite-config get-value ${suite} codename)
        echo "Codename is ${codename}"
        dak control-suite -l ${suite} >${codename}
    done

    echo "${STAMP}" > timestamp
    gpg --homedir /srv/ftp-master.debian.org/s3kr1t/dot-gnupg --no-options --batch --no-tty --armour --default-key 473041FA --detach-sign -o timestamp.gpg timestamp

    rm -f md5sum
    md5sum * > md5sum
    cd ${webdir}/
    ln -sfT ${scriptdir}/i18n/${STAMP} i18n

    cd ${scriptdir}
    find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
}
function stats() {
    log "Updating stats data"
    cd ${configdir}
    #${scriptsdir}/update-ftpstats ${base}/log/* > ${base}/misc/ftpstats.data
    #R --slave --vanilla < ${base}/misc/ftpstats.R
    dak stats arch-space > ${webdir}/arch-space
    dak stats pkg-nums > ${webdir}/pkg-nums
}
function cleantransactions() {
    log "Cleanup transaction ids older than 3 months"
    cd ${base}/backup/
    find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -delete
}
function logstats() {
    if [ "${functionname}" = ftp-master.debian.org ]; then
        cd ${TMPDIR}
        ${masterdir}/tools/logs.py "$1"
    fi
}
# save timestamp when we start
function savetimestamp() {
    echo ${NOW} > "${dbdir}/dinstallstart"
}
function maillogfile() {
    mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org < "${LOGFILE}"
}
function testingsourcelist() {
    dak ls -s testing -f heidi -r . | egrep 'source$' > ${webdir}/testing.list
}
# Function to update a "statefile" telling people what we are doing
# (more or less).
#
# This should be called with the argument(s)
# - Status name we want to show.
#
function state() {
    RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
    cat >"${DINSTALLSTATE}" <<EOF
Dinstall start: ${DINSTALLBEGIN}
Current action: $*
Action start: ${RIGHTNOW}
EOF
}
# extract changelogs and stuff
function changelogs() {
    log "Extracting changelogs"
    dak make-changelog -e -a ftp-master
    [[ -f ${exportdir}/changelogs/filelist.yaml ]] && xz -f ${exportdir}/changelogs/filelist.yaml
    mkdir -p ${exportpublic}/changelogs
    cd ${exportpublic}/changelogs
    rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
    sudo -H -u staticsync /usr/local/bin/static-update-component metadata.ftp-master.debian.org >/dev/null 2>&1 &

    # dak make-changelog -e -a backports
    # [[ -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml ]] && xz -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml
    # mkdir -p /srv/backports-master.debian.org/rsync/export/changelogs
    # cd /srv/backports-master.debian.org/rsync/export/changelogs
    # rsync -aHW --delete --delete-after --ignore-errors /srv/backports-master.debian.org/export/changelogs/. .
}
# Generate a list of extra mirror files, sha256sum em and sign that
function signotherfiles() {
    log "Signing extra mirror files"

    local archiveroot

    for archive in "${public_archives[@]}"; do
        log "... archive: ${archive}"
        archiveroot="$(get_archiveroot "${archive}")"
        local TMPLO=$( gettempfile )

        cd ${archiveroot}
        rm -f extrafiles
        sha256sum $(find * -type f | egrep -v '(pool|i18n|dep11|source)/|Contents-.*\.(gz|diff)|installer|binary-|(In)?Release(.gpg)?|\.changes' | sort) > ${TMPLO}
        gpg --no-options --batch --no-tty --armour --personal-digest-preferences=SHA256 --homedir /srv/ftp-master.debian.org/s3kr1t/dot-gnupg --no-options --batch --no-tty --armour --default-key 2B90D010 --clearsign --output ${archiveroot}/extrafiles ${TMPLO}
        rm -f ${TMPLO}
    done
}
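# Run at the start and end of each cron script: set/clear the error trap and touch the stamp files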
function startup() {
    touch "${DINSTALLSTART}"
    log_timestamp "startup"
    trap onerror ERR
}
function postcronscript() {
    trap - ERR
    logstats ${LOGFILE}
    touch "${DINSTALLEND}"
}