scrape.sh

#!/usr/bin/env bash
# Mirror each URL given on the command line into its own subdirectory.
ROOT="${HOME}/Documents/websides" # no trailing slash!
download() {
    local link="$1"
    # Strip the scheme so the URL can double as a directory name.
    local subdir
    subdir=$(echo "$link" | sed 's~^https\?://~~')
    local DEST="${ROOT}/${subdir}"
    wget -r -l inf -P "$DEST" "$link"
}

# Cap the size of any single file written by the shell and its children.
# Set only the soft limit (-S) so it can be restored afterwards; bash
# counts in 1024-byte blocks, so 25600 is roughly 25 MiB.
ulimit -S -f 25600
for link in "$@"; do
    download "$link"
done
ulimit -S -f unlimited # restore the soft limit (the hard limit was never lowered)
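
To run it, save the script, make it executable, and pass one or more start URLs (the URLs below are placeholders, not from the original):

chmod +x scrape.sh
./scrape.sh https://example.com https://example.org/docs/

Each site is mirrored under ~/Documents/websides/<host>/..., since the wget -P prefix is built from the URL with its scheme stripped.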