A curl_parameters => curl_parameters +2 -0
@@ 0,0 1,2 @@
+-s
+--user-agent Accendo/1.0
M main.sh => main.sh +5 -2
@@ 1,10 1,11 @@
#!/bin/bash
POST_ID=$1
PAGE_SIZE=100
+SCRIPT_DIR=$PWD
trap times EXIT;
mkdir -p ${POST_ID}; cd ${POST_ID}
echo -n "Retrieving page 1 ... "
-curl -so ${POST_ID}.1.rp "https://www.glowfic.com/posts/${POST_ID}?per_page=${PAGE_SIZE}"
+curl -K $SCRIPT_DIR/curl_parameters -o ${POST_ID}.1.rp "https://www.glowfic.com/posts/${POST_ID}?per_page=${PAGE_SIZE}"
if [ -f ${POST_ID}.1.rp ]; then
echo "Done!"
echo -n "Generating index ... "
@@ 17,7 18,7 @@ if (( MAX_PAGE > 1 )); then
seq 2 ${MAX_PAGE} > ${POST_ID}.index
echo "Done!"
echo "Retrieving pages ... "
- xargs -a ${POST_ID}.index -P 4 -I {} -t curl -so ${POST_ID}.{}.rp "https://www.glowfic.com/posts/${POST_ID}?per_page=${PAGE_SIZE}&page={}"
+ xargs -a ${POST_ID}.index -P 4 -I {} -t curl -K $SCRIPT_DIR/curl_parameters -o ${POST_ID}.{}.rp "https://www.glowfic.com/posts/${POST_ID}?per_page=${PAGE_SIZE}&page={}"
echo "Done!"
echo -n "Verifying pages ..."
for PAGE in `seq 1 ${MAX_PAGE}`; do
@@ 35,4 36,6 @@ if (( MAX_PAGE > 1 )); then
else
echo "No pages in index! Skipping page retrievals."
fi
+echo -n "Downloaded size: "
+du -ch . | tail -n 1 | cut -f 1
echo "Completed retrieval of ${POST_ID}!"