Add some flags to make curl try harder to download files for those hosts that are less than reliable.

stormdragon2976 2023-02-08 16:03:14 -05:00
parent 8dcdea5ed1
commit 3085939e35


@@ -291,7 +291,7 @@ download() {
 fi
 # Skip if the item is in cache.
 test -e "${cache}/${dest}" && continue
-if ! curl -L4 --output "${cache}/${dest}" "${i}" ; then
+if ! curl -L4 -C - --retry 10 --output "${cache}/${dest}" "${i}" ; then
 echo "Could not download \"$i\"..."
 exit 1
 fi
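For context, -C - asks curl to resume a partially downloaded file from where it left off, --retry 10 retries up to ten times on transient failures, -L follows redirects, and -4 forces IPv4. A minimal standalone sketch of the same flags, with a placeholder URL and output path:

    # Resume a partial download and retry flaky connections up to 10 times.
    # The URL and destination below are illustrative only.
    curl -L4 -C - --retry 10 --output "/tmp/example-game.zip" "https://example.com/example-game.zip"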
@@ -437,7 +437,7 @@ echo "Loading documentation, please wait..."
 local gameDoc="$(find "$gamePath" -type f -iname 'user_manual.html' -or -iname 'user_manual.htm' | head -1)"
 # Game name specific docs, add the name to the for loop.
 if [[ -z "$gameDoc" ]]; then
-for i in "troopanum.txt" ; do
+for i in "troopanum.txt" "superdeekout.txt" ; do
 gameDoc="$(find "$gamePath" -type f -iname "$i" -or -iname 'manual.htm' | head -1)"
 done
 fi
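For reference, the documentation lookup above combines find's -iname test with -or and keeps the first hit. A minimal standalone sketch of the same idea (the game path is a placeholder, and the break is an assumption not present in the script) stops at the first candidate that resolves:

    # Hypothetical lookup: try each candidate manual name, plus a generic manual.htm.
    gamePath="$HOME/games/troopanum"    # placeholder path, not taken from the script
    for name in "troopanum.txt" "superdeekout.txt" ; do
        gameDoc="$(find "$gamePath" -type f -iname "$name" -or -iname 'manual.htm' | head -1)"
        # Stop once a candidate matched (the break is not in the original loop).
        [[ -n "$gameDoc" ]] && break
    done
    echo "Documentation: ${gameDoc:-not found}"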