mirror of https://github.com/kaboomserver/server.git
synced 2025-01-02 12:52:22 +00:00
Rewrite server update script (#148)
* refactor!: rewrite update script
* refactor: split common URL logic into download_with_args
* feat: add skip_404 arg to url/zip download type
This commit is contained in:
parent 1f1deda15c
commit c4ca438600
11 changed files with 373 additions and 69 deletions
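
The new layout replaces the three fetch_* scripts with a single entry point, scripts/update.sh, driven by the declarative scripts/downloads.json. A hedged usage sketch, based on the help text in scripts/update.sh further down (the DEBUG variable comes from scripts/_common.sh):

    scripts/update.sh                    # update server.jar and every plugin
    scripts/update.sh server             # only entries whose JSON path starts with "server"
    scripts/update.sh plugins/internal   # only the internal plugins
    DEBUG=1 scripts/update.sh plugins    # same, with debug() tracing enabled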
19  .github/workflows/main.yml  vendored
@@ -9,27 +9,20 @@ permissions:
 jobs:
   update:
-    if: github.repository == 'kaboomserver/server'
+    if: github.repository == 'kaboomserver/server' || github.event_name == 'workflow_dispatch'
     runs-on: ubuntu-latest
 
     steps:
     - uses: actions/checkout@v3
 
-    - name: Fetch server jar
-      run: scripts/fetch_server.sh
-
-    - name: Fetch internal plugins
-      run: scripts/fetch_internal_plugins.sh
-
-    - name: Fetch external plugins
-      run: scripts/fetch_external_plugins.sh
-
-    - name: Update server and plugins
+    - name: Update server jar and plugins
+      id: update
       run: |
-        cp fetched_server/server.jar .
-        cp fetched_plugins/*.jar plugins/
+        scripts/update.sh
+        git diff --quiet . || echo "changed=true" >> "$GITHUB_OUTPUT"
 
     - name: Push changes
+      if: steps.update.outputs.changed == 'true'
       run: |
         git config --global user.name 'kaboombot'
         git config --global user.email '58372747+kaboombot@users.noreply.github.com'
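
For clarity, the change detection added above works roughly like this (a sketch of the same mechanism outside the workflow):

    # git diff --quiet exits non-zero when tracked files differ, so
    # changed=true is recorded only when update.sh actually modified
    # something; the later "Push changes" step is gated on
    # steps.update.outputs.changed.
    scripts/update.sh
    git diff --quiet . || echo "changed=true" >> "$GITHUB_OUTPUT"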
81  scripts/_common.sh  Normal file
@@ -0,0 +1,81 @@
#!/bin/sh

_EXEC_PATH="$(realpath .)"
_HAS_TTY=0
if (exec < /dev/tty) 2>/dev/null; then
    _HAS_TTY=1
fi

if [ "$DEBUG" = 1 ]; then
    debug() {
        printf '[DEBUG] '

        # If shell supports FUNCNAME, print it
        # The -20 is used to pad the function name with up to 20 spaces on the right.
        if [ -n "${FUNCNAME+x}" ]; then
            # shellcheck disable=SC3054 # FUNCNAME support requires array support
            printf '%-20s' "${FUNCNAME[1]}"
        fi

        echo "$@"
    }
else debug() { true; }
fi

contains() {
    NEEDLE="$1"
    shift

    for piece in "$@"; do
        if [ "$piece" = "$NEEDLE" ]; then
            return 0
        fi
    done

    return 1
}

check_path() {
    rpath="$(realpath "$1")"

    case "$1" in
        "/"*) echo "Attempted path traversal: $1 is absolute"
              return 1;;
        *);; # Safe
    esac

    case "$rpath" in
        "$_EXEC_PATH/"*);; # Safe
        *) echo "Attempted path traversal: $1 is outside current directory"
           return 1;;
    esac

    return 0
}

download() {
    debug "downloading $1 to $2"
    exitcode=0
    statuscode=0

    curl_params="-fL $1 -o $2 --write-out %{http_code}"

    # shellcheck disable=SC2086 # Intentional
    if [ $_HAS_TTY = 1 ]; then
        # TTY present: Enable curl's progress bar, clear it if operation successful
        tput sc 2>/dev/null || true # Save cursor pos

        statuscode=$(curl -# $curl_params </dev/tty 3>&1) || exitcode=$?
        if [ $exitcode = 0 ]; then
            (tput rc; tput ed) 2>/dev/null || true # Reset cursor pos; Clear to end
        fi
    else
        statuscode=$(curl $curl_params) || exitcode=$?
    fi

    if [ "$statuscode" = "404" ]; then
        return 100
    fi

    return $exitcode
}
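
A minimal sketch (not part of the commit) of how these helpers are meant to be combined by the callers below; the URL and path here are made up:

    # assumes _common.sh has been sourced
    check_path "plugins/Example.jar" || exit 1   # rejects absolute paths and ../ traversal
    rc=0
    download "https://example.invalid/Example.jar" "plugins/Example.jar" || rc=$?
    case $rc in
        0)   echo "downloaded" ;;
        100) echo "server returned 404 (callers may honour skip_404)" ;;
        *)   echo "curl failed with status $rc" ;;
    esac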
52  scripts/_parser.jq  Normal file
@@ -0,0 +1,52 @@
# Apply $filter to input
# <- downloads.json | evaluate_filter("plugins")
# -> [["internal", "plugins/Extras.jar", "type"], "zip"]
# -> [["internal", "plugins/Extras.jar", "url"], "..."]
# -> [["internal", "plugins/Extras.jar", "url"]]
# -> [["internal", "plugins/Extras.jar"]]
# -> [["internal"]]
def evaluate_filter($filter):
    $filter | indices("/") | length
    | truncate_stream(
        inputs
        | select(
            .[0] as $key
            | $key | join("/")
            | startswith($filter)));

# Flatten stream structure, stripping everything but the download
# path and its properties
# <- [["internal", "plugins/Extras.jar", "type"], "zip"]
# <- [["internal", "plugins/Extras.jar", "url"], "..."]
# <- [["internal", "plugins/Extras.jar"]]
# <- [["internal"]]
# -> [["plugins/Extras.jar", "type"], "zip"]
# -> [["plugins/Extras.jar", "url"], "..."]
def get_downloads_obj:
    select(length == 2)
    | del(.[0][:-2]);

# Reduce flattened stream to an object
# <- [["plugins/Extras.jar", "type"], "zip"]
# <- [["plugins/Extras.jar", "url"], "..."]
# -> { "plugins/Extras.jar": {"type": "zip", "url": "..."} }
def reduce_to_object(stream):
    reduce stream as $in ({};
        setpath($in[0]; $in[1]));

# Turn object into a bash-readable string
# <- { "plugins/Extras.jar": {"type": "zip"} }
# -> plugins/Extras.jar
#    zip
#    { "url": ... }
def print_bash:
    to_entries[]
    | (.value | del(.type)) as $args
    | "\(.key)\n\(.value.type)\n\($args)";

reduce_to_object(
    if $arg1 == ""
    then inputs
    else evaluate_filter($arg1) end
    | get_downloads_obj)
| print_bash
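
For reference, a hedged sketch of what this parser emits when invoked the way scripts/update.sh does below; the exact compact-JSON formatting of the args line is an assumption:

    $ jq --null-input --raw-output --exit-status \
          --arg arg1 "plugins/internal" \
          --from-file scripts/_parser.jq \
          --stream scripts/downloads.json
    plugins/CommandSpy.jar
    zip
    {"skip_404":true,"url":"https://nightly.link/kaboomserver/commandspy/workflows/main/master/CommandSpy.zip","extract":"CommandSpy.jar"}
    (…the remaining internal plugins follow in the same three-line pattern)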
67  scripts/_sources/_index.sh  Normal file
@@ -0,0 +1,67 @@
#!/bin/sh
# shellcheck disable=SC1091

. "$_SCRIPT_PATH"/_sources/_url.sh
. "$_SCRIPT_PATH"/_sources/_zip.sh

_parse_args() {
    # <- { "a": "b", "c": "d" }
    # -> a
    # -> b
    # -> c
    # -> d
    jq --raw-output --exit-status \
        'to_entries[] | "\(.key)\n\(.value)"'
}

read_args() {
    while read -r key; read -r value; do
        debug "read: $key=$value"

        if contains "$key" "$@"; then
            debug "set: arg_$key"

            # The eval here might look scary, but we know that $key
            # is safe and we escape $value.
            eval "arg_$key=\$value"
        fi
    done <<PARSE_ARGS_HEREDOC # We must use a heredoc here, see shellcheck SC2031
$(_parse_args)
PARSE_ARGS_HEREDOC
}

require_args() {
    for key in "$@"; do
        # Same thing as above
        eval "tmp=\$arg_$key"
        if [ -z "$tmp" ]; then
            echo "Missing required download argument $key"
            return 1
        fi

        tmp=""
    done
}

download_with_args() {
    require_args url

    # Unfortunately we cannot handle skip_404 here as "zip" can't
    # continue if we 404
    download "${arg_url:?}" "$1"
}

download_type() {
    # Calling the function with _download_type_"$1" opens up the
    # possibility for users to run arbitrary commands, so we must
    # manually handle the type.
    #
    # Since the args are part of the function's stdin, they will
    # be propagated into the _download_type_... functions.
    case "$1" in
        "url") _download_type_url "$2";;
        "zip") _download_type_zip "$2";;
        *) echo Invalid download type "$1"
           return 1;;
    esac
}
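
Adding another download type would presumably follow the same pattern as the url/zip handlers below; a hypothetical sketch (the "raw" type does not exist in this commit):

    _download_type_raw() {
        read_args url              # pull the url field out of the JSON args on stdin
        download_with_args "$1"    # download_with_args enforces that url is present
    }
    # ...plus a matching '"raw") _download_type_raw "$2";;' branch in download_type().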
12  scripts/_sources/_url.sh  Normal file
@@ -0,0 +1,12 @@
#!/bin/sh

_download_type_url() {
    read_args url skip_404

    exitcode=0
    download_with_args "$@" || exitcode=$?

    if [ $exitcode = 100 ] && [ "${arg_skip_404:-false}" = "true" ]; then
        return 0
    fi
}
26  scripts/_sources/_zip.sh  Normal file
@@ -0,0 +1,26 @@
#!/bin/sh

_download_type_zip() {
    read_args url skip_404 extract

    zip_path="$(mktemp --suffix=.zip)"

    exitcode=0
    download_with_args "$zip_path" || exitcode=$?
    if [ $exitcode != 0 ]; then
        rm -f "$zip_path" 2>/dev/null

        if [ $exitcode = 100 ] && [ "${arg_skip_404:-false}" = "true" ]; then
            return 0
        else
            return $exitcode
        fi
    fi

    debug "extracting ${arg_extract:?} to $1"
    unzip -p "$zip_path" \
        "${arg_extract:?}" > "$1" || exitcode=$?
    rm -f "$zip_path" 2>/dev/null

    return $exitcode
}
73  scripts/downloads.json  Normal file
@@ -0,0 +1,73 @@
{
    "server.jar": {
        "type": "zip",
        "url": "https://ci.plex.us.org/job/Scissors/job/1.20.4/lastSuccessfulBuild/artifact/*zip*/archive.zip",
        "extract": "archive/build/libs/scissors-*.jar"
    },
    "plugins": {
        "external": {
            "plugins/Essentials.jar": {
                "type": "zip",
                "url": "https://ci.ender.zone/job/EssentialsX/lastSuccessfulBuild/artifact/*zip*/archive.zip",
                "extract": "archive/jars/EssentialsX-*.jar"
            },
            "plugins/FastAsyncWorldEdit.jar": {
                "type": "zip",
                "url": "https://ci.athion.net/job/FastAsyncWorldEdit/lastSuccessfulBuild/artifact/*zip*/archive.zip",
                "extract": "archive/artifacts/FastAsyncWorldEdit-Bukkit-*.jar"
            },
            "plugins/Geyser.jar": {
                "type": "zip",
                "url": "https://nightly.link/GeyserMC/Geyser/workflows/build/master/Geyser-Spigot.zip",
                "extract": "Geyser-Spigot.jar"
            },
            "plugins/ViaVersion.jar": {
                "type": "zip",
                "url": "https://ci.viaversion.com/job/ViaVersion/lastSuccessfulBuild/artifact/*zip*/archive.zip",
                "extract": "archive/build/libs/ViaVersion-*.jar"
            },
            "plugins/ViaBackwards.jar": {
                "type": "zip",
                "url": "https://ci.viaversion.com/job/ViaBackwards/lastSuccessfulBuild/artifact/*zip*/archive.zip",
                "extract": "archive/build/libs/ViaBackwards-*.jar"
            },
            "plugins/ViaRewind.jar": {
                "type": "zip",
                "url": "https://ci.viaversion.com/job/ViaRewind/lastSuccessfulBuild/artifact/*zip*/archive.zip",
                "extract": "archive/build/libs/ViaRewind-*.jar"
            }
        },
        "internal": {
            "plugins/CommandSpy.jar": {
                "type": "zip",
                "skip_404": true,
                "url": "https://nightly.link/kaboomserver/commandspy/workflows/main/master/CommandSpy.zip",
                "extract": "CommandSpy.jar"
            },
            "plugins/Extras.jar": {
                "type": "zip",
                "skip_404": true,
                "url": "https://nightly.link/kaboomserver/extras/workflows/main/master/Extras.zip",
                "extract": "Extras.jar"
            },
            "plugins/iControlU.jar": {
                "type": "zip",
                "skip_404": true,
                "url": "https://nightly.link/kaboomserver/icontrolu/workflows/main/master/iControlU.zip",
                "extract": "iControlU.jar"
            },
            "plugins/ParticleTrails.jar": {
                "type": "zip",
                "skip_404": true,
                "url": "https://nightly.link/kaboomserver/particletrails/workflows/main/master/ParticleTrails.zip",
                "extract": "ParticleTrails.jar"
            },
            "plugins/Weapons.jar": {
                "type": "zip",
                "skip_404": true,
                "url": "https://nightly.link/kaboomserver/weapons/workflows/main/master/Weapons.zip",
                "extract": "Weapons.jar"
            }
        }
    }
}
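
A new entry would follow the same shape; a hypothetical example (not part of the commit) using the plain "url" type, which _url.sh above supports but no current entry uses:

    "plugins/Example.jar": {
        "type": "url",
        "skip_404": true,
        "url": "https://example.invalid/Example.jar"
    }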
29  scripts/fetch_external_plugins.sh  Deleted file
@@ -1,29 +0,0 @@
#!/bin/sh
# Script used to fetch latest versions of external plugins
# Plugins: EssentialsX, FastAsyncWorldEdit, GeyserMC, ViaVersion, ViaBackwards, ViaRewind

mkdir -p fetched_plugins

# Fetch plugins
for download_url in https://ci.ender.zone/job/EssentialsX/lastSuccessfulBuild/artifact/*zip*/archive.zip \
    https://ci.athion.net/job/FastAsyncWorldEdit/lastSuccessfulBuild/artifact/*zip*/archive.zip \
    https://nightly.link/GeyserMC/Geyser/workflows/build/master/Geyser%20Spigot.zip \
    https://ci.viaversion.com/job/ViaVersion/lastSuccessfulBuild/artifact/*zip*/archive.zip \
    https://ci.viaversion.com/job/ViaBackwards/lastSuccessfulBuild/artifact/*zip*/archive.zip \
    https://ci.viaversion.com/job/ViaRewind/lastSuccessfulBuild/artifact/*zip*/archive.zip
do
    curl -L $download_url > archive.zip
    unzip -o archive.zip
    rm archive.zip
done

# Move plugins
mv archive/jars/EssentialsX-*.jar fetched_plugins/Essentials.jar
mv archive/artifacts/FastAsyncWorldEdit-Bukkit-*.jar fetched_plugins/FastAsyncWorldEdit.jar
mv Geyser-Spigot.jar fetched_plugins/Geyser.jar
mv archive/build/libs/ViaVersion-*.jar fetched_plugins/ViaVersion.jar
mv archive/build/libs/ViaBackwards-*.jar fetched_plugins/ViaBackwards.jar
mv archive/universal/build/libs/ViaRewind-*.jar fetched_plugins/ViaRewind.jar

# Clean up
rm -rf archive/
16  scripts/fetch_internal_plugins.sh  Deleted file
@@ -1,16 +0,0 @@
#!/bin/sh
# Script used to fetch latest versions of internal plugins
# Plugins: CommandSpy, Extras, iControlU, ParticleTrails, Weapons

mkdir -p fetched_plugins

for download_url in https://nightly.link/kaboomserver/commandspy/workflows/main/master/CommandSpy.zip \
    https://nightly.link/kaboomserver/extras/workflows/main/master/Extras.zip \
    https://nightly.link/kaboomserver/icontrolu/workflows/main/master/iControlU.zip \
    https://nightly.link/kaboomserver/particletrails/workflows/main/master/ParticleTrails.zip \
    https://nightly.link/kaboomserver/weapons/workflows/main/master/Weapons.zip
do
    curl -L $download_url > archive.zip
    unzip -o archive.zip -d fetched_plugins
    rm archive.zip
done
11  scripts/fetch_server.sh  Deleted file
@@ -1,11 +0,0 @@
#!/bin/sh
# Script used to fetch the latest version of the server jar

mkdir -p fetched_server

curl -L https://ci.plex.us.org/job/Scissors/job/1.20.4/lastSuccessfulBuild/artifact/*zip*/archive.zip > archive.zip
unzip -o archive.zip
mv archive/build/libs/scissors-*.jar fetched_server/server.jar

rm -rf archive/
rm archive.zip
56  scripts/update.sh  Executable file
@@ -0,0 +1,56 @@
#!/bin/sh
# shellcheck disable=SC1091 # Included files should be manually checked
set -e

# Pipefail is part of POSIX.1-2024, however some shells haven't
# implemented it yet. Turn it on only if it's available.
# shellcheck disable=SC3040
if (set -o pipefail 2>/dev/null); then
    set -o pipefail
fi

_SCRIPT_PATH="$(dirname "$(readlink -f -- "$0")")"
. "$_SCRIPT_PATH"/_common.sh
. "$_SCRIPT_PATH"/_sources/_index.sh

_FILTER="$1"
if [ "$_FILTER" = "help" ]; then
    cat <<USAGE
Usage: scripts/update.sh [FILTER]
Downloads all files contained in scripts/downloads.json. If FILTER
is specified, only files whose JSON paths start with FILTER will be
downloaded.

Examples:
  scripts/update.sh server
  scripts/update.sh plugins/internal
USAGE
    exit 1
fi

_parse_downloads() {
    exitcode=0

    jq --null-input --raw-output --exit-status \
        --arg arg1 "$_FILTER" \
        --from-file "$_SCRIPT_PATH"/_parser.jq \
        --stream "$_SCRIPT_PATH"/downloads.json || exitcode=$?
    if [ $exitcode = 4 ]; then
        echo 'No downloads matched the filter.' >&2
        return $exitcode
    fi

    return $exitcode
}

echo "Downloading with filter ${_FILTER:-"<none>"}..."
_parse_downloads | while read -r path; read -r type; read -r args; do
    echo "> $path"
    if ! check_path "$path"; then
        echo "Bailing!"
        exit 1
    fi

    debug "download_type: type=$type; args=$args"
    echo "$args" | download_type "$type" "$path"
done
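
For orientation, a hedged sketch of what a filtered run might print, based on the echo calls above (curl progress output and any debug lines omitted):

    $ scripts/update.sh plugins/internal
    Downloading with filter plugins/internal...
    > plugins/CommandSpy.jar
    > plugins/Extras.jar
    > plugins/iControlU.jar
    > plugins/ParticleTrails.jar
    > plugins/Weapons.jar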