Compare commits

11 Commits

34775cfe97
9b3ea06c61
f8cfeac03c
422adf3d47
07d1805d13
3d148e6b4a
c8b1f67bdd
d1becc7836
b68c7c5949
8e575157f7
7d778ecd66
zpac (93 changed lines)
@@ -20,8 +20,13 @@ usage() {
     -s        Search for <package>.
     -S        Search only in the sync db. Implies -s.
     -A        Search only in AUR. Implies -s.
-    -d        Get the package sources (default).
-    -D        Get the dependencies also. Implies -d.
+    -d        Get the package sources (default). Set twice to also get the dependencies.
+    -f        Force downloading package sources even if they are already present in the working directory.
+    -w <dir>  Set the working directory.
+
+    Warning! Due to how the argument handling works, you can do this:
+        $ zpac -wdd /tmp/workdir package
+    Where /tmp/workdir is an argument to w. Don't do this. But it works, unfortunately.
 EOF
 }
 
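Note on the reworked -d flag above: with the per-character argument handling introduced further down in main(), passing d a second time (either as -dd or as -d -d) sets flag_get_deps on top of the get action. Hypothetical invocations, with somepkg standing in for a real package name:

    $ zpac -d somepkg     # fetch the package sources only
    $ zpac -dd somepkg    # fetch the sources and, afterwards, their AUR dependencies
    $ zpac -d -d somepkg  # same effect as -dd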
@@ -35,7 +40,14 @@ aur.search() {
         aur_search_results=( $( jshon -e results -a -e Name -u <<< "$aur_api_search_data" ) )
 
         for i in "${!aur_search_results[@]}"; do
-            printf 'aur/%s\n %s\n' "${aur_search_results[${i}]}" "$( jshon -e results -e $i -e Description -u <<< "$aur_api_search_data" )"
+            {
+                read -r version
+                read -r description
+            } < <(
+                jshon -e results -e $i -e Version -u -p -e Description -u <<< "$aur_api_search_data"
+            )
+
+            printf '%s %s\n %s\n' "aur/${aur_search_results[$i]}" "$version" "$description"
         done
     else
         return 1
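The { read ...; } < <( jshon ... -p ... ) block above pulls two fields out of a single jshon invocation: -e descends into an element, -u prints its value unquoted, and -p pops back up one level so the next -e can extract a sibling from the same document; each value lands on its own output line and is picked up by a matching read -r. A standalone sketch of the same pattern, using made-up JSON rather than the real AUR RPC response:

    json='{"results":{"Name":"example","Version":"1.0-1"}}'
    {
        read -r name
        read -r version
    } < <( jshon -e results -e Name -u -p -e Version -u <<< "$json" )
    printf '%s %s\n' "$name" "$version"   # -> example 1.0-1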
@@ -56,21 +68,32 @@ aur.info() {
 aur.get() {
     local dep_name aur_api_multireq aur_deps_api_data aur_deps_count aur_deps pkg_aur_info pkg_base tarball_path
 
-    read pkg_aur_info
+    read -r pkg_aur_info
 
-    pkg_base=$( jshon -e results -e PackageBase -u <<< "$pkg_aur_info" )
-    tarball_path=$( jshon -e results -e URLPath -u <<< "$pkg_aur_info" )
+    {
+        read -r pkg_base
+        read -r tarball_path
+    } < <(
+        jshon -e results -e PackageBase -u -p -e URLPath -u <<< "$pkg_aur_info"
+    )
+
+    (( flag_force )) || {
+        [[ -d "${cfg_workdir}/${pkg_base}" ]] && {
+            echo "Found ${pkg_base} in ${cfg_workdir}, skipping. Use -f to override."
+            return 17
+        }
+    }
 
     cd "$cfg_workdir"
 
-    echo "Downloading ${cfg_aur_url}${tarball_path}"
+    echo "Working in ${cfg_workdir}"
+
+    echo "Downloading ${pkg_base}"
     { curl -skL "${cfg_aur_url}${tarball_path}" | gzip -d | tar x; } || {
         err "Fail!"
         return 1
     }
 
-    echo ": ${cfg_workdir}/${pkg_base}"
-
     if (( flag_get_deps )); then
         unset depends makedepends
 
@@ -86,11 +109,18 @@ aur.get() {
         aur_deps_count=$( jshon -e resultcount <<< "$aur_deps_api_data" )
 
         if (( aur_deps_count )); then
+            echo "Getting dependencies for ${pkg_base}."
+
             aur_deps=( $( jshon -e results -a -e Name -u <<< "$aur_deps_api_data" ) )
 
             for i in "${aur_deps[@]}"; do
-                aur.get < <(aur.info "$i") || {
-                    return "$?"
+                aur.get < <(aur.info "$i")
+                _result="$?"
+
+                (( _result )) && {
+                    (( _result == 17 )) || {
+                        return "$_result"
+                    }
                 }
             done
         fi
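On the _result handling above: aur.get (previous hunk) returns 17 when the package directory already exists in the working directory and -f was not given, so the dependency loop treats 17 as "already there, keep going" and only aborts on other non-zero codes. An equivalent way to read the nested arithmetic guards, shown here as a sketch rather than a proposed change:

    aur.get < <(aur.info "$i")
    _result="$?"

    # 0  -> fetched fine, continue
    # 17 -> sources already present in the workdir, skip this dependency
    # *  -> a real failure, propagate it
    if (( _result != 0 && _result != 17 )); then
        return "$_result"
    fi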
@@ -119,29 +149,48 @@ main() {
 
     while (( $# )); do
         case "$1" in
-            (-h) usage; return 0;;
+            (--) shift; break;;
 
-            (-s) action='search';;
-            (-A)
+            (-*)
+                while read -n1 c
+                do
+                    case "$c" in
+                        (-|'') :;;
+
+                        (h) usage; return 0;;
+
+                        (s) action='search';;
+                        (A)
                             action='search'
                             flag_search_aur=1;;
-            (-S)
+                        (S)
                             action='search'
                             flag_search_syncdb=1;;
 
-            (-d) action='get';;
-            (-D)
+                        (d)
+                            if [[ "$action" == 'get' ]]; then
+                                flag_get_deps=1
+                            else
                                 action='get'
-                flag_get_deps=1;;
+                            fi
+                            ;;
 
-            (--) shift; break;;
-            (-*)
+                        (w)
+                            cfg_workdir="$2"
+                            shift;;
+
+                        (f) flag_force=1;;
+
+                        (*)
                             err "Unknown key: $1"
                             usage
                             return 1
                         ;;
+                    esac
+                done <<< "$1"
+            ;;
 
-            *) break;;
+            (*) break;;
         esac
         shift
     done
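The new (-*) branch feeds each argument through read -n1, so clustered flags such as -Ads are handled one character at a time by the inner case; the leading dash and the empty read produced at the end of the here-string fall through the (-|'') branch. This is also why the warning added to usage() holds: in zpac -wdd /tmp/workdir package, the characters w, d, d are processed in order, and the (w) branch then consumes the next positional argument ($2, here /tmp/workdir) as the working directory. A minimal standalone sketch of the splitting technique only:

    arg='-wdd'
    while read -n1 c; do
        case "$c" in
            (-|'') : ;;               # ignore the dash and the final empty read
            (*)    echo "flag: $c" ;;
        esac
    done <<< "$arg"
    # prints "flag: w", "flag: d", "flag: d", one per line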
@@ -176,7 +225,7 @@ main() {
     if (( ! "$?" )); then
         repo='aur'
     else
-        while read; do
+        while read -r; do
             if [[ "$REPLY" =~ ^Repository ]]; then
                 repo="${REPLY##* }"
             fi