CLI: invalid bucket (usability)
* bucket[/object] URI parsing: reduce copy/paste
* add "did you mean" hint
* tests: 'ais cp --latest' cont-d

Signed-off-by: Alex Aizman <[email protected]>
alex-aizman committed Jan 8, 2024
1 parent 6cb2c3a commit 57244bd
Showing 5 changed files with 79 additions and 65 deletions.
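
The "did you mean" hint comes from the errBucketNameInvalid helper added in cmd/cli/cli/parse_uri.go (below): when a bucket URI contains a single-slash separator such as "s3:/" but no "://", the error now suggests the corrected form. A rough illustration (hypothetical invocation; the exact framing of the CLI's error output may differ):

  $ ais ls s3:/abc
  bucket name in "s3:/abc" is invalid: (did you mean "s3://abc"?)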
ais/test/scripts/s3-cp-latest-prefix.sh (74 changes: 38 additions & 36 deletions)
@@ -6,14 +6,14 @@
 # - aistore cluster, also configured to access the same bucket
 #
 ## Example usage:
-## ./ais/test/scripts/s3-cp-latest-prefix.sh --bucket s3://abc ########################
+## ./ais/test/scripts/s3-cp-latest-prefix.sh --bucket s3://abc ########################

 lorem='Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.'

 duis='Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. Et harum quidem..'

 ## Command line options (and their respective defaults)
-bucket="s3://abc"
+src="s3://abc"

 ## constants
 sum1="xxhash[ad97df912d23103f]"
@@ -23,25 +23,32 @@ host="--host=s3.amazonaws.com"

 while (( "$#" )); do
   case "${1}" in
-    --bucket) bucket=$2; shift; shift;;
+    --bucket) src=$2; shift; shift;;
     *) echo "fatal: unknown argument '${1}'"; exit 1;;
   esac
 done

 ## uncomment for verbose output
-## set -x
+set -x ## DEBUG

 ## establish existence
-ais show bucket $bucket -c 1>/dev/null || exit $?
+ais show bucket $src -c 1>/dev/null || exit $?

+## temp destination bucket will be created on the fly
+dst="ais://dst-$RANDOM"
+
 ## remember existing bucket's versioning; disable if need be
-sync=$(ais bucket props show ${bucket} versioning.synchronize -H | awk '{print $2}')
-[[ "$sync" == "false" ]] || ais bucket props set $bucket versioning.synchronize=false
+validate=$(ais bucket props show ${src} versioning.validate_warm_get -H | awk '{print $2}')
+[[ "$validate" == "false" ]] || ais bucket props set $src versioning.validate_warm_get=false
+sync=$(ais bucket props show ${src} versioning.synchronize -H | awk '{print $2}')
+[[ "$sync" == "false" ]] || ais bucket props set $src versioning.synchronize=false

 cleanup() {
   rc=$?
-  ais object rm "$bucket/lorem-duis" 1>/dev/null 2>&1
-  [[ "$sync" == "true" ]] || ais bucket props set $bucket versioning.synchronize=false 1>/dev/null 2>&1
+  ais object rm "$src/lorem-duis" 1>/dev/null 2>&1
+  [[ "$validate" == "true" ]] || ais bucket props set $src versioning.validate_warm_get=false 1>/dev/null 2>&1
+  [[ "$sync" == "true" ]] || ais bucket props set $src versioning.synchronize=false 1>/dev/null 2>&1
+  ais rmb $dst --yes 1>/dev/null 2>&1
   exit $rc
 }

@@ -52,62 +59,57 @@ ais show performance counters --regex "(GET-COLD$|VERSION-CHANGE$|DELETE)"
 echo -e

 echo "1. out-of-band PUT: 1st version"
-echo $lorem | s3cmd put - "$bucket/lorem-duis" $host 1>/dev/null || exit $?
+echo $lorem | s3cmd put - "$src/lorem-duis" $host 1>/dev/null || exit $?

 echo "2. copy, and check"
-ais cp "$bucket/lorem-duis" --wait
-checksum=$(ais ls "$bucket/lorem-duis" --cached -H -props checksum | awk '{print $2}')
+ais cp "$src/lorem-duis" $dst --all --wait || exit $?
+checksum=$(ais ls "$dst/lorem-duis" --cached -H -props checksum | awk '{print $2}')
 [[ "$checksum" == "$sum1" ]] || { echo "FAIL: $checksum != $sum1"; exit 1; }

 echo "3. out-of-band PUT: 2nd version (overwrite)"
-echo $duis | s3cmd put - "$bucket/lorem-duis" $host 1>/dev/null || exit $?
+echo $duis | s3cmd put - "$src/lorem-duis" $host 1>/dev/null || exit $?

 echo "4. copy and check (expecting the first version's checksum)"
-ais cp "$bucket/lorem-duis" --wait
-checksum=$(ais ls "$bucket/lorem-duis" --cached -H -props checksum | awk '{print $2}')
+ais cp "$src/lorem-duis" $dst --wait
+checksum=$(ais ls "$dst/lorem-duis" --cached -H -props checksum | awk '{print $2}')
 [[ "$checksum" != "$sum2" ]] || { echo "FAIL: $checksum == $sum2"; exit 1; }

 echo "5. query cold-get count (statistics)"
 cnt1=$(ais show performance counters --regex GET-COLD -H | awk '{sum+=$2;}END{print sum;}')

 echo "6. copy latest: detect version change and update in-cluster copy"
-ais cp "$bucket/lorem-duis" --latest --wait
-checksum=$(ais ls "$bucket/lorem-duis" --cached -H -props checksum | awk '{print $2}')
+ais cp "$src/lorem-duis" $dst --latest --wait
+checksum=$(ais ls "$dst/lorem-duis" --cached -H -props checksum | awk '{print $2}')
 [[ "$checksum" == "$sum2" ]] || { echo "FAIL: $checksum != $sum2"; exit 1; }

 echo "7. cold-get counter must increment"
 cnt2=$(ais show performance counters --regex GET-COLD -H | awk '{sum+=$2;}END{print sum;}')
 [[ $cnt2 == $(($cnt1+1)) ]] || { echo "FAIL: $cnt2 != $(($cnt1+1))"; exit 1; }

-echo "8. warm GET must remain \"warm\" and cold-get-count must not increment"
-ais get "$bucket/lorem-duis" /dev/null 1>/dev/null
-checksum=$(ais ls "$bucket/lorem-duis" --cached -H -props checksum | awk '{print $2}')
-[[ "$checksum" == "$sum2" ]] || { echo "FAIL: $checksum != $sum2"; exit 1; }
-
-cnt3=$(ais show performance counters --regex GET-COLD -H | awk '{sum+=$2;}END{print sum;}')
-[[ $cnt3 == $cnt2 ]] || { echo "FAIL: $cnt3 != $cnt2"; exit 1; }
+echo "8. remember 'remote-deleted' counter"
+cnt3=$(ais show performance counters --regex REMOTE-DEL -H | awk '{sum+=$2;}END{print sum;}')

 echo "9. out-of-band DELETE"
-s3cmd del "$bucket/lorem-duis" $host 1>/dev/null || exit $?
+s3cmd del "$src/lorem-duis" $host 1>/dev/null || exit $?

-echo "10. copy with '--latest' (expecting no changes)"
-ais cp "$bucket/lorem-duis" --latest --wait
-checksum=$(ais ls "$bucket/lorem-duis" --cached -H -props checksum | awk '{print $2}')
+echo "10. copy '--latest' (expecting no changes)"
+ais cp "$src/lorem-duis" $dst --latest --wait || exit $?
+checksum=$(ais ls "$src/lorem-duis" --cached -H -props checksum | awk '{print $2}')
 [[ "$checksum" == "$sum2" ]] || { echo "FAIL: $checksum != $sum2"; exit 1; }

-echo "11. remember 'remote-deleted' counter"
-cnt4=$(ais show performance counters --regex REMOTE-DEL -H | awk '{sum+=$2;}END{print sum;}')
-[[ $cnt4 == $cnt3 ]] || { echo "FAIL: $cnt4 != $cnt3"; exit 1; }
-
-echo "12. run 'cp --latest' one last time, and make sure the object \"disappears\""
-ais cp "$bucket/lorem-duis" --latest --wait 2>/dev/null
-[[ $? == 0 ]] || { echo "FAIL: expecting 'prefetch --wait' to return Ok, got $?"; exit 1; }
+echo "11. run 'cp --sync' one last time, and make sure the object \"disappears\""
+ais cp "$src/lorem-duis" --sync --wait

-echo "13. 'remote-deleted' counter must increment"
-cnt5=$(ais show performance counters --regex REMOTE-DEL -H | awk '{sum+=$2;}END{print sum;}')
-[[ $cnt5 == $(($cnt4+1)) ]] || { echo "FAIL: $cnt5 != $(($cnt4+1))"; exit 1; }
+### [[ $? == 0 ]] || { echo "FAIL: expecting 'cp --wait' to return Ok, got $?"; exit 1; }
+
+echo "12. 'remote-deleted' counter must increment"
+cnt5=$(ais show performance counters --regex REMOTE-DEL -H | awk '{sum+=$2;}END{print sum;}')
+[[ $cnt5 == $(($cnt3+1)) ]] || { echo "FAIL: $cnt5 != $(($cnt3+1))"; exit 1; }

-ais ls "$bucket/lorem-duis" --cached --silent -H 2>/dev/null
+ais ls "$src/lorem-duis" --cached --silent -H 2>/dev/null
 [[ $? != 0 ]] || { echo "FAIL: expecting 'show object' error, got $?"; exit 1; }

 echo -e
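
Net of the bucket-to-src rename and the new temporary $dst bucket, steps 1 through 7 of the revised script reduce to the following out-of-band update pattern (condensed from the script above; "s3://abc" and "ais://dst" stand in for $src and the generated $dst):

  echo "$lorem" | s3cmd put - "s3://abc/lorem-duis" --host=s3.amazonaws.com   # out-of-band PUT, bypassing aistore
  ais cp "s3://abc/lorem-duis" "ais://dst" --all --wait                       # copy into the cluster
  echo "$duis" | s3cmd put - "s3://abc/lorem-duis" --host=s3.amazonaws.com    # out-of-band overwrite (2nd version)
  ais cp "s3://abc/lorem-duis" "ais://dst" --latest --wait                    # '--latest' detects the version change and cold-GETs it
  ais ls "ais://dst/lorem-duis" --cached -H -props checksum                   # the cached checksum now matches the 2nd version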
ais/test/scripts/s3-prefetch-latest-prefix.sh (3 changes: 3 additions & 0 deletions)
@@ -35,12 +35,15 @@ done
 ais show bucket $bucket -c 1>/dev/null || exit $?

 ## remember existing bucket's versioning; disable if need be
+validate=$(ais bucket props show ${bucket} versioning.validate_warm_get -H | awk '{print $2}')
+[[ "$validate" == "false" ]] || ais bucket props set $bucket versioning.validate_warm_get=false
 sync=$(ais bucket props show ${bucket} versioning.synchronize -H | awk '{print $2}')
 [[ "$sync" == "false" ]] || ais bucket props set $bucket versioning.synchronize=false

 cleanup() {
   rc=$?
   ais object rm "$bucket/lorem-duis" 1>/dev/null 2>&1
+  [[ "$validate" == "true" ]] || ais bucket props set $bucket versioning.validate_warm_get=false 1>/dev/null 2>&1
   [[ "$sync" == "true" ]] || ais bucket props set $bucket versioning.synchronize=false 1>/dev/null 2>&1
   exit $rc
 }
cmd/cli/cli/bucket_hdlr.go (4 changes: 2 additions & 2 deletions)
@@ -534,8 +534,8 @@ func listAnyHandler(c *cli.Context) error {
     bck, objName, err := cmn.ParseBckObjectURI(uri, opts) // `ais ls` with no args - is Ok

     if err != nil {
-        if strings.Contains(err.Error(), cos.OnlyPlus) && strings.Contains(err.Error(), "bucket name") {
-            err = fmt.Errorf("bucket name in %q is invalid: "+cos.OnlyPlus, uri)
+        if errV := errBucketNameInvalid(c, uri, err); errV != nil {
+            return errV
         }
         return err
     }
cmd/cli/cli/parse_uri.go (58 changes: 33 additions & 25 deletions)
@@ -1,6 +1,6 @@
 // Package cli provides easy-to-use commands to manage, monitor, and utilize AIS clusters.
 /*
- * Copyright (c) 2018-2023, NVIDIA CORPORATION. All rights reserved.
+ * Copyright (c) 2018-2024, NVIDIA CORPORATION. All rights reserved.
  */
 package cli

@@ -14,6 +14,20 @@ import (
     "github.com/urfave/cli"
 )

+func errBucketNameInvalid(c *cli.Context, arg string, err error) error {
+    if errV := errMisplacedFlag(c, arg); errV != nil {
+        return errV
+    }
+    if strings.Contains(err.Error(), cos.OnlyPlus) && strings.Contains(err.Error(), "bucket name") {
+        if strings.Contains(arg, ":/") && !strings.Contains(arg, apc.BckProviderSeparator) {
+            a := strings.Replace(arg, ":/", apc.BckProviderSeparator, 1)
+            return fmt.Errorf("bucket name in %q is invalid: (did you mean %q?)", arg, a)
+        }
+        return fmt.Errorf("bucket name in %q is invalid: "+cos.OnlyPlus, arg)
+    }
+    return nil
+}
+
 // Return `bckFrom` and `bckTo` - the [shift] and the [shift+1] arguments, respectively
 func parseBcks(c *cli.Context, bckFromArg, bckToArg string, shift int, optionalSrcObjname bool) (bckFrom, bckTo cmn.Bck, objFrom string,
     err error) {
@@ -27,25 +41,29 @@ func parseBcks(c *cli.Context, bckFromArg, bckToArg string, shift int, optionalS
     }

     // src
+    var uri string
     if optionalSrcObjname {
-        bckFrom, objFrom, err = parseBckObjURI(c, c.Args().Get(0), true /*emptyObjnameOK*/)
+        uri = c.Args().Get(0)
+        bckFrom, objFrom, err = parseBckObjURI(c, uri, true /*emptyObjnameOK*/)
     } else {
-        bckFrom, err = parseBckURI(c, c.Args().Get(shift), true /*error only*/)
+        uri = c.Args().Get(shift)
+        bckFrom, err = parseBckURI(c, uri, true /*error only*/)
     }
     if err != nil {
-        if strings.Contains(err.Error(), cos.OnlyPlus) && strings.Contains(err.Error(), "bucket name") {
-            err = fmt.Errorf("bucket name in %q is invalid: "+cos.OnlyPlus, c.Args().Get(shift))
+        if errV := errBucketNameInvalid(c, uri, err); errV != nil {
+            err = errV
         } else {
             err = incorrectUsageMsg(c, "invalid %s argument '%s' - %v", bckFromArg, c.Args().Get(shift), err)
         }
         return
     }

     // dst
-    bckTo, err = parseBckURI(c, c.Args().Get(shift+1), true)
+    uri = c.Args().Get(shift + 1)
+    bckTo, err = parseBckURI(c, uri, true)
     if err != nil {
-        if strings.Contains(err.Error(), cos.OnlyPlus) && strings.Contains(err.Error(), "bucket name") {
-            err = fmt.Errorf("bucket name in %q is invalid: "+cos.OnlyPlus, c.Args().Get(shift+1))
+        if errV := errBucketNameInvalid(c, uri, err); errV != nil {
+            err = errV
         } else {
             err = incorrectUsageMsg(c, "invalid %s argument '%s' - %v", bckToArg, c.Args().Get(shift+1), err)
         }
@@ -61,22 +79,15 @@ func parseBckURI(c *cli.Context, uri string, errorOnly bool) (cmn.Bck, error) {
     }

     opts := cmn.ParseURIOpts{}
-    if providerRequired {
-        parts := strings.Split(uri, apc.BckProviderSeparator)
-        if len(parts) < 2 {
-            err := fmt.Errorf("invalid %q: backend provider cannot be empty\n(e.g. valid names%s)", uri, validNames)
-            return cmn.Bck{}, err
-        }
-        if len(parts) > 2 {
-            err := fmt.Errorf("invalid bucket arg: too many parts %v\n(e.g. valid names%s)", parts, validNames)
-            return cmn.Bck{}, err
-        }
-    } else {
+    if !providerRequired {
         opts.DefaultProvider = cfg.DefaultProvider
     }
     bck, objName, err := cmn.ParseBckObjectURI(uri, opts)
     switch {
     case err != nil:
+        if errV := errBucketNameInvalid(c, uri, err); errV != nil {
+            err = errV
+        }
         return cmn.Bck{}, err
     case objName != "":
         if errorOnly {
@@ -154,12 +165,9 @@ func parseBckObjURI(c *cli.Context, uri string, emptyObjnameOK bool) (bck cmn.Bc
     }
     bck, objName, err = cmn.ParseBckObjectURI(uri, opts)
     if err != nil {
-        if errV := errMisplacedFlag(c, uri); errV != nil {
+        if errV := errBucketNameInvalid(c, uri, err); errV != nil {
             return bck, objName, errV
         }
-        if strings.Contains(err.Error(), cos.OnlyPlus) && strings.Contains(err.Error(), "bucket name") {
-            return bck, objName, fmt.Errorf("bucket name in %q is invalid: "+cos.OnlyPlus, uri)
-        }
         var msg string
         if emptyObjnameOK {
             msg = "Expecting " + optionalObjectsArgument + ", e.g.: ais://mmm, s3://nnn/obj2, gs://ppp/a/b/c, etc."
@@ -173,8 +181,8 @@ func parseBckObjURI(c *cli.Context, uri string, emptyObjnameOK bool) (bck cmn.Bc
     if bck.Name == "" {
         err = incorrectUsageMsg(c, "%q: missing bucket name", uri)
     } else if err = bck.Validate(); err != nil {
-        if strings.Contains(err.Error(), cos.OnlyPlus) && strings.Contains(err.Error(), "bucket name") {
-            err = fmt.Errorf("bucket name in %q is invalid: "+cos.OnlyPlus, uri)
+        if errV := errBucketNameInvalid(c, uri, err); errV != nil {
+            err = errV
         } else {
             err = cannotExecuteError(c, err, "")
         }
cmd/cli/cli/utils.go (5 changes: 3 additions & 2 deletions)
@@ -1,7 +1,6 @@
 // Package cli provides easy-to-use commands to manage, monitor, and utilize AIS clusters.
-// This file contains util functions and types.
 /*
- * Copyright (c) 2018-2023, NVIDIA CORPORATION. All rights reserved.
+ * Copyright (c) 2018-2024, NVIDIA CORPORATION. All rights reserved.
  */
 package cli

@@ -35,6 +34,8 @@ import (
     "github.com/urfave/cli"
 )

+// This file contains common utilities and low-level helpers.
+
 const (
     keyAndValueSeparator = "="
