ceph/qa/rgw/s3.sh

#!/bin/bash
origdir=`pwd`
# set -x
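
# Read S3 credentials from ~/.s3 if they are not already set in the environment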
load_credentials() {
    if [ -e ~/.s3 ]; then
        source ~/.s3
    else
        echo "ERROR: Credentials not defined!"
        exit 1
    fi
}
if [ "$S3_ACCESS_KEY_ID" == "" ] ||
[ "$S3_HOSTNAME" == "" ] ||
[ "$S3_SECRET_ACCESS_KEY" == "" ]; then
load_credentials
fi
bindir=${origdir}/libs3/build/bin
libdir=${origdir}/libs3/build/lib
log=${origdir}/s3.log
export LD_LIBRARY_PATH=${libdir}
s3=${bindir}/s3
tmp_bucket="test-`date +%s`"
tmpdir="tmp"
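
# Remove leftovers from any previous run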
cleanup() {
    rm -fR libs3 tmp
}
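
# Check out and build the libs3 command-line client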
build() {
    echo "Checking out source"
    log git clone git://github.com/wido/libs3.git
    echo "Building"
    log make -C libs3
}

init() {
    cleanup
    build
    mkdir -p tmp
}
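
# Run a command and append its output to the log file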
log() {
    "$@" >> $log
}
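
# Compare the result recorded in .cmd.log with the expected outcome
# ($should_succeed: 1 = the command should succeed, 0 = it should fail) and abort on a mismatch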
check_error() {
    should_succeed=$1
    fail=`grep -c ERROR .cmd.log`
    [ $fail -eq 0 ] && success=1 || success=0
    if [ $success -ne $should_succeed ]; then
        [ $should_succeed -ne 0 ] && echo "Command failed:"
        [ $should_succeed -eq 0 ] && echo "Command succeeded unexpectedly:"
        echo "$op $params"
        cat .cmd.log
        exit 1
    fi
}
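
# do_op <should_succeed> <command> [args...]
# Run a command, capture its output in .cmd.log, and verify the expected outcome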
do_op() {
    should_succeed=$1
    shift
    op=$1
    shift
    params="$@"
    echo "# $op" "$@" | tee -a $log
    $op "$@" > .cmd.log 2>&1
    log cat .cmd.log
    check_error $should_succeed
}

run_s3() {
    echo $s3 "$@" >> .cmd.log
    $s3 "$@"
}

create_bucket() {
    bucket_name=$1
    run_s3 create $bucket_name
}

delete_bucket() {
    bucket_name=$1
    run_s3 delete $bucket_name
}
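
# Create an 8 MB file of random data and upload it to the test bucket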
create_file() {
    filename=$1
    dd if=/dev/urandom of=$tmpdir/$filename bs=4096 count=2048
    run_s3 put $tmp_bucket/$filename filename=$tmpdir/$filename
}
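
# Download an object and verify that it matches the original file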
get_file() {
    filename=$1
    dest_fname=$2
    run_s3 get $tmp_bucket/$filename filename=$tmpdir/$dest_fname
    do_op 1 diff $tmpdir/$filename $tmpdir/$dest_fname
    rm -f $tmpdir/$dest_fname
}

get_acl() {
    filename=$1
    dest_fname=$2
    run_s3 getacl $tmp_bucket/$filename filename=$tmpdir/$dest_fname
}

set_acl() {
    filename=$1
    src_fname=$2
    run_s3 setacl $tmp_bucket/$filename filename=$tmpdir/$src_fname
}

delete_file() {
    filename=$1
    run_s3 delete $tmp_bucket/$filename
}
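
# Fetch an object anonymously over plain HTTP and check whether the result
# matches the expected outcome (1 = anonymous access should work, 0 = it should not)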
get_anon() {
    should_succeed=$1
    bucket=$2
    fname=$3
    dest=$tmpdir/$4
    echo "# get_anon $@"
    url="http://$bucket.$S3_HOSTNAME/$fname"
    wget $url -O $dest > .cmd.log 2>&1
    res=$?
    log cat .cmd.log
    if [ $res -ne 0 ]; then
        echo "ERROR: Could not fetch file anonymously (url=$url)" > .cmd.log
    fi
    check_error $should_succeed
}
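
# Append an ACL grant line to a locally stored ACL file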
add_acl() {
    filename=$1
    acl=$2
    echo $acl >> $tmpdir/$filename
}

main() {
    log echo "****************************************************************"
    log echo "* `date`"
    log echo "****************************************************************"

    init

    do_op 1 create_bucket $tmp_bucket
    do_op 0 create_bucket $tmp_bucket

    do_op 1 create_file foo
    do_op 1 get_file foo foo.tmp

    do_op 1 get_acl foo foo.acl
    get_anon 0 $tmp_bucket foo foo.anon
    add_acl foo.acl "Group All Users READ"
    do_op 1 set_acl foo foo.acl
    get_anon 1 $tmp_bucket foo foo.anon

    do_op 1 delete_file foo
    do_op 1 delete_bucket $tmp_bucket
}

main "$@"