diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..5c5c22c
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,10 @@
+*.pyc
+*.swp
+testsuite
+testsuite-out
+/MANIFEST
+/dist
+build/*
+s3cmd.egg-info
+s3cmd.spec
+.idea
diff --git a/.svnignore b/.svnignore
new file mode 100644
index 0000000..cb4611a
--- /dev/null
+++ b/.svnignore
@@ -0,0 +1,8 @@
+## Run 'svn propset svn:ignore -F .svnignore .' after you change this list
+*.pyc
+tst.*
+MANIFEST
+dist
+build
+.*.swp
+s3cmd.1.gz
diff --git a/.travis.s3cfg b/.travis.s3cfg
new file mode 100644
index 0000000..91797af
--- /dev/null
+++ b/.travis.s3cfg
@@ -0,0 +1,75 @@
+[default]
+access_key = Q3AM3UQ867SPQQA43P2F
+access_token = 
+add_encoding_exts = 
+add_headers = 
+bucket_location = us-east-1
+ca_certs_file = 
+cache_file = 
+check_ssl_certificate = True
+check_ssl_hostname = True
+cloudfront_host = cloudfront.amazonaws.com
+default_mime_type = binary/octet-stream
+delay_updates = False
+delete_after = False
+delete_after_fetch = False
+delete_removed = False
+dry_run = False
+enable_multipart = True
+encoding = UTF-8
+encrypt = False
+expiry_date = 
+expiry_days = 
+expiry_prefix = 
+follow_symlinks = False
+force = False
+get_continue = False
+gpg_command = /usr/bin/gpg
+gpg_decrypt = %(gpg_command)s -d --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s
+gpg_encrypt = %(gpg_command)s -c --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s
+gpg_passphrase = 
+guess_mime_type = True
+host_base = localhost:9000
+host_bucket = localhost:9000
+human_readable_sizes = False
+invalidate_default_index_on_cf = False
+invalidate_default_index_root_on_cf = True
+invalidate_on_cf = False
+kms_key = 
+limit = -1
+limitrate = 0
+list_md5 = False
+log_target_prefix = 
+long_listing = False
+max_delete = -1
+mime_type = 
+multipart_chunk_size_mb = 15
+multipart_max_chunks = 10000
+preserve_attrs = True
+progress_meter = True
+proxy_host = 
+proxy_port = 0
+put_continue = False
+recursive = False
+recv_chunk = 65536
+reduced_redundancy = False
+requester_pays = False
+restore_days = 1
+secret_key = zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG
+send_chunk = 65536
+server_side_encryption = False
+signature_v2 = False
+simpledb_host = sdb.amazonaws.com
+skip_existing = False
+socket_timeout = 300
+stats = False
+stop_on_error = False
+storage_class = 
+urlencoding_mode = normal
+use_http_expect = False
+use_https = False
+use_mime_magic = True
+verbosity = WARNING
+website_endpoint = http://%(bucket)s.s3-website-%(location)s.amazonaws.com/
+website_error = 
+website_index = index.html
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..1caf558
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,38 @@
+language: python
+python:
+    - "2.6"
+    - "2.7"
+    - "3.4"
+    - "3.5"
+    - "3.6"
+#matrix:
+#    allow_failures:
+#        - python: "3.5"
+notifications:
+    email: false
+    irc: "chat.freenode.net#s3cmd"
+# command to install dependencies
+cache:
+    directories:
+        - $HOME/cache
+install:
+    - pip install .
+# command to prepare tests
+before_install:
+    - mkdir -p $HOME/cache
+    - test ! -e $HOME/cache/minio && wget -O $HOME/cache/minio https://dl.minio.io/server/minio/release/linux-amd64/minio || echo "Minio already in cache"
+    - mkdir -p $HOME/minio_tmp
+# Start a local instance of minio
+before_script:
+    - "export AWS_ACCESS_KEY_ID=Q3AM3UQ867SPQQA43P2F"
+    - "export AWS_SECRET_ACCESS_KEY=zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG"
+    - "export MINIO_ACCESS_KEY=Q3AM3UQ867SPQQA43P2F"
+    - "export MINIO_SECRET_KEY=zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG"
+    - chmod +x $HOME/cache/minio
+    - $HOME/cache/minio server $HOME/minio_tmp &
+    - sleep 4 # give minio some time to start
+# command to run tests
+## Tests stopped at test 23 because minio doesn't support "quote_plus" used in signatures.
+script: python ./run-tests-minio.py -c .travis.s3cfg -p baseauto
+after_script:
+    - killall minio
diff --git a/ChangeLog b/ChangeLog
new file mode 100644
index 0000000..f11fad4
--- /dev/null
+++ b/ChangeLog
@@ -0,0 +1,1459 @@
+2011-06-06  Michal Ludvig  <mludvig@logix.net.nz>
+
+===== Migrated to GIT =====
+
+No longer keeping ChangeLog up to date, use git log instead!
+
+* git://github.com/s3tools/s3cmd.git
+
+2011-04-11  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* S3/S3Uri.py: Fixed cf:// uri parsing.
+	* S3/CloudFront.py: Don't fail if there are no cfinval
+	  requests.
+
+2011-04-11  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* S3/PkgInfo.py: Updated to 1.1.0-beta1
+	* NEWS: Updated.
+	* s3cmd.1: Regenerated.
+
+2011-04-11  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* S3/Config.py: Increase socket_timeout from 10 secs to 5 mins.
+
+2011-04-10  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* s3cmd, S3/CloudFront.py, S3/S3Uri.py: Support for checking 
+	  status of CF Invalidation Requests [cfinvalinfo].
+	* s3cmd, S3/CloudFront.py, S3/Config.py: Support for CloudFront
+	  invalidation using [sync --cf-invalidate] command.
+	* S3/Utils.py: getDictFromTree() now recurses into
+	  sub-trees.
+
+2011-03-30  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* S3/CloudFront.py: Fix warning with Python 2.7
+	* S3/CloudFront.py: Cmd._get_dist_name_for_bucket() moved to
+	  CloudFront class.
+
+2011-01-13  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* s3cmd, S3/FileLists.py: Move file/object listing functions
+	  to S3/FileLists.py
+
+2011-01-09  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* Released version 1.0.0
+	  ----------------------
+
+	* S3/PkgInfo.py: Updated to 1.0.0
+	* NEWS: Updated.
+
+2011-01-02  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* s3cmd: Improved r457 (Don't crash when file disappears
+	  before checking MD5).
+	* s3cmd, s3cmd.1, format-manpage.pl: Improved --help text
+	  and manpage.
+	* s3cmd: Removed explicit processing of --follow-symlinks
+	  (is caught by the default / main loop).
+
+2010-12-24  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* s3cmd: Set 10s socket timeout for read()/write().
+	* s3cmd: Added --(no-)check-md5 for [sync].
+	* run-tests.py, testsuite.tar.gz: Added testsuite for
+	  the above.
+	* NEWS: Document the above.
+	* s3cmd: Don't crash when file disappears before
+	  checking MD5.
+
+2010-12-09  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* Released version 1.0.0-rc2
+	  --------------------------
+
+	* S3/PkgInfo.py: Updated to 1.0.0-rc2
+	* NEWS, TODO, s3cmd.1: Updated.
+
+2010-11-13  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* s3cmd: Added support for remote-to-remote sync.
+	  (Based on patch from Sundar Raman - thanks!)
+	* run-tests.py: Testsuite for the above.
+
+2010-11-12  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* s3cmd: Fixed typo in "s3cmd du" error path.
+
+2010-11-12  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* format-manpage.pl: new manpage auto-formatter
+	* s3cmd.1: Updated using the above helper script
+	* setup.py: Warn if manpage is too old.
+
+2010-10-27  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* run-tests.py, testsuite.tar.gz: Keep the testsuite in
+	  SVN as a tarball. There's too many "strange" things 
+	  in the directory for it to be kept in SVN.
+
+2010-10-27  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* TODO: Updated.
+	* upload-to-sf.sh: Updated for new SF.net system
+
+2010-10-26  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* Released version 1.0.0-rc1
+	  --------------------------
+
+	* S3/PkgInfo.py: Updated to 1.0.0-rc1
+	* NEWS, TODO: Updated.
+
+2010-10-26  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* s3cmd, S3/CloudFront.py, S3/Config.py: Added support
+	  for CloudFront DefaultRootObject. Thanks to Luke Andrew.
+
+2010-10-25  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* s3cmd: Improved 'fixbucket' command. Thanks to Srinivasa
+	  Moorthy.
+	* s3cmd: Read config file even if User Profile directory on 
+	  Windows contains non-ascii symbols. Thx Slava Vishnyakov
+
+2010-10-25  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* s3cmd: Don't fail when a local node is a directory
+	  and we expected a file. (as if for example /etc/passwd 
+	  was a dir)
+
+2010-10-25  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* s3cmd, S3/S3.py: Ignore inaccessible (and missing) files
+	  on upload.
+	* run-tests.py: Extended [sync] test to verify correct
+	  handling of inaccessible files.
+	* testsuite/permission-tests: New testsuite files.
+
+2010-10-24  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* S3/S3.py: "Stringify" all headers. Httplib should do
+	  it but some Python 2.7 users reported problems that should
+	  now be fixed.
+	* run-tests.py: Fixed test #6
+
+2010-07-25  Aaron Maxwell  <amax@resymbol.net>
+
+	* S3/Config.py, testsuite/etc/, run-tests.py, s3cmd.1, s3cmd:
+	  Option to follow local symlinks for sync and 
+	  put (--follow-symlinks option), including tests and documentation
+	* run-tests.py: --bucket-prefix option, to allow different 
+	  developers to run tests in their own sandbox
+
+2010-07-08  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* run-tests.py, testsuite/crappy-file-name.tar.gz:
+	  Updated testsuite, work around a problem with [s3cmd cp]
+	  when the source file contains '?' or '\x7f' 
+	  (where the inability to copy '?' is especially annoying).
+
+2010-07-08  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* S3/Utils.py, S3/S3Uri.py: Fixed names after moving 
+	  functions between modules.
+
+2010-06-29  Timothee Groleau <kde@timotheegroleau.com>
+
+	* S3/ACL.py: Fix isAnonRead method on Grantees
+	* ChangeLog: Update name of contributor for Timothee Groleau
+
+2010-06-13  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* s3cmd, S3/CloudFront.py: Both [accesslog] and [cfmodify] 
+	  access logging can now be disabled with --no-access-logging
+
+2010-06-13  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* S3/CloudFront.py: Allow s3:// URI as well as cf:// URI 
+	  for most CloudFront-related commands.
+
+2010-06-12  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* s3cmd, S3/CloudFront.py, S3/Config.py: Support access 
+	  logging for CloudFront distributions.
+	* S3/S3.py, S3/Utils.py: Moved some functions to Utils.py
+	  to make them available to CloudFront.py
+	* NEWS: Document the above.
+
+2010-05-27  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* S3/S3.py: Fix bucket listing for buckets with
+	  over 1000 prefixes. (contributed by Timothee Groleau)
+	* S3/S3.py: Fixed code formatting.
+
+2010-05-21  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* s3cmd, S3/S3.py: Added support for bucket locations
+	  outside US/EU (i.e. us-west-1 and ap-southeast-1 as of now).
+
+2010-05-21  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* s3cmd, S3/S3.py, S3/Config.py: Added --reduced-redundancy
+	  switch for Reduced Redundancy Storage.
+
+2010-05-20  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* s3cmd, S3/ACL.py, S3/Config.py: Support for --acl-grant
+	  and --acl-revoke (contributed by Timothee Groleau)
+	* s3cmd: Couple of fixes on top of the above commit.
+	* s3cmd: Pre-parse ACL parameters in OptionS3ACL()
+
+2010-05-20  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* S3/Exceptions.py, S3/S3.py: Some HTTP_400 exceptions 
+	  are retriable.
+
+2010-03-19  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* s3cmd, S3/ACL.py: Print all ACLs for a Grantee
+	(one Grantee can have multiple different Grant entries)
+
+2010-03-19  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* s3cmd: Enable bucket-level ACL setting
+	* s3cmd, S3/AccessLog.py, ...: Added [accesslog] command.
+	* s3cmd: Fix imports from S3.Utils
+
+2009-12-10  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* s3cmd: Path separator conversion on Windows hosts.
+
+2009-10-08  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* Released version 0.9.9.91
+	  -------------------------
+
+	* S3/PkgInfo.py: Updated to 0.9.9.91
+	* NEWS: News for 0.9.9.91
+
+2009-10-08  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* S3/S3.py: fixed reference to _max_retries.
+
+2009-10-06  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* Released version 0.9.9.90
+	  -------------------------
+
+	* S3/PkgInfo.py: Updated to 0.9.9.90
+	* NEWS: News for 0.9.9.90
+
+2009-10-06  Michal Ludvig  <mludvig@logix.net.nz>
+
+	* S3/S3.py: Introduce throttling on upload only after
+	  second failure. I.e. first retry at full speed.
+	* TODO: Updated with new ideas.
+
+2009-06-02  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: New [fixbucket] command for fixing invalid object
+	  names in a given Bucket. For instance names with &#x08; in
+	  them (not sure how people manage to upload them but they do).
+	* S3/S3.py, S3/Utils.py, S3/Config.py: Support methods for 
+	  the above, plus advise user to run 'fixbucket' when XML parsing 
+	  fails.
+	* NEWS: Updated.
+	
+2009-05-29  Michal Ludvig  <michal@logix.cz>
+
+	* S3/Utils.py: New function replace_nonprintables()
+	* s3cmd: Filter local filenames through the above function
+	  to avoid problems with uploaded filenames containing invalid 
+	  XML entities, eg &#08; 
+	* S3/S3.py: Warn if a non-printables char is passed to
+	  urlencode_string() - they should have been replaced earlier 
+	  in the processing.
+	* run-tests.py, TODO, NEWS: Updated.
+	* testsuite/crappy-file-name.tar.gz: Tarball with a crappy-named
+	  file. Untar for the testsuite.
+
+2009-05-29  Michal Ludvig  <michal@logix.cz>
+
+	* testsuite/blahBlah/*: Added files needed for run-tests.py
+
+2009-05-28  Michal Ludvig  <michal@logix.cz>
+
+	* S3/Utils.py (dateS3toPython): Be more relaxed about
+	  timestamps format.
+
+2009-05-28  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd, run-test.py, TODO, NEWS: Added --dry-run
+	  and --exclude/--include for [setacl].
+	* s3cmd, run-test.py, TODO, NEWS: Added --dry-run
+	  and --exclude/--include for [del].
+
+2009-05-28  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Support for recursive [cp] and [mv], including
+	  multiple-source arguments, --include/--exclude,
+	  --dry-run, etc.
+	* run-tests.py: Tests for the above.
+	* S3/S3.py: Preserve metadata (eg ACL or MIME type) 
+	  during [cp] and [mv].
+	* NEWS, TODO: Updated.
+
+2009-05-28  Michal Ludvig  <michal@logix.cz>
+
+	* run-tests.py: Added --verbose mode.
+
+2009-05-27  Michal Ludvig  <michal@logix.cz>
+
+	* NEWS: Added info about --verbatim.
+	* TODO: Added more tasks.
+
+2009-05-27  Michal Ludvig  <michal@logix.cz>
+
+	* S3/SortedDict.py: Add case-sensitive mode.
+	* s3cmd, S3/S3.py, S3/Config.py: Use SortedDict() in 
+	  case-sensitive mode to avoid dropping filenames
+	  differing only in capitalisation
+	* run-tests.py: Testsuite for the above.
+	* NEWS: Updated.
+
+2009-03-20  Michal Ludvig  <michal@logix.cz>
+
+	* S3/S3.py: Re-sign requests before retrial to avoid 
+	  RequestTimeTooSkewed errors on failed long-running
+	  uploads.
+	  BTW 'request' now has its own class S3Request.
+
+2009-03-04  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd, S3/Config.py, S3/S3.py: Support for --verbatim.
+
+2009-02-25  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Fixed "put file.ext s3://bkt" (ie just the bucket name).
+	* s3cmd: Fixed reporting of ImportError of S3 modules.
+	* s3cmd: Fixed Error: global name 'real_filename' is not defined
+
+2009-02-24  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: New command [sign]
+	* S3/Utils.py: New function sign_string()
+	* S3/S3.py, S3/CloudFront.py: Use sign_string().
+	* NEWS: Updated.
+
+2009-02-17  Michal Ludvig  <michal@logix.cz>
+
+	* Released version 0.9.9
+	  ----------------------
+
+	* S3/PkgInfo.py: Updated to 0.9.9
+	* NEWS: Compile a big news list for 0.9.9
+
+2009-02-17  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd.1: Document all the new options and commands.
+	* s3cmd, S3/Config.py: Updated some help texts. Removed
+	  option --debug-syncmatch along the way (because --dry-run
+	  with --debug is good enough).
+	* TODO: Updated.
+
+2009-02-16  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Check Python version >= 2.4 as soon as possible.
+
+2009-02-14  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd, S3/Config.py, S3/S3.py: Added --add-header option.
+	* NEWS: Documented --add-header.
+	* run-tests.py: Fixed for new messages.
+
+2009-02-14  Michal Ludvig  <michal@logix.cz>
+
+	* README: Updated for 0.9.9
+	* s3cmd, S3/PkgInfo.py, s3cmd.1: Replaced project 
+	  URLs with http://s3tools.org
+	* NEWS: Improved message.
+
+2009-02-12  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Added --list-md5 for 'ls' command.
+	* S3/Config.py: New setting list_md5
+
+2009-02-12  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Set Content-Length header for requests with 'body'.
+	* s3cmd: And send it for requests with no body as well...
+
+2009-02-02  Michal Ludvig  <michal@logix.cz>
+
+	* Released version 0.9.9-rc3
+	  --------------------------
+
+	* S3/PkgInfo.py, NEWS: Updated for 0.9.9-rc3
+
+2009-02-01  Michal Ludvig  <michal@logix.cz>
+
+	* S3/Exceptions.py: Correct S3Exception.__str__() to
+	  avoid crash in S3Error() subclass. Reported by '~t2~'.
+	* NEWS: Updated.
+
+2009-01-30  Michal Ludvig  <michal@logix.cz>
+
+	* Released version 0.9.9-rc2
+	  --------------------------
+
+	* S3/PkgInfo.py, NEWS, TODO: Updated for 0.9.9-rc2
+
+2009-01-30  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Under some circumstance s3cmd crashed
+	  when put/get/sync had 0 files to transmit. Fixed now.
+
+2009-01-28  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Output 'delete:' in --dry-run only when
+	  used together with --delete-removed. Otherwise
+	  the user will think that without --dry-run it
+	  would really delete the files.
+
+2009-01-27  Michal Ludvig  <michal@logix.cz>
+
+	* Released version 0.9.9-rc1
+	  --------------------------
+
+	* S3/PkgInfo.py, NEWS, TODO: Updated for 0.9.9-rc1
+
+2009-01-26  Michal Ludvig  <michal@logix.cz>
+
+	* Merged CloudFront support from branches/s3cmd-airlock
+	  See the ChangeLog in that branch for details.
+
+2009-01-25  W. Tell  <w_tell -at- sourceforge>
+
+	* s3cmd: Implemented --include and friends.
+
+2009-01-25  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Enabled --dry-run and --exclude for 'put' and 'get'.
+	* S3/Exceptions.py: Remove DeprecationWarning about 
+	  BaseException.message in Python 2.6
+	* s3cmd: Rewritten gpg_command() to use subprocess.Popen()
+	  instead of os.popen4() deprecated in 2.6
+	* TODO: Note about failing GPG.
+
+2009-01-22  Michal Ludvig  <michal@logix.cz>
+
+	* S3/Config.py: guess_mime_type = True (will affect new 
+	  installations only).
+
+2009-01-22  Michal Ludvig  <michal@logix.cz>
+
+	* Released version 0.9.9-pre5
+	  ---------------------------
+
+	* S3/PkgInfo.py, NEWS, TODO: Updated for 0.9.9-pre5
+
+2009-01-22  Michal Ludvig  <michal@logix.cz>
+
+	* run-tests.py: Updated paths for the new sync
+	  semantics.
+	* s3cmd, S3/S3.py: Small fixes to make testsuite happy.
+
+2009-01-21  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Migrated 'sync' local->remote to the new
+	  scheme with fetch_{local,remote}_list().
+	  Enabled --dry-run for 'sync'.
+
+2009-01-20  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Migrated 'sync' remote->local to the new
+	  scheme with fetch_{local,remote}_list().
+	  Changed fetch_remote_list() to return dict() compatible
+	  with fetch_local_list().
+	  Re-implemented --exclude / --include processing.
+	* S3/Utils.py: functions for parsing RFC822 dates (for HTTP
+	  header responses).
+	* S3/Config.py: placeholders for --include.
+
+2009-01-15  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd, S3/S3Uri.py, NEWS: Support for recursive 'put'.
+
+2009-01-13  Michal Ludvig  <michal@logix.cz>
+
+	* TODO: Updated.
+	* s3cmd: renamed (fetch_)remote_keys to remote_list and
+	  a few other renames for consistency.
+
+2009-01-08  Michal Ludvig  <michal@logix.cz>
+
+	* S3/S3.py: Some errors during file upload were incorrectly 
+	  interpreted as MD5 mismatch. (bug #2384990)
+	* S3/ACL.py: Move attributes from class to instance.
+	* run-tests.py: Tests for ACL.
+	* s3cmd: Minor messages changes.
+
+2009-01-07  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: New command 'setacl'.
+	* S3/S3.py: Implemented set_acl().
+	* S3/ACL.py: Fill in <Owner/> tag in ACL XML.
+	* NEWS: Info about 'setacl'.
+
+2009-01-07  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Factored remote_keys generation from cmd_object_get()
+	  to fetch_remote_keys().
+	* s3cmd: Display Public URL in 'info' for AnonRead objects.
+	* S3/ACL.py: Generate XML from a current list of Grantees
+
+2009-01-07  Michal Ludvig  <michal@logix.cz>
+
+	* S3/ACL.py: Keep ACL internally as a list of of 'Grantee' objects.
+	* S3/Utils.py: Fix crash in stripNameSpace() when the XML has no NS.
+
+2009-01-07  Michal Ludvig  <michal@logix.cz>
+
+	* S3/ACL.py: New object for handling ACL issues.
+	* S3/S3.py: Moved most of S3.get_acl() to ACL class.
+	* S3/Utils.py: Reworked XML helpers - remove XMLNS before 
+	  parsing the input XML to avoid having all Tags prefixed
+	  with {XMLNS} by ElementTree.
+
+2009-01-03  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Don't fail when neither $HOME nor %USERPROFILE% is set.
+	  (fixes #2483388)
+
+2009-01-01  W. Tell  <w_tell -at- sourceforge>
+
+	* S3/S3.py, S3/Utils.py: Use 'hashlib' instead of md5 and sha 
+	  modules to avoid Python 2.6 warnings.
+
+2008-12-31  Michal Ludvig  <michal@logix.cz>
+
+	* Released version 0.9.9-pre4
+	  ---------------------------
+
+2008-12-31  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Reworked internal handling of unicode vs encoded filenames.
+	  Should replace unknown characters with '?' instead of baling out.
+
+2008-12-31  Michal Ludvig  <michal@logix.cz>
+
+	* run-tests.py: Display system encoding in use.
+	* s3cmd: Print a nice error message when --exclude-from
+	  file is not readable.
+	* S3/PkgInfo.py: Bumped up version to 0.9.9-pre4
+	* S3/Exceptions.py: Added missing imports.
+	* NEWS: Updated.
+	* testsuite: reorganised UTF-8 files, added GBK encoding files,
+	  moved encoding-specific files to 'tar.gz' archives, removed 
+	  unicode dir.
+	* run-tests.py: Adapted to the above change.
+	* run-tests.sh: removed.
+	* testsuite/exclude.encodings: Added.
+	* run-tests.py: Don't assume utf-8, use preferred encoding 
+	  instead.
+	* s3cmd, S3/Utils.py, S3/Exceptions.py, S3/Progress.py,
+	  S3/Config.py, S3/S3.py: Added --encoding switch and 
+	  Config.encoding variable. Don't assume utf-8 for filesystem
+	  and terminal output anymore.
+	* s3cmd: Avoid ZeroDivisionError on fast links.
+	* s3cmd: Unicodised all info() output.
+
+2008-12-30  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Replace unknown Unicode characters with '?'
+	  to avoid UnicodeEncodeError's. Also make all output strings
+	  unicode.
+	* run-tests.py: Exit on failed test. Fixed order of tests.
+
+2008-12-29  Michal Ludvig  <michal@logix.cz>
+
+	* TODO, NEWS: Updated
+	* s3cmd: Improved wildcard get.
+	* run-tests.py: Improved testsuite, added parameters support
+	  to run only specified tests, cleaned up win/posix integration.
+	* S3/Exception.py: Python 2.4 doesn't automatically set 
+	  Exception.message.
+
+2008-12-29  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd, run-tests.py: Make it work on Windows.
+
+2008-12-26  Michal Ludvig  <michal@logix.cz>
+
+	* setup.cfg: Remove explicit install prefix. That should fix
+	  Mac OS X and Windows "setup.py install" runs.
+
+2008-12-22  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd, S3/S3.py, S3/Progress.py: Display "[X of Y]"
+	  in --progress mode.
+	* s3cmd, S3/Config.py: Implemented recursive [get].
+	  Added --skip-existing option for [get] and [sync]. 
+
+2008-12-17  Michal Ludvig  <michal@logix.cz>
+
+	* TODO: Updated
+
+2008-12-14  Michal Ludvig  <michal@logix.cz>
+
+	* S3/Progress.py: Restructured import Utils to avoid import
+	  conflicts.
+
+2008-12-12  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Better Exception output. Print sys.path on ImportError,
+	  don't print backtrace on KeyboardInterrupt
+
+2008-12-11  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Support for multiple sources in 'get' command.
+
+2008-12-10  Michal Ludvig  <michal@logix.cz>
+
+	* TODO: Updated list.
+	* s3cmd: Don't display download/upload completed message
+	  in --progress mode.
+	* S3/S3.py: Pass src/dst names down to Progress class.
+	* S3/Progress.py: added new class ProgressCR - apparently 
+	  ProgressANSI doesn't work on MacOS-X (and perhaps elsewhere).
+	* S3/Config.py: Default progress meter is now ProgressCR
+	* s3cmd: Updated email address for reporting bugs.
+
+2008-12-02  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd, S3/S3.py, NEWS: Support for (non-)recursive 'ls'
+
+2008-12-01  Michal Ludvig  <michal@logix.cz>
+
+	* Released version 0.9.9-pre3
+	  ---------------------------
+
+	* S3/PkgInfo.py: Bumped up version to 0.9.9-pre3
+
+2008-12-01  Michal Ludvig  <michal@logix.cz>
+
+	* run-tests.py: Added a lot of new tests.
+	* testsuite/etc/logo.png: New file.
+
+2008-11-30  Michal Ludvig  <michal@logix.cz>
+
+	* S3/S3.py: object_get() -- make start_position argument optional.
+
+2008-11-29  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Delete local files with "sync --delete-removed"
+
+2008-11-25  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd, S3/Progress.py: Fixed Unicode output in Progress meter.
+	* s3cmd: Fixed 'del --recursive' without prefix (i.e. all objects).
+	* TODO: Updated list.
+	* upload-to-sf.sh: Helper script.
+	* S3/PkgInfo.py: Bumped up version to 0.9.9-pre2+svn
+
+2008-11-24  Michal Ludvig  <michal@logix.cz>
+
+	* Released version 0.9.9-pre2
+	  ------------------------
+
+	* S3/PkgInfo.py: Bumped up version to 0.9.9-pre2
+	* NEWS: Added 0.9.9-pre2
+
+2008-11-24  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd, s3cmd.1, S3/S3.py: Display or don't display progress meter
+	  default depends on whether we're on TTY (console) or not.
+
+2008-11-24  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Fixed 'get' conflict.
+	* s3cmd.1, TODO: Document 'mv' command.
+
+2008-11-24  Michal Ludvig  <michal@logix.cz>
+
+	* S3/S3.py, s3cmd, S3/Config.py, s3cmd.1: Added --continue for
+	  'get' command, improved 'get' failure resiliency.
+	* S3/Progress.py: Support for progress meter not starting in 0.
+	* S3/S3.py: improved retrying in send_request() and send_file()
+
+2008-11-24  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd, S3/S3.py, NEWS: "s3cmd mv" for moving objects
+
+2008-11-24  Michal Ludvig  <michal@logix.cz>
+
+	* S3/Utils.py: Common XML parser.
+	* s3cmd, S3/Exceptions.py: Print info message on Error.
+
+2008-11-21  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Support for 'cp' command.
+	* S3/S3.py: Added S3.object.copy() method.
+	* s3cmd.1: Document 'cp' command.
+	* NEWS: Let everyone know ;-)
+	Thanks Andrew Ryan for a patch proposal!
+	https://sourceforge.net/forum/forum.php?thread_id=2346987&forum_id=618865
+
+2008-11-17  Michal Ludvig  <michal@logix.cz>
+
+	* S3/Progress.py: Two progress meter implementations.
+	* S3/Config.py, s3cmd: New --progress / --no-progress parameters
+	  and Config() members.
+	* S3/S3.py: Call Progress() in send_file()/recv_file()
+	* NEWS: Let everyone know ;-)
+
+2008-11-16  Michal Ludvig  <michal@logix.cz>
+
+	* NEWS: Fetch 0.9.8.4 release news from 0.9.8.x branch.
+
+2008-11-16  Michal Ludvig  <michal@logix.cz>
+
+	Merge from 0.9.8.x branch, rel 251:
+	* S3/S3.py: Adjusting previous commit (orig 249) - it's not a good idea 
+	  to retry ALL failures. Especially not those code=4xx where AmazonS3 
+	  servers are not happy with our requests.
+	Merge from 0.9.8.x branch, rel 249:
+	* S3/S3.py, S3/Exception.py: Re-issue failed requests in S3.send_request()
+	Merge from 0.9.8.x branch, rel 248:
+	* s3cmd: Don't leak open filehandles in sync. Thx Patrick Linskey for report.
+	Merge from 0.9.8.x branch, rel 247:
+	* s3cmd: Re-raise the right exception.
+	Merge from 0.9.8.x branch, rel 246:
+	* s3cmd, S3/S3.py, S3/Exceptions.py: Don't abort 'sync' or 'put' on files
+	  that can't be opened (e.g. Permission denied). Print a warning and skip over
+	  instead.
+	Merge from 0.9.8.x branch, rel 245:
+	* S3/S3.py: Escape parameters in strings. Fixes sync to and 
+	  ls of directories with spaces. (Thx Lubomir Rintel from Fedora Project)
+	Merge from 0.9.8.x branch, rel 244:
+	* s3cmd: Unicode brainfuck again. This time force all output
+	  in UTF-8, will see how many complaints we'll get...
+
+2008-09-16  Michal Ludvig  <michal@logix.cz>
+
+	* NEWS: s3cmd 0.9.8.4 released from branches/0.9.8.x SVN branch.
+
+2008-09-16  Michal Ludvig  <michal@logix.cz>
+
+	* S3/S3.py: Don't run into ZeroDivisionError when speed counter
+	  returns 0s elapsed on upload/download file.
+
+2008-09-15  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd, S3/S3.py, S3/Utils.py, S3/S3Uri.py, S3/Exceptions.py:
+	  Yet another Unicode round. Unicodised all command line arguments 
+	  before processing.
+
+2008-09-15  Michal Ludvig  <michal@logix.cz>
+
+	* S3/S3.py: "s3cmd mb" can create upper-case buckets again
+	  in US. Non-US (e.g. EU) bucket names must conform to strict
+	  DNS-rules.
+	* S3/S3Uri.py: Display public URLs correctly for non-DNS buckets.
+
+2008-09-10  Michal Ludvig  <michal@logix.cz>
+
+	* testsuite, run-tests.py: Added testsuite with first few tests.
+
+2008-09-10  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd, S3/S3Uri.py, S3/S3.py: All internal representations of
+	  S3Uri()s are Unicode (i.e. not UTF-8 but type()==unicode). It 
+	  still doesn't work on non-UTF8 systems though.
+
+2008-09-04  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Rework UTF-8 output to keep sys.stdout untouched (or it'd
+	  break 's3cmd get' to stdout for binary files).
+
+2008-09-03  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd, S3/S3.py, S3/Config.py: Removed --use-old-connect-method
+	  again. Autodetect the need for old connect method instead.
+
+2008-09-03  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd, S3/S3.py: Make --verbose mode more useful and default 
+	  mode less verbose.
+
+2008-09-03  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd, S3/Config.py: [rb] Allow removal of non-empty buckets
+	  with --force.
+	  [mb, rb] Allow multiple arguments, i.e. create or remove
+	  multiple buckets at once.
+	  [del] Perform recursive removal with --recursive (or -r).
+
+2008-09-01  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Refuse 'sync' together with '--encrypt'.
+	* S3/S3.py: removed object_{get,put,delete}_uri() functions
+	  and made object_{get,put,delete}() accept URI instead of 
+	  bucket/object parameters.
+
+2008-09-01  Michal Ludvig  <michal@logix.cz>
+
+	* S3/PkgInfo.py: Bumped up version to 0.9.9-pre1
+
+2008-09-01  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd, S3/S3.py, S3/Config.py: Allow access to upper-case
+	  named buckets again with --use-old-connect-method 
+	  (uses http://s3.amazonaws.com/bucket/object instead of
+	  http://bucket.s3.amazonaws.com/object)
+
+2008-08-19  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Always output UTF-8, even on output redirects.
+
+2008-08-01  Michal Ludvig  <michal@logix.cz>
+
+	* TODO: Add some items
+
+2008-07-29  Michal Ludvig  <michal@logix.cz>
+
+	* Released version 0.9.8.3
+	  ------------------------
+
+2008-07-29  Michal Ludvig  <michal@logix.cz>
+
+	* S3/PkgInfo.py: Bumped up version to 0.9.8.3
+	* NEWS: Added 0.9.8.3
+
+2008-07-29  Michal Ludvig  <michal@logix.cz>
+
+	* S3/Utils.py (hash_file_md5): Hash files in 32kB chunks
+	  instead of reading it all up to a memory first to avoid
+	  OOM on large files.
+
+2008-07-07  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd.1: couple of syntax fixes from Mikhail Gusarov
+
+2008-07-03  Michal Ludvig  <michal@logix.cz>
+
+	* Released version 0.9.8.2
+	  ------------------------
+
+2008-07-03  Michal Ludvig  <michal@logix.cz>
+
+	* S3/PkgInfo.py: Bumped up version to 0.9.8.2
+	* NEWS: Added 0.9.8.2
+	* s3cmd: Print version info on 'unexpected error' output.
+
+2008-06-30  Michal Ludvig  <michal@logix.cz>
+
+	* S3/S3.py: Re-upload when Amazon doesn't send ETag
+	  in PUT response. It happens from time to time for
+	  unknown reasons. Thanks "Burtc" for report and
+	  "hermzz" for fix.
+
+2008-06-27  Michal Ludvig  <michal@logix.cz>
+
+	* Released version 0.9.8.1
+	  ------------------------
+
+2008-06-27  Michal Ludvig  <michal@logix.cz>
+
+	* S3/PkgInfo.py: Bumped up version to 0.9.8.1
+	* NEWS: Added 0.9.8.1
+	* s3cmd: make 'cfg' global
+	* run-tests.sh: Sort-of testsuite
+
+2008-06-23  Michal Ludvig  <michal@logix.cz>
+
+	* Released version 0.9.8
+	  ----------------------
+
+2008-06-23  Michal Ludvig  <michal@logix.cz>
+
+	* S3/PkgInfo.py: Bumped up version to 0.9.8
+	* NEWS: Added 0.9.8
+	* TODO: Removed completed tasks
+
+2008-06-23  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Last-minute compatibility fixes for Python 2.4
+	* s3cmd, s3cmd.1: --debug-exclude is an alias for --debug-syncmatch
+	* s3cmd: Don't require $HOME env variable to be set.
+	  Fixes #2000133
+	* s3cmd: Wrapped all execution in a try/except block
+	  to catch all exceptions and ask for a report.
+
+2008-06-18  Michal Ludvig  <michal@logix.cz>
+
+	* S3/PkgInfo.py: Version 0.9.8-rc3
+
+2008-06-18  Michal Ludvig  <michal@logix.cz>
+
+	* S3/S3.py: Bucket name can't contain upper-case letters (S3/DNS limitation).
+
+2008-06-12  Michal Ludvig  <michal@logix.cz>
+
+	* S3/PkgInfo.py: Version 0.9.8-rc2
+
+2008-06-12  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd, s3cmd.1: Added GLOB (shell-style wildcard) exclude, renamed
+	  orig regexp-style --exclude to --rexclude
+
+2008-06-11  Michal Ludvig  <michal@logix.cz>
+
+	* S3/PkgInfo.py: Version 0.9.8-rc1
+
+2008-06-11  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Remove python 2.5 specific code (try/except/finally 
+	  block) and make s3cmd compatible with python 2.4 again.
+	* s3cmd, S3/Config.py, s3cmd.1: Added --exclude-from and --debug-syncmatch
+	  switches for sync.
+
+2008-06-10  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Added --exclude switch for sync.
+	* s3cmd.1, NEWS: Document --exclude
+
+2008-06-05  Michal Ludvig  <michal@logix.cz>
+
+	* Released version 0.9.7
+	  ----------------------
+
+2008-06-05  Michal Ludvig  <michal@logix.cz>
+
+	* S3/PkgInfo.py: Bumped up version to 0.9.7
+	* NEWS: Added 0.9.7
+	* TODO: Removed completed tasks
+	* s3cmd, s3cmd.1: Updated help texts, 
+	  removed --dry-run option as it's not implemented.
+	
+2008-06-05  Michal Ludvig  <michal@logix.cz>
+
+	* S3/Config.py: Store more file attributes in sync to S3.
+	* s3cmd: Make sync remote2local more error-resilient.
+
+2008-06-04  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Implemented cmd_sync_remote2local() for restoring
+	  backup from S3 to a local filesystem
+	* S3/S3.py: S3.object_get_uri() now requires writable stream 
+	  and not a path name.
+	* S3/Utils.py: Added mkdir_with_parents()
+
+2008-06-04  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Refactored cmd_sync() in preparation 
+	  for remote->local sync.
+
+2008-04-30  Michal Ludvig  <michal@logix.cz>
+
+	* s3db, S3/SimpleDB.py: Implemented almost full SimpleDB API.
+
+2008-04-29  Michal Ludvig  <michal@logix.cz>
+
+	* s3db, S3/SimpleDB.py: Initial support for Amazon SimpleDB. 
+	  For now implements ListDomains() call and most of the 
+	  infrastructure required for request creation.
+
+2008-04-29  Michal Ludvig  <michal@logix.cz>
+
+	* S3/Exceptions.py: Exceptions moved out of S3.S3
+	* S3/SortedDict.py: rewritten from scratch to preserve
+	  case of keys while still sorting in case-ignore mode.
+
+2008-04-28  Michal Ludvig  <michal@logix.cz>
+
+	* S3/S3.py: send_file() now computes MD5 sum of the file
+	  being uploaded, compares with ETag returned by Amazon
+	  and retries upload if they don't match.
+
+2008-03-05  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd, S3/S3.py, S3/Utils.py: Throttle upload speed and retry 
+	  when upload failed.
+	  Report download/upload speed and time elapsed.
+
+2008-02-28  Michal Ludvig  <michal@logix.cz>
+
+	* Released version 0.9.6
+	  ----------------------
+
+2008-02-28  Michal Ludvig  <michal@logix.cz>
+
+	* S3/PkgInfo.py: bumped up version to 0.9.6
+	* NEWS: What's new in 0.9.6
+
+2008-02-27  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd, s3cmd.1: Updated help and man page.
+	* S3/S3.py, S3/Utils.py, s3cmd: Support for 's3cmd info' command.
+	* s3cmd: Fix crash when 'sync'ing files with unresolvable owner uid/gid.
+	* S3/S3.py, S3/Utils.py: open files in binary mode (otherwise windows
+	  users have problems).
+	* S3/S3.py: modify 'x-amz-date' format (problems reported on MacOS X). 
+	  Thanks Jon Larkowski for fix.
+
+2008-02-27  Michal Ludvig  <michal@logix.cz>
+
+	* TODO: Updated wishlist.
+
+2008-02-11  Michal Ludvig  <michal@logix.cz>
+
+	* S3/S3.py: Properly follow RedirectPermanent responses for EU buckets
+	* S3/S3.py: Create public buckets with -P (#1837328)
+	* S3/S3.py, s3cmd: Correctly display public URL on uploads.
+	* S3/S3.py, S3/Config.py: Support for MIME types. Both 
+	default and guessing. Fixes bug #1872192 (Thanks Martin Herr)
+
+2007-11-13  Michal Ludvig  <michal@logix.cz>
+
+	* Released version 0.9.5
+	  ----------------------
+
+2007-11-13  Michal Ludvig  <michal@logix.cz>
+
+	* S3/S3.py: Support for buckets stored in Europe, access now 
+	  goes via <bucket>.s3.amazonaws.com where possible.
+
+2007-11-12  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Support for storing file attributes (like ownership, 
+	  mode, etc) in sync operation.
+	* s3cmd, S3/S3.py: New command 'ib' to get information about 
+	  bucket (only 'LocationConstraint' supported for now).
+
+2007-10-01  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Fix typo in argument name (patch
+	  from Kim-Minh KAPLAN, SF #1804808)
+
+2007-09-25  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Exit with error code on error (patch
+	  from Kim-Minh KAPLAN, SF #1800583)
+
+2007-09-25  Michal Ludvig  <michal@logix.cz>
+
+	* S3/S3.py: Don't fail if bucket listing doesn't have
+	  <IsTruncated> node.
+	* s3cmd: Create ~/.s3cfg with 0600 permissions.
+
+2007-09-13  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Improved 'sync'
+	* S3/S3.py: Support for buckets with over 1000 objects.
+
+2007-09-03  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Small tweaks to --configure workflow.
+
+2007-09-02  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Initial support for 'sync' operation. For
+	  now only local->s3 direction. In this version doesn't
+	  work well with non-ASCII filenames and doesn't support
+	  encryption.
+
+2007-08-24  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd, S3/Util.py: More ElementTree imports cleanup
+
+2007-08-19  Michal Ludvig  <michal@logix.cz>
+
+	* NEWS: Added news for 0.9.5
+
+2007-08-19  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Better handling of multiple arguments for put, get and del
+
+2007-08-14  Michal Ludvig  <michal@logix.cz>
+
+	* setup.py, S3/Utils.py: Try import xml.etree.ElementTree
+	  or elementtree.ElementTree module.
+
+2007-08-14  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd.1: Add info about --encrypt parameter.
+
+2007-08-14  Michal Ludvig  <michal@logix.cz>
+
+	* S3/PkgInfo.py: Bump up version to 0.9.5-pre
+
+2007-08-13  Michal Ludvig  <michal@logix.cz>
+
+	* Released version 0.9.4
+	  ----------------------
+
+2007-08-13  Michal Ludvig  <michal@logix.cz>
+
+	* S3/S3.py: Added function urlencode_string() that encodes
+	  non-ascii characters in object name before sending it to S3.
+
+2007-08-13  Michal Ludvig  <michal@logix.cz>
+
+	* README: Updated Amazon S3 pricing overview
+
+2007-08-13  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd, S3/Config.py, S3/S3.py: HTTPS support
+
+2007-07-20  Michal Ludvig  <michal@logix.cz>
+
+	* setup.py: Check correct Python version and ElementTree availability.
+
+2007-07-05  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: --configure support for Proxy
+	* S3/S3.py: HTTP proxy support from
+	  John D. Rowell <jdrowell@exerciseyourbrain.com>
+
+2007-06-19  Michal Ludvig  <michal@logix.cz>
+
+	* setup.py: Check for S3CMD_PACKAGING and don't install
+	  manpages and docs if defined.
+	* INSTALL: Document the above change.
+	* MANIFEST.in: Include uncompressed manpage
+
+2007-06-17  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Added encryption key support to --configure
+	* S3/PkgInfo.py: Bump up version to 0.9.4-pre
+	* setup.py: Cleaned up some rpm-specific stuff that 
+	  caused problems to Debian packager Mikhail Gusarov
+	* setup.cfg: Removed [bdist_rpm] section
+	* MANIFEST.in: Include S3/*.py
+
+2007-06-16  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd.1: Syntax fixes from Mikhail Gusarov <dottedmag@dottedmag.net>
+
+2007-05-27  Michal Ludvig  <michal@logix.cz>
+
+	* Support for on-the-fly GPG encryption.
+
+2007-05-26  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd.1: Add info about "s3cmd du" command.
+
+2007-05-26  Michal Ludvig  <michal@logix.cz>
+
+	* Released version 0.9.3
+	  ----------------------
+
+2007-05-26  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Patch from Basil Shubin <basil.shubin@gmail.com>
+	  adding support for "s3cmd du" command.
+	* s3cmd: Modified output format of "s3cmd du" to conform
+	  with unix "du".
+	* setup.cfg: Require Python 2.5 in RPM. Otherwise it needs
+	  to require additional python modules (e.g. ElementTree)
+	  which may have different names in different distros. It's 
+	  indeed still possible to manually install s3cmd with 
+	  Python 2.4 and appropriate modules.
+
+2007-04-09  Michal Ludvig  <michal@logix.cz>
+
+	* Released version 0.9.2
+	  ----------------------
+
+2007-04-09  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd.1: Added manpage
+	* Updated infrastructure files to create "better"
+	  distribution archives.
+
+2007-03-26  Michal Ludvig  <michal@logix.cz>
+
+	* setup.py, S3/PkgInfo.py: Move package info out of setup.py
+	* s3cmd: new parameter --version
+	* s3cmd, S3/S3Uri.py: Output public HTTP URL for objects
+	  stored with Public ACL.
+	  
+2007-02-28  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Verify supplied accesskey and secretkey
+	  in interactive configuration path.
+	* S3/Config.py: Hide access key and secret key
+	  from debug output.
+	* S3/S3.py: Modify S3Error exception to work
+	  in python 2.4 (=> don't expect Exception is
+	  a new-style class).
+	* s3cmd: Updated for the above change.
+
+2007-02-19  Michal Ludvig  <michal@logix.cz>
+
+	* NEWS, INSTALL, README, setup.py: Added
+	  more documentation.
+
+2007-02-19  Michal Ludvig  <michal@logix.cz>
+
+	* S3/S3.py, s3cmd: New feature - allow "get" to stdout
+
+2007-02-19  Michal Ludvig  <michal@logix.cz>
+
+	* S3/S3fs.py: Removed (development moved to branch s3fs-devel).
+
+2007-02-08  Michal Ludvig  <michal@logix.cz>
+
+	* S3/S3fs.py: 
+	  - Implemented mknod()
+	  - Can create directory structure
+	  - Rewritten to use SQLite3. Currently can create
+	    the filesystem, and a root inode.
+
+2007-02-07  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd (from /s3py:74): Renamed SVN top-level project
+	  s3py to s3cmd
+
+2007-02-07  Michal Ludvig  <michal@logix.cz>
+
+	* setup.cfg: Only require Python 2.4, not 2.5
+	* S3/Config.py: Removed show_uri - no longer needed,
+	  it's now default
+
+2007-02-07  Michal Ludvig  <michal@logix.cz>
+
+	* setup.py
+	  - Version 0.9.1
+
+2007-02-07  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd: Change all "exit()" calls to "sys.exit()"
+	  and allow for python 2.4
+	* S3/S3.py: Removed dependency on hashlib -> allow for python 2.4
+
+2007-01-27  Michal Ludvig  <michal@logix.cz>
+
+	* S3/S3.py, S3/S3Uri.py: Case insensitive regex in S3Uri.py
+
+2007-01-26  Michal Ludvig  <michal@logix.cz>
+
+	* S3/S3fs.py: Added support for storing/loading inodes.
+	  No data yet however.
+
+2007-01-26  Michal Ludvig  <michal@logix.cz>
+
+	* S3/S3fs.py: Initial version of S3fs module. 
+	  Can create filesystem via "S3fs.mkfs()"
+
+2007-01-26  Michal Ludvig  <michal@logix.cz>
+
+	* S3/BidirMap.py, S3/Config.py, S3/S3.py, S3/S3Uri.py,
+	  S3/SortedDict.py, S3/Utils.py, s3cmd: Added headers with
+	  copyright to all files
+	* S3/S3.py, S3/S3Uri.py: Removed S3.compose_uri(), introduced
+	  S3UriS3.compose_uri() instead.
+
+2007-01-26  Michal Ludvig  <michal@logix.cz>
+
+	* S3/S3.py, S3/S3Uri.py, s3cmd: 
+	  - Converted all users of parse_uri to S3Uri class API
+	  - Removed "cp" command again. Will have to use 'put'
+	    and 'get' for now.
+
+2007-01-25  Michal Ludvig  <michal@logix.cz>
+
+	* S3/S3Uri.py: New module S3/S3Uri.py
+	* S3/S3.py, s3cmd: Converted "put" operation to use
+	  the new S3Uri class.
+
+2007-01-24  Michal Ludvig  <michal@logix.cz>
+
+	* S3/S3.py
+	* s3cmd
+	  - Added 'cp' command
+	  - Renamed parse_s3_uri to parse_uri (this will go away anyway)
+
+2007-01-19  Michal Ludvig  <michal@logix.cz>
+
+	* setup.cfg
+	* setup.py
+	  - Include README into tarballs
+
+2007-01-19  Michal Ludvig  <michal@logix.cz>
+
+	* README
+	  - Added comprehensive README file
+
+2007-01-19  Michal Ludvig  <michal@logix.cz>
+
+	* setup.cfg
+	* setup.py
+	  - Added configuration for setup.py sdist
+
+2007-01-19  Michal Ludvig  <michal@logix.cz>
+
+	* S3/Config.py
+	* s3cmd
+	  - Added interactive configurator (--configure)
+	  - Added config dumper (--dump-config)
+	  - Improved --help output
+
+2007-01-19  Michal Ludvig  <michal@logix.cz>
+
+	* setup.cfg
+	* setup.py
+	  Added info for building RPM packages.
+
+2007-01-18  Michal Ludvig  <michal@logix.cz>
+
+	* S3/Config.py
+	* S3/S3.py
+	* s3cmd
+	  Moved class Config from S3/S3.py to S3/Config.py
+
+2007-01-18  Michal Ludvig  <michal@logix.cz>
+
+	* S3/Config.py (from /s3py/trunk/S3/ConfigParser.py:47)
+	* S3/ConfigParser.py
+	* S3/S3.py
+	  Renamed S3/ConfigParser.py to S3/Config.py
+
+2007-01-18  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd
+	  Added info about homepage
+
+2007-01-17  Michal Ludvig  <michal@logix.cz>
+
+	* S3/S3.py
+	* s3cmd
+	  - Use prefix for listings if specified.
+	  - List all commands in --help
+
+2007-01-16  Michal Ludvig  <michal@logix.cz>
+
+	* S3/S3.py
+	* s3cmd
+	  Major rework of Config class:
+	  - Renamed from AwsConfig to Config
+	  - Converted to Singleton (see Config.__new__() and an article on
+	    Wikipedia)
+	  - No more explicit listing of options - use introspection to get them
+	    (class variables that of type str, int or bool that don't start with
+	    underscore)
+	  - Check values read from config file and verify their type.
+	  
+	  Added OptionMimeType and -m/-M options. Not yet implemented
+	  functionality in the rest of S3/S3.py
+
+2007-01-15  Michal Ludvig  <michal@logix.cz>
+
+	* S3/S3.py
+	* s3cmd
+	  - Merged list-buckets and bucket-list-objects operations into
+	    a single 'ls' command.
+	  - New parameter -P for uploading publicly readable objects
+
+2007-01-14  Michal Ludvig  <michal@logix.cz>
+
+	* s3.py
+	* setup.py
+	  Renamed s3.py to s3cmd (take 2)
+
+2007-01-14  Michal Ludvig  <michal@logix.cz>
+
+	* s3cmd (from /s3py/trunk/s3.py:45)
+	  Renamed s3.py to s3cmd
+
+2007-01-14  Michal Ludvig  <michal@logix.cz>
+
+	* S3
+	* S3/S3.py
+	* s3.py
+	* setup.py
+	  All classes from s3.py go to S3/S3.py
+	  Added setup.py
+
+2007-01-14  Michal Ludvig  <michal@logix.cz>
+
+	* s3.py
+	  Minor fix S3.utils -> S3.Utils
+
+2007-01-14  Michal Ludvig  <michal@logix.cz>
+
+	* .svnignore
+	* BidirMap.py
+	* ConfigParser.py
+	* S3
+	* S3/BidirMap.py (from /s3py/trunk/BidirMap.py:35)
+	* S3/ConfigParser.py (from /s3py/trunk/ConfigParser.py:38)
+	* S3/SortedDict.py (from /s3py/trunk/SortedDict.py:35)
+	* S3/Utils.py (from /s3py/trunk/utils.py:39)
+	* S3/__init__.py
+	* SortedDict.py
+	* s3.py
+	* utils.py
+	  Moved modules to their own package
+
+2007-01-12  Michal Ludvig  <michal@logix.cz>
+
+	* s3.py
+	  Added "del" command
+	  Converted all (?) commands to accept s3-uri
+	  Added -u/--show-uri parameter
+
+2007-01-11  Michal Ludvig  <michal@logix.cz>
+
+	* s3.py
+	  Verify MD5 on received files
+	  Improved upload of multiple files
+	  Initial S3-URI support (more tbd)
+
+2007-01-11  Michal Ludvig  <michal@logix.cz>
+
+	* s3.py
+	  Minor fixes:
+	  - store names of parsed files in AwsConfig
+	  - Print total size with upload/download
+
+2007-01-11  Michal Ludvig  <michal@logix.cz>
+
+	* s3.py
+	* utils.py
+	  Added support for sending and receiving files.
+
+2007-01-11  Michal Ludvig  <michal@logix.cz>
+
+	* ConfigParser.py
+	* s3.py
+	  List all Objects in all Buckets command
+	  Yet another logging improvement
+	  Version check for Python 2.5 or higher
+
+2007-01-11  Michal Ludvig  <michal@logix.cz>
+
+	* ConfigParser.py
+	* s3.py
+	* utils.py
+	  Added ConfigParser
+	  Improved setting logging levels
+	  It can now quite reliably list buckets and objects
+
+2007-01-11  Michal Ludvig  <michal@logix.cz>
+
+	* .svnignore
+	  Added ignore list
+
+2007-01-11  Michal Ludvig  <michal@logix.cz>
+
+	* .svnignore
+	* BidirMap.py
+	* SortedDict.py
+	* s3.py
+	* utils.py
+	  Initial import
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..1c89483
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,44 @@
+SHELL  := /bin/bash
+VERSION := $(shell /usr/bin/env python2 -c 'from S3 import PkgInfo;print PkgInfo.version')
+SPEC   := s3cmd.spec
+COMMIT := $(shell git rev-parse HEAD)
+SHORTCOMMIT := $(shell git rev-parse --short=8 HEAD)
+TARBALL = s3cmd-$(VERSION)-$(SHORTCOMMIT).tar.gz
+
+release:
+	python2 setup.py register sdist upload --sign
+
+clean:
+	-rm -rf s3cmd-*.tar.gz *.rpm *~ $(SPEC)
+	-find . -name \*.pyc -exec rm \{\} \;
+	-find . -name \*.pyo -exec rm \{\} \;
+
+$(SPEC): $(SPEC).in
+	sed -e 's/##VERSION##/$(VERSION)/' \
+            -e 's/##COMMIT##/$(COMMIT)/' \
+            -e 's/##SHORTCOMMIT##/$(SHORTCOMMIT)/' \
+            $(SPEC).in > $(SPEC)
+
+# fixme: python setup.py sdist also generates a PKG-INFO file which we don't have using straight git archive
+git-tarball:
+	git archive --format tar --prefix s3cmd-$(COMMIT)/ HEAD S3/ s3cmd NEWS README.md LICENSE INSTALL setup.cfg s3cmd.1 setup.py| gzip -c > $(TARBALL)
+
+# Use older digest algorithms for local rpmbuilds, as EPEL5 and
+# earlier releases need this.  When building using mock for a
+# particular target, it will use the proper (newer) digests if that
+# target supports it.
+git-rpm: clean git-tarball $(SPEC)
+	tmp_dir=`mktemp -d` ; \
+	mkdir -p $${tmp_dir}/{BUILD,RPMS,SRPMS,SPECS,SOURCES} ; \
+	cp $(TARBALL) $${tmp_dir}/SOURCES ; \
+	cp $(SPEC) $${tmp_dir}/SPECS ; \
+	cd $${tmp_dir} > /dev/null 2>&1; \
+	rpmbuild -ba --define "_topdir $${tmp_dir}" \
+	  --define "_source_filedigest_algorithm 0" \
+	  --define "_binary_filedigest_algorithm 0" \
+	  --define "dist %{nil}" \
+          SPECS/$(SPEC) ; \
+	cd - > /dev/null 2>&1; \
+	cp $${tmp_dir}/RPMS/noarch/* $${tmp_dir}/SRPMS/* . ; \
+	rm -rf $${tmp_dir} ; \
+	rpmlint *.rpm *.spec
diff --git a/PKG-INFO b/PKG-INFO
deleted file mode 100644
index d4ec2b2..0000000
--- a/PKG-INFO
+++ /dev/null
@@ -1,45 +0,0 @@
-Metadata-Version: 1.1
-Name: s3cmd
-Version: 2.0.2
-Summary: Command line tool for managing Amazon S3 and CloudFront services
-Home-page: http://s3tools.org
-Author: github.com/mdomsch, github.com/matteobar, github.com/fviard
-Author-email: s3tools-bugs@lists.sourceforge.net
-License: GNU GPL v2+
-Description-Content-Type: UNKNOWN
-Description: 
-        
-        S3cmd lets you copy files from/to Amazon S3
-        (Simple Storage Service) using a simple to use
-        command line client. Supports rsync-like backup,
-        GPG encryption, and more. Also supports management
-        of Amazon's CloudFront content delivery network.
-        
-        
-        Authors:
-        --------
-            Michal Ludvig  <michal@logix.cz>
-        
-Platform: UNKNOWN
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Environment :: Console
-Classifier: Environment :: MacOS X
-Classifier: Environment :: Win32 (MS Windows)
-Classifier: Intended Audience :: End Users/Desktop
-Classifier: Intended Audience :: System Administrators
-Classifier: License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)
-Classifier: Natural Language :: English
-Classifier: Operating System :: MacOS :: MacOS X
-Classifier: Operating System :: Microsoft :: Windows
-Classifier: Operating System :: POSIX
-Classifier: Operating System :: Unix
-Classifier: Programming Language :: Python :: 2
-Classifier: Programming Language :: Python :: 2.6
-Classifier: Programming Language :: Python :: 2.7
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.3
-Classifier: Programming Language :: Python :: 3.4
-Classifier: Programming Language :: Python :: 3.5
-Classifier: Programming Language :: Python :: 3.6
-Classifier: Topic :: System :: Archiving
-Classifier: Topic :: Utilities
diff --git a/README.md b/README.md
index c83af8c..708554b 100644
--- a/README.md
+++ b/README.md
@@ -335,7 +335,7 @@ For more information refer to the [S3cmd / S3tools homepage](http://s3tools.org)
 
 ### License
 
-Copyright (C) 2007-2017 TGRMN Software - http://www.tgrmn.com - and contributors
+Copyright (C) 2007-2019 TGRMN Software - http://www.tgrmn.com - and contributors
 
 This program is free software; you can redistribute it and/or modify
 it under the terms of the GNU General Public License as published by
diff --git a/RELEASE_INSTRUCTIONS b/RELEASE_INSTRUCTIONS
new file mode 100644
index 0000000..4da4c9f
--- /dev/null
+++ b/RELEASE_INSTRUCTIONS
@@ -0,0 +1,89 @@
+Instructions for s3cmd maintainers for doing a tagged release and publishing on sourceforge.net.
+In the below, 1.5.0-rc1 is the example version being released.  Salt to taste.
+
+1.  Make a fresh clone of the repo:
+   git clone ssh+git://git@github.com/s3tools/s3cmd s3cmd-release
+
+2.  Run ./run-tests.py to verify it all works OK.
+
+3.  Update version to 1.5.0-rc1 in S3/PkgInfo.py
+
+4.  Update manpage with ./s3cmd --help | ./format-manpage.pl > s3cmd.1
+
+5.  Update NEWS with info about new features. Best to extract from git
+    with:  git log --no-merges v1.5.0-beta1..
+    (list all tags with: "git tag")
+
+
+6.  Verify the above changes:
+    git diff --check && git diff
+    git status
+    (The only changed files should be NEWS, s3cmd.1, S3/PkgInfo.py)
+
+7.  Remove testsuite (intentionally inaccessible files break the next
+    step):
+    chmod -R +rwx testsuite/permission-tests/permission-denied-dir && rm -rf testsuite
+
+8.  If everything worked fine commit the above changes:
+    git commit -a -m "Update version to 1.5.0-rc1"
+
+9. Tag it:
+    git tag --sign -a v1.5.0-rc1 -m "Tag v1.5.0-rc1"
+
+10. Push back to github:
+    git push --tags
+
+11.  Build the "Source Distribution":
+    python setup.py sdist upload --sign
+    -> Creates dist/s3cmd-1.5.0-rc1.tar.gz* and dist/s3cmd-1.5.0-rc1.zip*
+
+The upload step publishes it to PyPi, so 'pip install s3cmd' downloads
+the new version.  It also generated the GPG signatures, so you don't
+have to do that manually, and have them available for the next steps.
+
+
+GitHub releases
+
+1. Login to github.com/s3tools/s3cmd
+
+2.  You will see your new tag in the Tags tab.  Click "Draft a new
+    release".
+
+3.  In the 'Tag version' drop-down, select your new tag.
+
+4.  In the 'Release title' field, name it v1.5.0-rc1.
+
+5.  In the 'Describe this release' text box, add in this release's
+    notes from the NEWS file.
+
+6.  Upload all 4 files from dist/.
+
+7.  Click "Publish release"
+
+
+
+SourceForge releases
+
+1.  Login to sf.net
+
+2.  Go to https://sourceforge.net/p/s3tools/admin/
+
+3.  Files -> s3cmd -> Add Folder -> Enter "1.5.0-rc1" -> Create
+
+4.  Go into 1.5.0-rc1 -> Add File -> upload dist/s3cmd-1.5.0-rc1.tar.gz
+
+5.  Once uploaded click the little "i" icon on the right and click
+    "Select all" under "Default Download For:" to update the default
+    download button to this new version.
+
+6.  Give it a few minutes and verify on the Summary page that the
+    download button has been updated to s3cmd-1.5.0-rc1.tar.gz
+
+Now it's time to send out an announcement email to
+s3tools-announce@lists.sourceforge.net and
+s3tools-general@lists.sourceforge.net (check out the s3cmd-announce
+archive for an inspiration :)
+
+And the last step is to ask the respective distribution maintainers
+(Fedora, Debian, Ubuntu, OpenSuse, ...?) to update the package in
+their builds.
diff --git a/S3/ACL.py b/S3/ACL.py
index 7f4d245..48b8c20 100644
--- a/S3/ACL.py
+++ b/S3/ACL.py
@@ -16,7 +16,7 @@ try:
 except ImportError:
     import elementtree.ElementTree as ET
 
-PY3 = (sys.version_info >= (3,0))
+PY3 = (sys.version_info >= (3, 0))
 
 class Grantee(object):
     ALL_USERS_URI = "http://acs.amazonaws.com/groups/global/AllUsers"
diff --git a/S3/CloudFront.py b/S3/CloudFront.py
index cc81b61..b6e3839 100644
--- a/S3/CloudFront.py
+++ b/S3/CloudFront.py
@@ -22,12 +22,17 @@ except ImportError:
 from .S3 import S3
 from .Config import Config
 from .Exceptions import *
-from .Utils import getTreeFromXml, appendXmlTextNode, getDictFromTree, dateS3toPython, getBucketFromHostname, getHostnameFromBucket, deunicodise, urlencode_string, convertHeaderTupleListToDict
+from .Utils import (getTreeFromXml, appendXmlTextNode, getDictFromTree,
+                    dateS3toPython, getBucketFromHostname,
+                    getHostnameFromBucket, deunicodise, urlencode_string,
+                    convertHeaderTupleListToDict, encode_to_s3, decode_from_s3)
 from .Crypto import sign_string_v2
 from .S3Uri import S3Uri, S3UriS3
 from .ConnMan import ConnMan
 from .SortedDict import SortedDict
 
+PY3 = (sys.version_info >= (3, 0))
+
 cloudfront_api_version = "2010-11-01"
 cloudfront_resource = "/%(api_ver)s/distribution" % { 'api_ver' : cloudfront_api_version }
 
@@ -176,7 +181,7 @@ class DistributionConfig(object):
         else:
             self.info['Logging'] = None
 
-    def __str__(self):
+    def get_printable_tree(self):
         tree = ET.Element("DistributionConfig")
         tree.attrib['xmlns'] = DistributionConfig.xmlns
 
@@ -197,7 +202,18 @@ class DistributionConfig(object):
             appendXmlTextNode("Bucket", getHostnameFromBucket(self.info['Logging'].bucket()), logging_el)
             appendXmlTextNode("Prefix", self.info['Logging'].object(), logging_el)
             tree.append(logging_el)
-        return ET.tostring(tree)
+        return tree
+
+    def __unicode__(self):
+        return decode_from_s3(ET.tostring(self.get_printable_tree()))
+
+    def __str__(self):
+        if PY3:
+            # Return unicode
+            return ET.tostring(self.get_printable_tree(), encoding="unicode")
+        else:
+            # Return bytes
+            return ET.tostring(self.get_printable_tree())
 
 class Invalidation(object):
     ## Example:
@@ -285,15 +301,25 @@ class InvalidationBatch(object):
     def get_reference(self):
         return self.reference
 
-    def __str__(self):
+    def get_printable_tree(self):
         tree = ET.Element("InvalidationBatch")
-
         for path in self.paths:
             if len(path) < 1 or path[0] != "/":
                 path = "/" + path
             appendXmlTextNode("Path", urlencode_string(path), tree)
         appendXmlTextNode("CallerReference", self.reference, tree)
-        return ET.tostring(tree)
+        return tree
+
+    def __unicode__(self):
+        return decode_from_s3(ET.tostring(self.get_printable_tree()))
+
+    def __str__(self):
+        if PY3:
+            # Return unicode
+            return ET.tostring(self.get_printable_tree(), encoding="unicode")
+        else:
+            # Return bytes
+            return ET.tostring(self.get_printable_tree())
 
 class CloudFront(object):
     operations = {
@@ -564,7 +590,7 @@ class CloudFront(object):
 
     def sign_request(self, headers):
         string_to_sign = headers['x-amz-date']
-        signature = sign_string_v2(string_to_sign)
+        signature = decode_from_s3(sign_string_v2(encode_to_s3(string_to_sign)))
         debug(u"CloudFront.sign_request('%s') = %s" % (string_to_sign, signature))
         return signature
 
@@ -603,7 +629,7 @@ class CloudFront(object):
                     continue
 
                 if CloudFront.dist_list.get(distListIndex, None) is None:
-                    CloudFront.dist_list[distListIndex] = set() 
+                    CloudFront.dist_list[distListIndex] = set()
 
                 CloudFront.dist_list[distListIndex].add(d.uri())
 
diff --git a/S3/Config.py b/S3/Config.py
index e6e76f0..698310d 100644
--- a/S3/Config.py
+++ b/S3/Config.py
@@ -205,6 +205,7 @@ class Config(object):
     # Maximum sleep duration for throtte / limitrate.
     # s3 will timeout if a request/transfer is stuck for more than a short time
     throttle_max = 100
+    public_url_use_https = False
 
     ## Creating a singleton
     def __new__(self, configfile = None, access_key=None, secret_key=None, access_token=None):
@@ -260,13 +261,14 @@ class Config(object):
             resp = conn.getresponse()
             files = resp.read()
             if resp.status == 200 and len(files)>1:
-                conn.request('GET', "/latest/meta-data/iam/security-credentials/%s"%files.decode('UTF-8'))
+                conn.request('GET', "/latest/meta-data/iam/security-credentials/%s" % files.decode('utf-8'))
                 resp=conn.getresponse()
                 if resp.status == 200:
-                    creds=json.load(resp)
-                    Config().update_option('access_key', creds['AccessKeyId'].encode('ascii'))
-                    Config().update_option('secret_key', creds['SecretAccessKey'].encode('ascii'))
-                    Config().update_option('access_token', creds['Token'].encode('ascii'))
+                    resp_content = config_unicodise(resp.read())
+                    creds=json.loads(resp_content)
+                    Config().update_option('access_key', config_unicodise(creds['AccessKeyId']))
+                    Config().update_option('secret_key', config_unicodise(creds['SecretAccessKey']))
+                    Config().update_option('access_token', config_unicodise(creds['Token']))
                 else:
                     raise IOError
             else:
diff --git a/S3/Crypto.py b/S3/Crypto.py
index b5ed20b..1c98c6d 100644
--- a/S3/Crypto.py
+++ b/S3/Crypto.py
@@ -63,6 +63,7 @@ def sign_string_v2(string_to_sign):
 
     Useful for REST authentication. See http://s3.amazonaws.com/doc/s3-developer-guide/RESTAuthentication.html
     string_to_sign should be utf-8 "bytes".
+    and returned signature will be utf-8 encoded "bytes".
     """
     secret_key = Config.Config().secret_key
     signature = base64.encodestring(hmac.new(encode_to_s3(secret_key), string_to_sign, sha1).digest()).strip()
diff --git a/S3/S3.py b/S3/S3.py
index 8e454e4..13da91b 100644
--- a/S3/S3.py
+++ b/S3/S3.py
@@ -382,8 +382,6 @@ class S3(object):
             bucket_location = bucket_location.strip()
             if bucket_location.upper() == "EU":
                 bucket_location = bucket_location.upper()
-            else:
-                bucket_location = bucket_location.lower()
             body  = "<CreateBucketConfiguration><LocationConstraint>"
             body += bucket_location
             body += "</LocationConstraint></CreateBucketConfiguration>"
@@ -968,7 +966,7 @@ class S3(object):
         request = self.create_request("BUCKET_LIST", bucket = uri.bucket(),
                                       uri_params = {'policy': None})
         response = self.send_request(request)
-        return response['data']
+        return decode_from_s3(response['data'])
 
     def set_policy(self, uri, policy):
         headers = SortedDict(ignore_case = True)
@@ -991,7 +989,7 @@ class S3(object):
         request = self.create_request("BUCKET_LIST", bucket = uri.bucket(),
                                       uri_params = {'cors': None})
         response = self.send_request(request)
-        return response['data']
+        return decode_from_s3(response['data'])
 
     def set_cors(self, uri, cors):
         headers = SortedDict(ignore_case = True)
diff --git a/S3/S3Uri.py b/S3/S3Uri.py
index 2422412..0139d3d 100644
--- a/S3/S3Uri.py
+++ b/S3/S3Uri.py
@@ -90,10 +90,13 @@ class S3UriS3(S3Uri):
         return check_bucket_name_dns_support(Config.Config().host_bucket, self._bucket)
 
     def public_url(self):
+        public_url_protocol = "http"
+        if Config.Config().public_url_use_https:
+            public_url_protocol = "https"
         if self.is_dns_compatible():
-            return "http://%s.%s/%s" % (self._bucket, Config.Config().host_base, self._object)
+            return "%s://%s.%s/%s" % (public_url_protocol, self._bucket, Config.Config().host_base, self._object)
         else:
-            return "http://%s/%s/%s" % (Config.Config().host_base, self._bucket, self._object)
+            return "%s://%s/%s/%s" % (public_url_protocol, Config.Config().host_base, self._bucket, self._object)
 
     def host_name(self):
         if self.is_dns_compatible():
diff --git a/S3/Utils.py b/S3/Utils.py
index 6bcfeb2..b9f4fd5 100644
--- a/S3/Utils.py
+++ b/S3/Utils.py
@@ -101,7 +101,7 @@ def stripNameSpace(xml):
 __all__.append("stripNameSpace")
 
 def getTreeFromXml(xml):
-    xml, xmlns = stripNameSpace(xml)
+    xml, xmlns = stripNameSpace(encode_to_s3(xml))
     try:
         tree = ET.fromstring(xml)
         if xmlns:
@@ -194,12 +194,12 @@ __all__.append("dateRFC822toUnix")
 def formatSize(size, human_readable = False, floating_point = False):
     size = floating_point and float(size) or int(size)
     if human_readable:
-        coeffs = ['k', 'M', 'G', 'T']
+        coeffs = ['K', 'M', 'G', 'T']
         coeff = ""
         while size > 2048:
             size /= 1024
             coeff = coeffs.pop(0)
-        return (size, coeff)
+        return (floating_point and float(size) or int(size), coeff)
     else:
         return (size, "")
 __all__.append("formatSize")
diff --git a/TODO b/TODO
new file mode 100644
index 0000000..48bc7f0
--- /dev/null
+++ b/TODO
@@ -0,0 +1,52 @@
+TODO list for s3cmd project
+===========================
+
+- Before 1.0.0 (or asap after 1.0.0)
+  - Make 'sync s3://bkt/some-filename local/other-filename' work
+    (at the moment it'll always download).
+  - Enable --exclude for [ls].
+  - Allow change /tmp to somewhere else
+  - With --guess-mime use 'magic' module if available.
+  - Support --preserve for [put] and [get]. Update manpage.
+  - Don't let --continue fail if the file is already fully downloaded.
+  - Option --mime-type should set mime type with 'cp' and 'mv'. 
+    If possible --guess-mime-type should do as well.
+  - Make upload throttling configurable.
+  - Allow removing 'DefaultRootObject' from CloudFront distributions.
+  - Get s3://bucket/non-existent creates empty local file 'non-existent'
+  - Add 'geturl' command, both Unicode and urlencoded output.
+  - Add a command for generating "Query String Authentication" URLs.
+  - Support --acl-grant (together with --acl-public/private) for [put] and [sync]
+  - Filter 's3cmd ls' output by --bucket-location=
+
+- After 1.0.0
+  - Sync must backup non-files as well. At least directories, 
+    symlinks and device nodes.
+  - Speed up upload / download with multiple threads.
+    (see http://blog.50projects.com/p/s3cmd-modifications.html)
+  - Sync should be able to update metadata (UID, timestamps, etc)
+    if only these change (i.e. same content, different metainfo).
+  - If GPG fails error() and exit. If un-GPG fails save the 
+    file with .gpg extension.
+  - Keep backup files remotely on put/sync-to if requested 
+    (move the old 'object' to e.g. 'object~' and only then upload 
+     the new one). Could be more advanced to keep, say, last 5 
+     copies, etc.
+  - Memory consumption on very large upload sets is terribly high.
+  - Implement per-bucket (or per-regexp?) default settings. For
+    example regarding ACLs, encryption, etc.
+
+- Implement GPG for sync
+  (it's not that easy since it won't be easy to compare
+   the encrypted-remote-object size with local file. 
+   either we can store the metadata in a dedicated file 
+   where we face a risk of inconsistencies, or we'll store
+   the metadata encrypted in each object header where we'll
+   have to do a large number of object/HEAD requests. tough 
+   call).
+  Or we can only compare local timestamps with remote object 
+  timestamps. If the local one is older we'll *assume* it 
+  hasn't been changed. But what to do about remote2local sync?
+
+- Keep man page up to date and write some more documentation
+  - Yeah, right ;-)
diff --git a/debian/changelog b/debian/changelog
index 1c6f0e9..fe72d86 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,8 +1,12 @@
-s3cmd (2.0.2-2) UNRELEASED; urgency=medium
+s3cmd (2.0.2+git20190522.ae6cdde-1) UNRELEASED; urgency=medium
 
+  [ Ondřej Nový ]
   * d/changelog: Remove trailing whitespaces
 
- -- Ondřej Nový <onovy@debian.org>  Mon, 01 Oct 2018 09:38:41 +0200
+  [ Debian Janitor ]
+  * New upstream snapshot.
+
+ -- Debian Janitor <janitor@jelmer.uk>  Tue, 20 Aug 2019 01:05:27 +0000
 
 s3cmd (2.0.2-1) unstable; urgency=medium
 
diff --git a/format-manpage.pl b/format-manpage.pl
new file mode 100755
index 0000000..17695f4
--- /dev/null
+++ b/format-manpage.pl
@@ -0,0 +1,223 @@
+#!/usr/bin/perl
+
+# Format s3cmd.1 manpage
+# Usage:
+#   s3cmd --help | format-manpage.pl > s3cmd.1
+
+use strict;
+
+my $commands = "";
+my $cfcommands = "";
+my $wscommands = "";
+my $options = "";
+
+while (<>) {
+	if (/^Commands:/) {
+		while (<>) {
+			last if (/^\s*$/);
+			my ($desc, $cmd, $cmdline);
+			($desc = $_) =~ s/^\s*(.*?)\s*$/$1/;
+			($cmdline = <>) =~ s/^\s*s3cmd (.*?) (.*?)\s*$/s3cmd \\fB$1\\fR \\fI$2\\fR/;
+			$cmd = $1;
+			$cmdline =~ s/-/\\-/g;
+			if ($cmd =~ /^cf/) {
+				$cfcommands .= ".TP\n$cmdline\n$desc\n";
+			} elsif ($cmd =~ /^ws/) {
+				$wscommands .= ".TP\n$cmdline\n$desc\n";
+			} else {
+				$commands .= ".TP\n$cmdline\n$desc\n";
+			}
+		}
+	}
+	if (/^Options:/) {
+		my ($opt, $desc);
+		while (<>) {
+			last if (/^\s*$/);
+			$_ =~ s/(.*?)\s*$/$1/;
+			$desc = "";
+			$opt = "";
+			if (/^  (-.*)/) {
+				$opt = $1;
+				if ($opt =~ /  /) {
+					($opt, $desc) = split(/\s\s+/, $opt, 2);
+				}
+				$opt =~ s/(-[^ ,=\.]+)/\\fB$1\\fR/g;
+				# escape all single dashes
+				$opt =~ s/-/\\-/g;
+				$options .= ".TP\n$opt\n";
+			} else {
+				$_ =~ s/\s*(.*?)\s*$/$1/;
+				$_ =~ s/(--[^ ,=\.]+)/\\fB$1\\fR/g;
+				# escape word 'Cache-Control'
+				$_ =~ s/'(\S+-\S+)'/\\&'$1'/g;
+				# escape all single dashes
+				$_ =~ s/-/\\-/g;
+				$desc .= $_;
+			}
+			if ($desc) {
+				$options .= "$desc\n";
+			}
+		}
+	}
+}
+print "
+.\\\" !!! IMPORTANT: This file is generated from s3cmd \\-\\-help output using format-manpage.pl
+.\\\" !!!            Do your changes either in s3cmd file or in 'format\\-manpage.pl' otherwise
+.\\\" !!!            they will be overwritten!
+
+.TH s3cmd 1
+.SH NAME
+s3cmd \\- tool for managing Amazon S3 storage space and Amazon CloudFront content delivery network
+.SH SYNOPSIS
+.B s3cmd
+[\\fIOPTIONS\\fR] \\fICOMMAND\\fR [\\fIPARAMETERS\\fR]
+.SH DESCRIPTION
+.PP
+.B s3cmd
+is a command line client for copying files to/from 
+Amazon S3 (Simple Storage Service) and performing other
+related tasks, for instance creating and removing buckets,
+listing objects, etc.
+
+.SH COMMANDS
+.PP
+.B s3cmd
+can do several \\fIactions\\fR specified by the following \\fIcommands\\fR.
+$commands
+
+.PP
+Commands for static WebSites configuration
+$wscommands
+
+.PP
+Commands for CloudFront management
+$cfcommands
+
+.SH OPTIONS
+.PP
+Some of the below specified options can have their default 
+values set in 
+.B s3cmd
+config file (by default \$HOME/.s3cmd). As it's a simple text file 
+feel free to open it with your favorite text editor and do any
+changes you like. 
+$options
+
+.SH EXAMPLES
+One of the most powerful commands of \\fIs3cmd\\fR is \\fBs3cmd sync\\fR used for 
+synchronising complete directory trees to or from remote S3 storage. To some extent 
+\\fBs3cmd put\\fR and \\fBs3cmd get\\fR share a similar behaviour with \\fBsync\\fR.
+.PP
+Basic usage common in backup scenarios is as simple as:
+.nf
+	s3cmd sync /local/path/ s3://test\\-bucket/backup/
+.fi
+.PP
+This command will find all files under /local/path directory and copy them 
+to corresponding paths under s3://test\\-bucket/backup on the remote side.
+For example:
+.nf
+	/local/path/\\fBfile1.ext\\fR         \\->  s3://bucket/backup/\\fBfile1.ext\\fR
+	/local/path/\\fBdir123/file2.bin\\fR  \\->  s3://bucket/backup/\\fBdir123/file2.bin\\fR
+.fi
+.PP
+However if the local path doesn't end with a slash the last directory's name
+is used on the remote side as well. Compare these with the previous example:
+.nf
+	s3cmd sync /local/path s3://test\\-bucket/backup/
+.fi
+will sync:
+.nf
+	/local/\\fBpath/file1.ext\\fR         \\->  s3://bucket/backup/\\fBpath/file1.ext\\fR
+	/local/\\fBpath/dir123/file2.bin\\fR  \\->  s3://bucket/backup/\\fBpath/dir123/file2.bin\\fR
+.fi
+.PP
+To retrieve the files back from S3 use inverted syntax:
+.nf
+	s3cmd sync s3://test\\-bucket/backup/ ~/restore/
+.fi
+that will download files:
+.nf
+	s3://bucket/backup/\\fBfile1.ext\\fR         \\->  ~/restore/\\fBfile1.ext\\fR
+	s3://bucket/backup/\\fBdir123/file2.bin\\fR  \\->  ~/restore/\\fBdir123/file2.bin\\fR
+.fi
+.PP
+Without the trailing slash on source the behaviour is similar to 
+what has been demonstrated with upload:
+.nf
+	s3cmd sync s3://test\\-bucket/backup ~/restore/
+.fi
+will download the files as:
+.nf
+	s3://bucket/\\fBbackup/file1.ext\\fR         \\->  ~/restore/\\fBbackup/file1.ext\\fR
+	s3://bucket/\\fBbackup/dir123/file2.bin\\fR  \\->  ~/restore/\\fBbackup/dir123/file2.bin\\fR
+.fi
+.PP
+All source file names, the bold ones above, are matched against \\fBexclude\\fR 
+rules and those that match are then re\\-checked against \\fBinclude\\fR rules to see
+whether they should be excluded or kept in the source list.
+.PP
+For the purpose of \\fB\\-\\-exclude\\fR and \\fB\\-\\-include\\fR matching only the 
+bold file names above are used. For instance only \\fBpath/file1.ext\\fR is tested
+against the patterns, not \\fI/local/\\fBpath/file1.ext\\fR
+.PP
+Both \\fB\\-\\-exclude\\fR and \\fB\\-\\-include\\fR work with shell\\-style wildcards (a.k.a. GLOB).
+For a greater flexibility s3cmd provides Regular\\-expression versions of the two exclude options 
+named \\fB\\-\\-rexclude\\fR and \\fB\\-\\-rinclude\\fR. 
+The options with ...\\fB\\-from\\fR suffix (eg \\-\\-rinclude\\-from) expect a filename as
+an argument. Each line of such a file is treated as one pattern.
+.PP
+There is only one set of patterns built from all \\fB\\-\\-(r)exclude(\\-from)\\fR options
+and similarly for include variant. Any file excluded with eg \\-\\-exclude can 
+be put back with a pattern found in \\-\\-rinclude\\-from list.
+.PP
+Run s3cmd with \\fB\\-\\-dry\\-run\\fR to verify that your rules work as expected. 
+Use together with \\fB\\-\\-debug\\fR get detailed information
+about matching file names against exclude and include rules.
+.PP
+For example to exclude all files with \".jpg\" extension except those beginning with a number use:
+.PP
+	\\-\\-exclude '*.jpg' \\-\\-rinclude '[0\\-9].*\\.jpg'
+.PP
+To exclude all files except \"*.jpg\" extension, use:
+.PP
+	\\-\\-exclude '*' \\-\\-include '*.jpg'
+.PP
+To exclude local directory 'somedir', be sure to use a trailing forward slash, as such:
+.PP
+	\\-\\-exclude 'somedir/'
+.PP
+
+.SH SEE ALSO
+For the most up to date list of options run: 
+.B s3cmd \\-\\-help
+.br
+For more info about usage, examples and other related info visit project homepage at:
+.B http://s3tools.org
+.SH AUTHOR
+Written by Michal Ludvig and contributors
+.SH CONTACT, SUPPORT
+Preferred way to get support is our mailing list:
+.br
+.I s3tools\\-general\@lists.sourceforge.net
+.br
+or visit the project homepage:
+.br
+.B http://s3tools.org
+.SH REPORTING BUGS
+Report bugs to 
+.I s3tools\\-bugs\@lists.sourceforge.net
+.SH COPYRIGHT
+Copyright \\(co 2007\\-2015 TGRMN Software \\- http://www.tgrmn.com \\- and contributors
+.br
+.SH LICENSE
+This program is free software; you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation; either version 2 of the License, or
+(at your option) any later version.
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU General Public License for more details.
+.br
+";
diff --git a/magic b/magic
new file mode 100644
index 0000000..7eda929
--- /dev/null
+++ b/magic
@@ -0,0 +1,63 @@
+# Additional magic for common web file types
+
+0	string/b	{\ "	JSON data
+!:mime application/json
+0	string/b	{\ }	JSON data
+!:mime application/json
+0	string/b	[	JSON data
+!:mime application/json
+
+0	search/4000	function
+>&0	search/32/b	)\ {	JavaScript program
+!:mime application/javascript
+
+0	search/4000	@media	CSS stylesheet
+!:mime text/css
+0	search/4000	@import	CSS stylesheet
+!:mime text/css
+0	search/4000	@namespace	CSS stylesheet
+!:mime text/css
+0	search/4000/b	{\ background	CSS stylesheet
+!:mime text/css
+0	search/4000/b	{\ border	CSS stylesheet
+!:mime text/css
+0	search/4000/b	{\ bottom	CSS stylesheet
+!:mime text/css
+0	search/4000/b	{\ color	CSS stylesheet
+!:mime text/css
+0	search/4000/b	{\ cursor	CSS stylesheet
+!:mime text/css
+0	search/4000/b	{\ direction	CSS stylesheet
+!:mime text/css
+0	search/4000/b	{\ display	CSS stylesheet
+!:mime text/css
+0	search/4000/b	{\ float	CSS stylesheet
+!:mime text/css
+0	search/4000/b	{\ font	CSS stylesheet
+!:mime text/css
+0	search/4000/b	{\ height	CSS stylesheet
+!:mime text/css
+0	search/4000/b	{\ left	CSS stylesheet
+!:mime text/css
+0	search/4000/b	{\ line-	CSS stylesheet
+!:mime text/css
+0	search/4000/b	{\ margin	CSS stylesheet
+!:mime text/css
+0	search/4000/b	{\ padding	CSS stylesheet
+!:mime text/css
+0	search/4000/b	{\ position	CSS stylesheet
+!:mime text/css
+0	search/4000/b	{\ right	CSS stylesheet
+!:mime text/css
+0	search/4000/b	{\ text-	CSS stylesheet
+!:mime text/css
+0	search/4000/b	{\ top	CSS stylesheet
+!:mime text/css
+0	search/4000/b	{\ width	CSS stylesheet
+!:mime text/css
+0	search/4000/b	{\ visibility	CSS stylesheet
+!:mime text/css
+0	search/4000/b	{\ -moz-	CSS stylesheet
+!:mime text/css
+0	search/4000/b	{\ -webkit-	CSS stylesheet
+!:mime text/css
diff --git a/run-tests-minio.py b/run-tests-minio.py
new file mode 100755
index 0000000..e3115da
--- /dev/null
+++ b/run-tests-minio.py
@@ -0,0 +1,805 @@
+#!/usr/bin/env python2
+# -*- coding=utf-8 -*-
+
+## Amazon S3cmd - testsuite
+## Author: Michal Ludvig <michal@logix.cz>
+##         http://www.logix.cz/michal
+## License: GPL Version 2
+## Copyright: TGRMN Software and contributors
+
+from __future__ import absolute_import, print_function
+
+import sys
+import os
+import re
+import time
+from subprocess import Popen, PIPE, STDOUT
+import locale
+import getpass
+import S3.Exceptions
+import S3.Config
+from S3.ExitCodes import *
+
+PY3 = (sys.version_info >= (3,0))
+
+try:
+    unicode
+except NameError:
+    # python 3 support
+    # In python 3, unicode -> str, and str -> bytes
+    unicode = str
+
+count_pass = 0
+count_fail = 0
+count_skip = 0
+
+test_counter = 0
+run_tests = []
+exclude_tests = []
+
+verbose = False
+
+encoding = locale.getpreferredencoding()
+if not encoding:
+    print("Guessing current system encoding failed. Consider setting $LANG variable.")
+    sys.exit(1)
+else:
+    print("System encoding: " + encoding)
+
+try:
+    unicode
+except NameError:
+    # python 3 support
+    # In python 3, unicode -> str, and str -> bytes
+    unicode = str
+
+def unicodise(string, encoding = "utf-8", errors = "replace"):
+    """
+    Convert 'string' to Unicode or raise an exception.
+    Config can't use toolbox from Utils that is itself using Config
+    """
+    if type(string) == unicode:
+        return string
+
+    try:
+        return unicode(string, encoding, errors)
+    except UnicodeDecodeError:
+        raise UnicodeDecodeError("Conversion to unicode failed: %r" % string)
+
+# https://stackoverflow.com/questions/377017/test-if-executable-exists-in-python/377028#377028
+def which(program):
+    def is_exe(fpath):
+        return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
+
+    fpath, fname = os.path.split(program)
+    if fpath:
+        if is_exe(program):
+            return program
+    else:
+        for path in os.environ["PATH"].split(os.pathsep):
+            path = path.strip('"')
+            exe_file = os.path.join(path, program)
+            if is_exe(exe_file):
+                return exe_file
+
+    return None
+
+if which('curl') is not None:
+    have_curl = True
+else:
+    have_curl = False
+
+config_file = None
+if os.getenv("HOME"):
+    config_file = os.path.join(unicodise(os.getenv("HOME"), encoding), ".s3cfg")
+elif os.name == "nt" and os.getenv("USERPROFILE"):
+    config_file = os.path.join(unicodise(os.getenv("USERPROFILE"), encoding),
+                               os.getenv("APPDATA") and unicodise(os.getenv("APPDATA"), encoding)
+                               or 'Application Data',
+                               "s3cmd.ini")
+
+
+## Unpack testsuite/ directory
+if not os.path.isdir('testsuite') and os.path.isfile('testsuite.tar.gz'):
+    os.system("tar -xz -f testsuite.tar.gz")
+if not os.path.isdir('testsuite'):
+    print("Something went wrong while unpacking testsuite.tar.gz")
+    sys.exit(1)
+
+os.system("tar -xf testsuite/checksum.tar -C testsuite")
+if not os.path.isfile('testsuite/checksum/cksum33.txt'):
+    print("Something went wrong while unpacking testsuite/checkum.tar")
+    sys.exit(1)
+
+## Fix up permissions for permission-denied tests
+os.chmod("testsuite/permission-tests/permission-denied-dir", 0o444)
+os.chmod("testsuite/permission-tests/permission-denied.txt", 0o000)
+
+## Patterns for Unicode tests
+patterns = {}
+patterns['UTF-8'] = u"ŪņЇЌœđЗ/☺ unicode € rocks ™"
+patterns['GBK'] = u"12月31日/1-特色條目"
+
+have_encoding = os.path.isdir('testsuite/encodings/' + encoding)
+if not have_encoding and os.path.isfile('testsuite/encodings/%s.tar.gz' % encoding):
+    os.system("tar xvz -C testsuite/encodings -f testsuite/encodings/%s.tar.gz" % encoding)
+    have_encoding = os.path.isdir('testsuite/encodings/' + encoding)
+
+if have_encoding:
+    #enc_base_remote = "%s/xyz/%s/" % (pbucket(1), encoding)
+    enc_pattern = patterns[encoding]
+else:
+    print(encoding + " specific files not found.")
+
+def unicodise(string):
+    if type(string) == unicode:
+        return string
+
+    return unicode(string, "UTF-8", "replace")
+
+def deunicodise(string):
+    if type(string) != unicode:
+        return string
+
+    return string.encode("UTF-8", "replace")
+
+if not os.path.isdir('testsuite/crappy-file-name'):
+    os.system("tar xvz -C testsuite -f testsuite/crappy-file-name.tar.gz")
+    # TODO: also unpack if the tarball is newer than the directory timestamp
+    #       for instance when a new version was pulled from SVN.
+
+def test(label, cmd_args = [], retcode = 0, must_find = [], must_not_find = [], must_find_re = [], must_not_find_re = [], stdin = None):
+    def command_output():
+        print("----")
+        print(" ".join([" " in arg and "'%s'" % arg or arg for arg in cmd_args]))
+        print("----")
+        print(stdout)
+        print("----")
+
+    def failure(message = ""):
+        global count_fail
+        if message:
+            message = u"  (%r)" % message
+        print(u"\x1b[31;1mFAIL%s\x1b[0m" % (message))
+        count_fail += 1
+        command_output()
+        #return 1
+        sys.exit(1)
+    def success(message = ""):
+        global count_pass
+        if message:
+            message = "  (%r)" % message
+        print("\x1b[32;1mOK\x1b[0m%s" % (message))
+        count_pass += 1
+        if verbose:
+            command_output()
+        return 0
+    def skip(message = ""):
+        global count_skip
+        if message:
+            message = "  (%r)" % message
+        print("\x1b[33;1mSKIP\x1b[0m%s" % (message))
+        count_skip += 1
+        return 0
+    def compile_list(_list, regexps = False):
+        if regexps == False:
+            _list = [re.escape(item) for item in _list]
+
+        return [re.compile(item, re.MULTILINE) for item in _list]
+
+    global test_counter
+    test_counter += 1
+    print(("%3d  %s " % (test_counter, label)).ljust(30, "."), end=' ')
+    sys.stdout.flush()
+
+    if run_tests.count(test_counter) == 0 or exclude_tests.count(test_counter) > 0:
+        return skip()
+
+    if not cmd_args:
+        return skip()
+
+    p = Popen(cmd_args, stdin = stdin, stdout = PIPE, stderr = STDOUT, universal_newlines = True, close_fds = True)
+    stdout, stderr = p.communicate()
+    if type(retcode) not in [list, tuple]: retcode = [retcode]
+    if p.returncode not in retcode:
+        return failure("retcode: %d, expected one of: %s" % (p.returncode, retcode))
+
+    if type(must_find) not in [ list, tuple ]: must_find = [must_find]
+    if type(must_find_re) not in [ list, tuple ]: must_find_re = [must_find_re]
+    if type(must_not_find) not in [ list, tuple ]: must_not_find = [must_not_find]
+    if type(must_not_find_re) not in [ list, tuple ]: must_not_find_re = [must_not_find_re]
+
+    find_list = []
+    find_list.extend(compile_list(must_find))
+    find_list.extend(compile_list(must_find_re, regexps = True))
+    find_list_patterns = []
+    find_list_patterns.extend(must_find)
+    find_list_patterns.extend(must_find_re)
+
+    not_find_list = []
+    not_find_list.extend(compile_list(must_not_find))
+    not_find_list.extend(compile_list(must_not_find_re, regexps = True))
+    not_find_list_patterns = []
+    not_find_list_patterns.extend(must_not_find)
+    not_find_list_patterns.extend(must_not_find_re)
+
+    for index in range(len(find_list)):
+        stdout = unicodise(stdout)
+        match = find_list[index].search(stdout)
+        if not match:
+            return failure("pattern not found: %s" % find_list_patterns[index])
+    for index in range(len(not_find_list)):
+        match = not_find_list[index].search(stdout)
+        if match:
+            return failure("pattern found: %s (match: %s)" % (not_find_list_patterns[index], match.group(0)))
+
+    return success()
+
+def test_s3cmd(label, cmd_args = [], **kwargs):
+    if not cmd_args[0].endswith("s3cmd"):
+        cmd_args.insert(0, "python")
+        cmd_args.insert(1, "s3cmd")
+        if config_file:
+            cmd_args.insert(2, "-c")
+            cmd_args.insert(3, config_file)
+
+    return test(label, cmd_args, **kwargs)
+
+def test_mkdir(label, dir_name):
+    if os.name in ("posix", "nt"):
+        cmd = ['mkdir', '-p']
+    else:
+        print("Unknown platform: %s" % os.name)
+        sys.exit(1)
+    cmd.append(dir_name)
+    return test(label, cmd)
+
+def test_rmdir(label, dir_name):
+    if os.path.isdir(dir_name):
+        if os.name == "posix":
+            cmd = ['rm', '-rf']
+        elif os.name == "nt":
+            cmd = ['rmdir', '/s/q']
+        else:
+            print("Unknown platform: %s" % os.name)
+            sys.exit(1)
+        cmd.append(dir_name)
+        return test(label, cmd)
+    else:
+        return test(label, [])
+
+def test_flushdir(label, dir_name):
+    test_rmdir(label + "(rm)", dir_name)
+    return test_mkdir(label + "(mk)", dir_name)
+
+def test_copy(label, src_file, dst_file):
+    if os.name == "posix":
+        cmd = ['cp', '-f']
+    elif os.name == "nt":
+        cmd = ['copy']
+    else:
+        print("Unknown platform: %s" % os.name)
+        sys.exit(1)
+    cmd.append(src_file)
+    cmd.append(dst_file)
+    return test(label, cmd)
+
+def test_curl_HEAD(label, src_file, **kwargs):
+    cmd = ['curl', '--silent', '--head', '-include', '--location']
+    cmd.append(src_file)
+    return test(label, cmd, **kwargs)
+
+bucket_prefix = u"%s-" % getpass.getuser().lower()
+
+argv = sys.argv[1:]
+while argv:
+    arg = argv.pop(0)
+    if arg.startswith('--bucket-prefix='):
+        print("Usage: '--bucket-prefix PREFIX', not '--bucket-prefix=PREFIX'")
+        sys.exit(0)
+    if arg in ("-h", "--help"):
+        print("%s A B K..O -N" % sys.argv[0])
+        print("Run tests number A, B and K through to O, except for N")
+        sys.exit(0)
+
+    if arg in ("-c", "--config"):
+        config_file = argv.pop(0)
+        continue
+    if arg in ("-l", "--list"):
+        exclude_tests = range(0, 999)
+        break
+    if arg in ("-v", "--verbose"):
+        verbose = True
+        continue
+    if arg in ("-p", "--bucket-prefix"):
+        try:
+            bucket_prefix = argv.pop(0)
+        except IndexError:
+            print("Bucket prefix option must explicitly supply a bucket name prefix")
+            sys.exit(0)
+        continue
+    if ".." in arg:
+        range_idx = arg.find("..")
+        range_start = arg[:range_idx] or 0
+        range_end = arg[range_idx+2:] or 999
+        run_tests.extend(range(int(range_start), int(range_end) + 1))
+    elif arg.startswith("-"):
+        exclude_tests.append(int(arg[1:]))
+    else:
+        run_tests.append(int(arg))
+
+print("Using bucket prefix: '%s'" % bucket_prefix)
+
+cfg = S3.Config.Config(config_file)
+
+if not run_tests:
+    run_tests = range(0, 999)
+
+# helper functions for generating bucket names
+def bucket(tail):
+        '''Test bucket name'''
+        label = 'autotest'
+        if str(tail) == '3':
+                label = 'autotest'
+        return '%ss3cmd-%s-%s' % (bucket_prefix, label, tail)
+
+def pbucket(tail):
+        '''Like bucket(), but prepends "s3://" for you'''
+        return 's3://' + bucket(tail)
+
+## ====== Remove test buckets
+test_s3cmd("Remove test buckets", ['rb', '-r', '--force', pbucket(1), pbucket(2), pbucket(3)])
+
+## ====== verify they were removed
+test_s3cmd("Verify no test buckets", ['ls'],
+           must_not_find = [pbucket(1), pbucket(2), pbucket(3)])
+
+
+## ====== Create one bucket (EU)
+# Disabled for minio
+#test_s3cmd("Create one bucket (EU)", ['mb', '--bucket-location=EU', pbucket(1)],
+#    must_find = "Bucket '%s/' created" % pbucket(1))
+test_s3cmd("Create one bucket", ['mb', pbucket(1)],
+    must_find = "Bucket '%s/' created" % pbucket(1))
+
+
+## ====== Create multiple buckets
+test_s3cmd("Create multiple buckets", ['mb', pbucket(2), pbucket(3)],
+    must_find = [ "Bucket '%s/' created" % pbucket(2), "Bucket '%s/' created" % pbucket(3)])
+
+
+## ====== Invalid bucket name
+test_s3cmd("Invalid bucket name", ["mb", "--bucket-location=EU", pbucket('EU')],
+    retcode = EX_USAGE,
+    must_find = "ERROR: Parameter problem: Bucket name '%s' contains disallowed character" % bucket('EU'),
+    must_not_find_re = "Bucket.*created")
+
+
+## ====== Buckets list
+# Modified for Minio
+test_s3cmd("Buckets list", ["ls"],
+    must_find = [ "autotest-1", "autotest-2", "autotest-3" ], must_not_find_re = "autotest-EU")
+
+
+## ====== Sync to S3
+# Modified for Minio (exclude crappy dir)
+test_s3cmd("Sync to S3", ['sync', 'testsuite/', pbucket(1) + '/xyz/', '--exclude', 'demo/*', '--exclude', '*.png', '--no-encrypt', '--exclude-from', 'testsuite/exclude.encodings', '--exclude', 'crappy-file-name/*' ],
+           must_find = [ "ERROR: Upload of 'testsuite/permission-tests/permission-denied.txt' is not possible (Reason: Permission denied)",
+           ],
+           must_not_find_re = [ "demo/", "^(?!WARNING: Skipping).*\.png$", "permission-denied-dir" ],
+           retcode = EX_PARTIAL)
+
+if have_encoding:
+    ## ====== Sync UTF-8 / GBK / ... to S3
+    test_s3cmd(u"Sync %s to S3" % encoding, ['sync', 'testsuite/encodings/' + encoding, '%s/xyz/encodings/' % pbucket(1), '--exclude', 'demo/*', '--no-encrypt' ],
+        must_find = [ u"'testsuite/encodings/%(encoding)s/%(pattern)s' -> '%(pbucket)s/xyz/encodings/%(encoding)s/%(pattern)s'" % { 'encoding' : encoding, 'pattern' : enc_pattern , 'pbucket' : pbucket(1)} ])
+
+
+## ====== List bucket content
+test_s3cmd("List bucket content", ['ls', '%s/xyz/' % pbucket(1) ],
+    must_find_re = [ u"DIR +%s/xyz/binary/$" % pbucket(1) , u"DIR +%s/xyz/etc/$" % pbucket(1) ],
+    must_not_find = [ u"random-crap.md5", u"/demo" ])
+
+
+## ====== List bucket recursive
+must_find = [ u"%s/xyz/binary/random-crap.md5" % pbucket(1) ]
+if have_encoding:
+    must_find.append(u"%(pbucket)s/xyz/encodings/%(encoding)s/%(pattern)s" % { 'encoding' : encoding, 'pattern' : enc_pattern, 'pbucket' : pbucket(1) })
+
+test_s3cmd("List bucket recursive", ['ls', '--recursive', pbucket(1)],
+    must_find = must_find,
+    must_not_find = [ "logo.png" ])
+
+## ====== FIXME
+test_s3cmd("Recursive put", ['put', '--recursive', 'testsuite/etc', '%s/xyz/' % pbucket(1) ])
+
+
+## ====== Clean up local destination dir
+test_flushdir("Clean testsuite-out/", "testsuite-out")
+
+## ====== Put from stdin
+f = open('testsuite/single-file/single-file.txt', 'r')
+test_s3cmd("Put from stdin", ['put', '-', '%s/single-file/single-file.txt' % pbucket(1)],
+           must_find = ["'<stdin>' -> '%s/single-file/single-file.txt'" % pbucket(1)],
+           stdin = f)
+f.close()
+
+## ====== Multipart put
+os.system('mkdir -p testsuite-out')
+os.system('dd if=/dev/urandom of=testsuite-out/urandom.bin bs=1M count=16 > /dev/null 2>&1')
+test_s3cmd("Put multipart", ['put', '--multipart-chunk-size-mb=5', 'testsuite-out/urandom.bin', '%s/urandom.bin' % pbucket(1)],
+           must_not_find = ['abortmp'])
+
+## ====== Multipart put from stdin
+f = open('testsuite-out/urandom.bin', 'r')
+test_s3cmd("Multipart large put from stdin", ['put', '--multipart-chunk-size-mb=5', '-', '%s/urandom2.bin' % pbucket(1)],
+           must_find = ['%s/urandom2.bin' % pbucket(1)],
+           must_not_find = ['abortmp'],
+           stdin = f)
+f.close()
+
+## ====== Clean up local destination dir
+test_flushdir("Clean testsuite-out/", "testsuite-out")
+
+## ====== Moving things without trailing '/'
+os.system('dd if=/dev/urandom of=testsuite-out/urandom1.bin bs=1k count=1 > /dev/null 2>&1')
+os.system('dd if=/dev/urandom of=testsuite-out/urandom2.bin bs=1k count=1 > /dev/null 2>&1')
+test_s3cmd("Put multiple files", ['put', 'testsuite-out/urandom1.bin', 'testsuite-out/urandom2.bin', '%s/' % pbucket(1)],
+           must_find = ["%s/urandom1.bin" % pbucket(1), "%s/urandom2.bin" % pbucket(1)])
+
+test_s3cmd("Move without '/'", ['mv', '%s/urandom1.bin' % pbucket(1), '%s/urandom2.bin' % pbucket(1), '%s/dir' % pbucket(1)],
+           retcode = 64,
+           must_find = ['Destination must be a directory'])
+
+test_s3cmd("Move recursive w/a '/'",
+           ['-r', 'mv', '%s/dir1' % pbucket(1), '%s/dir2' % pbucket(1)],
+           retcode = 64,
+           must_find = ['Destination must be a directory'])
+
+## ====== Moving multiple files into directory with trailing '/'
+must_find = ["'%s/urandom1.bin' -> '%s/dir/urandom1.bin'" % (pbucket(1),pbucket(1)), "'%s/urandom2.bin' -> '%s/dir/urandom2.bin'" % (pbucket(1),pbucket(1))]
+must_not_find = ["'%s/urandom1.bin' -> '%s/dir'" % (pbucket(1),pbucket(1)), "'%s/urandom2.bin' -> '%s/dir'" % (pbucket(1),pbucket(1))]
+test_s3cmd("Move multiple files",
+           ['mv', '%s/urandom1.bin' % pbucket(1), '%s/urandom2.bin' % pbucket(1), '%s/dir/' % pbucket(1)],
+           must_find = must_find,
+           must_not_find = must_not_find)
+
+## ====== Clean up local destination dir
+test_flushdir("Clean testsuite-out/", "testsuite-out")
+
+## ====== Sync from S3
+must_find = [ "'%s/xyz/binary/random-crap.md5' -> 'testsuite-out/xyz/binary/random-crap.md5'" % pbucket(1) ]
+if have_encoding:
+    must_find.append(u"'%(pbucket)s/xyz/encodings/%(encoding)s/%(pattern)s' -> 'testsuite-out/xyz/encodings/%(encoding)s/%(pattern)s' " % { 'encoding' : encoding, 'pattern' : enc_pattern, 'pbucket' : pbucket(1) })
+test_s3cmd("Sync from S3", ['sync', '%s/xyz' % pbucket(1), 'testsuite-out'],
+    must_find = must_find)
+
+
+## ====== Remove 'dir-test' directory
+test_rmdir("Remove 'dir-test/'", "testsuite-out/xyz/dir-test/")
+
+
+## ====== Create dir with name of a file
+test_mkdir("Create file-dir dir", "testsuite-out/xyz/dir-test/file-dir")
+
+
+## ====== Skip dst dirs
+test_s3cmd("Skip over dir", ['sync', '%s/xyz' % pbucket(1), 'testsuite-out'],
+           must_find = "ERROR: Download of 'xyz/dir-test/file-dir' failed (Reason: testsuite-out/xyz/dir-test/file-dir is a directory)",
+           retcode = EX_PARTIAL)
+
+
+## ====== Clean up local destination dir
+test_flushdir("Clean testsuite-out/", "testsuite-out")
+
+
+## ====== Put public, guess MIME
+test_s3cmd("Put public, guess MIME", ['put', '--guess-mime-type', '--acl-public', 'testsuite/etc/logo.png', '%s/xyz/etc/logo.png' % pbucket(1)],
+    must_find = [ "-> '%s/xyz/etc/logo.png'" % pbucket(1) ])
+
+
+## ====== Retrieve from URL
+# Minio: disabled
+#if have_curl:
+#   test_curl_HEAD("Retrieve from URL", 'http://%s.%s/xyz/etc/logo.png' % (bucket(1), cfg.host_base),
+#                   must_find_re = ['Content-Length: 22059'])
+
+## ====== Change ACL to Private
+# Minio: disabled
+#test_s3cmd("Change ACL to Private", ['setacl', '--acl-private', '%s/xyz/etc/l*.png' % pbucket(1)],
+#    must_find = [ "logo.png: ACL set to Private" ])
+
+
+## ====== Verify Private ACL
+# Minio: disabled
+#if have_curl:
+#    test_curl_HEAD("Verify Private ACL", 'http://%s.%s/xyz/etc/logo.png' % (bucket(1), cfg.host_base),
+#                   must_find_re = [ '403 Forbidden' ])
+
+
+## ====== Change ACL to Public
+# Minio: disabled
+#test_s3cmd("Change ACL to Public", ['setacl', '--acl-public', '--recursive', '%s/xyz/etc/' % pbucket(1) , '-v'],
+#    must_find = [ "logo.png: ACL set to Public" ])
+
+
+## ====== Verify Public ACL
+# Minio: disabled
+#if have_curl:
+#    test_curl_HEAD("Verify Public ACL", 'http://%s.%s/xyz/etc/logo.png' % (bucket(1), cfg.host_base),
+#                   must_find_re = [ '200 OK',
+#                                    'Content-Length: 22059'])
+
+
+## ====== Sync more to S3
+# Modified for Minio (exclude crappy dir)
+test_s3cmd("Sync more to S3", ['sync', 'testsuite/', 's3://%s/xyz/' % bucket(1), '--no-encrypt', '--exclude', 'crappy-file-name/*' ],
+           must_find = [ "'testsuite/demo/some-file.xml' -> '%s/xyz/demo/some-file.xml' " % pbucket(1) ],
+           must_not_find = [ "'testsuite/etc/linked.png' -> '%s/xyz/etc/linked.png'" % pbucket(1) ],
+           retcode = EX_PARTIAL)
+
+
+## ====== Don't check MD5 sum on Sync
+test_copy("Change file cksum1.txt", "testsuite/checksum/cksum2.txt", "testsuite/checksum/cksum1.txt")
+test_copy("Change file cksum33.txt", "testsuite/checksum/cksum2.txt", "testsuite/checksum/cksum33.txt")
+# Modified for Minio (exclude crappy dir)
+test_s3cmd("Don't check MD5", ['sync', 'testsuite/', 's3://%s/xyz/' % bucket(1), '--no-encrypt', '--no-check-md5', '--exclude', 'crappy-file-name/*'],
+           must_find = [ "cksum33.txt" ],
+           must_not_find = [ "cksum1.txt" ],
+           retcode = EX_PARTIAL)
+
+
+## ====== Check MD5 sum on Sync
+# Modified for Minio (exclude crappy dir)
+test_s3cmd("Check MD5", ['sync', 'testsuite/', 's3://%s/xyz/' % bucket(1), '--no-encrypt', '--check-md5', '--exclude', 'crappy-file-name/*'],
+           must_find = [ "cksum1.txt" ],
+           retcode = EX_PARTIAL)
+
+
+## ====== Rename within S3
+test_s3cmd("Rename within S3", ['mv', '%s/xyz/etc/logo.png' % pbucket(1), '%s/xyz/etc2/Logo.PNG' % pbucket(1)],
+    must_find = [ "move: '%s/xyz/etc/logo.png' -> '%s/xyz/etc2/Logo.PNG'" % (pbucket(1), pbucket(1))])
+
+
+## ====== Rename (NoSuchKey)
+test_s3cmd("Rename (NoSuchKey)", ['mv', '%s/xyz/etc/logo.png' % pbucket(1), '%s/xyz/etc2/Logo.PNG' % pbucket(1)],
+    retcode = EX_NOTFOUND,
+    must_find_re = [ 'Key not found' ],
+    must_not_find = [ "move: '%s/xyz/etc/logo.png' -> '%s/xyz/etc2/Logo.PNG'" % (pbucket(1), pbucket(1)) ])
+
+## ====== Sync more from S3 (invalid src)
+test_s3cmd("Sync more from S3 (invalid src)", ['sync', '--delete-removed', '%s/xyz/DOESNOTEXIST' % pbucket(1), 'testsuite-out'],
+    must_not_find = [ "delete: 'testsuite-out/logo.png'" ])
+
+## ====== Sync more from S3
+test_s3cmd("Sync more from S3", ['sync', '--delete-removed', '%s/xyz' % pbucket(1), 'testsuite-out'],
+    must_find = [ "'%s/xyz/etc2/Logo.PNG' -> 'testsuite-out/xyz/etc2/Logo.PNG'" % pbucket(1),
+                  "'%s/xyz/demo/some-file.xml' -> 'testsuite-out/xyz/demo/some-file.xml'" % pbucket(1) ],
+    must_not_find_re = [ "not-deleted.*etc/logo.png", "delete: 'testsuite-out/logo.png'" ])
+
+
+## ====== Make dst dir for get
+test_rmdir("Remove dst dir for get", "testsuite-out")
+
+
+## ====== Get multiple files
+test_s3cmd("Get multiple files", ['get', '%s/xyz/etc2/Logo.PNG' % pbucket(1), '%s/xyz/etc/AtomicClockRadio.ttf' % pbucket(1), 'testsuite-out'],
+    retcode = EX_USAGE,
+    must_find = [ 'Destination must be a directory or stdout when downloading multiple sources.' ])
+
+## ====== put/get non-ASCII filenames
+test_s3cmd("Put unicode filenames", ['put', u'testsuite/encodings/UTF-8/ŪņЇЌœđЗ/Žůžo',  u'%s/xyz/encodings/UTF-8/ŪņЇЌœđЗ/Žůžo' % pbucket(1)],
+           retcode = 0,
+           must_find = [ '->' ])
+
+
+## ====== Make dst dir for get
+test_mkdir("Make dst dir for get", "testsuite-out")
+
+
+## ====== put/get non-ASCII filenames
+test_s3cmd("Get unicode filenames", ['get', u'%s/xyz/encodings/UTF-8/ŪņЇЌœđЗ/Žůžo' % pbucket(1), 'testsuite-out'],
+           retcode = 0,
+           must_find = [ '->' ])
+
+
+## ====== Get multiple files
+test_s3cmd("Get multiple files", ['get', '%s/xyz/etc2/Logo.PNG' % pbucket(1), '%s/xyz/etc/AtomicClockRadio.ttf' % pbucket(1), 'testsuite-out'],
+    must_find = [ u"-> 'testsuite-out/Logo.PNG'",
+                  u"-> 'testsuite-out/AtomicClockRadio.ttf'" ])
+
+## ====== Upload files differing in capitalisation
+test_s3cmd("blah.txt / Blah.txt", ['put', '-r', 'testsuite/blahBlah', pbucket(1)],
+    must_find = [ '%s/blahBlah/Blah.txt' % pbucket(1), '%s/blahBlah/blah.txt' % pbucket(1)])
+
+## ====== Copy between buckets
+test_s3cmd("Copy between buckets", ['cp', '%s/xyz/etc2/Logo.PNG' % pbucket(1), '%s/xyz/etc2/logo.png' % pbucket(3)],
+    must_find = [ "remote copy: '%s/xyz/etc2/Logo.PNG' -> '%s/xyz/etc2/logo.png'" % (pbucket(1), pbucket(3)) ])
+
+## ====== Recursive copy
+test_s3cmd("Recursive copy, set ACL", ['cp', '-r', '--acl-public', '%s/xyz/' % pbucket(1), '%s/copy/' % pbucket(2), '--exclude', 'demo/dir?/*.txt', '--exclude', 'non-printables*'],
+    must_find = [ "remote copy: '%s/xyz/etc2/Logo.PNG' -> '%s/copy/etc2/Logo.PNG'" % (pbucket(1), pbucket(2)),
+                  "remote copy: '%s/xyz/blahBlah/Blah.txt' -> '%s/copy/blahBlah/Blah.txt'" % (pbucket(1), pbucket(2)),
+                  "remote copy: '%s/xyz/blahBlah/blah.txt' -> '%s/copy/blahBlah/blah.txt'" % (pbucket(1), pbucket(2)) ],
+    must_not_find = [ "demo/dir1/file1-1.txt" ])
+
+## ====== Verify ACL and MIME type
+# Minio: disable acl check, not supported by minio
+test_s3cmd("Verify ACL and MIME type", ['info', '%s/copy/etc2/Logo.PNG' % pbucket(2) ],
+    must_find_re = [ "MIME type:.*image/png" ])
+
+## ====== modify MIME type
+# Minio: disable acl check, not supported by minio
+# Minio: modifying mime type alone not allowed as copy of same file for them
+#test_s3cmd("Modify MIME type", ['modify', '--mime-type=binary/octet-stream', '%s/copy/etc2/Logo.PNG' % pbucket(2) ])
+
+#test_s3cmd("Verify ACL and MIME type", ['info', '%s/copy/etc2/Logo.PNG' % pbucket(2) ],
+#    must_find_re = [ "MIME type:.*binary/octet-stream" ])
+
+# Minio: disable acl check, not supported by minio
+#test_s3cmd("Modify MIME type back", ['modify', '--mime-type=image/png', '%s/copy/etc2/Logo.PNG' % pbucket(2) ])
+
+# Minio: disable acl check, not supported by minio
+#test_s3cmd("Verify ACL and MIME type", ['info', '%s/copy/etc2/Logo.PNG' % pbucket(2) ],
+#    must_find_re = [ "MIME type:.*image/png" ])
+
+#test_s3cmd("Add cache-control header", ['modify', '--add-header=cache-control: max-age=3600, public', '%s/copy/etc2/Logo.PNG' % pbucket(2) ],
+#    must_find_re = [ "modify: .*" ])
+
+#if have_curl:
+#    test_curl_HEAD("HEAD check Cache-Control present", 'http://%s.%s/copy/etc2/Logo.PNG' % (bucket(2), cfg.host_base),
+#                   must_find_re = [ "Cache-Control: max-age=3600" ])
+
+#test_s3cmd("Remove cache-control header", ['modify', '--remove-header=cache-control', '%s/copy/etc2/Logo.PNG' % pbucket(2) ],
+#           must_find_re = [ "modify: .*" ])
+
+#if have_curl:
+#    test_curl_HEAD("HEAD check Cache-Control not present", 'http://%s.%s/copy/etc2/Logo.PNG' % (bucket(2), cfg.host_base),
+#                   must_not_find_re = [ "Cache-Control: max-age=3600" ])
+
+## ====== sign
+test_s3cmd("sign string", ['sign', 's3cmd'], must_find_re = ["Signature:"])
+test_s3cmd("signurl time", ['signurl', '%s/copy/etc2/Logo.PNG' % pbucket(2), str(int(time.time()) + 60)], must_find_re = ["http://"])
+test_s3cmd("signurl time offset", ['signurl', '%s/copy/etc2/Logo.PNG' % pbucket(2), '+60'], must_find_re = ["https?://"])
+test_s3cmd("signurl content disposition and type", ['signurl', '%s/copy/etc2/Logo.PNG' % pbucket(2), '+60', '--content-disposition=inline; filename=video.mp4', '--content-type=video/mp4'], must_find_re = [ 'response-content-disposition', 'response-content-type' ] )
+
+## ====== Rename within S3
+test_s3cmd("Rename within S3", ['mv', '%s/copy/etc2/Logo.PNG' % pbucket(2), '%s/copy/etc/logo.png' % pbucket(2)],
+    must_find = [ "move: '%s/copy/etc2/Logo.PNG' -> '%s/copy/etc/logo.png'" % (pbucket(2), pbucket(2))])
+
+## ====== Sync between buckets
+test_s3cmd("Sync remote2remote", ['sync', '%s/xyz/' % pbucket(1), '%s/copy/' % pbucket(2), '--delete-removed', '--exclude', 'non-printables*'],
+    must_find = [ "remote copy: '%s/xyz/demo/dir1/file1-1.txt' -> '%s/copy/demo/dir1/file1-1.txt'" % (pbucket(1), pbucket(2)),
+                  "remote copy: 'etc/logo.png' -> 'etc2/Logo.PNG'",
+                  "delete: '%s/copy/etc/logo.png'" % pbucket(2) ],
+    must_not_find = [ "blah.txt" ])
+
+## ====== Don't Put symbolic link
+test_s3cmd("Don't put symbolic links", ['put', 'testsuite/etc/linked1.png', 's3://%s/xyz/' % bucket(1),  '--exclude', 'crappy-file-name/*'],
+           retcode = EX_USAGE,
+           must_find = ["WARNING: Skipping over symbolic link: testsuite/etc/linked1.png"],
+           must_not_find_re = ["^(?!WARNING: Skipping).*linked1.png"])
+
+## ====== Put symbolic link
+test_s3cmd("Put symbolic links", ['put', 'testsuite/etc/linked1.png', 's3://%s/xyz/' % bucket(1),'--follow-symlinks' ,  '--exclude', 'crappy-file-name/*'],
+           must_find = [ "'testsuite/etc/linked1.png' -> '%s/xyz/linked1.png'" % pbucket(1)])
+
+## ====== Sync symbolic links
+test_s3cmd("Sync symbolic links", ['sync', 'testsuite/', 's3://%s/xyz/' % bucket(1), '--no-encrypt', '--follow-symlinks',  '--exclude', 'crappy-file-name/*' ],
+    must_find = ["remote copy: 'etc2/Logo.PNG' -> 'etc/linked.png'"],
+           # Don't want to recursively copy linked directories!
+           must_not_find_re = ["etc/more/linked-dir/more/give-me-more.txt",
+                               "etc/brokenlink.png"],
+           retcode = EX_PARTIAL)
+
+## ====== Multi source move
+test_s3cmd("Multi-source move", ['mv', '-r', '%s/copy/blahBlah/Blah.txt' % pbucket(2), '%s/copy/etc/' % pbucket(2), '%s/moved/' % pbucket(2)],
+    must_find = [ "move: '%s/copy/blahBlah/Blah.txt' -> '%s/moved/Blah.txt'" % (pbucket(2), pbucket(2)),
+                  "move: '%s/copy/etc/AtomicClockRadio.ttf' -> '%s/moved/AtomicClockRadio.ttf'" % (pbucket(2), pbucket(2)),
+                  "move: '%s/copy/etc/TypeRa.ttf' -> '%s/moved/TypeRa.ttf'" % (pbucket(2), pbucket(2)) ],
+    must_not_find = [ "blah.txt" ])
+
+## ====== Verify move
+test_s3cmd("Verify move", ['ls', '-r', pbucket(2)],
+    must_find = [ "%s/moved/Blah.txt" % pbucket(2),
+                  "%s/moved/AtomicClockRadio.ttf" % pbucket(2),
+                  "%s/moved/TypeRa.ttf" % pbucket(2),
+                  "%s/copy/blahBlah/blah.txt" % pbucket(2) ],
+    must_not_find = [ "%s/copy/blahBlah/Blah.txt" % pbucket(2),
+                      "%s/copy/etc/AtomicClockRadio.ttf" % pbucket(2),
+                      "%s/copy/etc/TypeRa.ttf" % pbucket(2) ])
+
+## ====== List all
+test_s3cmd("List all", ['la'],
+           must_find = [ "%s/urandom.bin" % pbucket(1)])
+
+## ====== Simple delete
+test_s3cmd("Simple delete", ['del', '%s/xyz/etc2/Logo.PNG' % pbucket(1)],
+    must_find = [ "delete: '%s/xyz/etc2/Logo.PNG'" % pbucket(1) ])
+
+## ====== Simple delete with rm
+test_s3cmd("Simple delete with rm", ['rm', '%s/xyz/test_rm/TypeRa.ttf' % pbucket(1)],
+    must_find = [ "delete: '%s/xyz/test_rm/TypeRa.ttf'" % pbucket(1) ])
+
+## ====== Create expiration rule with days and prefix
+# Minio: disabled
+#test_s3cmd("Create expiration rule with days and prefix", ['expire', pbucket(1), '--expiry-days=365', '--expiry-prefix=log/'],
+#    must_find = [ "Bucket '%s/': expiration configuration is set." % pbucket(1)])
+
+## ====== Create expiration rule with date and prefix
+# Minio: disabled
+#test_s3cmd("Create expiration rule with date and prefix", ['expire', pbucket(1), '--expiry-date=2012-12-31T00:00:00.000Z', '--expiry-prefix=log/'],
+#    must_find = [ "Bucket '%s/': expiration configuration is set." % pbucket(1)])
+
+## ====== Create expiration rule with days only
+# Minio: disabled
+#test_s3cmd("Create expiration rule with days only", ['expire', pbucket(1), '--expiry-days=365'],
+#    must_find = [ "Bucket '%s/': expiration configuration is set." % pbucket(1)])
+
+## ====== Create expiration rule with date only
+# Minio: disabled
+#test_s3cmd("Create expiration rule with date only", ['expire', pbucket(1), '--expiry-date=2012-12-31T00:00:00.000Z'],
+#    must_find = [ "Bucket '%s/': expiration configuration is set." % pbucket(1)])
+
+## ====== Get current expiration setting
+# Minio: disabled
+#test_s3cmd("Get current expiration setting", ['info', pbucket(1)],
+#    must_find = [ "Expiration Rule: all objects in this bucket will expire in '2012-12-31T00:00:00.000Z'"])
+
+## ====== Delete expiration rule
+# Minio: disabled
+#test_s3cmd("Delete expiration rule", ['expire', pbucket(1)],
+#    must_find = [ "Bucket '%s/': expiration configuration is deleted." % pbucket(1)])
+
+## ====== set Requester Pays flag
+# Minio: disabled
+#test_s3cmd("Set requester pays", ['payer', '--requester-pays', pbucket(2)])
+
+## ====== get Requester Pays flag
+# Minio: disabled
+#test_s3cmd("Get requester pays flag", ['info', pbucket(2)],
+#    must_find = [ "Payer:     Requester"])
+
+## ====== ls using Requester Pays flag
+# Minio: disabled
+#test_s3cmd("ls using requester pays flag", ['ls', '--requester-pays', pbucket(2)])
+
+## ====== clear Requester Pays flag
+# Minio: disabled
+#test_s3cmd("Clear requester pays", ['payer', pbucket(2)])
+
+## ====== get Requester Pays flag
+# Minio: disabled
+#test_s3cmd("Get requester pays flag", ['info', pbucket(2)],
+#    must_find = [ "Payer:     BucketOwner"])
+
+## ====== Recursive delete maximum exceeded
+test_s3cmd("Recursive delete maximum exceeded", ['del', '--recursive', '--max-delete=1', '--exclude', 'Atomic*', '%s/xyz/etc' % pbucket(1)],
+    must_not_find = [ "delete: '%s/xyz/etc/TypeRa.ttf'" % pbucket(1) ])
+
+## ====== Recursive delete
+test_s3cmd("Recursive delete", ['del', '--recursive', '--exclude', 'Atomic*', '%s/xyz/etc' % pbucket(1)],
+    must_find = [ "delete: '%s/xyz/etc/TypeRa.ttf'" % pbucket(1) ],
+    must_find_re = [ "delete: '.*/etc/logo.png'" ],
+    must_not_find = [ "AtomicClockRadio.ttf" ])
+
+## ====== Recursive delete with rm
+test_s3cmd("Recursive delete with rm", ['rm', '--recursive', '--exclude', 'Atomic*', '%s/xyz/test_rm' % pbucket(1)],
+    must_find = [ "delete: '%s/xyz/test_rm/more/give-me-more.txt'" % pbucket(1) ],
+    must_find_re = [ "delete: '.*/test_rm/logo.png'" ],
+    must_not_find = [ "AtomicClockRadio.ttf" ])
+
+## ====== Recursive delete all
+test_s3cmd("Recursive delete all", ['del', '--recursive', '--force', pbucket(1)],
+    must_find_re = [ "delete: '.*binary/random-crap'" ])
+
+## ====== Remove empty bucket
+test_s3cmd("Remove empty bucket", ['rb', pbucket(1)],
+    must_find = [ "Bucket '%s/' removed" % pbucket(1) ])
+
+## ====== Remove remaining buckets
+test_s3cmd("Remove remaining buckets", ['rb', '--recursive', pbucket(2), pbucket(3)],
+    must_find = [ "Bucket '%s/' removed" % pbucket(2),
+              "Bucket '%s/' removed" % pbucket(3) ])
+
+# vim:et:ts=4:sts=4:ai
diff --git a/run-tests.py b/run-tests.py
new file mode 100755
index 0000000..bab7300
--- /dev/null
+++ b/run-tests.py
@@ -0,0 +1,783 @@
+#!/usr/bin/env python2
+# -*- coding=utf-8 -*-
+
+## Amazon S3cmd - testsuite
+## Author: Michal Ludvig <michal@logix.cz>
+##         http://www.logix.cz/michal
+## License: GPL Version 2
+## Copyright: TGRMN Software and contributors
+
+from __future__ import absolute_import, print_function
+
+import sys
+import os
+import re
+import time
+from subprocess import Popen, PIPE, STDOUT
+import locale
+import getpass
+import S3.Exceptions
+import S3.Config
+from S3.ExitCodes import *
+
+PY3 = (sys.version_info >= (3,0))
+
+try:
+    unicode
+except NameError:
+    # python 3 support
+    # In python 3, unicode -> str, and str -> bytes
+    unicode = str
+
+count_pass = 0
+count_fail = 0
+count_skip = 0
+
+test_counter = 0
+run_tests = []
+exclude_tests = []
+
+verbose = False
+
+encoding = locale.getpreferredencoding()
+if not encoding:
+    print("Guessing current system encoding failed. Consider setting $LANG variable.")
+    sys.exit(1)
+else:
+    print("System encoding: " + encoding)
+
+try:
+    unicode
+except NameError:
+    # python 3 support
+    # In python 3, unicode -> str, and str -> bytes
+    unicode = str
+
+def unicodise(string, encoding = "utf-8", errors = "replace"):
+    """
+    Convert 'string' to Unicode or raise an exception.
+    Config can't use toolbox from Utils that is itself using Config
+    """
+    if type(string) == unicode:
+        return string
+
+    try:
+        return unicode(string, encoding, errors)
+    except UnicodeDecodeError:
+        raise UnicodeDecodeError("Conversion to unicode failed: %r" % string)
+
+# https://stackoverflow.com/questions/377017/test-if-executable-exists-in-python/377028#377028
+def which(program):
+    def is_exe(fpath):
+        return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
+
+    fpath, fname = os.path.split(program)
+    if fpath:
+        if is_exe(program):
+            return program
+    else:
+        for path in os.environ["PATH"].split(os.pathsep):
+            path = path.strip('"')
+            exe_file = os.path.join(path, program)
+            if is_exe(exe_file):
+                return exe_file
+
+    return None
+
+if which('curl') is not None:
+    have_curl = True
+else:
+    have_curl = False
+
+config_file = None
+if os.getenv("HOME"):
+    config_file = os.path.join(unicodise(os.getenv("HOME"), encoding), ".s3cfg")
+elif os.name == "nt" and os.getenv("USERPROFILE"):
+    config_file = os.path.join(unicodise(os.getenv("USERPROFILE"), encoding),
+                               os.getenv("APPDATA") and unicodise(os.getenv("APPDATA"), encoding)
+                               or 'Application Data',
+                               "s3cmd.ini")
+
+
+## Unpack testsuite/ directory
+if not os.path.isdir('testsuite') and os.path.isfile('testsuite.tar.gz'):
+    os.system("tar -xz -f testsuite.tar.gz")
+if not os.path.isdir('testsuite'):
+    print("Something went wrong while unpacking testsuite.tar.gz")
+    sys.exit(1)
+
+os.system("tar -xf testsuite/checksum.tar -C testsuite")
+if not os.path.isfile('testsuite/checksum/cksum33.txt'):
+    print("Something went wrong while unpacking testsuite/checkum.tar")
+    sys.exit(1)
+
+## Fix up permissions for permission-denied tests
+os.chmod("testsuite/permission-tests/permission-denied-dir", 0o444)
+os.chmod("testsuite/permission-tests/permission-denied.txt", 0o000)
+
+## Patterns for Unicode tests
+patterns = {}
+patterns['UTF-8'] = u"ŪņЇЌœđЗ/☺ unicode € rocks ™"
+patterns['GBK'] = u"12月31日/1-特色條目"
+
+have_encoding = os.path.isdir('testsuite/encodings/' + encoding)
+if not have_encoding and os.path.isfile('testsuite/encodings/%s.tar.gz' % encoding):
+    os.system("tar xvz -C testsuite/encodings -f testsuite/encodings/%s.tar.gz" % encoding)
+    have_encoding = os.path.isdir('testsuite/encodings/' + encoding)
+
+if have_encoding:
+    #enc_base_remote = "%s/xyz/%s/" % (pbucket(1), encoding)
+    enc_pattern = patterns[encoding]
+else:
+    print(encoding + " specific files not found.")
+
+def unicodise(string):
+    if type(string) == unicode:
+        return string
+
+    return unicode(string, "UTF-8", "replace")
+
+def deunicodise(string):
+    if type(string) != unicode:
+        return string
+
+    return string.encode("UTF-8", "replace")
+
+if not os.path.isdir('testsuite/crappy-file-name'):
+    os.system("tar xvz -C testsuite -f testsuite/crappy-file-name.tar.gz")
+    # TODO: also unpack if the tarball is newer than the directory timestamp
+    #       for instance when a new version was pulled from SVN.
+
+def test(label, cmd_args = [], retcode = 0, must_find = [], must_not_find = [], must_find_re = [], must_not_find_re = [], stdin = None):
+    def command_output():
+        print("----")
+        print(" ".join([" " in arg and "'%s'" % arg or arg for arg in cmd_args]))
+        print("----")
+        print(stdout)
+        print("----")
+
+    def failure(message = ""):
+        global count_fail
+        if message:
+            message = u"  (%r)" % message
+        print(u"\x1b[31;1mFAIL%s\x1b[0m" % (message))
+        count_fail += 1
+        command_output()
+        #return 1
+        sys.exit(1)
+    def success(message = ""):
+        global count_pass
+        if message:
+            message = "  (%r)" % message
+        print("\x1b[32;1mOK\x1b[0m%s" % (message))
+        count_pass += 1
+        if verbose:
+            command_output()
+        return 0
+    def skip(message = ""):
+        global count_skip
+        if message:
+            message = "  (%r)" % message
+        print("\x1b[33;1mSKIP\x1b[0m%s" % (message))
+        count_skip += 1
+        return 0
+    def compile_list(_list, regexps = False):
+        if regexps == False:
+            _list = [re.escape(item) for item in _list]
+
+        return [re.compile(item, re.MULTILINE) for item in _list]
+
+    global test_counter
+    test_counter += 1
+    print(("%3d  %s " % (test_counter, label)).ljust(30, "."), end=' ')
+    sys.stdout.flush()
+
+    if run_tests.count(test_counter) == 0 or exclude_tests.count(test_counter) > 0:
+        return skip()
+
+    if not cmd_args:
+        return skip()
+
+    p = Popen(cmd_args, stdin = stdin, stdout = PIPE, stderr = STDOUT, universal_newlines = True, close_fds = True)
+    stdout, stderr = p.communicate()
+    if type(retcode) not in [list, tuple]: retcode = [retcode]
+    if p.returncode not in retcode:
+        return failure("retcode: %d, expected one of: %s" % (p.returncode, retcode))
+
+    if type(must_find) not in [ list, tuple ]: must_find = [must_find]
+    if type(must_find_re) not in [ list, tuple ]: must_find_re = [must_find_re]
+    if type(must_not_find) not in [ list, tuple ]: must_not_find = [must_not_find]
+    if type(must_not_find_re) not in [ list, tuple ]: must_not_find_re = [must_not_find_re]
+
+    find_list = []
+    find_list.extend(compile_list(must_find))
+    find_list.extend(compile_list(must_find_re, regexps = True))
+    find_list_patterns = []
+    find_list_patterns.extend(must_find)
+    find_list_patterns.extend(must_find_re)
+
+    not_find_list = []
+    not_find_list.extend(compile_list(must_not_find))
+    not_find_list.extend(compile_list(must_not_find_re, regexps = True))
+    not_find_list_patterns = []
+    not_find_list_patterns.extend(must_not_find)
+    not_find_list_patterns.extend(must_not_find_re)
+
+    for index in range(len(find_list)):
+        stdout = unicodise(stdout)
+        match = find_list[index].search(stdout)
+        if not match:
+            return failure("pattern not found: %s" % find_list_patterns[index])
+    for index in range(len(not_find_list)):
+        match = not_find_list[index].search(stdout)
+        if match:
+            return failure("pattern found: %s (match: %s)" % (not_find_list_patterns[index], match.group(0)))
+
+    return success()
+
+def test_s3cmd(label, cmd_args = [], **kwargs):
+    if not cmd_args[0].endswith("s3cmd"):
+        cmd_args.insert(0, "python")
+        cmd_args.insert(1, "s3cmd")
+        if config_file:
+            cmd_args.insert(2, "-c")
+            cmd_args.insert(3, config_file)
+
+    return test(label, cmd_args, **kwargs)
+
+def test_mkdir(label, dir_name):
+    if os.name in ("posix", "nt"):
+        cmd = ['mkdir', '-p']
+    else:
+        print("Unknown platform: %s" % os.name)
+        sys.exit(1)
+    cmd.append(dir_name)
+    return test(label, cmd)
+
+def test_rmdir(label, dir_name):
+    if os.path.isdir(dir_name):
+        if os.name == "posix":
+            cmd = ['rm', '-rf']
+        elif os.name == "nt":
+            cmd = ['rmdir', '/s/q']
+        else:
+            print("Unknown platform: %s" % os.name)
+            sys.exit(1)
+        cmd.append(dir_name)
+        return test(label, cmd)
+    else:
+        return test(label, [])
+
+def test_flushdir(label, dir_name):
+    test_rmdir(label + "(rm)", dir_name)
+    return test_mkdir(label + "(mk)", dir_name)
+
+def test_copy(label, src_file, dst_file):
+    if os.name == "posix":
+        cmd = ['cp', '-f']
+    elif os.name == "nt":
+        cmd = ['copy']
+    else:
+        print("Unknown platform: %s" % os.name)
+        sys.exit(1)
+    cmd.append(src_file)
+    cmd.append(dst_file)
+    return test(label, cmd)
+
+def test_curl_HEAD(label, src_file, **kwargs):
+    cmd = ['curl', '--silent', '--head', '-include', '--location']
+    cmd.append(src_file)
+    return test(label, cmd, **kwargs)
+
+bucket_prefix = u"%s-" % getpass.getuser().lower()
+
+argv = sys.argv[1:]
+while argv:
+    arg = argv.pop(0)
+    if arg.startswith('--bucket-prefix='):
+        print("Usage: '--bucket-prefix PREFIX', not '--bucket-prefix=PREFIX'")
+        sys.exit(0)
+    if arg in ("-h", "--help"):
+        print("%s A B K..O -N" % sys.argv[0])
+        print("Run tests number A, B and K through to O, except for N")
+        sys.exit(0)
+
+    if arg in ("-c", "--config"):
+        config_file = argv.pop(0)
+        continue
+    if arg in ("-l", "--list"):
+        exclude_tests = range(0, 999)
+        break
+    if arg in ("-v", "--verbose"):
+        verbose = True
+        continue
+    if arg in ("-p", "--bucket-prefix"):
+        try:
+            bucket_prefix = argv.pop(0)
+        except IndexError:
+            print("Bucket prefix option must explicitly supply a bucket name prefix")
+            sys.exit(0)
+        continue
+    if ".." in arg:
+        range_idx = arg.find("..")
+        range_start = arg[:range_idx] or 0
+        range_end = arg[range_idx+2:] or 999
+        run_tests.extend(range(int(range_start), int(range_end) + 1))
+    elif arg.startswith("-"):
+        exclude_tests.append(int(arg[1:]))
+    else:
+        run_tests.append(int(arg))
+
+print("Using bucket prefix: '%s'" % bucket_prefix)
+
+cfg = S3.Config.Config(config_file)
+
+if not run_tests:
+    run_tests = range(0, 999)
+
+# helper functions for generating bucket names
+def bucket(tail):
+        '''Test bucket name'''
+        label = 'autotest'
+        if str(tail) == '3':
+                label = 'autotest'
+        return '%ss3cmd-%s-%s' % (bucket_prefix, label, tail)
+
+def pbucket(tail):
+        '''Like bucket(), but prepends "s3://" for you'''
+        return 's3://' + bucket(tail)
+
+## ====== Remove test buckets
+test_s3cmd("Remove test buckets", ['rb', '-r', '--force', pbucket(1), pbucket(2), pbucket(3)])
+
+## ====== verify they were removed
+test_s3cmd("Verify no test buckets", ['ls'],
+           must_not_find = [pbucket(1), pbucket(2), pbucket(3)])
+
+
+## ====== Create one bucket (EU)
+test_s3cmd("Create one bucket (EU)", ['mb', '--bucket-location=EU', pbucket(1)],
+    must_find = "Bucket '%s/' created" % pbucket(1))
+
+
+
+## ====== Create multiple buckets
+test_s3cmd("Create multiple buckets", ['mb', pbucket(2), pbucket(3)],
+    must_find = [ "Bucket '%s/' created" % pbucket(2), "Bucket '%s/' created" % pbucket(3)])
+
+
+## ====== Invalid bucket name
+test_s3cmd("Invalid bucket name", ["mb", "--bucket-location=EU", pbucket('EU')],
+    retcode = EX_USAGE,
+    must_find = "ERROR: Parameter problem: Bucket name '%s' contains disallowed character" % bucket('EU'),
+    must_not_find_re = "Bucket.*created")
+
+
+## ====== Buckets list
+test_s3cmd("Buckets list", ["ls"],
+    must_find = [ "autotest-1", "autotest-2", "Autotest-3" ], must_not_find_re = "autotest-EU")
+
+
+## ====== Sync to S3
+test_s3cmd("Sync to S3", ['sync', 'testsuite/', pbucket(1) + '/xyz/', '--exclude', 'demo/*', '--exclude', '*.png', '--no-encrypt', '--exclude-from', 'testsuite/exclude.encodings' ],
+           must_find = [ "ERROR: Upload of 'testsuite/permission-tests/permission-denied.txt' is not possible (Reason: Permission denied)",
+                         "WARNING: 32 non-printable characters replaced in: crappy-file-name/non-printables",
+           ],
+           must_not_find_re = [ "demo/", "^(?!WARNING: Skipping).*\.png$", "permission-denied-dir" ],
+           retcode = EX_PARTIAL)
+
+if have_encoding:
+    ## ====== Sync UTF-8 / GBK / ... to S3
+    test_s3cmd(u"Sync %s to S3" % encoding, ['sync', 'testsuite/encodings/' + encoding, '%s/xyz/encodings/' % pbucket(1), '--exclude', 'demo/*', '--no-encrypt' ],
+        must_find = [ u"'testsuite/encodings/%(encoding)s/%(pattern)s' -> '%(pbucket)s/xyz/encodings/%(encoding)s/%(pattern)s'" % { 'encoding' : encoding, 'pattern' : enc_pattern , 'pbucket' : pbucket(1)} ])
+
+
+## ====== List bucket content
+test_s3cmd("List bucket content", ['ls', '%s/xyz/' % pbucket(1) ],
+    must_find_re = [ u"DIR +%s/xyz/binary/$" % pbucket(1) , u"DIR +%s/xyz/etc/$" % pbucket(1) ],
+    must_not_find = [ u"random-crap.md5", u"/demo" ])
+
+
+## ====== List bucket recursive
+must_find = [ u"%s/xyz/binary/random-crap.md5" % pbucket(1) ]
+if have_encoding:
+    must_find.append(u"%(pbucket)s/xyz/encodings/%(encoding)s/%(pattern)s" % { 'encoding' : encoding, 'pattern' : enc_pattern, 'pbucket' : pbucket(1) })
+
+test_s3cmd("List bucket recursive", ['ls', '--recursive', pbucket(1)],
+    must_find = must_find,
+    must_not_find = [ "logo.png" ])
+
+## ====== FIXME
+test_s3cmd("Recursive put", ['put', '--recursive', 'testsuite/etc', '%s/xyz/' % pbucket(1) ])
+
+
+## ====== Clean up local destination dir
+test_flushdir("Clean testsuite-out/", "testsuite-out")
+
+## ====== Put from stdin
+f = open('testsuite/single-file/single-file.txt', 'r')
+test_s3cmd("Put from stdin", ['put', '-', '%s/single-file/single-file.txt' % pbucket(1)],
+           must_find = ["'<stdin>' -> '%s/single-file/single-file.txt'" % pbucket(1)],
+           stdin = f)
+f.close()
+
+## ====== Multipart put
+os.system('mkdir -p testsuite-out')
+os.system('dd if=/dev/urandom of=testsuite-out/urandom.bin bs=1M count=16 > /dev/null 2>&1')
+test_s3cmd("Put multipart", ['put', '--multipart-chunk-size-mb=5', 'testsuite-out/urandom.bin', '%s/urandom.bin' % pbucket(1)],
+           must_not_find = ['abortmp'])
+
+## ====== Multipart put from stdin
+f = open('testsuite-out/urandom.bin', 'r')
+test_s3cmd("Multipart large put from stdin", ['put', '--multipart-chunk-size-mb=5', '-', '%s/urandom2.bin' % pbucket(1)],
+           must_find = ['%s/urandom2.bin' % pbucket(1)],
+           must_not_find = ['abortmp'],
+           stdin = f)
+f.close()
+
+## ====== Clean up local destination dir
+test_flushdir("Clean testsuite-out/", "testsuite-out")
+
+## ====== Moving things without trailing '/'
+os.system('dd if=/dev/urandom of=testsuite-out/urandom1.bin bs=1k count=1 > /dev/null 2>&1')
+os.system('dd if=/dev/urandom of=testsuite-out/urandom2.bin bs=1k count=1 > /dev/null 2>&1')
+test_s3cmd("Put multiple files", ['put', 'testsuite-out/urandom1.bin', 'testsuite-out/urandom2.bin', '%s/' % pbucket(1)],
+           must_find = ["%s/urandom1.bin" % pbucket(1), "%s/urandom2.bin" % pbucket(1)])
+
+test_s3cmd("Move without '/'", ['mv', '%s/urandom1.bin' % pbucket(1), '%s/urandom2.bin' % pbucket(1), '%s/dir' % pbucket(1)],
+           retcode = 64,
+           must_find = ['Destination must be a directory'])
+
+test_s3cmd("Move recursive w/a '/'",
+           ['-r', 'mv', '%s/dir1' % pbucket(1), '%s/dir2' % pbucket(1)],
+           retcode = 64,
+           must_find = ['Destination must be a directory'])
+
+## ====== Moving multiple files into directory with trailing '/'
+must_find = ["'%s/urandom1.bin' -> '%s/dir/urandom1.bin'" % (pbucket(1),pbucket(1)), "'%s/urandom2.bin' -> '%s/dir/urandom2.bin'" % (pbucket(1),pbucket(1))]
+must_not_find = ["'%s/urandom1.bin' -> '%s/dir'" % (pbucket(1),pbucket(1)), "'%s/urandom2.bin' -> '%s/dir'" % (pbucket(1),pbucket(1))]
+test_s3cmd("Move multiple files",
+           ['mv', '%s/urandom1.bin' % pbucket(1), '%s/urandom2.bin' % pbucket(1), '%s/dir/' % pbucket(1)],
+           must_find = must_find,
+           must_not_find = must_not_find)
+
+## ====== Clean up local destination dir
+test_flushdir("Clean testsuite-out/", "testsuite-out")
+
+## ====== Sync from S3
+must_find = [ "'%s/xyz/binary/random-crap.md5' -> 'testsuite-out/xyz/binary/random-crap.md5'" % pbucket(1) ]
+if have_encoding:
+    must_find.append(u"'%(pbucket)s/xyz/encodings/%(encoding)s/%(pattern)s' -> 'testsuite-out/xyz/encodings/%(encoding)s/%(pattern)s' " % { 'encoding' : encoding, 'pattern' : enc_pattern, 'pbucket' : pbucket(1) })
+test_s3cmd("Sync from S3", ['sync', '%s/xyz' % pbucket(1), 'testsuite-out'],
+    must_find = must_find)
+
+## ====== Remove 'demo' directory
+test_rmdir("Remove 'dir-test/'", "testsuite-out/xyz/dir-test/")
+
+
+## ====== Create dir with name of a file
+test_mkdir("Create file-dir dir", "testsuite-out/xyz/dir-test/file-dir")
+
+
+## ====== Skip dst dirs
+test_s3cmd("Skip over dir", ['sync', '%s/xyz' % pbucket(1), 'testsuite-out'],
+           must_find = "ERROR: Download of 'xyz/dir-test/file-dir' failed (Reason: testsuite-out/xyz/dir-test/file-dir is a directory)",
+           retcode = EX_PARTIAL)
+
+
+## ====== Clean up local destination dir
+test_flushdir("Clean testsuite-out/", "testsuite-out")
+
+
+## ====== Put public, guess MIME
+test_s3cmd("Put public, guess MIME", ['put', '--guess-mime-type', '--acl-public', 'testsuite/etc/logo.png', '%s/xyz/etc/logo.png' % pbucket(1)],
+    must_find = [ "-> '%s/xyz/etc/logo.png'" % pbucket(1) ])
+
+
+## ====== Retrieve from URL
+if have_curl:
+    test_curl_HEAD("Retrieve from URL", 'http://%s.%s/xyz/etc/logo.png' % (bucket(1), cfg.host_base),
+                   must_find_re = ['Content-Length: 22059'])
+
+## ====== Change ACL to Private
+test_s3cmd("Change ACL to Private", ['setacl', '--acl-private', '%s/xyz/etc/l*.png' % pbucket(1)],
+    must_find = [ "logo.png: ACL set to Private" ])
+
+
+## ====== Verify Private ACL
+if have_curl:
+    test_curl_HEAD("Verify Private ACL", 'http://%s.%s/xyz/etc/logo.png' % (bucket(1), cfg.host_base),
+                   must_find_re = [ '403 Forbidden' ])
+
+
+## ====== Change ACL to Public
+test_s3cmd("Change ACL to Public", ['setacl', '--acl-public', '--recursive', '%s/xyz/etc/' % pbucket(1) , '-v'],
+    must_find = [ "logo.png: ACL set to Public" ])
+
+
+## ====== Verify Public ACL
+if have_curl:
+    test_curl_HEAD("Verify Public ACL", 'http://%s.%s/xyz/etc/logo.png' % (bucket(1), cfg.host_base),
+                   must_find_re = [ '200 OK',
+                                    'Content-Length: 22059'])
+
+
+## ====== Sync more to S3
+test_s3cmd("Sync more to S3", ['sync', 'testsuite/', 's3://%s/xyz/' % bucket(1), '--no-encrypt' ],
+           must_find = [ "'testsuite/demo/some-file.xml' -> '%s/xyz/demo/some-file.xml' " % pbucket(1) ],
+           must_not_find = [ "'testsuite/etc/linked.png' -> '%s/xyz/etc/linked.png'" % pbucket(1) ],
+           retcode = EX_PARTIAL)
+
+
+## ====== Don't check MD5 sum on Sync
+test_copy("Change file cksum1.txt", "testsuite/checksum/cksum2.txt", "testsuite/checksum/cksum1.txt")
+test_copy("Change file cksum33.txt", "testsuite/checksum/cksum2.txt", "testsuite/checksum/cksum33.txt")
+test_s3cmd("Don't check MD5", ['sync', 'testsuite/', 's3://%s/xyz/' % bucket(1), '--no-encrypt', '--no-check-md5'],
+           must_find = [ "cksum33.txt" ],
+           must_not_find = [ "cksum1.txt" ],
+           retcode = EX_PARTIAL)
+
+
+## ====== Check MD5 sum on Sync
+test_s3cmd("Check MD5", ['sync', 'testsuite/', 's3://%s/xyz/' % bucket(1), '--no-encrypt', '--check-md5'],
+           must_find = [ "cksum1.txt" ],
+           retcode = EX_PARTIAL)
+
+
+## ====== Rename within S3
+test_s3cmd("Rename within S3", ['mv', '%s/xyz/etc/logo.png' % pbucket(1), '%s/xyz/etc2/Logo.PNG' % pbucket(1)],
+    must_find = [ "move: '%s/xyz/etc/logo.png' -> '%s/xyz/etc2/Logo.PNG'" % (pbucket(1), pbucket(1))])
+
+
+## ====== Rename (NoSuchKey)
+test_s3cmd("Rename (NoSuchKey)", ['mv', '%s/xyz/etc/logo.png' % pbucket(1), '%s/xyz/etc2/Logo.PNG' % pbucket(1)],
+    retcode = EX_NOTFOUND,
+    must_find_re = [ 'Key not found' ],
+    must_not_find = [ "move: '%s/xyz/etc/logo.png' -> '%s/xyz/etc2/Logo.PNG'" % (pbucket(1), pbucket(1)) ])
+
+## ====== Sync more from S3 (invalid src)
+test_s3cmd("Sync more from S3 (invalid src)", ['sync', '--delete-removed', '%s/xyz/DOESNOTEXIST' % pbucket(1), 'testsuite-out'],
+    must_not_find = [ "delete: 'testsuite-out/logo.png'" ])
+
+## ====== Sync more from S3
+test_s3cmd("Sync more from S3", ['sync', '--delete-removed', '%s/xyz' % pbucket(1), 'testsuite-out'],
+    must_find = [ "'%s/xyz/etc2/Logo.PNG' -> 'testsuite-out/xyz/etc2/Logo.PNG'" % pbucket(1),
+                  "'%s/xyz/demo/some-file.xml' -> 'testsuite-out/xyz/demo/some-file.xml'" % pbucket(1) ],
+    must_not_find_re = [ "not-deleted.*etc/logo.png", "delete: 'testsuite-out/logo.png'" ])
+
+
+## ====== Make dst dir for get
+test_rmdir("Remove dst dir for get", "testsuite-out")
+
+
+## ====== Get multiple files
+test_s3cmd("Get multiple files", ['get', '%s/xyz/etc2/Logo.PNG' % pbucket(1), '%s/xyz/etc/AtomicClockRadio.ttf' % pbucket(1), 'testsuite-out'],
+    retcode = EX_USAGE,
+    must_find = [ 'Destination must be a directory or stdout when downloading multiple sources.' ])
+
+## ====== put/get non-ASCII filenames
+test_s3cmd("Put unicode filenames", ['put', u'testsuite/encodings/UTF-8/ŪņЇЌœđЗ/Žůžo',  u'%s/xyz/encodings/UTF-8/ŪņЇЌœđЗ/Žůžo' % pbucket(1)],
+           retcode = 0,
+           must_find = [ '->' ])
+
+
+## ====== Make dst dir for get
+test_mkdir("Make dst dir for get", "testsuite-out")
+
+
+## ====== put/get non-ASCII filenames
+test_s3cmd("Get unicode filenames", ['get', u'%s/xyz/encodings/UTF-8/ŪņЇЌœđЗ/Žůžo' % pbucket(1), 'testsuite-out'],
+           retcode = 0,
+           must_find = [ '->' ])
+
+
+## ====== Get multiple files
+test_s3cmd("Get multiple files", ['get', '%s/xyz/etc2/Logo.PNG' % pbucket(1), '%s/xyz/etc/AtomicClockRadio.ttf' % pbucket(1), 'testsuite-out'],
+    must_find = [ u"-> 'testsuite-out/Logo.PNG'",
+                  u"-> 'testsuite-out/AtomicClockRadio.ttf'" ])
+
+## ====== Upload files differing in capitalisation
+test_s3cmd("blah.txt / Blah.txt", ['put', '-r', 'testsuite/blahBlah', pbucket(1)],
+    must_find = [ '%s/blahBlah/Blah.txt' % pbucket(1), '%s/blahBlah/blah.txt' % pbucket(1)])
+
+## ====== Copy between buckets
+test_s3cmd("Copy between buckets", ['cp', '%s/xyz/etc2/Logo.PNG' % pbucket(1), '%s/xyz/etc2/logo.png' % pbucket(3)],
+    must_find = [ "remote copy: '%s/xyz/etc2/Logo.PNG' -> '%s/xyz/etc2/logo.png'" % (pbucket(1), pbucket(3)) ])
+
+## ====== Recursive copy
+test_s3cmd("Recursive copy, set ACL", ['cp', '-r', '--acl-public', '%s/xyz/' % pbucket(1), '%s/copy/' % pbucket(2), '--exclude', 'demo/dir?/*.txt', '--exclude', 'non-printables*'],
+    must_find = [ "remote copy: '%s/xyz/etc2/Logo.PNG' -> '%s/copy/etc2/Logo.PNG'" % (pbucket(1), pbucket(2)),
+                  "remote copy: '%s/xyz/blahBlah/Blah.txt' -> '%s/copy/blahBlah/Blah.txt'" % (pbucket(1), pbucket(2)),
+                  "remote copy: '%s/xyz/blahBlah/blah.txt' -> '%s/copy/blahBlah/blah.txt'" % (pbucket(1), pbucket(2)) ],
+    must_not_find = [ "demo/dir1/file1-1.txt" ])
+
+## ====== Verify ACL and MIME type
+test_s3cmd("Verify ACL and MIME type", ['info', '%s/copy/etc2/Logo.PNG' % pbucket(2) ],
+    must_find_re = [ "MIME type:.*image/png",
+                     "ACL:.*\*anon\*: READ",
+                     "URL:.*http://%s.%s/copy/etc2/Logo.PNG" % (bucket(2), cfg.host_base) ])
+
+## ====== modify MIME type
+test_s3cmd("Modify MIME type", ['modify', '--mime-type=binary/octet-stream', '%s/copy/etc2/Logo.PNG' % pbucket(2) ])
+
+test_s3cmd("Verify ACL and MIME type", ['info', '%s/copy/etc2/Logo.PNG' % pbucket(2) ],
+    must_find_re = [ "MIME type:.*binary/octet-stream",
+                     "ACL:.*\*anon\*: READ",
+                     "URL:.*http://%s.%s/copy/etc2/Logo.PNG" % (bucket(2), cfg.host_base) ])
+
+test_s3cmd("Modify MIME type back", ['modify', '--mime-type=image/png', '%s/copy/etc2/Logo.PNG' % pbucket(2) ])
+
+test_s3cmd("Verify ACL and MIME type", ['info', '%s/copy/etc2/Logo.PNG' % pbucket(2) ],
+    must_find_re = [ "MIME type:.*image/png",
+                     "ACL:.*\*anon\*: READ",
+                     "URL:.*http://%s.%s/copy/etc2/Logo.PNG" % (bucket(2), cfg.host_base) ])
+
+test_s3cmd("Add cache-control header", ['modify', '--add-header=cache-control: max-age=3600, public', '%s/copy/etc2/Logo.PNG' % pbucket(2) ],
+    must_find_re = [ "modify: .*" ])
+
+if have_curl:
+    test_curl_HEAD("HEAD check Cache-Control present", 'http://%s.%s/copy/etc2/Logo.PNG' % (bucket(2), cfg.host_base),
+                   must_find_re = [ "Cache-Control: max-age=3600" ])
+
+test_s3cmd("Remove cache-control header", ['modify', '--remove-header=cache-control', '%s/copy/etc2/Logo.PNG' % pbucket(2) ],
+           must_find_re = [ "modify: .*" ])
+
+if have_curl:
+    test_curl_HEAD("HEAD check Cache-Control not present", 'http://%s.%s/copy/etc2/Logo.PNG' % (bucket(2), cfg.host_base),
+                   must_not_find_re = [ "Cache-Control: max-age=3600" ])
+
+## ====== sign
+test_s3cmd("sign string", ['sign', 's3cmd'], must_find_re = ["Signature:"])
+test_s3cmd("signurl time", ['signurl', '%s/copy/etc2/Logo.PNG' % pbucket(2), str(int(time.time()) + 60)], must_find_re = ["http://"])
+test_s3cmd("signurl time offset", ['signurl', '%s/copy/etc2/Logo.PNG' % pbucket(2), '+60'], must_find_re = ["https?://"])
+test_s3cmd("signurl content disposition and type", ['signurl', '%s/copy/etc2/Logo.PNG' % pbucket(2), '+60', '--content-disposition=inline; filename=video.mp4', '--content-type=video/mp4'], must_find_re = [ 'response-content-disposition', 'response-content-type' ] )
+
+## ====== Rename within S3
+test_s3cmd("Rename within S3", ['mv', '%s/copy/etc2/Logo.PNG' % pbucket(2), '%s/copy/etc/logo.png' % pbucket(2)],
+    must_find = [ "move: '%s/copy/etc2/Logo.PNG' -> '%s/copy/etc/logo.png'" % (pbucket(2), pbucket(2))])
+
+## ====== Sync between buckets
+test_s3cmd("Sync remote2remote", ['sync', '%s/xyz/' % pbucket(1), '%s/copy/' % pbucket(2), '--delete-removed', '--exclude', 'non-printables*'],
+    must_find = [ "remote copy: '%s/xyz/demo/dir1/file1-1.txt' -> '%s/copy/demo/dir1/file1-1.txt'" % (pbucket(1), pbucket(2)),
+                  "remote copy: 'etc/logo.png' -> 'etc2/Logo.PNG'",
+                  "delete: '%s/copy/etc/logo.png'" % pbucket(2) ],
+    must_not_find = [ "blah.txt" ])
+
+## ====== Don't Put symbolic link
+test_s3cmd("Don't put symbolic links", ['put', 'testsuite/etc/linked1.png', 's3://%s/xyz/' % bucket(1),],
+           retcode = EX_USAGE,
+           must_find = ["WARNING: Skipping over symbolic link: testsuite/etc/linked1.png"],
+           must_not_find_re = ["^(?!WARNING: Skipping).*linked1.png"])
+
+## ====== Put symbolic link
+test_s3cmd("Put symbolic links", ['put', 'testsuite/etc/linked1.png', 's3://%s/xyz/' % bucket(1),'--follow-symlinks' ],
+           must_find = [ "'testsuite/etc/linked1.png' -> '%s/xyz/linked1.png'" % pbucket(1)])
+
+## ====== Sync symbolic links
+test_s3cmd("Sync symbolic links", ['sync', 'testsuite/', 's3://%s/xyz/' % bucket(1), '--no-encrypt', '--follow-symlinks' ],
+    must_find = ["remote copy: 'etc2/Logo.PNG' -> 'etc/linked.png'"],
+           # Don't want to recursively copy linked directories!
+           must_not_find_re = ["etc/more/linked-dir/more/give-me-more.txt",
+                               "etc/brokenlink.png"],
+           retcode = EX_PARTIAL)
+
+## ====== Multi source move
+test_s3cmd("Multi-source move", ['mv', '-r', '%s/copy/blahBlah/Blah.txt' % pbucket(2), '%s/copy/etc/' % pbucket(2), '%s/moved/' % pbucket(2)],
+    must_find = [ "move: '%s/copy/blahBlah/Blah.txt' -> '%s/moved/Blah.txt'" % (pbucket(2), pbucket(2)),
+                  "move: '%s/copy/etc/AtomicClockRadio.ttf' -> '%s/moved/AtomicClockRadio.ttf'" % (pbucket(2), pbucket(2)),
+                  "move: '%s/copy/etc/TypeRa.ttf' -> '%s/moved/TypeRa.ttf'" % (pbucket(2), pbucket(2)) ],
+    must_not_find = [ "blah.txt" ])
+
+## ====== Verify move
+test_s3cmd("Verify move", ['ls', '-r', pbucket(2)],
+    must_find = [ "%s/moved/Blah.txt" % pbucket(2),
+                  "%s/moved/AtomicClockRadio.ttf" % pbucket(2),
+                  "%s/moved/TypeRa.ttf" % pbucket(2),
+                  "%s/copy/blahBlah/blah.txt" % pbucket(2) ],
+    must_not_find = [ "%s/copy/blahBlah/Blah.txt" % pbucket(2),
+                      "%s/copy/etc/AtomicClockRadio.ttf" % pbucket(2),
+                      "%s/copy/etc/TypeRa.ttf" % pbucket(2) ])
+
+## ====== List all
+test_s3cmd("List all", ['la'],
+           must_find = [ "%s/urandom.bin" % pbucket(1)])
+
+## ====== Simple delete
+test_s3cmd("Simple delete", ['del', '%s/xyz/etc2/Logo.PNG' % pbucket(1)],
+    must_find = [ "delete: '%s/xyz/etc2/Logo.PNG'" % pbucket(1) ])
+
+## ====== Simple delete with rm
+test_s3cmd("Simple delete with rm", ['rm', '%s/xyz/test_rm/TypeRa.ttf' % pbucket(1)],
+    must_find = [ "delete: '%s/xyz/test_rm/TypeRa.ttf'" % pbucket(1) ])
+
+## ====== Create expiration rule with days and prefix
+test_s3cmd("Create expiration rule with days and prefix", ['expire', pbucket(1), '--expiry-days=365', '--expiry-prefix=log/'],
+    must_find = [ "Bucket '%s/': expiration configuration is set." % pbucket(1)])
+
+## ====== Create expiration rule with date and prefix
+test_s3cmd("Create expiration rule with date and prefix", ['expire', pbucket(1), '--expiry-date=2020-12-31T00:00:00.000Z', '--expiry-prefix=log/'],
+    must_find = [ "Bucket '%s/': expiration configuration is set." % pbucket(1)])
+
+## ====== Create expiration rule with days only
+test_s3cmd("Create expiration rule with days only", ['expire', pbucket(1), '--expiry-days=365'],
+    must_find = [ "Bucket '%s/': expiration configuration is set." % pbucket(1)])
+
+## ====== Create expiration rule with date only
+test_s3cmd("Create expiration rule with date only", ['expire', pbucket(1), '--expiry-date=2020-12-31T00:00:00.000Z'],
+    must_find = [ "Bucket '%s/': expiration configuration is set." % pbucket(1)])
+
+## ====== Get current expiration setting
+test_s3cmd("Get current expiration setting", ['info', pbucket(1)],
+    must_find = [ "Expiration Rule: all objects in this bucket will expire in '2020-12-31T00:00:00.000Z'"])
+
+## ====== Delete expiration rule
+test_s3cmd("Delete expiration rule", ['expire', pbucket(1)],
+    must_find = [ "Bucket '%s/': expiration configuration is deleted." % pbucket(1)])
+
+## ====== set Requester Pays flag
+test_s3cmd("Set requester pays", ['payer', '--requester-pays', pbucket(2)])
+
+## ====== get Requester Pays flag
+test_s3cmd("Get requester pays flag", ['info', pbucket(2)],
+    must_find = [ "Payer:     Requester"])
+
+## ====== ls using Requester Pays flag
+test_s3cmd("ls using requester pays flag", ['ls', '--requester-pays', pbucket(2)])
+
+## ====== clear Requester Pays flag
+test_s3cmd("Clear requester pays", ['payer', pbucket(2)])
+
+## ====== get Requester Pays flag
+test_s3cmd("Get requester pays flag", ['info', pbucket(2)],
+    must_find = [ "Payer:     BucketOwner"])
+
+## ====== Recursive delete maximum exceeded
+test_s3cmd("Recursive delete maximum exceeded", ['del', '--recursive', '--max-delete=1', '--exclude', 'Atomic*', '%s/xyz/etc' % pbucket(1)],
+    must_not_find = [ "delete: '%s/xyz/etc/TypeRa.ttf'" % pbucket(1) ])
+
+## ====== Recursive delete
+test_s3cmd("Recursive delete", ['del', '--recursive', '--exclude', 'Atomic*', '%s/xyz/etc' % pbucket(1)],
+    must_find = [ "delete: '%s/xyz/etc/TypeRa.ttf'" % pbucket(1) ],
+    must_find_re = [ "delete: '.*/etc/logo.png'" ],
+    must_not_find = [ "AtomicClockRadio.ttf" ])
+
+## ====== Recursive delete with rm
+test_s3cmd("Recursive delete with rm", ['rm', '--recursive', '--exclude', 'Atomic*', '%s/xyz/test_rm' % pbucket(1)],
+    must_find = [ "delete: '%s/xyz/test_rm/more/give-me-more.txt'" % pbucket(1) ],
+    must_find_re = [ "delete: '.*/test_rm/logo.png'" ],
+    must_not_find = [ "AtomicClockRadio.ttf" ])
+
+## ====== Recursive delete all
+test_s3cmd("Recursive delete all", ['del', '--recursive', '--force', pbucket(1)],
+    must_find_re = [ "delete: '.*binary/random-crap'" ])
+
+## ====== Remove empty bucket
+test_s3cmd("Remove empty bucket", ['rb', pbucket(1)],
+    must_find = [ "Bucket '%s/' removed" % pbucket(1) ])
+
+## ====== Remove remaining buckets
+test_s3cmd("Remove remaining buckets", ['rb', '--recursive', pbucket(2), pbucket(3)],
+    must_find = [ "Bucket '%s/' removed" % pbucket(2),
+              "Bucket '%s/' removed" % pbucket(3) ])
+
+# vim:et:ts=4:sts=4:ai
diff --git a/s3cmd b/s3cmd
index 2ef1350..cef1e9c 100755
--- a/s3cmd
+++ b/s3cmd
@@ -102,8 +102,8 @@ def subcmd_bucket_usage_all(s3):
             buckets_size += size
     total_size, size_coeff = formatSize(buckets_size, cfg.human_readable_sizes)
     total_size_str = str(total_size) + size_coeff
-    output(u"".rjust(8, "-"))
-    output(u"%s Total" % (total_size_str.ljust(8)))
+    output(u"".rjust(12, "-"))
+    output(u"%s Total" % (total_size_str.ljust(12)))
     return size
 
 def subcmd_bucket_usage(s3, uri):
@@ -131,9 +131,14 @@ def subcmd_bucket_usage(s3, uri):
     except KeyboardInterrupt as e:
         extra_info = u' [interrupted]'
 
-    total_size, size_coeff = formatSize(bucket_size, Config().human_readable_sizes)
-    total_size_str = str(total_size) + size_coeff
-    output(u"%s %s objects %s%s" % (total_size_str.ljust(8), object_count, uri, extra_info))
+    total_size_str = u"%d%s" % formatSize(bucket_size,
+                                          Config().human_readable_sizes)
+    if Config().human_readable_sizes:
+        total_size_str = total_size_str.rjust(5)
+    else:
+        total_size_str = total_size_str.rjust(12)
+    output(u"%s %7s objects %s%s" % (total_size_str, object_count, uri,
+                                     extra_info))
     return bucket_size
 
 def cmd_ls(args):
@@ -184,18 +189,25 @@ def subcmd_bucket_list(s3, uri, limit):
             error(S3.codes[e.info["Code"]] % bucket)
         raise
 
+    # md5 are 32 char long, but for multipart there could be a suffix
+    if Config().human_readable_sizes:
+        # %(size)5s%(coeff)1s
+        format_size = u"%5d%1s"
+        dir_str = u"DIR".rjust(6)
+    else:
+        format_size = u"%12d%s"
+        dir_str = u"DIR".rjust(12)
     if cfg.long_listing:
-        format_string = u"%(timestamp)16s %(size)9s%(coeff)1s  %(md5)32s  %(storageclass)s  %(uri)s"
+        format_string = u"%(timestamp)16s %(size)s  %(md5)-35s  %(storageclass)-11s  %(uri)s"
     elif cfg.list_md5:
-        format_string = u"%(timestamp)16s %(size)9s%(coeff)1s  %(md5)32s  %(uri)s"
+        format_string = u"%(timestamp)16s %(size)s  %(md5)-35s  %(uri)s"
     else:
-        format_string = u"%(timestamp)16s %(size)9s%(coeff)1s  %(uri)s"
+        format_string = u"%(timestamp)16s %(size)s  %(uri)s"
 
     for prefix in response['common_prefixes']:
         output(format_string % {
             "timestamp": "",
-            "size": "DIR",
-            "coeff": "",
+            "size": dir_str,
             "md5": "",
             "storageclass": "",
             "uri": uri.compose_uri(bucket, prefix["Prefix"])})
@@ -213,11 +225,11 @@ def subcmd_bucket_list(s3, uri, limit):
                 except KeyError:
                     pass
 
-        size, size_coeff = formatSize(object["Size"], Config().human_readable_sizes)
+        size_and_coeff = formatSize(object["Size"],
+                                    Config().human_readable_sizes)
         output(format_string % {
             "timestamp": formatDateTime(object["LastModified"]),
-            "size" : str(size),
-            "coeff": size_coeff,
+            "size" : format_size % size_and_coeff,
             "md5" : md5,
             "storageclass" : storageclass,
             "uri": uri.compose_uri(bucket, object["Key"]),
@@ -2650,7 +2662,7 @@ def main():
     optparser.add_option(      "--no-encrypt", dest="encrypt", action="store_false", help="Don't encrypt files.")
     optparser.add_option("-f", "--force", dest="force", action="store_true", help="Force overwrite and other dangerous operations.")
     optparser.add_option(      "--continue", dest="get_continue", action="store_true", help="Continue getting a partially downloaded file (only for [get] command).")
-    optparser.add_option(      "--continue-put", dest="put_continue", action="store_true", help="Continue uploading partially uploaded files or multipart upload parts.  Restarts/parts files that don't have matching size and md5.  Skips files/parts that do.  Note: md5sum checks are not always sufficient to check (part) file equality.  Enable this at your own risk.")
+    optparser.add_option(      "--continue-put", dest="put_continue", action="store_true", help="Continue uploading partially uploaded files or multipart upload parts.  Restarts parts/files that don't have matching size and md5.  Skips files/parts that do.  Note: md5sum checks are not always sufficient to check (part) file equality.  Enable this at your own risk.")
     optparser.add_option(      "--upload-id", dest="upload_id", help="UploadId for Multipart Upload, in case you want continue an existing upload (equivalent to --continue-put) and there are multiple partial uploads.  Use s3cmd multipart [URI] to see what UploadIds are associated with the given URI.")
     optparser.add_option(      "--skip-existing", dest="skip_existing", action="store_true", help="Skip over files that exist at the destination (only for [get] and [sync] commands).")
     optparser.add_option("-r", "--recursive", dest="recursive", action="store_true", help="Recursive upload, download or removal.")
@@ -2661,7 +2673,7 @@ def main():
     optparser.add_option(      "--acl-grant", dest="acl_grants", type="s3acl", action="append", metavar="PERMISSION:EMAIL or USER_CANONICAL_ID", help="Grant stated permission to a given amazon user. Permission is one of: read, write, read_acp, write_acp, full_control, all")
     optparser.add_option(      "--acl-revoke", dest="acl_revokes", type="s3acl", action="append", metavar="PERMISSION:USER_CANONICAL_ID", help="Revoke stated permission for a given amazon user. Permission is one of: read, write, read_acp, write_acp, full_control, all")
 
-    optparser.add_option("-D", "--restore-days", dest="restore_days", action="store", help="Number of days to keep restored file available (only for 'restore' command).", metavar="NUM")
+    optparser.add_option("-D", "--restore-days", dest="restore_days", action="store", help="Number of days to keep restored file available (only for 'restore' command). Default is 1 day.", metavar="NUM")
     optparser.add_option(      "--restore-priority", dest="restore_priority", action="store", choices=['standard', 'expedited', 'bulk'], help="Priority for restoring files from S3 Glacier (only for 'restore' command). Choices available: bulk, standard, expedited")
 
     optparser.add_option(      "--delete-removed", dest="delete_removed", action="store_true", help="Delete destination objects with no corresponding source file [sync]")
diff --git a/s3cmd.egg-info/PKG-INFO b/s3cmd.egg-info/PKG-INFO
deleted file mode 100644
index d4ec2b2..0000000
--- a/s3cmd.egg-info/PKG-INFO
+++ /dev/null
@@ -1,45 +0,0 @@
-Metadata-Version: 1.1
-Name: s3cmd
-Version: 2.0.2
-Summary: Command line tool for managing Amazon S3 and CloudFront services
-Home-page: http://s3tools.org
-Author: github.com/mdomsch, github.com/matteobar, github.com/fviard
-Author-email: s3tools-bugs@lists.sourceforge.net
-License: GNU GPL v2+
-Description-Content-Type: UNKNOWN
-Description: 
-        
-        S3cmd lets you copy files from/to Amazon S3
-        (Simple Storage Service) using a simple to use
-        command line client. Supports rsync-like backup,
-        GPG encryption, and more. Also supports management
-        of Amazon's CloudFront content delivery network.
-        
-        
-        Authors:
-        --------
-            Michal Ludvig  <michal@logix.cz>
-        
-Platform: UNKNOWN
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Environment :: Console
-Classifier: Environment :: MacOS X
-Classifier: Environment :: Win32 (MS Windows)
-Classifier: Intended Audience :: End Users/Desktop
-Classifier: Intended Audience :: System Administrators
-Classifier: License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)
-Classifier: Natural Language :: English
-Classifier: Operating System :: MacOS :: MacOS X
-Classifier: Operating System :: Microsoft :: Windows
-Classifier: Operating System :: POSIX
-Classifier: Operating System :: Unix
-Classifier: Programming Language :: Python :: 2
-Classifier: Programming Language :: Python :: 2.6
-Classifier: Programming Language :: Python :: 2.7
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.3
-Classifier: Programming Language :: Python :: 3.4
-Classifier: Programming Language :: Python :: 3.5
-Classifier: Programming Language :: Python :: 3.6
-Classifier: Topic :: System :: Archiving
-Classifier: Topic :: Utilities
diff --git a/s3cmd.egg-info/SOURCES.txt b/s3cmd.egg-info/SOURCES.txt
deleted file mode 100644
index ef097bd..0000000
--- a/s3cmd.egg-info/SOURCES.txt
+++ /dev/null
@@ -1,36 +0,0 @@
-INSTALL
-LICENSE
-MANIFEST.in
-NEWS
-README.md
-s3cmd
-s3cmd.1
-setup.cfg
-setup.py
-S3/ACL.py
-S3/AccessLog.py
-S3/BidirMap.py
-S3/CloudFront.py
-S3/Config.py
-S3/ConnMan.py
-S3/Crypto.py
-S3/Custom_httplib27.py
-S3/Custom_httplib3x.py
-S3/Exceptions.py
-S3/ExitCodes.py
-S3/FileDict.py
-S3/FileLists.py
-S3/HashCache.py
-S3/MultiPart.py
-S3/PkgInfo.py
-S3/Progress.py
-S3/S3.py
-S3/S3Uri.py
-S3/SortedDict.py
-S3/Utils.py
-S3/__init__.py
-s3cmd.egg-info/PKG-INFO
-s3cmd.egg-info/SOURCES.txt
-s3cmd.egg-info/dependency_links.txt
-s3cmd.egg-info/requires.txt
-s3cmd.egg-info/top_level.txt
\ No newline at end of file
diff --git a/s3cmd.egg-info/dependency_links.txt b/s3cmd.egg-info/dependency_links.txt
deleted file mode 100644
index 8b13789..0000000
--- a/s3cmd.egg-info/dependency_links.txt
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/s3cmd.egg-info/requires.txt b/s3cmd.egg-info/requires.txt
deleted file mode 100644
index ffde045..0000000
--- a/s3cmd.egg-info/requires.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-python-dateutil
-python-magic
diff --git a/s3cmd.egg-info/top_level.txt b/s3cmd.egg-info/top_level.txt
deleted file mode 100644
index 878cb3c..0000000
--- a/s3cmd.egg-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-S3
diff --git a/s3cmd.spec.in b/s3cmd.spec.in
new file mode 100644
index 0000000..8a40178
--- /dev/null
+++ b/s3cmd.spec.in
@@ -0,0 +1,174 @@
+%{!?python_sitelib: %define python_sitelib %(%{__python} -c "from distutils.sysconfig import get_python_lib; print get_python_lib()")}
+
+%global commit ##COMMIT##
+%global shortcommit ##SHORTCOMMIT##
+
+Name:           s3cmd
+Version:        ##VERSION##
+Release:        1%{dist}
+Summary:        Tool for accessing Amazon Simple Storage Service
+
+Group:          Applications/Internet
+License:        GPLv2
+URL:            http://s3tools.org
+# git clone https://github.com/s3tools/s3cmd
+# python setup.py sdist
+Source0:        https://github.com/s3tools/s3cmd/archive/%{commit}/%{name}-%{version}-%{shortcommit}.tar.gz
+BuildRoot:      %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n)
+BuildArch:      noarch
+
+%if %{!?fedora:16}%{?fedora} < 16 || %{!?rhel:7}%{?rhel} < 7
+BuildRequires:  python-devel
+%else
+BuildRequires:  python2-devel
+%endif
+%if %{!?fedora:8}%{?fedora} < 8 || %{!?rhel:6}%{?rhel} < 6
+# This is in standard library since 2.5
+BuildRequires:  python-elementtree
+Requires:       python-elementtree
+%endif
+BuildRequires:  python-dateutil
+BuildRequires:  python-setuptools
+Requires:       python-dateutil
+Requires:       python-magic
+
+%description
+S3cmd lets you copy files from/to Amazon S3
+(Simple Storage Service) using a simple to use
+command line client.
+
+
+%prep
+%setup -q -n s3cmd-%{commit}
+
+%build
+
+
+%install
+rm -rf $RPM_BUILD_ROOT
+S3CMD_PACKAGING=Yes python setup.py install --prefix=%{_prefix} --root=$RPM_BUILD_ROOT
+install -d $RPM_BUILD_ROOT%{_mandir}/man1
+install -m 644 s3cmd.1 $RPM_BUILD_ROOT%{_mandir}/man1
+
+
+%clean
+rm -rf $RPM_BUILD_ROOT
+
+
+%files
+%defattr(-,root,root,-)
+%{_bindir}/s3cmd
+%{_mandir}/man1/s3cmd.1*
+%{python_sitelib}/S3
+%if 0%{?fedora} >= 9 || 0%{?rhel} >= 6
+%{python_sitelib}/s3cmd*.egg-info
+%endif
+%doc NEWS README.md LICENSE
+
+
+%changelog
+* Thu Feb  5 2015 Matt Domsch <mdomsch@fedoraproject.org> - 1.5.1.2-5
+- add Requires: python-magic
+
+* Wed Feb  4 2015 Matt Domsch <mdomsch@fedoraproject.org> - 1.5.1.2-4
+- upstream 1.5.1.2, mostly bug fixes
+- add dependency on python-setuptools
+
+* Mon Jan 12 2015 Matt Domsch <mdomsch@fedoraproject.org> - 1.5.0-1
+- upstream 1.5.0 final
+
+* Tue Jul  1 2014 Matt Domsch <mdomsch@fedoraproject.org> - 1.5.0-0.6.rc1
+- upstream 1.5.0-rc1
+
+* Sun Mar 23 2014 Matt Domsch <mdomsch@fedoraproject.org> - 1.5.0-0.4.git
+- upstream 1.5.0-beta1 plus even newer upstream fixes
+
+* Sun Feb 02 2014 Matt Domsch <mdomsch@fedoraproject.org> - 1.5.0-0.3.git
+- upstream 1.5.0-beta1 plus newer upstream fixes
+
+* Wed May 29 2013 Matt Domsch <mdomsch@fedoraproject.org> - 1.5.0-0.2.gita122d97
+- more upstream bugfixes
+- drop pyxattr dep, that codepath got dropped in this release
+
+* Mon May 20 2013 Matt Domsch <mdomsch@fedoraproject.org> - 1.5.0-0.1.gitb1ae0fbe
+- upstream 1.5.0-alpha3 plus fixes
+- add dep on pyxattr for the --xattr option
+
+* Tue Jun 19 2012 Matt Domsch <mdomsch@fedoraproject.org> - 1.1.0-0.4.git11e5755e
+- add local MD5 cache
+
+* Mon Jun 18 2012 Matt Domsch <mdomsch@fedoraproject.org> - 1.1.0-0.3.git7de0789d
+- parallelize local->remote syncs
+
+* Mon Jun 18 2012 Matt Domsch <mdomsch@fedoraproject.org> - 1.1.0-0.2.gitf881b162
+- add hardlink / duplicate file detection support
+
+* Fri Mar  9 2012 Matt Domsch <mdomsch@fedoraproject.org> - 1.1.0-0.1.git2dfe4a65
+- build from git for mdomsch patches to s3cmd sync
+
+* Thu Feb 23 2012 Dennis Gilmore <dennis@ausil.us> - 1.0.1-1
+- update to 1.0.1 release
+
+* Sat Jan 14 2012 Fedora Release Engineering <rel-eng@lists.fedoraproject.org> - 1.0.0-4
+- Rebuilt for https://fedoraproject.org/wiki/Fedora_17_Mass_Rebuild
+
+* Thu May 05 2011 Lubomir Rintel (GoodData) <lubo.rintel@gooddata.com> - 1.0.0-3
+- No hashlib hackery
+
+* Wed Feb 09 2011 Fedora Release Engineering <rel-eng@lists.fedoraproject.org> - 1.0.0-2
+- Rebuilt for https://fedoraproject.org/wiki/Fedora_15_Mass_Rebuild
+
+* Tue Jan 11 2011 Lubomir Rintel (GoodData) <lubo.rintel@gooddata.com> - 1.0.0-1
+- New upstream release
+
+* Mon Nov 29 2010 Lubomir Rintel (GoodData) <lubo.rintel@gooddata.com> - 0.9.9.91-3
+- Patch for broken f14 httplib
+
+* Thu Jul 22 2010 David Malcolm <dmalcolm@redhat.com> - 0.9.9.91-2.1
+- Rebuilt for https://fedoraproject.org/wiki/Features/Python_2.7/MassRebuild
+
+* Wed Apr 28 2010 Lubomir Rintel (GoodData) <lubo.rintel@gooddata.com> - 0.9.9.91-1.1
+- Do not use sha1 from hashlib
+
+* Sun Feb 21 2010 Lubomir Rintel (Good Data) <lubo.rintel@gooddata.com> - 0.9.9.91-1
+- New upstream release
+
+* Sun Jul 26 2009 Fedora Release Engineering <rel-eng@lists.fedoraproject.org> - 0.9.9-2
+- Rebuilt for https://fedoraproject.org/wiki/Fedora_12_Mass_Rebuild
+
+* Tue Feb 24 2009 Lubomir Rintel (Good Data) <lubo.rintel@gooddata.com> - 0.9.9-1
+- New upstream release
+
+* Sat Nov 29 2008 Ignacio Vazquez-Abrams <ivazqueznet+rpm@gmail.com> - 0.9.8.4-2
+- Rebuild for Python 2.6
+
+* Tue Nov 11 2008 Lubomir Rintel (Good Data) <lubo.rintel@gooddata.com> - 0.9.8.4-1
+- New upstream release, URI encoding patch upstreamed
+
+* Fri Sep 26 2008 Lubomir Rintel (Good Data) <lubo.rintel@gooddata.com> - 0.9.8.3-4
+- Try 3/65536
+
+* Fri Sep 26 2008 Lubomir Rintel (Good Data) <lubo.rintel@gooddata.com> - 0.9.8.3-3
+- Whoops, forgot to actually apply the patch.
+
+* Fri Sep 26 2008 Lubomir Rintel (Good Data) <lubo.rintel@gooddata.com> - 0.9.8.3-2
+- Fix listing of directories with special characters in names
+
+* Thu Jul 31 2008 Lubomir Rintel (Good Data) <lubo.rintel@gooddata.com> - 0.9.8.3-1
+- New upstream release: Avoid running out-of-memory in MD5'ing large files.
+
+* Fri Jul 25 2008 Lubomir Rintel (Good Data) <lubo.rintel@gooddata.com> - 0.9.8.2-1.1
+- Fix a typo
+
+* Tue Jul 15 2008 Lubomir Rintel (Good Data) <lubo.rintel@gooddata.com> - 0.9.8.2-1
+- New upstream
+
+* Fri Jul 04 2008 Lubomir Rintel (Good Data) <lubo.rintel@gooddata.com> - 0.9.8.1-3
+- Be satisfied with ET provided by 2.5 python
+
+* Fri Jul 04 2008 Lubomir Rintel (Good Data) <lubo.rintel@gooddata.com> - 0.9.8.1-2
+- Added missing python-devel BR, thanks to Marek Mahut
+- Packaged the Python egg file
+
+* Wed Jul 02 2008 Lubomir Rintel (Good Data) <lubo.rintel@gooddata.com> - 0.9.8.1-1
+- Initial packaging attempt
diff --git a/setup.cfg b/setup.cfg
index 2ec3fc1..83d33b9 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,7 +1,2 @@
 [sdist]
 formats = gztar,zip
-
-[egg_info]
-tag_build = 
-tag_date = 0
-
diff --git a/testsuite.tar.gz b/testsuite.tar.gz
new file mode 100644
index 0000000..b3e76e7
Binary files /dev/null and b/testsuite.tar.gz differ
diff --git a/upload-to-sf.sh b/upload-to-sf.sh
new file mode 100755
index 0000000..176f4db
--- /dev/null
+++ b/upload-to-sf.sh
@@ -0,0 +1,7 @@
+#!/bin/sh
+
+VERSION=$(./s3cmd --version | awk '{print $NF}')
+printf 'Uploading \033[32ms3cmd \033[31m%s\033[0m ...\n' "${VERSION}"
+#rsync -avP dist/s3cmd-${VERSION}.* ludvigm@frs.sourceforge.net:uploads/
+ln -f NEWS README.txt
+rsync -avP dist/s3cmd-${VERSION}.* README.txt ludvigm,s3tools@frs.sourceforge.net:/home/frs/project/s/s3/s3tools/s3cmd/${VERSION}/