Codebase list s3cmd / a6befe1
Imported Debian patch 0.9.8.4-1. Mikhail Gusarov authored 15 years ago; Gianfranco Costamagna committed 9 years ago.
11 changed file(s) with 198 addition(s) and 51 deletion(s). Raw diff Collapse all Expand all
0 s3cmd 0.9.8.4 - 2008-11-07
1 =============
2 * Stabilisation / bugfix release:
3 * Restored access to upper-case named buckets.
4 * Improved handling of filenames with Unicode characters.
5 * Avoid ZeroDivisionError on ultrafast links (for instance
6 on Amazon EC2)
7 * Re-issue failed requests (e.g. connection errors, internal
8 server errors, etc).
9 * Sync skips over files that can't be opened instead of
10 terminating the sync completely.
11 * Doesn't run out of open files quota on sync with lots of
12 files.
13
014 s3cmd 0.9.8.3 - 2008-07-29
115 =============
216 * Bugfix release. Avoid running out-of-memory in MD5'ing
00 Metadata-Version: 1.0
11 Name: s3cmd
2 Version: 0.9.8.3
2 Version: 0.9.8.4
33 Summary: S3cmd is a tool for managing Amazon S3 storage space.
44 Home-page: http://s3tools.logix.cz
55 Author: Michal Ludvig
99 except ImportError:
1010 import elementtree.ElementTree as ET
1111
12 class S3Error (Exception):
12 class S3Exception(Exception):
13 def __str__(self):
14 ## Is this legal?
15 return unicode(self)
16
17 def __unicode__(self):
18 return self.message
19
20 class S3Error (S3Exception):
1321 def __init__(self, response):
1422 self.status = response["status"]
1523 self.reason = response["reason"]
2533 debug("ErrorXML: " + child.tag + ": " + repr(child.text))
2634 self.info[child.tag] = child.text
2735
28 def __str__(self):
36 def __unicode__(self):
2937 retval = "%d (%s)" % (self.status, self.reason)
3038 try:
3139 retval += (": %s" % self.info["Code"])
3341 pass
3442 return retval
3543
36 class S3UploadError(Exception):
44 class S3UploadError(S3Exception):
3745 pass
3846
39 class S3DownloadError(Exception):
47 class S3DownloadError(S3Exception):
4048 pass
4149
42 class ParameterError(Exception):
50 class S3RequestError(S3Exception):
4351 pass
4452
53 class InvalidFileError(S3Exception):
54 pass
4555
56 class ParameterError(S3Exception):
57 pass
00 package = "s3cmd"
1 version = "0.9.8.3"
1 version = "0.9.8.4"
22 url = "http://s3tools.logix.cz"
33 license = "GPL version 2"
44 short_description = "S3cmd is a tool for managing Amazon S3 storage space."
7070 return httplib.HTTPConnection(self.get_hostname(bucket))
7171
7272 def get_hostname(self, bucket):
73 if bucket:
73 if bucket and self.check_bucket_name_dns_conformity(bucket):
7474 if self.redir_map.has_key(bucket):
7575 host = self.redir_map[bucket]
7676 else:
8484 self.redir_map[bucket] = redir_hostname
8585
8686 def format_uri(self, resource):
87 if self.config.proxy_host != "":
88 uri = "http://%s%s" % (self.get_hostname(resource['bucket']), resource['uri'])
87 if resource['bucket'] and not self.check_bucket_name_dns_conformity(resource['bucket']):
88 uri = "/%s%s" % (resource['bucket'], resource['uri'])
8989 else:
9090 uri = resource['uri']
91 if self.config.proxy_host != "":
92 uri = "http://%s%s" % (self.get_hostname(resource['bucket']), uri)
9193 debug('format_uri(): ' + uri)
9294 return uri
9395
107109 def _get_contents(data):
108110 return getListFromXml(data, "Contents")
109111
112 prefix = self.urlencode_string(prefix)
110113 request = self.create_request("BUCKET_LIST", bucket = bucket, prefix = prefix)
111114 response = self.send_request(request)
112115 #debug(response)
123126 return response
124127
125128 def bucket_create(self, bucket, bucket_location = None):
126 self.check_bucket_name(bucket)
127129 headers = SortedDict()
128130 body = ""
129131 if bucket_location and bucket_location.strip().upper() != "US":
131133 body += bucket_location.strip().upper()
132134 body += "</LocationConstraint></CreateBucketConfiguration>"
133135 debug("bucket_location: " + body)
136 self.check_bucket_name(bucket, dns_strict = True)
137 else:
138 self.check_bucket_name(bucket, dns_strict = False)
134139 headers["content-length"] = len(body)
135140 if self.config.acl_public:
136141 headers["x-amz-acl"] = "public-read"
151156
152157 def object_put(self, filename, bucket, object, extra_headers = None):
153158 if not os.path.isfile(filename):
154 raise ParameterError("%s is not a regular file" % filename)
159 raise InvalidFileError("%s is not a regular file" % filename)
155160 try:
156161 file = open(filename, "rb")
157162 size = os.stat(filename)[ST_SIZE]
158163 except IOError, e:
159 raise ParameterError("%s: %s" % (filename, e.strerror))
164 raise InvalidFileError("%s: %s" % (filename, e.strerror))
160165 headers = SortedDict()
161166 if extra_headers:
162167 headers.update(extra_headers)
224229
225230 ## Low level methods
226231 def urlencode_string(self, string):
232 if type(string) == unicode:
233 string = string.encode("utf-8")
227234 encoded = ""
228235 ## List of characters that must be escaped for S3
229236 ## Haven't found this in any official docs
294301 debug("CreateRequest: resource[uri]=" + resource['uri'])
295302 return (method_string, resource, headers)
296303
297 def send_request(self, request, body = None):
304 def send_request(self, request, body = None, retries = 5):
298305 method_string, resource, headers = request
299306 info("Processing request, please wait...")
300 conn = self.get_connection(resource['bucket'])
301 conn.request(method_string, self.format_uri(resource), body, headers)
302 response = {}
303 http_response = conn.getresponse()
304 response["status"] = http_response.status
305 response["reason"] = http_response.reason
306 response["headers"] = convertTupleListToDict(http_response.getheaders())
307 response["data"] = http_response.read()
308 debug("Response: " + str(response))
309 conn.close()
307 try:
308 conn = self.get_connection(resource['bucket'])
309 conn.request(method_string, self.format_uri(resource), body, headers)
310 response = {}
311 http_response = conn.getresponse()
312 response["status"] = http_response.status
313 response["reason"] = http_response.reason
314 response["headers"] = convertTupleListToDict(http_response.getheaders())
315 response["data"] = http_response.read()
316 debug("Response: " + str(response))
317 conn.close()
318 except Exception, e:
319 if retries:
320 warning("Retrying failed request: %s (%s)" % (resource['uri'], e))
321 return self.send_request(request, body, retries - 1)
322 else:
323 raise S3RequestError("Request failed for: %s" % resource['uri'])
310324
311325 if response["status"] == 307:
312326 ## RedirectPermanent
316330 info("Redirected to: %s" % (redir_hostname))
317331 return self.send_request(request, body)
318332
333 if response["status"] >= 500:
334 e = S3Error(response)
335 if retries:
336 warning(u"Retrying failed request: %s" % resource['uri'])
337 warning(unicode(e))
338 return self.send_request(request, body, retries - 1)
339 else:
340 raise e
341
319342 if response["status"] < 200 or response["status"] > 299:
320343 raise S3Error(response)
344
321345 return response
322346
323347 def send_file(self, request, file, throttle = 0, retries = 3):
355379 size_left -= len(data)
356380 if throttle:
357381 time.sleep(throttle)
358 info("Sent %d bytes (%d %% of %d)" % (
382 debug("Sent %d bytes (%d %% of %d)" % (
359383 (size_total - size_left),
360384 (size_total - size_left) * 100 / size_total,
361385 size_total))
369393 response["data"] = http_response.read()
370394 response["elapsed"] = timestamp_end - timestamp_start
371395 response["size"] = size_total
372 response["speed"] = float(response["size"]) / response["elapsed"]
396 response["speed"] = response["elapsed"] and float(response["size"]) / response["elapsed"] or float(-1)
373397 conn.close()
374398
375399 if response["status"] == 307:
438462 stream.write(data)
439463 md5_hash.update(data)
440464 size_recvd += len(data)
441 info("Received %d bytes (%d %% of %d)" % (
465 debug("Received %d bytes (%d %% of %d)" % (
442466 size_recvd,
443467 size_recvd * 100 / size_total,
444468 size_total))
448472 response["md5match"] = response["headers"]["etag"].find(response["md5"]) >= 0
449473 response["elapsed"] = timestamp_end - timestamp_start
450474 response["size"] = size_recvd
451 response["speed"] = float(response["size"]) / response["elapsed"]
475 response["speed"] = response["elapsed"] and float(response["size"]) / response["elapsed"] or float(-1)
452476 if response["size"] != long(response["headers"]["content-length"]):
453477 warning("Reported size (%s) does not match received size (%s)" % (
454478 response["headers"]["content-length"], response["size"]))
472496 debug("SignHeaders: " + repr(h))
473497 return base64.encodestring(hmac.new(self.config.secret_key, h, sha).digest()).strip()
474498
475 def check_bucket_name(self, bucket):
476 invalid = re.compile("([^a-z0-9\._-])").search(bucket)
477 if invalid:
478 raise ParameterError("Bucket name '%s' contains disallowed character '%s'. The only supported ones are: lowercase us-ascii letters (a-z), digits (0-9), dot (.), hyphen (-) and underscore (_)." % (bucket, invalid.groups()[0]))
499 @staticmethod
500 def check_bucket_name(bucket, dns_strict = True):
501 if dns_strict:
502 invalid = re.search("([^a-z0-9\.-])", bucket)
503 if invalid:
504 raise ParameterError("Bucket name '%s' contains disallowed character '%s'. The only supported ones are: lowercase us-ascii letters (a-z), digits (0-9), dot (.) and hyphen (-)." % (bucket, invalid.groups()[0]))
505 else:
506 invalid = re.search("([^A-Za-z0-9\._-])", bucket)
507 if invalid:
508 raise ParameterError("Bucket name '%s' contains disallowed character '%s'. The only supported ones are: us-ascii letters (a-z, A-Z), digits (0-9), dot (.), hyphen (-) and underscore (_)." % (bucket, invalid.groups()[0]))
509
479510 if len(bucket) < 3:
480511 raise ParameterError("Bucket name '%s' is too short (min 3 characters)" % bucket)
481512 if len(bucket) > 255:
482513 raise ParameterError("Bucket name '%s' is too long (max 255 characters)" % bucket)
514 if dns_strict:
515 if len(bucket) > 63:
516 raise ParameterError("Bucket name '%s' is too long (max 63 characters)" % bucket)
517 if re.search("-\.", bucket):
518 raise ParameterError("Bucket name '%s' must not contain sequence '-.' for DNS compatibility" % bucket)
519 if re.search("\.\.", bucket):
520 raise ParameterError("Bucket name '%s' must not contain sequence '..' for DNS compatibility" % bucket)
521 if not re.search("^[0-9a-z]", bucket):
522 raise ParameterError("Bucket name '%s' must start with a letter or a digit" % bucket)
523 if not re.search("[0-9a-z]$", bucket):
524 raise ParameterError("Bucket name '%s' must end with a letter or a digit" % bucket)
483525 return True
484526
527 @staticmethod
528 def check_bucket_name_dns_conformity(bucket):
529 try:
530 return S3.check_bucket_name(bucket, dns_strict = True)
531 except ParameterError:
532 return False
55 import re
66 import sys
77 from BidirMap import BidirMap
8 from logging import debug
9 from S3 import S3
10 from Utils import unicodise
811
912 class S3Uri(object):
1013 type = None
3134
3235 def __str__(self):
3336 return self.uri()
34
37
38 def __unicode__(self):
39 return self.uri()
40
3541 def public_url(self):
3642 raise ValueError("This S3 URI does not have Anonymous URL representation")
37
43
3844 class S3UriS3(S3Uri):
3945 type = "s3"
4046 _re = re.compile("^s3://([^/]+)/?(.*)", re.IGNORECASE)
4450 raise ValueError("%s: not a S3 URI" % string)
4551 groups = match.groups()
4652 self._bucket = groups[0]
47 self._object = groups[1]
53 self._object = unicodise(groups[1])
4854
4955 def bucket(self):
5056 return self._bucket
6268 return "/".join(["s3:/", self._bucket, self._object])
6369
6470 def public_url(self):
65 return "http://%s.s3.amazonaws.com/%s" % (self._bucket, self._object)
71 if S3.check_bucket_name_dns_conformity(self._bucket):
72 return "http://%s.s3.amazonaws.com/%s" % (self._bucket, self._object)
73 else:
74 return "http://s3.amazonaws.com/%s/%s" % (self._bucket, self._object)
6675
6776 @staticmethod
6877 def compose_uri(bucket, object = ""):
6978 return "s3://%s/%s" % (bucket, object)
70
79
7180 class S3UriS3FS(S3Uri):
7281 type = "s3fs"
7382 _re = re.compile("^s3fs://([^/]*)/?(.*)", re.IGNORECASE)
7786 raise ValueError("%s: not a S3fs URI" % string)
7887 groups = match.groups()
7988 self._fsname = groups[0]
80 self._path = groups[1].split("/")
89 self._path = unicodise(groups[1]).split("/")
8190
8291 def fsname(self):
8392 return self._fsname
96105 groups = match.groups()
97106 if groups[0] not in (None, "file://"):
98107 raise ValueError("%s: not a file:// URI" % string)
99 self._path = groups[1].split("/")
108 self._path = unicodise(groups[1]).split("/")
100109
101110 def path(self):
102111 return "/".join(self._path)
172172 warning("%s: %s" % (cur_dir, e))
173173 return False
174174 return True
175
176 def unicodise(string):
177 """
178 Convert 'string' to Unicode or raise an exception.
179 """
180 debug("Unicodising %r" % string)
181 if type(string) == unicode:
182 return string
183 try:
184 return string.decode("utf-8")
185 except UnicodeDecodeError:
186 raise UnicodeDecodeError("Conversion to unicode failed: %r" % string)
187
188 def try_unicodise(string):
189 try:
190 return unicodise(string)
191 except UnicodeDecodeError:
192 return string
193
0 s3cmd (0.9.8.4-1) unstable; urgency=low
1
2 [Mikhail Gusarov]
3 * New upstream release.
4
5 [Sandro Tosi]
6 * debian/control
7 - switch Vcs-Browser field to viewsvn
8
9 -- Mikhail Gusarov <dottedmag@dottedmag.net> Tue, 09 Dec 2008 01:56:19 +0600
10
011 s3cmd (0.9.8.3-1) unstable; urgency=low
112
213 * New upstream release.
77 Standards-Version: 3.8.0
88 XS-Python-Version: >=2.4
99 Vcs-Svn: svn://svn.debian.org/svn/python-apps/packages/s3cmd/trunk
10 Vcs-Browser: http://svn.debian.org/wsvn/python-apps/packages/s3cmd/trunk/
10 Vcs-Browser: http://svn.debian.org/viewsvn/python-apps/packages/s3cmd/trunk/
1111 Homepage: http://s3tools.logix.cz/s3cmd
1212
1313 Package: s3cmd
0 Upstream checks for the presence of ElementTree at build time. This is
1 unnecessary (ET is only used at run time), but the upstream author prefers it
2 this way, so let's just ignore this check.
3
04 Index: s3cmd-0.9.5/setup.py
15 ===================================================================
26 --- s3cmd-0.9.5.orig/setup.py 2007-08-25 07:46:25.000000000 +0700
+41
-11
s3cmd less more
1313 import pwd, grp
1414 import glob
1515 import traceback
16 import codecs
1617
1718 from copy import copy
1819 from optparse import OptionParser, Option, OptionValueError, IndentedHelpFormatter
1920 from logging import debug, info, warning, error
2021 from distutils.spawn import find_executable
2122
23 ## Output native on TTY, UTF-8 otherwise (redirects)
24 #_stdout = sys.stdout.isatty() and sys.stdout or codecs.getwriter("utf-8")(sys.stdout)
25 #_stderr = sys.stderr.isatty() and sys.stderr or codecs.getwriter("utf-8")(sys.stderr)
26 ## Output UTF-8 in all cases
27 _stdout = codecs.getwriter("utf-8")(sys.stdout)
28 _stderr = codecs.getwriter("utf-8")(sys.stderr)
29 ## Leave it to the terminal
30 #_stdout = sys.stdout
31 #_stderr = sys.stderr
32
2233 def output(message):
23 print message
34 _stdout.write(message + "\n")
2435
2536 def check_args_type(args, type, verbose_type):
2637 for arg in args:
103114 bucket = uri.bucket()
104115 object = uri.object()
105116
106 output("Bucket '%s':" % bucket)
117 output("Bucket 's3://%s':" % bucket)
107118 if object.endswith('*'):
108119 object = object[:-1]
109120 try:
187198 response = s3.object_put_uri(real_filename, uri_final, extra_headers)
188199 except S3UploadError, e:
189200 error("Upload of '%s' failed too many times. Skipping that file." % real_filename)
201 continue
202 except InvalidFileError, e:
203 warning("File can not be uploaded: %s" % e)
190204 continue
191205 speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True)
192206 output("File '%s' stored as %s (%d bytes in %0.1f seconds, %0.2f %sB/s) [%d of %d]" %
341355 rem_base_len = len(rem_base)
342356 rem_list = {}
343357 for object in response['list']:
344 key = object['Key'][rem_base_len:].encode('utf-8')
358 key = object['Key'][rem_base_len:]
345359 rem_list[key] = {
346360 'size' : int(object['Size']),
347361 # 'mtime' : dateS3toUnix(object['LastModified']), ## That's upload time, not our lastmod time :-(
348362 'md5' : object['ETag'][1:-1],
349 'object_key' : object['Key'].encode('utf-8'),
363 'object_key' : object['Key']
350364 }
351365 return rem_list
352366
474488
475489 debug("dst_file=%s" % dst_file)
476490 # This will have failed should the file exist
477 os.open(dst_file, open_flags)
491 os.close(os.open(dst_file, open_flags))
478492 # Yeah I know there is a race condition here. Sadly I don't know how to open() in exclusive mode.
479493 dst_stream = open(dst_file, "wb")
480494 response = s3.object_get_uri(uri, dst_stream)
497511 if e.errno in (errno.EPERM, errno.EACCES):
498512 warning("%s not writable: %s" % (dst_file, e.strerror))
499513 continue
500 raise
514 raise e
501515 except KeyboardInterrupt:
502516 try: dst_stream.close()
503517 except: pass
553567 for k in attrs: result += "%s:%s/" % (k, attrs[k])
554568 return { 'x-amz-meta-s3cmd-attrs' : result[:-1] }
555569
556 s3 = S3(Config())
570 s3 = S3(cfg)
571
572 if cfg.encrypt:
573 error("S3cmd 'sync' doesn't support GPG encryption, sorry.")
574 error("Either use unconditional 's3cmd put --recursive'")
575 error("or disable encryption with --no-encryption parameter.")
576 sys.exit(1)
577
557578
558579 src_uri = S3Uri(src)
559580 dst_uri = S3Uri(dst)
598619 response = s3.object_put_uri(src, uri, attr_header)
599620 except S3UploadError, e:
600621 error("%s: upload failed too many times. Skipping that file." % src)
622 continue
623 except InvalidFileError, e:
624 warning("File can not be uploaded: %s" % e)
601625 continue
602626 speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True)
603627 output("File '%s' stored as %s (%d bytes in %0.1f seconds, %0.2f %sB/s) [%d of %d]" %
911935
912936 ## Some mucking with logging levels to enable
913937 ## debugging/verbose output for config file parser on request
914 logging.basicConfig(level=options.verbosity, format='%(levelname)s: %(message)s')
938 logging.basicConfig(level=options.verbosity,
939 format='%(levelname)s: %(message)s',
940 stream = _stderr)
915941
916942 if options.show_version:
917943 output("s3cmd version %s" % PkgInfo.version)
10001026 error("Missing command. Please run with --help for more information.")
10011027 sys.exit(1)
10021028
1029 ## Unicodise all remaining arguments:
1030 args = [unicodise(arg) for arg in args]
1031
10031032 command = args.pop(0)
10041033 try:
1005 debug("Command: " + commands[command]["cmd"])
1034 debug("Command: %s" % commands[command]["cmd"])
10061035 ## We must do this lookup in extra step to
10071036 ## avoid catching all KeyError exceptions
10081037 ## from inner functions.
10181047 try:
10191048 cmd_func(args)
10201049 except S3Error, e:
1021 error("S3 error: " + str(e))
1050 error("S3 error: %s" % e)
10221051 sys.exit(1)
10231052 except ParameterError, e:
1024 error("Parameter problem: " + str(e))
1053 error("Parameter problem: %s" % e)
10251054 sys.exit(1)
10261055
10271056 if __name__ == '__main__':
10351064 from S3.S3Uri import *
10361065 from S3 import Utils
10371066 from S3.Exceptions import *
1067 from S3.Utils import unicodise
10381068
10391069 main()
10401070 sys.exit(0)