Imported Debian patch 0.9.8.4-1
Mikhail Gusarov authored 15 years ago
Gianfranco Costamagna committed 9 years ago
0 | s3cmd 0.9.8.4 - 2008-11-07 | |
1 | ============= | |
2 | * Stabilisation / bugfix release: | |
3 | * Restored access to upper-case named buckets. | |
4 | * Improved handling of filenames with Unicode characters. | |
5 | * Avoid ZeroDivisionError on ultrafast links (for instance | |
6 | on Amazon EC2) | |
7 | * Re-issue failed requests (e.g. connection errors, internal | |
8 | server errors, etc). | |
9 | * Sync skips over files that can't be opened instead of | |
10 | terminating the sync completely. | |
11 | * Doesn't run out of open files quota on sync with lots of | |
12 | files. | |
13 | ||
0 | 14 | s3cmd 0.9.8.3 - 2008-07-29 |
1 | 15 | ============= |
2 | 16 | * Bugfix release. Avoid running out-of-memory in MD5'ing |
0 | 0 | Metadata-Version: 1.0 |
1 | 1 | Name: s3cmd |
2 | Version: 0.9.8.3 | |
2 | Version: 0.9.8.4 | |
3 | 3 | Summary: S3cmd is a tool for managing Amazon S3 storage space. |
4 | 4 | Home-page: http://s3tools.logix.cz |
5 | 5 | Author: Michal Ludvig |
9 | 9 | except ImportError: |
10 | 10 | import elementtree.ElementTree as ET |
11 | 11 | |
12 | class S3Error (Exception): | |
12 | class S3Exception(Exception): | |
13 | def __str__(self): | |
14 | ## Is this legal? | |
15 | return unicode(self) | |
16 | ||
17 | def __unicode__(self): | |
18 | return self.message | |
19 | ||
20 | class S3Error (S3Exception): | |
13 | 21 | def __init__(self, response): |
14 | 22 | self.status = response["status"] |
15 | 23 | self.reason = response["reason"] |
25 | 33 | debug("ErrorXML: " + child.tag + ": " + repr(child.text)) |
26 | 34 | self.info[child.tag] = child.text |
27 | 35 | |
28 | def __str__(self): | |
36 | def __unicode__(self): | |
29 | 37 | retval = "%d (%s)" % (self.status, self.reason) |
30 | 38 | try: |
31 | 39 | retval += (": %s" % self.info["Code"]) |
33 | 41 | pass |
34 | 42 | return retval |
35 | 43 | |
36 | class S3UploadError(Exception): | |
44 | class S3UploadError(S3Exception): | |
37 | 45 | pass |
38 | 46 | |
39 | class S3DownloadError(Exception): | |
47 | class S3DownloadError(S3Exception): | |
40 | 48 | pass |
41 | 49 | |
42 | class ParameterError(Exception): | |
50 | class S3RequestError(S3Exception): | |
43 | 51 | pass |
44 | 52 | |
53 | class InvalidFileError(S3Exception): | |
54 | pass | |
45 | 55 | |
56 | class ParameterError(S3Exception): | |
57 | pass |
0 | 0 | package = "s3cmd" |
1 | version = "0.9.8.3" | |
1 | version = "0.9.8.4" | |
2 | 2 | url = "http://s3tools.logix.cz" |
3 | 3 | license = "GPL version 2" |
4 | 4 | short_description = "S3cmd is a tool for managing Amazon S3 storage space." |
70 | 70 | return httplib.HTTPConnection(self.get_hostname(bucket)) |
71 | 71 | |
72 | 72 | def get_hostname(self, bucket): |
73 | if bucket: | |
73 | if bucket and self.check_bucket_name_dns_conformity(bucket): | |
74 | 74 | if self.redir_map.has_key(bucket): |
75 | 75 | host = self.redir_map[bucket] |
76 | 76 | else: |
84 | 84 | self.redir_map[bucket] = redir_hostname |
85 | 85 | |
86 | 86 | def format_uri(self, resource): |
87 | if self.config.proxy_host != "": | |
88 | uri = "http://%s%s" % (self.get_hostname(resource['bucket']), resource['uri']) | |
87 | if resource['bucket'] and not self.check_bucket_name_dns_conformity(resource['bucket']): | |
88 | uri = "/%s%s" % (resource['bucket'], resource['uri']) | |
89 | 89 | else: |
90 | 90 | uri = resource['uri'] |
91 | if self.config.proxy_host != "": | |
92 | uri = "http://%s%s" % (self.get_hostname(resource['bucket']), uri) | |
91 | 93 | debug('format_uri(): ' + uri) |
92 | 94 | return uri |
93 | 95 | |
107 | 109 | def _get_contents(data): |
108 | 110 | return getListFromXml(data, "Contents") |
109 | 111 | |
112 | prefix = self.urlencode_string(prefix) | |
110 | 113 | request = self.create_request("BUCKET_LIST", bucket = bucket, prefix = prefix) |
111 | 114 | response = self.send_request(request) |
112 | 115 | #debug(response) |
123 | 126 | return response |
124 | 127 | |
125 | 128 | def bucket_create(self, bucket, bucket_location = None): |
126 | self.check_bucket_name(bucket) | |
127 | 129 | headers = SortedDict() |
128 | 130 | body = "" |
129 | 131 | if bucket_location and bucket_location.strip().upper() != "US": |
131 | 133 | body += bucket_location.strip().upper() |
132 | 134 | body += "</LocationConstraint></CreateBucketConfiguration>" |
133 | 135 | debug("bucket_location: " + body) |
136 | self.check_bucket_name(bucket, dns_strict = True) | |
137 | else: | |
138 | self.check_bucket_name(bucket, dns_strict = False) | |
134 | 139 | headers["content-length"] = len(body) |
135 | 140 | if self.config.acl_public: |
136 | 141 | headers["x-amz-acl"] = "public-read" |
151 | 156 | |
152 | 157 | def object_put(self, filename, bucket, object, extra_headers = None): |
153 | 158 | if not os.path.isfile(filename): |
154 | raise ParameterError("%s is not a regular file" % filename) | |
159 | raise InvalidFileError("%s is not a regular file" % filename) | |
155 | 160 | try: |
156 | 161 | file = open(filename, "rb") |
157 | 162 | size = os.stat(filename)[ST_SIZE] |
158 | 163 | except IOError, e: |
159 | raise ParameterError("%s: %s" % (filename, e.strerror)) | |
164 | raise InvalidFileError("%s: %s" % (filename, e.strerror)) | |
160 | 165 | headers = SortedDict() |
161 | 166 | if extra_headers: |
162 | 167 | headers.update(extra_headers) |
224 | 229 | |
225 | 230 | ## Low level methods |
226 | 231 | def urlencode_string(self, string): |
232 | if type(string) == unicode: | |
233 | string = string.encode("utf-8") | |
227 | 234 | encoded = "" |
228 | 235 | ## List of characters that must be escaped for S3 |
229 | 236 | ## Haven't found this in any official docs |
294 | 301 | debug("CreateRequest: resource[uri]=" + resource['uri']) |
295 | 302 | return (method_string, resource, headers) |
296 | 303 | |
297 | def send_request(self, request, body = None): | |
304 | def send_request(self, request, body = None, retries = 5): | |
298 | 305 | method_string, resource, headers = request |
299 | 306 | info("Processing request, please wait...") |
300 | conn = self.get_connection(resource['bucket']) | |
301 | conn.request(method_string, self.format_uri(resource), body, headers) | |
302 | response = {} | |
303 | http_response = conn.getresponse() | |
304 | response["status"] = http_response.status | |
305 | response["reason"] = http_response.reason | |
306 | response["headers"] = convertTupleListToDict(http_response.getheaders()) | |
307 | response["data"] = http_response.read() | |
308 | debug("Response: " + str(response)) | |
309 | conn.close() | |
307 | try: | |
308 | conn = self.get_connection(resource['bucket']) | |
309 | conn.request(method_string, self.format_uri(resource), body, headers) | |
310 | response = {} | |
311 | http_response = conn.getresponse() | |
312 | response["status"] = http_response.status | |
313 | response["reason"] = http_response.reason | |
314 | response["headers"] = convertTupleListToDict(http_response.getheaders()) | |
315 | response["data"] = http_response.read() | |
316 | debug("Response: " + str(response)) | |
317 | conn.close() | |
318 | except Exception, e: | |
319 | if retries: | |
320 | warning("Retrying failed request: %s (%s)" % (resource['uri'], e)) | |
321 | return self.send_request(request, body, retries - 1) | |
322 | else: | |
323 | raise S3RequestError("Request failed for: %s" % resource['uri']) | |
310 | 324 | |
311 | 325 | if response["status"] == 307: |
312 | 326 | ## RedirectPermanent |
316 | 330 | info("Redirected to: %s" % (redir_hostname)) |
317 | 331 | return self.send_request(request, body) |
318 | 332 | |
333 | if response["status"] >= 500: | |
334 | e = S3Error(response) | |
335 | if retries: | |
336 | warning(u"Retrying failed request: %s" % resource['uri']) | |
337 | warning(unicode(e)) | |
338 | return self.send_request(request, body, retries - 1) | |
339 | else: | |
340 | raise e | |
341 | ||
319 | 342 | if response["status"] < 200 or response["status"] > 299: |
320 | 343 | raise S3Error(response) |
344 | ||
321 | 345 | return response |
322 | 346 | |
323 | 347 | def send_file(self, request, file, throttle = 0, retries = 3): |
355 | 379 | size_left -= len(data) |
356 | 380 | if throttle: |
357 | 381 | time.sleep(throttle) |
358 | info("Sent %d bytes (%d %% of %d)" % ( | |
382 | debug("Sent %d bytes (%d %% of %d)" % ( | |
359 | 383 | (size_total - size_left), |
360 | 384 | (size_total - size_left) * 100 / size_total, |
361 | 385 | size_total)) |
369 | 393 | response["data"] = http_response.read() |
370 | 394 | response["elapsed"] = timestamp_end - timestamp_start |
371 | 395 | response["size"] = size_total |
372 | response["speed"] = float(response["size"]) / response["elapsed"] | |
396 | response["speed"] = response["elapsed"] and float(response["size"]) / response["elapsed"] or float(-1) | |
373 | 397 | conn.close() |
374 | 398 | |
375 | 399 | if response["status"] == 307: |
438 | 462 | stream.write(data) |
439 | 463 | md5_hash.update(data) |
440 | 464 | size_recvd += len(data) |
441 | info("Received %d bytes (%d %% of %d)" % ( | |
465 | debug("Received %d bytes (%d %% of %d)" % ( | |
442 | 466 | size_recvd, |
443 | 467 | size_recvd * 100 / size_total, |
444 | 468 | size_total)) |
448 | 472 | response["md5match"] = response["headers"]["etag"].find(response["md5"]) >= 0 |
449 | 473 | response["elapsed"] = timestamp_end - timestamp_start |
450 | 474 | response["size"] = size_recvd |
451 | response["speed"] = float(response["size"]) / response["elapsed"] | |
475 | response["speed"] = response["elapsed"] and float(response["size"]) / response["elapsed"] or float(-1) | |
452 | 476 | if response["size"] != long(response["headers"]["content-length"]): |
453 | 477 | warning("Reported size (%s) does not match received size (%s)" % ( |
454 | 478 | response["headers"]["content-length"], response["size"])) |
472 | 496 | debug("SignHeaders: " + repr(h)) |
473 | 497 | return base64.encodestring(hmac.new(self.config.secret_key, h, sha).digest()).strip() |
474 | 498 | |
475 | def check_bucket_name(self, bucket): | |
476 | invalid = re.compile("([^a-z0-9\._-])").search(bucket) | |
477 | if invalid: | |
478 | raise ParameterError("Bucket name '%s' contains disallowed character '%s'. The only supported ones are: lowercase us-ascii letters (a-z), digits (0-9), dot (.), hyphen (-) and underscore (_)." % (bucket, invalid.groups()[0])) | |
499 | @staticmethod | |
500 | def check_bucket_name(bucket, dns_strict = True): | |
501 | if dns_strict: | |
502 | invalid = re.search("([^a-z0-9\.-])", bucket) | |
503 | if invalid: | |
504 | raise ParameterError("Bucket name '%s' contains disallowed character '%s'. The only supported ones are: lowercase us-ascii letters (a-z), digits (0-9), dot (.) and hyphen (-)." % (bucket, invalid.groups()[0])) | |
505 | else: | |
506 | invalid = re.search("([^A-Za-z0-9\._-])", bucket) | |
507 | if invalid: | |
508 | raise ParameterError("Bucket name '%s' contains disallowed character '%s'. The only supported ones are: us-ascii letters (a-z, A-Z), digits (0-9), dot (.), hyphen (-) and underscore (_)." % (bucket, invalid.groups()[0])) | |
509 | ||
479 | 510 | if len(bucket) < 3: |
480 | 511 | raise ParameterError("Bucket name '%s' is too short (min 3 characters)" % bucket) |
481 | 512 | if len(bucket) > 255: |
482 | 513 | raise ParameterError("Bucket name '%s' is too long (max 255 characters)" % bucket) |
514 | if dns_strict: | |
515 | if len(bucket) > 63: | |
516 | raise ParameterError("Bucket name '%s' is too long (max 63 characters)" % bucket) | |
517 | if re.search("-\.", bucket): | |
518 | raise ParameterError("Bucket name '%s' must not contain sequence '-.' for DNS compatibility" % bucket) | |
519 | if re.search("\.\.", bucket): | |
520 | raise ParameterError("Bucket name '%s' must not contain sequence '..' for DNS compatibility" % bucket) | |
521 | if not re.search("^[0-9a-z]", bucket): | |
522 | raise ParameterError("Bucket name '%s' must start with a letter or a digit" % bucket) | |
523 | if not re.search("[0-9a-z]$", bucket): | |
524 | raise ParameterError("Bucket name '%s' must end with a letter or a digit" % bucket) | |
483 | 525 | return True |
484 | 526 | |
527 | @staticmethod | |
528 | def check_bucket_name_dns_conformity(bucket): | |
529 | try: | |
530 | return S3.check_bucket_name(bucket, dns_strict = True) | |
531 | except ParameterError: | |
532 | return False |
5 | 5 | import re |
6 | 6 | import sys |
7 | 7 | from BidirMap import BidirMap |
8 | from logging import debug | |
9 | from S3 import S3 | |
10 | from Utils import unicodise | |
8 | 11 | |
9 | 12 | class S3Uri(object): |
10 | 13 | type = None |
31 | 34 | |
32 | 35 | def __str__(self): |
33 | 36 | return self.uri() |
34 | ||
37 | ||
38 | def __unicode__(self): | |
39 | return self.uri() | |
40 | ||
35 | 41 | def public_url(self): |
36 | 42 | raise ValueError("This S3 URI does not have Anonymous URL representation") |
37 | ||
43 | ||
38 | 44 | class S3UriS3(S3Uri): |
39 | 45 | type = "s3" |
40 | 46 | _re = re.compile("^s3://([^/]+)/?(.*)", re.IGNORECASE) |
44 | 50 | raise ValueError("%s: not a S3 URI" % string) |
45 | 51 | groups = match.groups() |
46 | 52 | self._bucket = groups[0] |
47 | self._object = groups[1] | |
53 | self._object = unicodise(groups[1]) | |
48 | 54 | |
49 | 55 | def bucket(self): |
50 | 56 | return self._bucket |
62 | 68 | return "/".join(["s3:/", self._bucket, self._object]) |
63 | 69 | |
64 | 70 | def public_url(self): |
65 | return "http://%s.s3.amazonaws.com/%s" % (self._bucket, self._object) | |
71 | if S3.check_bucket_name_dns_conformity(self._bucket): | |
72 | return "http://%s.s3.amazonaws.com/%s" % (self._bucket, self._object) | |
73 | else: | |
74 | return "http://s3.amazonaws.com/%s/%s" % (self._bucket, self._object) | |
66 | 75 | |
67 | 76 | @staticmethod |
68 | 77 | def compose_uri(bucket, object = ""): |
69 | 78 | return "s3://%s/%s" % (bucket, object) |
70 | ||
79 | ||
71 | 80 | class S3UriS3FS(S3Uri): |
72 | 81 | type = "s3fs" |
73 | 82 | _re = re.compile("^s3fs://([^/]*)/?(.*)", re.IGNORECASE) |
77 | 86 | raise ValueError("%s: not a S3fs URI" % string) |
78 | 87 | groups = match.groups() |
79 | 88 | self._fsname = groups[0] |
80 | self._path = groups[1].split("/") | |
89 | self._path = unicodise(groups[1]).split("/") | |
81 | 90 | |
82 | 91 | def fsname(self): |
83 | 92 | return self._fsname |
96 | 105 | groups = match.groups() |
97 | 106 | if groups[0] not in (None, "file://"): |
98 | 107 | raise ValueError("%s: not a file:// URI" % string) |
99 | self._path = groups[1].split("/") | |
108 | self._path = unicodise(groups[1]).split("/") | |
100 | 109 | |
101 | 110 | def path(self): |
102 | 111 | return "/".join(self._path) |
172 | 172 | warning("%s: %s" % (cur_dir, e)) |
173 | 173 | return False |
174 | 174 | return True |
175 | ||
176 | def unicodise(string): | |
177 | """ | |
178 | Convert 'string' to Unicode or raise an exception. | |
179 | """ | |
180 | debug("Unicodising %r" % string) | |
181 | if type(string) == unicode: | |
182 | return string | |
183 | try: | |
184 | return string.decode("utf-8") | |
185 | except UnicodeDecodeError: | |
186 | raise UnicodeDecodeError("Conversion to unicode failed: %r" % string) | |
187 | ||
188 | def try_unicodise(string): | |
189 | try: | |
190 | return unicodise(string) | |
191 | except UnicodeDecodeError: | |
192 | return string | |
193 |
0 | s3cmd (0.9.8.4-1) unstable; urgency=low | |
1 | ||
2 | [Mikhail Gusarov] | |
3 | * New upstream release. | |
4 | ||
5 | [Sandro Tosi] | |
6 | * debian/control | |
7 | - switch Vcs-Browser field to viewsvn | |
8 | ||
9 | -- Mikhail Gusarov <dottedmag@dottedmag.net> Tue, 09 Dec 2008 01:56:19 +0600 | |
10 | ||
0 | 11 | s3cmd (0.9.8.3-1) unstable; urgency=low |
1 | 12 | |
2 | 13 | * New upstream release. |
7 | 7 | Standards-Version: 3.8.0 |
8 | 8 | XS-Python-Version: >=2.4 |
9 | 9 | Vcs-Svn: svn://svn.debian.org/svn/python-apps/packages/s3cmd/trunk |
10 | Vcs-Browser: http://svn.debian.org/wsvn/python-apps/packages/s3cmd/trunk/ | |
10 | Vcs-Browser: http://svn.debian.org/viewsvn/python-apps/packages/s3cmd/trunk/ | |
11 | 11 | Homepage: http://s3tools.logix.cz/s3cmd |
12 | 12 | |
13 | 13 | Package: s3cmd |
0 | Upstream checks for the presence of ElementTree at build time. This is | |
1 | unnecessary (ET is only used at run time), but upstream prefers it this way, so let's | |
2 | just ignore this check. | |
3 | ||
0 | 4 | Index: s3cmd-0.9.5/setup.py |
1 | 5 | =================================================================== |
2 | 6 | --- s3cmd-0.9.5.orig/setup.py 2007-08-25 07:46:25.000000000 +0700 |
13 | 13 | import pwd, grp |
14 | 14 | import glob |
15 | 15 | import traceback |
16 | import codecs | |
16 | 17 | |
17 | 18 | from copy import copy |
18 | 19 | from optparse import OptionParser, Option, OptionValueError, IndentedHelpFormatter |
19 | 20 | from logging import debug, info, warning, error |
20 | 21 | from distutils.spawn import find_executable |
21 | 22 | |
23 | ## Output native on TTY, UTF-8 otherwise (redirects) | |
24 | #_stdout = sys.stdout.isatty() and sys.stdout or codecs.getwriter("utf-8")(sys.stdout) | |
25 | #_stderr = sys.stderr.isatty() and sys.stderr or codecs.getwriter("utf-8")(sys.stderr) | |
26 | ## Output UTF-8 in all cases | |
27 | _stdout = codecs.getwriter("utf-8")(sys.stdout) | |
28 | _stderr = codecs.getwriter("utf-8")(sys.stderr) | |
29 | ## Leave it to the terminal | |
30 | #_stdout = sys.stdout | |
31 | #_stderr = sys.stderr | |
32 | ||
22 | 33 | def output(message): |
23 | print message | |
34 | _stdout.write(message + "\n") | |
24 | 35 | |
25 | 36 | def check_args_type(args, type, verbose_type): |
26 | 37 | for arg in args: |
103 | 114 | bucket = uri.bucket() |
104 | 115 | object = uri.object() |
105 | 116 | |
106 | output("Bucket '%s':" % bucket) | |
117 | output("Bucket 's3://%s':" % bucket) | |
107 | 118 | if object.endswith('*'): |
108 | 119 | object = object[:-1] |
109 | 120 | try: |
187 | 198 | response = s3.object_put_uri(real_filename, uri_final, extra_headers) |
188 | 199 | except S3UploadError, e: |
189 | 200 | error("Upload of '%s' failed too many times. Skipping that file." % real_filename) |
201 | continue | |
202 | except InvalidFileError, e: | |
203 | warning("File can not be uploaded: %s" % e) | |
190 | 204 | continue |
191 | 205 | speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True) |
192 | 206 | output("File '%s' stored as %s (%d bytes in %0.1f seconds, %0.2f %sB/s) [%d of %d]" % |
341 | 355 | rem_base_len = len(rem_base) |
342 | 356 | rem_list = {} |
343 | 357 | for object in response['list']: |
344 | key = object['Key'][rem_base_len:].encode('utf-8') | |
358 | key = object['Key'][rem_base_len:] | |
345 | 359 | rem_list[key] = { |
346 | 360 | 'size' : int(object['Size']), |
347 | 361 | # 'mtime' : dateS3toUnix(object['LastModified']), ## That's upload time, not our lastmod time :-( |
348 | 362 | 'md5' : object['ETag'][1:-1], |
349 | 'object_key' : object['Key'].encode('utf-8'), | |
363 | 'object_key' : object['Key'] | |
350 | 364 | } |
351 | 365 | return rem_list |
352 | 366 | |
474 | 488 | |
475 | 489 | debug("dst_file=%s" % dst_file) |
476 | 490 | # This will have failed should the file exist |
477 | os.open(dst_file, open_flags) | |
491 | os.close(os.open(dst_file, open_flags)) | |
478 | 492 | # Yeah I know there is a race condition here. Sadly I don't know how to open() in exclusive mode. |
479 | 493 | dst_stream = open(dst_file, "wb") |
480 | 494 | response = s3.object_get_uri(uri, dst_stream) |
497 | 511 | if e.errno in (errno.EPERM, errno.EACCES): |
498 | 512 | warning("%s not writable: %s" % (dst_file, e.strerror)) |
499 | 513 | continue |
500 | raise | |
514 | raise e | |
501 | 515 | except KeyboardInterrupt: |
502 | 516 | try: dst_stream.close() |
503 | 517 | except: pass |
553 | 567 | for k in attrs: result += "%s:%s/" % (k, attrs[k]) |
554 | 568 | return { 'x-amz-meta-s3cmd-attrs' : result[:-1] } |
555 | 569 | |
556 | s3 = S3(Config()) | |
570 | s3 = S3(cfg) | |
571 | ||
572 | if cfg.encrypt: | |
573 | error("S3cmd 'sync' doesn't support GPG encryption, sorry.") | |
574 | error("Either use unconditional 's3cmd put --recursive'") | |
575 | error("or disable encryption with --no-encryption parameter.") | |
576 | sys.exit(1) | |
577 | ||
557 | 578 | |
558 | 579 | src_uri = S3Uri(src) |
559 | 580 | dst_uri = S3Uri(dst) |
598 | 619 | response = s3.object_put_uri(src, uri, attr_header) |
599 | 620 | except S3UploadError, e: |
600 | 621 | error("%s: upload failed too many times. Skipping that file." % src) |
622 | continue | |
623 | except InvalidFileError, e: | |
624 | warning("File can not be uploaded: %s" % e) | |
601 | 625 | continue |
602 | 626 | speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True) |
603 | 627 | output("File '%s' stored as %s (%d bytes in %0.1f seconds, %0.2f %sB/s) [%d of %d]" % |
911 | 935 | |
912 | 936 | ## Some mucking with logging levels to enable |
913 | 937 | ## debugging/verbose output for config file parser on request |
914 | logging.basicConfig(level=options.verbosity, format='%(levelname)s: %(message)s') | |
938 | logging.basicConfig(level=options.verbosity, | |
939 | format='%(levelname)s: %(message)s', | |
940 | stream = _stderr) | |
915 | 941 | |
916 | 942 | if options.show_version: |
917 | 943 | output("s3cmd version %s" % PkgInfo.version) |
1000 | 1026 | error("Missing command. Please run with --help for more information.") |
1001 | 1027 | sys.exit(1) |
1002 | 1028 | |
1029 | ## Unicodise all remaining arguments: | |
1030 | args = [unicodise(arg) for arg in args] | |
1031 | ||
1003 | 1032 | command = args.pop(0) |
1004 | 1033 | try: |
1005 | debug("Command: " + commands[command]["cmd"]) | |
1034 | debug("Command: %s" % commands[command]["cmd"]) | |
1006 | 1035 | ## We must do this lookup in extra step to |
1007 | 1036 | ## avoid catching all KeyError exceptions |
1008 | 1037 | ## from inner functions. |
1018 | 1047 | try: |
1019 | 1048 | cmd_func(args) |
1020 | 1049 | except S3Error, e: |
1021 | error("S3 error: " + str(e)) | |
1050 | error("S3 error: %s" % e) | |
1022 | 1051 | sys.exit(1) |
1023 | 1052 | except ParameterError, e: |
1024 | error("Parameter problem: " + str(e)) | |
1053 | error("Parameter problem: %s" % e) | |
1025 | 1054 | sys.exit(1) |
1026 | 1055 | |
1027 | 1056 | if __name__ == '__main__': |
1035 | 1064 | from S3.S3Uri import * |
1036 | 1065 | from S3 import Utils |
1037 | 1066 | from S3.Exceptions import * |
1067 | from S3.Utils import unicodise | |
1038 | 1068 | |
1039 | 1069 | main() |
1040 | 1070 | sys.exit(0) |