# Copyright 2010 Google Inc.
# Copyright (c) 2011, Nexenta Systems Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.

import boto
import os
import sys
import textwrap
from boto.s3.deletemarker import DeleteMarker
from boto.exception import BotoClientError
from boto.exception import InvalidUriError


class StorageUri(object):
    """
    Base class for representing a storage provider-independent bucket and
    object name with a shorthand URI-like syntax.

    This is an abstract class: the constructor cannot be called (it raises
    an exception if you try).
    """

    connection = None
    # Optional args that can be set from one of the concrete subclass
    # constructors, to change connection behavior (e.g., to override
    # https_connection_factory).
    connection_args = None

    # Map of provider scheme ('s3' or 'gs') to AWSAuthConnection object. We
    # maintain a pool here in addition to the connection pool implemented
    # in AWSAuthConnection because the latter re-creates its connection pool
    # every time that class is instantiated (so the current pool is used to
    # avoid re-instantiating AWSAuthConnection).
    provider_pool = {}

    def __init__(self):
        """Uncallable constructor on abstract base StorageUri class.
        """
        raise BotoClientError('Attempt to instantiate abstract StorageUri '
                              'class')

    def __repr__(self):
        """Returns string representation of URI."""
        return self.uri

    def equals(self, uri):
        """Returns true if two URIs are equal."""
        return self.uri == uri.uri

    def check_response(self, resp, level, uri):
        if resp is None:
            raise InvalidUriError('\n'.join(textwrap.wrap(
                'Attempt to get %s for "%s" failed. This can happen if '
                'the URI refers to a non-existent object or if you meant to '
                'operate on a directory (e.g., leaving off -R option on gsutil '
                'cp, mv, or ls of a bucket)' % (level, uri), 80)))

    def _check_bucket_uri(self, function_name):
        if issubclass(type(self), BucketStorageUri) and not self.bucket_name:
            raise InvalidUriError(
                '%s on bucket-less URI (%s)' % (function_name, self.uri))

    def _check_object_uri(self, function_name):
        if issubclass(type(self), BucketStorageUri) and not self.object_name:
            raise InvalidUriError('%s on object-less URI (%s)' %
                                  (function_name, self.uri))

    def _warn_about_args(self, function_name, **args):
        for arg in args:
            if args[arg]:
                sys.stderr.write(
                    'Warning: %s ignores argument: %s=%s\n' %
                    (function_name, arg, str(args[arg])))

    def connect(self, access_key_id=None, secret_access_key=None, **kwargs):
        """
        Opens a connection to the appropriate provider, based on the provider
        portion of the URI. Requires credentials defined in the boto config
        file (see boto/pyami/config.py).

        @type access_key_id: string
        @param access_key_id: Optional access key ID; if omitted, boto falls
            back to its configured credentials.
        @type secret_access_key: string
        @param secret_access_key: Optional secret access key; if omitted, boto
            falls back to its configured credentials.
        @rtype: L{AWSAuthConnection<boto.gs.connection.AWSAuthConnection>}
        @return: A connection to the storage service provider of the given URI.
        """
        connection_args = dict(self.connection_args or ())

        if (hasattr(self, 'suppress_consec_slashes') and
                'suppress_consec_slashes' not in connection_args):
            connection_args['suppress_consec_slashes'] = (
                self.suppress_consec_slashes)
        connection_args.update(kwargs)
        if not self.connection:
            if self.scheme in self.provider_pool:
                self.connection = self.provider_pool[self.scheme]
            elif self.scheme == 's3':
                from boto.s3.connection import S3Connection
                self.connection = S3Connection(access_key_id,
                                               secret_access_key,
                                               **connection_args)
                self.provider_pool[self.scheme] = self.connection
            elif self.scheme == 'gs':
                from boto.gs.connection import GSConnection
                # Use OrdinaryCallingFormat instead of boto-default
                # SubdomainCallingFormat because the latter changes the hostname
                # that's checked during cert validation for HTTPS connections,
                # which will fail cert validation (when cert validation is
                # enabled).
                #
                # The same is not true for S3's HTTPS certificates. In fact,
                # we don't want to do this for S3 because S3 requires the
                # subdomain to match the location of the bucket. If the proper
                # subdomain is not used, the server will return a 301 redirect
                # with no Location header.
                #
                # Note: the following import can't be moved up to the
                # start of this file else it causes a config import failure when
                # run from the resumable upload/download tests.
                from boto.s3.connection import OrdinaryCallingFormat
                connection_args['calling_format'] = OrdinaryCallingFormat()
                self.connection = GSConnection(access_key_id,
                                               secret_access_key,
                                               **connection_args)
                self.provider_pool[self.scheme] = self.connection
            elif self.scheme == 'file':
                from boto.file.connection import FileConnection
                self.connection = FileConnection(self)
            else:
                raise InvalidUriError('Unrecognized scheme "%s"' %
                                      self.scheme)
        self.connection.debug = self.debug
        return self.connection
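
    # Illustrative usage (a sketch, not part of the API): connect() is normally
    # reached through a URI created with boto.storage_uri(). The bucket/object
    # names below are hypothetical, and credentials are assumed to be present
    # in the boto config file.
    #
    #   import boto
    #   uri = boto.storage_uri('gs://example-bucket/example-object')
    #   conn = uri.connect()   # GSConnection for 'gs', S3Connection for 's3'
    #   conn = uri.connect()   # reuses the pooled connection for the scheme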

    def has_version(self):
        return (issubclass(type(self), BucketStorageUri)
                and ((self.version_id is not None)
                     or (self.generation is not None)))

    def delete_key(self, validate=False, headers=None, version_id=None,
                   mfa_token=None):
        self._check_object_uri('delete_key')
        bucket = self.get_bucket(validate, headers)
        return bucket.delete_key(self.object_name, headers, version_id,
                                 mfa_token)

    def list_bucket(self, prefix='', delimiter='', headers=None,
                    all_versions=False):
        self._check_bucket_uri('list_bucket')
        bucket = self.get_bucket(headers=headers)
        if all_versions:
            return (v for v in bucket.list_versions(
                prefix=prefix, delimiter=delimiter, headers=headers)
                if not isinstance(v, DeleteMarker))
        else:
            return bucket.list(prefix=prefix, delimiter=delimiter,
                               headers=headers)
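
    # Illustrative usage (a sketch; the bucket name is hypothetical and
    # credentials are assumed to be configured):
    #
    #   uri = boto.storage_uri('gs://example-bucket')
    #   for key in uri.list_bucket(prefix='logs/', delimiter='/'):
    #       print(key.name)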

    def get_all_keys(self, validate=False, headers=None, prefix=None):
        bucket = self.get_bucket(validate, headers)
        return bucket.get_all_keys(headers)

    def get_bucket(self, validate=False, headers=None):
        self._check_bucket_uri('get_bucket')
        conn = self.connect()
        bucket = conn.get_bucket(self.bucket_name, validate, headers)
        self.check_response(bucket, 'bucket', self.uri)
        return bucket

    def get_key(self, validate=False, headers=None, version_id=None):
        self._check_object_uri('get_key')
        bucket = self.get_bucket(validate, headers)
        key = bucket.get_key(self.object_name, headers, version_id)
        self.check_response(key, 'key', self.uri)
        return key

    def new_key(self, validate=False, headers=None):
        self._check_object_uri('new_key')
        bucket = self.get_bucket(validate, headers)
        return bucket.new_key(self.object_name)

    def get_contents_to_stream(self, fp, headers=None, version_id=None):
        self._check_object_uri('get_contents_to_stream')
        self._warn_about_args('get_contents_to_stream', validate=False)
        key = self.get_key(None, headers)
        self.check_response(key, 'key', self.uri)
        return key.get_contents_to_file(fp, headers, version_id=version_id)

    def get_contents_to_file(self, fp, headers=None, cb=None, num_cb=10,
                             torrent=False, version_id=None,
                             res_download_handler=None, response_headers=None,
                             hash_algs=None):
        self._check_object_uri('get_contents_to_file')
        key = self.get_key(None, headers)
        self.check_response(key, 'key', self.uri)
        if hash_algs:
            key.get_contents_to_file(fp, headers, cb, num_cb, torrent,
                                     version_id, res_download_handler,
                                     response_headers,
                                     hash_algs=hash_algs)
        else:
            key.get_contents_to_file(fp, headers, cb, num_cb, torrent,
                                     version_id, res_download_handler,
                                     response_headers)

    def get_contents_as_string(self, validate=False, headers=None, cb=None,
                               num_cb=10, torrent=False, version_id=None):
        self._check_object_uri('get_contents_as_string')
        key = self.get_key(validate, headers)
        self.check_response(key, 'key', self.uri)
        return key.get_contents_as_string(headers, cb, num_cb, torrent,
                                          version_id)
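
    # Illustrative download usage (a sketch; names and the local path are
    # hypothetical):
    #
    #   uri = boto.storage_uri('s3://example-bucket/example-object')
    #   data = uri.get_contents_as_string()
    #   with open('/tmp/example-object', 'wb') as fp:
    #       uri.get_contents_to_file(fp)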

    def acl_class(self):
        conn = self.connect()
        acl_class = conn.provider.acl_class
        self.check_response(acl_class, 'acl_class', self.uri)
        return acl_class

    def canned_acls(self):
        conn = self.connect()
        canned_acls = conn.provider.canned_acls
        self.check_response(canned_acls, 'canned_acls', self.uri)
        return canned_acls


class BucketStorageUri(StorageUri):
    """
    StorageUri subclass that handles bucket storage providers.
    Callers should instantiate this class by calling boto.storage_uri().
    """

    delim = '/'
    capabilities = set([])  # A set of additional capabilities.

    def __init__(self, scheme, bucket_name=None, object_name=None,
                 debug=0, connection_args=None, suppress_consec_slashes=True,
                 version_id=None, generation=None, is_latest=False):
        """Instantiate a BucketStorageUri from a scheme, bucket, object tuple.

        @type scheme: string
        @param scheme: URI scheme naming the storage provider (gs, s3, etc.)
        @type bucket_name: string
        @param bucket_name: bucket name
        @type object_name: string
        @param object_name: object name, excluding generation/version.
        @type debug: int
        @param debug: debug level to pass in to connection (range 0..2)
        @type connection_args: map
        @param connection_args: optional map containing args to be
            passed to {S3,GS}Connection constructor (e.g., to override
            https_connection_factory).
        @param suppress_consec_slashes: If provided, controls whether
            consecutive slashes will be suppressed in key paths.
        @param version_id: Object version id (S3-specific).
        @param generation: Object generation number (GCS-specific).
        @param is_latest: boolean indicating that a versioned object is the
            current version

        After instantiation the components are available in the following
        fields: scheme, bucket_name, object_name, version_id, generation,
        is_latest, versionless_uri, version_specific_uri, uri.
        Note: If instantiated without version info, the string representation
        for a URI stays versionless; similarly, if instantiated with version
        info, the string representation for a URI stays version-specific. If you
        call one of the uri.set_contents_from_xyz() methods, a specific object
        version will be created, and its version-specific URI string can be
        retrieved from version_specific_uri even if the URI was instantiated
        without version info.
        """

        self.scheme = scheme
        self.bucket_name = bucket_name
        self.object_name = object_name
        self.debug = debug
        if connection_args:
            self.connection_args = connection_args
        self.suppress_consec_slashes = suppress_consec_slashes
        self.version_id = version_id
        self.generation = generation and int(generation)
        self.is_latest = is_latest
        self.is_version_specific = bool(self.generation) or bool(version_id)
        self._build_uri_strings()

    def _build_uri_strings(self):
        if self.bucket_name and self.object_name:
            self.versionless_uri = '%s://%s/%s' % (self.scheme, self.bucket_name,
                                                   self.object_name)
            if self.generation:
                self.version_specific_uri = '%s#%s' % (self.versionless_uri,
                                                       self.generation)
            elif self.version_id:
                self.version_specific_uri = '%s#%s' % (
                    self.versionless_uri, self.version_id)
            if self.is_version_specific:
                self.uri = self.version_specific_uri
            else:
                self.uri = self.versionless_uri
        elif self.bucket_name:
            self.uri = ('%s://%s/' % (self.scheme, self.bucket_name))
        else:
            self.uri = ('%s://' % self.scheme)
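
    # Illustrative URI string behavior (a sketch; the names and generation
    # number are hypothetical, and callers normally go through
    # boto.storage_uri() rather than the constructor):
    #
    #   uri = BucketStorageUri('gs', 'example-bucket', 'obj', generation=5)
    #   uri.versionless_uri       # 'gs://example-bucket/obj'
    #   uri.version_specific_uri  # 'gs://example-bucket/obj#5'
    #   uri.uri                   # version-specific form, since generation set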

    def _update_from_key(self, key):
        self._update_from_values(
            getattr(key, 'version_id', None),
            getattr(key, 'generation', None),
            getattr(key, 'is_latest', None),
            getattr(key, 'md5', None))

    def _update_from_values(self, version_id, generation, is_latest, md5):
        self.version_id = version_id
        self.generation = generation
        self.is_latest = is_latest
        self._build_uri_strings()
        self.md5 = md5

    def get_key(self, validate=False, headers=None, version_id=None):
        self._check_object_uri('get_key')
        bucket = self.get_bucket(validate, headers)
        if self.get_provider().name == 'aws':
            key = bucket.get_key(self.object_name, headers,
                                 version_id=(version_id or self.version_id))
        elif self.get_provider().name == 'google':
            key = bucket.get_key(self.object_name, headers,
                                 generation=self.generation)
        self.check_response(key, 'key', self.uri)
        return key

    def delete_key(self, validate=False, headers=None, version_id=None,
                   mfa_token=None):
        self._check_object_uri('delete_key')
        bucket = self.get_bucket(validate, headers)
        if self.get_provider().name == 'aws':
            version_id = version_id or self.version_id
            return bucket.delete_key(self.object_name, headers, version_id,
                                     mfa_token)
        elif self.get_provider().name == 'google':
            return bucket.delete_key(self.object_name, headers,
                                     generation=self.generation)

    def clone_replace_name(self, new_name):
        """Instantiate a BucketStorageUri from the current BucketStorageUri,
        but replacing the object_name.

        @type new_name: string
        @param new_name: new object name
        """
        self._check_bucket_uri('clone_replace_name')
        return BucketStorageUri(
            self.scheme, bucket_name=self.bucket_name, object_name=new_name,
            debug=self.debug,
            suppress_consec_slashes=self.suppress_consec_slashes)

    def clone_replace_key(self, key):
        """Instantiate a BucketStorageUri from the current BucketStorageUri,
        replacing the object name and other metadata (including generation)
        with the values found in the given Key object.

        @type key: Key
        @param key: key for the new StorageUri to represent
        """
        self._check_bucket_uri('clone_replace_key')
        version_id = None
        generation = None
        is_latest = False
        if hasattr(key, 'version_id'):
            version_id = key.version_id
        if hasattr(key, 'generation'):
            generation = key.generation
        if hasattr(key, 'is_latest'):
            is_latest = key.is_latest

        return BucketStorageUri(
            key.provider.get_provider_name(),
            bucket_name=key.bucket.name,
            object_name=key.name,
            debug=self.debug,
            suppress_consec_slashes=self.suppress_consec_slashes,
            version_id=version_id,
            generation=generation,
            is_latest=is_latest)

    def get_acl(self, validate=False, headers=None, version_id=None):
        """Returns the ACL of the bucket or object named by this URI."""
        self._check_bucket_uri('get_acl')
        bucket = self.get_bucket(validate, headers)
        # This works for both bucket- and object- level ACLs (former passes
        # key_name=None):
        key_name = self.object_name or ''
        if self.get_provider().name == 'aws':
            version_id = version_id or self.version_id
            acl = bucket.get_acl(key_name, headers, version_id)
        else:
            acl = bucket.get_acl(key_name, headers, generation=self.generation)
        self.check_response(acl, 'acl', self.uri)
        return acl

    def get_def_acl(self, validate=False, headers=None):
        """Returns a bucket's default object ACL."""
        self._check_bucket_uri('get_def_acl')
        bucket = self.get_bucket(validate, headers)
        acl = bucket.get_def_acl(headers)
        self.check_response(acl, 'acl', self.uri)
        return acl

    def get_cors(self, validate=False, headers=None):
        """Returns a bucket's CORS XML."""
        self._check_bucket_uri('get_cors')
        bucket = self.get_bucket(validate, headers)
        cors = bucket.get_cors(headers)
        self.check_response(cors, 'cors', self.uri)
        return cors

    def set_cors(self, cors, validate=False, headers=None):
        """Sets or updates a bucket's CORS XML."""
        self._check_bucket_uri('set_cors')
        bucket = self.get_bucket(validate, headers)
        bucket.set_cors(cors.to_xml(), headers)

    def get_location(self, validate=False, headers=None):
        self._check_bucket_uri('get_location')
        bucket = self.get_bucket(validate, headers)
        return bucket.get_location()

    def get_storage_class(self, validate=False, headers=None):
        self._check_bucket_uri('get_storage_class')
        # StorageClass is defined as a bucket param for GCS, but as a key
        # param for S3.
        if self.scheme != 'gs':
            raise ValueError('get_storage_class() not supported for %s '
                             'URIs.' % self.scheme)
        bucket = self.get_bucket(validate, headers)
        return bucket.get_storage_class()

    def get_subresource(self, subresource, validate=False, headers=None,
                        version_id=None):
        self._check_bucket_uri('get_subresource')
        bucket = self.get_bucket(validate, headers)
        return bucket.get_subresource(subresource, self.object_name, headers,
                                      version_id)

    def add_group_email_grant(self, permission, email_address, recursive=False,
                              validate=False, headers=None):
        self._check_bucket_uri('add_group_email_grant')
        if self.scheme != 'gs':
            raise ValueError('add_group_email_grant() not supported for %s '
                             'URIs.' % self.scheme)
        if self.object_name:
            if recursive:
                raise ValueError('add_group_email_grant() on key-ful URI cannot '
                                 'specify recursive=True')
            key = self.get_key(validate, headers)
            self.check_response(key, 'key', self.uri)
            key.add_group_email_grant(permission, email_address, headers)
        elif self.bucket_name:
            bucket = self.get_bucket(validate, headers)
            bucket.add_group_email_grant(permission, email_address, recursive,
                                         headers)
        else:
            raise InvalidUriError('add_group_email_grant() on bucket-less URI '
                                  '%s' % self.uri)

    def add_email_grant(self, permission, email_address, recursive=False,
                        validate=False, headers=None):
        self._check_bucket_uri('add_email_grant')
        if not self.object_name:
            bucket = self.get_bucket(validate, headers)
            bucket.add_email_grant(permission, email_address, recursive,
                                   headers)
        else:
            key = self.get_key(validate, headers)
            self.check_response(key, 'key', self.uri)
            key.add_email_grant(permission, email_address)

    def add_user_grant(self, permission, user_id, recursive=False,
                       validate=False, headers=None):
        self._check_bucket_uri('add_user_grant')
        if not self.object_name:
            bucket = self.get_bucket(validate, headers)
            bucket.add_user_grant(permission, user_id, recursive, headers)
        else:
            key = self.get_key(validate, headers)
            self.check_response(key, 'key', self.uri)
            key.add_user_grant(permission, user_id)

    def list_grants(self, headers=None):
        self._check_bucket_uri('list_grants')
        bucket = self.get_bucket(headers=headers)
        return bucket.list_grants(headers)

    def is_file_uri(self):
        """Returns True if this URI names a file or directory."""
        return False

    def is_cloud_uri(self):
        """Returns True if this URI names a bucket or object."""
        return True

    def names_container(self):
        """
        Returns True if this URI names a directory or bucket. Will return
        False for bucket subdirs; providing bucket subdir semantics needs to
        be done by the caller (like gsutil does).
        """
        return bool(not self.object_name)

    def names_singleton(self):
        """Returns True if this URI names a file or object."""
        return bool(self.object_name)

    def names_directory(self):
        """Returns True if this URI names a directory."""
        return False

    def names_provider(self):
        """Returns True if this URI names a provider."""
        return bool(not self.bucket_name)

    def names_bucket(self):
        """Returns True if this URI names a bucket."""
        return bool(self.bucket_name) and bool(not self.object_name)

    def names_file(self):
        """Returns True if this URI names a file."""
        return False

    def names_object(self):
        """Returns True if this URI names an object."""
        return self.names_singleton()

    def is_stream(self):
        """Returns True if this URI represents an input/output stream."""
        return False

    def create_bucket(self, headers=None, location='', policy=None,
                      storage_class=None):
        self._check_bucket_uri('create_bucket')
        conn = self.connect()
        # Pass storage_class param only if this is a GCS bucket. (In S3 the
        # storage class is specified on the key object.)
        if self.scheme == 'gs':
            return conn.create_bucket(self.bucket_name, headers, location, policy,
                                      storage_class)
        else:
            return conn.create_bucket(self.bucket_name, headers, location, policy)
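
    # Illustrative usage (a sketch; the bucket name and location are
    # hypothetical):
    #
    #   uri = boto.storage_uri('gs://example-new-bucket')
    #   uri.create_bucket(location='US')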

    def delete_bucket(self, headers=None):
        self._check_bucket_uri('delete_bucket')
        conn = self.connect()
        return conn.delete_bucket(self.bucket_name, headers)

    def get_all_buckets(self, headers=None):
        conn = self.connect()
        return conn.get_all_buckets(headers)

    def get_provider(self):
        conn = self.connect()
        provider = conn.provider
        self.check_response(provider, 'provider', self.uri)
        return provider

    def set_acl(self, acl_or_str, key_name='', validate=False, headers=None,
                version_id=None, if_generation=None, if_metageneration=None):
        """Sets or updates the ACL of the bucket or object named by this URI."""
        self._check_bucket_uri('set_acl')
        key_name = key_name or self.object_name or ''
        bucket = self.get_bucket(validate, headers)
        if self.generation:
            bucket.set_acl(
                acl_or_str, key_name, headers, generation=self.generation,
                if_generation=if_generation, if_metageneration=if_metageneration)
        else:
            version_id = version_id or self.version_id
            bucket.set_acl(acl_or_str, key_name, headers, version_id)
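
    # Illustrative usage (a sketch; the names are hypothetical and the canned
    # ACL strings shown are among the values reported by canned_acls()):
    #
    #   boto.storage_uri('gs://example-bucket/example-object').set_acl(
    #       'public-read')                                  # object ACL
    #   boto.storage_uri('gs://example-bucket').set_acl('private')  # bucket ACL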

    def set_xml_acl(self, xmlstring, key_name='', validate=False, headers=None,
                    version_id=None, if_generation=None, if_metageneration=None):
        """Sets or updates the ACL of the bucket or object named by this URI,
        using an XML string."""
        self._check_bucket_uri('set_xml_acl')
        key_name = key_name or self.object_name or ''
        bucket = self.get_bucket(validate, headers)
        if self.generation:
            bucket.set_xml_acl(
                xmlstring, key_name, headers, generation=self.generation,
                if_generation=if_generation, if_metageneration=if_metageneration)
        else:
            version_id = version_id or self.version_id
            bucket.set_xml_acl(xmlstring, key_name, headers,
                               version_id=version_id)

    def set_def_xml_acl(self, xmlstring, validate=False, headers=None):
        """Sets or updates a bucket's default object ACL with an XML string."""
        self._check_bucket_uri('set_def_xml_acl')
        self.get_bucket(validate, headers).set_def_xml_acl(xmlstring, headers)

    def set_def_acl(self, acl_or_str, validate=False, headers=None,
                    version_id=None):
        """Sets or updates a bucket's default object ACL."""
        self._check_bucket_uri('set_def_acl')
        self.get_bucket(validate, headers).set_def_acl(acl_or_str, headers)

    def set_canned_acl(self, acl_str, validate=False, headers=None,
                       version_id=None):
        """Sets or updates the object's ACL to a predefined (canned) value."""
        self._check_object_uri('set_canned_acl')
        self._warn_about_args('set_canned_acl', version_id=version_id)
        key = self.get_key(validate, headers)
        self.check_response(key, 'key', self.uri)
        key.set_canned_acl(acl_str, headers)

    def set_def_canned_acl(self, acl_str, validate=False, headers=None,
                           version_id=None):
        """Sets or updates a bucket's default object ACL to a predefined
           (canned) value."""
        self._check_bucket_uri('set_def_canned_acl')
        key = self.get_key(validate, headers)
        self.check_response(key, 'key', self.uri)
        key.set_def_canned_acl(acl_str, headers, version_id)

    def set_subresource(self, subresource, value, validate=False, headers=None,
                        version_id=None):
        self._check_bucket_uri('set_subresource')
        bucket = self.get_bucket(validate, headers)
        bucket.set_subresource(subresource, value, self.object_name, headers,
                               version_id)

    def set_contents_from_string(self, s, headers=None, replace=True,
                                 cb=None, num_cb=10, policy=None, md5=None,
                                 reduced_redundancy=False):
        self._check_object_uri('set_contents_from_string')
        key = self.new_key(headers=headers)
        if self.scheme == 'gs':
            if reduced_redundancy:
                sys.stderr.write('Warning: GCS does not support '
                                 'reduced_redundancy; argument ignored by '
                                 'set_contents_from_string\n')
            result = key.set_contents_from_string(
                s, headers, replace, cb, num_cb, policy, md5)
        else:
            result = key.set_contents_from_string(
                s, headers, replace, cb, num_cb, policy, md5,
                reduced_redundancy)
        self._update_from_key(key)
        return result
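
    # Illustrative upload usage (a sketch; names are hypothetical):
    #
    #   uri = boto.storage_uri('gs://example-bucket/hello.txt')
    #   uri.set_contents_from_string('hello world')
    #   uri.version_specific_uri   # now names the newly created generation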

    def set_contents_from_file(self, fp, headers=None, replace=True, cb=None,
                               num_cb=10, policy=None, md5=None, size=None,
                               rewind=False, res_upload_handler=None):
        self._check_object_uri('set_contents_from_file')
        key = self.new_key(headers=headers)
        if self.scheme == 'gs':
            result = key.set_contents_from_file(
                fp, headers, replace, cb, num_cb, policy, md5, size=size,
                rewind=rewind, res_upload_handler=res_upload_handler)
            if res_upload_handler:
                self._update_from_values(None, res_upload_handler.generation,
                                         None, md5)
        else:
            self._warn_about_args('set_contents_from_file',
                                  res_upload_handler=res_upload_handler)
            result = key.set_contents_from_file(
                fp, headers, replace, cb, num_cb, policy, md5, size=size,
                rewind=rewind)
        self._update_from_key(key)
        return result

    def set_contents_from_stream(self, fp, headers=None, replace=True, cb=None,
                                 policy=None, reduced_redundancy=False):
        self._check_object_uri('set_contents_from_stream')
        dst_key = self.new_key(False, headers)
        result = dst_key.set_contents_from_stream(
            fp, headers, replace, cb, policy=policy,
            reduced_redundancy=reduced_redundancy)
        self._update_from_key(dst_key)
        return result

    def copy_key(self, src_bucket_name, src_key_name, metadata=None,
                 src_version_id=None, storage_class='STANDARD',
                 preserve_acl=False, encrypt_key=False, headers=None,
                 query_args=None, src_generation=None):
        """Returns newly created key."""
        self._check_object_uri('copy_key')
        dst_bucket = self.get_bucket(validate=False, headers=headers)
        if src_generation:
            return dst_bucket.copy_key(
                new_key_name=self.object_name,
                src_bucket_name=src_bucket_name,
                src_key_name=src_key_name, metadata=metadata,
                storage_class=storage_class, preserve_acl=preserve_acl,
                encrypt_key=encrypt_key, headers=headers, query_args=query_args,
                src_generation=src_generation)
        else:
            return dst_bucket.copy_key(
                new_key_name=self.object_name,
                src_bucket_name=src_bucket_name, src_key_name=src_key_name,
                metadata=metadata, src_version_id=src_version_id,
                storage_class=storage_class, preserve_acl=preserve_acl,
                encrypt_key=encrypt_key, headers=headers, query_args=query_args)
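
    # Illustrative usage (a sketch; bucket and key names are hypothetical).
    # copy_key() copies an existing object into the object named by this URI:
    #
    #   dst = boto.storage_uri('s3://example-dst-bucket/copied-object')
    #   dst.copy_key('example-src-bucket', 'original-object')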

    def enable_logging(self, target_bucket, target_prefix=None, validate=False,
                       headers=None, version_id=None):
        self._check_bucket_uri('enable_logging')
        bucket = self.get_bucket(validate, headers)
        bucket.enable_logging(target_bucket, target_prefix, headers=headers)

    def disable_logging(self, validate=False, headers=None, version_id=None):
        self._check_bucket_uri('disable_logging')
        bucket = self.get_bucket(validate, headers)
        bucket.disable_logging(headers=headers)

    def get_logging_config(self, validate=False, headers=None, version_id=None):
        self._check_bucket_uri('get_logging_config')
        bucket = self.get_bucket(validate, headers)
        return bucket.get_logging_config(headers=headers)

    def set_website_config(self, main_page_suffix=None, error_key=None,
                           validate=False, headers=None):
        self._check_bucket_uri('set_website_config')
        bucket = self.get_bucket(validate, headers)
        if not (main_page_suffix or error_key):
            bucket.delete_website_configuration(headers)
        else:
            bucket.configure_website(main_page_suffix, error_key, headers)

    def get_website_config(self, validate=False, headers=None):
        self._check_bucket_uri('get_website_config')
        bucket = self.get_bucket(validate, headers)
        return bucket.get_website_configuration(headers)

    def get_versioning_config(self, headers=None):
        self._check_bucket_uri('get_versioning_config')
        bucket = self.get_bucket(False, headers)
        return bucket.get_versioning_status(headers)

    def configure_versioning(self, enabled, headers=None):
        self._check_bucket_uri('configure_versioning')
        bucket = self.get_bucket(False, headers)
        return bucket.configure_versioning(enabled, headers)

    def set_metadata(self, metadata_plus, metadata_minus, preserve_acl,
                     headers=None):
        return self.get_key(False).set_remote_metadata(metadata_plus,
                                                       metadata_minus,
                                                       preserve_acl,
                                                       headers=headers)

    def compose(self, components, content_type=None, headers=None):
        self._check_object_uri('compose')
        component_keys = []
        for suri in components:
            component_keys.append(suri.new_key())
            component_keys[-1].generation = suri.generation
        self.generation = self.new_key().compose(
            component_keys, content_type=content_type, headers=headers)
        self._build_uri_strings()
        return self
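
    # Illustrative usage (a sketch; GCS-only, since compose() is a GCS
    # operation, and the names are hypothetical):
    #
    #   part1 = boto.storage_uri('gs://example-bucket/part-1')
    #   part2 = boto.storage_uri('gs://example-bucket/part-2')
    #   whole = boto.storage_uri('gs://example-bucket/whole')
    #   whole.compose([part1, part2], content_type='text/plain')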

    def get_lifecycle_config(self, validate=False, headers=None):
        """Returns a bucket's lifecycle configuration."""
        self._check_bucket_uri('get_lifecycle_config')
        bucket = self.get_bucket(validate, headers)
        lifecycle_config = bucket.get_lifecycle_config(headers)
        self.check_response(lifecycle_config, 'lifecycle', self.uri)
        return lifecycle_config

    def configure_lifecycle(self, lifecycle_config, validate=False,
                            headers=None):
        """Sets or updates a bucket's lifecycle configuration."""
        self._check_bucket_uri('configure_lifecycle')
        bucket = self.get_bucket(validate, headers)
        bucket.configure_lifecycle(lifecycle_config, headers)

    def exists(self, headers=None):
        """Returns True if the object exists or False if it doesn't."""
        if not self.object_name:
            raise InvalidUriError('exists on object-less URI (%s)' % self.uri)
        bucket = self.get_bucket()
        key = bucket.get_key(self.object_name, headers=headers)
        return bool(key)


class FileStorageUri(StorageUri):
    """
    StorageUri subclass that handles files in the local file system.
    Callers should instantiate this class by calling boto.storage_uri().

    See file/README about how we map StorageUri operations onto a file system.
    """

    delim = os.sep

    def __init__(self, object_name, debug, is_stream=False):
        """Instantiate a FileStorageUri from a path name.

        @type object_name: string
        @param object_name: object name
        @type debug: boolean
        @param debug: whether to enable debugging on this StorageUri

        After instantiation the components are available in the following
        fields: uri, scheme, bucket_name (always blank for this "anonymous"
        bucket), object_name.
        """

        self.scheme = 'file'
        self.bucket_name = ''
        self.object_name = object_name
        self.uri = 'file://' + object_name
        self.debug = debug
        self.stream = is_stream
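
    # Illustrative usage (a sketch; the path is hypothetical). File URIs are
    # normally created via boto.storage_uri() with a 'file://' scheme or a
    # plain path:
    #
    #   uri = boto.storage_uri('file:///tmp/example.txt')
    #   uri.is_file_uri()       # True
    #   uri.names_singleton()   # True if the path is not a directory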

    def clone_replace_name(self, new_name):
        """Instantiate a FileStorageUri from the current FileStorageUri,
        but replacing the object_name.

        @type new_name: string
        @param new_name: new object name
        """
        return FileStorageUri(new_name, self.debug, self.stream)

    def is_file_uri(self):
        """Returns True if this URI names a file or directory."""
        return True

    def is_cloud_uri(self):
        """Returns True if this URI names a bucket or object."""
        return False

    def names_container(self):
        """Returns True if this URI names a directory or bucket."""
        return self.names_directory()

    def names_singleton(self):
        """Returns True if this URI names a file (or stream) or object."""
        return not self.names_container()

    def names_directory(self):
        """Returns True if this URI names a directory."""
        if self.stream:
            return False
        return os.path.isdir(self.object_name)

    def names_provider(self):
        """Returns True if this URI names a provider."""
        return False

    def names_bucket(self):
        """Returns True if this URI names a bucket."""
        return False

    def names_file(self):
        """Returns True if this URI names a file."""
        return self.names_singleton()

    def names_object(self):
        """Returns True if this URI names an object."""
        return False

    def is_stream(self):
        """Returns True if this URI represents an input/output stream."""
        return bool(self.stream)

    def close(self):
        """Closes the underlying file."""
        self.get_key().close()

    def exists(self, _headers_not_used=None):
        """Returns True if the file exists or False if it doesn't."""
        # The _headers_not_used parameter is ignored. It is only there to ensure
        # that this method's signature is identical to the exists method on the
        # BucketStorageUri class.
        return os.path.exists(self.object_name)