1# -*- coding: utf-8 -*-
2# Copyright 2014 Google Inc. All Rights Reserved.
3#
4# Licensed under the Apache License, Version 2.0 (the "License");
5# you may not use this file except in compliance with the License.
6# You may obtain a copy of the License at
7#
8#     http://www.apache.org/licenses/LICENSE-2.0
9#
10# Unless required by applicable law or agreed to in writing, software
11# distributed under the License is distributed on an "AS IS" BASIS,
12# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13# See the License for the specific language governing permissions and
14# limitations under the License.
15"""JSON gsutil Cloud API implementation for Google Cloud Storage."""
16
17from __future__ import absolute_import
18
19import httplib
20import json
21import logging
22import os
23import socket
24import ssl
25import time
26import traceback
27
28from apitools.base.py import credentials_lib
29from apitools.base.py import encoding
30from apitools.base.py import exceptions as apitools_exceptions
31from apitools.base.py import http_wrapper as apitools_http_wrapper
32from apitools.base.py import transfer as apitools_transfer
33from apitools.base.py.util import CalculateWaitForRetry
34
35import boto
36from boto import config
37from gcs_oauth2_boto_plugin import oauth2_helper
38import httplib2
39import oauth2client
40from oauth2client import devshell
41from oauth2client import multistore_file
42
43from gslib.cloud_api import AccessDeniedException
44from gslib.cloud_api import ArgumentException
45from gslib.cloud_api import BadRequestException
46from gslib.cloud_api import CloudApi
47from gslib.cloud_api import NotEmptyException
48from gslib.cloud_api import NotFoundException
49from gslib.cloud_api import PreconditionException
50from gslib.cloud_api import Preconditions
51from gslib.cloud_api import ResumableDownloadException
52from gslib.cloud_api import ResumableUploadAbortException
53from gslib.cloud_api import ResumableUploadException
54from gslib.cloud_api import ResumableUploadStartOverException
55from gslib.cloud_api import ServiceException
56from gslib.cloud_api_helper import ValidateDstObjectMetadata
57from gslib.cred_types import CredTypes
58from gslib.exception import CommandException
59from gslib.gcs_json_media import BytesTransferredContainer
60from gslib.gcs_json_media import DownloadCallbackConnectionClassFactory
61from gslib.gcs_json_media import HttpWithDownloadStream
62from gslib.gcs_json_media import HttpWithNoRetries
63from gslib.gcs_json_media import UploadCallbackConnectionClassFactory
64from gslib.gcs_json_media import WrapDownloadHttpRequest
65from gslib.gcs_json_media import WrapUploadHttpRequest
66from gslib.no_op_credentials import NoOpCredentials
67from gslib.progress_callback import ProgressCallbackWithBackoff
68from gslib.project_id import PopulateProjectId
69from gslib.third_party.storage_apitools import storage_v1_client as apitools_client
70from gslib.third_party.storage_apitools import storage_v1_messages as apitools_messages
71from gslib.tracker_file import DeleteTrackerFile
72from gslib.tracker_file import GetRewriteTrackerFilePath
73from gslib.tracker_file import HashRewriteParameters
74from gslib.tracker_file import ReadRewriteTrackerFile
75from gslib.tracker_file import WriteRewriteTrackerFile
76from gslib.translation_helper import CreateBucketNotFoundException
77from gslib.translation_helper import CreateNotFoundExceptionForObjectWrite
78from gslib.translation_helper import CreateObjectNotFoundException
79from gslib.translation_helper import DEFAULT_CONTENT_TYPE
80from gslib.translation_helper import PRIVATE_DEFAULT_OBJ_ACL
81from gslib.translation_helper import REMOVE_CORS_CONFIG
82from gslib.util import GetBotoConfigFileList
83from gslib.util import GetCertsFile
84from gslib.util import GetCredentialStoreFilename
85from gslib.util import GetGceCredentialCacheFilename
86from gslib.util import GetJsonResumableChunkSize
87from gslib.util import GetMaxRetryDelay
88from gslib.util import GetNewHttp
89from gslib.util import GetNumRetries
90from gslib.util import UTF8
91
92
93# Implementation supports only 'gs' URLs, so provider is unused.
94# pylint: disable=unused-argument
95
96DEFAULT_GCS_JSON_VERSION = 'v1'
97
98NUM_BUCKETS_PER_LIST_PAGE = 1000
99NUM_OBJECTS_PER_LIST_PAGE = 1000
100
101TRANSLATABLE_APITOOLS_EXCEPTIONS = (apitools_exceptions.HttpError,
102                                    apitools_exceptions.StreamExhausted,
103                                    apitools_exceptions.TransferError,
104                                    apitools_exceptions.TransferInvalidError)
105
106# TODO: Distribute these exceptions better through apitools and here.
107# Right now, apitools is configured not to handle any exceptions on
108# uploads/downloads.
109# oauth2_client tries to JSON-decode the response, which can result
110# in a ValueError if the response was invalid. Until that is fixed in
111# oauth2_client, need to handle it here.
112HTTP_TRANSFER_EXCEPTIONS = (apitools_exceptions.TransferRetryError,
113                            apitools_exceptions.BadStatusCodeError,
114                            # TODO: Honor retry-after headers.
115                            apitools_exceptions.RetryAfterError,
116                            apitools_exceptions.RequestError,
117                            httplib.BadStatusLine,
118                            httplib.IncompleteRead,
119                            httplib.ResponseNotReady,
120                            httplib2.ServerNotFoundError,
121                            socket.error,
122                            socket.gaierror,
123                            socket.timeout,
124                            ssl.SSLError,
125                            ValueError)
126
127_VALIDATE_CERTIFICATES_503_MESSAGE = (
128    """Service Unavailable. If you have recently changed
129    https_validate_certificates from True to False in your boto configuration
130    file, please delete any cached access tokens in your filesystem (at %s)
131    and try again.""" % GetCredentialStoreFilename())
132
133
134class GcsJsonApi(CloudApi):
135  """Google Cloud Storage JSON implementation of gsutil Cloud API."""
136
137  def __init__(self, bucket_storage_uri_class, logger, provider=None,
138               credentials=None, debug=0, trace_token=None):
139    """Performs necessary setup for interacting with Google Cloud Storage.
140
141    Args:
142      bucket_storage_uri_class: Unused.
143      logger: logging.logger for outputting log messages.
144      provider: Unused.  This implementation supports only Google Cloud Storage.
145      credentials: Credentials to be used for interacting with Google Cloud
146                   Storage.
147      debug: Debug level for the API implementation (0..3).
148      trace_token: Trace token to pass to the API implementation.
149    """
150    # TODO: Plumb host_header for perfdiag / test_perfdiag.
151    # TODO: Add jitter to apitools' http_wrapper retry mechanism.
152    super(GcsJsonApi, self).__init__(bucket_storage_uri_class, logger,
153                                     provider='gs', debug=debug)
154    no_op_credentials = False
155    if not credentials:
156      loaded_credentials = self._CheckAndGetCredentials(logger)
157
158      if not loaded_credentials:
159        loaded_credentials = NoOpCredentials()
160        no_op_credentials = True
161    else:
162      if isinstance(credentials, NoOpCredentials):
163        no_op_credentials = True
164
165    self.credentials = credentials or loaded_credentials
166
167    self.certs_file = GetCertsFile()
168
169    self.http = GetNewHttp()
170
171    # Re-use download and upload connections. This class is only called
172    # sequentially, but we can share TCP warmed-up connections across calls.
173    self.download_http = self._GetNewDownloadHttp()
174    self.upload_http = self._GetNewUploadHttp()
175    if self.credentials:
176      self.authorized_download_http = self.credentials.authorize(
177          self.download_http)
178      self.authorized_upload_http = self.credentials.authorize(self.upload_http)
179    else:
180      self.authorized_download_http = self.download_http
181      self.authorized_upload_http = self.upload_http
182
183    WrapDownloadHttpRequest(self.authorized_download_http)
184    WrapUploadHttpRequest(self.authorized_upload_http)
185
186    self.http_base = 'https://'
187    gs_json_host = config.get('Credentials', 'gs_json_host', None)
188    self.host_base = gs_json_host or 'www.googleapis.com'
189
190    if not gs_json_host:
191      gs_host = config.get('Credentials', 'gs_host', None)
192      if gs_host:
193        raise ArgumentException(
194            'JSON API is selected but gs_json_host is not configured, '
195            'while gs_host is configured to %s. Please also configure '
196            'gs_json_host and gs_json_port to match your desired endpoint.'
197            % gs_host)
198
199    gs_json_port = config.get('Credentials', 'gs_json_port', None)
200
201    if not gs_json_port:
202      gs_port = config.get('Credentials', 'gs_port', None)
203      if gs_port:
204        raise ArgumentException(
205            'JSON API is selected but gs_json_port is not configured, '
206            'while gs_port is configured to %s. Please also configure '
207            'gs_json_host and gs_json_port to match your desired endpoint.'
208            % gs_port)
209      self.host_port = ''
210    else:
211      self.host_port = ':' + config.get('Credentials', 'gs_json_port')
212
213    self.api_version = config.get('GSUtil', 'json_api_version',
214                                  DEFAULT_GCS_JSON_VERSION)
215    self.url_base = (self.http_base + self.host_base + self.host_port + '/' +
216                     'storage/' + self.api_version + '/')
217
218    credential_store_key_dict = self._GetCredentialStoreKeyDict(
219        self.credentials)
220
221    self.credentials.set_store(
222        multistore_file.get_credential_storage_custom_key(
223            GetCredentialStoreFilename(), credential_store_key_dict))
224
225    self.num_retries = GetNumRetries()
226    self.max_retry_wait = GetMaxRetryDelay()
227
228    log_request = (debug >= 3)
229    log_response = (debug >= 3)
230
231    self.global_params = apitools_messages.StandardQueryParameters(
232        trace='token:%s' % trace_token) if trace_token else None
233
234    self.api_client = apitools_client.StorageV1(
235        url=self.url_base, http=self.http, log_request=log_request,
236        log_response=log_response, credentials=self.credentials,
237        version=self.api_version, default_global_params=self.global_params)
238    self.api_client.max_retry_wait = self.max_retry_wait
239    self.api_client.num_retries = self.num_retries
240
241    if no_op_credentials:
242      # This API key is not secret and is used to identify gsutil during
243      # anonymous requests.
244      self.api_client.AddGlobalParam('key',
245                                     u'AIzaSyDnacJHrKma0048b13sh8cgxNUwulubmJM')
246
247  def _CheckAndGetCredentials(self, logger):
248    configured_cred_types = []
249    try:
250      if self._HasOauth2UserAccountCreds():
251        configured_cred_types.append(CredTypes.OAUTH2_USER_ACCOUNT)
252      if self._HasOauth2ServiceAccountCreds():
253        configured_cred_types.append(CredTypes.OAUTH2_SERVICE_ACCOUNT)
254      if len(configured_cred_types) > 1:
255        # We only allow one set of configured credentials. Otherwise, we're
256        # choosing one arbitrarily, which can be very confusing to the user
257        # (e.g., if only one is authorized to perform some action) and can
258        # also mask errors.
259        # Because boto merges config files, GCE credentials show up by default
260        # for GCE VMs. We don't want to fail when a user creates a boto file
261        # with their own credentials, so in this case we'll use the OAuth2
262        # user credentials.
263        failed_cred_type = None
264        raise CommandException(
265            ('You have multiple types of configured credentials (%s), which is '
266             'not supported. One common way this happens is if you run gsutil '
267             'config to create credentials and later run gcloud auth, and '
268             'create a second set of credentials. Your boto config path is: '
269             '%s. For more help, see "gsutil help creds".')
270            % (configured_cred_types, GetBotoConfigFileList()))
271
272      failed_cred_type = CredTypes.OAUTH2_USER_ACCOUNT
273      user_creds = self._GetOauth2UserAccountCreds()
274      failed_cred_type = CredTypes.OAUTH2_SERVICE_ACCOUNT
275      service_account_creds = self._GetOauth2ServiceAccountCreds()
276      failed_cred_type = CredTypes.GCE
277      gce_creds = self._GetGceCreds()
278      failed_cred_type = CredTypes.DEVSHELL
279      devshell_creds = self._GetDevshellCreds()
280      return user_creds or service_account_creds or gce_creds or devshell_creds
281    except:  # pylint: disable=bare-except
282
283      # If we didn't actually try to authenticate because there were multiple
284      # types of configured credentials, don't emit this warning.
285      if failed_cred_type:
286        if os.environ.get('CLOUDSDK_WRAPPER') == '1':
287          logger.warn(
288              'Your "%s" credentials are invalid. Please run\n'
289              '  $ gcloud auth login', failed_cred_type)
290        else:
291          logger.warn(
292              'Your "%s" credentials are invalid. For more help, see '
293              '"gsutil help creds", or re-run the gsutil config command (see '
294              '"gsutil help config").', failed_cred_type)
295
296      # If there's any set of configured credentials, we'll fail if they're
297      # invalid, rather than silently falling back to anonymous config (as
298      # boto does). That approach leads to much confusion if users don't
299      # realize their credentials are invalid.
300      raise
301
302  def _HasOauth2ServiceAccountCreds(self):
303    return config.has_option('Credentials', 'gs_service_key_file')
304
305  def _HasOauth2UserAccountCreds(self):
306    return config.has_option('Credentials', 'gs_oauth2_refresh_token')
307
308  def _HasGceCreds(self):
309    return config.has_option('GoogleCompute', 'service_account')
310
311  def _GetOauth2ServiceAccountCreds(self):
312    if self._HasOauth2ServiceAccountCreds():
313      return oauth2_helper.OAuth2ClientFromBotoConfig(
314          boto.config,
315          cred_type=CredTypes.OAUTH2_SERVICE_ACCOUNT).GetCredentials()
316
317  def _GetOauth2UserAccountCreds(self):
318    if self._HasOauth2UserAccountCreds():
319      return oauth2_helper.OAuth2ClientFromBotoConfig(
320          boto.config).GetCredentials()
321
322  def _GetGceCreds(self):
323    if self._HasGceCreds():
324      try:
325        return credentials_lib.GceAssertionCredentials(
326            cache_filename=GetGceCredentialCacheFilename())
327      except apitools_exceptions.ResourceUnavailableError, e:
328        if 'service account' in str(e) and 'does not exist' in str(e):
329          return None
330        raise
331
332  def _GetDevshellCreds(self):
333    try:
334      return devshell.DevshellCredentials()
335    except devshell.NoDevshellServer:
336      return None
337    except:
338      raise
339
340  def _GetCredentialStoreKeyDict(self, credentials):
341    """Disambiguates a credential for caching in a credential store.
342
343    Different credential types have different fields that identify them.
344    This function assembles relevant information in a dict and returns it.
345
346    Args:
347      credentials: An OAuth2Credentials object.
348
349    Returns:
350      Dict of relevant identifiers for credentials.
351    """
352    # TODO: If scopes ever become available in the credentials themselves,
353    # include them in the key dict.
354    key_dict = {'api_version': self.api_version}
355    # pylint: disable=protected-access
356    if isinstance(credentials, devshell.DevshellCredentials):
357      key_dict['user_email'] = credentials.user_email
358    elif isinstance(credentials,
359                    oauth2client.service_account._ServiceAccountCredentials):
360      key_dict['_service_account_email'] = credentials._service_account_email
361    elif isinstance(credentials,
362                    oauth2client.client.SignedJwtAssertionCredentials):
363      key_dict['service_account_name'] = credentials.service_account_name
364    elif isinstance(credentials, oauth2client.client.OAuth2Credentials):
365      if credentials.client_id and credentials.client_id != 'null':
366        key_dict['client_id'] = credentials.client_id
367      key_dict['refresh_token'] = credentials.refresh_token
368    # pylint: enable=protected-access
369
370    return key_dict
371
372  def _GetNewDownloadHttp(self):
373    return GetNewHttp(http_class=HttpWithDownloadStream)
374
375  def _GetNewUploadHttp(self):
376    """Returns an upload-safe Http object (by disabling httplib2 retries)."""
377    return GetNewHttp(http_class=HttpWithNoRetries)
378
379  def GetBucket(self, bucket_name, provider=None, fields=None):
380    """See CloudApi class for function doc strings."""
381    projection = (apitools_messages.StorageBucketsGetRequest
382                  .ProjectionValueValuesEnum.full)
383    apitools_request = apitools_messages.StorageBucketsGetRequest(
384        bucket=bucket_name, projection=projection)
385    global_params = apitools_messages.StandardQueryParameters()
386    if fields:
387      global_params.fields = ','.join(set(fields))
388
389    # Here and in list buckets, we have no way of knowing
390    # whether we requested a field and didn't get it because it didn't exist
391    # or because we didn't have permission to access it.
392    try:
393      return self.api_client.buckets.Get(apitools_request,
394                                         global_params=global_params)
395    except TRANSLATABLE_APITOOLS_EXCEPTIONS, e:
396      self._TranslateExceptionAndRaise(e, bucket_name=bucket_name)
397
398  def PatchBucket(self, bucket_name, metadata, canned_acl=None,
399                  canned_def_acl=None, preconditions=None, provider=None,
400                  fields=None):
401    """See CloudApi class for function doc strings."""
402    projection = (apitools_messages.StorageBucketsPatchRequest
403                  .ProjectionValueValuesEnum.full)
404    bucket_metadata = metadata
405
406    if not preconditions:
407      preconditions = Preconditions()
408
409    # For blank metadata objects, we need to explicitly call
410    # them out to apitools so it will send/erase them.
411    apitools_include_fields = []
412    for metadata_field in ('metadata', 'lifecycle', 'logging', 'versioning',
413                           'website'):
414      attr = getattr(bucket_metadata, metadata_field, None)
415      if attr and not encoding.MessageToDict(attr):
416        setattr(bucket_metadata, metadata_field, None)
417        apitools_include_fields.append(metadata_field)
418
419    if bucket_metadata.cors and bucket_metadata.cors == REMOVE_CORS_CONFIG:
420      bucket_metadata.cors = []
421      apitools_include_fields.append('cors')
422
423    if (bucket_metadata.defaultObjectAcl and
424        bucket_metadata.defaultObjectAcl[0] == PRIVATE_DEFAULT_OBJ_ACL):
425      bucket_metadata.defaultObjectAcl = []
426      apitools_include_fields.append('defaultObjectAcl')
427
428    predefined_acl = None
429    if canned_acl:
430      # Must null out existing ACLs to apply a canned ACL.
431      apitools_include_fields.append('acl')
432      predefined_acl = (
433          apitools_messages.StorageBucketsPatchRequest.
434          PredefinedAclValueValuesEnum(
435              self._BucketCannedAclToPredefinedAcl(canned_acl)))
436
437    predefined_def_acl = None
438    if canned_def_acl:
439      # Must null out existing default object ACLs to apply a canned ACL.
440      apitools_include_fields.append('defaultObjectAcl')
441      predefined_def_acl = (
442          apitools_messages.StorageBucketsPatchRequest.
443          PredefinedDefaultObjectAclValueValuesEnum(
444              self._ObjectCannedAclToPredefinedAcl(canned_def_acl)))
445
446    apitools_request = apitools_messages.StorageBucketsPatchRequest(
447        bucket=bucket_name, bucketResource=bucket_metadata,
448        projection=projection,
449        ifMetagenerationMatch=preconditions.meta_gen_match,
450        predefinedAcl=predefined_acl,
451        predefinedDefaultObjectAcl=predefined_def_acl)
452    global_params = apitools_messages.StandardQueryParameters()
453    if fields:
454      global_params.fields = ','.join(set(fields))
455    with self.api_client.IncludeFields(apitools_include_fields):
456      try:
457        return self.api_client.buckets.Patch(apitools_request,
458                                             global_params=global_params)
459      except TRANSLATABLE_APITOOLS_EXCEPTIONS, e:
460        self._TranslateExceptionAndRaise(e)
461
462  def CreateBucket(self, bucket_name, project_id=None, metadata=None,
463                   provider=None, fields=None):
464    """See CloudApi class for function doc strings."""
465    projection = (apitools_messages.StorageBucketsInsertRequest
466                  .ProjectionValueValuesEnum.full)
467    if not metadata:
468      metadata = apitools_messages.Bucket()
469    metadata.name = bucket_name
470
471    if metadata.location:
472      metadata.location = metadata.location.upper()
473    if metadata.storageClass:
474      metadata.storageClass = metadata.storageClass.upper()
475
476    project_id = PopulateProjectId(project_id)
477
478    apitools_request = apitools_messages.StorageBucketsInsertRequest(
479        bucket=metadata, project=project_id, projection=projection)
480    global_params = apitools_messages.StandardQueryParameters()
481    if fields:
482      global_params.fields = ','.join(set(fields))
483    try:
484      return self.api_client.buckets.Insert(apitools_request,
485                                            global_params=global_params)
486    except TRANSLATABLE_APITOOLS_EXCEPTIONS, e:
487      self._TranslateExceptionAndRaise(e, bucket_name=bucket_name)
488
489  def DeleteBucket(self, bucket_name, preconditions=None, provider=None):
490    """See CloudApi class for function doc strings."""
491    if not preconditions:
492      preconditions = Preconditions()
493
494    apitools_request = apitools_messages.StorageBucketsDeleteRequest(
495        bucket=bucket_name, ifMetagenerationMatch=preconditions.meta_gen_match)
496
497    try:
498      self.api_client.buckets.Delete(apitools_request)
499    except TRANSLATABLE_APITOOLS_EXCEPTIONS, e:
500      if isinstance(
501          self._TranslateApitoolsException(e, bucket_name=bucket_name),
502          NotEmptyException):
503        # If bucket is not empty, check to see if versioning is enabled and
504        # signal that in the exception if it is.
505        bucket_metadata = self.GetBucket(bucket_name,
506                                         fields=['versioning'])
507        if bucket_metadata.versioning and bucket_metadata.versioning.enabled:
508          raise NotEmptyException('VersionedBucketNotEmpty',
509                                  status=e.status_code)
510      self._TranslateExceptionAndRaise(e, bucket_name=bucket_name)
511
512  def ListBuckets(self, project_id=None, provider=None, fields=None):
513    """See CloudApi class for function doc strings."""
514    projection = (apitools_messages.StorageBucketsListRequest
515                  .ProjectionValueValuesEnum.full)
516    project_id = PopulateProjectId(project_id)
517
518    apitools_request = apitools_messages.StorageBucketsListRequest(
519        project=project_id, maxResults=NUM_BUCKETS_PER_LIST_PAGE,
520        projection=projection)
521    global_params = apitools_messages.StandardQueryParameters()
522    if fields:
523      if 'nextPageToken' not in fields:
524        fields.add('nextPageToken')
525      global_params.fields = ','.join(set(fields))
526    try:
527      bucket_list = self.api_client.buckets.List(apitools_request,
528                                                 global_params=global_params)
529    except TRANSLATABLE_APITOOLS_EXCEPTIONS, e:
530      self._TranslateExceptionAndRaise(e)
531
532    for bucket in self._YieldBuckets(bucket_list):
533      yield bucket
534
535    while bucket_list.nextPageToken:
536      apitools_request = apitools_messages.StorageBucketsListRequest(
537          project=project_id, pageToken=bucket_list.nextPageToken,
538          maxResults=NUM_BUCKETS_PER_LIST_PAGE, projection=projection)
539      try:
540        bucket_list = self.api_client.buckets.List(apitools_request,
541                                                   global_params=global_params)
542      except TRANSLATABLE_APITOOLS_EXCEPTIONS, e:
543        self._TranslateExceptionAndRaise(e)
544
545      for bucket in self._YieldBuckets(bucket_list):
546        yield bucket
547
548  def _YieldBuckets(self, bucket_list):
549    """Yields buckets from a list returned by apitools."""
550    if bucket_list.items:
551      for bucket in bucket_list.items:
552        yield bucket
553
554  def ListObjects(self, bucket_name, prefix=None, delimiter=None,
555                  all_versions=None, provider=None, fields=None):
556    """See CloudApi class for function doc strings."""
557    projection = (apitools_messages.StorageObjectsListRequest
558                  .ProjectionValueValuesEnum.full)
559    apitools_request = apitools_messages.StorageObjectsListRequest(
560        bucket=bucket_name, prefix=prefix, delimiter=delimiter,
561        versions=all_versions, projection=projection,
562        maxResults=NUM_OBJECTS_PER_LIST_PAGE)
563    global_params = apitools_messages.StandardQueryParameters()
564
565    if fields:
566      fields = set(fields)
567      if 'nextPageToken' not in fields:
568        fields.add('nextPageToken')
569      global_params.fields = ','.join(fields)
570
571    try:
572      object_list = self.api_client.objects.List(apitools_request,
573                                                 global_params=global_params)
574    except TRANSLATABLE_APITOOLS_EXCEPTIONS, e:
575      self._TranslateExceptionAndRaise(e, bucket_name=bucket_name)
576
577    for object_or_prefix in self._YieldObjectsAndPrefixes(object_list):
578      yield object_or_prefix
579
580    while object_list.nextPageToken:
581      apitools_request = apitools_messages.StorageObjectsListRequest(
582          bucket=bucket_name, prefix=prefix, delimiter=delimiter,
583          versions=all_versions, projection=projection,
584          pageToken=object_list.nextPageToken,
585          maxResults=NUM_OBJECTS_PER_LIST_PAGE)
586      try:
587        object_list = self.api_client.objects.List(apitools_request,
588                                                   global_params=global_params)
589      except TRANSLATABLE_APITOOLS_EXCEPTIONS, e:
590        self._TranslateExceptionAndRaise(e, bucket_name=bucket_name)
591
592      for object_or_prefix in self._YieldObjectsAndPrefixes(object_list):
593        yield object_or_prefix
594
595  def _YieldObjectsAndPrefixes(self, object_list):
596    # Yield prefixes first so that checking for the presence of a subdirectory
597    # is fast.
598    if object_list.prefixes:
599      for prefix in object_list.prefixes:
600        yield CloudApi.CsObjectOrPrefix(prefix,
601                                        CloudApi.CsObjectOrPrefixType.PREFIX)
602    if object_list.items:
603      for cloud_obj in object_list.items:
604        yield CloudApi.CsObjectOrPrefix(cloud_obj,
605                                        CloudApi.CsObjectOrPrefixType.OBJECT)
606
607  def GetObjectMetadata(self, bucket_name, object_name, generation=None,
608                        provider=None, fields=None):
609    """See CloudApi class for function doc strings."""
610    projection = (apitools_messages.StorageObjectsGetRequest
611                  .ProjectionValueValuesEnum.full)
612
613    if generation:
614      generation = long(generation)
615
616    apitools_request = apitools_messages.StorageObjectsGetRequest(
617        bucket=bucket_name, object=object_name, projection=projection,
618        generation=generation)
619    global_params = apitools_messages.StandardQueryParameters()
620    if fields:
621      global_params.fields = ','.join(set(fields))
622
623    try:
624      return self.api_client.objects.Get(apitools_request,
625                                         global_params=global_params)
626    except TRANSLATABLE_APITOOLS_EXCEPTIONS, e:
627      self._TranslateExceptionAndRaise(e, bucket_name=bucket_name,
628                                       object_name=object_name,
629                                       generation=generation)
630
631  def GetObjectMedia(
632      self, bucket_name, object_name, download_stream,
633      provider=None, generation=None, object_size=None,
634      download_strategy=CloudApi.DownloadStrategy.ONE_SHOT, start_byte=0,
635      end_byte=None, progress_callback=None, serialization_data=None,
636      digesters=None):
637    """See CloudApi class for function doc strings."""
638    # This implementation will get the object metadata first if we don't pass it
639    # in via serialization_data.
640    if generation:
641      generation = long(generation)
642
643    # 'outer_total_size' is only used for formatting user output, and is
644    # expected to be one higher than the last byte that should be downloaded.
645    # TODO: Change DownloadCallbackConnectionClassFactory and progress callbacks
646    # to more elegantly handle total size for components of files.
647    outer_total_size = object_size
648    if end_byte:
649      outer_total_size = end_byte + 1
650    elif serialization_data:
651      outer_total_size = json.loads(serialization_data)['total_size']
652
653    if progress_callback:
654      if outer_total_size is None:
655        raise ArgumentException('Download size is required when callbacks are '
656                                'requested for a download, but no size was '
657                                'provided.')
658      progress_callback(start_byte, outer_total_size)
659
660    bytes_downloaded_container = BytesTransferredContainer()
661    bytes_downloaded_container.bytes_transferred = start_byte
662
663    callback_class_factory = DownloadCallbackConnectionClassFactory(
664        bytes_downloaded_container, total_size=outer_total_size,
665        progress_callback=progress_callback, digesters=digesters)
666    download_http_class = callback_class_factory.GetConnectionClass()
667
668    # Point our download HTTP at our download stream.
669    self.download_http.stream = download_stream
670    self.download_http.connections = {'https': download_http_class}
671
672    if serialization_data:
673      apitools_download = apitools_transfer.Download.FromData(
674          download_stream, serialization_data, self.api_client.http,
675          num_retries=self.num_retries)
676    else:
677      apitools_download = apitools_transfer.Download.FromStream(
678          download_stream, auto_transfer=False, total_size=object_size,
679          num_retries=self.num_retries)
680
681    apitools_download.bytes_http = self.authorized_download_http
682    apitools_request = apitools_messages.StorageObjectsGetRequest(
683        bucket=bucket_name, object=object_name, generation=generation)
684
685    try:
686      if download_strategy == CloudApi.DownloadStrategy.RESUMABLE:
687        # Disable retries in apitools. We will handle them explicitly here.
688        apitools_download.retry_func = (
689            apitools_http_wrapper.RethrowExceptionHandler)
690        return self._PerformResumableDownload(
691            bucket_name, object_name, download_stream, apitools_request,
692            apitools_download, bytes_downloaded_container,
693            generation=generation, start_byte=start_byte, end_byte=end_byte,
694            serialization_data=serialization_data)
695      else:
696        return self._PerformDownload(
697            bucket_name, object_name, download_stream, apitools_request,
698            apitools_download, generation=generation, start_byte=start_byte,
699            end_byte=end_byte, serialization_data=serialization_data)
700    except TRANSLATABLE_APITOOLS_EXCEPTIONS, e:
701      self._TranslateExceptionAndRaise(e, bucket_name=bucket_name,
702                                       object_name=object_name,
703                                       generation=generation)
704
705  def _PerformResumableDownload(
706      self, bucket_name, object_name, download_stream, apitools_request,
707      apitools_download, bytes_downloaded_container, generation=None,
708      start_byte=0, end_byte=None, serialization_data=None):
709    retries = 0
710    last_progress_byte = start_byte
711    while retries <= self.num_retries:
712      try:
713        return self._PerformDownload(
714            bucket_name, object_name, download_stream, apitools_request,
715            apitools_download, generation=generation, start_byte=start_byte,
716            end_byte=end_byte, serialization_data=serialization_data)
717      except HTTP_TRANSFER_EXCEPTIONS, e:
718        start_byte = download_stream.tell()
719        bytes_downloaded_container.bytes_transferred = start_byte
720        if start_byte > last_progress_byte:
721          # We've made progress, so allow a fresh set of retries.
722          last_progress_byte = start_byte
723          retries = 0
724        retries += 1
725        if retries > self.num_retries:
726          raise ResumableDownloadException(
727              'Transfer failed after %d retries. Final exception: %s' %
728              (self.num_retries, unicode(e).encode(UTF8)))
729        time.sleep(CalculateWaitForRetry(retries, max_wait=self.max_retry_wait))
730        if self.logger.isEnabledFor(logging.DEBUG):
731          self.logger.debug(
732              'Retrying download from byte %s after exception: %s. Trace: %s',
733              start_byte, unicode(e).encode(UTF8), traceback.format_exc())
734        apitools_http_wrapper.RebuildHttpConnections(
735            apitools_download.bytes_http)
736
737  def _PerformDownload(
738      self, bucket_name, object_name, download_stream, apitools_request,
739      apitools_download, generation=None, start_byte=0, end_byte=None,
740      serialization_data=None):
741    if not serialization_data:
742      try:
743        self.api_client.objects.Get(apitools_request,
744                                    download=apitools_download)
745      except TRANSLATABLE_APITOOLS_EXCEPTIONS, e:
746        self._TranslateExceptionAndRaise(e, bucket_name=bucket_name,
747                                         object_name=object_name,
748                                         generation=generation)
749
750    # Disable apitools' default print callbacks.
751    def _NoOpCallback(unused_response, unused_download_object):
752      pass
753
754    # TODO: If we have a resumable download with accept-encoding:gzip
755    # on a object that is compressible but not in gzip form in the cloud,
756    # on-the-fly compression will gzip the object.  In this case if our
757    # download breaks, future requests will ignore the range header and just
758    # return the object (gzipped) in its entirety.  Ideally, we would unzip
759    # the bytes that we have locally and send a range request without
760    # accept-encoding:gzip so that we can download only the (uncompressed) bytes
761    # that we don't yet have.
762
763    # Since bytes_http is created in this function, we don't get the
764    # user-agent header from api_client's http automatically.
765    additional_headers = {
766        'accept-encoding': 'gzip',
767        'user-agent': self.api_client.user_agent
768    }
769    if start_byte or end_byte is not None:
770      apitools_download.GetRange(additional_headers=additional_headers,
771                                 start=start_byte, end=end_byte,
772                                 use_chunks=False)
773    else:
774      apitools_download.StreamMedia(
775          callback=_NoOpCallback, finish_callback=_NoOpCallback,
776          additional_headers=additional_headers, use_chunks=False)
777    return apitools_download.encoding
778
779  def PatchObjectMetadata(self, bucket_name, object_name, metadata,
780                          canned_acl=None, generation=None, preconditions=None,
781                          provider=None, fields=None):
782    """See CloudApi class for function doc strings."""
783    projection = (apitools_messages.StorageObjectsPatchRequest
784                  .ProjectionValueValuesEnum.full)
785
786    if not preconditions:
787      preconditions = Preconditions()
788
789    if generation:
790      generation = long(generation)
791
792    predefined_acl = None
793    apitools_include_fields = []
794    if canned_acl:
795      # Must null out existing ACLs to apply a canned ACL.
796      apitools_include_fields.append('acl')
797      predefined_acl = (
798          apitools_messages.StorageObjectsPatchRequest.
799          PredefinedAclValueValuesEnum(
800              self._ObjectCannedAclToPredefinedAcl(canned_acl)))
801
802    apitools_request = apitools_messages.StorageObjectsPatchRequest(
803        bucket=bucket_name, object=object_name, objectResource=metadata,
804        generation=generation, projection=projection,
805        ifGenerationMatch=preconditions.gen_match,
806        ifMetagenerationMatch=preconditions.meta_gen_match,
807        predefinedAcl=predefined_acl)
808    global_params = apitools_messages.StandardQueryParameters()
809    if fields:
810      global_params.fields = ','.join(set(fields))
811
812    try:
813      with self.api_client.IncludeFields(apitools_include_fields):
814        return self.api_client.objects.Patch(apitools_request,
815                                             global_params=global_params)
816    except TRANSLATABLE_APITOOLS_EXCEPTIONS, e:
817      self._TranslateExceptionAndRaise(e, bucket_name=bucket_name,
818                                       object_name=object_name,
819                                       generation=generation)
820
821  def _UploadObject(self, upload_stream, object_metadata, canned_acl=None,
822                    size=None, preconditions=None, provider=None, fields=None,
823                    serialization_data=None, tracker_callback=None,
824                    progress_callback=None,
825                    apitools_strategy=apitools_transfer.SIMPLE_UPLOAD,
826                    total_size=0):
827    # pylint: disable=g-doc-args
828    """Upload implementation. Cloud API arguments, plus two more.
829
830    Additional args:
831      apitools_strategy: SIMPLE_UPLOAD or RESUMABLE_UPLOAD.
832      total_size: Total size of the upload; None if it is unknown (streaming).
833
834    Returns:
835      Uploaded object metadata.
836    """
837    # pylint: enable=g-doc-args
838    ValidateDstObjectMetadata(object_metadata)
839    predefined_acl = None
840    if canned_acl:
841      predefined_acl = (
842          apitools_messages.StorageObjectsInsertRequest.
843          PredefinedAclValueValuesEnum(
844              self._ObjectCannedAclToPredefinedAcl(canned_acl)))
845
846    bytes_uploaded_container = BytesTransferredContainer()
847
848    if progress_callback and size:
849      total_size = size
850      progress_callback(0, size)
851
852    callback_class_factory = UploadCallbackConnectionClassFactory(
853        bytes_uploaded_container, total_size=total_size,
854        progress_callback=progress_callback)
855
856    upload_http_class = callback_class_factory.GetConnectionClass()
857    self.upload_http.connections = {'http': upload_http_class,
858                                    'https': upload_http_class}
859
860    # Since bytes_http is created in this function, we don't get the
861    # user-agent header from api_client's http automatically.
862    additional_headers = {
863        'user-agent': self.api_client.user_agent
864    }
865
866    try:
867      content_type = None
868      apitools_request = None
869      global_params = None
870      if not serialization_data:
871        # This is a new upload, set up initial upload state.
872        content_type = object_metadata.contentType
873        if not content_type:
874          content_type = DEFAULT_CONTENT_TYPE
875
876        if not preconditions:
877          preconditions = Preconditions()
878
879        apitools_request = apitools_messages.StorageObjectsInsertRequest(
880            bucket=object_metadata.bucket, object=object_metadata,
881            ifGenerationMatch=preconditions.gen_match,
882            ifMetagenerationMatch=preconditions.meta_gen_match,
883            predefinedAcl=predefined_acl)
884        global_params = apitools_messages.StandardQueryParameters()
885        if fields:
886          global_params.fields = ','.join(set(fields))
887
888      if apitools_strategy == apitools_transfer.SIMPLE_UPLOAD:
889        # One-shot upload.
890        apitools_upload = apitools_transfer.Upload(
891            upload_stream, content_type, total_size=size, auto_transfer=True,
892            num_retries=self.num_retries)
893        apitools_upload.strategy = apitools_strategy
894        apitools_upload.bytes_http = self.authorized_upload_http
895
896        return self.api_client.objects.Insert(
897            apitools_request,
898            upload=apitools_upload,
899            global_params=global_params)
900      else:  # Resumable upload.
901        return self._PerformResumableUpload(
902            upload_stream, self.authorized_upload_http, content_type, size,
903            serialization_data, apitools_strategy, apitools_request,
904            global_params, bytes_uploaded_container, tracker_callback,
905            additional_headers, progress_callback)
906    except TRANSLATABLE_APITOOLS_EXCEPTIONS, e:
907      not_found_exception = CreateNotFoundExceptionForObjectWrite(
908          self.provider, object_metadata.bucket)
909      self._TranslateExceptionAndRaise(e, bucket_name=object_metadata.bucket,
910                                       object_name=object_metadata.name,
911                                       not_found_exception=not_found_exception)
912
913  def _PerformResumableUpload(
914      self, upload_stream, authorized_upload_http, content_type, size,
915      serialization_data, apitools_strategy, apitools_request, global_params,
916      bytes_uploaded_container, tracker_callback, addl_headers,
917      progress_callback):
918    try:
919      if serialization_data:
920        # Resuming an existing upload.
921        apitools_upload = apitools_transfer.Upload.FromData(
922            upload_stream, serialization_data, self.api_client.http,
923            num_retries=self.num_retries)
924        apitools_upload.chunksize = GetJsonResumableChunkSize()
925        apitools_upload.bytes_http = authorized_upload_http
926      else:
927        # New resumable upload.
928        apitools_upload = apitools_transfer.Upload(
929            upload_stream, content_type, total_size=size,
930            chunksize=GetJsonResumableChunkSize(), auto_transfer=False,
931            num_retries=self.num_retries)
932        apitools_upload.strategy = apitools_strategy
933        apitools_upload.bytes_http = authorized_upload_http
934        self.api_client.objects.Insert(
935            apitools_request,
936            upload=apitools_upload,
937            global_params=global_params)
938      # Disable retries in apitools. We will handle them explicitly here.
939      apitools_upload.retry_func = (
940          apitools_http_wrapper.RethrowExceptionHandler)
941
942      # Disable apitools' default print callbacks.
943      def _NoOpCallback(unused_response, unused_upload_object):
944        pass
945
946      # If we're resuming an upload, apitools has at this point received
947      # from the server how many bytes it already has. Update our
948      # callback class with this information.
949      bytes_uploaded_container.bytes_transferred = apitools_upload.progress
950      if tracker_callback:
951        tracker_callback(json.dumps(apitools_upload.serialization_data))
952
953      retries = 0
954      last_progress_byte = apitools_upload.progress
955      while retries <= self.num_retries:
956        try:
957          # TODO: On retry, this will seek to the bytes that the server has,
958          # causing the hash to be recalculated. Make HashingFileUploadWrapper
959          # save a digest according to json_resumable_chunk_size.
960          if size:
961            # If size is known, we can send it all in one request and avoid
962            # making a round-trip per chunk.
963            http_response = apitools_upload.StreamMedia(
964                callback=_NoOpCallback, finish_callback=_NoOpCallback,
965                additional_headers=addl_headers)
966          else:
967            # Otherwise it's a streaming request and we need to ensure that we
968            # send the bytes in chunks so that we can guarantee that we never
969            # need to seek backwards more than our buffer (and also that the
970            # chunks are aligned to 256KB).
971            http_response = apitools_upload.StreamInChunks(
972                callback=_NoOpCallback, finish_callback=_NoOpCallback,
973                additional_headers=addl_headers)
974          processed_response = self.api_client.objects.ProcessHttpResponse(
975              self.api_client.objects.GetMethodConfig('Insert'), http_response)
976          if size is None and progress_callback:
977            # Make final progress callback; total size should now be known.
978            # This works around the fact the send function counts header bytes.
979            # However, this will make the progress appear to go slightly
980            # backwards at the end.
981            progress_callback(apitools_upload.total_size,
982                              apitools_upload.total_size)
983          return processed_response
984        except HTTP_TRANSFER_EXCEPTIONS, e:
985          apitools_http_wrapper.RebuildHttpConnections(
986              apitools_upload.bytes_http)
987          while retries <= self.num_retries:
988            try:
989              # TODO: Simulate the refresh case in tests. Right now, our
990              # mocks are not complex enough to simulate a failure.
991              apitools_upload.RefreshResumableUploadState()
992              start_byte = apitools_upload.progress
993              bytes_uploaded_container.bytes_transferred = start_byte
994              break
995            except HTTP_TRANSFER_EXCEPTIONS, e2:
996              apitools_http_wrapper.RebuildHttpConnections(
997                  apitools_upload.bytes_http)
998              retries += 1
999              if retries > self.num_retries:
1000                raise ResumableUploadException(
1001                    'Transfer failed after %d retries. Final exception: %s' %
1002                    (self.num_retries, e2))
1003              time.sleep(
1004                  CalculateWaitForRetry(retries, max_wait=self.max_retry_wait))
1005          if start_byte > last_progress_byte:
1006            # We've made progress, so allow a fresh set of retries.
1007            last_progress_byte = start_byte
1008            retries = 0
1009          else:
1010            retries += 1
1011            if retries > self.num_retries:
1012              raise ResumableUploadException(
1013                  'Transfer failed after %d retries. Final exception: %s' %
1014                  (self.num_retries, unicode(e).encode(UTF8)))
1015            time.sleep(
1016                CalculateWaitForRetry(retries, max_wait=self.max_retry_wait))
1017          if self.logger.isEnabledFor(logging.DEBUG):
1018            self.logger.debug(
1019                'Retrying upload from byte %s after exception: %s. Trace: %s',
1020                start_byte, unicode(e).encode(UTF8), traceback.format_exc())
1021    except TRANSLATABLE_APITOOLS_EXCEPTIONS, e:
1022      resumable_ex = self._TranslateApitoolsResumableUploadException(e)
1023      if resumable_ex:
1024        raise resumable_ex
1025      else:
1026        raise
1027
1028  def UploadObject(self, upload_stream, object_metadata, canned_acl=None,
1029                   size=None, preconditions=None, progress_callback=None,
1030                   provider=None, fields=None):
1031    """See CloudApi class for function doc strings."""
1032    return self._UploadObject(
1033        upload_stream, object_metadata, canned_acl=canned_acl,
1034        size=size, preconditions=preconditions,
1035        progress_callback=progress_callback, fields=fields,
1036        apitools_strategy=apitools_transfer.SIMPLE_UPLOAD)
1037
1038  def UploadObjectStreaming(self, upload_stream, object_metadata,
1039                            canned_acl=None, preconditions=None,
1040                            progress_callback=None, provider=None,
1041                            fields=None):
1042    """See CloudApi class for function doc strings."""
1043    # Streaming indicated by not passing a size.
1044    # Resumable capabilities are present up to the resumable chunk size using
1045    # a buffered stream.
1046    return self._UploadObject(
1047        upload_stream, object_metadata, canned_acl=canned_acl,
1048        preconditions=preconditions, progress_callback=progress_callback,
1049        fields=fields, apitools_strategy=apitools_transfer.RESUMABLE_UPLOAD,
1050        total_size=None)
1051
1052  def UploadObjectResumable(
1053      self, upload_stream, object_metadata, canned_acl=None, preconditions=None,
1054      provider=None, fields=None, size=None, serialization_data=None,
1055      tracker_callback=None, progress_callback=None):
1056    """See CloudApi class for function doc strings."""
1057    return self._UploadObject(
1058        upload_stream, object_metadata, canned_acl=canned_acl,
1059        preconditions=preconditions, fields=fields, size=size,
1060        serialization_data=serialization_data,
1061        tracker_callback=tracker_callback, progress_callback=progress_callback,
1062        apitools_strategy=apitools_transfer.RESUMABLE_UPLOAD)
1063
1064  def CopyObject(self, src_obj_metadata, dst_obj_metadata, src_generation=None,
1065                 canned_acl=None, preconditions=None, progress_callback=None,
1066                 max_bytes_per_call=None, provider=None, fields=None):
1067    """See CloudApi class for function doc strings."""
1068    ValidateDstObjectMetadata(dst_obj_metadata)
1069    predefined_acl = None
1070    if canned_acl:
1071      predefined_acl = (
1072          apitools_messages.StorageObjectsRewriteRequest.
1073          DestinationPredefinedAclValueValuesEnum(
1074              self._ObjectCannedAclToPredefinedAcl(canned_acl)))
1075
1076    if src_generation:
1077      src_generation = long(src_generation)
1078
1079    if not preconditions:
1080      preconditions = Preconditions()
1081
1082    projection = (apitools_messages.StorageObjectsRewriteRequest.
1083                  ProjectionValueValuesEnum.full)
1084    global_params = apitools_messages.StandardQueryParameters()
1085    if fields:
1086      # Rewrite returns the resultant object under the 'resource' field.
1087      new_fields = set(['done', 'objectSize', 'rewriteToken',
1088                        'totalBytesRewritten'])
1089      for field in fields:
1090        new_fields.add('resource/' + field)
1091      global_params.fields = ','.join(set(new_fields))
1092
1093    # Check to see if we are resuming a rewrite.
1094    tracker_file_name = GetRewriteTrackerFilePath(
1095        src_obj_metadata.bucket, src_obj_metadata.name, dst_obj_metadata.bucket,
1096        dst_obj_metadata.name, 'JSON')
1097    rewrite_params_hash = HashRewriteParameters(
1098        src_obj_metadata, dst_obj_metadata, projection,
1099        src_generation=src_generation, gen_match=preconditions.gen_match,
1100        meta_gen_match=preconditions.meta_gen_match,
1101        canned_acl=predefined_acl, fields=global_params.fields,
1102        max_bytes_per_call=max_bytes_per_call)
1103    resume_rewrite_token = ReadRewriteTrackerFile(tracker_file_name,
1104                                                  rewrite_params_hash)
1105
1106    progress_cb_with_backoff = None
1107    try:
1108      last_bytes_written = 0L
1109      while True:
1110        apitools_request = apitools_messages.StorageObjectsRewriteRequest(
1111            sourceBucket=src_obj_metadata.bucket,
1112            sourceObject=src_obj_metadata.name,
1113            destinationBucket=dst_obj_metadata.bucket,
1114            destinationObject=dst_obj_metadata.name,
1115            projection=projection, object=dst_obj_metadata,
1116            sourceGeneration=src_generation,
1117            ifGenerationMatch=preconditions.gen_match,
1118            ifMetagenerationMatch=preconditions.meta_gen_match,
1119            destinationPredefinedAcl=predefined_acl,
1120            rewriteToken=resume_rewrite_token,
1121            maxBytesRewrittenPerCall=max_bytes_per_call)
1122        rewrite_response = self.api_client.objects.Rewrite(
1123            apitools_request, global_params=global_params)
1124        bytes_written = long(rewrite_response.totalBytesRewritten)
1125        if progress_callback and not progress_cb_with_backoff:
1126          progress_cb_with_backoff = ProgressCallbackWithBackoff(
1127              long(rewrite_response.objectSize), progress_callback)
1128        if progress_cb_with_backoff:
1129          progress_cb_with_backoff.Progress(
1130              bytes_written - last_bytes_written)
1131
1132        if rewrite_response.done:
1133          break
1134        elif not resume_rewrite_token:
1135          # Save the token and make a tracker file if they don't already exist.
1136          resume_rewrite_token = rewrite_response.rewriteToken
1137          WriteRewriteTrackerFile(tracker_file_name, rewrite_params_hash,
1138                                  rewrite_response.rewriteToken)
1139        last_bytes_written = bytes_written
1140
1141      DeleteTrackerFile(tracker_file_name)
1142      return rewrite_response.resource
1143    except TRANSLATABLE_APITOOLS_EXCEPTIONS, e:
1144      not_found_exception = CreateNotFoundExceptionForObjectWrite(
1145          self.provider, dst_obj_metadata.bucket, src_provider=self.provider,
1146          src_bucket_name=src_obj_metadata.bucket,
1147          src_object_name=src_obj_metadata.name, src_generation=src_generation)
1148      self._TranslateExceptionAndRaise(e, bucket_name=dst_obj_metadata.bucket,
1149                                       object_name=dst_obj_metadata.name,
1150                                       not_found_exception=not_found_exception)
1151
1152  def DeleteObject(self, bucket_name, object_name, preconditions=None,
1153                   generation=None, provider=None):
1154    """See CloudApi class for function doc strings."""
1155    if not preconditions:
1156      preconditions = Preconditions()
1157
1158    if generation:
1159      generation = long(generation)
1160
1161    apitools_request = apitools_messages.StorageObjectsDeleteRequest(
1162        bucket=bucket_name, object=object_name, generation=generation,
1163        ifGenerationMatch=preconditions.gen_match,
1164        ifMetagenerationMatch=preconditions.meta_gen_match)
1165    try:
1166      return self.api_client.objects.Delete(apitools_request)
1167    except TRANSLATABLE_APITOOLS_EXCEPTIONS, e:
1168      self._TranslateExceptionAndRaise(e, bucket_name=bucket_name,
1169                                       object_name=object_name,
1170                                       generation=generation)
1171
1172  def ComposeObject(self, src_objs_metadata, dst_obj_metadata,
1173                    preconditions=None, provider=None, fields=None):
1174    """See CloudApi class for function doc strings."""
1175    ValidateDstObjectMetadata(dst_obj_metadata)
1176
1177    dst_obj_name = dst_obj_metadata.name
1178    dst_obj_metadata.name = None
1179    dst_bucket_name = dst_obj_metadata.bucket
1180    dst_obj_metadata.bucket = None
1181    if not dst_obj_metadata.contentType:
1182      dst_obj_metadata.contentType = DEFAULT_CONTENT_TYPE
1183
1184    if not preconditions:
1185      preconditions = Preconditions()
1186
1187    global_params = apitools_messages.StandardQueryParameters()
1188    if fields:
1189      global_params.fields = ','.join(set(fields))
1190
1191    src_objs_compose_request = apitools_messages.ComposeRequest(
1192        sourceObjects=src_objs_metadata, destination=dst_obj_metadata)
1193
1194    apitools_request = apitools_messages.StorageObjectsComposeRequest(
1195        composeRequest=src_objs_compose_request,
1196        destinationBucket=dst_bucket_name,
1197        destinationObject=dst_obj_name,
1198        ifGenerationMatch=preconditions.gen_match,
1199        ifMetagenerationMatch=preconditions.meta_gen_match)
1200    try:
1201      return self.api_client.objects.Compose(apitools_request,
1202                                             global_params=global_params)
1203    except TRANSLATABLE_APITOOLS_EXCEPTIONS, e:
1204      # We can't be sure which object was missing in the 404 case.
1205      if isinstance(e, apitools_exceptions.HttpError) and e.status_code == 404:
1206        raise NotFoundException('One of the source objects does not exist.')
1207      else:
1208        self._TranslateExceptionAndRaise(e)
1209
1210  def WatchBucket(self, bucket_name, address, channel_id, token=None,
1211                  provider=None, fields=None):
1212    """See CloudApi class for function doc strings."""
1213    projection = (apitools_messages.StorageObjectsWatchAllRequest
1214                  .ProjectionValueValuesEnum.full)
1215
1216    channel = apitools_messages.Channel(address=address, id=channel_id,
1217                                        token=token, type='WEB_HOOK')
1218
1219    apitools_request = apitools_messages.StorageObjectsWatchAllRequest(
1220        bucket=bucket_name, channel=channel, projection=projection)
1221
1222    global_params = apitools_messages.StandardQueryParameters()
1223    if fields:
1224      global_params.fields = ','.join(set(fields))
1225
1226    try:
1227      return self.api_client.objects.WatchAll(apitools_request,
1228                                              global_params=global_params)
1229    except TRANSLATABLE_APITOOLS_EXCEPTIONS, e:
1230      self._TranslateExceptionAndRaise(e, bucket_name=bucket_name)
1231
1232  def StopChannel(self, channel_id, resource_id, provider=None):
1233    """See CloudApi class for function doc strings."""
1234    channel = apitools_messages.Channel(id=channel_id, resourceId=resource_id)
1235    try:
1236      self.api_client.channels.Stop(channel)
1237    except TRANSLATABLE_APITOOLS_EXCEPTIONS, e:
1238      self._TranslateExceptionAndRaise(e)
1239
1240  def _BucketCannedAclToPredefinedAcl(self, canned_acl_string):
1241    """Translates the input string to a bucket PredefinedAcl string.
1242
1243    Args:
1244      canned_acl_string: Canned ACL string.
1245
1246    Returns:
1247      String that can be used as a query parameter with the JSON API. This
1248      corresponds to a flavor of *PredefinedAclValueValuesEnum and can be
1249      used as input to apitools requests that affect bucket access controls.
1250    """
1251    # XML : JSON
1252    translation_dict = {
1253        None: None,
1254        'authenticated-read': 'authenticatedRead',
1255        'private': 'private',
1256        'project-private': 'projectPrivate',
1257        'public-read': 'publicRead',
1258        'public-read-write': 'publicReadWrite'
1259    }
1260    if canned_acl_string in translation_dict:
1261      return translation_dict[canned_acl_string]
1262    raise ArgumentException('Invalid canned ACL %s' % canned_acl_string)
1263
1264  def _ObjectCannedAclToPredefinedAcl(self, canned_acl_string):
1265    """Translates the input string to an object PredefinedAcl string.
1266
1267    Args:
1268      canned_acl_string: Canned ACL string.
1269
1270    Returns:
1271      String that can be used as a query parameter with the JSON API. This
1272      corresponds to a flavor of *PredefinedAclValueValuesEnum and can be
1273      used as input to apitools requests that affect object access controls.
1274    """
1275    # XML : JSON
1276    translation_dict = {
1277        None: None,
1278        'authenticated-read': 'authenticatedRead',
1279        'bucket-owner-read': 'bucketOwnerRead',
1280        'bucket-owner-full-control': 'bucketOwnerFullControl',
1281        'private': 'private',
1282        'project-private': 'projectPrivate',
1283        'public-read': 'publicRead'
1284    }
1285    if canned_acl_string in translation_dict:
1286      return translation_dict[canned_acl_string]
1287    raise ArgumentException('Invalid canned ACL %s' % canned_acl_string)
1288
1289  def _TranslateExceptionAndRaise(self, e, bucket_name=None, object_name=None,
1290                                  generation=None, not_found_exception=None):
1291    """Translates an HTTP exception and raises the translated or original value.
1292
1293    Args:
1294      e: Any Exception.
1295      bucket_name: Optional bucket name in request that caused the exception.
1296      object_name: Optional object name in request that caused the exception.
1297      generation: Optional generation in request that caused the exception.
1298      not_found_exception: Optional exception to raise in the not-found case.
1299
1300    Raises:
1301      Translated CloudApi exception, or the original exception if it was not
1302      translatable.
1303    """
1304    translated_exception = self._TranslateApitoolsException(
1305        e, bucket_name=bucket_name, object_name=object_name,
1306        generation=generation, not_found_exception=not_found_exception)
1307    if translated_exception:
1308      raise translated_exception
1309    else:
1310      raise
1311
1312  def _GetMessageFromHttpError(self, http_error):
1313    if isinstance(http_error, apitools_exceptions.HttpError):
1314      if getattr(http_error, 'content', None):
1315        try:
1316          json_obj = json.loads(http_error.content)
1317          if 'error' in json_obj and 'message' in json_obj['error']:
1318            return json_obj['error']['message']
1319        except Exception:  # pylint: disable=broad-except
1320          # If we couldn't decode anything, just leave the message as None.
1321          pass
1322
1323  def _TranslateApitoolsResumableUploadException(self, e):
1324    if isinstance(e, apitools_exceptions.HttpError):
1325      message = self._GetMessageFromHttpError(e)
1326      if (e.status_code == 503 and
1327          self.http.disable_ssl_certificate_validation):
1328        return ServiceException(_VALIDATE_CERTIFICATES_503_MESSAGE,
1329                                status=e.status_code)
1330      elif e.status_code >= 500:
1331        return ResumableUploadException(
1332            message or 'Server Error', status=e.status_code)
1333      elif e.status_code == 429:
1334        return ResumableUploadException(
1335            message or 'Too Many Requests', status=e.status_code)
1336      elif e.status_code == 410:
1337        return ResumableUploadStartOverException(
1338            message or 'Bad Request', status=e.status_code)
1339      elif e.status_code == 404:
1340        return ResumableUploadStartOverException(
1341            message or 'Bad Request', status=e.status_code)
1342      elif e.status_code >= 400:
1343        return ResumableUploadAbortException(
1344            message or 'Bad Request', status=e.status_code)
1345    if isinstance(e, apitools_exceptions.StreamExhausted):
1346      return ResumableUploadAbortException(e.message)
1347    if (isinstance(e, apitools_exceptions.TransferError) and
1348        ('Aborting transfer' in e.message or
1349         'Not enough bytes in stream' in e.message or
1350         'additional bytes left in stream' in e.message)):
1351      return ResumableUploadAbortException(e.message)
1352
1353  def _TranslateApitoolsException(self, e, bucket_name=None, object_name=None,
1354                                  generation=None, not_found_exception=None):
1355    """Translates apitools exceptions into their gsutil Cloud Api equivalents.
1356
1357    Args:
1358      e: Any exception in TRANSLATABLE_APITOOLS_EXCEPTIONS.
1359      bucket_name: Optional bucket name in request that caused the exception.
1360      object_name: Optional object name in request that caused the exception.
1361      generation: Optional generation in request that caused the exception.
1362      not_found_exception: Optional exception to raise in the not-found case.
1363
1364    Returns:
1365      CloudStorageApiServiceException for translatable exceptions, None
1366      otherwise.
1367    """
1368    if isinstance(e, apitools_exceptions.HttpError):
1369      message = self._GetMessageFromHttpError(e)
1370      if e.status_code == 400:
1371        # It is possible that the Project ID is incorrect.  Unfortunately the
1372        # JSON API does not give us much information about what part of the
1373        # request was bad.
1374        return BadRequestException(message or 'Bad Request',
1375                                   status=e.status_code)
1376      elif e.status_code == 401:
1377        if 'Login Required' in str(e):
1378          return AccessDeniedException(
1379              message or 'Access denied: login required.',
1380              status=e.status_code)
1381      elif e.status_code == 403:
1382        if 'The account for the specified project has been disabled' in str(e):
1383          return AccessDeniedException(message or 'Account disabled.',
1384                                       status=e.status_code)
1385        elif 'Daily Limit for Unauthenticated Use Exceeded' in str(e):
1386          return AccessDeniedException(
1387              message or 'Access denied: quota exceeded. '
1388              'Is your project ID valid?',
1389              status=e.status_code)
1390        elif 'The bucket you tried to delete was not empty.' in str(e):
1391          return NotEmptyException('BucketNotEmpty (%s)' % bucket_name,
1392                                   status=e.status_code)
1393        elif ('The bucket you tried to create requires domain ownership '
1394              'verification.' in str(e)):
1395          return AccessDeniedException(
1396              'The bucket you tried to create requires domain ownership '
1397              'verification. Please see '
1398              'https://developers.google.com/storage/docs/bucketnaming'
1399              '?hl=en#verification for more details.', status=e.status_code)
1400        elif 'User Rate Limit Exceeded' in str(e):
1401          return AccessDeniedException('Rate limit exceeded. Please retry this '
1402                                       'request later.', status=e.status_code)
1403        elif 'Access Not Configured' in str(e):
1404          return AccessDeniedException(
1405              'Access Not Configured. Please go to the Google Developers '
1406              'Console (https://cloud.google.com/console#/project) for your '
1407              'project, select APIs and Auth and enable the '
1408              'Google Cloud Storage JSON API.',
1409              status=e.status_code)
1410        else:
1411          return AccessDeniedException(message or e.message,
1412                                       status=e.status_code)
1413      elif e.status_code == 404:
1414        if not_found_exception:
1415          # The exception is pre-constructed prior to translation; the HTTP
1416          # status code isn't available at that time.
1417          setattr(not_found_exception, 'status', e.status_code)
1418          return not_found_exception
1419        elif bucket_name:
1420          if object_name:
1421            return CreateObjectNotFoundException(e.status_code, self.provider,
1422                                                 bucket_name, object_name,
1423                                                 generation=generation)
1424          return CreateBucketNotFoundException(e.status_code, self.provider,
1425                                               bucket_name)
1426        return NotFoundException(e.message, status=e.status_code)
1427
1428      elif e.status_code == 409 and bucket_name:
1429        if 'The bucket you tried to delete was not empty.' in str(e):
1430          return NotEmptyException('BucketNotEmpty (%s)' % bucket_name,
1431                                   status=e.status_code)
1432        return ServiceException(
1433            'Bucket %s already exists.' % bucket_name, status=e.status_code)
1434      elif e.status_code == 412:
1435        return PreconditionException(message, status=e.status_code)
1436      elif (e.status_code == 503 and
1437            not self.http.disable_ssl_certificate_validation):
1438        return ServiceException(_VALIDATE_CERTIFICATES_503_MESSAGE,
1439                                status=e.status_code)
1440      return ServiceException(message, status=e.status_code)
1441    elif isinstance(e, apitools_exceptions.TransferInvalidError):
1442      return ServiceException('Transfer invalid (possible encoding error: %s)'
1443                              % str(e))
1444