# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import json
import logging
import os

from catapult_base import cloud_storage
from dependency_manager import archive_info
from dependency_manager import cloud_storage_info
from dependency_manager import dependency_info
from dependency_manager import exceptions
from dependency_manager import local_path_info
from dependency_manager import uploader


class BaseConfig(object):
  """A basic config class for use with the DependencyManager.

  Initiated with a json file in the following format:

  { "config_type": "BaseConfig",
    "dependencies": {
      "dep_name1": {
        "cloud_storage_base_folder": "base_folder1",
        "cloud_storage_bucket": "bucket1",
        "file_info": {
          "platform1": {
            "cloud_storage_hash": "hash_for_platform1",
            "download_path": "download_path111",
            "version_in_cs": "1.11.1.11.",
            "local_paths": ["local_path1110", "local_path1111"]
          },
          "platform2": {
            "cloud_storage_hash": "hash_for_platform2",
            "download_path": "download_path2",
            "local_paths": ["local_path20", "local_path21"]
          },
          ...
        }
      },
      "dependency_name_2": {
        ...
      },
      ...
    }
  }

  Required fields: "dependencies" and "config_type".
  Note that config_type must be "BaseConfig".

  Assumptions:
    "cloud_storage_base_folder" is a top level folder in the given
    "cloud_storage_bucket" where all of the dependency files are stored
    at "dependency_name"_"cloud_storage_hash".

    "download_path" and all paths in "local_paths" are relative to the
    config file's location.

    All or none of the following cloud storage related fields must be
    included in each platform dictionary:
    "cloud_storage_hash", "download_path", "cs_remote_path"

    "version_in_cs" is an optional cloud storage field, but is dependent
    on the above cloud storage related fields.

  Also note that platform names are often of the form os_architecture.
  Ex: "win_AMD64"

  More information on the fields can be found in dependencies_info.py
  """

  def __init__(self, file_path, writable=False):
    """ Initialize a BaseConfig for the DependencyManager.

    Args:
      writable: False: This config will be used to lookup information.
                True: This config will be used to update information.

      file_path: Path to a file containing a json dictionary in the expected
                 json format for this config class. Base format expected:

                 { "config_type": config_type,
                   "dependencies": dependencies_dict }

                 config_type: must match the return value of GetConfigType.
                 dependencies: A dictionary with the information needed to
                     create dependency_info instances for the given
                     dependencies.

                 See dependency_info.py for more information.

    Raises:
      ValueError: If |file_path| is falsy, or if the file's config_type does
          not match GetConfigType().
      EmptyConfigError: If the file does not exist (and the config is not
          writable), or exists but holds no data.
    """
    self._config_path = file_path
    self._writable = writable
    # Upload jobs queued by AddCloudStorageDependencyUpdateJob and executed
    # (or rolled back) by ExecuteUpdateJobs.
    self._pending_uploads = []
    if not self._config_path:
      raise ValueError('Must supply config file path.')
    if not os.path.exists(self._config_path):
      if not writable:
        raise exceptions.EmptyConfigError(file_path)
      # A writable config may start from nothing; create an empty file on
      # disk so later _IsDirty() comparisons have a baseline to read.
      self._config_data = {}
      self._WriteConfigToFile(self._config_path, dependencies=self._config_data)
    else:
      with open(file_path, 'r') as f:
        config_data = json.load(f)
      if not config_data:
        raise exceptions.EmptyConfigError(file_path)
      config_type = config_data.pop('config_type', None)
      if config_type != self.GetConfigType():
        raise ValueError(
            'Supplied config_type (%s) is not the expected type (%s) in file '
            '%s' % (config_type, self.GetConfigType(), file_path))
      self._config_data = config_data.get('dependencies', {})

  def IterDependencyInfo(self):
    """ Yields a DependencyInfo for each dependency/platform pair.

    Raises:
      ReadWriteError: If called when the config is writable.
      ConfigError: If any dependency has cloud storage info on a platform but
          is missing the cloud storage hash.
      ValueError: If any of the dependencies contain partial information for
          downloading from cloud_storage. (See dependency_info.py)
    """
    if self._writable:
      raise exceptions.ReadWriteError(
          'Trying to read dependency info from a writable config. File for '
          'config: %s' % self._config_path)
    # All relative paths in the config are resolved against the config file's
    # own directory, per the class docstring.
    base_path = os.path.dirname(self._config_path)
    for dependency in self._config_data:
      dependency_dict = self._config_data.get(dependency)
      platforms_dict = dependency_dict.get('file_info', {})
      for platform in platforms_dict:
        platform_info = platforms_dict.get(platform)

        local_info = None
        local_paths = platform_info.get('local_paths', [])
        if local_paths:
          paths = []
          for path in local_paths:
            path = self._FormatPath(path)
            paths.append(os.path.abspath(os.path.join(base_path, path)))
          local_info = local_path_info.LocalPathInfo(paths)

        cs_info = None
        cs_bucket = dependency_dict.get('cloud_storage_bucket')
        cs_base_folder = dependency_dict.get('cloud_storage_base_folder', '')
        download_path = platform_info.get('download_path')
        if download_path:
          download_path = self._FormatPath(download_path)
          download_path = os.path.abspath(
              os.path.join(base_path, download_path))

          cs_hash = platform_info.get('cloud_storage_hash')
          if not cs_hash:
            # Fixed: the message's %s placeholders were previously passed as
            # extra constructor args instead of being formatted in.
            raise exceptions.ConfigError(
                'Dependency %s has cloud storage info on platform %s, but is '
                'missing a cloud storage hash.' % (dependency, platform))
          cs_remote_path = self._CloudStorageRemotePath(
              dependency, cs_hash, cs_base_folder)
          version_in_cs = platform_info.get('version_in_cs')

          zip_info = None
          path_within_archive = platform_info.get('path_within_archive')
          if path_within_archive:
            # Unzip next to the download location, into a directory that is
            # unique per (dependency, platform, hash) so stale unzips of an
            # older hash are never mistaken for the current one.
            unzip_path = os.path.abspath(
                os.path.join(os.path.dirname(download_path),
                             '%s_%s_%s' % (dependency, platform, cs_hash)))
            zip_info = archive_info.ArchiveInfo(
                download_path, unzip_path, path_within_archive)

          cs_info = cloud_storage_info.CloudStorageInfo(
              cs_bucket, cs_hash, download_path, cs_remote_path,
              version_in_cs=version_in_cs, archive_info=zip_info)

        dep_info = dependency_info.DependencyInfo(
            dependency, platform, self._config_path,
            local_path_info=local_info, cloud_storage_info=cs_info)
        yield dep_info

  @classmethod
  def GetConfigType(cls):
    """Return the config_type string this class expects in config files."""
    return 'BaseConfig'

  @property
  def config_path(self):
    """Path of the json file backing this config."""
    return self._config_path

  def AddCloudStorageDependencyUpdateJob(
      self, dependency, platform, dependency_path, version=None,
      execute_job=True):
    """Update the file downloaded from cloud storage for a dependency/platform.

    Upload a new file to cloud storage for the given dependency and platform
    pair and update the cloud storage hash and the version for the given pair.

    Example usage:
      The following should update the default platform for 'dep_name':
      AddCloudStorageDependencyUpdateJob(
          'dep_name', 'default', 'path/to/file')

      The following should update both the mac and win platforms for
      'dep_name', or neither if either update fails:
      AddCloudStorageDependencyUpdateJob(
          'dep_name', 'mac_x86_64', 'path/to/mac/file', execute_job=False)
      AddCloudStorageDependencyUpdateJob(
          'dep_name', 'win_AMD64', 'path/to/win/file', execute_job=False)
      ExecuteUpdateJobs()

    Args:
      dependency: The dependency to update.
      platform: The platform to update the dependency info for.
      dependency_path: Path to the new dependency to be used.
      version: Version of the updated dependency, for checking future updates
          against.
      execute_job: True if the config should be written to disk and the file
          should be uploaded to cloud storage after the update. False if
          multiple updates should be performed atomically. Must call
          ExecuteUpdateJobs after all non-executed jobs are added to complete
          the update.

    Raises:
      ReadWriteError: If the config was not initialized as writable, or if
          |execute_job| is True but the config has update jobs still pending
          execution.
      ValueError: If no information exists in the config for |dependency| on
          |platform|.
    """
    self._ValidateIsConfigUpdatable(
        execute_job=execute_job, dependency=dependency, platform=platform)
    cs_hash = cloud_storage.CalculateHash(dependency_path)
    if version:
      self._SetPlatformData(dependency, platform, 'version_in_cs', version)
    self._SetPlatformData(dependency, platform, 'cloud_storage_hash', cs_hash)

    cs_base_folder = self._GetPlatformData(
        dependency, platform, 'cloud_storage_base_folder')
    cs_bucket = self._GetPlatformData(
        dependency, platform, 'cloud_storage_bucket')
    cs_remote_path = self._CloudStorageRemotePath(
        dependency, cs_hash, cs_base_folder)
    self._pending_uploads.append(uploader.CloudStorageUploader(
        cs_bucket, cs_remote_path, dependency_path))
    if execute_job:
      self.ExecuteUpdateJobs()

  def ExecuteUpdateJobs(self, force=False):
    """Write all config changes to the config_path specified in __init__.

    Upload all files pending upload and then write the updated config to
    file. Attempt to remove all uploaded files on failure.

    Args:
      force: True if files should be uploaded to cloud storage even if a
          file already exists in the upload location.

    Returns:
      True: if the config was dirty and the upload succeeded.
      False: if the config was not dirty.

    Raises:
      CloudStorageUploadConflictError: If |force| is False and the potential
          upload location of a file already exists.
      CloudStorageError: If copying an existing file to the backup location
          or uploading a new file fails.
    """
    self._ValidateIsConfigUpdatable()
    if not self._IsDirty():
      logging.info('ExecuteUpdateJobs called on clean config')
      return False
    if not self._pending_uploads:
      logging.debug('No files needing upload.')
    else:
      try:
        for item_pending_upload in self._pending_uploads:
          item_pending_upload.Upload(force)
        self._WriteConfigToFile(self._config_path, self._config_data)
        self._pending_uploads = []
      except:
        # Attempt to rollback the update in any instance of failure, even user
        # interrupt via Ctrl+C; but don't consume the exception.
        logging.error('Update failed, attempting to roll it back.')
        for upload_item in reversed(self._pending_uploads):
          upload_item.Rollback()
        raise
    return True

  def GetVersion(self, dependency, platform):
    """Return the Version information for the given dependency."""
    return self._GetPlatformData(
        dependency, platform, data_type='version_in_cs')

  def _IsDirty(self):
    """Return True if in-memory dependency data differs from the file on disk."""
    with open(self._config_path, 'r') as fstream:
      curr_config_data = json.load(fstream)
    curr_config_data = curr_config_data.get('dependencies', {})
    return self._config_data != curr_config_data

  def _SetPlatformData(self, dependency, platform, data_type, data):
    """Set one field for a dependency/platform pair (config must be writable).

    Bucket and base folder live at the dependency level; all other fields are
    stored per-platform under 'file_info'.

    Raises:
      ReadWriteError: If the config is not writable.
      ValueError: If there is no entry for |platform| under |dependency|.
    """
    self._ValidateIsConfigWritable()
    dependency_dict = self._config_data.get(dependency, {})
    platform_dict = dependency_dict.get('file_info', {}).get(platform)
    if not platform_dict:
      raise ValueError('No platform data for platform %s on dependency %s' %
                       (platform, dependency))
    if (data_type == 'cloud_storage_bucket' or
        data_type == 'cloud_storage_base_folder'):
      self._config_data[dependency][data_type] = data
    else:
      self._config_data[dependency]['file_info'][platform][data_type] = data

  def _GetPlatformData(self, dependency, platform, data_type=None):
    """Return one field (or the whole platform dict) for a dependency/platform.

    Raises:
      ValueError: If |dependency| is not in the config, or has no entry for
          |platform|.
    """
    dependency_dict = self._config_data.get(dependency, {})
    if not dependency_dict:
      raise ValueError('Dependency %s is not in config.' % dependency)
    platform_dict = dependency_dict.get('file_info', {}).get(platform)
    if not platform_dict:
      raise ValueError('No platform data for platform %s on dependency %s' %
                       (platform, dependency))
    if data_type:
      # Bucket and base folder are dependency-level fields, not per-platform.
      if (data_type == 'cloud_storage_bucket' or
          data_type == 'cloud_storage_base_folder'):
        return dependency_dict.get(data_type)
      return platform_dict.get(data_type)
    return platform_dict

  def _ValidateIsConfigUpdatable(
      self, execute_job=False, dependency=None, platform=None):
    """Raise unless the config is writable and the requested update is valid.

    Raises:
      ReadWriteError: If the config is read-only, or |execute_job| is True
          while earlier changes are still pending.
      ValueError: If |dependency| (or its |platform| data) is missing.
    """
    self._ValidateIsConfigWritable()
    if self._IsDirty() and execute_job:
      # Fixed: the two implicitly-concatenated literals previously rendered
      # as "...call withoutusing..." (missing space).
      raise exceptions.ReadWriteError(
          'A change has already been made to this config. Either call without '
          'using the execute_job option or first call ExecuteUpdateJobs().')
    if dependency and not self._config_data.get(dependency):
      raise ValueError('Cannot update information because dependency %s does '
                       'not exist.' % dependency)
    if platform and not self._GetPlatformData(dependency, platform):
      raise ValueError('No dependency info is available for the given '
                       'dependency: %s' % dependency)

  def _ValidateIsConfigWritable(self):
    """Raise ReadWriteError unless this config was created with writable=True."""
    if not self._writable:
      raise exceptions.ReadWriteError(
          'Trying to update the information from a read-only config. '
          'File for config: %s' % self._config_path)

  @staticmethod
  def _CloudStorageRemotePath(dependency, cs_hash, cs_base_folder):
    """Return the remote path '<base_folder>/<dependency>_<hash>' in the bucket.

    The base folder component is omitted when |cs_base_folder| is falsy.
    """
    cs_remote_file = '%s_%s' % (dependency, cs_hash)
    cs_remote_path = cs_remote_file if not cs_base_folder else (
        '%s/%s' % (cs_base_folder, cs_remote_file))
    return cs_remote_path

  @classmethod
  def _FormatPath(cls, file_path):
    """ Format |file_path| for the current file system.

    We may be downloading files for another platform, so paths must be
    downloadable on the current system.
    """
    if not file_path:
      return file_path
    if os.path.sep != '\\':
      return file_path.replace('\\', os.path.sep)
    elif os.path.sep != '/':
      return file_path.replace('/', os.path.sep)
    return file_path

  @classmethod
  def _WriteConfigToFile(cls, file_path, dependencies=None):
    """Serialize |dependencies| (plus the config_type) as json to |file_path|.

    Creates the parent directory if needed. Returns the written dict.
    """
    json_dict = cls._GetJsonDict(dependencies)
    file_dir = os.path.dirname(file_path)
    if not os.path.exists(file_dir):
      os.makedirs(file_dir)
    with open(file_path, 'w') as outfile:
      json.dump(
          json_dict, outfile, indent=2, sort_keys=True, separators=(',', ': '))
    return json_dict

  @classmethod
  def _GetJsonDict(cls, dependencies=None):
    """Wrap |dependencies| in the on-disk config structure for this class."""
    dependencies = dependencies or {}
    json_dict = {'config_type': cls.GetConfigType(),
                 'dependencies': dependencies}
    return json_dict