#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
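"""Manage the files that Telemetry mirrors in Cloud Storage.

Data files are tracked in the source tree by small .sha1 stamp files; the
actual contents live in one of several Cloud Storage buckets. The commands
defined below list which bucket each file is in (Ls), move files between
buckets (Mv), delete them (Rm), or upload new content and refresh the
.sha1 stamp (Upload).
"""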

import logging
import os
import subprocess
import sys

from telemetry.core import util
from telemetry.internal.util import command_line

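# py_utils (which provides the cloud_storage module) lives in
# catapult/common/py_utils; put it on sys.path before importing it.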
sys.path.insert(1, os.path.abspath(os.path.join(
    util.GetCatapultDir(), 'common', 'py_utils')))
from py_utils import cloud_storage


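# Reverse of cloud_storage.BUCKET_ALIASES: maps each real bucket name back to
# its short alias, so Ls can print the friendlier names.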
BUCKETS = {bucket: easy_bucket_name for easy_bucket_name, bucket
           in cloud_storage.BUCKET_ALIASES.iteritems()}


def _GetPaths(path):
  """Returns the data file path and the .sha1 hash file path for |path|.

  Works whether |path| is the data file itself or its .sha1 stamp: e.g. both
  'foo.wpr' and 'foo.wpr.sha1' yield ('foo.wpr', 'foo.wpr.sha1').
  """
  root, ext = os.path.splitext(path)
  if ext == '.sha1':
    file_path = root
    hash_path = path
  else:
    file_path = path
    hash_path = path + '.sha1'
  return file_path, hash_path


def _FindFilesInCloudStorage(files):
  """Returns a dict mapping each file to the buckets that contain it."""
  # Preprocessing: get the contents of all buckets.
  bucket_contents = {}
  for bucket in BUCKETS:
    try:
      bucket_contents[bucket] = cloud_storage.List(bucket)
    except (cloud_storage.PermissionError, cloud_storage.CredentialsError):
      # Skip buckets we can't list (e.g. no credentials); files that exist
      # only in those buckets will not be matched below.
      pass

  # Check if each file is in the bucket contents.
  file_buckets = {}
  for path in files:
    file_path, hash_path = _GetPaths(path)

    if file_path in file_buckets:
      # Ignore duplicates, e.g. when both the data file and its .sha1 stamp
      # appear in the file list.
      continue
    if not os.path.exists(hash_path):
      # Probably got some non-Cloud Storage files in the file list. Ignore.
      continue

    file_hash = cloud_storage.ReadHash(hash_path)
    file_buckets[file_path] = []
    for bucket in BUCKETS:
      if bucket in bucket_contents and file_hash in bucket_contents[bucket]:
        file_buckets[file_path].append(bucket)

  return file_buckets


class Ls(command_line.Command):
  """List which bucket each file is in."""

  @classmethod
  def AddCommandLineArgs(cls, parser):
    parser.add_argument('-r', '--recursive', action='store_true')
    parser.add_argument('paths', nargs='+')

  @classmethod
  def ProcessCommandLineArgs(cls, parser, args):
    for path in args.paths:
      if not os.path.exists(path):
        parser.error('Path not found: %s' % path)

  def Run(self, args):
    def GetFilesInPaths(paths, recursive):
      """If path is a dir, yields all files in path; otherwise just yields
      path. If recursive is true, walks subdirectories recursively."""
      for path in paths:
        if not os.path.isdir(path):
          yield path
          continue

        if recursive:
          for root, _, filenames in os.walk(path):
            for filename in filenames:
              yield os.path.join(root, filename)
        else:
          for filename in os.listdir(path):
            yield os.path.join(path, filename)

    files = _FindFilesInCloudStorage(
        GetFilesInPaths(args.paths, args.recursive))

    if not files:
      print 'No files in Cloud Storage.'
      return

    for file_path, buckets in sorted(files.iteritems()):
      if buckets:
        buckets = [BUCKETS[bucket] for bucket in buckets]
        print '%-11s  %s' % (','.join(buckets), file_path)
      else:
        print '%-11s  %s' % ('not found', file_path)


class Mv(command_line.Command):
  """Move files to the given bucket."""

  @classmethod
  def AddCommandLineArgs(cls, parser):
    parser.add_argument('files', nargs='+')
    parser.add_argument('bucket', choices=cloud_storage.BUCKET_ALIASES)

  @classmethod
  def ProcessCommandLineArgs(cls, parser, args):
    args.bucket = cloud_storage.BUCKET_ALIASES[args.bucket]

  def Run(self, args):
    files = _FindFilesInCloudStorage(args.files)

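    # Make sure every file exists in some bucket before moving anything.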
    for file_path, buckets in sorted(files.iteritems()):
      if not buckets:
        raise IOError('%s not found in Cloud Storage.' % file_path)

    for file_path, buckets in sorted(files.iteritems()):
      if args.bucket in buckets:
        buckets.remove(args.bucket)
      if not buckets:
        logging.info('Skipping %s, no action needed.', file_path)
        continue

      # Move to the target bucket.
      file_hash = cloud_storage.ReadHash(file_path + '.sha1')
      cloud_storage.Move(buckets.pop(), args.bucket, file_hash)

      # Delete all additional copies.
      for bucket in buckets:
        cloud_storage.Delete(bucket, file_hash)


class Rm(command_line.Command):
  """Remove files from Cloud Storage."""

  @classmethod
  def AddCommandLineArgs(cls, parser):
    parser.add_argument('files', nargs='+')

  def Run(self, args):
    files = _FindFilesInCloudStorage(args.files)
    for file_path, buckets in sorted(files.iteritems()):
      file_hash = cloud_storage.ReadHash(file_path + '.sha1')
      for bucket in buckets:
        cloud_storage.Delete(bucket, file_hash)


class Upload(command_line.Command):
  """Upload files to Cloud Storage."""

  @classmethod
  def AddCommandLineArgs(cls, parser):
    parser.add_argument('files', nargs='+')
    parser.add_argument('bucket', choices=cloud_storage.BUCKET_ALIASES)

  @classmethod
  def ProcessCommandLineArgs(cls, parser, args):
    args.bucket = cloud_storage.BUCKET_ALIASES[args.bucket]

    for path in args.files:
      if not os.path.exists(path):
        parser.error('File not found: %s' % path)

  def Run(self, args):
    for file_path in args.files:
      file_hash = cloud_storage.CalculateHash(file_path)

      # Create or update the hash file.
      hash_path = file_path + '.sha1'
      with open(hash_path, 'wb') as f:
        f.write(file_hash)
        f.flush()

      # Add the data to Cloud Storage.
      cloud_storage.Insert(args.bucket, file_hash, file_path)

      # Add the hash file to the branch, for convenience. :)
      subprocess.call(['git', 'add', hash_path])


class CloudStorageCommand(command_line.SubcommandCommand):
  commands = (Ls, Mv, Rm, Upload)


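# Illustrative invocations (hypothetical script name; subcommand names assume
# SubcommandCommand exposes each command under its lowercased class name, and
# <bucket> is an alias from cloud_storage.BUCKET_ALIASES):
#   cloud_storage ls -r <path>
#   cloud_storage upload <data file> <bucket>
#   cloud_storage mv <data file> <bucket>
#   cloud_storage rm <data file>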
if __name__ == '__main__':
  logging.getLogger().setLevel(logging.INFO)
  sys.exit(CloudStorageCommand.main())