1#    Copyright 2013-2015 ARM Limited
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7#     http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14#
15
16
17"""
18Miscellaneous functions that don't fit anywhere else.
19
20"""
21from __future__ import division
22import os
23import sys
24import re
25import string
26import threading
27import signal
28import subprocess
29import pkgutil
30import logging
31import random
32import ctypes
33from operator import itemgetter
34from itertools import groupby
35from functools import partial
36
37import wrapt
38
39from devlib.exception import HostError, TimeoutError
40
41
# ABI --> architectures list
# Maps a canonical ABI name to the list of architecture/ABI strings that
# should be treated as equivalent to it (as reported by e.g. uname or an
# Android build.prop).
ABI_MAP = {
    'armeabi': ['armeabi', 'armv7', 'armv7l', 'armv7el', 'armv7lh', 'armeabi-v7a'],
    'arm64': ['arm64', 'armv8', 'arm64-v8a', 'aarch64'],
}
47
# Vendor ID --> CPU part ID --> CPU variant ID --> Core Name
# None means variant is not used (i.e. the part ID alone determines the
# core name); see get_cpu_name() below for the lookup logic.
CPU_PART_MAP = {
    0x41: {  # ARM
        0x926: {None: 'ARM926'},
        0x946: {None: 'ARM946'},
        0x966: {None: 'ARM966'},
        0xb02: {None: 'ARM11MPCore'},
        0xb36: {None: 'ARM1136'},
        0xb56: {None: 'ARM1156'},
        0xb76: {None: 'ARM1176'},
        0xc05: {None: 'A5'},
        0xc07: {None: 'A7'},
        0xc08: {None: 'A8'},
        0xc09: {None: 'A9'},
        0xc0e: {None: 'A17'},
        0xc0f: {None: 'A15'},
        0xc14: {None: 'R4'},
        0xc15: {None: 'R5'},
        0xc17: {None: 'R7'},
        0xc18: {None: 'R8'},
        0xc20: {None: 'M0'},
        0xc60: {None: 'M0+'},
        0xc21: {None: 'M1'},
        0xc23: {None: 'M3'},
        0xc24: {None: 'M4'},
        0xc27: {None: 'M7'},
        0xd01: {None: 'A32'},
        0xd03: {None: 'A53'},
        0xd04: {None: 'A35'},
        0xd07: {None: 'A57'},
        0xd08: {None: 'A72'},
        0xd09: {None: 'A73'},
    },
    0x42: {  # Broadcom
        0x516: {None: 'Vulcan'},
    },
    0x43: {  # Cavium
        0x0a1: {None: 'Thunderx'},
        0x0a2: {None: 'Thunderx81xx'},
    },
    0x4e: {  # Nvidia
        0x0: {None: 'Denver'},
    },
    0x50: {  # AppliedMicro
        0x0: {None: 'xgene'},
    },
    0x51: {  # Qualcomm
        0x02d: {None: 'Scorpion'},
        0x04d: {None: 'MSM8960'},
        0x06f: {  # Krait
            0x2: 'Krait400',
            0x3: 'Krait450',
        },
        0x205: {0x1: 'KryoSilver'},
        0x211: {0x1: 'KryoGold'},
        0x800: {None: 'Falkor'},
    },
    0x53: {  # Samsung LSI
        0x001: {0x1: 'MongooseM1'},
    },
    0x56: {  # Marvell
        0x131: {
            0x2: 'Feroceon 88F6281',
        }
    },
}
115
116
def get_cpu_name(implementer, part, variant):
    """Look up a human-readable core name in CPU_PART_MAP from the given
    implementer (vendor), part, and variant IDs.

    Returns ``None`` if the combination is not present in the map.
    """
    variants = CPU_PART_MAP.get(implementer, {}).get(part, {})
    # A None key indicates that the variant ID is irrelevant for this part.
    if None in variants:
        return variants[None]
    return variants.get(variant)
124
125
def preexec_function():
    """Pre-exec hook for child processes spawned by check_output() below:
    detaches the child from the parent's SIGINT handling and process group."""
    # Ignore the SIGINT signal by setting the handler to the standard
    # signal handler SIG_IGN, so a Ctrl-C in the parent does not kill
    # the child directly.
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    # Change process group in case we have to kill the subprocess and all of
    # its children later (see the os.killpg() call in check_output()).
    # TODO: this is Unix-specific; would be good to find an OS-agnostic way
    #       to do this in case we wanna port WA to Windows.
    os.setpgrp()
135
136
# Logger used by check_output() below to report subprocess timeouts/kills.
check_output_logger = logging.getLogger('check_output')
138
139
def check_output(command, timeout=None, ignore=None, inputtext=None, **kwargs):
    """This is a version of subprocess.check_output that adds a timeout parameter to kill
    the subprocess if it does not return within the specified time.

    :param command: the command to execute (list or string, as for Popen).
    :param timeout: seconds to wait before SIGKILL-ing the child's process
                    group; ``None``/0 disables the timeout.
    :param ignore: exit code(s) that should not raise: an int, a list of
                   ints, or the string ``'all'`` to ignore any exit code.
    :param inputtext: data passed to the child's stdin via communicate().
    :param kwargs: forwarded to subprocess.Popen (``stdout`` not allowed).
    :returns: ``(stdout, stderr)`` of the child.
    :raises TimeoutError: if the child was killed with SIGKILL (assumed to
                          be due to the timeout firing).
    :raises subprocess.CalledProcessError: on any other non-ignored
                                           non-zero exit code.
    """
    # pylint: disable=too-many-branches
    if ignore is None:
        ignore = []
    elif isinstance(ignore, int):
        ignore = [ignore]
    elif not isinstance(ignore, list) and ignore != 'all':
        message = 'Invalid value for ignore parameter: "{}"; must be an int or a list'
        raise ValueError(message.format(ignore))
    if 'stdout' in kwargs:
        raise ValueError('stdout argument not allowed, it will be overridden.')

    def callback(pid):
        # Runs on the Timer thread when the timeout expires: kill the
        # child's entire process group (set up by preexec_function).
        try:
            check_output_logger.debug('{} timed out; sending SIGKILL'.format(pid))
            os.killpg(pid, signal.SIGKILL)
        except OSError:
            pass  # process may have already terminated.

    process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                               stdin=subprocess.PIPE,
                               preexec_fn=preexec_function, **kwargs)

    if timeout:
        timer = threading.Timer(timeout, callback, [process.pid, ])
        timer.start()

    # Always cancel the timer once communicate() returns, whether it
    # completed normally or raised.
    try:
        output, error = process.communicate(inputtext)
    finally:
        if timeout:
            timer.cancel()

    retcode = process.poll()
    if retcode:
        if retcode == -9:  # killed, assume due to timeout callback
            raise TimeoutError(command, output='\n'.join([output, error]))
        elif ignore != 'all' and retcode not in ignore:
            raise subprocess.CalledProcessError(retcode, command, output='\n'.join([output, error]))
    return output, error
182
183
def walk_modules(path):
    """
    Given package name, return a list of all modules (including submodules, etc)
    in that package.

    :raises HostError: if an exception is raised while trying to import one of the
                       modules under ``path``. The exception will have addtional
                       attributes set: ``module`` will be set to the qualified name
                       of the originating module, and ``orig_exc`` will contain
                       the original exception.

    """

    def __try_import(path):
        # Wrap any import failure in a HostError that carries enough
        # context to identify the offending module.
        try:
            return __import__(path, {}, {}, [''])
        except Exception as e:
            he = HostError('Could not load {}: {}'.format(path, str(e)))
            he.module = path
            he.exc_info = sys.exc_info()
            he.orig_exc = e
            raise he

    root = __try_import(path)
    modules = [root]
    # A plain module (as opposed to a package) has no __path__ and
    # therefore no children to walk.
    if not hasattr(root, '__path__'):
        return modules
    for _, child_name, is_package in pkgutil.iter_modules(root.__path__):
        child_path = '.'.join([path, child_name])
        if is_package:
            modules.extend(walk_modules(child_path))
        else:
            modules.append(__try_import(child_path))
    return modules
220
221
def ensure_directory_exists(dirpath):
    """A filter for directory paths to ensure they exist.

    Creates the directory (and any missing parents) if it does not already
    exist, and returns the path unchanged so the function can be chained
    as a pass-through filter.
    """
    try:
        os.makedirs(dirpath)
    except OSError:
        # EAFP: avoids the check-then-create race of the previous
        # isdir()/makedirs() sequence -- the directory may be created
        # concurrently by another process. Only re-raise if the path
        # still isn't a directory (permissions error, path is a file, ...).
        if not os.path.isdir(dirpath):
            raise
    return dirpath
227
228
def ensure_file_directory_exists(filepath):
    """
    A filter for file paths: make sure the *directory* of the file exists
    (creating it if necessary) so that the file could be created there.
    The file itself is *not* created if it does not already exist.
    Returns the path unchanged.
    """
    directory = os.path.dirname(filepath)
    ensure_directory_exists(directory)
    return filepath
239
240
def merge_dicts(*args, **kwargs):
    """Merge an arbitrary number of dicts (at least two), left to right,
    via _merge_two_dicts(); keyword arguments are forwarded to it.

    :raises ValueError: if fewer than two dicts are supplied.
    """
    # functools.reduce exists on both Python 2 and 3; the bare ``reduce``
    # builtin used previously is Python 2 only.
    from functools import reduce
    if len(args) < 2:
        raise ValueError('Must specify at least two dicts to merge.')
    func = partial(_merge_two_dicts, **kwargs)
    return reduce(func, args)
246
247
def _merge_two_dicts(base, other, list_duplicates='all', match_types=False,  # pylint: disable=R0912,R0914
                     dict_type=dict, should_normalize=True, should_merge_lists=True):
    """Merge ``other`` on top of ``base``, normalizing keys (see normalize()).

    Values present in both are merged recursively for dicts, merged via
    _merge_two_lists() for lists, unioned for sets, and otherwise taken
    from ``other``. Result key order: keys only in ``base`` first (in
    ``base`` order), then every key of ``other`` (in ``other`` order).
    """
    merged = dict_type()
    base_keys = base.keys()
    other_keys = other.keys()
    norm = normalize if should_normalize else (lambda value, _: value)

    base_only = [k for k in base_keys if k not in other_keys]
    other_only = [k for k in other_keys if k not in base_keys]
    both = [k for k in base_keys if k in other_keys]
    union = base_only + list(other_keys)

    for k in union:
        if k in base_only:
            merged[k] = norm(base[k], dict_type)
        elif k in other_only:
            merged[k] = norm(other[k], dict_type)
        elif k in both:
            base_value = base[k]
            other_value = other[k]
            base_type = type(base_value)
            other_type = type(other_value)
            if (match_types and (base_type != other_type) and
                    (base_value is not None) and (other_value is not None)):
                raise ValueError('Type mismatch for {} got {} ({}) and {} ({})'.format(k, base_value, base_type,
                                                                                       other_value, other_type))
            if isinstance(base_value, dict):
                # NOTE(review): should_normalize/should_merge_lists are not
                # propagated into the recursive call, so nested dicts always
                # use the defaults -- preserved as-is; confirm if intended.
                merged[k] = _merge_two_dicts(base_value, other_value, list_duplicates, match_types, dict_type)
            elif isinstance(base_value, list):
                # When list merging is disabled, the base list is discarded
                # and only the other list is kept (still normalized).
                head = base_value if should_merge_lists else []
                merged[k] = _merge_two_lists(head, other_value, list_duplicates, dict_type)
            elif isinstance(base_value, set):
                merged[k] = norm(base_value.union(other_value), dict_type)
            else:
                merged[k] = norm(other_value, dict_type)
        else:  # Should never get here
            raise AssertionError('Unexpected merge key: {}'.format(k))

    return merged
303
304
def merge_lists(*args, **kwargs):
    """Merge an arbitrary number of lists (at least two), left to right,
    via _merge_two_lists(); keyword arguments are forwarded to it.

    :raises ValueError: if fewer than two lists are supplied.
    """
    # functools.reduce exists on both Python 2 and 3; the bare ``reduce``
    # builtin used previously is Python 2 only.
    from functools import reduce
    if len(args) < 2:
        raise ValueError('Must specify at least two lists to merge.')
    func = partial(_merge_two_lists, **kwargs)
    return reduce(func, args)
310
311
def _merge_two_lists(base, other, duplicates='all', dict_type=dict):  # pylint: disable=R0912
    """
    Merge lists, normalizing their entries.

    parameters:

        :base, other: the two lists to be merged. ``other`` will be merged on
                      top of base. Non-iterable arguments are wrapped in a
                      single-element list first.
        :duplicates: Indicates the strategy of handling entries that appear
                     in both lists. ``all`` will keep occurrences from both
                     lists; ``first`` will only keep occurrences from
                     ``base``; ``last`` will only keep occurrences from
                     ``other``;

                     .. note:: duplicate entries that appear in the *same* list
                               will never be removed.

    Entries that are strings starting with '~' act as removal directives
    (see _check_remove_item).
    """
    if not isiterable(base):
        base = [base]
    if not isiterable(other):
        other = [other]
    if duplicates == 'all':
        result = []
        for entry in normalize(base, dict_type) + normalize(other, dict_type):
            if not _check_remove_item(result, entry):
                result.append(entry)
        return result
    elif duplicates == 'first':
        base_entries = normalize(base, dict_type)
        result = normalize(base, dict_type)
        # Apply any '~'-removals from base against the result first.
        for entry in base_entries:
            _check_remove_item(result, entry)
        # Only entries of other that are not already in base get appended.
        for entry in normalize(other, dict_type):
            if not _check_remove_item(result, entry) and entry not in base_entries:
                result.append(entry)  # pylint: disable=no-member
        return result
    elif duplicates == 'last':
        other_entries = normalize(other, dict_type)
        result = []
        # Keep base entries only if they are not superseded by other.
        for entry in normalize(base, dict_type):
            if not _check_remove_item(result, entry) and entry not in other_entries:
                result.append(entry)
        for entry in other_entries:
            if not _check_remove_item(result, entry):
                result.append(entry)
        return result
    else:
        raise ValueError('Unexpected value for list duplicates argument: {}. '.format(duplicates) +
                         'Must be in {"all", "first", "last"}.')
364
365
def _check_remove_item(the_list, item):
    """Helper for the list-merging functions: a string entry starting with
    '~' is a directive to remove the named entry from ``the_list``.
    Returns ``True`` if ``item`` was such a directive (whether or not a
    matching entry was actually present and removed), ``False`` otherwise."""
    if not isinstance(item, basestring) or not item.startswith('~'):
        return False
    target = item[1:]
    if target in the_list:
        # Drop the first occurrence only, mirroring list semantics.
        the_list.remove(target)
    return True
378
379
def normalize(value, dict_type=dict):
    """Normalize values. Recursively normalizes dict keys to be lower case,
    no surrounding whitespace, underscore-delimited strings.

    :param value: the value to normalize; dicts, lists, and tuples are
        rebuilt recursively, anything else is returned unchanged.
    :param dict_type: mapping class used to rebuild dicts (e.g. OrderedDict).
    :returns: a new, normalized structure (only keys are transformed;
        leaf values are left as-is).
    """
    if isinstance(value, dict):
        normalized = dict_type()
        # dict.items() rather than the Python 2-only iteritems(); behaves
        # identically on Python 2 and keeps this working on Python 3.
        for k, v in value.items():
            key = k.strip().lower().replace(' ', '_')
            normalized[key] = normalize(v, dict_type)
        return normalized
    elif isinstance(value, list):
        return [normalize(v, dict_type) for v in value]
    elif isinstance(value, tuple):
        return tuple([normalize(v, dict_type) for v in value])
    else:
        return value
395
396
def convert_new_lines(text):
    """Normalize line endings: Windows (CRLF) and old-Mac (CR) endings
    are both converted to Unix newlines (LF)."""
    # CRLF must be collapsed first, otherwise the lone-CR pass would turn
    # each CRLF into two newlines.
    text = text.replace('\r\n', '\n')
    return text.replace('\r', '\n')
400
401
def escape_quotes(text):
    """Escape quotes, and escaped quotes, in the specified text."""
    # Double up the backslash on already-escaped quotes first, then
    # backslash-escape every remaining quote character of either kind.
    escaped = re.sub(r'\\("|\')', r'\\\\\1', text)
    escaped = escaped.replace('\'', '\\\'')
    return escaped.replace('\"', '\\\"')
405
406
def escape_single_quotes(text):
    """Escape single quotes, and escaped single quotes, in the specified text.

    Single quotes are turned into the shell-style sequence '\\'' so the
    result can be embedded inside a single-quoted shell string."""
    doubled = re.sub(r'\\("|\')', r'\\\\\1', text)
    return doubled.replace('\'', '\'\\\'\'')
410
411
def escape_double_quotes(text):
    """Escape double quotes, and escaped double quotes, in the specified text."""
    intermediate = re.sub(r'\\("|\')', r'\\\\\1', text)
    return intermediate.replace('\"', '\\\"')
415
416
def getch(count=1):
    """Read ``count`` characters from standard input."""
    if os.name == 'nt':
        import msvcrt  # pylint: disable=F0401
        return ''.join([msvcrt.getch() for _ in xrange(count)])
    # Assume Unix: put the terminal into raw mode for the read, then
    # restore the previous settings no matter what happens.
    import tty  # NOQA
    import termios  # NOQA
    fd = sys.stdin.fileno()
    saved_settings = termios.tcgetattr(fd)
    try:
        tty.setraw(sys.stdin.fileno())
        result = sys.stdin.read(count)
    finally:
        termios.tcsetattr(fd, termios.TCSADRAIN, saved_settings)
    return result
433
434
def isiterable(obj):
    """Returns ``True`` if the specified object is iterable and
    *is not a string type*, ``False`` otherwise.

    Note: only objects implementing ``__iter__`` count; old-style types
    that are iterable only via ``__getitem__`` return ``False``.
    """
    # ``str`` rather than the Python 2-only ``basestring``: on Python 2
    # neither str nor unicode has an __iter__ attribute, so the hasattr
    # check alone already excludes them and behavior is unchanged; on
    # Python 3 (where str *does* implement __iter__) the isinstance
    # check is what excludes strings.
    return hasattr(obj, '__iter__') and not isinstance(obj, str)
439
440
def as_relative(path):
    """Convert path to relative by stripping away the leading '/' on UNIX or
    the equivalent on other platforms (any drive specifier is dropped)."""
    _, tail = os.path.splitdrive(path)
    return tail.lstrip(os.sep)
446
447
def get_cpu_mask(cores):
    """Return a string with the hex for the cpu mask for the specified core
    numbers, e.g. ``[0, 2]`` -> ``'0x5'``."""
    mask = 0
    for core in cores:
        # OR-ing makes duplicate core numbers harmless.
        mask |= 1 << core
    return '0x{:x}'.format(mask)
454
455
def which(name):
    """Platform-independent version of UNIX which utility."""
    if os.name != 'nt':  # assume UNIX-like: defer to the system's `which`
        try:
            return check_output(['which', name])[0].strip()  # pylint: disable=E1103
        except subprocess.CalledProcessError:
            return None
    # Windows: search PATH ourselves, trying the bare name first and then
    # each PATHEXT extension in turn.
    directories = os.getenv('PATH').split(os.pathsep)
    extensions = os.getenv('PATHEXT').split(os.pathsep)
    for directory in directories:
        candidate = os.path.join(directory, name)
        if os.path.isfile(candidate):
            return candidate
        for extension in extensions:
            with_ext = candidate + extension
            if os.path.isfile(with_ext):
                return with_ext
    return None
475
476
# This matches most ANSI escape sequences, not just colors. Compiled once
# at module load since it may be applied to every line of command output.
_bash_color_regex = re.compile(r'\x1b\[[0-9;]*[a-zA-Z]')


def strip_bash_colors(text):
    """Return ``text`` with all ANSI terminal escape sequences removed."""
    return _bash_color_regex.sub('', text)
482
483
def get_random_string(length):
    """Returns a random ASCII string of the specified length.

    Characters are drawn from ASCII letters and digits using the
    ``random`` module, so this is NOT suitable for security tokens.
    """
    # range() rather than the Python 2-only xrange() (on Python 2 this
    # builds a small throwaway list, which is harmless here).
    alphabet = string.ascii_letters + string.digits
    return ''.join(random.choice(alphabet) for _ in range(length))
487
488
class LoadSyntaxError(Exception):
    """Raised when a file being loaded contains a syntax error.

    Carries the path of the offending file (``filepath``) and the line
    number of the error (``lineno``) in addition to the message.
    """

    def __init__(self, message, filepath, lineno):
        super(LoadSyntaxError, self).__init__(message)
        # Store the message explicitly: BaseException.message was removed
        # in Python 3 (deprecated since 2.6), so __str__ must not rely on
        # inheriting it.
        self.message = message
        self.filepath = filepath
        self.lineno = lineno

    def __str__(self):
        message = 'Syntax Error in {}, line {}:\n\t{}'
        return message.format(self.filepath, self.lineno, self.message)
499
500
RAND_MOD_NAME_LEN = 30
BAD_CHARS = string.punctuation + string.whitespace
# Translation table mapping every punctuation/whitespace character to '_'.
# string.maketrans() exists only on Python 2; str.maketrans() is the
# Python 3 equivalent (each produces the table type its own
# str.translate() expects).
try:
    TRANS_TABLE = string.maketrans(BAD_CHARS, '_' * len(BAD_CHARS))
except AttributeError:
    TRANS_TABLE = str.maketrans(BAD_CHARS, '_' * len(BAD_CHARS))


def to_identifier(text):
    """Converts text to a valid Python identifier by replacing all
    whitespace and punctuation with underscores; runs of consecutive
    underscores are collapsed into one.

    Note: a leading digit is not altered, so the result is not guaranteed
    to be a valid identifier for all inputs.
    """
    return re.sub('_+', '_', text.translate(TRANS_TABLE))
510
511
def unique(alist):
    """
    Returns a list containing only unique elements from the input list (but preserves
    order, unlike sets). The first occurrence of each element is kept.

    Elements need not be hashable -- membership is checked by equality,
    which is why a seen-set is deliberately not used here.
    """
    deduped = []
    for element in alist:
        if element in deduped:
            continue
        deduped.append(element)
    return deduped
523
524
def ranges_to_list(ranges_string):
    """Converts a sysfs-style ranges string, e.g. ``"0,2-4"``, into a list, e.g. ``[0, 2, 3, 4]``.

    :raises ValueError: if a component of the string is not an integer or
        a ``first-last`` range.
    """
    values = []
    for rg in ranges_string.split(','):
        if '-' in rg:
            first, last = map(int, rg.split('-'))
            # range() rather than the Python 2-only xrange(); the bounds
            # are inclusive in sysfs notation, hence last + 1.
            values.extend(range(first, last + 1))
        else:
            values.append(int(rg))
    return values
535
536
def list_to_ranges(values):
    """Converts a list, e.g ``[0,2,3,4]``, into a sysfs-style ranges string, e.g. ``"0,2-4"``.

    Consecutive runs are found with the classic index-minus-value trick:
    within a run of consecutive integers, (index - value) is constant.
    """
    range_strings = []
    # lambda pair: pair[0] - pair[1] replaces the Python 2-only
    # tuple-unpacking lambda (a SyntaxError on Python 3); the group is
    # materialized as a list because len() of a bare map() iterator also
    # fails on Python 3.
    for _, group_iter in groupby(enumerate(values), lambda pair: pair[0] - pair[1]):
        group = [value for _, value in group_iter]
        if len(group) == 1:
            range_strings.append(str(group[0]))
        else:
            range_strings.append('{}-{}'.format(group[0], group[-1]))
    return ','.join(range_strings)
549
550
def list_to_mask(values, base=0x0):
    """Converts the specified list of integer values into a bit mask for
    those values. Optionally, the list can be applied on top of an
    existing mask (``base``). Returns the resulting integer mask."""
    mask = base
    for value in values:
        mask |= (1 << value)
    return mask
558
559
def mask_to_list(mask):
    """Converts the specified integer bitmask into a list of indexes of
    bits that are set in the mask, ordered from the highest considered
    bit position down to bit 0.
    """
    size = len(bin(mask)) - 2  # because of the "0b" prefix bin() adds
    # range() rather than the Python 2-only xrange().
    return [size - i - 1 for i in range(size)
            if mask & (1 << size - i - 1)]
566
567
# Module-level cache backing the memoized() decorator below: maps an ID
# string derived from the wrapped callable and its arguments to the
# previously-computed result.
__memo_cache = {}


def reset_memo_cache():
    """Discard all results cached by the memoized() decorator."""
    __memo_cache.clear()
573
574
def __get_memo_id(obj):
    """
    Return a string identifying ``obj`` for use as (part of) a memo-cache key.

    An object's id() may be re-used after an object is freed, so it's not
    sufficiently unique to identify params for the memo cache (two different
    params may end up with the same id). This attempts to generate a more
    unique ID string by combining the id() with the object's hash, or, for
    unhashable objects, with the object's leading raw bytes.
    """
    obj_id = id(obj)
    try:
        return '{}/{}'.format(obj_id, hash(obj))
    except TypeError:  # obj is not hashable
        # Fall back to viewing the object's raw in-memory representation.
        # NOTE(review): this relies on id() being the object's address and
        # on ctypes.py_object semantics -- i.e. it is CPython-specific.
        obj_pyobj = ctypes.cast(obj_id, ctypes.py_object)
        # TODO: Note: there is still a possibility of a clash here. If two
        # different objects get assigned the same ID, and are large and are
        # identical in the first thirty two bytes. This shouldn't be much of an
        # issue in the current application of memoizing Target calls, as it's very
        # unlikely that a target will get passed large params; but may cause
        # problems in other applications, e.g. when memoizing results of operations
        # on large arrays. I can't really think of a good way around that apart
        # from, e.g., md5 hashing the entire raw object, which will have an
        # undesirable impact on performance.
        num_bytes = min(ctypes.sizeof(obj_pyobj), 32)
        obj_bytes = ctypes.string_at(ctypes.addressof(obj_pyobj), num_bytes)
        return '{}/{}'.format(obj_id, obj_bytes)
599
600
@wrapt.decorator
def memoized(wrapped, instance, args, kwargs):
    """A decorator for memoizing functions and methods.

    Results are stored in the module-level ``__memo_cache``, keyed by the
    repr() of the wrapped callable plus IDs derived from the arguments
    (see __get_memo_id); use reset_memo_cache() to clear it.

    NOTE(review): kwargs are iterated with the Python 2-only
    dict.iteritems() and are not sorted, and there is no separator between
    the positional and keyword parts of the key string -- worth confirming
    and cleaning up in a future change.
    """
    func_id = repr(wrapped)

    def memoize_wrapper(*args, **kwargs):
        # Build the cache key from the function identity and argument IDs.
        id_string = func_id + ','.join([__get_memo_id(a) for a in  args])
        id_string += ','.join('{}={}'.format(k, v)
                              for k, v in kwargs.iteritems())
        if id_string not in __memo_cache:
            __memo_cache[id_string] = wrapped(*args, **kwargs)
        return __memo_cache[id_string]

    return memoize_wrapper(*args, **kwargs)
615
616