mirror of https://github.com/ARM-software/devlib.git
synced 2025-09-22 20:01:53 +01:00

Compare commits (32 commits):

0ff8628c9c, c0d8a98d90, 441eea9897, b0db2067a2, 1417e81605, 2e81a72b39,
22f2c8b663, c2db6c17ab, e01a76ef1b, 9fcca25031, a6b9542f0f, 413e83f5d6,
ac19873423, 17d4b22b9f, f65130b7c7, 5b51c2644e, a752f55956, 781f9b068d,
7e79eeb9cb, 911a9f2ef4, cc0679e40f, 5dea9f8bcf, a9ee41855d, c13e3c260b,
aabb74c8cb, a4c22cef71, 3da7fbc9dd, f2a87ce61c, 2b6cb264cf, 7e0e6e8706,
4fabcae0b4, 3c4a282c29
@@ -53,10 +53,11 @@ from devlib.host import LocalConnection
from devlib.utils.android import AdbConnection
from devlib.utils.ssh import SshConnection, TelnetConnection, Gem5Connection

from devlib.utils.version import get_commit as __get_commit
from devlib.utils.version import (get_devlib_version as __get_devlib_version,
get_commit as __get_commit)


__version__ = '1.1.0'
__version__ = __get_devlib_version()

__commit = __get_commit()
if __commit:
@@ -238,6 +238,19 @@ hotplug_online_all() {
done
}


################################################################################
# Scheduler
################################################################################

sched_get_kernel_attributes() {
MATCH=${1:-'.*'}
[ -d /proc/sys/kernel/ ] || exit 1
$GREP '' /proc/sys/kernel/sched_* | \
$SED -e 's|/proc/sys/kernel/sched_||' | \
$GREP -e "$MATCH"
}

################################################################################
# Misc
################################################################################
@@ -264,6 +277,34 @@ read_tree_values() {
fi
}

read_tree_tgz_b64() {
BASEPATH=$1
MAXDEPTH=$2
TMPBASE=$3

if [ ! -e $BASEPATH ]; then
echo "ERROR: $BASEPATH does not exist"
exit 1
fi

cd $TMPBASE
TMP_FOLDER=$($BUSYBOX realpath $($BUSYBOX mktemp -d XXXXXX))

# 'tar' doesn't work as expected on debugfs, so copy the tree first to
# workaround the issue
cd $BASEPATH
for CUR_FILE in $($BUSYBOX find . -follow -type f -maxdepth $MAXDEPTH); do
$BUSYBOX cp --parents $CUR_FILE $TMP_FOLDER/ 2> /dev/null
done

cd $TMP_FOLDER
$BUSYBOX tar cz * 2>/dev/null | $BUSYBOX base64

# Clean-up the tmp folder since we won't need it any more
cd $TMPBASE
rm -rf $TMP_FOLDER
}

get_linux_system_id() {
kernel=$($BUSYBOX uname -r)
hardware=$($BUSYBOX ip a | $BUSYBOX grep 'link/ether' | $BUSYBOX sed 's/://g' | $BUSYBOX awk '{print $2}' | $BUSYBOX tr -d '\n')
@@ -337,12 +378,18 @@ hotplug_online_all)
read_tree_values)
read_tree_values $*
;;
read_tree_tgz_b64)
read_tree_tgz_b64 $*
;;
get_linux_system_id)
get_linux_system_id $*
;;
get_android_system_id)
get_android_system_id $*
;;
sched_get_kernel_attributes)
sched_get_kernel_attributes $*
;;
*)
echo "Command [$CMD] not supported"
exit -1
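The read_tree_tgz_b64 helper above emits the copied tree as a gzipped tarball, base64-encoded so it survives the shell transport. As a minimal host-side sketch of what a consumer of that stream has to do (devlib's own version lives in the target.py hunk further down), assuming only the Python standard library; the names are illustrative:

import base64
import gzip
import io
import tarfile

def unpack_tree_output(b64_output):
    # Undo the base64 layer, then the gzip layer, then walk the tar archive.
    tar_gz = base64.b64decode(b64_output)
    tar_data = gzip.GzipFile(fileobj=io.BytesIO(tar_gz)).read()
    contents = {}
    with tarfile.open(fileobj=io.BytesIO(tar_data)) as tar:
        for member in tar.getmembers():
            fobj = tar.extractfile(member)
            if fobj:  # regular files only; directories return None
                contents[member.name] = fobj.read()
    return contents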
@@ -106,17 +106,17 @@ class DerivedGfxInfoStats(DerivedFpsStats):
frame_count += 1

if start_vsync is None:
start_vsync = frame_data.Vsync_time_us
end_vsync = frame_data.Vsync_time_us
start_vsync = frame_data.Vsync_time_ns
end_vsync = frame_data.Vsync_time_ns

frame_time = frame_data.FrameCompleted_time_us - frame_data.IntendedVsync_time_us
frame_time = frame_data.FrameCompleted_time_ns - frame_data.IntendedVsync_time_ns
pff = 1e9 / frame_time
if pff > self.drop_threshold:
per_frame_fps.append([pff])

if frame_count:
duration = end_vsync - start_vsync
fps = (1e6 * frame_count) / float(duration)
fps = (1e9 * frame_count) / float(duration)
else:
duration = 0
fps = 0
@@ -133,15 +133,15 @@ class DerivedGfxInfoStats(DerivedFpsStats):
def _process_with_pandas(self, measurements_csv):
data = pd.read_csv(measurements_csv.path)
data = data[data.Flags_flags == 0]
frame_time = data.FrameCompleted_time_us - data.IntendedVsync_time_us
per_frame_fps = (1e6 / frame_time)
frame_time = data.FrameCompleted_time_ns - data.IntendedVsync_time_ns
per_frame_fps = (1e9 / frame_time)
keep_filter = per_frame_fps > self.drop_threshold
per_frame_fps = per_frame_fps[keep_filter]
per_frame_fps.name = 'fps'

frame_count = data.index.size
if frame_count > 1:
duration = data.Vsync_time_us.iloc[-1] - data.Vsync_time_us.iloc[0]
duration = data.Vsync_time_ns.iloc[-1] - data.Vsync_time_ns.iloc[0]
fps = (1e9 * frame_count) / float(duration)
else:
duration = 0
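The change above moves the gfxinfo frame timestamps from microseconds to nanoseconds, which is why the per-frame and aggregate FPS numerators change from 1e6 to 1e9. A quick arithmetic check with an illustrative ~60 Hz frame:

frame_time_ns = 16666667              # one frame at roughly 60 FPS, in nanoseconds
per_frame_fps = 1e9 / frame_time_ns   # ~60.0 with the new 1e9 factor

# The old code expressed the same frame as 16667 us and used a 1e6 factor,
# giving the same ~60.0 result, so only the unit changes, not the metric.
per_frame_fps_us = 1e6 / 16667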
@@ -105,6 +105,16 @@ class WorkerThreadError(DevlibError):
super(WorkerThreadError, self).__init__(message)


class KernelConfigKeyError(KeyError, IndexError, DevlibError):
"""
Exception raised when a kernel config option cannot be found.

It inherits from :exc:`IndexError` for backward compatibility, and
:exc:`KeyError` to behave like a regular mapping.
"""
pass


def get_traceback(exc=None):
"""
Returns the string with the traceback for the specifiec exc
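Because KernelConfigKeyError derives from KeyError, IndexError and DevlibError, both older callers that expected IndexError and newer callers treating the kernel config as a mapping keep working. A small sketch, assuming a devlib installation that includes this change:

from devlib.exception import KernelConfigKeyError

try:
    raise KernelConfigKeyError('CONFIG_FOO is not exposed in kernel config')
except IndexError:
    pass  # legacy callers that caught IndexError still work

try:
    raise KernelConfigKeyError('CONFIG_FOO is not exposed in kernel config')
except KeyError:
    pass  # mapping-style callers can treat it like a missing dict key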
@@ -97,20 +97,30 @@ _measurement_types = [
# covert without being familar with individual instruments.
MeasurementType('time', 'seconds', 'time',
conversions={
'time_us': lambda x: x * 1000000,
'time_ms': lambda x: x * 1000,
'time_us': lambda x: x * 1e6,
'time_ms': lambda x: x * 1e3,
'time_ns': lambda x: x * 1e9,
}
),
MeasurementType('time_us', 'microseconds', 'time',
conversions={
'time': lambda x: x / 1000000,
'time_ms': lambda x: x / 1000,
'time': lambda x: x / 1e6,
'time_ms': lambda x: x / 1e3,
'time_ns': lambda x: x * 1e3,
}
),
MeasurementType('time_ms', 'milliseconds', 'time',
conversions={
'time': lambda x: x / 1000,
'time_us': lambda x: x * 1000,
'time': lambda x: x / 1e3,
'time_us': lambda x: x * 1e3,
'time_ns': lambda x: x * 1e6,
}
),
MeasurementType('time_ns', 'nanoseconds', 'time',
conversions={
'time': lambda x: x / 1e9,
'time_ms': lambda x: x / 1e6,
'time_us': lambda x: x / 1e3,
}
),
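The added entries make all four time units mutually convertible and switch the factors to float notation. The conversions reduce to plain scalings; a standalone check in ordinary Python (not using the MeasurementType class itself):

t = 1.5                 # seconds
t_ms = t * 1e3          # 1500.0 milliseconds
t_us = t * 1e6          # 1500000.0 microseconds
t_ns = t * 1e9          # 1500000000.0 nanoseconds

# Converting back down, e.g. nanoseconds to microseconds, divides by 1e3,
# mirroring the 'time_us' lambda in the time_ns entry above.
assert t_ns / 1e3 == t_us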
@@ -87,7 +87,8 @@ class AcmeCapeInstrument(Instrument):
params = dict(
iio_capture=self.iio_capture,
host=self.host,
buffer_size=self.buffer_size,
# This must be a string for quote()
buffer_size=str(self.buffer_size),
iio_device=self.iio_device,
outfile=self.raw_data_file
)
@@ -82,7 +82,7 @@ class GfxInfoFramesInstrument(FramesInstrument):
if entry == 'Flags':
self.add_channel('Flags', MeasurementType('flags', 'flags'))
else:
self.add_channel(entry, 'time_us')
self.add_channel(entry, 'time_ns')
self.header = [chan.label for chan in self.channels.values()]
@@ -262,8 +262,9 @@ class CGroup(object):

# Control cgroup path
self.directory = controller.mount_point

if name != '/':
self.directory = self.target.path.join(controller.mount_point, name[1:])
self.directory = self.target.path.join(controller.mount_point, name.strip('/'))

# Setup path for tasks file
self.tasks_file = self.target.path.join(self.directory, 'tasks')
@@ -137,7 +137,7 @@ class HwmonModule(Module):
self.scan()

def scan(self):
values_tree = self.target.read_tree_values(self.root, depth=3)
values_tree = self.target.read_tree_values(self.root, depth=3, tar=True)
for entry_id, fields in values_tree.items():
path = self.target.path.join(self.root, entry_id)
name = fields.pop('name', None)
@@ -21,6 +21,7 @@ from past.builtins import basestring

from devlib.module import Module
from devlib.utils.misc import memoized
from devlib.utils.types import boolean


class SchedProcFSNode(object):
@@ -253,6 +254,109 @@ class SchedModule(Module):

return SchedProcFSData.available(target)

def get_kernel_attributes(self, matching=None, check_exit_code=True):
"""
Get the value of scheduler attributes.

:param matching: an (optional) substring to filter the scheduler
attributes to be returned.

The scheduler exposes a list of tunable attributes under:
/proc/sys/kernel
all starting with the "sched_" prefix.

This method returns a dictionary of all the "sched_" attributes exposed
by the target kernel, within the prefix removed.
It's possible to restrict the list of attributes by specifying a
substring to be matched.

returns: a dictionary of scheduler tunables
"""
command = 'sched_get_kernel_attributes {}'.format(
matching if matching else ''
)
output = self.target._execute_util(command, as_root=self.target.is_rooted,
check_exit_code=check_exit_code)
result = {}
for entry in output.strip().split('\n'):
if ':' not in entry:
continue
path, value = entry.strip().split(':', 1)
if value in ['0', '1']:
value = bool(int(value))
elif value.isdigit():
value = int(value)
result[path] = value
return result

def set_kernel_attribute(self, attr, value, verify=True):
"""
Set the value of a scheduler attribute.

:param attr: the attribute to set, without the "sched_" prefix
:param value: the value to set
:param verify: true to check that the requested value has been set

:raise TargetError: if the attribute cannot be set
"""
if isinstance(value, bool):
value = '1' if value else '0'
elif isinstance(value, int):
value = str(value)
path = '/proc/sys/kernel/sched_' + attr
self.target.write_value(path, value, verify)

@property
@memoized
def has_debug(self):
if self.target.config.get('SCHED_DEBUG') != 'y':
return False;
return self.target.file_exists('/sys/kernel/debug/sched_features')

def get_features(self):
"""
Get the status of each sched feature

:returns: a dictionary of features and their "is enabled" status
"""
if not self.has_debug:
raise RuntimeError("sched_features not available")
feats = self.target.read_value('/sys/kernel/debug/sched_features')
features = {}
for feat in feats.split():
value = True
if feat.startswith('NO'):
feat = feat.replace('NO_', '', 1)
value = False
features[feat] = value
return features

def set_feature(self, feature, enable, verify=True):
"""
Set the status of a specified scheduler feature

:param feature: the feature name to set
:param enable: true to enable the feature, false otherwise

:raise ValueError: if the specified enable value is not bool
:raise RuntimeError: if the specified feature cannot be set
"""
if not self.has_debug:
raise RuntimeError("sched_features not available")
feature = feature.upper()
feat_value = feature
if not boolean(enable):
feat_value = 'NO_' + feat_value
self.target.write_value('/sys/kernel/debug/sched_features',
feat_value, verify=False)
if not verify:
return
msg = 'Failed to set {}, feature not supported?'.format(feat_value)
features = self.get_features()
feat_value = features.get(feature, not enable)
if feat_value != enable:
raise RuntimeError(msg)

def get_cpu_sd_info(self, cpu):
"""
:returns: An object view of /proc/sys/kernel/sched_domain/cpu<cpu>/*
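Together with the sched_get_kernel_attributes shell helper added earlier, these SchedModule methods expose the /proc/sys/kernel/sched_* tunables and the debugfs sched_features switches through the target object. A hedged usage sketch, assuming a connected rooted target with the 'sched' module loaded; the connection settings and attribute names are illustrative:

from devlib import LinuxTarget

# Connects during construction by default.
target = LinuxTarget(connection_settings={'host': '192.168.0.10',
                                          'username': 'root'},
                     modules=['sched'])

# All sched_* tunables, with the 'sched_' prefix stripped from the keys.
attrs = target.sched.get_kernel_attributes()

# Only the tunables whose name contains 'latency'.
latency_attrs = target.sched.get_kernel_attributes(matching='latency')

# Write one back; verify=True re-reads the file to confirm the value stuck.
target.sched.set_kernel_attribute('latency_ns', 20000000)

# Toggle a sched_features flag, only available with CONFIG_SCHED_DEBUG.
if target.sched.has_debug:
    target.sched.set_feature('GENTLE_FAIR_SLEEPERS', False)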
devlib/target.py
@@ -13,6 +13,9 @@
# limitations under the License.
#

import io
import base64
import gzip
import os
import re
import time
@@ -27,13 +30,22 @@ import xml.dom.minidom
import copy
from collections import namedtuple, defaultdict
from pipes import quote
from past.builtins import long
from past.types import basestring
from numbers import Number
try:
from collections.abc import Mapping
except ImportError:
from collections import Mapping

from enum import Enum

from devlib.host import LocalConnection, PACKAGE_BIN_DIRECTORY
from devlib.module import get_module
from devlib.platform import Platform
from devlib.exception import (DevlibTransientError, TargetStableError,
TargetNotRespondingError, TimeoutError,
TargetTransientError) # pylint: disable=redefined-builtin
TargetTransientError, KernelConfigKeyError) # pylint: disable=redefined-builtin
from devlib.utils.ssh import SshConnection
from devlib.utils.android import AdbConnection, AndroidProperties, LogcatMonitor, adb_command, adb_disconnect, INTENT_FLAGS
from devlib.utils.misc import memoized, isiterable, convert_new_lines
@@ -684,6 +696,43 @@ class Target(object):
timeout = duration + 10
self.execute('sleep {}'.format(duration), timeout=timeout)

def read_tree_tar_flat(self, path, depth=1, check_exit_code=True,
decode_unicode=True, strip_null_chars=True):
command = 'read_tree_tgz_b64 {} {} {}'.format(quote(path), depth,
quote(self.working_directory))
output = self._execute_util(command, as_root=self.is_rooted,
check_exit_code=check_exit_code)

result = {}

# Unpack the archive in memory
tar_gz = base64.b64decode(output)
tar_gz_bytes = io.BytesIO(tar_gz)
tar_buf = gzip.GzipFile(fileobj=tar_gz_bytes).read()
tar_bytes = io.BytesIO(tar_buf)
with tarfile.open(fileobj=tar_bytes) as tar:
for member in tar.getmembers():
try:
content_f = tar.extractfile(member)
# ignore exotic members like sockets
except Exception:
continue
# if it is a file and not a folder
if content_f:
content = content_f.read()
if decode_unicode:
try:
content = content.decode('utf-8').strip()
if strip_null_chars:
content = content.replace('\x00', '').strip()
except UnicodeDecodeError:
content = ''

name = self.path.join(path, member.name)
result[name] = content

return result

def read_tree_values_flat(self, path, depth=1, check_exit_code=True):
command = 'read_tree_values {} {}'.format(quote(path), depth)
output = self._execute_util(command, as_root=self.is_rooted,
@@ -699,8 +748,30 @@ class Target(object):
result = {k: '\n'.join(v).strip() for k, v in accumulator.items()}
return result

def read_tree_values(self, path, depth=1, dictcls=dict, check_exit_code=True):
value_map = self.read_tree_values_flat(path, depth, check_exit_code)
def read_tree_values(self, path, depth=1, dictcls=dict,
check_exit_code=True, tar=False, decode_unicode=True,
strip_null_chars=True):
"""
Reads the content of all files under a given tree

:path: path to the tree
:depth: maximum tree depth to read
:dictcls: type of the dict used to store the results
:check_exit_code: raise an exception if the shutil command fails
:tar: fetch the entire tree using tar rather than just the value (more
robust but slower in some use-cases)
:decode_unicode: decode the content of tar-ed files as utf-8
:strip_null_chars: remove '\x00' chars from the content of utf-8
decoded files

:returns: a tree-like dict with the content of files as leafs
"""
if not tar:
value_map = self.read_tree_values_flat(path, depth, check_exit_code)
else:
value_map = self.read_tree_tar_flat(path, depth, check_exit_code,
decode_unicode,
strip_null_chars)
return _build_path_tree(value_map, path, self.path.sep, dictcls)

# internal methods
@@ -1722,8 +1793,56 @@ class KernelVersion(object):
__repr__ = __str__


class KernelConfig(object):
class HexInt(long):
"""
Subclass of :class:`int` that uses hexadecimal formatting by default.
"""

def __new__(cls, val=0, base=16):
super_new = super(HexInt, cls).__new__
if isinstance(val, Number):
return super_new(cls, val)
else:
return super_new(cls, val, base=base)

def __str__(self):
return hex(self).strip('L')


class KernelConfigTristate(Enum):
YES = 'y'
NO = 'n'
MODULE = 'm'

def __bool__(self):
"""
Allow using this enum to represent bool Kconfig type, although it is
technically different from tristate.
"""
return self in (self.YES, self.MODULE)

def __nonzero__(self):
"""
For Python 2.x compatibility.
"""
return self.__bool__()

@classmethod
def from_str(cls, str_):
for state in cls:
if state.value == str_:
return state
raise ValueError('No kernel config tristate value matches "{}"'.format(str_))


class TypedKernelConfig(Mapping):
"""
Mapping-like typed version of :class:`KernelConfig`.

Values are either :class:`str`, :class:`int`,
:class:`KernelConfigTristate`, or :class:`HexInt`. ``hex`` Kconfig type is
mapped to :class:`HexInt` and ``bool`` to :class:`KernelConfigTristate`.
"""
not_set_regex = re.compile(r'# (\S+) is not set')

@staticmethod
@@ -1733,50 +1852,202 @@ class KernelConfig(object):
name = 'CONFIG_' + name
return name

def iteritems(self):
return iter(self._config.items())
def __init__(self, mapping=None):
mapping = mapping if mapping is not None else {}
self._config = {
# Ensure we use the canonical name of the config keys for internal
# representation
self.get_config_name(k): v
for k, v in dict(mapping).items()
}

def __init__(self, text):
self.text = text
self._config = {}
for line in text.split('\n'):
@classmethod
def from_str(cls, text):
"""
Build a :class:`TypedKernelConfig` out of the string content of a
Kconfig file.
"""
return cls(cls._parse_text(text))

@staticmethod
def _val_to_str(val):
"Convert back values to Kconfig-style string value"
# Special case the gracefully handle the output of get()
if val is None:
return None
elif isinstance(val, KernelConfigTristate):
return val.value
elif isinstance(val, basestring):
return '"{}"'.format(val.strip('"'))
else:
return str(val)

def __str__(self):
return '\n'.join(
'{}={}'.format(k, self._val_to_str(v))
for k, v in self.items()
)

@staticmethod
def _parse_val(k, v):
"""
Parse a value of types handled by Kconfig:
* string
* bool
* tristate
* hex
* int

Since bool cannot be distinguished from tristate, tristate is
always used. :meth:`KernelConfigTristate.__bool__` will allow using
it as a bool though, so it should not impact user code.
"""
if not v:
return None

# Handle "string" type
if v.startswith('"'):
# Strip enclosing "
return v[1:-1]

else:
try:
# Handles "bool" and "tristate" types
return KernelConfigTristate.from_str(v)
except ValueError:
pass

try:
# Handles "int" type
return int(v)
except ValueError:
pass

try:
# Handles "hex" type
return HexInt(v)
except ValueError:
pass

# If no type could be parsed
raise ValueError('Could not parse Kconfig key: {}={}'.format(
k, v
), k, v
)

@classmethod
def _parse_text(cls, text):
config = {}
for line in text.splitlines():
line = line.strip()

# skip empty lines
if not line:
continue

if line.startswith('#'):
match = self.not_set_regex.search(line)
match = cls.not_set_regex.search(line)
if match:
self._config[match.group(1)] = 'n'
elif '=' in line:
value = 'n'
name = match.group(1)
else:
continue
else:
name, value = line.split('=', 1)
self._config[name.strip()] = value.strip()

def get(self, name, strict=False):
name = cls.get_config_name(name.strip())
value = cls._parse_val(name, value.strip())
config[name] = value
return config

def __getitem__(self, name):
name = self.get_config_name(name)
res = self._config.get(name)
try:
return self._config[name]
except KeyError:
raise KernelConfigKeyError(
"{} is not exposed in kernel config".format(name),
name
)

if not res and strict:
raise IndexError("{} is not exposed in target's config")
def __iter__(self):
return iter(self._config)

return self._config.get(name)
def __len__(self):
return len(self._config)

def __contains__(self, name):
name = self.get_config_name(name)
return name in self._config

def like(self, name):
regex = re.compile(name, re.I)
result = {}
for k, v in self._config.items():
if regex.search(k):
result[k] = v
return result
return {
k: v for k, v in self.items()
if regex.search(k)
}

def is_enabled(self, name):
return self.get(name) == 'y'
return self.get(name) is KernelConfigTristate.YES

def is_module(self, name):
return self.get(name) == 'm'
return self.get(name) is KernelConfigTristate.MODULE

def is_not_set(self, name):
return self.get(name) == 'n'
return self.get(name) is KernelConfigTristate.NO

def has(self, name):
return self.get(name) in ['m', 'y']
return self.is_enabled(name) or self.is_module(name)


class KernelConfig(object):
"""
Backward compatibility shim on top of :class:`TypedKernelConfig`.

This class does not provide a Mapping API and only return string values.
"""

def __init__(self, text):
# Expose typed_config as a non-private attribute, so that user code
# needing it can get it from any existing producer of KernelConfig.
self.typed_config = TypedKernelConfig.from_str(text)
# Expose the original text for backward compatibility
self.text = text

get_config_name = TypedKernelConfig.get_config_name
not_set_regex = TypedKernelConfig.not_set_regex

def iteritems(self):
for k, v in self.typed_config.items():
yield (k, self.typed_config._val_to_str(v))

items = iteritems

def get(self, name, strict=False):
if strict:
val = self.typed_config[name]
else:
val = self.typed_config.get(name)

return self.typed_config._val_to_str(val)

def like(self, name):
return {
k: self.typed_config._val_to_str(v)
for k, v in self.typed_config.like(name).items()
}

def is_enabled(self, name):
return self.typed_config.is_enabled(name)

def is_module(self, name):
return self.typed_config.is_module(name)

def is_not_set(self, name):
return self.typed_config.is_not_set(name)

def has(self, name):
return self.typed_config.has(name)


class LocalLinuxTarget(LinuxTarget):
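The KernelConfig rework above splits parsing into TypedKernelConfig, a Mapping whose values carry Kconfig types, and keeps KernelConfig as a string-returning compatibility shim. A short sketch of the typed behaviour using a hand-written config fragment; the option names are examples only, and the classes are assumed importable from devlib.target as defined in this diff:

from devlib.target import TypedKernelConfig, KernelConfig, KernelConfigTristate

text = '\n'.join([
    'CONFIG_FOO=y',
    'CONFIG_BAR=m',
    '# CONFIG_BAZ is not set',
    'CONFIG_QUX=42',
    'CONFIG_BASE=0x1000',
    'CONFIG_NAME="example"',
])

typed = TypedKernelConfig.from_str(text)

assert typed['FOO'] is KernelConfigTristate.YES   # truthy via __bool__
assert typed.is_module('BAR')
assert typed.is_not_set('BAZ')
assert typed['QUX'] == 42                         # plain int
assert str(typed['BASE']) == '0x1000'             # HexInt keeps hex formatting
assert typed['NAME'] == 'example'                 # enclosing quotes stripped
# typed['MISSING'] would raise KernelConfigKeyError (a KeyError/IndexError).

# The shim keeps the old string-based API on top of the typed mapping.
legacy = KernelConfig(text)
assert legacy.get('FOO') == 'y'
assert legacy.has('BAR')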
@@ -28,6 +28,9 @@ import tempfile
import subprocess
from collections import defaultdict
import pexpect
import xml.etree.ElementTree
import zipfile

from pipes import quote

from devlib.exception import TargetTransientError, TargetStableError, HostError
@@ -132,6 +135,7 @@ class ApkInfo(object):
version_regex = re.compile(r"name='(?P<name>[^']+)' versionCode='(?P<vcode>[^']+)' versionName='(?P<vname>[^']+)'")
name_regex = re.compile(r"name='(?P<name>[^']+)'")
permission_regex = re.compile(r"name='(?P<permission>[^']+)'")
activity_regex = re.compile(r'\s*A:\s*android:name\(0x\d+\)=".(?P<name>\w+)"')

def __init__(self, path=None):
self.path = path
@@ -147,15 +151,7 @@ class ApkInfo(object):
# pylint: disable=too-many-branches
def parse(self, apk_path):
_check_env()
command = [aapt, 'dump', 'badging', apk_path]
logger.debug(' '.join(command))
try:
output = subprocess.check_output(command, stderr=subprocess.STDOUT)
if sys.version_info[0] == 3:
output = output.decode(sys.stdout.encoding or 'utf-8', 'replace')
except subprocess.CalledProcessError as e:
raise HostError('Error parsing APK file {}. `aapt` says:\n{}'
.format(apk_path, e.output))
output = self._run([aapt, 'dump', 'badging', apk_path])
for line in output.split('\n'):
if line.startswith('application-label:'):
self.label = line.split(':')[1].strip().replace('\'', '')
@@ -188,6 +184,50 @@ class ApkInfo(object):
else:
pass # not interested

self._apk_path = apk_path
self._activities = None
self._methods = None

@property
def activities(self):
if self._activities is None:
cmd = [aapt, 'dump', 'xmltree', self._apk_path,
'AndroidManifest.xml']
matched_activities = self.activity_regex.finditer(self._run(cmd))
self._activities = [m.group('name') for m in matched_activities]
return self._activities

@property
def methods(self):
if self._methods is None:
with zipfile.ZipFile(self._apk_path, 'r') as z:
extracted = z.extract('classes.dex', tempfile.gettempdir())

dexdump = os.path.join(os.path.dirname(aapt), 'dexdump')
command = [dexdump, '-l', 'xml', extracted]
dump = self._run(command)

xml_tree = xml.etree.ElementTree.fromstring(dump)

package = next(i for i in xml_tree.iter('package')
if i.attrib['name'] == self.package)

self._methods = [(meth.attrib['name'], klass.attrib['name'])
for klass in package.iter('class')
for meth in klass.iter('method')]
return self._methods

def _run(self, command):
logger.debug(' '.join(command))
try:
output = subprocess.check_output(command, stderr=subprocess.STDOUT)
if sys.version_info[0] == 3:
output = output.decode(sys.stdout.encoding or 'utf-8', 'replace')
except subprocess.CalledProcessError as e:
raise HostError('Error while running "{}":\n{}'
.format(command, e.output))
return output


class AdbConnection(object):

@@ -268,7 +308,7 @@ class AdbConnection(object):
raise

def background(self, command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, as_root=False):
return adb_background_shell(self.device, command, stdout, stderr, as_root)
return adb_background_shell(self.device, command, stdout, stderr, as_root, adb_server=self.adb_server)

def close(self):
AdbConnection.active_connections[self.device] -= 1
@@ -439,12 +479,15 @@ def adb_shell(device, command, timeout=None, check_exit_code=False,
def adb_background_shell(device, command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
as_root=False):
as_root=False,
adb_server=None):
"""Runs the sepcified command in a subprocess, returning the the Popen object."""
_check_env()
if as_root:
command = 'echo {} | su'.format(quote(command))
device_string = ' -s {}'.format(device) if device else ''

device_string = ' -H {}'.format(adb_server) if adb_server else ''
device_string += ' -s {}'.format(device) if device else ''
full_command = 'adb{} shell {}'.format(device_string, quote(command))
logger.debug(full_command)
return subprocess.Popen(full_command, stdout=stdout, stderr=stderr, shell=True)
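The ApkInfo changes factor the aapt invocation into a shared _run() helper and add lazily evaluated activities and methods properties on top of it. A hedged usage sketch, assuming the Android SDK build tools (aapt, dexdump) are set up as devlib expects; the APK path is illustrative:

from devlib.utils.android import ApkInfo

apk = ApkInfo('/path/to/app.apk')      # parse() runs `aapt dump badging`
print(apk.package, apk.version_name)

# Computed on first access from `aapt dump xmltree ... AndroidManifest.xml`,
# then cached in self._activities.
for activity in apk.activities:
    print(activity)

# Computed on first access by extracting classes.dex and running dexdump;
# each entry is a (method_name, class_name) tuple.
for method, klass in apk.methods[:10]:
    print(klass, method)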
@@ -49,12 +49,12 @@ class FrameCollector(threading.Thread):
self.refresh_period = None
self.drop_threshold = None
self.unresponsive_count = 0
self.last_ready_time = None
self.last_ready_time = 0
self.exc = None
self.header = None

def run(self):
logger.debug('Surface flinger frame data collection started.')
logger.debug('Frame data collection started.')
try:
self.stop_signal.clear()
fd, self.temp_file = tempfile.mkstemp()
@@ -71,7 +71,7 @@ class FrameCollector(threading.Thread):
except Exception as e: # pylint: disable=W0703
logger.warning('Exception on collector thread: {}({})'.format(e.__class__.__name__, e))
self.exc = WorkerThreadError(self.name, sys.exc_info())
logger.debug('Surface flinger frame data collection stopped.')
logger.debug('Frame data collection stopped.')

def stop(self):
self.stop_signal.set()
@@ -133,7 +133,7 @@ class SurfaceFlingerFrameCollector(FrameCollector):
def collect_frames(self, wfh):
for activity in self.list():
if activity == self.view:
wfh.write(self.get_latencies(activity))
wfh.write(self.get_latencies(activity).encode('utf-8'))

def clear(self):
self.target.execute('dumpsys SurfaceFlinger --latency-clear ')
@@ -41,7 +41,8 @@ from pexpect import EOF, TIMEOUT, spawn
# pylint: disable=redefined-builtin,wrong-import-position
from devlib.exception import (HostError, TargetStableError, TargetNotRespondingError,
TimeoutError, TargetTransientError)
from devlib.utils.misc import which, strip_bash_colors, check_output, sanitize_cmd_template
from devlib.utils.misc import (which, strip_bash_colors, check_output,
sanitize_cmd_template, memoized)
from devlib.utils.types import boolean


@@ -62,7 +63,7 @@ def ssh_get_shell(host, username, password=None, keyfile=None, port=None, timeou
raise ValueError('keyfile may not be used with a telnet connection.')
conn = TelnetPxssh(original_prompt=original_prompt)
else: # ssh
conn = pxssh.pxssh()
conn = pxssh.pxssh(echo=False)

try:
if keyfile:
@@ -253,7 +254,7 @@ class SshConnection(object):
# simulate impatiently hitting ^C until command prompt appears
logger.debug('Sending ^C')
for _ in range(self.max_cancel_attempts):
self.conn.sendline(chr(3))
self._sendline(chr(3))
if self.conn.prompt(0.1):
return True
return False
@@ -267,25 +268,21 @@ class SshConnection(object):
command = self.sudo_cmd.format(quote(command))
if log:
logger.debug(command)
self.conn.sendline(command)
self._sendline(command)
if self.password:
index = self.conn.expect_exact([self.password_prompt, TIMEOUT], timeout=0.5)
if index == 0:
self.conn.sendline(self.password)
self._sendline(self.password)
else: # not as_root
if log:
logger.debug(command)
self.conn.sendline(command)
self._sendline(command)
timed_out = self._wait_for_prompt(timeout)
# the regex removes line breaks potential introduced when writing
# command to shell.
if sys.version_info[0] == 3:
output = process_backspaces(self.conn.before.decode(sys.stdout.encoding or 'utf-8', 'replace'))
else:
output = process_backspaces(self.conn.before)
output = re.sub(r'\r([^\n])', r'\1', output)
if '\r\n' in output: # strip the echoed command
output = output.split('\r\n', 1)[1]

if timed_out:
self.cancel_running_command()
raise TimeoutError(command, output)
@@ -321,6 +318,21 @@ class SshConnection(object):
except TimeoutError as e:
raise TimeoutError(command_redacted, e.output)

def _sendline(self, command):
# Workaround for https://github.com/pexpect/pexpect/issues/552
if len(command) == self._get_window_size()[1] - self._get_prompt_length():
command += ' '
self.conn.sendline(command)

@memoized
def _get_prompt_length(self):
self.conn.sendline()
self.conn.prompt()
return len(self.conn.after)

@memoized
def _get_window_size(self):
return self.conn.getwinsize()

class TelnetConnection(SshConnection):
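The new _sendline() wrapper works around pexpect issue #552, where a command whose length exactly fills the terminal line (window width minus the prompt length) gets corrupted; padding it by one space avoids the problematic length. The check itself is simple arithmetic; a standalone sketch with illustrative numbers standing in for the memoized pexpect queries:

def pad_for_pexpect(command, window_cols, prompt_len):
    # Append a space when the command would exactly fill the remaining line.
    if len(command) == window_cols - prompt_len:
        command += ' '
    return command

# e.g. an 80-column window and a 5-character prompt: a 75-character command
# hits the bad case and is padded to 76 characters before sendline().
cmd = 'x' * 75
assert len(pad_for_pexpect(cmd, 80, 5)) == 76
assert len(pad_for_pexpect('ls', 80, 5)) == 2   # short commands unchanged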
@@ -15,8 +15,23 @@

import os
import sys
from collections import namedtuple
from subprocess import Popen, PIPE


VersionTuple = namedtuple('Version', ['major', 'minor', 'revision', 'dev'])

version = VersionTuple(1, 1, 1, '')


def get_devlib_version():
version_string = '{}.{}.{}'.format(
version.major, version.minor, version.revision)
if version.dev:
version_string += '.{}'.format(version.dev)
return version_string


def get_commit():
p = Popen(['git', 'rev-parse', 'HEAD'], cwd=os.path.dirname(__file__),
stdout=PIPE, stderr=PIPE)
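With the version now held as a VersionTuple in devlib/utils/version.py, both devlib/__init__.py and setup.py derive the same string from a single place. The string construction reduces to a few formats; a self-contained check mirroring get_devlib_version():

from collections import namedtuple

VersionTuple = namedtuple('Version', ['major', 'minor', 'revision', 'dev'])

def format_version(v):
    s = '{}.{}.{}'.format(v.major, v.minor, v.revision)
    if v.dev:                     # e.g. 'dev1' for unreleased snapshots
        s += '.{}'.format(v.dev)
    return s

assert format_version(VersionTuple(1, 1, 1, '')) == '1.1.1'
assert format_version(VersionTuple(1, 1, 1, 'dev1')) == '1.1.1.dev1'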
@@ -346,7 +346,7 @@ Target
some sysfs entries silently failing to set the written value without
returning an error code.

.. method:: Target.read_tree_values(path, depth=1, dictcls=dict):
.. method:: Target.read_tree_values(path, depth=1, dictcls=dict, [, tar [, decode_unicode [, strip_null_char ]]]):

Read values of all sysfs (or similar) file nodes under ``path``, traversing
up to the maximum depth ``depth``.
@@ -358,9 +358,18 @@ Target
value is a dict-line object with a key for every entry under ``path``
mapping onto its value or further dict-like objects as appropriate.

Although the default behaviour should suit most users, it is possible to
encounter issues when reading binary files, or files with colons in their
name for example. In such cases, the ``tar`` parameter can be set to force a
full archive of the tree using tar, hence providing a more robust behaviour.
This can, however, slow down the read process significantly.

:param path: sysfs path to scan
:param depth: maximum depth to descend
:param dictcls: a dict-like type to be used for each level of the hierarchy.
:param tar: the files will be read using tar rather than grep
:param decode_unicode: decode the content of tar-ed files as utf-8
:param strip_null_char: remove null chars from utf-8 decoded files

.. method:: Target.read_tree_values_flat(path, depth=1):
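The documented tar path matters mostly for trees the grep-based reader struggles with, such as binary content or file names containing ':'. A hedged usage sketch; target is assumed to be an already connected devlib Target and the sysfs path is only an example:

# Default grep-based read: fast, fine for plain-text sysfs attributes.
hwmon_tree = target.read_tree_values('/sys/class/hwmon', depth=3)

# Tar-based read: the tree is copied, tarred, gzipped and base64-encoded on
# the target, then unpacked in memory on the host. Slower, but robust
# against binary files and awkward file names.
hwmon_tree = target.read_tree_values('/sys/class/hwmon', depth=3, tar=True)

# Both return nested dict-like objects keyed by path component, e.g.
# hwmon_tree['hwmon0']['name'] might hold the sensor name string.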
setup.py
@@ -41,23 +41,13 @@ except OSError:
pass


with open(os.path.join(devlib_dir, '__init__.py')) as fh:
# Extract the version by parsing the text of the file,
# as may not be able to load as a module yet.
for line in fh:
if '__version__' in line:
parts = line.split("'")
__version__ = parts[1]
break
else:
raise RuntimeError('Did not see __version__')

vh_path = os.path.join(devlib_dir, 'utils', 'version.py')
# can load this, as it does not have any devlib imports
version_helper = imp.load_source('version_helper', vh_path)
commit = version_helper.get_commit()
if commit:
__version__ = '{}+{}'.format(__version__, commit)
vh_path = os.path.join(devlib_dir, 'utils', 'version.py')
# can load this, as it does not have any devlib imports
version_helper = imp.load_source('version_helper', vh_path)
__version__ = version_helper.get_devlib_version()
commit = version_helper.get_commit()
if commit:
__version__ = '{}+{}'.format(__version__, commit)


packages = []