Mirror of https://github.com/ARM-software/devlib.git (synced 2025-01-31 02:00:45 +00:00)

commit 5cafd2ec4d (parent 0d63386343)

Add support for Python 3

Add support for running on Python 3 while maintaining Python 2 compatibility.
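The hunks below repeatedly apply a small set of Python 2/3 compatibility idioms: iterating dicts with .items()/.values() instead of .iteritems()/.itervalues(), range() and next() instead of xrange() and .next(), the `except E as e` syntax, wrapping map()/filter() in list() where a real list is required, and decoding subprocess output on Python 3. A minimal, hypothetical sketch of these idioms (illustration only, not part of the commit):

    import sys
    import subprocess

    data = {'a': 1, 'b': 2}

    # dict.iteritems()/.itervalues() are gone in Python 3; .items()/.values() work on both.
    for key, value in data.items():
        print(key, value)

    # xrange() and iterator.next() are Python 2 only; range() and next() work on both.
    numbers = iter(range(5))
    first = next(numbers)

    # map()/filter() return lazy iterators on Python 3, so wrap them when a list is needed.
    as_ints = list(map(int, ['1', '2', '3']))

    # Subprocess output is bytes on Python 3 and str on Python 2.
    output = subprocess.check_output(['echo', 'hello'])
    if sys.version_info[0] == 3:
        output = output.decode(sys.stdout.encoding or 'utf-8')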
@@ -34,7 +34,7 @@ class DerivedEnergyMeasurements(DerivedMeasurements):
if channel.site == 'timestamp':
use_timestamp = True
time_measurment = channel.measurement_type
- for site, kinds in channel_map.iteritems():
+ for site, kinds in channel_map.items():
if 'power' in kinds and not 'energy' in kinds:
should_calculate_energy.append(site)
@@ -1,5 +1,4 @@
from __future__ import division
- import csv
import os
import re

@@ -8,8 +7,11 @@ try:
except ImportError:
pd = None

+ from past.builtins import basestring

from devlib import DerivedMeasurements, DerivedMetric, MeasurementsCsv, InstrumentChannel
from devlib.exception import HostError
+ from devlib.utils.csvutil import csvwriter
from devlib.utils.rendering import gfxinfo_get_last_dump, VSYNC_INTERVAL
from devlib.utils.types import numeric

@@ -103,8 +105,7 @@ class DerivedGfxInfoStats(DerivedFpsStats):
fps = 0

csv_file = self._get_csv_file_name(measurements_csv.path)
- with open(csv_file, 'wb') as wfh:
- writer = csv.writer(wfh)
+ with csvwriter(csv_file) as writer:
writer.writerow(['fps'])
writer.writerows(per_frame_fps)
@@ -15,7 +15,11 @@
class DevlibError(Exception):
"""Base class for all Devlib exceptions."""
- pass
+ @property
+ def message(self):
+ if self.args:
+ return self.args[0]
+ return str(self)

class TargetError(DevlibError):

@@ -75,13 +79,13 @@ def get_traceback(exc=None):
object, or for the current exception exc is not specified.

"""
- import StringIO, traceback, sys
+ import io, traceback, sys
if exc is None:
exc = sys.exc_info()
if not exc:
return None
tb = exc[2]
- sio = StringIO.StringIO()
+ sio = io.BytesIO()
traceback.print_tb(tb, file=sio)
del tb # needs to be done explicitly see: http://docs.python.org/2/library/sys.html#sys.exc_info
return sio.getvalue()
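Background on the change above: Python 3 removed the implicit `message` attribute on exceptions, so the new property restores `exc.message` for callers that still use it. A small hypothetical usage sketch (not part of the commit):

    from devlib.exception import DevlibError

    try:
        raise DevlibError('something went wrong')
    except DevlibError as e:
        # e.message comes from the new property: args[0] when present,
        # otherwise the str() of the exception.
        print(e.message)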
@@ -13,10 +13,12 @@
# limitations under the License.
#
from __future__ import division
- import csv
import logging
import collections

+ from past.builtins import basestring

+ from devlib.utils.csvutil import csvreader
from devlib.utils.types import numeric
from devlib.utils.types import identifier

@@ -37,7 +39,7 @@ class MeasurementType(object):
self.category = category
self.conversions = {}
if conversions is not None:
- for key, value in conversions.iteritems():
+ for key, value in conversions.items():
if not callable(value):
msg = 'Converter must be callable; got {} "{}"'
raise ValueError(msg.format(type(value), value))

@@ -189,14 +191,13 @@ class MeasurementsCsv(object):
def iter_values(self):
for row in self._iter_rows():
- values = map(numeric, row)
+ values = list(map(numeric, row))
yield self.data_tuple(*values)

def _load_channels(self):
header = []
- with open(self.path, 'rb') as fh:
- reader = csv.reader(fh)
- header = reader.next()
+ with csvreader(self.path) as reader:
+ header = next(reader)

self.channels = []
for entry in header:

@@ -218,9 +219,8 @@ class MeasurementsCsv(object):
self.channels.append(chan)

def _iter_rows(self):
- with open(self.path, 'rb') as fh:
- reader = csv.reader(fh)
- reader.next() # headings
+ with csvreader(self.path) as reader:
+ next(reader) # headings
for row in reader:
yield row

@@ -252,7 +252,7 @@ class InstrumentChannel(object):
self.measurement_type = MEASUREMENT_TYPES[measurement_type]
except KeyError:
raise ValueError('Unknown measurement type: {}'.format(measurement_type))
- for atname, atvalue in attrs.iteritems():
+ for atname, atvalue in attrs.items():
setattr(self, atname, atvalue)

def __str__(self):

@@ -278,7 +278,7 @@ class Instrument(object):
# channel management

def list_channels(self):
- return self.channels.values()
+ return list(self.channels.values())

def get_channels(self, measure):
if hasattr(measure, 'name'):
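A note on the list() wrapping above (and in many hunks below): on Python 3, dict.values()/dict.items() return views and map() returns a lazy iterator, so code that indexes the result, iterates it more than once, or serialises it needs an explicit list. A hypothetical illustration (names made up):

    channels = {'sys_curr': 1, 'sys_volt': 2}

    values_view = channels.values()          # a view on Python 3, a list on Python 2
    values_list = list(channels.values())    # a real list on both

    numeric = map(float, ['1.5', '2.5'])     # lazy iterator on Python 3
    numeric_list = list(numeric)             # safe to index and re-iterate
    first = numeric_list[0]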
@@ -1,7 +1,7 @@
#pylint: disable=attribute-defined-outside-init
from __future__ import division
- import csv
import os
+ import sys
import time
import tempfile
from fcntl import fcntl, F_GETFL, F_SETFL

@@ -10,6 +10,7 @@ from subprocess import Popen, PIPE, STDOUT
from devlib import Instrument, CONTINUOUS, MeasurementsCsv
from devlib.exception import HostError
+ from devlib.utils.csvutil import csvreader, csvwriter
from devlib.utils.misc import which

OUTPUT_CAPTURE_FILE = 'acme-cape.csv'

@@ -83,7 +84,7 @@ class AcmeCapeInstrument(Instrument):
self.process.terminate()
timeout_secs = 10
output = ''
- for _ in xrange(timeout_secs):
+ for _ in range(timeout_secs):
if self.process.poll() is not None:
break
time.sleep(1)

@@ -95,7 +96,10 @@ class AcmeCapeInstrument(Instrument):
msg = 'Could not terminate iio-capture:\n{}'
raise HostError(msg.format(output))
if self.process.returncode != 15: # iio-capture exits with 15 when killed
- output += self.process.stdout.read()
+ if sys.version_info[0] == 3:
+ output += self.process.stdout.read().decode(sys.stdout.encoding)
+ else:
+ output += self.process.stdout.read()
self.logger.info('ACME instrument encountered an error, '
'you may want to try rebooting the ACME device:\n'
' ssh root@{} reboot'.format(self.host))

@@ -114,13 +118,11 @@ class AcmeCapeInstrument(Instrument):
active_channels = [c.label for c in self.active_channels]
active_indexes = [all_channels.index(ac) for ac in active_channels]

- with open(self.raw_data_file, 'rb') as fh:
- with open(outfile, 'wb') as wfh:
- writer = csv.writer(wfh)
+ with csvreader(self.raw_data_file, skipinitialspace=True) as reader:
+ with csvwriter(outfile) as writer:
writer.writerow(active_channels)

- reader = csv.reader(fh, skipinitialspace=True)
- header = reader.next()
+ header = next(reader)
ts_index = header.index('timestamp ms')
@@ -17,7 +17,6 @@
# pylint: disable=W0613,E1101,access-member-before-definition,attribute-defined-outside-init
from __future__ import division
import os
- import csv
import subprocess
import signal
import struct

@@ -28,6 +27,7 @@ import shutil
from devlib.instrument import Instrument, CONTINUOUS, MeasurementsCsv
from devlib.exception import HostError
+ from devlib.utils.csvutil import csvreader, csvwriter
from devlib.utils.misc import which

from devlib.utils.parse_aep import AepParser

@@ -108,10 +108,8 @@ class ArmEnergyProbeInstrument(Instrument):
active_channels = [c.label for c in self.active_channels]
active_indexes = [all_channels.index(ac) for ac in active_channels]

- with open(self.output_file, 'rb') as ifile:
- reader = csv.reader(ifile, delimiter=' ')
- with open(outfile, 'wb') as wfh:
- writer = csv.writer(wfh)
+ with csvreader(self.output_file, delimiter=' ') as reader:
+ with csvwriter(outfile) as writer:
for row in reader:
if skip_header == 1:
writer.writerow(active_channels)
@@ -1,19 +1,19 @@
import os
- import csv
import tempfile
from itertools import chain

from devlib.instrument import Instrument, MeasurementsCsv, CONTINUOUS
from devlib.exception import HostError
+ from devlib.utils.csvutil import csvwriter, create_reader
from devlib.utils.misc import unique

try:
from daqpower.client import execute_command, Status
from daqpower.config import DeviceConfiguration, ServerConfiguration
- except ImportError, e:
+ except ImportError as e:
execute_command, Status = None, None
DeviceConfiguration, ServerConfiguration, ConfigurationError = None, None, None
- import_error_mesg = e.message
+ import_error_mesg = e.args[0] if e.args else str(e)

@@ -37,7 +37,7 @@ class DaqInstrument(Instrument):
if execute_command is None:
raise HostError('Could not import "daqpower": {}'.format(import_error_mesg))
if labels is None:
- labels = ['PORT_{}'.format(i) for i in xrange(len(resistor_values))]
+ labels = ['PORT_{}'.format(i) for i in range(len(resistor_values))]
if len(labels) != len(resistor_values):
raise ValueError('"labels" and "resistor_values" must be of the same length')
self.server_config = ServerConfiguration(host=host,

@@ -97,8 +97,8 @@ class DaqInstrument(Instrument):
for site in active_sites:
try:
site_file = raw_file_map[site]
- fh = open(site_file, 'rb')
- site_readers[site] = csv.reader(fh)
+ reader, fh = create_reader(site_file)
+ site_readers[site] = reader
file_handles.append(fh)
except KeyError:
message = 'Could not get DAQ trace for {}; Obtained traces are in {}'

@@ -106,22 +106,21 @@
# The first row is the headers
channel_order = []
- for site, reader in site_readers.iteritems():
+ for site, reader in site_readers.items():
channel_order.extend(['{}_{}'.format(site, kind)
- for kind in reader.next()])
+ for kind in next(reader)])

def _read_next_rows():
parts = []
- for reader in site_readers.itervalues():
+ for reader in site_readers.values():
try:
- parts.extend(reader.next())
+ parts.extend(next(reader))
except StopIteration:
parts.extend([None, None])
return list(chain(parts))

- with open(outfile, 'wb') as wfh:
+ with csvwriter(outfile) as writer:
field_names = [c.label for c in self.active_channels]
- writer = csv.writer(wfh)
writer.writerow(field_names)
raw_row = _read_next_rows()
while any(raw_row):
@@ -14,14 +14,15 @@
#
from __future__ import division
import os
- import csv
import signal
import tempfile
import struct
import subprocess
+ import sys

from devlib.instrument import Instrument, CONTINUOUS, MeasurementsCsv
from devlib.exception import HostError
+ from devlib.utils.csvutil import csvwriter
from devlib.utils.misc import which

@@ -39,7 +40,7 @@ class EnergyProbeInstrument(Instrument):
self.labels = labels
else:
self.labels = ['PORT_{}'.format(i)
- for i in xrange(len(resistor_values))]
+ for i in range(len(resistor_values))]
self.device_entry = device_entry
self.caiman = which('caiman')
if self.caiman is None:

@@ -80,6 +81,9 @@ class EnergyProbeInstrument(Instrument):
self.process.poll()
if self.process.returncode is not None:
stdout, stderr = self.process.communicate()
+ if sys.version_info[0] == 3:
+ stdout = stdout.decode(sys.stdout.encoding)
+ stderr = stderr.decode(sys.stdout.encoding)
raise HostError(
'Energy Probe: Caiman exited unexpectedly with exit code {}.\n'
'stdout:\n{}\nstderr:\n{}'.format(self.process.returncode,

@@ -98,8 +102,7 @@ class EnergyProbeInstrument(Instrument):
self.logger.debug('Parsing raw data file: {}'.format(self.raw_data_file))
with open(self.raw_data_file, 'rb') as bfile:
- with open(outfile, 'wb') as wfh:
- writer = csv.writer(wfh)
+ with csvwriter(outfile) as writer:
writer.writerow(active_channels)
while True:
data = bfile.read(num_of_ports * self.bytes_per_sample)
@@ -13,12 +13,12 @@
# limitations under the License.

from __future__ import division
- import csv
import re

from devlib.platform.gem5 import Gem5SimulationPlatform
from devlib.instrument import Instrument, CONTINUOUS, MeasurementsCsv
from devlib.exception import TargetError, HostError
+ from devlib.utils.csvutil import csvwriter

class Gem5PowerInstrument(Instrument):

@@ -66,8 +66,7 @@ class Gem5PowerInstrument(Instrument):
def get_data(self, outfile):
active_sites = [c.site for c in self.active_channels]
- with open(outfile, 'wb') as wfh:
- writer = csv.writer(wfh)
+ with csvwriter(outfile) as writer:
writer.writerow([c.label for c in self.active_channels]) # headers
sites_to_match = [self.site_mapping.get(s, s) for s in active_sites]
for rec, rois in self.target.gem5stats.match_iter(sites_to_match,
@@ -1,13 +1,16 @@
import csv
import os
import signal
+ import sys
from subprocess import Popen, PIPE
from tempfile import NamedTemporaryFile

from devlib.instrument import Instrument, CONTINUOUS, MeasurementsCsv
from devlib.exception import HostError
from devlib.host import PACKAGE_BIN_DIRECTORY
+ from devlib.utils.csvutil import csvwriter
from devlib.utils.misc import which

INSTALL_INSTRUCTIONS="""
MonsoonInstrument requires the monsoon.py tool, available from AOSP:

@@ -18,6 +21,7 @@ parameter to MonsoonInstrument). `pip install python-gflags pyserial` to install
the dependencies.
"""

class MonsoonInstrument(Instrument):
"""Instrument for Monsoon Solutions power monitor

@@ -81,6 +85,9 @@ class MonsoonInstrument(Instrument):
process.poll()
if process.returncode is not None:
stdout, stderr = process.communicate()
+ if sys.version_info[0] == 3:
+ stdout = stdout.encode(sys.stdout.encoding)
+ stderr = stderr.encode(sys.stdout.encoding)
raise HostError(
'Monsoon script exited unexpectedly with exit code {}.\n'
'stdout:\n{}\nstderr:\n{}'.format(process.returncode,

@@ -104,8 +111,7 @@ class MonsoonInstrument(Instrument):
stdout, stderr = self.output

- with open(outfile, 'wb') as f:
- writer = csv.writer(f)
+ with csvwriter(outfile) as writer:
active_sites = [c.site for c in self.active_channels]

# Write column headers
@@ -1,14 +1,15 @@
import os
import re
import csv
import tempfile
from datetime import datetime
from collections import defaultdict
- from itertools import izip_longest
+ from future.moves.itertools import zip_longest

from devlib.instrument import Instrument, MeasurementsCsv, CONTINUOUS
from devlib.exception import TargetError, HostError
from devlib.utils.android import ApkInfo
+ from devlib.utils.csvutil import csvwriter

THIS_DIR = os.path.dirname(__file__)

@@ -46,10 +47,9 @@ def netstats_to_measurements(netstats):
def write_measurements_csv(measurements, filepath):
headers = sorted(measurements.keys())
columns = [measurements[h] for h in headers]
- with open(filepath, 'wb') as wfh:
- writer = csv.writer(wfh)
+ with csvwriter(filepath) as writer:
writer.writerow(headers)
- writer.writerows(izip_longest(*columns))
+ writer.writerows(zip_longest(*columns))

class NetstatsInstrument(Instrument):
@@ -15,6 +15,8 @@
import logging
from inspect import isclass

+ from past.builtins import basestring

from devlib.utils.misc import walk_modules
from devlib.utils.types import identifier

@@ -75,7 +77,7 @@ class BootModule(Module): # pylint: disable=R0921
raise NotImplementedError()

def update(self, **kwargs):
- for name, value in kwargs.iteritems():
+ for name, value in kwargs.items():
if not hasattr(self, name):
raise ValueError('Unknown parameter "{}" for {}'.format(name, self.name))
self.logger.debug('Updating "{}" to "{}"'.format(name, value))

@@ -117,6 +119,6 @@ def register_module(mod):
def __load_cache():
for module in walk_modules('devlib.module'):
- for obj in vars(module).itervalues():
+ for obj in vars(module).values():
if isclass(obj) and issubclass(obj, Module) and obj.name:
register_module(obj)
@@ -63,7 +63,7 @@ class FastbootFlashModule(FlashModule):
image_bundle = expand_path(image_bundle)
to_flash = self._bundle_to_images(image_bundle)
to_flash = merge_dicts(to_flash, images or {}, should_normalize=False)
- for partition, image_path in to_flash.iteritems():
+ for partition, image_path in to_flash.items():
self.logger.debug('flashing {}'.format(partition))
self._flash_image(self.target, partition, expand_path(image_path))
fastboot_command('reboot')

@@ -325,7 +325,7 @@ class CGroup(object):
def get_tasks(self):
task_ids = self.target.read_value(self.tasks_file).split()
logging.debug('Tasks: %s', task_ids)
- return map(int, task_ids)
+ return list(map(int, task_ids))

def add_task(self, tid):
self.target.write_value(self.tasks_file, tid, verify=False)
@@ -150,7 +150,7 @@ class CpufreqModule(Module):
if governor is None:
governor = self.get_governor(cpu)
valid_tunables = self.list_governor_tunables(cpu)
- for tunable, value in kwargs.iteritems():
+ for tunable, value in kwargs.items():
if tunable in valid_tunables:
path = '/sys/devices/system/cpu/{}/cpufreq/{}/{}'.format(cpu, governor, tunable)
try:

@@ -176,7 +176,7 @@ class CpufreqModule(Module):
try:
cmd = 'cat /sys/devices/system/cpu/{}/cpufreq/scaling_available_frequencies'.format(cpu)
output = self.target.execute(cmd)
- available_frequencies = map(int, output.strip().split()) # pylint: disable=E1103
+ available_frequencies = list(map(int, output.strip().split())) # pylint: disable=E1103
except TargetError:
# On some devices scaling_frequencies is not generated.
# http://adrynalyne-teachtofish.blogspot.co.uk/2011/11/how-to-enable-scalingavailablefrequenci.html

@@ -190,7 +190,7 @@ class CpufreqModule(Module):
return []
raise

- available_frequencies = map(int, reversed([f for f, _ in zip(out_iter, out_iter)]))
+ available_frequencies = list(map(int, reversed([f for f, _ in zip(out_iter, out_iter)])))
return available_frequencies

@memoized

@@ -478,7 +478,7 @@ class CpufreqModule(Module):
"""
cpus = set(range(self.target.number_of_cpus))
while cpus:
- cpu = iter(cpus).next()
+ cpu = next(iter(cpus))
domain = self.target.cpufreq.get_related_cpus(cpu)
yield domain
cpus = cpus.difference(domain)
@@ -13,6 +13,8 @@
# limitations under the License.
#
# pylint: disable=attribute-defined-outside-init
+ from past.builtins import basestring

from devlib.module import Module
from devlib.utils.misc import memoized
from devlib.utils.types import integer, boolean
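The `from past.builtins import basestring` imports added here and in several other modules come from the `future` package (added as a dependency in setup.py below); they keep isinstance checks against basestring working on Python 3, where basestring no longer exists. A hypothetical illustration:

    from past.builtins import basestring  # provided by the 'future' package

    def describe(value):
        # Works unchanged on Python 2 (str/unicode) and Python 3 (str).
        if isinstance(value, basestring):
            return 'string: {}'.format(value)
        return 'other: {!r}'.format(value)

    describe('governor')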
@@ -75,7 +75,7 @@ class Gem5StatsModule(Module):
raise KeyError('ROI label {} already used'.format(label))
if len(self.rois) >= GEM5STATS_ROI_NUMBER:
raise RuntimeError('Too many ROIs reserved')
- all_rois = set(xrange(GEM5STATS_ROI_NUMBER))
+ all_rois = set(range(GEM5STATS_ROI_NUMBER))
used_rois = set([roi.number for roi in self.rois.values()])
avail_rois = all_rois - used_rois
self.rois[label] = Gem5ROI(list(avail_rois)[0], self.target)

@@ -223,7 +223,7 @@ class Gem5StatsModule(Module):
'''
with open(self._stats_file_path, 'r') as stats_file:
# _goto_dump reach EOF and returns the total number of dumps + 1
- return self._goto_dump(stats_file, sys.maxint)
+ return self._goto_dump(stats_file, sys.maxsize)

def _goto_dump(self, stats_file, target_dump):
if target_dump < 0:

@@ -243,7 +243,7 @@ class Gem5StatsModule(Module):
dump_iterator = iter_statistics_dump(stats_file)
while curr_dump < target_dump:
try:
- dump = dump_iterator.next()
+ dump = next(dump_iterator)
except StopIteration:
break
# End of passed dump is beginning og next one

@@ -26,7 +26,7 @@ class GpufreqModule(Module):
def __init__(self, target):
super(GpufreqModule, self).__init__(target)
frequencies_str = self.target.read_value("/sys/kernel/gpu/gpu_freq_table")
- self.frequencies = map(int, frequencies_str.split(" "))
+ self.frequencies = list(map(int, frequencies_str.split(" ")))
self.frequencies.sort()
self.governors = self.target.read_value("/sys/kernel/gpu/gpu_available_governor").split(" ")
@@ -75,8 +75,8 @@ class HwmonDevice(object):
@property
def sensors(self):
all_sensors = []
- for sensors_of_kind in self._sensors.itervalues():
- all_sensors.extend(sensors_of_kind.values())
+ for sensors_of_kind in self._sensors.values():
+ all_sensors.extend(list(sensors_of_kind.values()))
return all_sensors

def __init__(self, target, path, name, fields):

@@ -100,7 +100,7 @@ class HwmonDevice(object):
def get(self, kind, number=None):
if number is None:
- return [s for _, s in sorted(self._sensors[kind].iteritems(),
+ return [s for _, s in sorted(self._sensors[kind].items(),
key=lambda x: x[0])]
else:
return self._sensors[kind].get(number)

@@ -139,7 +139,7 @@ class HwmonModule(Module):
def scan(self):
values_tree = self.target.read_tree_values(self.root, depth=3)
- for entry_id, fields in values_tree.iteritems():
+ for entry_id, fields in values_tree.items():
path = self.target.path.join(self.root, entry_id)
name = fields.pop('name', None)
if name is None:

@@ -100,5 +100,5 @@ class ThermalModule(Module):
def disable_all_zones(self):
"""Disables all the thermal zones in the target"""
- for zone in self.zones.itervalues():
+ for zone in self.zones.values():
zone.set_enabled(False)
@@ -251,7 +251,7 @@ class VexpressUBoot(VexpressBootModule):
menu = UbootMenu(tty)
self.logger.debug('Waiting for U-Boot prompt...')
menu.open(timeout=120)
- for var, value in self.env.iteritems():
+ for var, value in self.env.items():
menu.setenv(var, value)
menu.boot()

@@ -338,7 +338,7 @@ class VersatileExpressFlashModule(FlashModule):
if images:
self._overlay_images(images)
os.system('sync')
- except (IOError, OSError), e:
+ except (IOError, OSError) as e:
msg = 'Could not deploy images to {}; got: {}'
raise TargetError(msg.format(self.vemsd_mount, e))
self.target.boot()

@@ -352,7 +352,7 @@ class VersatileExpressFlashModule(FlashModule):
tar.extractall(self.vemsd_mount)

def _overlay_images(self, images):
- for dest, src in images.iteritems():
+ for dest, src in images.items():
dest = os.path.join(self.vemsd_mount, dest)
self.logger.debug('Copying {} to {}'.format(src, dest))
shutil.copy(src, dest)

@@ -379,7 +379,7 @@ def wait_for_vemsd(vemsd_mount, tty, mcc_prompt=DEFAULT_MCC_PROMPT, short_delay=
path = os.path.join(vemsd_mount, 'config.txt')
if os.path.exists(path):
return
- for _ in xrange(attempts):
+ for _ in range(attempts):
tty.sendline('') # clear any garbage
tty.expect(mcc_prompt, timeout=short_delay)
tty.sendline('usb_on')
@@ -15,7 +15,6 @@
from __future__ import division
import os
import tempfile
- import csv
import time
import pexpect

@@ -23,6 +22,7 @@ from devlib.platform import Platform
from devlib.instrument import Instrument, InstrumentChannel, MeasurementsCsv, Measurement, CONTINUOUS, INSTANTANEOUS
from devlib.exception import TargetError, HostError
from devlib.host import PACKAGE_BIN_DIRECTORY
+ from devlib.utils.csvutil import csvreader, csvwriter
from devlib.utils.serial_port import open_serial_connection

@@ -267,9 +267,8 @@ class JunoEnergyInstrument(Instrument):
self.target.pull(self.on_target_file, temp_file)
self.target.remove(self.on_target_file)

- with open(temp_file, 'rb') as fh:
- reader = csv.reader(fh)
- headings = reader.next()
+ with csvreader(temp_file) as reader:
+ headings = next(reader)

# Figure out which columns from the collected csv we actually want
select_columns = []

@@ -279,10 +278,9 @@ class JunoEnergyInstrument(Instrument):
except ValueError:
raise HostError('Channel "{}" is not in {}'.format(chan.name, temp_file))

- with open(output_file, 'wb') as wfh:
+ with csvwriter(output_file) as writer:
write_headings = ['{}_{}'.format(c.site, c.kind)
for c in self.active_channels]
- writer = csv.writer(wfh)
writer.writerow(write_headings)
for row in reader:
write_row = [row[c] for c in select_columns]

@@ -293,11 +291,11 @@ class JunoEnergyInstrument(Instrument):
def take_measurement(self):
result = []
output = self.target.execute(self.command2).split()
- reader=csv.reader(output)
- headings=reader.next()
- values = reader.next()
- for chan in self.active_channels:
- value = values[headings.index(chan.name)]
- result.append(Measurement(value, chan))
+ with csvreader(output) as reader:
+ headings=next(reader)
+ values = next(reader)
+ for chan in self.active_channels:
+ value = values[headings.index(chan.name)]
+ result.append(Measurement(value, chan))
return result
@@ -63,13 +63,12 @@ class Gem5SimulationPlatform(Platform):
# Find the first one that does not exist. Ensures that we do not re-use
# the directory used by someone else.
- for i in xrange(sys.maxint):
+ i = 0
+ directory = os.path.join(self.gem5_interact_dir, "wa_{}".format(i))
+ while os.path.exists(directory):
+ i += 1
directory = os.path.join(self.gem5_interact_dir, "wa_{}".format(i))
- try:
- os.stat(directory)
- continue
- except OSError:
- break

self.gem5_interact_dir = directory
self.logger.debug("Using {} as the temporary directory."
.format(self.gem5_interact_dir))
@@ -4,6 +4,7 @@ import time
import logging
import posixpath
import subprocess
+ import sys
import tarfile
import tempfile
import threading

@@ -233,7 +234,7 @@ class Target(object):
self._install_module(get_module('bl'))

def disconnect(self):
- for conn in self._connections.itervalues():
+ for conn in self._connections.values():
conn.close()
self._connections = {}

@@ -514,8 +515,8 @@ class Target(object):
def tempfile(self, prefix='', suffix=''):
names = tempfile._get_candidate_names() # pylint: disable=W0212
- for _ in xrange(tempfile.TMP_MAX):
- name = names.next()
+ for _ in range(tempfile.TMP_MAX):
+ name = next(names)
path = self.get_workpath(prefix + name + suffix)
if not self.file_exists(path):
return path

@@ -542,7 +543,7 @@ class Target(object):
def list_offline_cpus(self):
online = self.list_online_cpus()
- return [c for c in xrange(self.number_of_cpus)
+ return [c for c in range(self.number_of_cpus)
if c not in online]

def getenv(self, variable):

@@ -716,7 +717,7 @@ class Target(object):
def _update_modules(self, stage):
for mod in self.modules:
if isinstance(mod, dict):
- mod, params = mod.items()[0]
+ mod, params = list(mod.items())[0]
else:
params = {}
mod = get_module(mod)

@@ -790,7 +791,7 @@ class LinuxTarget(Target):
@memoized
def abi(self):
value = self.execute('uname -m').strip()
- for abi, architectures in ABI_MAP.iteritems():
+ for abi, architectures in ABI_MAP.items():
if value in architectures:
result = abi
break

@@ -858,27 +859,27 @@ class LinuxTarget(Target):
result = self.execute('ps -C {} -o pid'.format(process_name), # NOQA
check_exit_code=False).strip().split()
if len(result) >= 2: # at least one row besides the header
- return map(int, result[1:])
+ return list(map(int, result[1:]))
else:
return []

def ps(self, **kwargs):
command = 'ps -eo user,pid,ppid,vsize,rss,wchan,pcpu,state,fname'
lines = iter(convert_new_lines(self.execute(command)).split('\n'))
- lines.next() # header
+ next(lines) # header

result = []
for line in lines:
parts = re.split(r'\s+', line, maxsplit=8)
if parts and parts != ['']:
- result.append(PsEntry(*(parts[0:1] + map(int, parts[1:5]) + parts[5:])))
+ result.append(PsEntry(*(parts[0:1] + list(map(int, parts[1:5])) + parts[5:])))

if not kwargs:
return result
else:
filtered_result = []
for entry in result:
- if all(getattr(entry, k) == v for k, v in kwargs.iteritems()):
+ if all(getattr(entry, k) == v for k, v in kwargs.items()):
filtered_result.append(entry)
return filtered_result

@@ -952,7 +953,7 @@ class AndroidTarget(Target):
mapped_result = []
for supported_abi in result:
- for abi, architectures in ABI_MAP.iteritems():
+ for abi, architectures in ABI_MAP.items():
found = False
if supported_abi in architectures and abi not in mapped_result:
mapped_result.append(abi)

@@ -1125,7 +1126,7 @@
def ps(self, **kwargs):
lines = iter(convert_new_lines(self.execute('ps')).split('\n'))
- lines.next() # header
+ next(lines) # header
result = []
for line in lines:
parts = line.split(None, 8)

@@ -1134,13 +1135,13 @@
if len(parts) == 8:
# wchan was blank; insert an empty field where it should be.
parts.insert(5, '')
- result.append(PsEntry(*(parts[0:1] + map(int, parts[1:5]) + parts[5:])))
+ result.append(PsEntry(*(parts[0:1] + list(map(int, parts[1:5])) + parts[5:])))
if not kwargs:
return result
else:
filtered_result = []
for entry in result:
- if all(getattr(entry, k) == v for k, v in kwargs.iteritems()):
+ if all(getattr(entry, k) == v for k, v in kwargs.items()):
filtered_result.append(entry)
return filtered_result

@@ -1188,7 +1189,10 @@ class AndroidTarget(Target):
parsed_xml = xml.dom.minidom.parse(filepath)
with open(filepath, 'w') as f:
- f.write(parsed_xml.toprettyxml().encode('utf-8'))
+ if sys.version_info[0] == 3:
+ f.write(parsed_xml.toprettyxml())
+ else:
+ f.write(parsed_xml.toprettyxml().encode('utf-8'))

def is_installed(self, name):
return super(AndroidTarget, self).is_installed(name) or self.package_is_installed(name)

@@ -1626,7 +1630,7 @@
return name

def iteritems(self):
- return self._config.iteritems()
+ return iter(self._config.items())

def __init__(self, text):
self.text = text

@@ -1647,7 +1651,7 @@
def like(self, name):
regex = re.compile(name, re.I)
result = {}
- for k, v in self._config.iteritems():
+ for k, v in self._config.items():
if regex.search(k):
result[k] = v
return result

@@ -1707,7 +1711,7 @@ def _get_part_name(section):
implementer = section.get('CPU implementer', '0x0')
part = section['CPU part']
variant = section.get('CPU variant', '0x0')
- name = get_cpu_name(*map(integer, [implementer, part, variant]))
+ name = get_cpu_name(*list(map(integer, [implementer, part, variant])))
if name is None:
name = '{}/{}/{}'.format(implementer, part, variant)
return name

@@ -1730,13 +1734,13 @@ def _build_path_tree(path_map, basepath, sep=os.path.sep, dictcls=dict):
process_node(node[parts[0]], parts[1], value)

relpath_map = {os.path.relpath(p, basepath): v
- for p, v in path_map.iteritems()}
+ for p, v in path_map.items()}

- if len(relpath_map) == 1 and relpath_map.keys()[0] == '.':
- result = relpath_map.values()[0]
+ if len(relpath_map) == 1 and list(relpath_map.keys())[0] == '.':
+ result = list(relpath_map.values())[0]
else:
result = dictcls()
- for path, value in relpath_map.iteritems():
+ for path, value in relpath_map.items():
process_node(result, path, value)

return result
@@ -19,6 +19,7 @@ import json
import time
import re
import subprocess
+ import sys

from devlib.trace import TraceCollector
from devlib.host import PACKAGE_BIN_DIRECTORY

@@ -121,7 +122,7 @@ class FtraceCollector(TraceCollector):
_event = '*' + event
event_re = re.compile(_event.replace('*', '.*'))
# Select events matching the required ones
- if len(filter(event_re.match, available_events)) == 0:
+ if len(list(filter(event_re.match, available_events))) == 0:
message = 'Event [{}] not available for tracing'.format(event)
if strict:
raise TargetError(message)

@@ -276,6 +277,8 @@ class FtraceCollector(TraceCollector):
self.logger.debug(command)
process = subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
_, error = process.communicate()
+ if sys.version_info[0] == 3:
+ error = error.decode(sys.stdout.encoding)
if process.returncode:
raise TargetError('trace-cmd returned non-zero exit code {}'.format(process.returncode))
if error:
@@ -27,7 +27,8 @@ import logging
import re
import threading
import tempfile
- import Queue
+ import queue
+ import sys
from collections import defaultdict

from devlib.exception import TargetError, HostError, DevlibError

@@ -88,7 +89,7 @@ class AndroidProperties(object):
self._properties = dict(re.findall(r'\[(.*?)\]:\s+\[(.*?)\]', text))

def iteritems(self):
- return self._properties.iteritems()
+ return iter(self._properties.items())

def __iter__(self):
return iter(self._properties)

@@ -140,6 +141,8 @@ class ApkInfo(object):
logger.debug(' '.join(command))
try:
output = subprocess.check_output(command, stderr=subprocess.STDOUT)
+ if sys.version_info[0] == 3:
+ output = output.decode(sys.stdout.encoding)
except subprocess.CalledProcessError as e:
raise HostError('Error parsing APK file {}. `aapt` says:\n{}'
.format(apk_path, e.output))

@@ -160,7 +163,7 @@ class ApkInfo(object):
mapped_abis = []
for apk_abi in apk_abis:
found = False
- for abi, architectures in ABI_MAP.iteritems():
+ for abi, architectures in ABI_MAP.items():
if apk_abi in architectures:
mapped_abis.append(abi)
found = True
devlib/utils/csvutil.py (new file, 85 lines)

@@ -0,0 +1,85 @@
'''
Due to the change in the nature of "binary mode" when opening files in
Python 3, the way files need to be opened for ``csv.reader`` and ``csv.writer``
is different from Python 2.

The functions in this module are intended to hide these differences allowing
the rest of the code to create csv readers/writers without worrying about which
Python version it is running under.

First up are ``csvwriter`` and ``csvreader`` context mangers that handle the
opening and closing of the underlying file. These are intended to replace the
most common usage pattern

.. code-block:: python

    with open(filepath, 'wb') as wfh:  # or open(filepath, 'w', newline='') in Python 3
        writer = csv.writer(wfh)
        writer.writerows(data)


with

.. code-block:: python

    with csvwriter(filepath) as writer:
        writer.writerows(data)


``csvreader`` works in an analogous way. ``csvreader`` and ``writer`` can take
additional arguments which will be passed directly to the
``csv.reader``/``csv.writer`` calls.

In some cases, it is desirable not to use a context manager (e.g. if the
reader/writer is intended to be returned from the function that creates it. For
such cases, alternative functions, ``create_reader`` and ``create_writer``,
exit. These return a two-tuple, with the created reader/writer as the first
element, and the corresponding ``FileObject`` as the second. It is the
responsibility of the calling code to ensure that the file is closed properly.

'''
import csv
import sys
from contextlib import contextmanager


@contextmanager
def csvwriter(filepath, *args, **kwargs):
    if sys.version_info[0] == 3:
        wfh = open(filepath, 'w', newline='')
    else:
        wfh = open(filepath, 'wb')

    try:
        yield csv.writer(wfh, *args, **kwargs)
    finally:
        wfh.close()


@contextmanager
def csvreader(filepath, *args, **kwargs):
    if sys.version_info[0] == 3:
        fh = open(filepath, 'r', newline='')
    else:
        fh = open(filepath, 'rb')

    try:
        yield csv.reader(fh, *args, **kwargs)
    finally:
        fh.close()


def create_writer(filepath, *args, **kwargs):
    if sys.version_info[0] == 3:
        wfh = open(filepath, 'w', newline='')
    else:
        wfh = open(filepath, 'wb')
    return csv.writer(wfh, *args, **kwargs), wfh


def create_reader(filepath, *args, **kwargs):
    if sys.version_info[0] == 3:
        fh = open(filepath, 'r', newline='')
    else:
        fh = open(filepath, 'rb')
    return csv.reader(fh, *args, **kwargs), fh
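A short, hypothetical usage sketch of the new csvutil helpers defined above (the file path is made up for illustration):

    from devlib.utils.csvutil import csvwriter, csvreader, create_reader

    rows = [['site', 'power'], ['a53', '1.2'], ['a72', '2.3']]

    # Context-manager style: the file is opened correctly for the running
    # Python version and closed on exit.
    with csvwriter('/tmp/example.csv') as writer:
        writer.writerows(rows)

    with csvreader('/tmp/example.csv') as reader:
        header = next(reader)
        data = list(reader)

    # Non-context-manager style: the caller owns the file handle.
    reader, fh = create_reader('/tmp/example.csv')
    try:
        for row in reader:
            pass
    finally:
        fh.close()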
@@ -45,7 +45,7 @@ def iter_statistics_dump(stats_file):
k = res.group("key")
vtext = res.group("value")
try:
- v = map(numeric, vtext.split())
+ v = list(map(numeric, vtext.split()))
cur_dump[k] = v[0] if len(v)==1 else set(v)
except ValueError:
msg = 'Found non-numeric entry in gem5 stats ({}: {})'
@@ -36,8 +36,10 @@ from itertools import groupby
from functools import partial

import wrapt
+ from past.builtins import basestring

from devlib.exception import HostError, TimeoutError
+ from functools import reduce

# ABI --> architectures list

@@ -176,6 +178,9 @@ def check_output(command, timeout=None, ignore=None, inputtext=None, **kwargs):
try:
output, error = process.communicate(inputtext)
+ if sys.version_info[0] == 3:
+ output = output.decode(sys.stdout.encoding)
+ error = error.decode(sys.stderr.encoding)
finally:
if timeout:
timer.cancel()

@@ -185,7 +190,7 @@
if retcode == -9: # killed, assume due to timeout callback
raise TimeoutError(command, output='\n'.join([output, error]))
elif ignore != 'all' and retcode not in ignore:
- raise subprocess.CalledProcessError(retcode, command, output='\n'.join([output, error]))
+ raise subprocess.CalledProcessError(retcode, command, output='\n'.join([str(output), str(error)]))
return output, error

@@ -257,8 +262,8 @@ def _merge_two_dicts(base, other, list_duplicates='all', match_types=False, # p
dict_type=dict, should_normalize=True, should_merge_lists=True):
"""Merge dicts normalizing their keys."""
merged = dict_type()
- base_keys = base.keys()
- other_keys = other.keys()
+ base_keys = list(base.keys())
+ other_keys = list(other.keys())
norm = normalize if should_normalize else lambda x, y: x

base_only = []

@@ -390,7 +395,7 @@ def normalize(value, dict_type=dict):
no surrounding whitespace, underscore-delimited strings."""
if isinstance(value, dict):
normalized = dict_type()
- for k, v in value.iteritems():
+ for k, v in value.items():
key = k.strip().lower().replace(' ', '_')
normalized[key] = normalize(v, dict_type)
return normalized

@@ -431,7 +436,7 @@ def getch(count=1):
"""Read ``count`` characters from standard input."""
if os.name == 'nt':
import msvcrt # pylint: disable=F0401
- return ''.join([msvcrt.getch() for _ in xrange(count)])
+ return ''.join([msvcrt.getch() for _ in range(count)])
else: # assume Unix
import tty # NOQA
import termios # NOQA

@@ -509,7 +514,7 @@ def strip_bash_colors(text):
def get_random_string(length):
"""Returns a random ASCII string of the specified length)."""
- return ''.join(random.choice(string.ascii_letters + string.digits) for _ in xrange(length))
+ return ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(length))

class LoadSyntaxError(Exception):

@@ -526,7 +531,10 @@ class LoadSyntaxError(Exception):
RAND_MOD_NAME_LEN = 30
BAD_CHARS = string.punctuation + string.whitespace
- TRANS_TABLE = string.maketrans(BAD_CHARS, '_' * len(BAD_CHARS))
+ if sys.version_info[0] == 3:
+ TRANS_TABLE = str.maketrans(BAD_CHARS, '_' * len(BAD_CHARS))
+ else:
+ TRANS_TABLE = string.maketrans(BAD_CHARS, '_' * len(BAD_CHARS))

def to_identifier(text):

@@ -555,8 +563,8 @@ def ranges_to_list(ranges_string):
values = []
for rg in ranges_string.split(','):
if '-' in rg:
- first, last = map(int, rg.split('-'))
- values.extend(xrange(first, last + 1))
+ first, last = list(map(int, rg.split('-')))
+ values.extend(range(first, last + 1))
else:
values.append(int(rg))
return values

@@ -565,8 +573,8 @@ def ranges_to_list(ranges_string):
def list_to_ranges(values):
"""Converts a list, e.g ``[0,2,3,4]``, into a sysfs-style ranges string, e.g. ``"0,2-4"``"""
range_groups = []
- for _, g in groupby(enumerate(values), lambda (i, x): i - x):
- range_groups.append(map(itemgetter(1), g))
+ for _, g in groupby(enumerate(values), lambda i_x: i_x[0] - i_x[1]):
+ range_groups.append(list(map(itemgetter(1), g)))
range_strings = []
for group in range_groups:
if len(group) == 1:

@@ -589,7 +597,7 @@ def mask_to_list(mask):
"""Converts the specfied integer bitmask into a list of
indexes of bits that are set in the mask."""
size = len(bin(mask)) - 2 # because of "0b"
- return [size - i - 1 for i in xrange(size)
+ return [size - i - 1 for i in range(size)
if mask & (1 << size - i - 1)]

@@ -634,7 +642,7 @@ def memoized(wrapped, instance, args, kwargs):
def memoize_wrapper(*args, **kwargs):
id_string = func_id + ','.join([__get_memo_id(a) for a in args])
id_string += ','.join('{}={}'.format(k, v)
- for k, v in kwargs.iteritems())
+ for k, v in kwargs.items())
if id_string not in __memo_cache:
__memo_cache[id_string] = wrapped(*args, **kwargs)
return __memo_cache[id_string]
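Two of the less common incompatibilities handled in this file: string.maketrans() became the str.maketrans() static method on Python 3, and tuple parameter unpacking in lambdas (`lambda (i, x): ...`) was removed. Hypothetical illustrations of the replacement patterns used above:

    import sys
    import string

    BAD_CHARS = string.punctuation + string.whitespace
    if sys.version_info[0] == 3:
        table = str.maketrans(BAD_CHARS, '_' * len(BAD_CHARS))
    else:
        table = string.maketrans(BAD_CHARS, '_' * len(BAD_CHARS))

    # lambda (i, x): i - x   is a SyntaxError on Python 3;
    # index into the tuple instead.
    pairs = list(enumerate([0, 2, 3, 4]))
    deltas = [i_x[0] - i_x[1] for i_x in pairs]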
@@ -67,7 +67,7 @@ class AepParser(object):
virtual = {}

# Create an entry for each virtual parent
- for supply in topo.iterkeys():
+ for supply in topo.keys():
index = topo[supply]['index']
# Don't care of hidden columns
if hide[index]:

@@ -85,11 +85,11 @@ class AepParser(object):
# Remove parent with 1 child as they don't give more information than their
# child
- for supply in virtual.keys():
+ for supply in list(virtual.keys()):
if len(virtual[supply]) == 1:
del virtual[supply];

- for supply in virtual.keys():
+ for supply in list(virtual.keys()):
# Add label, hide and duplicate columns for virtual domains
hide.append(0)
duplicate.append(1)

@@ -166,9 +166,9 @@ class AepParser(object):
@staticmethod
def add_virtual_data(data, virtual):
# write virtual domain
- for parent in virtual.iterkeys():
+ for parent in virtual.keys():
power = 0
- for child in virtual[parent].values():
+ for child in list(virtual[parent].values()):
try:
power += data[child]
except IndexError:

@@ -440,7 +440,7 @@ class AepParser(object):
# Create an entry for each virtual parent
- for supply in topo.iterkeys():
+ for supply in topo.keys():
# Parent is in the topology
parent = topo[supply]['parent']
if parent in topo:

@@ -454,15 +454,15 @@ class AepParser(object):
# Remove parent with 1 child as they don't give more information than their
# child
- for supply in virtual.keys():
+ for supply in list(virtual.keys()):
if len(virtual[supply]) == 1:
del virtual[supply];

topo_list = ['']*(1+len(topo)+len(virtual))
topo_list[0] = 'time'
- for chnl in topo.iterkeys():
+ for chnl in topo.keys():
topo_list[topo[chnl]['index']] = chnl
- for chnl in virtual.iterkeys():
+ for chnl in virtual.keys():
index +=1
topo_list[index] = chnl

@@ -495,7 +495,7 @@ if __name__ == '__main__':
try:
opts, args = getopt.getopt(sys.argv[1:], "i:vo:s:l:t:")
except getopt.GetoptError as err:
- print str(err) # will print something like "option -a not recognized"
+ print(str(err)) # will print something like "option -a not recognized"
sys.exit(2)

for o, a in opts:

@@ -513,7 +513,7 @@
if o == "-t":
topofile = a
parser = AepParser()
- print parser.topology_from_config(topofile)
+ print(parser.topology_from_config(topofile))
exit(0)

parser = AepParser()
@@ -1,4 +1,3 @@
- import csv
import logging
import os
import re

@@ -11,6 +10,7 @@ from collections import namedtuple, OrderedDict
from distutils.version import LooseVersion

from devlib.exception import WorkerThreadError, TargetNotRespondingError, TimeoutError
+ from devlib.utils.csvutil import csvwriter

logger = logging.getLogger('rendering')

@@ -53,7 +53,7 @@ class FrameCollector(threading.Thread):
wfh.close()
except (TargetNotRespondingError, TimeoutError): # pylint: disable=W0703
raise
- except Exception, e: # pylint: disable=W0703
+ except Exception as e: # pylint: disable=W0703
logger.warning('Exception on collector thread: {}({})'.format(e.__class__.__name__, e))
self.exc = WorkerThreadError(self.name, sys.exc_info())
logger.debug('Surface flinger frame data collection stopped.')

@@ -93,8 +93,7 @@ class FrameCollector(threading.Thread):
indexes.append(self.header.index(c))
frames = [[f[i] for i in indexes] for f in self.frames]
header = columns
- with open(outfile, 'w') as wfh:
- writer = csv.writer(wfh)
+ with csvwriter(outfile) as writer:
if header:
writer.writerow(header)
writer.writerows(frames)

@@ -142,7 +141,7 @@ class SurfaceFlingerFrameCollector(FrameCollector):
def _process_trace_line(self, line):
parts = line.split()
if len(parts) == 3:
- frame = SurfaceFlingerFrame(*map(int, parts))
+ frame = SurfaceFlingerFrame(*list(map(int, parts)))
if not frame.frame_ready_time:
return # "null" frame
if frame.frame_ready_time <= self.last_ready_time:

@@ -167,7 +166,7 @@ def read_gfxinfo_columns(target):
for line in lines:
if line.startswith('---PROFILEDATA---'):
break
- columns_line = lines.next()
+ columns_line = next(lines)
return columns_line.split(',')[:-1] # has a trailing ','

@@ -202,11 +201,11 @@ class GfxinfoFrameCollector(FrameCollector):
found = True
break

- fh.next() # headers
+ next(fh) # headers
for line in fh:
if line.startswith('---PROFILEDATA---'):
break
- entries = map(int, line.strip().split(',')[:-1]) # has a trailing ','
+ entries = list(map(int, line.strip().split(',')[:-1])) # has a trailing ','
if entries[1] <= last_vsync:
continue # repeat frame
last_vsync = entries[1]

@@ -240,14 +239,14 @@ def gfxinfo_get_last_dump(filepath):
fh_iter = _file_reverse_iter(fh)
try:
while True:
- buf = fh_iter.next()
+ buf = next(fh_iter)
ix = buf.find('** Graphics')
if ix >= 0:
return buf[ix:] + record

ix = buf.find(' **\n')
if ix >= 0:
- buf = fh_iter.next() + buf
+ buf = next(fh_iter) + buf
ix = buf.find('** Graphics')
if ix < 0:
msg = '"{}" appears to be corrupted'
@@ -23,6 +23,7 @@ import threading
import tempfile
import shutil
import socket
+ import sys
import time

import pexpect

@@ -236,7 +237,7 @@ class SshConnection(object):
def cancel_running_command(self):
# simulate impatiently hitting ^C until command prompt appears
logger.debug('Sending ^C')
- for _ in xrange(self.max_cancel_attempts):
+ for _ in range(self.max_cancel_attempts):
self.conn.sendline(chr(3))
if self.conn.prompt(0.1):
return True

@@ -263,7 +264,10 @@ class SshConnection(object):
timed_out = self._wait_for_prompt(timeout)
# the regex removes line breaks potential introduced when writing
# command to shell.
- output = process_backspaces(self.conn.before)
+ if sys.version_info[0] == 3:
+ output = process_backspaces(self.conn.before.decode(sys.stdout.encoding))
+ else:
+ output = process_backspaces(self.conn.before)
output = re.sub(r'\r([^\n])', r'\1', output)
if '\r\n' in output: # strip the echoed command
output = output.split('\r\n', 1)[1]

@@ -604,7 +608,7 @@ class Gem5Connection(TelnetConnection):
break
except pxssh.ExceptionPxssh:
pass
- except EOF, err:
+ except EOF as err:
self._gem5_EOF_handler(gem5_simulation, gem5_out_dir, err)
else:
gem5_simulation.kill()

@@ -626,7 +630,7 @@ class Gem5Connection(TelnetConnection):
self._login_to_device()
except TIMEOUT:
pass
- except EOF, err:
+ except EOF as err:
self._gem5_EOF_handler(gem5_simulation, gem5_out_dir, err)

try:

@@ -636,7 +640,7 @@ class Gem5Connection(TelnetConnection):
prompt_found = True
except TIMEOUT:
pass
- except EOF, err:
+ except EOF as err:
self._gem5_EOF_handler(gem5_simulation, gem5_out_dir, err)

gem5_logger.info("Successfully logged in")
@@ -26,6 +26,9 @@ is not the best language to use for configuration.
"""
import math
+ from functools import total_ordering

+ from past.builtins import basestring

from devlib.utils.misc import isiterable, to_identifier, ranges_to_list, list_to_mask

@@ -88,6 +91,7 @@ def numeric(value):
return fvalue

+ @total_ordering
class caseless_string(str):
"""
Just like built-in Python string except case-insensitive on comparisons. However, the

@@ -100,13 +104,13 @@ class caseless_string(str):
other = other.lower()
return self.lower() == other

def __ne__(self, other):
return not self.__eq__(other)

- def __cmp__(self, other):
- if isinstance(basestring, other):
+ def __lt__(self, other):
+ if isinstance(other, basestring):
other = other.lower()
- return cmp(self.lower(), other)
+ return self.lower() < other

def __hash__(self):
return hash(self.lower())

def format(self, *args, **kwargs):
return caseless_string(super(caseless_string, self).format(*args, **kwargs))
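Python 3 ignores `__cmp__`, so caseless_string above now defines `__eq__`/`__lt__` and uses functools.total_ordering to derive the remaining comparison operators. A hypothetical standalone sketch of the same pattern:

    from functools import total_ordering

    @total_ordering
    class CaselessString(str):
        # Only __eq__ and __lt__ are defined; total_ordering fills in
        # __le__, __gt__ and __ge__.
        def __eq__(self, other):
            if isinstance(other, str):
                other = other.lower()
            return self.lower() == other

        def __ne__(self, other):
            return not self.__eq__(other)

        def __lt__(self, other):
            if isinstance(other, str):
                other = other.lower()
            return self.lower() < other

        def __hash__(self):
            return hash(self.lower())

    assert CaselessString('ARM') == 'arm'
    assert CaselessString('a53') < 'A72'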
@@ -19,6 +19,8 @@ import time
import logging
from copy import copy

+ from past.builtins import basestring

from devlib.utils.serial_port import write_characters, TIMEOUT
from devlib.utils.types import boolean

@@ -193,14 +195,14 @@ class UefiMenu(object):
is not in the current menu, ``LookupError`` will be raised."""
if not self.prompt:
self.read_menu(timeout)
- return self.options.items()
+ return list(self.options.items())

def get_option_index(self, text, timeout=default_timeout):
"""Returns the menu index of the specified option text (uses regex matching). If the option
is not in the current menu, ``LookupError`` will be raised."""
if not self.prompt:
self.read_menu(timeout)
- for k, v in self.options.iteritems():
+ for k, v in self.options.items():
if re.search(text, v):
return k
raise LookupError(text)
setup.py (3 lines changed)

@@ -70,6 +70,7 @@ params = dict(
'pexpect>=3.3', # Send/recieve to/from device
'pyserial', # Serial port interface
'wrapt', # Basic for construction of decorator functions
+ 'future', # Python 2-3 compatibility
],
extras_require={
'daq': ['daqpower'],

@@ -85,7 +86,7 @@ params = dict(
],
)

- all_extras = list(chain(params['extras_require'].itervalues()))
+ all_extras = list(chain(iter(params['extras_require'].values())))
params['extras_require']['full'] = all_extras

setup(**params)