Added filer, http and user directory resource getters.
This commit is contained in: parent 84c1fc2bcf, commit ebf917a8a9
@@ -71,6 +71,22 @@ def get_generic_resource(resource, files):
     return matches[0]
 
 
+def get_from_location(basepath, resource):
+    if resource.kind == 'file':
+        path = os.path.join(basepath, resource.path)
+        if os.path.exists(path):
+            return path
+    elif resource.kind == 'executable':
+        path = os.path.join(basepath, 'bin', resource.abi, resource.filename)
+        if os.path.exists(path):
+            return path
+    elif resource.kind in ['apk', 'jar', 'revent']:
+        files = get_by_extension(basepath, resource.kind)
+        return get_generic_resource(resource, files)
+
+    return None
+
+
 class Package(ResourceGetter):
 
     name = 'package'
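
Editorial note, not part of the diff: the new get_from_location() helper resolves a resource against a fixed layout under a given base directory, and all three getters added by this commit reuse it. The sketch below is illustrative only; FakeResource is a hypothetical stand-in for WA's resource objects, and the base path is just an example dependencies directory.

# Illustrative sketch only (not part of this commit).
from collections import namedtuple

# Hypothetical stand-in for WA resource objects, which expose 'kind' plus
# kind-specific attributes ('path' for files, 'abi'/'filename' for executables).
FakeResource = namedtuple('FakeResource', ['kind', 'path', 'abi', 'filename'])

# Layout searched by get_from_location(basepath, resource):
#   kind == 'file'                    -> <basepath>/<resource.path>
#   kind == 'executable'              -> <basepath>/bin/<resource.abi>/<resource.filename>
#   kind in ('apk', 'jar', 'revent')  -> files under <basepath> with that extension
exe = FakeResource(kind='executable', path=None, abi='armeabi', filename='busybox')
# get_from_location('/home/user/.workload_automation/dependencies/foo', exe)
# would return '.../dependencies/foo/bin/armeabi/busybox' if that file exists,
# and None otherwise.
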
@@ -84,19 +100,232 @@ class Package(ResourceGetter):
         else:
             modname = resource.owner.__module__
             basepath = os.path.dirname(sys.modules[modname].__file__)
+        return get_from_location(basepath, resource)
 
-        if resource.kind == 'file':
-            path = os.path.join(basepath, resource.path)
-            if os.path.exists(path):
-                return path
+
+class UserDirectory(ResourceGetter):
+
+    name = 'user'
+
+    def register(self, resolver):
+        resolver.register(self.get, SourcePriority.local)
+
+    def get(self, resource):
+        basepath = settings.dependencies_directory
+        return get_from_location(basepath, resource)
+
+
+class Http(ResourceGetter):
+
+    name = 'http'
+    description = """
+    Downloads resources from a server based on an index fetched from the
+    specified URL.
+
+    Given a URL, this will try to fetch ``<URL>/index.json``. The index file
+    maps extension names to a list of corresponding asset descriptions. Each
+    asset description contains a path (relative to the base URL) of the
+    resource and a SHA256 hash, so that this Getter can verify whether the
+    resource on the remote has changed.
+
+    For example, let's assume we want to get the APK file for workload "foo",
+    and that assets are hosted at ``http://example.com/assets``. This Getter
+    will first try to download ``http://example.com/assets/index.json``. The
+    index file may contain something like ::
+
+        {
+            "foo": [
+                {
+                    "path": "foo-app.apk",
+                    "sha256": "b14530bb47e04ed655ac5e80e69beaa61c2020450e18638f54384332dffebe86"
+                },
+                {
+                    "path": "subdir/some-other-asset.file",
+                    "sha256": "48d9050e9802246d820625717b72f1c2ba431904b8484ca39befd68d1dbedfff"
+                }
+            ]
+        }
+
+    This Getter will look through the list of assets for "foo" (in this case,
+    two) and check the paths until it finds one matching the resource (in this
+    case, "foo-app.apk"). Finally, it will try to download that file relative
+    to the base URL and extension name (in this case,
+    "http://example.com/assets/foo/foo-app.apk"). The downloaded version will
+    be cached locally, so that in the future, the getter will check the SHA256
+    hash of the local file against the one advertised inside index.json, and
+    provided that hasn't changed, it won't try to download the file again.
+
+    """
+    parameters = [
+        Parameter('url', global_alias='remote_assets_url',
+                  description="""
+                  URL of the index file for assets on an HTTP server.
+                  """),
+        Parameter('username',
+                  description="""
+                  User name for authenticating with assets URL
+                  """),
+        Parameter('password',
+                  description="""
+                  Password for authenticating with assets URL
+                  """),
+        Parameter('always_fetch', kind=boolean, default=False,
+                  global_alias='always_fetch_remote_assets',
+                  description="""
+                  If ``True``, will always attempt to fetch assets from the
+                  remote, even if a local cached copy is available.
+                  """),
+        Parameter('chunk_size', kind=int, default=1024,
+                  description="""
+                  Chunk size for streaming large assets.
+                  """),
+    ]
+
+    def __init__(self, **kwargs):
+        super(Http, self).__init__(**kwargs)
+        self.logger = logger
+        self.index = None
+
+    def register(self, resolver):
+        resolver.register(self.get, SourcePriority.remote)
+
+    def get(self, resource):
+        if not resource.owner:
+            return  # TODO: add support for unowned resources
+        if not self.index:
+            self.index = self.fetch_index()
+        asset = self.resolve_resource(resource)
+        if not asset:
+            return
+        return self.download_asset(asset, resource.owner.name)
+
+    def fetch_index(self):
+        if not self.url:
+            return {}
+        index_url = urljoin(self.url, 'index.json')
+        response = self.geturl(index_url)
+        if response.status_code != httplib.OK:
+            message = 'Could not fetch "{}"; received "{} {}"'
+            self.logger.error(message.format(index_url,
+                                             response.status_code,
+                                             response.reason))
+            return {}
+        return json.loads(response.content)
+
+    def download_asset(self, asset, owner_name):
+        url = urljoin(self.url, owner_name, asset['path'])
+        local_path = _f(os.path.join(settings.dependencies_directory, '__remote',
+                                     owner_name, asset['path'].replace('/', os.sep)))
+        if os.path.exists(local_path) and not self.always_fetch:
+            local_sha = sha256(local_path)
+            if local_sha == asset['sha256']:
+                self.logger.debug('Local SHA256 matches; not re-downloading')
+                return local_path
+        self.logger.debug('Downloading {}'.format(url))
+        response = self.geturl(url, stream=True)
+        if response.status_code != httplib.OK:
+            message = 'Could not download asset "{}"; received "{} {}"'
+            self.logger.warning(message.format(url,
+                                               response.status_code,
+                                               response.reason))
+            return
+        with open(local_path, 'wb') as wfh:
+            for chunk in response.iter_content(chunk_size=self.chunk_size):
+                wfh.write(chunk)
+        return local_path
+
+    def geturl(self, url, stream=False):
+        if self.username:
+            auth = (self.username, self.password)
+        else:
+            auth = None
+        return requests.get(url, auth=auth, stream=stream)
+
+    def resolve_resource(self, resource):
+        # pylint: disable=too-many-branches,too-many-locals
+        assets = self.index.get(resource.owner.name, {})
+        if not assets:
+            return {}
+
+        asset_map = {a['path']: a for a in assets}
+        if resource.kind in ['apk', 'jar', 'revent']:
+            if resource.kind == 'apk' and resource.version:
+                # TODO: modify the index format to attach version info to the
+                #       APK entries.
+                msg = 'Versions of APKs cannot be fetched over HTTP at this time'
+                self.logger.warning(msg)
+                return {}
+            path = get_generic_resource(resource, asset_map.keys())
+            if path:
+                return asset_map[path]
         elif resource.kind == 'executable':
-            path = os.path.join(basepath, 'bin', resource.abi, resource.filename)
-            if os.path.exists(path):
-                return path
-        elif resource.kind in ['apk', 'jar', 'revent']:
-            files = get_by_extension(basepath, resource.kind)
-            return get_generic_resource(resource, files)
-
-        return None
+            path = '/'.join(['bin', resource.abi, resource.filename])
+            for asset in assets:
+                if asset['path'].lower() == path.lower():
+                    return asset
+        else:  # file
+            for asset in assets:
+                if asset['path'].lower() == resource.path.lower():
+                    return asset
+
+
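
Usage note, not part of the diff: the Http getter's description above documents the index format it expects at ``<URL>/index.json``. The sketch below shows one way such an index could be generated for an asset tree laid out as ``<root>/<extension_name>/<files...>``; the script and the layout are assumptions, nothing like it ships with workload-automation.

# build_index.py -- hypothetical helper, not part of workload-automation.
# Walks an assets tree and emits the structure the 'http' getter expects:
# a mapping from extension name to a list of {"path": ..., "sha256": ...}.
import hashlib
import json
import os
import sys


def sha256_of(path, chunk_size=1024 * 1024):
    digest = hashlib.sha256()
    with open(path, 'rb') as fh:
        for chunk in iter(lambda: fh.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()


def build_index(root):
    index = {}
    for name in sorted(os.listdir(root)):
        ext_dir = os.path.join(root, name)
        if not os.path.isdir(ext_dir):
            continue
        entries = []
        for dirpath, _, filenames in os.walk(ext_dir):
            for filename in filenames:
                full_path = os.path.join(dirpath, filename)
                # Paths in the index are relative to the extension directory
                # and use forward slashes, matching the documented example.
                rel_path = os.path.relpath(full_path, ext_dir).replace(os.sep, '/')
                entries.append({'path': rel_path, 'sha256': sha256_of(full_path)})
        index[name] = entries
    return index


if __name__ == '__main__':
    print(json.dumps(build_index(sys.argv[1]), indent=4))

Given such an index, the getter resolves, for example, the "foo" APK to ``<URL>/foo/foo-app.apk`` and caches it under the ``__remote`` subdirectory of the dependencies directory, re-downloading only when the advertised SHA256 changes or when ``always_fetch`` is set.
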
+class Filer(ResourceGetter):
+
+    name = 'filer'
+    description = """
+    Finds resources on a (locally mounted) remote filer and caches them
+    locally.
+
+    This assumes that the filer is mounted on the local machine (e.g. as a
+    samba share).
+
+    """
+    parameters = [
+        Parameter('remote_path', global_alias='remote_assets_path', default='',
+                  description="""
+                  Path, on the local system, where the assets are located.
+                  """),
+        Parameter('always_fetch', kind=boolean, default=False,
+                  global_alias='always_fetch_remote_assets',
+                  description="""
+                  If ``True``, will always attempt to fetch assets from the
+                  remote, even if a local cached copy is available.
+                  """),
+    ]
+
+    def register(self, resolver):
+        resolver.register(self.get, SourcePriority.lan)
+
+    def get(self, resource):
+        if resource.owner:
+            remote_path = os.path.join(self.remote_path, resource.owner.name)
+            local_path = os.path.join(settings.dependencies_directory, '__filer',
+                                      resource.owner.dependencies_directory)
+            return self.try_get_resource(resource, remote_path, local_path)
+        else:  # No owner
+            result = None
+            for entry in os.listdir(self.remote_path):
+                remote_path = os.path.join(self.remote_path, entry)
+                local_path = os.path.join(settings.dependencies_directory, '__filer',
+                                          settings.dependencies_directory, entry)
+                result = self.try_get_resource(resource, remote_path, local_path)
+                if result:
+                    break
+            return result
+
+    def try_get_resource(self, resource, remote_path, local_path):
+        if not self.always_fetch:
+            result = get_from_location(local_path, resource)
+            if result:
+                return result
+        if remote_path:
+            # Didn't find it cached locally; now check the remote
+            result = get_from_location(remote_path, resource)
+            if not result:
+                return result
+        else:  # remote path is not set
+            return None
+        # Found it remotely; cache it locally, then return it
+        local_full_path = os.path.join(_d(local_path), os.path.basename(result))
+        self.logger.debug('cp {} {}'.format(result, local_full_path))
+        shutil.copy(result, local_full_path)
+        return local_full_path
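
Usage note, not part of the diff: both remote getters are configured through the global aliases declared in their parameters, and they share ``always_fetch_remote_assets``. Assuming the usual WA mechanism of setting global aliases in config.py, enabling them might look like the sketch below; the URL and mount point are placeholders.

# config.py sketch (values are placeholders, not defaults).

# 'http' getter: base URL under which index.json and the per-extension
# asset directories are hosted.
remote_assets_url = 'http://example.com/assets'

# 'filer' getter: where the remote filer is mounted on the local machine
# (e.g. a samba share mounted at this path).
remote_assets_path = '/mnt/wa-assets'

# Shared by both getters: if True, always go to the remote even when a
# locally cached copy of the asset exists.
always_fetch_remote_assets = False
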
@@ -588,10 +588,11 @@ def touch(path):
 def get_object_name(obj):
     if hasattr(obj, 'name'):
         return obj.name
-    elif hasattr(obj,'func_name'):
-        return obj.func_name
     elif hasattr(obj, 'im_func'):
-        return '{}.{}'.format(obj.im_class.__name__, obj.im_func.func_name)
+        return '{}.{}'.format(get_object_name(obj.im_class),
+                              obj.im_func.func_name)
+    elif hasattr(obj, 'func_name'):
+        return obj.func_name
     elif hasattr(obj, '__name__'):
         return obj.__name__
     elif hasattr(obj, '__class__'):
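
A short illustration of the get_object_name() change (in the utils module), not part of the diff. Under Python 2, which this code targets (hence ``im_func``/``im_class``/``func_name``), a bound method delegates attribute lookups to its underlying function, so it also answers ``hasattr(obj, 'func_name')``; with the old branch order, methods therefore returned only the bare function name. Checking ``im_func`` first, and formatting the owner through a recursive get_object_name() call, reports methods as ``<owner>.<method>`` and lets a class that defines a ``name`` attribute (as WA extensions do) be reported by that name.

# Python 2 sketch, not part of the diff; results assume the updated
# get_object_name() shown above.

class Demo(object):
    name = 'demo'          # WA extensions carry a 'name' attribute like this

    def run(self):
        pass

# get_object_name(Demo().run)  ->  'demo.run'   (old code returned just 'run')
# get_object_name(Demo.run)    ->  'demo.run'
# get_object_name(len)         ->  'len'        (falls through to __name__)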