2018-07-04 15:39:44 +01:00
|
|
|
# Copyright 2018 ARM Limited
|
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
#
|
|
|
|
|
2017-02-21 13:37:11 +00:00
|
|
|
"""
|
|
|
|
This module contains wrappers for Python serialization modules for
|
|
|
|
common formats that make it easier to serialize/deserialize WA
|
2017-03-06 11:10:25 +00:00
|
|
|
Plain Old Data structures (serializable WA classes implement
|
|
|
|
``to_pod()``/``from_pod()`` methods for converting between POD
|
2017-02-21 13:37:11 +00:00
|
|
|
structures and Python class instances).
|
|
|
|
|
|
|
|
The modifications to standard serialization procedures are:
|
|
|
|
|
2017-03-06 11:10:25 +00:00
|
|
|
- mappings are deserialized as ``OrderedDict``\ 's rather than standard
|
2017-02-21 13:37:11 +00:00
|
|
|
Python ``dict``\ 's. This allows for cleaner syntax in certain parts
|
|
|
|
of WA configuration (e.g. values to be written to files can be specified
|
|
|
|
as a dict, and they will be written in the order specified in the config).
|
|
|
|
- regular expressions are automatically encoded/decoded. This allows for
|
|
|
|
configuration values to be transparently specified as strings or regexes
|
|
|
|
in the POD config.
|
|
|
|
|
|
|
|
This module exports the "wrapped" versions of serialization libraries,
|
2017-03-06 11:10:25 +00:00
|
|
|
and this should be imported and used instead of importing the libraries
|
2017-02-21 13:37:11 +00:00
|
|
|
directly. i.e. ::
|
|
|
|
|
|
|
|
from wa.utils.serializer import yaml
|
|
|
|
pod = yaml.load(fh)
|
|
|
|
|
|
|
|
instead of ::
|
|
|
|
|
|
|
|
import yaml
|
|
|
|
pod = yaml.load(fh)
|
|
|
|
|
2017-03-06 11:10:25 +00:00
|
|
|
It's also possible to use the serializer directly::
|
2017-02-21 13:37:11 +00:00
|
|
|
|
|
|
|
from wa.utils import serializer
|
|
|
|
pod = serializer.load(fh)
|
|
|
|
|
|
|
|
This can also be used to ``dump()`` POD structures. By default,
|
|
|
|
``dump()`` will produce JSON, but ``fmt`` parameter may be used to
|
|
|
|
specify an alternative format (``yaml`` or ``python``). ``load()`` will
|
2017-03-06 11:10:25 +00:00
|
|
|
use the file extension to guess the format, but ``fmt`` may also be used
|
2017-02-21 13:37:11 +00:00
|
|
|
to specify it explicitly.
|
|
|
|
|
|
|
|
"""
|
2017-03-06 11:10:25 +00:00
|
|
|
# pylint: disable=unused-argument
|
|
|
|
|
2017-02-21 13:37:11 +00:00
|
|
|
import os
|
|
|
|
import re
|
|
|
|
import json as _json
|
2019-12-03 12:44:25 +00:00
|
|
|
from collections import OrderedDict
|
|
|
|
from collections.abc import Hashable
|
2017-02-21 13:37:11 +00:00
|
|
|
from datetime import datetime
|
|
|
|
import dateutil.parser
|
2018-07-18 10:50:42 +01:00
|
|
|
import yaml as _yaml # pylint: disable=wrong-import-order
|
2019-05-15 17:55:54 +01:00
|
|
|
from yaml import MappingNode
|
2019-02-27 14:15:12 +00:00
|
|
|
try:
|
|
|
|
from yaml import FullLoader as _yaml_loader
|
|
|
|
except ImportError:
|
|
|
|
from yaml import Loader as _yaml_loader
|
2019-05-14 15:36:29 +01:00
|
|
|
from yaml.constructor import ConstructorError
|
2019-02-27 14:15:12 +00:00
|
|
|
|
2017-02-21 13:37:11 +00:00
|
|
|
|
2018-07-04 17:44:55 +01:00
|
|
|
# pylint: disable=redefined-builtin
|
2018-07-09 15:28:22 +01:00
|
|
|
from past.builtins import basestring # pylint: disable=wrong-import-order
|
2018-06-12 13:24:53 +01:00
|
|
|
|
2017-02-21 13:37:11 +00:00
|
|
|
from wa.framework.exception import SerializerSyntaxError
|
|
|
|
from wa.utils.misc import isiterable
|
2018-05-11 10:15:57 +01:00
|
|
|
from wa.utils.types import regex_type, none_type, level, cpu_mask
|
2017-02-21 13:37:11 +00:00
|
|
|
|
|
|
|
|
|
|
|
__all__ = [
|
|
|
|
'json',
|
|
|
|
'yaml',
|
|
|
|
'read_pod',
|
|
|
|
'dump',
|
|
|
|
'load',
|
2017-03-06 11:10:25 +00:00
|
|
|
'is_pod',
|
|
|
|
'POD_TYPES',
|
2017-02-21 13:37:11 +00:00
|
|
|
]
|
|
|
|
|
2017-03-06 11:10:25 +00:00
|
|
|
POD_TYPES = [
|
|
|
|
list,
|
|
|
|
tuple,
|
|
|
|
dict,
|
|
|
|
set,
|
2018-06-12 13:28:51 +01:00
|
|
|
basestring,
|
2018-05-30 13:58:49 +01:00
|
|
|
str,
|
2017-03-06 11:10:25 +00:00
|
|
|
int,
|
|
|
|
float,
|
|
|
|
bool,
|
2017-09-28 10:46:03 +01:00
|
|
|
OrderedDict,
|
2017-03-06 11:10:25 +00:00
|
|
|
datetime,
|
|
|
|
regex_type,
|
|
|
|
none_type,
|
2017-08-15 16:25:32 +01:00
|
|
|
level,
|
2018-05-25 14:01:12 +01:00
|
|
|
cpu_mask,
|
2017-03-06 11:10:25 +00:00
|
|
|
]
|
2017-02-21 13:37:11 +00:00
|
|
|
|
2018-07-02 12:24:10 +01:00
|
|
|
|
2017-02-21 13:37:11 +00:00
|
|
|
class WAJSONEncoder(_json.JSONEncoder):
|
|
|
|
|
2018-07-09 15:28:22 +01:00
|
|
|
def default(self, obj): # pylint: disable=method-hidden,arguments-differ
|
2017-03-06 11:10:25 +00:00
|
|
|
if isinstance(obj, regex_type):
|
2017-02-21 13:37:11 +00:00
|
|
|
return 'REGEX:{}:{}'.format(obj.flags, obj.pattern)
|
|
|
|
elif isinstance(obj, datetime):
|
|
|
|
return 'DATET:{}'.format(obj.isoformat())
|
2017-08-15 16:25:32 +01:00
|
|
|
elif isinstance(obj, level):
|
|
|
|
return 'LEVEL:{}:{}'.format(obj.name, obj.value)
|
2018-05-11 10:15:57 +01:00
|
|
|
elif isinstance(obj, cpu_mask):
|
|
|
|
return 'CPUMASK:{}'.format(obj.mask())
|
2017-02-21 13:37:11 +00:00
|
|
|
else:
|
|
|
|
return _json.JSONEncoder.default(self, obj)
|
|
|
|
|
|
|
|
|
|
|
|
class WAJSONDecoder(_json.JSONDecoder):
|
|
|
|
|
2018-07-09 15:28:22 +01:00
|
|
|
def decode(self, s, **kwargs): # pylint: disable=arguments-differ
|
2017-03-06 11:10:25 +00:00
|
|
|
d = _json.JSONDecoder.decode(self, s, **kwargs)
|
2017-02-21 13:37:11 +00:00
|
|
|
|
|
|
|
def try_parse_object(v):
|
2018-06-12 13:24:53 +01:00
|
|
|
if isinstance(v, basestring):
|
2017-08-15 16:25:32 +01:00
|
|
|
if v.startswith('REGEX:'):
|
|
|
|
_, flags, pattern = v.split(':', 2)
|
|
|
|
return re.compile(pattern, int(flags or 0))
|
|
|
|
elif v.startswith('DATET:'):
|
|
|
|
_, pattern = v.split(':', 1)
|
|
|
|
return dateutil.parser.parse(pattern)
|
|
|
|
elif v.startswith('LEVEL:'):
|
|
|
|
_, name, value = v.split(':', 2)
|
|
|
|
return level(name, value)
|
2018-05-11 10:15:57 +01:00
|
|
|
elif v.startswith('CPUMASK:'):
|
|
|
|
_, value = v.split(':', 1)
|
|
|
|
return cpu_mask(value)
|
2017-08-15 16:25:32 +01:00
|
|
|
|
|
|
|
return v
|
2017-02-21 13:37:11 +00:00
|
|
|
|
|
|
|
def load_objects(d):
|
2018-10-29 15:04:59 +00:00
|
|
|
if not hasattr(d, 'items'):
|
|
|
|
return d
|
2017-02-21 13:37:11 +00:00
|
|
|
pairs = []
|
2018-05-30 13:58:49 +01:00
|
|
|
for k, v in d.items():
|
|
|
|
if hasattr(v, 'items'):
|
2017-02-21 13:37:11 +00:00
|
|
|
pairs.append((k, load_objects(v)))
|
|
|
|
elif isiterable(v):
|
|
|
|
pairs.append((k, [try_parse_object(i) for i in v]))
|
|
|
|
else:
|
|
|
|
pairs.append((k, try_parse_object(v)))
|
|
|
|
return OrderedDict(pairs)
|
|
|
|
|
|
|
|
return load_objects(d)
|
|
|
|
|
|
|
|
|
|
|
|
class json(object):
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def dump(o, wfh, indent=4, *args, **kwargs):
|
|
|
|
return _json.dump(o, wfh, cls=WAJSONEncoder, indent=indent, *args, **kwargs)
|
|
|
|
|
2017-03-15 14:07:14 +00:00
|
|
|
@staticmethod
|
|
|
|
def dumps(o, indent=4, *args, **kwargs):
|
|
|
|
return _json.dumps(o, cls=WAJSONEncoder, indent=indent, *args, **kwargs)
|
|
|
|
|
2017-02-21 13:37:11 +00:00
|
|
|
@staticmethod
|
|
|
|
def load(fh, *args, **kwargs):
|
|
|
|
try:
|
|
|
|
return _json.load(fh, cls=WAJSONDecoder, object_pairs_hook=OrderedDict, *args, **kwargs)
|
|
|
|
except ValueError as e:
|
2018-10-31 11:24:26 +00:00
|
|
|
raise SerializerSyntaxError(e.args[0])
|
2017-02-21 13:37:11 +00:00
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def loads(s, *args, **kwargs):
|
|
|
|
try:
|
|
|
|
return _json.loads(s, cls=WAJSONDecoder, object_pairs_hook=OrderedDict, *args, **kwargs)
|
|
|
|
except ValueError as e:
|
2018-10-31 11:24:26 +00:00
|
|
|
raise SerializerSyntaxError(e.args[0])
|
2017-02-21 13:37:11 +00:00
|
|
|
|
|
|
|
|
|
|
|
_mapping_tag = _yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG
|
2018-05-30 13:58:49 +01:00
|
|
|
_regex_tag = 'tag:wa:regex'
|
|
|
|
_level_tag = 'tag:wa:level'
|
|
|
|
_cpu_mask_tag = 'tag:wa:cpu_mask'
|
2017-02-21 13:37:11 +00:00
|
|
|
|
|
|
|
|
|
|
|
def _wa_dict_representer(dumper, data):
|
2018-05-30 13:58:49 +01:00
|
|
|
return dumper.represent_mapping(_mapping_tag, iter(data.items()))
|
2017-02-21 13:37:11 +00:00
|
|
|
|
|
|
|
|
|
|
|
def _wa_regex_representer(dumper, data):
|
|
|
|
text = '{}:{}'.format(data.flags, data.pattern)
|
|
|
|
return dumper.represent_scalar(_regex_tag, text)
|
|
|
|
|
2018-07-02 12:24:10 +01:00
|
|
|
|
2017-08-15 16:25:32 +01:00
|
|
|
def _wa_level_representer(dumper, data):
|
|
|
|
text = '{}:{}'.format(data.name, data.level)
|
|
|
|
return dumper.represent_scalar(_level_tag, text)
|
2017-02-21 13:37:11 +00:00
|
|
|
|
2018-07-02 12:24:10 +01:00
|
|
|
|
2018-05-11 10:15:57 +01:00
|
|
|
def _wa_cpu_mask_representer(dumper, data):
|
|
|
|
return dumper.represent_scalar(_cpu_mask_tag, data.mask())
|
|
|
|
|
2018-07-02 12:24:10 +01:00
|
|
|
|
2017-02-21 13:37:11 +00:00
|
|
|
def _wa_regex_constructor(loader, node):
|
|
|
|
value = loader.construct_scalar(node)
|
|
|
|
flags, pattern = value.split(':', 1)
|
|
|
|
return re.compile(pattern, int(flags or 0))
|
|
|
|
|
2018-07-02 12:24:10 +01:00
|
|
|
|
2017-08-15 16:25:32 +01:00
|
|
|
def _wa_level_constructor(loader, node):
|
|
|
|
value = loader.construct_scalar(node)
|
|
|
|
name, value = value.split(':', 1)
|
|
|
|
return level(name, value)
|
|
|
|
|
2018-07-02 12:24:10 +01:00
|
|
|
|
2018-05-11 10:15:57 +01:00
|
|
|
def _wa_cpu_mask_constructor(loader, node):
|
|
|
|
value = loader.construct_scalar(node)
|
|
|
|
return cpu_mask(value)
|
|
|
|
|
2017-02-21 13:37:11 +00:00
|
|
|
|
2019-05-15 18:03:15 +01:00
|
|
|
class _WaYamlLoader(_yaml_loader): # pylint: disable=too-many-ancestors
|
2019-05-14 15:36:29 +01:00
|
|
|
|
|
|
|
def construct_mapping(self, node, deep=False):
|
2019-05-17 15:54:50 +01:00
|
|
|
if isinstance(node, MappingNode):
|
|
|
|
self.flatten_mapping(node)
|
2019-05-14 15:36:29 +01:00
|
|
|
if not isinstance(node, MappingNode):
|
|
|
|
raise ConstructorError(None, None,
|
2019-05-15 18:03:15 +01:00
|
|
|
"expected a mapping node, but found %s" % node.id,
|
|
|
|
node.start_mark)
|
2019-05-14 15:36:29 +01:00
|
|
|
mapping = OrderedDict()
|
|
|
|
for key_node, value_node in node.value:
|
|
|
|
key = self.construct_object(key_node, deep=deep)
|
2019-05-15 17:55:54 +01:00
|
|
|
if not isinstance(key, Hashable):
|
2019-05-14 15:36:29 +01:00
|
|
|
raise ConstructorError("while constructing a mapping", node.start_mark,
|
2019-05-15 18:03:15 +01:00
|
|
|
"found unhashable key", key_node.start_mark)
|
2019-05-14 15:36:29 +01:00
|
|
|
value = self.construct_object(value_node, deep=deep)
|
|
|
|
mapping[key] = value
|
|
|
|
return mapping
|
|
|
|
|
|
|
|
|
2017-02-21 13:37:11 +00:00
|
|
|
_yaml.add_representer(OrderedDict, _wa_dict_representer)
|
|
|
|
_yaml.add_representer(regex_type, _wa_regex_representer)
|
2017-08-15 16:25:32 +01:00
|
|
|
_yaml.add_representer(level, _wa_level_representer)
|
2018-05-11 10:15:57 +01:00
|
|
|
_yaml.add_representer(cpu_mask, _wa_cpu_mask_representer)
|
2019-05-14 15:36:29 +01:00
|
|
|
_yaml.add_constructor(_regex_tag, _wa_regex_constructor, Loader=_WaYamlLoader)
|
|
|
|
_yaml.add_constructor(_level_tag, _wa_level_constructor, Loader=_WaYamlLoader)
|
|
|
|
_yaml.add_constructor(_cpu_mask_tag, _wa_cpu_mask_constructor, Loader=_WaYamlLoader)
|
|
|
|
_yaml.add_constructor(_mapping_tag, _WaYamlLoader.construct_yaml_map, Loader=_WaYamlLoader)
|
2017-02-21 13:37:11 +00:00
|
|
|
|
|
|
|
|
|
|
|
class yaml(object):
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def dump(o, wfh, *args, **kwargs):
|
|
|
|
return _yaml.dump(o, wfh, *args, **kwargs)
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def load(fh, *args, **kwargs):
|
|
|
|
try:
|
2019-05-15 17:55:54 +01:00
|
|
|
return _yaml.load(fh, *args, Loader=_WaYamlLoader, **kwargs)
|
2017-02-21 13:37:11 +00:00
|
|
|
except _yaml.YAMLError as e:
|
|
|
|
lineno = None
|
|
|
|
if hasattr(e, 'problem_mark'):
|
2017-03-06 11:10:25 +00:00
|
|
|
lineno = e.problem_mark.line # pylint: disable=no-member
|
2019-05-13 16:13:53 +01:00
|
|
|
message = e.args[0] if (e.args and e.args[0]) else str(e)
|
|
|
|
raise SerializerSyntaxError(message, lineno)
|
2017-02-21 13:37:11 +00:00
|
|
|
|
|
|
|
loads = load
|
|
|
|
|
|
|
|
|
|
|
|
class python(object):
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def dump(o, wfh, *args, **kwargs):
|
|
|
|
raise NotImplementedError()
|
|
|
|
|
|
|
|
@classmethod
|
|
|
|
def load(cls, fh, *args, **kwargs):
|
|
|
|
return cls.loads(fh.read())
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def loads(s, *args, **kwargs):
|
|
|
|
pod = {}
|
|
|
|
try:
|
2018-05-30 13:58:49 +01:00
|
|
|
exec(s, pod) # pylint: disable=exec-used
|
2017-02-21 13:37:11 +00:00
|
|
|
except SyntaxError as e:
|
|
|
|
raise SerializerSyntaxError(e.message, e.lineno)
|
2018-07-04 17:44:55 +01:00
|
|
|
for k in list(pod.keys()): # pylint: disable=consider-iterating-dictionary
|
2017-02-21 13:37:11 +00:00
|
|
|
if k.startswith('__'):
|
|
|
|
del pod[k]
|
|
|
|
return pod
|
|
|
|
|
|
|
|
|
|
|
|
def read_pod(source, fmt=None):
|
2018-05-30 13:58:49 +01:00
|
|
|
if isinstance(source, str):
|
2017-02-21 13:37:11 +00:00
|
|
|
with open(source) as fh:
|
|
|
|
return _read_pod(fh, fmt)
|
2017-03-06 11:10:25 +00:00
|
|
|
elif hasattr(source, 'read') and (hasattr(source, 'name') or fmt):
|
2017-02-21 13:37:11 +00:00
|
|
|
return _read_pod(source, fmt)
|
|
|
|
else:
|
|
|
|
message = 'source must be a path or an open file handle; got {}'
|
|
|
|
raise ValueError(message.format(type(source)))
|
|
|
|
|
2017-03-16 17:54:48 +00:00
|
|
|
|
2017-03-06 11:10:25 +00:00
|
|
|
def write_pod(pod, dest, fmt=None):
|
2018-05-30 13:58:49 +01:00
|
|
|
if isinstance(dest, str):
|
2017-03-06 11:10:25 +00:00
|
|
|
with open(dest, 'w') as wfh:
|
|
|
|
return _write_pod(pod, wfh, fmt)
|
|
|
|
elif hasattr(dest, 'write') and (hasattr(dest, 'name') or fmt):
|
|
|
|
return _write_pod(pod, dest, fmt)
|
|
|
|
else:
|
|
|
|
message = 'dest must be a path or an open file handle; got {}'
|
|
|
|
raise ValueError(message.format(type(dest)))
|
|
|
|
|
2017-02-21 13:37:11 +00:00
|
|
|
|
|
|
|
def dump(o, wfh, fmt='json', *args, **kwargs):
|
2017-03-06 11:10:25 +00:00
|
|
|
serializer = {'yaml': yaml,
|
|
|
|
'json': json,
|
|
|
|
'python': python,
|
|
|
|
'py': python,
|
|
|
|
}.get(fmt)
|
2017-02-21 13:37:11 +00:00
|
|
|
if serializer is None:
|
|
|
|
raise ValueError('Unknown serialization format: "{}"'.format(fmt))
|
|
|
|
serializer.dump(o, wfh, *args, **kwargs)
|
|
|
|
|
|
|
|
|
|
|
|
def load(s, fmt='json', *args, **kwargs):
|
|
|
|
return read_pod(s, fmt=fmt)
|
|
|
|
|
|
|
|
|
|
|
|
def _read_pod(fh, fmt=None):
|
|
|
|
if fmt is None:
|
|
|
|
fmt = os.path.splitext(fh.name)[1].lower().strip('.')
|
2018-06-14 16:15:33 +01:00
|
|
|
if fmt == '':
|
|
|
|
# Special case of no given file extension
|
2020-10-19 18:09:04 +01:00
|
|
|
message = ("Could not determine format "
|
2018-07-02 12:24:10 +01:00
|
|
|
"from file extension for \"{}\". "
|
|
|
|
"Please specify it or modify the fmt parameter.")
|
|
|
|
raise ValueError(message.format(getattr(fh, 'name', '<none>')))
|
2017-02-21 13:37:11 +00:00
|
|
|
if fmt == 'yaml':
|
|
|
|
return yaml.load(fh)
|
|
|
|
elif fmt == 'json':
|
|
|
|
return json.load(fh)
|
|
|
|
elif fmt == 'py':
|
|
|
|
return python.load(fh)
|
|
|
|
else:
|
2017-03-06 11:10:25 +00:00
|
|
|
raise ValueError('Unknown format "{}": {}'.format(fmt, getattr(fh, 'name', '<none>')))
|
|
|
|
|
2018-05-24 13:51:05 +01:00
|
|
|
|
2017-03-06 11:10:25 +00:00
|
|
|
def _write_pod(pod, wfh, fmt=None):
|
|
|
|
if fmt is None:
|
|
|
|
fmt = os.path.splitext(wfh.name)[1].lower().strip('.')
|
|
|
|
if fmt == 'yaml':
|
|
|
|
return yaml.dump(pod, wfh)
|
|
|
|
elif fmt == 'json':
|
|
|
|
return json.dump(pod, wfh)
|
|
|
|
elif fmt == 'py':
|
|
|
|
raise ValueError('Serializing to Python is not supported')
|
|
|
|
else:
|
|
|
|
raise ValueError('Unknown format "{}": {}'.format(fmt, getattr(wfh, 'name', '<none>')))
|
|
|
|
|
2018-05-24 13:51:05 +01:00
|
|
|
|
2017-03-06 11:10:25 +00:00
|
|
|
def is_pod(obj):
|
2018-07-04 17:44:55 +01:00
|
|
|
if type(obj) not in POD_TYPES: # pylint: disable=unidiomatic-typecheck
|
2018-05-24 13:51:05 +01:00
|
|
|
return False
|
2018-05-30 13:58:49 +01:00
|
|
|
if hasattr(obj, 'items'):
|
|
|
|
for k, v in obj.items():
|
2018-05-24 13:51:05 +01:00
|
|
|
if not (is_pod(k) and is_pod(v)):
|
|
|
|
return False
|
|
|
|
elif isiterable(obj):
|
|
|
|
for v in obj:
|
|
|
|
if not is_pod(v):
|
|
|
|
return False
|
|
|
|
return True
|
2018-11-16 17:17:38 +00:00
|
|
|
|
|
|
|
|
|
|
|
class Podable(object):
|
|
|
|
|
|
|
|
_pod_serialization_version = 0
|
|
|
|
|
|
|
|
@classmethod
|
|
|
|
def from_pod(cls, pod):
|
|
|
|
pod = cls._upgrade_pod(pod)
|
|
|
|
instance = cls()
|
|
|
|
instance._pod_version = pod.pop('_pod_version') # pylint: disable=protected-access
|
|
|
|
return instance
|
|
|
|
|
|
|
|
@classmethod
|
|
|
|
def _upgrade_pod(cls, pod):
|
|
|
|
_pod_serialization_version = pod.pop('_pod_serialization_version', None) or 0
|
|
|
|
while _pod_serialization_version < cls._pod_serialization_version:
|
|
|
|
_pod_serialization_version += 1
|
|
|
|
upgrade = getattr(cls, '_pod_upgrade_v{}'.format(_pod_serialization_version))
|
|
|
|
pod = upgrade(pod)
|
|
|
|
return pod
|
|
|
|
|
|
|
|
def __init__(self):
|
|
|
|
self._pod_version = self._pod_serialization_version
|
|
|
|
|
|
|
|
def to_pod(self):
|
|
|
|
pod = {}
|
|
|
|
pod['_pod_version'] = self._pod_version
|
|
|
|
pod['_pod_serialization_version'] = self._pod_serialization_version
|
|
|
|
return pod
|