Python json encoding with classes that need custom encodings but which may themselves have nested data structures
This is obviously a simplified version of what I am trying to do, but ... let's say I have
class X(object): pass x = X() y = X() x.val = {1:2,3:4} y.val = {1:2,3:x}
How do I write a custom json encoder so that it recodes the encoding loop naturally? I don't need json to demonstrate that the class is of type X (dot dict would be fine). An actual example of this might have data structures nested 10 depths.
Obviously, I could just override the default() method, but that doesn't seem to allow recursive calls, i.e. the best I have is something like this (and I need json.loads, otherwise the thing gets double quotes / escaped):
class XEncoder(json.JSONEncoder):
    """Encode X instances by collecting their non-dunder attributes."""

    def default(self, obj):
        # Anything that is not an X falls through to the base class,
        # which raises the usual TypeError for unserializable objects.
        if not isinstance(obj, X):
            return json.JSONEncoder.default(self, obj)
        # Encode then parse back, so the caller gets a structure rather
        # than a doubly-escaped string.
        public = {name: getattr(obj, name) for name in dir(obj) if "__" not in name}
        return json.loads(self.encode(public))
source to share
Perhaps overkill, but I used a Mixin class like this:
def _default_json_encoder(obj):
""" Default encoder, encountered must have to_dict method to be serialized. """
if hasattr(obj, "to_dict"):
return obj.to_dict()
else:
raise TypeError('Object of type %s with value of %s is not JSON serializable' % (type(obj), repr(obj)))
class MixinJSONable(object):
    """
    Mixin that makes an object JSON-serializable.

    If a nested object defines its own ``to_dict`` it is used to produce
    the dict, otherwise ``MixinJSONable.to_dict`` applies.
    Only "public" attributes whose values are JSON-friendly types (or
    expose ``to_dict``) are dumped.  Attributes can be excluded by passing
    ``ignored_keys`` to :meth:`to_json`.  NOTE: that filter is stored on
    the ``MixinJSONable`` class itself, so it deliberately spreads to every
    class using this mixin.
    """

    def to_dict(self):
        """Return a dict of the public, serializable attributes.

        An attribute is kept when its value is a basic JSON type, ``None``,
        or an object providing ``to_dict`` (which the ``default`` hook of
        json.dumps then serializes recursively).
        """
        # float and tuple are JSON-serializable too; without them numeric
        # attributes were silently dropped.
        self._jsonable = (int, float, str, list, tuple, dict)
        ignored = getattr(MixinJSONable, "_ignored_json_keys", [])
        result = {}
        for attr in dir(self):
            # Skip private/dunder names and explicitly ignored keys before
            # touching the attribute value.
            if attr.startswith("_") or attr in ignored:
                continue
            value = getattr(self, attr)
            if (isinstance(value, self._jsonable) or value is None
                    or hasattr(value, 'to_dict')):
                # Objects with to_dict are kept as-is; the json `default`
                # hook serializes them later.
                result[attr] = value
        return result

    def to_json(self, **kw):
        """
        Dump the object as JSON.

        Accepts the same keyword arguments as :func:`json.dumps`.  If
        ``ignored_keys`` (list) is passed, those keys are not dumped in the
        JSON (the filter applies to all nested objects as well).
        """
        indent = kw.pop("indent", 4)  # use indent key if passed, otherwise 4
        _ignored_json_keys = kw.pop("ignored_keys", [])
        if _ignored_json_keys:
            # Stored on the class on purpose: shared across all mixin users.
            MixinJSONable._ignored_json_keys = _ignored_json_keys
        return json.dumps(self, indent=indent, default=_default_json_encoder, **kw)
class X(MixinJSONable):
    pass


# Build two instances, the second nesting the first one.
x = X()
y = X()
x.val = {1: 2, 3: 4}
y.val = {1: 2, 3: x}
y.to_json()
will print:
{
"val": {
"1": 2,
"3": {
"val": {
"1": 2,
"3": 4
}
}
}
}
source to share
Could you expand the values and then encode the resulting Python structure?
class XEncoder(json.JSONEncoder):
    """Recursively expand objects into plain dicts before encoding.

    Fixes over the original version: ``default`` previously expanded the
    *global* ``y`` instead of the ``obj`` handed in by the encoder, left a
    debug ``print`` in place, and round-tripped the result through
    encode/loads for no benefit.
    """

    def default(self, obj):
        # Called by JSONEncoder for any object it cannot serialize itself;
        # returning a plain dict lets the encoder finish the job.
        return self._expand(obj)

    @staticmethod
    def _expand(node):
        """Turn *node* (an instance with __dict__, or a dict) into a plain dict."""
        try:
            mapping = vars(node)  # class instance -> its attribute dict
        except TypeError:
            mapping = node  # already a dict
        expanded = {}
        for key, value in mapping.items():
            # Recurse into dicts and into any object carrying __dict__.
            if isinstance(value, dict) or hasattr(value, "__dict__"):
                expanded[key] = XEncoder._expand(value)
            else:
                expanded[key] = value
        return expanded
Here's the output:
class X(object):
    pass


# Build a chain of four X instances, each nesting the previous one.
w, x, y, z = X(), X(), X(), X()
w.val = {1: 2, 3: 4}
x.val = {1: 2, 3: w}
y.val = {1: 2, 3: x}
z.val = {1: 2, 3: y}

in_json = XEncoder().encode(z.val)
print(in_json)
{"1": 2, "3": {"val": {"1": 2, "3": {"val": {"1": 2, "3": {"val": {" 1 ": 2," 3 ": 4}}}}}}}
source to share
I may not understand the question, but the json.dumps()
default parameter handles recursion through the object graph simply.
If you don't want to keep the type of the original object:
def jsondefault(obj):
    """json.dumps default hook: serialize any object as a copy of its
    attribute dict, discarding the original type."""
    return dict(vars(obj))
If you want to remember the type of object being encoded:
def jsondefault_types(obj):
    """Like jsondefault, but records the fully-qualified class name of the
    encoded object under the '__type__' key."""
    cls = obj.__class__
    payload = dict(obj.__dict__)
    payload['__type__'] = "%s.%s" % (cls.__module__, cls.__name__)
    return payload
Then you can dump to both sides:
class X(object):
    pass


# Three instances, each nesting the previous one.
x, y, z = X(), X(), X()
x.val = {1: 2, 3: 4}
y.val = {1: 2, 3: x}
z.val = {1: 2, 3: y}

# Python 3 print function: the rest of the file already uses py3 syntax,
# the py2 `print expr` statements here were a SyntaxError under py3.
print(json.dumps(z, default=jsondefault))
print(json.dumps(z, default=jsondefault_types))
as a result:
'{"val": {"1": 2, "3": {"val": {"1": 2, "3": {"val": {"1": 2, "3": 4}}}}}}'
'{"__type__": "__main__.X", "val": {"1": 2, "3": {"__type__": "__main__.X", "val": {"1": 2, "3": {"__type__": "__main__.X", "val": {"1": 2, "3": 4}}}}}}'
source to share
The built-in `vars` function does the job.
class XEncoder(json.JSONEncoder):
    """Serialize X instances via their instance __dict__."""

    def default(self, obj):
        # Non-X objects defer to the base class, which raises the
        # standard TypeError for unserializable values.
        if not isinstance(obj, X):
            return json.JSONEncoder.default(self, obj)
        return vars(obj)
test;
class X(object):
    pass


x = X()
x.val = {1: 2, 3: None}


def nest(obj, level):
    """Wrap *obj* in *level* layers of X instances.

    Returns *obj* unchanged when level == 0.  (The original returned the
    loop variable `o`, which is a NameError for level == 0.)
    """
    for _ in range(level):
        wrapper = X()
        wrapper.val = {1: 2, 3: obj}
        obj = wrapper
    return obj
XEncoder().encode(nest(x,10))
output;
'{"val": {"1": 2, "3": {"val": {"1": 2, "3": {"val": {"1": 2, "3": {"val": {"1": 2, "3": {"val": {"1": 2, "3": {"val": {"1": 2, "3": {"val": {"1": 2, "3": {"val": {"1": 2, "3": {"val": {"1": 2, "3": {"val": {"1": 2, "3": {"val": {"1": 2, "3": null}}}}}}}}}}}}}}}}}}}}}}'
source to share