>>> raw_post_data = request.raw_post_data
>>> print raw_post_data
{"group":{"groupId":"2", "groupName":"GroupName"}, "members":{"1":{"firstName":"fName","lastName":"LName","address":"address"},"1": {"firstName":"f_Name","lastName":"L_Name","address":"_address"}}}
>>> create_request = json.loads(raw_post_data)
>>> print create_request
{u'group': {u'groupName': u'GroupName', u'groupId': u'2'}, u'members': {u'1': {u'lastName': u'L_Name', u'firstName': u'f_Name', u'address': u'_address'}}}
As you can see, the member with key '1' is silently overwritten when I use json.loads().
Is there any way to catch this as an exception in Python, reporting that duplicate keys were found in the request from the client?
The rfc 4627 for application/json
media type recommends unique keys, but it does not explicitly forbid duplicates:
The names within an object SHOULD be unique.
From rfc 2119:
SHOULD This word, or the adjective "RECOMMENDED", mean that there
may exist valid reasons in particular circumstances to ignore a
particular item, but the full implications must be understood and
carefully weighed before choosing a different course.
import json


def dict_raise_on_duplicates(ordered_pairs):
    """Build a dict from (key, value) pairs, rejecting duplicate keys.

    Intended as an ``object_pairs_hook`` for ``json.loads``: the hook
    receives the raw list of pairs for each JSON object, so duplicates
    are still visible here — a plain dict would silently overwrite them.

    Raises:
        ValueError: if the same key appears more than once.
    """
    result = {}
    for key, value in ordered_pairs:
        # Must check before inserting, otherwise the first value
        # would be silently clobbered by the second.
        if key in result:
            raise ValueError("duplicate key: %r" % (key,))
        result[key] = value
    return result


# Example, with raw_post_data being the JSON body from the question:
# json.loads(raw_post_data, object_pairs_hook=dict_raise_on_duplicates)
# -> ValueError: duplicate key: u'1'
Alternatively, if you want to report all the duplicate keys at once (per nesting level), you can use a collections.Counter:
import json
from collections import Counter


class KeyWatcher(dict):
    """Dict that rejects duplicate keys, reporting *all* of them at once.

    Use as ``object_pairs_hook`` for ``json.loads``; the hook is called
    with the list of (key, value) pairs of each JSON object, before any
    data is lost to overwriting.

    Raises:
        KeyError: listing every key that occurs more than once.
    """

    def __init__(self, *args):
        pairs = args[0]
        counts = Counter(key for key, _ in pairs)
        # A key is a duplicate only when it appears more than once;
        # the original `i > 0` flagged every key of a non-empty object.
        duplicates = [key for key, n in counts.items() if n > 1]
        if duplicates:
            raise KeyError(
                "Can't add duplicate keys {} to a json message".format(duplicates))
        # dict.update takes the iterable of pairs as ONE argument; the
        # original `self.update(*args[0])` unpacked each pair as a
        # separate positional argument and failed for non-empty input.
        self.update(pairs)


# Example, with raw_post_data being the JSON body from the question:
# json.loads(raw_post_data, object_pairs_hook=KeyWatcher)