from tokenize  import generate_tokens, STRING, NAME, OP
from cStringIO import StringIO
from re        import compile, DOTALL

# Matches /* ... */ block comments (non-greedy, so two block comments
# with real data between them are not swallowed as one match) and
# // line comments.
comments = compile(r'/\*.*?\*/|//[^\r\n]*', DOTALL)


def _loads(string):
    '''
    Fairly competent JSON parser exploiting the Python tokenizer and eval().

    _loads(serialized_json) -> object

    Raises AttributeError on input that is not valid JSON.
    '''
    try:
        res = []
        consts = {'true': True, 'false': False, 'null': None}
        # Strip comments, then wrap everything in parentheses so that
        # eval() sees a single expression.  (A caveat of this approach:
        # a literal '//' inside a JSON string also looks like a comment;
        # JSON's optional '\/' escape avoids that.)
        string = '(' + comments.sub('', string) + ')'
        for type, val, _, _, _ in generate_tokens(StringIO(string).readline):
            # Whitelist: only JSON punctuation (plus '-' for negative
            # numbers and the wrapping parens), the three named
            # constants, strings and numbers may appear.
            if (type == OP and val not in '[]{}:,()-') or \
               (type == NAME and val not in consts):
                raise AttributeError()
            elif type == STRING:
                # Prefix with 'u' to force a unicode literal, and undo
                # the '\/' escape, which is valid JSON but not Python.
                res.append('u')
                res.append(val.replace('\\/', '/'))
            else:
                res.append(val)
        return eval(''.join(res), {}, consts)
    except Exception:
        raise AttributeError()


try:
    # Prefer the standard library parser, available since Python 2.6.
    from json import loads as json_loads
except ImportError:
    try:
        # Next best: the external simplejson package.
        from simplejson import loads as json_loads
    except ImportError:
        # Last resort: the tokenizer/eval-based parser defined above.
        json_loads = _loads

__all__ = ['json_loads']
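

# Minimal usage sketch (an addition, not part of the original module);
# the sample documents and expected results are illustrative only.
if __name__ == '__main__':
    # Strict JSON goes through whichever loader was selected above.
    print json_loads('{"a": [1, -2.5], "b": true, "c": null}')
    # The fallback parser also tolerates JavaScript-style comments.
    print _loads('{"a": 1}  // trailing comment')
    # Anything outside the token whitelist never reaches eval() and is
    # rejected with AttributeError.
    try:
        _loads('__import__("os").system("true")')
    except AttributeError:
        print 'rejected'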