path: root/tools/json_schema_compiler/json_schema.py
blob: bb4e9c4bc5d6f0c9e556ef40cb06cefa23232ad8
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import copy

import json_parse


def DeleteNodes(item, delete_key=None, matcher=None):
  """Deletes certain nodes in item, recursively. If |delete_key| is set, all
  dicts with |delete_key| as an attribute are deleted. If a callback is passed
  as |matcher|, |DeleteNodes| will delete all dicts for which matcher(dict)
  returns True.
  """
  assert (delete_key is not None) != (matcher is not None)

  def ShouldDelete(thing):
    return json_parse.IsDict(thing) and (
        (delete_key is not None and delete_key in thing) or
        (matcher is not None and matcher(thing)))

  if json_parse.IsDict(item):
    toDelete = []
    for key, value in item.items():
      if ShouldDelete(value):
        toDelete.append(key)
      else:
        DeleteNodes(value, delete_key, matcher)
    for key in toDelete:
      del item[key]
  elif isinstance(item, list):
    item[:] = [DeleteNodes(thing, delete_key, matcher)
               for thing in item if not ShouldDelete(thing)]

  return item
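

# Illustrative usage sketch; not part of the original file. It assumes
# json_parse.IsDict() accepts plain Python dicts, and exists only to show the
# two ways of selecting nodes for deletion.
def _ExampleDeleteNodesUsage():
  schema = {
      'functions': [
          {'name': 'get', 'nocompile': True},
          {'name': 'set'},
      ],
  }
  # Delete every dict that contains a 'nocompile' key.
  DeleteNodes(schema, delete_key='nocompile')
  assert schema == {'functions': [{'name': 'set'}]}

  # Alternatively, delete dicts selected by a callback.
  schema = {'events': [{'name': 'onFoo', 'deprecated': 'use onBar'}]}
  DeleteNodes(schema, matcher=lambda node: 'deprecated' in node)
  assert schema == {'events': []}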


def Load(filename):
  with open(filename, 'r') as handle:
    schemas = json_parse.Parse(handle.read())
  return schemas


# A dictionary mapping |filename| to the object resulting from loading the JSON
# at |filename|.
_cache = {}


def CachedLoad(filename):
  """Equivalent to Load(filename), but caches results for subsequent calls"""
  if filename not in _cache:
    _cache[filename] = Load(filename)
  # Return a copy of the object so that any changes a caller makes won't affect
  # the next caller.
  return copy.deepcopy(_cache[filename])
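

# Illustrative usage sketch; not part of the original file. |path| is any JSON
# schema file readable by json_parse.Parse; the example only relies on the
# parsed result being an ordinary mutable Python object.
def _ExampleCachedLoadUsage(path):
  first = CachedLoad(path)   # Parses the file and populates _cache.
  second = CachedLoad(path)  # Served from _cache as a fresh deep copy.
  assert first == second
  # Each caller gets an independent copy, so mutating one result cannot
  # affect what a later CachedLoad(path) returns.
  assert first is not second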