1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
|
#!/usr/bin/python2.4
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Support for formatting a data pack file used for platform agnostic resource
files.
'''
import exceptions
import os
import struct
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..'))
from grit.format import interface
from grit.node import include
from grit.node import message
from grit.node import misc
# Version number written into the pack header and required by ReadDataPack;
# a mismatch raises WrongFileVersion.
FILE_FORMAT_VERSION = 3
HEADER_LENGTH = 2 * 4  # Two uint32s. (file version and number of entries)
class WrongFileVersion(Exception):
  '''Raised by DataPack.ReadDataPack when the version field in a pack file's
  header does not equal FILE_FORMAT_VERSION.'''
  pass
class DataPack(interface.ItemFormatter):
  '''Writes out the data pack file format (platform agnostic resource file).

  File layout, all little-endian:
    header: uint32 format version, uint32 number of entries;
    index:  one (uint16 resource id, uint32 file offset) pair per entry,
            plus one extra sentinel pair whose offset marks the end of the
            last resource's data;
    data:   the raw resource payloads, concatenated in ascending id order.
  '''

  def Format(self, item, lang='en', begin_item=True, output_dir='.'):
    '''Returns the complete data pack contents for |item| as a string.

    |item| must be a misc.ReleaseNode.  The formatter framework calls this
    twice per item; only the begin_item=True call produces output.
    '''
    if not begin_item:
      return ''
    assert isinstance(item, misc.ReleaseNode)
    nodes = DataPack.GetDataNodes(item)
    data = {}
    for node in nodes:
      # GetDataPackPair returns the (numeric id, payload) for this node in
      # the requested language.
      id, value = node.GetDataPackPair(lang)
      data[id] = value
    return DataPack.WriteDataPackToString(data)

  @staticmethod
  def GetDataNodes(item):
    '''Returns a list of nodes that can be packed into the data pack file.

    Recursively walks |item|'s subtree, pruning <if> nodes whose condition
    is not satisfied, and collecting include/message leaf nodes.
    '''
    nodes = []
    if (isinstance(item, misc.IfNode) and not item.IsConditionSatisfied()):
      return nodes
    if (isinstance(item, include.IncludeNode) or
        isinstance(item, message.MessageNode)):
      # Include this node if it wasn't marked as skipped by a whitelist.
      if not item.WhitelistMarkedAsSkip():
        return [item]
      return nodes
    for child in item.children:
      nodes.extend(DataPack.GetDataNodes(child))
    return nodes

  @staticmethod
  def ReadDataPack(input_file):
    """Reads a data pack file and returns a dictionary of id -> data string.

    Raises WrongFileVersion if the header's version field is not
    FILE_FORMAT_VERSION.
    """
    # NOTE(review): the file object is never explicitly closed; this relies
    # on CPython's reference counting to close it promptly.
    data = open(input_file, "rb").read()
    original_data = data
    # Read the header.
    version, num_entries = struct.unpack("<II", data[:HEADER_LENGTH])
    if version != FILE_FORMAT_VERSION:
      raise WrongFileVersion
    resources = {}
    if num_entries == 0:
      return resources
    # Read the index and data.
    data = data[HEADER_LENGTH:]
    kIndexEntrySize = 2 + 4  # Each entry is a uint16 and a uint32.
    for _ in range(num_entries):
      id, offset = struct.unpack("<HI", data[:kIndexEntrySize])
      data = data[kIndexEntrySize:]
      # Peek at the next index entry (for the last real entry this is the
      # sentinel written by WriteDataPackToString) to find where this
      # resource's data ends.
      next_id, next_offset = struct.unpack("<HI", data[:kIndexEntrySize])
      resources[id] = original_data[offset:next_offset]
    return resources

  @staticmethod
  def WriteDataPackToString(resources):
    """Write a map of id=>data into a string in the data pack format and return
    it."""
    # Sort so the data section is laid out in ascending id order, matching
    # the index.
    ids = sorted(resources.keys())
    ret = []
    # Write file header.
    ret.append(struct.pack("<II", FILE_FORMAT_VERSION, len(ids)))
    # NOTE(review): this local shadows the module-level HEADER_LENGTH; both
    # have the same value, so behavior is unaffected.
    HEADER_LENGTH = 2 * 4  # Two uint32s.
    # Each entry is a uint16 + a uint32s. We have one extra entry for the last
    # item.
    index_length = (len(ids) + 1) * (2 + 4)
    # Write index.
    data_offset = HEADER_LENGTH + index_length
    for id in ids:
      ret.append(struct.pack("<HI", id, data_offset))
      data_offset += len(resources[id])
    # Sentinel entry: id 0, offset just past the end of the last resource.
    # ReadDataPack uses it to delimit the final resource.
    ret.append(struct.pack("<HI", 0, data_offset))
    # Write data.
    for id in ids:
      ret.append(resources[id])
    return ''.join(ret)

  @staticmethod
  def WriteDataPack(resources, output_file):
    """Write a map of id=>data into output_file as a data pack."""
    # NOTE(review): |file| shadows the builtin and is never explicitly
    # closed; relies on CPython refcounting to flush and close on exit.
    file = open(output_file, "wb")
    content = DataPack.WriteDataPackToString(resources)
    file.write(content)

  @staticmethod
  def RePack(output_file, input_files):
    """Write a new data pack to |output_file| based on a list of filenames
    (|input_files|).

    Raises KeyError if two input packs contain the same resource id.
    """
    resources = {}
    for filename in input_files:
      new_resources = DataPack.ReadDataPack(filename)
      # Make sure we have no duplicates.
      duplicate_keys = set(new_resources.keys()) & set(resources.keys())
      if len(duplicate_keys) != 0:
        raise exceptions.KeyError("Duplicate keys: " +
                                  str(list(duplicate_keys)))
      resources.update(new_resources)
    DataPack.WriteDataPack(resources, output_file)
def main():
# Just write a simple file.
data = { 1: "", 4: "this is id 4", 6: "this is id 6", 10: "" }
WriteDataPack(data, "datapack1.pak")
data2 = { 1000: "test", 5: "five" }
WriteDataPack(data2, "datapack2.pak")
print "wrote datapack1 and datapack2 to current directory."
if __name__ == '__main__':
main()
|