#!/usr/bin/python

# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# usage: action_maketokenizer.py OUTPUTS -- INPUTS
#
# Multiple INPUTS may be listed. The sections are separated by -- arguments.
#
# OUTPUTS must contain a single item: a path to tokenizer.cpp.
#
# INPUTS must contain exactly two items. The first item must be the path to
# maketokenizer. The second item must be the path to tokenizer.flex.
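#
# Example invocation (paths are hypothetical, for illustration only):
#
#   action_maketokenizer.py out/tokenizer.cpp -- \
#       WebCore/css/maketokenizer WebCore/css/tokenizer.flex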

import subprocess
import sys

def SplitArgsIntoSections(args):
  sections = []
  while len(args) > 0:
    if '--' not in args:
      # If there is no '--' left, everything remaining is an entire section.
      dashes = len(args)
    else:
      dashes = args.index('--')

    sections.append(args[:dashes])

    # Next time through the loop, look at everything after this '--'.
    if dashes + 1 == len(args):
      # If the '--' is at the end of the list, we won't come back through the
      # loop again. Add an empty section now corresponding to the nothingness
      # following the final '--'.
      args = []
      sections.append(args)
    else:
      args = args[dashes + 1:]

  return sections
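
# For illustration (not part of the original script): SplitArgsIntoSections
# turns the '--'-delimited argument list into one list per section, e.g.
#
#   SplitArgsIntoSections(['tokenizer.cpp', '--', 'mktok', 'tokenizer.flex'])
#     => [['tokenizer.cpp'], ['mktok', 'tokenizer.flex']]
#
# and a trailing '--' produces a final empty section:
#
#   SplitArgsIntoSections(['tokenizer.cpp', '--'])  => [['tokenizer.cpp'], []]
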
def main(args):
  sections = SplitArgsIntoSections(args[1:])
  assert len(sections) == 2
  (outputs, inputs) = sections

  assert len(outputs) == 1
  output = outputs[0]

  assert len(inputs) == 2
  maketokenizer = inputs[0]
  flex_input = inputs[1]

  # Do it. check_call is new in 2.5, so simulate its behavior with call and
  # assert. This is the equivalent of the shell pipeline
  #   flex -t tokenizer.flex | perl maketokenizer > tokenizer.cpp
  outfile = open(output, 'wb')
  p1 = subprocess.Popen(['flex', '-t', flex_input], stdout=subprocess.PIPE)
  p2 = subprocess.Popen(['perl', maketokenizer], stdin=p1.stdout,
                        stdout=outfile)

  # Close the parent's copy of the pipe's read end so that flex receives
  # SIGPIPE if perl exits before consuming all of its output.
  p1.stdout.close()

  r1 = p1.wait()
  r2 = p2.wait()
  outfile.close()
  assert r1 == 0
  assert r2 == 0

  return 0

if __name__ == '__main__':
  sys.exit(main(sys.argv))