3 from __future__ import print_function
class SXPParser(object):
    """Minimal S-expression (SXP) parser.

    Tokenizes the input with one alternation regex and builds nested
    Python lists on an explicit stack (``self.stack``).

    NOTE(review): this is a sampled listing -- the ``__init__`` that
    creates ``self.stack`` and parts of ``__call__`` are elided here.
    """

    # Token classes: '(' opens a list, ')' closes one, whitespace
    # separates atoms, and the trailing alternative matches an atom.
    # NOTE(review): the atom class [^()^\s]+ also excludes a literal
    # '^'; presumably [^()\s]+ was intended -- confirm against upstream.
    tokenizer_rules = r""" (?P<open> \( ) | (?P<close> \) ) | (?P<whitespace> \s+ ) | [^()^\s]+ """
    # Compiled once at class-definition time; re.X permits the spaced-out
    # layout of tokenizer_rules above.
    tokenizer_re = re.compile(tokenizer_rules, re.X)

    def __call__(self, input):
        # Walk every token of the input string in order.
        for match in self.tokenizer_re.finditer(input):
            if match.group('open'):
                # ... body elided in this excerpt (presumably pushes a
                # new empty list onto self.stack -- TODO confirm)
            elif match.group('close'):
                # ')' finishes the current list: pop it and attach it
                # to its parent (the new stack top).
                top = self.stack.pop()
                self.stack[-1].append(top)
            elif match.group('whitespace'):
                # ... body elided in this excerpt (whitespace tokens
                # appear to be discarded -- TODO confirm)
                # (an 'else:' header is also elided before the next line)
                self.stack[-1].append(match.group())
38 def __call__(self, out):
39 for domid, info in sorted(self.data.iteritems(), reverse=True):
40 print(str(domid), *info)
    def __init__(self, p):
        """Extract per-domain information from parsed SXP data ``p``.

        NOTE(review): sampled listing -- the loop header binding ``i``
        (orig. lines 45-48) and the rest of the body are elided; only
        one statement is visible.
        """
        # Pull the domain's name out of the nested record; assumes each
        # entry ``i`` subscripts like a dict with a
        # config -> c_info -> name path -- TODO confirm against caller.
        name = i['config']['c_info']['name']
    def __init__(self, p):
        """Collect domain records from the parsed SXP list ``p``.

        NOTE(review): sampled listing -- the ``for`` loop binding ``i``
        and the ``try:`` header matching the ``except`` below are
        elided from this excerpt.
        """
        # Only top-level entries tagged 'domain' are of interest.
        if i and i[0] == 'domain':
            # Key/value pairs inside a domain record are exactly the
            # 2-element sublists; fold them into a dict for lookup.
            data = dict(j for j in i if len(j) == 2)
            # domid arrives as a string token; normalize to int so
            # numeric sorting works downstream.
            domid = int(data['domid'])
        # Malformed or incomplete records raise KeyError/ValueError and
        # are (presumably) skipped rather than fatal -- handler body
        # elided in this excerpt.
        except (KeyError, ValueError) as e:
if __name__ == '__main__':
    # Ask the Xen toolstack binary for the long-form (-l) domain list,
    # which it emits as SXP text; list form avoids any shell parsing.
    # NOTE(review): on Python 3, check_output returns bytes -- the SXP
    # parser downstream may need a .decode(); confirm in the elided
    # continuation of this block.
    p = subprocess.check_output(('/usr/lib/xen/bin/xen-toolstack', 'list', '-l'))