#!/usr/bin/env python
# Eclipse SUMO, Simulation of Urban MObility; see https://eclipse.dev/sumo
# Copyright (C) 2011-2026 German Aerospace Center (DLR) and others.
# This program and the accompanying materials are made available under the
# terms of the Eclipse Public License 2.0 which is available at
# https://www.eclipse.org/legal/epl-2.0/
# This Source Code may also be made available under the following Secondary
# Licenses when the conditions for such availability set forth in the Eclipse
# Public License 2.0 are satisfied: GNU General Public License, version 2
# or later which is available at
# https://www.gnu.org/licenses/old-licenses/gpl-2.0-standalone.html
# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later

# @file    netdiff.py
# @author  Daniel Krajzewicz
# @author  Michael Behrisch
# @author  Jakob Erdmann
# @date    2011-10-04

"""
Reads two networks (source, dest) and tries to produce the minimal plain-xml input
which can be loaded with netconvert alongside source to create dest
"""
from __future__ import absolute_import
from __future__ import print_function

import sys
import os
try:
    from StringIO import StringIO
except ImportError:
    from io import StringIO
from xml.dom import pulldom
from xml.dom import Node
from subprocess import call
from collections import defaultdict

sys.path.append(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
import sumolib  # noqa
from sumolib.datastructures.OrderedMultiSet import OrderedMultiSet  # noqa
from sumolib.options import ArgumentParser  # noqa
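
# Typical invocation (sketch; the positional arguments are defined in parse_args() below):
#   python netdiff.py source.net.xml dest.net.xml diffprefix
# This writes diffprefix.nod.xml, diffprefix.edg.xml, diffprefix.con.xml,
# diffprefix.tll.xml and diffprefix.typ.xml. The diff can then be applied to the
# source network with netconvert, for example (illustrative option names):
#   netconvert --sumo-net-file source.net.xml -n diffprefix.nod.xml \
#       -e diffprefix.edg.xml -x diffprefix.con.xml -i diffprefix.tll.xml \
#       -t diffprefix.typ.xml -o patched.net.xml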


def parse_args():
    optParser = ArgumentParser()
    optParser.add_option("source", category="input", type=optParser.net_file,
                         help="original network")
    optParser.add_option("dest", category="input", type=optParser.net_file,
                         help="modified network")
    optParser.add_option("outprefix", category="output", type=optParser.file,
                         help="prefix for the diff files")
    optParser.add_option("-v", "--verbose", action="store_true",
                         default=False, help="Give more output")
    optParser.add_option("-p", "--use-prefix", action="store_true",
                         default=False, help="interpret source and dest as plain-xml prefix instead of network names")
    optParser.add_option("-d", "--direct", action="store_true",
                         default=False, help="compare source and dest files directly")
    optParser.add_option("-i", "--patch-on-import", action="store_true",
                         default=False, help="generate patch that can be applied during initial network import" +
                         " (exports additional connection elements)")
    optParser.add_option("--copy",
                         help="comma-separated list of element names to copy (if they are unchanged)")
    optParser.add_option("--path", dest="path", help="Path to binaries")
    optParser.add_option("--remove-plain", action="store_true",
                         help="avoid saving plain xml files of source and destination networks")
    optParser.add_option("-l", "--write-selections", category="output", action="store_true", default=False,
                         help="Write selection files for created, deleted and changed elements")
    optParser.add_option("-s", "--write-shapes", category="output", action="store_true", default=False,
                         help="Write shape files for created, deleted and changed elements")
    optParser.add_option("-g", "--plain-geo", category="output", action="store_true", default=False,
                         help="Write geo coordinates instead of network coordinates")
    options = optParser.parse_args()
    if options.use_prefix and options.direct:
        optParser.error(
            "Options --use-prefix and --direct are mutually exclusive")

    if options.write_shapes:
        if options.direct:
            optParser.error(
                "Options --write-shapes and --direct are mutually exclusive")
        if options.use_prefix:
            optParser.error(
                "Options --write-shapes and --use-prefix are mutually exclusive")

    return options


# CONSTANTS
INDENT = 4

# file types to compare
TYPE_NODES = '.nod.xml'
TYPE_EDGES = '.edg.xml'
TYPE_CONNECTIONS = '.con.xml'
TYPE_TLLOGICS = '.tll.xml'
TYPE_EDGETYPES = '.typ.xml'
PLAIN_TYPES = [
    TYPE_NODES,
    TYPE_EDGES,
    TYPE_CONNECTIONS,
    TYPE_TLLOGICS,
    TYPE_EDGETYPES
]

# traffic lights have some peculiarities
# CAVEAT1 - ids are not unique (only in combination with programID)
# CAVEAT2 - the order of their children (phases) is important.
#     this makes partial diffs infeasible. The easiest solution is to forgo diffs and always
#     export the whole new traffic light
# CAVEAT3 - deletes need not be written because they are also signaled by a changed node type
#     (and they complicate the handling of deleted tl-connections)
# CAVEAT4 - deleted connections must be written with their tlID and tlIndex, otherwise
#     parsing in netconvert becomes tedious
# CAVEAT5 - phases must maintain their order
# CAVEAT6 - identical phases may occur multiple times, thus OrderedMultiSet
# CAVEAT7 - changing edge type triggers 'type override'
#     (all attributes defined for the edge type are applied. This must be avoided)
# CAVEAT8 - TAG_TLL must always be written before TAG_CONNECTION
# CAVEAT9 - when TAG_NEIGH is removed, <neigh lane=""/> must be written into the diff to indicate removal
# CAVEAT10 - when a connection element is written without 'to' it describes an edge without connections.
#     This must be omitted from 'deleted elements'
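#     (illustrative example: a plain <connection from="someEdge"/> without a 'to' attribute
#     only states that 'someEdge' currently has no connections; it must not become a delete)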

TAG_TLL = 'tlLogic'
TAG_CONNECTION = 'connection'
TAG_CROSSING = 'crossing'
TAG_ROUNDABOUT = 'roundabout'
TAG_LANE = 'lane'
TAG_NEIGH = 'neigh'
TAG_EDGE = 'edge'
TAG_NODE = 'node'
TAG_PARAM = 'param'
TAG_LOCATION = 'location'

# see CAVEAT1
IDATTRS = defaultdict(lambda: ('id',))
IDATTRS[TAG_TLL] = ('id', 'programID')
IDATTRS[TAG_CONNECTION] = ('from', 'to', 'fromLane', 'toLane')
IDATTRS[TAG_CROSSING] = ('node', 'edges')
IDATTRS[TAG_ROUNDABOUT] = ('edges',)
IDATTRS['interval'] = ('begin', 'end')
IDATTRS[TAG_LANE] = ('index',)
IDATTRS[TAG_NEIGH] = ('lane',)
IDATTRS[TAG_PARAM] = ('key',)

DELETE_ELEMENT = 'delete'  # the xml element for signifying deletes
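
# For orientation, a generated diff file (here: the edge diff) is plain xml of roughly
# this shape; the element names and section comments below are written by this script,
# the ids and attribute values are purely illustrative:
#   <edges>
#       <!-- Deleted Elements -->
#       <delete id="removedEdge"/>
#       <!-- Created Elements -->
#       <edge id="addedEdge" from="nodeA" to="nodeB" numLanes="2"/>
#       <!-- Changed Elements -->
#       <edge id="keptEdge" speed="27.78"/>
#   </edges>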

# provide an order for the attribute names
ATTRIBUTE_NAMES = {
    # '.nod.xml' : ()
    # '.edg.xml' : ()
    # '.con.xml' : ()
}

# default values for the given attribute (needed when attributes appear in
# source but do not appear in dest)
MISSING_DEFAULT = "_MISSING_DEFAULT_"
DEFAULT_VALUES = defaultdict(lambda: MISSING_DEFAULT)
DEFAULT_VALUES['offset'] = "0"
DEFAULT_VALUES['spreadType'] = "right"
DEFAULT_VALUES['customShape'] = "false"
DEFAULT_VALUES['keepClear'] = "true"
DEFAULT_VALUES['contPos'] = "-1"
DEFAULT_VALUES['visibility'] = "-1"
DEFAULT_VALUES['z'] = "0"
DEFAULT_VALUES['radius'] = "-1"
DEFAULT_VALUES['allow'] = "all"
DEFAULT_VALUES['rightOfWay'] = "default"
DEFAULT_VALUES['fringe'] = "default"
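
# Sketch of how these defaults are used by AttributeStore.diff(): if the source edge has
# spreadType="center" and the dest edge omits the attribute, the dest value is taken to be
# DEFAULT_VALUES['spreadType'] ("right") and spreadType="right" is written into the diff;
# attributes without a known default are reported in a "missingAttributes" comment instead.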

IGNORE_TAGS = set([TAG_LOCATION])


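# The AttributeStore below keeps one entry per (tag, id) pair; the id is the tuple of
# IDATTRS values for that tag (e.g. a connection is keyed by from/to/fromLane/toLane).
# Each entry maps to a (names, values, children) triple where children is a nested
# AttributeStore holding child elements such as the lanes of an edge or the phases of a
# tlLogic. Created, deleted and copied ids are kept in OrderedMultiSets to preserve
# element order (see CAVEAT5/CAVEAT6).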
# stores attributes for later comparison
class AttributeStore:
    patchImport = False

    def __init__(self, type, copy_tags, level=1):
        # xml type being parsed
        self.type = type
        # tag names to copy even if unchanged
        self.copy_tags = copy_tags
        # indent level
        self.level = level
        # dict of names-tuples
        self.attrnames = {}
        # sets of (tag, id) preserve order to avoid dangling references during
        # loading
        self.ids_deleted = OrderedMultiSet()
        self.ids_created = OrderedMultiSet()
        self.ids_copied = OrderedMultiSet()
        # dict from (tag, id) to (names, values, children)
        self.id_attrs = {}
        # dict from tag to (names, values)-sets, need to preserve order
        # (CAVEAT5)
        self.idless_deleted = defaultdict(OrderedMultiSet)
        self.idless_created = defaultdict(OrderedMultiSet)
        self.idless_copied = defaultdict(OrderedMultiSet)

    def __str__(self):
        return ("AttributeStore(level=%s, attrnames=%s, id_attrs:%s)" % (
            self.level, self.attrnames,
            ''.join(["\n%s%s: n=%s, v=%s, c=%s" % (' ' * self.level, k, n, v, c)
                     for k, (n, v, c) in self.id_attrs.items()])))

    # xml.dom getAttribute returns "" if the attribute is not present, so return None instead
    def getValue(self, node, name):
        if node.hasAttribute(name):
            return node.getAttribute(name)
        else:
            return None

    def getNames(self, xmlnode):
        idattrs = IDATTRS[xmlnode.localName]
        a = xmlnode.attributes
        all = [a.item(i).localName for i in range(a.length)]
        instance = tuple([n for n in all if n not in idattrs])
        if instance not in self.attrnames:
            self.attrnames[instance] = instance
        # only store a single instance of this tuple to conserve memory
        return self.attrnames[instance]

    def getAttrs(self, xmlnode):
        names = self.getNames(xmlnode)
        values = tuple([self.getValue(xmlnode, a) for a in names])
        children = None
        if any([c.nodeType == Node.ELEMENT_NODE for c in xmlnode.childNodes]):
            children = AttributeStore(
                self.type, self.copy_tags, self.level + 1)
        tag = xmlnode.localName
        id = tuple([xmlnode.getAttribute(a)
                    for a in IDATTRS[tag] if xmlnode.hasAttribute(a)])
        return tag, id, children, (names, values, children)

    def store(self, xmlnode):
        tag, id, children, attrs = self.getAttrs(xmlnode)
        tagid = (tag, id)
        if id != ():
            self.ids_deleted.add(tagid)
            self.ids_copied.add(tagid)
            self.id_attrs[tagid] = attrs
            if children:
                for child in xmlnode.childNodes:
                    if child.nodeType == Node.ELEMENT_NODE:
                        children.store(child)
        else:
            self.no_children_supported(children, tag)
            self.idless_deleted[tag].add(attrs)

    def compare(self, xmlnode):
        tag, id, children, attrs = self.getAttrs(xmlnode)
        oldChildren = None
        tagid = (tag, id)
        if id != ():
            if AttributeStore.patchImport:
                if self.hasChangedConnection(tagid, attrs):
                    # export all connections from the same edge
                    fromEdge = id[0]
                    markChanged = []
                    for tagid2 in self.ids_deleted:
                        fromEdge2 = tagid2[1][0]
                        if fromEdge == fromEdge2:
                            markChanged.append(tagid2)
                    for tagid2 in markChanged:
                        self.ids_deleted.remove(tagid2)
                return
            if tagid in self.ids_deleted:
                oldChildren = self.id_attrs[tagid][2]
                self.ids_deleted.remove(tagid)
                self.id_attrs[tagid] = self.compareAttrs(
                    self.id_attrs[tagid], attrs, tag)
            else:
                self.ids_created.add(tagid)
                self.id_attrs[tagid] = attrs

            children = self.id_attrs[tagid][2]
            if children:
                for child in xmlnode.childNodes:
                    if child.nodeType == Node.ELEMENT_NODE:
                        children.compare(child)
                if tag == TAG_TLL or tag in self.copy_tags:  # see CAVEAT2
                    child_strings = StringIO()
                    children.writeDeleted(child_strings)
                    children.writeCreated(child_strings)
                    children.writeChanged(child_strings)

                    if len(child_strings.getvalue()) > 0 or tag in self.copy_tags:
                        # there are some changes. Go back and store everything
                        children = AttributeStore(
                            self.type, self.copy_tags, self.level + 1)
                        for child in xmlnode.childNodes:
                            if child.nodeType == Node.ELEMENT_NODE:
                                children.compare(child)
                        self.id_attrs[tagid] = self.id_attrs[
                            tagid][0:2] + (children,)

            elif tag == TAG_EDGE and oldChildren:
                # see CAVEAT9
                oldKeys = list(oldChildren.id_attrs.keys())
                children = oldChildren
                for k in oldKeys:
                    n, v, c = oldChildren.id_attrs[k]
                    if c:
                        deletedNeigh = False
                        for k2, (n2, v2, c2) in c.id_attrs.items():
                            if k2[0] == TAG_NEIGH:
                                deletedNeigh = True
                        if deletedNeigh:
                            # print("k2=%s n2=%s v2=%s c2=%s" % (k2, n2, v2, c2))
                            delkey = (TAG_NEIGH, ("",))
                            children.id_attrs[k][2].id_attrs = {delkey: ([], [], None)}
                            children.id_attrs[k][2].ids_created.add(delkey)
                            children.ids_deleted.discard(k)
                    else:
                        del children.id_attrs[k]
                self.id_attrs[tagid] = self.id_attrs[tagid][0:2] + (children,)

        else:
            self.no_children_supported(children, tag)
            if attrs in self.idless_deleted[tag]:
                self.idless_deleted[tag].remove(attrs)
                if tag in self.copy_tags:
                    self.idless_copied[tag].add(attrs)
            elif tag in IGNORE_TAGS:
                self.idless_deleted[tag].clear()
            else:
                self.idless_created[tag].add(attrs)

    def no_children_supported(self, children, tag):
        if children:
            print(
                "WARNING: Handling of children only supported for elements with id. Ignored for element '%s'" % tag)

    def compareAttrs(self, sourceAttrs, destAttrs, tag):
        snames, svalues, schildren = sourceAttrs
        dnames, dvalues, dchildren = destAttrs
        # for traffic lights, always use dchildren
        if schildren and dchildren:
            # trigger compare
            dchildren = schildren
        if snames == dnames:
            values = tuple([self.diff(tag, n, s, d)
                            for n, s, d in zip(snames, svalues, dvalues)])
            return snames, values, dchildren
        else:
            sdict = defaultdict(lambda: None, zip(snames, svalues))
            ddict = defaultdict(lambda: None, zip(dnames, dvalues))
            names = tuple(set(snames + dnames))
            values = tuple([self.diff(tag, n, sdict[n], ddict[n]) for n in names])
            return names, values, dchildren

    def diff(self, tag, name, sourceValue, destValue):
        if (sourceValue == destValue or
                # CAVEAT7
                (tag == TAG_EDGE and name == "type")):
            return None
        elif destValue is None:
            destValue = DEFAULT_VALUES[name]
            if sourceValue == destValue:
                return None
        return destValue

    def hasChangedConnection(self, tagid, attrs):
        tag, id = tagid
        if tag != TAG_CONNECTION:
            return False
        if tagid in self.ids_deleted:
            names, values, children = self.compareAttrs(self.id_attrs[tagid], attrs, tag)
            for v in values:
                if v is not None:
                    return True
            return False
        else:
            return True

    def writeDeleted(self, file):
        # data loss if two elements with different tags
        # have the same id
        for tag, id in self.ids_deleted:
            comment_start, comment_end = ("", "")
            additional = ""
            delete_element = DELETE_ELEMENT

            if self.type == TYPE_TLLOGICS and tag == TAG_CONNECTION:
                # see CAVEAT4
                names, values, children = self.id_attrs[(tag, id)]
                additional = " " + self.attr_string(names, values)

            if tag == TAG_TLL:  # see CAVEAT3
                comment_start, comment_end = (
                    "<!-- implicit via changed node type: ", " -->")

            if tag == TAG_CROSSING:
                delete_element = tag
                additional = ' discard="true"'

            if tag == TAG_ROUNDABOUT:
                delete_element = tag
                additional = ' discard="true"'
                comment_start, comment_end = (
                    "<!-- deletion of roundabouts not yet supported. see #2225 ", " -->")

            if tag == TAG_NEIGH:
                delete_element = tag
                additional = ' lane=""'

            if self.type == TYPE_CONNECTIONS and tag == TAG_CONNECTION and len(id) == 1:
                # see CAVEAT10
                comment_start, comment_end = (
                    "<!-- disconnected edge implicitly loses connections when deleted: ", " -->")

            self.write(file, '%s<%s %s%s/>%s\n' % (
                comment_start,
                delete_element, self.id_string(tag, id), additional,
                comment_end))
        # data loss if two elements with different tags
        # have the same list of attributes and values
        for value_set in self.idless_deleted.values():
            self.write_idless(file, value_set, DELETE_ELEMENT)

    def writeCreated(self, file, whiteList=None, blackList=None):
        self.write_tagids(file, self.filterTags(self.ids_created, whiteList, blackList), True)
        for tag, value_set in self.idless_created.items():
            if ((whiteList is not None and tag not in whiteList)
                    or (blackList is not None and tag in blackList)):
                continue
            self.write_idless(file, value_set, tag)

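    # After compare() has run, ids_deleted holds the source-only ids, ids_created the
    # dest-only ids and ids_copied every id seen in the source, so the ids present in
    # both networks (the candidates for a "changed" entry) are
    # ids_copied - (ids_deleted | ids_created).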
    def getTagidsChanged(self):
        return self.ids_copied - (self.ids_deleted | self.ids_created)

    def writeChanged(self, file, whiteList=None, blackList=None):
        tagids_changed = self.getTagidsChanged()
        self.write_tagids(file, self.filterTags(tagids_changed, whiteList, blackList), False)

    def writeCopies(self, file, copy_tags):
        tagids_unchanged = self.ids_copied - \
            (self.ids_deleted | self.ids_created)
        self.write_tagids(file, tagids_unchanged, False)
        for tag, value_set in self.idless_copied.items():
            self.write_idless(file, value_set, tag)

    def write_idless(self, file, attr_set, tag):
        for names, values, children in attr_set:
            self.write(file, '<%s %s/>\n' %
                       (tag, self.attr_string(names, values)))

    def write_tagids(self, file, tagids, create):
        for tagid in tagids:
            tag, id = tagid
            names, values, children = self.id_attrs[tagid]
            missing = []
            attrs = self.attr_string(names, values, missing)
            child_strings = StringIO()
            comments = ""
            if missing:
                comments = " <!-- missingAttributes: %s -->" % ','.join(missing)
            if children:
                # writeDeleted is not supported
                children.writeCreated(child_strings)
                children.writeChanged(child_strings)

            if len(attrs) > 0 or len(child_strings.getvalue()) > 0 or create or tag in self.copy_tags or missing:
                close_tag = "/>%s\n" % comments
                if len(child_strings.getvalue()) > 0:
                    close_tag = ">%s\n%s" % (comments, child_strings.getvalue())
                self.write(file, '<%s %s %s%s' % (
                    tag,
                    self.id_string(tag, id),
                    attrs,
                    close_tag))
                if len(child_strings.getvalue()) > 0:
                    self.write(file, "</%s>\n" % tag)

    def write(self, file, item):
        file.write(" " * INDENT * self.level)
        file.write(item)

    def attr_string(self, names, values, missing=None):
        if missing is not None:
            missing += [n for n, v in sorted(zip(names, values)) if v is MISSING_DEFAULT]
        return ' '.join(['%s="%s"' % (n, v) for n, v in sorted(zip(names, values)) if v is not None and v is not MISSING_DEFAULT])  # noqa

    def id_string(self, tag, id):
        idattrs = IDATTRS[tag]
        return ' '.join(['%s="%s"' % (n, v) for n, v in sorted(zip(idattrs, id))])

    def filterTags(self, tagids, whiteList, blackList):
        if whiteList is not None:
            return [tagid for tagid in tagids if tagid[0] in whiteList]
        elif blackList is not None:
            return [tagid for tagid in tagids if tagid[0] not in blackList]
        else:
            return tagids

    def reorderTLL(self):
        for tag, id in self.ids_created:
            if tag == TAG_CONNECTION:
                for tag2, id2 in self.getTagidsChanged():
                    if tag2 == TAG_TLL:
                        return True
                return False
        return False

    def writeCreatedSelection(self, file):
        for tag, id in self.ids_created:
            # multi-id elements (connections) are not supported by selection files
            if len(id) == 1:
                file.write("%s:%s\n" % (tag, str(id[0])))

    def writeDeletedSelection(self, file):
        for tag, id in self.ids_deleted:
            # multi-id elements (connections) are not supported by selection files
            if len(id) == 1:
                file.write("%s:%s\n" % (tag, str(id[0])))

    def writeChangedSelection(self, file):
        for tag, id in self.getTagidsChanged():
            # multi-id elements (connections) are not supported by selection files
            if len(id) == 1:
                names, values, children = self.id_attrs[(tag, id)]
                attrs = self.attr_string(names, values)
                if attrs:
                    file.write("%s:%s\n" % (tag, str(id[0])))

    def writeDeletedShapes(self, file, sourceNet):
        for tag, id in self.ids_deleted:
            self.writeShape(file, tag, id, "red", sourceNet, id)

    def writeCreatedShapes(self, file, destNet):
        for tag, id in self.ids_created:
            self.writeShape(file, tag, id, "green", destNet, id)

    def writeChangedShapes(self, file, sourceNet, destNet):
        for tag, id in self.getTagidsChanged():
            names, values, children = self.id_attrs[(tag, id)]
            attrs = self.attr_string(names, values)
            if attrs:
                self.writeShape(file, tag, id, "orange", sourceNet, id)
                if "shape" in names:
                    self.writeShape(file, tag, id, "yellow", destNet, id + ("dest",))

    def writeShape(self, file, tag, id, color, net, id2):
        shape = None
        fill = False
        layer = 10
        if tag == TAG_EDGE:
            shape = net.getEdge(id[0]).getShape()
        if tag == TAG_NODE:
            shape = net.getNode(id[0]).getShape()
            fill = True
            layer = 11
        if shape:
            shape = ' '.join([','.join(map(lambda x: "%.2f" % x, pos)) for pos in shape])
            file.write('    <poly id="%s" type="%s" shape="%s" fill="%s" layer="%s" color="%s"/>\n' % (
                ":".join(id2), tag, shape, fill, layer, color))


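# Runs netconvert to export the plain-xml representation of the given network
# (<prefix>.nod.xml, .edg.xml, .con.xml, .tll.xml, .typ.xml) and returns the prefix,
# i.e. the network file name without its .net.xml or .net.xml.gz extension.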
def create_plain(netfile, netconvert, plain_geo):
    prefix = netfile[:-11] if netfile[-3:] == '.gz' else netfile[:-8]
    call([netconvert,
          "--sumo-net-file", netfile,
          "--plain-output-prefix", prefix,
          "--default.spreadtype", "right",  # overwrite value in net
          "--roundabouts.guess", "false"]
         + (["--proj.plain-geo"] if plain_geo else []))
    return prefix


# creates diff of a flat xml structure
# (only children of the root element and their attrs are compared)
def xmldiff(options, source, dest, diff, type, copy_tags, patchImport,
            selectionOutputFiles, shapeOutputFiles,
            sourceNet=None, destNet=None):
    attributeStore = AttributeStore(type, copy_tags)
    root = None
    have_source = os.path.isfile(source)
    have_dest = os.path.isfile(dest)
    if have_source:
        root, schema, version = handle_children(source, attributeStore.store)
    if have_dest:
        if patchImport:
            # run diff twice to determine edges with changed connections
            AttributeStore.patchImport = True
            root, schema, version = handle_children(dest, attributeStore.compare)
            AttributeStore.patchImport = False
            root, schema, version = handle_children(dest, attributeStore.compare)
        else:
            root, schema, version = handle_children(dest, attributeStore.compare)

    if not have_source and not have_dest:
        print("Skipping %s due to lack of input files." % diff)
    else:
        if not have_source:
            print("Source file %s is missing. Assuming all elements are created." % source)
        elif not have_dest:
            print("Dest file %s is missing. Assuming all elements are deleted." % dest)

        with sumolib.openz(diff, 'w') as diff_file:
            sumolib.xml.writeHeader(diff_file, root=root, schemaPath=schema, rootAttrs=version, options=options)
            if copy_tags:
                attributeStore.write(diff_file, "<!-- Copied Elements -->\n")
                attributeStore.writeCopies(diff_file, copy_tags)
            attributeStore.write(diff_file, "<!-- Deleted Elements -->\n")
            attributeStore.writeDeleted(diff_file)

            if attributeStore.reorderTLL():
                # CAVEAT8
                attributeStore.write(diff_file, "<!-- Created Elements -->\n")
                attributeStore.writeCreated(diff_file, whiteList=[TAG_TLL])
                attributeStore.write(diff_file, "<!-- Changed Elements -->\n")
                attributeStore.writeChanged(diff_file, whiteList=[TAG_TLL])
                attributeStore.write(diff_file, "<!-- Created Elements -->\n")
                attributeStore.writeCreated(diff_file, blackList=[TAG_TLL])
                attributeStore.write(diff_file, "<!-- Changed Elements -->\n")
                attributeStore.writeChanged(diff_file, blackList=[TAG_TLL])
            else:
                attributeStore.write(diff_file, "<!-- Created Elements -->\n")
                attributeStore.writeCreated(diff_file)
                attributeStore.write(diff_file, "<!-- Changed Elements -->\n")
                attributeStore.writeChanged(diff_file)
            diff_file.write("</%s>\n" % root)

        if selectionOutputFiles:
            created, deleted, changed = selectionOutputFiles
            attributeStore.writeCreatedSelection(created)
            attributeStore.writeDeletedSelection(deleted)
            attributeStore.writeChangedSelection(changed)
        if shapeOutputFiles:
            created, deleted, changed = shapeOutputFiles
            attributeStore.writeCreatedShapes(created, destNet)
            attributeStore.writeDeletedShapes(deleted, sourceNet)
            attributeStore.writeChangedShapes(changed, sourceNet, destNet)


# calls function handle_parsenode for all children of the root element
# returns the name of the root element, the schema and the version attribute string
def handle_children(xmlfile, handle_parsenode):
    root = None
    schema = None
    version = ""
    level = 0
    with open(xmlfile, 'rb') as in_xml:
        xml_doc = pulldom.parse(in_xml)
        for event, parsenode in xml_doc:
            if event == pulldom.START_ELEMENT:
                # print level, parsenode.getAttribute(ID_ATTR)
                if level == 0:
                    # the root element is not expanded; only its name and attributes are read
                    root = parsenode.localName
                    if root == "edges":
                        schema = "edgediff_file.xsd"
                    elif root == "tlLogics":
                        schema = "tllogic_file.xsd"
                    if parsenode.hasAttribute("version"):
                        version = ' version="%s"' % parsenode.getAttribute("version")
                    if parsenode.hasAttribute("spreadType"):
                        DEFAULT_VALUES["spreadType"] = parsenode.getAttribute("spreadType")
                    if root not in ("edges", "nodes", "connections", "tlLogics"):
                        # do not write schema information
                        version = None
                if level == 1:
                    # consumes END_ELEMENT, no level increase
                    xml_doc.expandNode(parsenode)
                    handle_parsenode(parsenode)
                else:
                    level += 1
            elif event == pulldom.END_ELEMENT:
                level -= 1
    return root, schema, version


# run
def main(options):
    copy_tags = options.copy.split(',') if options.copy else []

    selectionOutputFiles = []
    shapeOutputFiles = []
    if options.write_selections:
        selectionOutputFiles.append(sumolib.openz(options.outprefix + '.created.sel.txt', 'w'))
        selectionOutputFiles.append(sumolib.openz(options.outprefix + '.deleted.sel.txt', 'w'))
        selectionOutputFiles.append(sumolib.openz(options.outprefix + '.changed.sel.txt', 'w'))
    if options.write_shapes:
        shapeOutputFiles.append(sumolib.openz(options.outprefix + '.created.shape.add.xml', 'w'))
        shapeOutputFiles.append(sumolib.openz(options.outprefix + '.deleted.shape.add.xml', 'w'))
        shapeOutputFiles.append(sumolib.openz(options.outprefix + '.changed.shape.add.xml', 'w'))
        for f in shapeOutputFiles:
            sumolib.writeXMLHeader(f, "$Id$", "additional", options=options)  # noqa

    if options.direct:
        type = '.xml'
        xmldiff(options,
                options.source,
                options.dest,
                options.outprefix + type,
                type,
                copy_tags,
                options.patch_on_import,
                selectionOutputFiles,
                shapeOutputFiles)
    else:
        sourceNet = None
        destNet = None
        if not options.use_prefix:
            netconvert = sumolib.checkBinary("netconvert", options.path)
            if shapeOutputFiles:
                sourceNet = sumolib.net.readNet(options.source)
                destNet = sumolib.net.readNet(options.dest)
            options.source = create_plain(options.source, netconvert, options.plain_geo)
            options.dest = create_plain(options.dest, netconvert, options.plain_geo)

        for type in PLAIN_TYPES:
            xmldiff(options,
                    options.source + type,
                    options.dest + type,
                    options.outprefix + type,
                    type,
                    copy_tags,
                    options.patch_on_import,
                    selectionOutputFiles,
                    shapeOutputFiles,
                    sourceNet, destNet)
            if options.remove_plain:
                os.remove(options.source + type)
                os.remove(options.dest + type)

    for f in selectionOutputFiles:
        f.close()
    for f in shapeOutputFiles:
        f.write("</additional>\n")
        f.close()


if __name__ == "__main__":
    main(parse_args())