#!/usr/bin/env python
# Eclipse SUMO, Simulation of Urban MObility; see https://eclipse.dev/sumo
# Copyright (C) 2011-2025 German Aerospace Center (DLR) and others.
# This program and the accompanying materials are made available under the
# terms of the Eclipse Public License 2.0 which is available at
# https://www.eclipse.org/legal/epl-2.0/
# This Source Code may also be made available under the following Secondary
# Licenses when the conditions for such availability set forth in the Eclipse
# Public License 2.0 are satisfied: GNU General Public License, version 2
# or later which is available at
# https://www.gnu.org/licenses/old-licenses/gpl-2.0-standalone.html
# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later

# @file netdiff.py
# @author Daniel Krajzewicz
# @author Michael Behrisch
# @author Jakob Erdmann
# @date 2011-10-04

"""
Reads two networks (source, dest) and tries to produce the minimal plain-xml input
which can be loaded with netconvert alongside source to create dest
"""
from __future__ import absolute_import
from __future__ import print_function

import sys
import os
import codecs
try:
    from StringIO import StringIO
except ImportError:
    from io import StringIO
from xml.dom import pulldom
from xml.dom import Node
from subprocess import call
from collections import defaultdict

sys.path.append(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
import sumolib  # noqa
from sumolib.datastructures.OrderedMultiSet import OrderedMultiSet  # noqa
from sumolib.options import ArgumentParser  # noqa


def parse_args():
    optParser = ArgumentParser()
    optParser.add_option("source", category="input", type=optParser.net_file,
                         help="original network")
    optParser.add_option("dest", category="input", type=optParser.net_file,
                         help="modified network")
    optParser.add_option("outprefix", category="output", type=optParser.file,
                         help="prefix for the diff files")
    optParser.add_option("-v", "--verbose", action="store_true",
                         default=False, help="Give more output")
    optParser.add_option("-p", "--use-prefix", action="store_true",
                         default=False, help="interpret source and dest as plain-xml prefix instead of network names")
    optParser.add_option("-d", "--direct", action="store_true",
                         default=False, help="compare source and dest files directly")
    optParser.add_option("-i", "--patch-on-import", action="store_true",
                         default=False, help="generate patch that can be applied during initial network import" +
                         " (exports additional connection elements)")
    optParser.add_option("--copy",
                         help="comma-separated list of element names to copy (if they are unchanged)")
    optParser.add_option("--path", dest="path", help="Path to binaries")
    optParser.add_option("--remove-plain", action="store_true",
                         help="avoid saving plain xml files of source and destination networks")
    optParser.add_option("-l", "--write-selections", category="output", action="store_true", default=False,
                         help="Write selection files for created, deleted and changed elements")
    optParser.add_option("-s", "--write-shapes", category="output", action="store_true", default=False,
                         help="Write shape files for created, deleted and changed elements")
    optParser.add_option("-g", "--plain-geo", category="output", action="store_true", default=False,
                         help="Write geo coordinates instead of network coordinates")
    options = optParser.parse_args()
    if options.use_prefix and options.direct:
        optParser.error(
            "Options --use-prefix and --direct are mutually exclusive")

    if options.write_shapes:
        if options.direct:
            optParser.error(
                "Options --write-shapes and --direct are mutually exclusive")
        if options.use_prefix:
            optParser.error(
                "Options --write-shapes and --use-prefix are mutually exclusive")

    return options


# CONSTANTS
INDENT = 4

# file types to compare
TYPE_NODES = '.nod.xml'
TYPE_EDGES = '.edg.xml'
TYPE_CONNECTIONS = '.con.xml'
TYPE_TLLOGICS = '.tll.xml'
TYPE_EDGETYPES = '.typ.xml'
PLAIN_TYPES = [
    TYPE_NODES,
    TYPE_EDGES,
    TYPE_CONNECTIONS,
    TYPE_TLLOGICS,
    TYPE_EDGETYPES
]

# traffic lights have some peculiarities
# CAVEAT1 - ids are not unique (only in combination with programID)
# CAVEAT2 - the order of their children (phases) is important.
# this makes partial diffs unfeasible. The easiest solution is to forgo diffs and always
# export the whole new traffic light
# CAVEAT3 - deletes need not be written because they are also signaled by a changed node type
# (and they complicate the handling of deleted tl-connections)
# CAVEAT4 - deleted connections must be written with their tlID and tlIndex, otherwise
# parsing in netconvert becomes tedious
# CAVEAT5 - phases must maintain their order
# CAVEAT6 - identical phases may occur multiple times, thus OrderedMultiSet
# CAVEAT7 - changing edge type triggers 'type override'
# (all attributes defined for the edge type are applied. This must be avoided)
# CAVEAT8 - TAG_TLL must always be written before TAG_CONNECTION
# CAVEAT9 - when TAG_NEIGH is removed, <neigh lane=""/> must be written into the diff to indicate removal
# CAVEAT10 - when a connection element is written without 'to' it describes an edge without connections.
# This must be omitted from 'deleted elements'

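# Hand-written, illustrative sketches of the diff entries these caveats lead to
# (attribute values are placeholders, not generated output):
#   <delete id="gneE3"/>                                  plain removal of an element with an id
#   <crossing edges="E1 E2" node="C" discard="true"/>     crossings are discarded rather than deleted
#   <neigh lane=""/>                                      written inside a lane to signal a removed neigh (CAVEAT9)
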
124
TAG_TLL = 'tlLogic'
125
TAG_CONNECTION = 'connection'
126
TAG_CROSSING = 'crossing'
127
TAG_ROUNDABOUT = 'roundabout'
128
TAG_LANE = 'lane'
129
TAG_NEIGH = 'neigh'
130
TAG_EDGE = 'edge'
131
TAG_NODE = 'node'
132
TAG_PARAM = 'param'
133
TAG_LOCATION = 'location'
134
135
# see CAVEAT1
136
IDATTRS = defaultdict(lambda: ('id',))
137
IDATTRS[TAG_TLL] = ('id', 'programID')
138
IDATTRS[TAG_CONNECTION] = ('from', 'to', 'fromLane', 'toLane')
139
IDATTRS[TAG_CROSSING] = ('node', 'edges')
140
IDATTRS[TAG_ROUNDABOUT] = ('edges',)
141
IDATTRS['interval'] = ('begin', 'end')
142
IDATTRS[TAG_LANE] = ('index',)
143
IDATTRS[TAG_NEIGH] = ('lane',)
144
IDATTRS[TAG_PARAM] = ('key',)
145
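# Illustrative example of how these identity tuples work: with the table above,
# <connection from="E1" to="E2" fromLane="0" toLane="1"/> is identified by
# ('E1', 'E2', '0', '1'), while an element without a special entry, e.g. <node id="C"/>,
# falls back to the default ('id',) and is identified by ('C',).
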
DELETE_ELEMENT = 'delete'  # the xml element for signifying deletes

# provide an order for the attribute names
ATTRIBUTE_NAMES = {
    # '.nod.xml' : ()
    # '.edg.xml' : ()
    # '.con.xml' : ()
}

# default values for the given attribute (needed when attributes appear in
# source but do not appear in dest)
MISSING_DEFAULT = "_MISSING_DEFAULT_"
DEFAULT_VALUES = defaultdict(lambda: MISSING_DEFAULT)
DEFAULT_VALUES['offset'] = "0"
DEFAULT_VALUES['spreadType'] = "right"
DEFAULT_VALUES['customShape'] = "false"
DEFAULT_VALUES['keepClear'] = "true"
DEFAULT_VALUES['contPos'] = "-1"
DEFAULT_VALUES['visibility'] = "-1"
DEFAULT_VALUES['z'] = "0"
DEFAULT_VALUES['radius'] = "-1"
DEFAULT_VALUES['allow'] = "all"
DEFAULT_VALUES['rightOfWay'] = "default"
DEFAULT_VALUES['fringe'] = "default"
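
# Illustrative example of how these defaults are used by AttributeStore.diff() below:
# if the source edge has spreadType="center" and the dest edge omits the attribute, the
# missing dest value is treated as the default "right" and the diff records
# spreadType="right"; if the source already had the default value, nothing is written.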

IGNORE_TAGS = set([TAG_LOCATION])


# stores attributes for later comparison
class AttributeStore:
    patchImport = False

    def __init__(self, type, copy_tags, level=1):
        # xml type being parsed
        self.type = type
        # tag names to copy even if unchanged
        self.copy_tags = copy_tags
        # indent level
        self.level = level
        # dict of names-tuples
        self.attrnames = {}
        # sets of (tag, id) preserve order to avoid dangling references during
        # loading
        self.ids_deleted = OrderedMultiSet()
        self.ids_created = OrderedMultiSet()
        self.ids_copied = OrderedMultiSet()
        # dict from (tag, id) to (names, values, children)
        self.id_attrs = {}
        # dict from tag to (names, values)-sets, need to preserve order
        # (CAVEAT5)
        self.idless_deleted = defaultdict(OrderedMultiSet)
        self.idless_created = defaultdict(OrderedMultiSet)
        self.idless_copied = defaultdict(OrderedMultiSet)

    def __str__(self):
        return ("AttributeStore(level=%s, attrnames=%s, id_attrs:%s)" % (
            self.level, self.attrnames,
            ''.join(["\n%s%s: n=%s, v=%s, c=%s" % (' ' * self.level, k, n, v, c)
                     for k, (n, v, c) in self.id_attrs.items()])))

    # getAttribute returns "" if not present
    def getValue(self, node, name):
        if node.hasAttribute(name):
            return node.getAttribute(name)
        else:
            return None

    def getNames(self, xmlnode):
        idattrs = IDATTRS[xmlnode.localName]
        a = xmlnode.attributes
        all = [a.item(i).localName for i in range(a.length)]
        instance = tuple([n for n in all if n not in idattrs])
        if instance not in self.attrnames:
            self.attrnames[instance] = instance
        # only store a single instance of this tuple to conserve memory
        return self.attrnames[instance]

    def getAttrs(self, xmlnode):
        names = self.getNames(xmlnode)
        values = tuple([self.getValue(xmlnode, a) for a in names])
        children = None
        if any([c.nodeType == Node.ELEMENT_NODE for c in xmlnode.childNodes]):
            children = AttributeStore(
                self.type, self.copy_tags, self.level + 1)
        tag = xmlnode.localName
        id = tuple([xmlnode.getAttribute(a)
                    for a in IDATTRS[tag] if xmlnode.hasAttribute(a)])
        return tag, id, children, (names, values, children)

    def store(self, xmlnode):
        tag, id, children, attrs = self.getAttrs(xmlnode)
        tagid = (tag, id)
        if id != ():
            self.ids_deleted.add(tagid)
            self.ids_copied.add(tagid)
            self.id_attrs[tagid] = attrs
            if children:
                for child in xmlnode.childNodes:
                    if child.nodeType == Node.ELEMENT_NODE:
                        children.store(child)
        else:
            self.no_children_supported(children, tag)
            self.idless_deleted[tag].add(attrs)

    def compare(self, xmlnode):
        tag, id, children, attrs = self.getAttrs(xmlnode)
        oldChildren = None
        tagid = (tag, id)
        if id != ():
            if AttributeStore.patchImport:
                if self.hasChangedConnection(tagid, attrs):
                    # export all connections from the same edge
                    fromEdge = id[0]
                    markChanged = []
                    for tagid2 in self.ids_deleted:
                        fromEdge2 = tagid2[1][0]
                        if fromEdge == fromEdge2:
                            markChanged.append(tagid2)
                    for tagid2 in markChanged:
                        self.ids_deleted.remove(tagid2)
                return
            if tagid in self.ids_deleted:
                oldChildren = self.id_attrs[tagid][2]
                self.ids_deleted.remove(tagid)
                self.id_attrs[tagid] = self.compareAttrs(
                    self.id_attrs[tagid], attrs, tag)
            else:
                self.ids_created.add(tagid)
                self.id_attrs[tagid] = attrs

            children = self.id_attrs[tagid][2]
            if children:
                for child in xmlnode.childNodes:
                    if child.nodeType == Node.ELEMENT_NODE:
                        children.compare(child)
                if tag == TAG_TLL or tag in self.copy_tags:  # see CAVEAT2
                    child_strings = StringIO()
                    children.writeDeleted(child_strings)
                    children.writeCreated(child_strings)
                    children.writeChanged(child_strings)

                    if len(child_strings.getvalue()) > 0 or tag in self.copy_tags:
                        # there are some changes. Go back and store everything
                        children = AttributeStore(
                            self.type, self.copy_tags, self.level + 1)
                        for child in xmlnode.childNodes:
                            if child.nodeType == Node.ELEMENT_NODE:
                                children.compare(child)
                        self.id_attrs[tagid] = self.id_attrs[
                            tagid][0:2] + (children,)

            elif tag == TAG_EDGE and oldChildren:
                # see CAVEAT9
                children = oldChildren
                for k, (n, v, c) in oldChildren.id_attrs.items():
                    if c:
                        deletedNeigh = False
                        for k2, (n2, v2, c2) in c.id_attrs.items():
                            if k2[0] == TAG_NEIGH:
                                deletedNeigh = True
                        if deletedNeigh:
                            # print("k2=%s n2=%s v2=%s c2=%s" % (k2, n2, v2, c2))
                            delkey = (TAG_NEIGH, ("",))
                            children.id_attrs[k][2].id_attrs = {delkey: ([], [], None)}
                            children.id_attrs[k][2].ids_created.add(delkey)
                            children.ids_deleted.discard(k)
                        else:
                            del children.id_attrs[k]
                self.id_attrs[tagid] = self.id_attrs[tagid][0:2] + (children,)

        else:
            self.no_children_supported(children, tag)
            if attrs in self.idless_deleted[tag]:
                self.idless_deleted[tag].remove(attrs)
                if tag in self.copy_tags:
                    self.idless_copied[tag].add(attrs)
            elif tag in IGNORE_TAGS:
                self.idless_deleted[tag].clear()
            else:
                self.idless_created[tag].add(attrs)

    def no_children_supported(self, children, tag):
        if children:
            print(
                "WARNING: Handling of children only supported for elements with id. Ignored for element '%s'" % tag)

    def compareAttrs(self, sourceAttrs, destAttrs, tag):
        snames, svalues, schildren = sourceAttrs
        dnames, dvalues, dchildren = destAttrs
        # for traffic lights, always use dchildren
        if schildren and dchildren:
            # trigger compare
            dchildren = schildren
        if snames == dnames:
            values = tuple([self.diff(tag, n, s, d)
                            for n, s, d in zip(snames, svalues, dvalues)])
            return snames, values, dchildren
        else:
            sdict = defaultdict(lambda: None, zip(snames, svalues))
            ddict = defaultdict(lambda: None, zip(dnames, dvalues))
            names = tuple(set(snames + dnames))
            values = tuple([self.diff(tag, n, sdict[n], ddict[n]) for n in names])
            return names, values, dchildren

    def diff(self, tag, name, sourceValue, destValue):
        if (sourceValue == destValue or
                # CAVEAT7
                (tag == TAG_EDGE and name == "type")):
            return None
        elif destValue is None:
            destValue = DEFAULT_VALUES[name]
            if sourceValue == destValue:
                return None
        return destValue

    def hasChangedConnection(self, tagid, attrs):
        tag, id = tagid
        if tag != TAG_CONNECTION:
            return False
        if tagid in self.ids_deleted:
            names, values, children = self.compareAttrs(self.id_attrs[tagid], attrs, tag)
            for v in values:
                if v is not None:
                    return True
            return False
        else:
            return True

    def writeDeleted(self, file):
        # data loss if two elements with different tags
        # have the same id
        for tag, id in self.ids_deleted:
            comment_start, comment_end = ("", "")
            additional = ""
            delete_element = DELETE_ELEMENT

            if self.type == TYPE_TLLOGICS and tag == TAG_CONNECTION:
                # see CAVEAT4
                names, values, children = self.id_attrs[(tag, id)]
                additional = " " + self.attr_string(names, values)

            if tag == TAG_TLL:  # see CAVEAT3
                comment_start, comment_end = (
                    "<!-- implicit via changed node type: ", " -->")

            if tag == TAG_CROSSING:
                delete_element = tag
                additional = ' discard="true"'

            if tag == TAG_ROUNDABOUT:
                delete_element = tag
                additional = ' discard="true"'
                comment_start, comment_end = (
                    "<!-- deletion of roundabouts not yet supported. see #2225 ", " -->")

            if tag == TAG_NEIGH:
                delete_element = tag
                additional = ' lane=""'

            if self.type == TYPE_CONNECTIONS and tag == TAG_CONNECTION and len(id) == 1:
                # see CAVEAT10
                comment_start, comment_end = (
                    "<!-- disconnected edge implicitly loses connections when deleted: ", " -->")

            self.write(file, '%s<%s %s%s/>%s\n' % (
                comment_start,
                delete_element, self.id_string(tag, id), additional,
                comment_end))
        # data loss if two elements with different tags
        # have the same list of attributes and values
        for value_set in self.idless_deleted.values():
            self.write_idless(file, value_set, DELETE_ELEMENT)

    def writeCreated(self, file, whiteList=None, blackList=None):
        self.write_tagids(file, self.filterTags(self.ids_created, whiteList, blackList), True)
        for tag, value_set in self.idless_created.items():
            if ((whiteList is not None and tag not in whiteList)
                    or (blackList is not None and tag in blackList)):
                continue
            self.write_idless(file, value_set, tag)

    def getTagidsChanged(self):
        return self.ids_copied - (self.ids_deleted | self.ids_created)

    def writeChanged(self, file, whiteList=None, blackList=None):
        tagids_changed = self.getTagidsChanged()
        self.write_tagids(file, self.filterTags(tagids_changed, whiteList, blackList), False)

    def writeCopies(self, file, copy_tags):
        tagids_unchanged = self.ids_copied - \
            (self.ids_deleted | self.ids_created)
        self.write_tagids(file, tagids_unchanged, False)
        for tag, value_set in self.idless_copied.items():
            self.write_idless(file, value_set, tag)

    def write_idless(self, file, attr_set, tag):
        for names, values, children in attr_set:
            self.write(file, '<%s %s/>\n' %
                       (tag, self.attr_string(names, values)))

    def write_tagids(self, file, tagids, create):
        for tagid in tagids:
            tag, id = tagid
            names, values, children = self.id_attrs[tagid]
            missing = []
            attrs = self.attr_string(names, values, missing)
            child_strings = StringIO()
            comments = ""
            if missing:
                comments = " <!-- missingAttributes: %s -->" % ','.join(missing)
            if children:
                # writeDeleted is not supported
                children.writeCreated(child_strings)
                children.writeChanged(child_strings)

            if len(attrs) > 0 or len(child_strings.getvalue()) > 0 or create or tag in self.copy_tags or missing:
                close_tag = "/>%s\n" % comments
                if len(child_strings.getvalue()) > 0:
                    close_tag = ">%s\n%s" % (comments, child_strings.getvalue())
                self.write(file, '<%s %s %s%s' % (
                    tag,
                    self.id_string(tag, id),
                    attrs,
                    close_tag))
                if len(child_strings.getvalue()) > 0:
                    self.write(file, "</%s>\n" % tag)

    def write(self, file, item):
        file.write(" " * INDENT * self.level)
        file.write(item)

    def attr_string(self, names, values, missing=None):
        if missing is not None:
            missing += [n for n, v in sorted(zip(names, values)) if v is MISSING_DEFAULT]
        return ' '.join(['%s="%s"' % (n, v) for n, v in sorted(zip(names, values)) if v is not None and v is not MISSING_DEFAULT])  # noqa

    def id_string(self, tag, id):
        idattrs = IDATTRS[tag]
        return ' '.join(['%s="%s"' % (n, v) for n, v in sorted(zip(idattrs, id))])

    def filterTags(self, tagids, whiteList, blackList):
        if whiteList is not None:
            return [tagid for tagid in tagids if tagid[0] in whiteList]
        elif blackList is not None:
            return [tagid for tagid in tagids if tagid[0] not in blackList]
        else:
            return tagids

    def reorderTLL(self):
        for tag, id in self.ids_created:
            if tag == TAG_CONNECTION:
                for tag2, id2 in self.getTagidsChanged():
                    if tag2 == TAG_TLL:
                        return True
                return False
        return False

    def writeCreatedSelection(self, file):
        for tag, id in self.ids_created:
            # multi-id elements (connections) are not supported by selection files
            if len(id) == 1:
                file.write("%s:%s\n" % (tag, str(id[0])))

    def writeDeletedSelection(self, file):
        for tag, id in self.ids_deleted:
            # multi-id elements (connections) are not supported by selection files
            if len(id) == 1:
                file.write("%s:%s\n" % (tag, str(id[0])))

    def writeChangedSelection(self, file):
        for tag, id in self.getTagidsChanged():
            # multi-id elements (connections) are not supported by selection files
            if len(id) == 1:
                names, values, children = self.id_attrs[(tag, id)]
                attrs = self.attr_string(names, values)
                if attrs:
                    file.write("%s:%s\n" % (tag, str(id[0])))

    def writeDeletedShapes(self, file, sourceNet):
        for tag, id in self.ids_deleted:
            self.writeShape(file, tag, id, "red", sourceNet, id)

    def writeCreatedShapes(self, file, destNet):
        for tag, id in self.ids_created:
            self.writeShape(file, tag, id, "green", destNet, id)

    def writeChangedShapes(self, file, sourceNet, destNet):
        for tag, id in self.getTagidsChanged():
            names, values, children = self.id_attrs[(tag, id)]
            attrs = self.attr_string(names, values)
            if attrs:
                self.writeShape(file, tag, id, "orange", sourceNet, id)
                if "shape" in names:
                    self.writeShape(file, tag, id, "yellow", destNet, id + ("dest",))

    def writeShape(self, file, tag, id, color, net, id2):
        shape = None
        fill = False
        layer = 10
        if tag == TAG_EDGE:
            shape = net.getEdge(id[0]).getShape()
        if tag == TAG_NODE:
            shape = net.getNode(id[0]).getShape()
            fill = True
            layer = 11
        if shape:
            shape = ' '.join([','.join(map(lambda x: "%.2f" % x, pos)) for pos in shape])
            file.write(' <poly id="%s" type="%s" shape="%s" fill="%s" layer="%s" color="%s"/>\n' % (
                ":".join(id2), tag, shape, fill, layer, color))


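# Rough sketch of how AttributeStore is driven (it mirrors what xmldiff() below does;
# source_file, dest_file and out are placeholders):
#
#   store = AttributeStore(TYPE_NODES, [])
#   handle_children(source_file, store.store)     # index all elements of the source
#   handle_children(dest_file, store.compare)     # diff the destination against them
#   store.writeDeleted(out)
#   store.writeCreated(out)
#   store.writeChanged(out)

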
def create_plain(netfile, netconvert, plain_geo):
    prefix = netfile[:-8]
    call([netconvert,
          "--sumo-net-file", netfile,
          "--plain-output-prefix", prefix,
          "--default.spreadtype", "right",  # overwrite value in net
          "--roundabouts.guess", "false"]
         + (["--proj.plain-geo"] if plain_geo else []))
    return prefix

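
# The call above is roughly equivalent to running (paths are placeholders):
#   netconvert --sumo-net-file old.net.xml --plain-output-prefix old \
#       --default.spreadtype right --roundabouts.guess false [--proj.plain-geo]
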
# creates diff of a flat xml structure
# (only children of the root element and their attrs are compared)
def xmldiff(options, source, dest, diff, type, copy_tags, patchImport,
            selectionOutputFiles, shapeOutputFiles,
            sourceNet=None, destNet=None):
    attributeStore = AttributeStore(type, copy_tags)
    root = None
    have_source = os.path.isfile(source)
    have_dest = os.path.isfile(dest)
    if have_source:
        root, schema, version = handle_children(source, attributeStore.store)
    if have_dest:
        if patchImport:
            # run diff twice to determine edges with changed connections
            AttributeStore.patchImport = True
            root, schema, version = handle_children(dest, attributeStore.compare)
            AttributeStore.patchImport = False
            root, schema, version = handle_children(dest, attributeStore.compare)
        else:
            root, schema, version = handle_children(dest, attributeStore.compare)

    if not have_source and not have_dest:
        print("Skipping %s due to lack of input files." % diff)
    else:
        if not have_source:
            print("Source file %s is missing. Assuming all elements are created." % source)
        elif not have_dest:
            print("Dest file %s is missing. Assuming all elements are deleted." % dest)

        with codecs.open(diff, 'w', 'utf-8') as diff_file:
            sumolib.xml.writeHeader(diff_file, root=root, schemaPath=schema, rootAttrs=version, options=options)
            if copy_tags:
                attributeStore.write(diff_file, "<!-- Copied Elements -->\n")
                attributeStore.writeCopies(diff_file, copy_tags)
            attributeStore.write(diff_file, "<!-- Deleted Elements -->\n")
            attributeStore.writeDeleted(diff_file)

            if attributeStore.reorderTLL():
                # CAVEAT8
                attributeStore.write(diff_file, "<!-- Created Elements -->\n")
                attributeStore.writeCreated(diff_file, whiteList=[TAG_TLL])
                attributeStore.write(diff_file, "<!-- Changed Elements -->\n")
                attributeStore.writeChanged(diff_file, whiteList=[TAG_TLL])
                attributeStore.write(diff_file, "<!-- Created Elements -->\n")
                attributeStore.writeCreated(diff_file, blackList=[TAG_TLL])
                attributeStore.write(diff_file, "<!-- Changed Elements -->\n")
                attributeStore.writeChanged(diff_file, blackList=[TAG_TLL])
            else:
                attributeStore.write(diff_file, "<!-- Created Elements -->\n")
                attributeStore.writeCreated(diff_file)
                attributeStore.write(diff_file, "<!-- Changed Elements -->\n")
                attributeStore.writeChanged(diff_file)
            diff_file.write("</%s>\n" % root)

        if selectionOutputFiles:
            created, deleted, changed = selectionOutputFiles
            attributeStore.writeCreatedSelection(created)
            attributeStore.writeDeletedSelection(deleted)
            attributeStore.writeChangedSelection(changed)
        if shapeOutputFiles:
            created, deleted, changed = shapeOutputFiles
            attributeStore.writeCreatedShapes(created, destNet)
            attributeStore.writeDeletedShapes(deleted, sourceNet)
            attributeStore.writeChangedShapes(changed, sourceNet, destNet)


# calls function handle_parsenode for all children of the root element
# returns the name of the root element, the schema file (if known) and the version attribute
def handle_children(xmlfile, handle_parsenode):
    root = None
    schema = None
    version = ""
    level = 0
    with open(xmlfile, 'rb') as in_xml:
        xml_doc = pulldom.parse(in_xml)
        for event, parsenode in xml_doc:
            if event == pulldom.START_ELEMENT:
                # print level, parsenode.getAttribute(ID_ATTR)
                if level == 0:
                    # since we did not expand root_open contains the closing slash
                    root = parsenode.localName
                    if root == "edges":
                        schema = "edgediff_file.xsd"
                    elif root == "tlLogics":
                        schema = "tllogic_file.xsd"
                    if parsenode.hasAttribute("version"):
                        version = ' version="%s"' % parsenode.getAttribute("version")
                    if parsenode.hasAttribute("spreadType"):
                        DEFAULT_VALUES["spreadType"] = parsenode.getAttribute("spreadType")
                        version = ' version="%s"' % parsenode.getAttribute("version")
                    if root not in ("edges", "nodes", "connections", "tlLogics"):
                        # do not write schema information
                        version = None
                if level == 1:
                    # consumes END_ELEMENT, no level increase
                    xml_doc.expandNode(parsenode)
                    handle_parsenode(parsenode)
                else:
                    level += 1
            elif event == pulldom.END_ELEMENT:
                level -= 1
    return root, schema, version


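# For example (illustrative, the version string depends on the SUMO release that wrote
# the file): parsing a plain node file whose root element is <nodes version="1.16">
# would yield ("nodes", None, ' version="1.16"'); only edge and tlLogic files get a
# schema path assigned above.

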
# run
def main(options):
    copy_tags = options.copy.split(',') if options.copy else []

    selectionOutputFiles = []
    shapeOutputFiles = []
    if options.write_selections:
        selectionOutputFiles.append(codecs.open(options.outprefix + '.created.sel.txt', 'w', 'utf-8'))
        selectionOutputFiles.append(codecs.open(options.outprefix + '.deleted.sel.txt', 'w', 'utf-8'))
        selectionOutputFiles.append(codecs.open(options.outprefix + '.changed.sel.txt', 'w', 'utf-8'))
    if options.write_shapes:
        shapeOutputFiles.append(codecs.open(options.outprefix + '.created.shape.add.xml', 'w', 'utf-8'))
        shapeOutputFiles.append(codecs.open(options.outprefix + '.deleted.shape.add.xml', 'w', 'utf-8'))
        shapeOutputFiles.append(codecs.open(options.outprefix + '.changed.shape.add.xml', 'w', 'utf-8'))
        for f in shapeOutputFiles:
            sumolib.writeXMLHeader(f, "$Id$", "additional", options=options)  # noqa

    if options.direct:
        type = '.xml'
        xmldiff(options,
                options.source,
                options.dest,
                options.outprefix + type,
                type,
                copy_tags,
                options.patch_on_import,
                selectionOutputFiles,
                shapeOutputFiles)
    else:
        sourceNet = None
        destNet = None
        if not options.use_prefix:
            netconvert = sumolib.checkBinary("netconvert", options.path)
            if shapeOutputFiles:
                sourceNet = sumolib.net.readNet(options.source)
                destNet = sumolib.net.readNet(options.dest)
            options.source = create_plain(options.source, netconvert, options.plain_geo)
            options.dest = create_plain(options.dest, netconvert, options.plain_geo)

        for type in PLAIN_TYPES:
            xmldiff(options,
                    options.source + type,
                    options.dest + type,
                    options.outprefix + type,
                    type,
                    copy_tags,
                    options.patch_on_import,
                    selectionOutputFiles,
                    shapeOutputFiles,
                    sourceNet, destNet)
            if options.remove_plain:
                os.remove(options.source + type)
                os.remove(options.dest + type)

    for f in selectionOutputFiles:
        f.close()
    for f in shapeOutputFiles:
        f.write("</additional>\n")
        f.close()


if __name__ == "__main__":
    main(parse_args())