Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
eclipse
GitHub Repository: eclipse/sumo
Path: blob/main/tools/import/vissim/convert_vissimXML_flows_statRoutes.py
169679 views
1
#!/usr/bin/env python
2
# -*- coding: utf-8 -*-
3
# Eclipse SUMO, Simulation of Urban MObility; see https://eclipse.dev/sumo
4
# Copyright (C) 2015-2025 German Aerospace Center (DLR) and others.
5
# This program and the accompanying materials are made available under the
6
# terms of the Eclipse Public License 2.0 which is available at
7
# https://www.eclipse.org/legal/epl-2.0/
8
# This Source Code may also be made available under the following Secondary
9
# Licenses when the conditions for such availability set forth in the Eclipse
10
# Public License 2.0 are satisfied: GNU General Public License, version 2
11
# or later which is available at
12
# https://www.gnu.org/licenses/old-licenses/gpl-2.0-standalone.html
13
# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later
14
15
# @file convert_vissimXML_flows_statRoutes.py
16
# @author Lukas Grohmann <[email protected]>
17
# @author Gerald Richter <[email protected]>
18
# @date Jun 09 2015
19
20
"""
21
Parses flows and static routes from a VISSIM .inpx file
22
and writes converted information to a SUMO routes (.rou.xml) file.
23
(see source documentation)
24
25
example usage:
26
python3 convert_vissimXML_flows_statRoutes.py my_VISSIM_scenario.inpx my_VISSIM_scenario.net.xml
27
-o my_VISSIM_routes.rou.xml
28
29
see also:
30
python3 convert_vissimXML_flows_statRoutes.py -h
31
"""
32
from __future__ import absolute_import
33
from __future__ import print_function
34
35
import os
36
import sys
37
from xml.dom import minidom
38
from xml.dom.minidom import Document
39
import numpy as np
40
if 'SUMO_HOME' in os.environ:
41
sys.path.append(os.path.join(os.environ['SUMO_HOME'], 'tools'))
42
import sumolib # noqa
43
44
45
def _dict_from_node_attributes(node):
46
"""takes xml node and returns a dict with its attributes
47
"""
48
return dict((attn, node.getAttribute(attn)) for attn in
49
node.attributes.keys())
50
51
52
# FUNCTIONS
53
def parse_flows(xmldoc):
    """parses the vehicleInput flows from the VISSIM data
    :param xmldoc: input VISSIM xml
    :type xmldoc: xml.dom.minidom.Document
    :return: flow data by VISSIM start link id
    :rtype: dict

    .. note:: time frames are converted from [ms] -> [s]
    .. todo:: remove the redundant col2 in ['flow']
    """
    flows = {}
    for v_input in xmldoc.getElementsByTagName('vehicleInput'):
        # start from all attributes of the <vehicleInput> element itself
        data = {name: v_input.getAttribute(name)
                for name in v_input.attributes.keys()}
        comps, vol_types, rows = [], [], []
        for volume in v_input.getElementsByTagName('timeIntervalVehVolume'):
            comp = volume.getAttribute('vehComp')
            comps.append(comp)
            vol_types.append(volume.getAttribute('volType'))
            # row: (time interval start converted [ms] -> [s],
            #       volume, vehicle composition)
            rows.append(
                [float(volume.getAttribute('timeInt').split(" ")[1]) / 1000,
                 float(volume.getAttribute('volume')),
                 float(comp)])  # FIXME: nasty, redundant
        data["vehComp"] = comps
        data["volType"] = vol_types
        data["flow"] = np.array(rows)
        # keyed by the VISSIM link id of the vehicle input
        flows[data["link"]] = data
    return flows
82
83
84
def parse_max_acc(xmldoc):
    """parses the vehicle acceleration distributions from the VISSIM data
    :param xmldoc: input VISSIM xml
    :type xmldoc: xml.dom.minidom.Document
    :return: map of 1st acceleration function data point value by str(numeric id)
    :rtype: dict
    """
    result = {}
    for func in xmldoc.getElementsByTagName('maxAccelerationFunction'):
        # only the very first data point of each function is used
        first_point = func.getElementsByTagName(
            'accelerationFunctionDataPoint')[0]
        result[func.getAttribute('no')] = first_point.getAttribute('y')
    return result
96
97
98
def parse_speed_avg(xmldoc):
    """parses the vehicle speed distribution from the VISSIM data
    :param xmldoc: input VISSIM xml
    :type xmldoc: xml.dom.minidom.Document
    :return: map of some speed averages by str(numeric id)
    :rtype: dict

    .. note:: the average is only approximated
    """
    speeds = {}
    for dist in xmldoc.getElementsByTagName('desSpeedDistribution'):
        points = dist.getElementsByTagName('speedDistributionDataPoint')
        values = [float(point.getAttribute('x')) for point in points]
        # arithmetic mean of the data point x-values, divided by 3.6
        # (presumably a km/h -> m/s conversion)
        speeds[dist.getAttribute('no')] = str((sum(values) / len(values)) / 3.6)
    return speeds
118
119
120
def parse_length(xmldoc):
    """parses the vehicle type lengths from the VISSIM data
    :param xmldoc: input VISSIM xml
    :type xmldoc: xml.dom.minidom.Document
    :return: map of lengths by str(numeric type)
    :rtype: dict
    """
    lengths = {}
    seg_len_by_model = {}
    # first collect the per-model segment lengths (first segment only)
    for model in xmldoc.getElementsByTagName('model2D3D'):
        first_seg = model.getElementsByTagName('model2D3DSegment')[0]
        seg_len_by_model[model.getAttribute('no')] = first_seg.getAttribute('length')
    # then compute the share-weighted expected length per distribution
    for dist in xmldoc.getElementsByTagName('model2D3DDistribution'):
        elements = dist.getElementsByTagName('model2D3DDistributionElement')
        shares = [float(element.getAttribute('share')) for element in elements]
        total_share = sum(shares)
        expected_len = 0
        for share, element in zip(shares, elements):
            expected_len += (share / total_share) * \
                float(seg_len_by_model[element.getAttribute('model2D3D')])
        lengths[dist.getAttribute('no')] = str(expected_len)
    return lengths
147
148
149
def parse_veh_comp(xmldoc):
    """parses the vehicle composition from the VISSIM data
    :param xmldoc: input VISSIM xml
    :type xmldoc: xml.dom.minidom.Document
    :return: relevant VISSIM vehicleComposition data
    :rtype: dict of list of dict
    """
    compositions = {}
    for comp in xmldoc.getElementsByTagName('vehicleComposition'):
        # one dict per relative flow entry of the composition
        members = [
            {
                'desSpeedDistr': rel.getAttribute('desSpeedDistr'),
                'rel_flow': rel.getAttribute('relFlow'),
                'vehType': rel.getAttribute('vehType'),
            }
            for rel in comp.getElementsByTagName(
                'vehicleCompositionRelativeFlow')
        ]
        compositions[comp.getAttribute('no')] = members
    return compositions
171
172
173
def parse_vehicle_types(xmldoc, acc_d, length_d):
    """parses the vehicle types from the VISSIM data
    :param xmldoc: input VISSIM xml
    :type xmldoc: xml.dom.minidom.Document
    :param acc_d: acceleration value by acceleration function id
    :type acc_d: dict
    :param length_d: length value by model distribution id
    :type length_d: dict
    :return: relevant VISSIM vehicle type data
    :rtype: dict of dict
    """
    types = {}
    for veh_type in xmldoc.getElementsByTagName('vehicleType'):
        type_id = veh_type.getAttribute('no')
        # join type with its referenced acceleration and length data
        types[type_id] = {
            'id': type_id,
            'length': length_d[veh_type.getAttribute('model2D3DDistr')],
            'acc': acc_d[veh_type.getAttribute('maxAccelFunc')],
        }
    return types
189
190
191
# FIXME: not necessarily nicely done
192
def gen_verbinder_map(xmldoc):
    """produce dict with boolean values to check if a given link is a Verbinder
    :param xmldoc: input VISSIM xml
    :type xmldoc: xml.dom.minidom.Document
    :return: map of VISSIM link id -> bool flag if link is 'Verbinder'
    :rtype: dict
    """
    is_verbinder = {}
    for link in xmldoc.getElementsByTagName("link"):
        # a link with a <fromLinkEndPt> child is a connector ("Verbinder")
        has_from_end = len(link.getElementsByTagName("fromLinkEndPt")) > 0
        is_verbinder[link.getAttribute("no")] = has_from_end
    return is_verbinder
210
211
212
def parse_routes(xmldoc, edge_id_list, verbinder_d):
    """parses the VISSIM route information of statically defined routes ONLY
    :param xmldoc: input VISSIM xml
    :type xmldoc: xml.dom.minidom.Document
    :param edge_id_list: SUMO edge ids (strings)
    :param verbinder_d: bool('Verbinder' status) of VISSIM link id
    :type verbinder_d: dict
    :return: routes by VISSIM start link id, with respective destination routes
    :rtype: dict

    .. note:: time frames are converted from [ms] -> [s]
    .. todo:: extend for non-static routes
    """
    # list of just the split vissim edges (marked by a trailing ']')
    split_edge_list = [e for e in edge_id_list if e.endswith(']')]
    rts_by_start_d = dict()  # start link id -> list of route dicts
    # loop over all routing decisions
    for decision in xmldoc.getElementsByTagName('vehicleRoutingDecisionStatic'):
        start_link = decision.getAttribute('link')
        rts_by_start_d[start_link] = []
        for vehRtStatic in decision.getElementsByTagName('vehicleRouteStatic'):
            route_d = {
                "start_link": start_link,  # VISSIM id
                "dest_link": vehRtStatic.getAttribute('destLink'),  # VISSIM id
                "r_id": vehRtStatic.getAttribute('no'),
                "rel_flow": [],
                "links": [start_link, ]  # finally translated to SUMO ids
            }
            # split into separate time intervals' relative flow data;
            # each entry has the form "<id> <tInterval>:<relFlow>"
            for entry in str(vehRtStatic.getAttribute('relFlow')).split(','):
                entry = entry.strip()
                if not entry:
                    continue
                parts = entry.split()  # -> ["<id>", "<tInterval>:<relFlow>"]
                try:
                    # BUGFIX: convert eagerly inside the try block; the former
                    # lazy map(float, ...) deferred conversion errors (and the
                    # parts[1] IndexError for incomplete entries) to the later
                    # list() call *outside* this handler under Python 3
                    ti_rel_flow = [float(v) for v in parts[1].split(':')]
                except (IndexError, ValueError, TypeError):
                    print('- WARNING - incomplete relative flow definition in inpx\n',
                          decision.toxml())
                    continue
                route_d["rel_flow"].append(ti_rel_flow)

            rel_flow_arr = np.array(route_d["rel_flow"])
            if len(rel_flow_arr) > 0:
                rel_flow_arr[:, 0] /= 1000  # VISSIM time intervals [ms] -> [s]
                route_d["rel_flow"] = rel_flow_arr
            else:
                # create something.. 0 rows, 2 cols
                # NOTE: better None, but takes some adaption work
                route_d["rel_flow"] = np.empty((0, 2), dtype="f")

            # get all the intermediary links in their sumo representation
            for link in vehRtStatic.getElementsByTagName('intObjectRef'):
                link_key = link.getAttribute('key')
                if verbinder_d[link_key]:
                    # exclude VISSIM connectors (usually id > 10k)
                    continue
                # collect them all in VISSIM scheme first, then replace them
                route_d["links"].append(link_key)
            route_d["links"].append(route_d["dest_link"])

            # translate to sumo edge ids
            sumo_links = []
            for link_key in route_d["links"]:
                if link_key in edge_id_list:
                    # key is found unmodified in edge_id_list
                    sumo_links.append(link_key)
                else:
                    # extension list *IS* ordered by its splitting sequence
                    sumo_links.extend(e for e in split_edge_list
                                      if e.startswith(link_key + '['))
            # update with sumo ids info
            route_d["links"] = sumo_links

            # add route object to dictionary
            rts_by_start_d[start_link].append(route_d)
    return rts_by_start_d
286
287
288
def calc_route_probability(routes_by_start_d, flow_d):
    """computes the route probabilities per in-flow time frame
    :param routes_by_start_d: map by start link id with route dicts as values
    :type routes_by_start_d: dict
    :param flow_d: vissim vehicle in-flow data
    :type flow_d: dict

    .. note:: *modifies* the route dicts (adds "type" and "probability")
    """
    for start_link, sl_routes in routes_by_start_d.items():
        if start_link not in flow_d:
            # we got no in-flow data for that route's start link
            # (typo "calc tfor" in the original message fixed)
            print('- skipping probability calc for route without flow def. for VISSIM start link id:', start_link)
            continue
        # per-time-frame absolute volumes (used here only for its shape)
        absolute_flow = flow_d[start_link]["flow"][:, 1]
        veh_comp = flow_d[start_link]["vehComp"]
        # time frames must have the same limits as flows, as checked before;
        # therefore all route flows for 1 start link also share those limits.
        # line up all the start link's routes' rel.flows by time window
        sl_rt_relF = np.stack([rt['rel_flow'] for rt in sl_routes])
        # all summed rel.flows by time frame, shape (timeframes, )
        sl_sum_relF = sl_rt_relF.sum(axis=0)[:, 1]
        for route in sl_routes:
            # set the vehicle composition for each route
            route["type"] = veh_comp
            route["probability"] = np.zeros_like(absolute_flow)
            # only divide where the summed flow is > 0 (= relevant frames)
            comp_flow_sel = sl_sum_relF > 0.
            route["probability"][comp_flow_sel] = \
                (route["rel_flow"][comp_flow_sel, 1] / sl_sum_relF[comp_flow_sel])
318
319
320
def validate_rel_flow(routes_by_start_d, flow_d):
    """checks if a relative flow is missing and completes it if necessary,
    essentially fixing a VISSIM inp -> inpx conversion bug
    :param routes_by_start_d: map by start link id with route dicts as values
    :type routes_by_start_d: dict
    :param flow_d: vissim vehicle in-flow data
    :type flow_d: dict

    .. note:: *modifies* routes_by_start_d; flow_d is left untouched
    """
    # VISSIM BUG!!: relative in-flows with a value of 1.0 get lost in the
    # conversion from .inp to .inpx (comment translated from German)

    # compare all rel_flows with the reference flow time grid
    for start_link, sl_routes in routes_by_start_d.items():
        if start_link not in flow_d:
            # should we remove the routes entry ?
            print('- skipping flow validation for route without flow def. for VISSIM start link id:', start_link)
            # CHECK: is this ok with later steps ?
            continue
        # reference table: (time window start, default rel. flow)
        # NOTE: slice needed due to the redundant veh_comp column
        # BUGFIX: .copy() the slice - it is a numpy *view*, so writing the
        # default 1.0 below used to overwrite the absolute volume column of
        # flow_d[start_link]["flow"] as a side effect
        ref_time_shape = flow_d[start_link]["flow"][:, :2].copy()
        ref_time_shape[:, 1] = 1.  # set to default (VISSIM inp -> inpx BUG)
        for route in sl_routes:
            # check if there is a relative flow def. at all
            if len(route["rel_flow"]) == 0:
                # if not, append the default table
                route["rel_flow"] = ref_time_shape.copy()
                continue
            if not np.array_equal(ref_time_shape[:, 0], route["rel_flow"][:, 0]):
                # time windows differ from the reference: rebuild the table
                # on the reference time grid, missing windows default to 0
                orig = dict(route["rel_flow"])
                flow = ref_time_shape.copy()
                for i, (time, _) in enumerate(flow):
                    flow[i][1] = orig.get(time, 0)
                route["rel_flow"] = flow
        # copy back modifications
        routes_by_start_d[start_link] = sl_routes
361
362
363
def create_vTypeDistribution_elems(veh_comp_d, veh_type_d, speed_d, root):
    """append the vehicle type distribution elements to the given root
    :param veh_comp_d: vehicle composition members by composition id
    :type veh_comp_d: dict
    :param veh_type_d: vehicle type data by type id
    :type veh_type_d: dict
    :param speed_d: average speed by speed distribution id
    :type speed_d: dict
    :param root: XML root element to append children to

    .. note:: *modifies/extends* XML root element
    """
    doc = root.ownerDocument
    # one <vTypeDistribution> per vehicle composition
    for comp_id, members in veh_comp_d.items():
        dist_elem = doc.createElement("vTypeDistribution")
        dist_elem.setAttribute("id", comp_id)
        root.appendChild(dist_elem)
        # one <vType> per composition member
        for member in members:
            type_data = veh_type_d[member["vehType"]]
            v_type = doc.createElement("vType")
            v_type.setAttribute("id", "t{}_D{}".format(type_data["id"], comp_id))
            v_type.setAttribute("accel", type_data["acc"])
            v_type.setAttribute("length", type_data["length"])
            v_type.setAttribute("probability", member["rel_flow"])
            v_type.setAttribute("maxSpeed", speed_d[member["desSpeedDistr"]])
            dist_elem.appendChild(v_type)
394
395
396
def create_routeDistribution_elems(routes_by_start_d, root):
    """append the route distribution data into the given dom document
    :param routes_by_start_d: map by start link id with route dicts as values
    :type routes_by_start_d: dict
    :param root: XML root element to append children to
    :return: ids of all route distributions that were actually appended
    :rtype: set

    .. note:: *modifies/extends* XML root element
    .. note:: NOTE(review): reads the module-level ``flow_d`` (bound in the
       __main__ section) instead of receiving it as a parameter, so this
       function only works when the script is run as __main__ — consider
       passing flow_d explicitly, as create_flow_elems does.
    """
    # iterating by VISSIM link id
    validDists = set()
    for start_link in routes_by_start_d:
        if start_link not in flow_d:
            # no flow, no go
            print('- skipping route dist. gen for route without flow def. for VISSIM start link id:', start_link)
            continue
        if len(routes_by_start_d[start_link]) == 0:
            continue
        # time frame starts of the in-flow definitions
        ref_time = flow_d[start_link]["flow"][:, 0]
        for ic, time in enumerate(ref_time):
            route_dist = root.ownerDocument.createElement("routeDistribution")
            # distribution id: "<startLink>_<timeFrameStart>" (just a name)
            distID = "_".join([start_link, str(time)])
            route_dist.setAttribute("id", distID)
            for route in routes_by_start_d[start_link]:
                # only emit routes with non-zero probability in this frame
                if np.abs(route["probability"][ic]) != 0:
                    route_node = root.ownerDocument.createElement("route")
                    route_node.setAttribute("id", route["r_id"])
                    route_node.setAttribute("edges",
                                            " ".join(route["links"]))
                    route_node.setAttribute("probability",
                                            str(np.abs(
                                                route["probability"][ic])))
                    route_dist.appendChild(route_node)
            # only append distributions that received at least one route
            if route_dist.hasChildNodes():
                root.appendChild(route_dist)
                validDists.add(distID)
    return validDists
433
434
435
def create_flow_elems(routes_by_start_d, flow_d, validDists, root):
    """append the flow data to the given dom document
    :param routes_by_start_d: map by start link id with route dicts as values
    :type routes_by_start_d: dict
    :param flow_d: vissim vehicle in-flow data
    :type flow_d: dict
    :param validDists: ids of the route distributions actually written
    :type validDists: set
    :param root: XML root element to append children to

    .. note:: *modifies/extends* XML root element
    .. note:: NOTE(review): reads the module-level ``inpx_doc`` (bound in the
       __main__ section) for the simulation end time, so this function only
       works when the script is run as __main__.
    """
    # simulation period serves as the end time of the last flow interval
    sim_end = inpx_doc.getElementsByTagName("simulation")[0].getAttribute("simPeriod")
    dom_flow_l = []
    for start_link in routes_by_start_d:
        if start_link not in flow_d:
            # we got no in-flow data for that route's start link
            print('- skipping flow gen for route without flow def. for VISSIM start link id:', start_link)
            continue
        if len(routes_by_start_d[start_link]) == 0:
            print('- found no routes by start link:', start_link)
            continue
        flows = flow_d[start_link]["flow"]
        # iterate over all the time frame starts from the flows
        ref_time = flows[:, 0]
        for index, time in enumerate(ref_time):
            distID = "_".join([start_link, str(time)])
            # row of the in-flow table matching this time frame start
            in_flow = [fl for fl in flow_d[start_link]["flow"] if
                       fl[0] == time][0]
            # only emit a flow if its volume is positive and a matching
            # route distribution was actually written
            if in_flow[1] > 0 and distID in validDists:
                flow = root.ownerDocument.createElement("flow")
                flow.setAttribute("id", "fl{}_st{}".format(start_link,
                                                           time))
                flow.setAttribute("color", "1,1,0")
                flow.setAttribute("begin", str(time))
                if index < len(ref_time) - 1 and len(ref_time) > 1:
                    # NOTE(review): "end" is set to time + the NEXT frame
                    # start; if ref_time holds absolute start times this
                    # looks like it should simply be str(ref_time[index + 1])
                    # — confirm against the VISSIM timeInt semantics before
                    # changing
                    flow.setAttribute("end",
                                      str(time + ref_time[index + 1]))
                else:
                    flow.setAttribute("end", sim_end)
                flow.setAttribute("vehsPerHour", str(in_flow[1]))
                flow.setAttribute("type", str(int(in_flow[2])))
                flow.setAttribute('route', distID)
                dom_flow_l.append(flow)
    # append the flows sorted by their begin time
    dom_flow_l = sorted(dom_flow_l,
                        key=lambda dom: float(dom.getAttribute("begin")))
    for dom_obj in dom_flow_l:
        root.appendChild(dom_obj)
482
483
484
# MAIN
if __name__ == '__main__':
    # command line interface (sumolib's ArgumentParser provides the
    # SUMO-specific option types used below)
    op = sumolib.options.ArgumentParser(
        description='road network conversion utility for static route flows'
        ' (VISSIM.inpx to SUMO); generates SUMO routes definition file from'
        ' given inpx and derived (by netconvert) SUMO net.')
    op.add_argument('--output-file', '-o', default='routes.rou.xml', category="output", type=op.route_file,
                    help='output file name (default: %(default)s)')
    op.add_argument('--vissim-file', '-V', dest="vissim_file", category="input", required=True, type=op.file,
                    help='VISSIM inpx file path')
    op.add_argument('--sumo-net-file', '-n', dest="sumo_net_file", category="input", required=True, type=op.net_file,
                    help='SUMO net file path')
    args = op.parse_args()
    # print("\n", args, "\n")

    #
    # Input data ##########
    #
    print('\n---\n\n* loading VISSIM net:\n\t', args.vissim_file)
    inpx_doc = minidom.parse(args.vissim_file)
    print('\n---\n\n* loading SUMO net:\n\t', args.sumo_net_file,)
    sumo_doc = minidom.parse(args.sumo_net_file)

    print('+ building edge list...')
    # ids of all normal edges; edges carrying a "function" attribute
    # (e.g. internal edges) are skipped
    sumo_edge_ids = [edge.getAttribute("id") for edge in
                     sumo_doc.getElementsByTagName('edge')
                     if not edge.hasAttribute("function")]
    print('\tOK.')

    print('+ building "Verbinder"("connector") info...')
    # map: VISSIM link id -> True if the link is a "Verbinder" (connector)
    verbinder_flag = gen_verbinder_map(inpx_doc)
    print('\tOK.')

    print('\n---')
    #
    # Vehicle Speeds, distributions, types ##########
    #
    print('* parsing speeds...')
    # parse vehicle speed distribution data
    speed_d = parse_speed_avg(inpx_doc)
    print('* parsing vehicle distributions...')
    # get the vehicle compositions
    vehicle_comp_d = parse_veh_comp(inpx_doc)
    print('* parsing vehicle types...')
    # parse vehTypes and combine the information with acceleration and length data
    vehicle_type_d = parse_vehicle_types(inpx_doc, parse_max_acc(inpx_doc),
                                         parse_length(inpx_doc))
    print('OK.\n---')

    #
    # Flows and Routes ##########
    #
    # TODO: maybe make flows and routes conversion switchable by option ?
    print('* parsing vehicle in-flow definitions...')
    # parse flows (by VISSIM start link id)
    flow_d = parse_flows(inpx_doc)
    print('* parsing vehicle routes...')
    # parse statically defined routes (by VISSIM start link id)
    routes_by_start_d = parse_routes(inpx_doc, sumo_edge_ids, verbinder_flag)
    print('+ validating relative flows...')
    # complete missing relative flows (VISSIM inp -> inpx conversion bug)
    validate_rel_flow(routes_by_start_d, flow_d)
    print('+ setting route branching probabilities...')
    # computes the probability for each route
    calc_route_probability(routes_by_start_d, flow_d)
    print('OK.\n---')

    #
    # XML generation ##########
    #
    print('* output routes generation...')
    # create dom document and define routes + flows
    # NOTE(review): create_routeDistribution_elems and create_flow_elems
    # additionally read the module-level names flow_d / inpx_doc bound above
    result_doc = Document()
    routes_Elem = result_doc.createElement("routes")
    result_doc.appendChild(routes_Elem)

    create_vTypeDistribution_elems(vehicle_comp_d, vehicle_type_d, speed_d, routes_Elem)
    print('-' * 3)
    validDists = create_routeDistribution_elems(routes_by_start_d, routes_Elem)
    print('-' * 3)
    create_flow_elems(routes_by_start_d, flow_d, validDists, routes_Elem)
    print('OK.\n---')

    print('* writing output:')
    # write the data into a .rou.xml file
    out_Fn = args.output_file
    if not out_Fn.endswith('.xml'):
        out_Fn += '.xml'
    with open(out_Fn, "w") as ofh:
        result_doc.writexml(ofh, addindent='    ', newl='\n')
        ofh.close()  # redundant inside "with"; kept as in the original
    print('. data written to:\n\t', out_Fn)
578
579