ArduPilot
GitHub Repository: ArduPilot/ardupilot
Path: blob/master/Tools/autotest/autotest.py
1
#!/usr/bin/env python3
2
"""
3
ArduPilot automatic test suite.
4
5
Andrew Tridgell, October 2011
6
7
AP_FLAKE8_CLEAN
8
"""
9
import atexit
10
import fnmatch
11
import copy
12
import glob
13
import optparse
14
import os
15
import re
16
import shutil
17
import signal
18
import subprocess
19
import sys
20
import time
21
import traceback
22
23
import blimp
24
import rover
25
import arducopter
26
import arduplane
27
import ardusub
28
import antennatracker
29
import quadplane
30
import balancebot
31
import sailboat
32
import helicopter
33
34
import examples
35
from pysim import util
36
from pymavlink.generator import mavtemplate
37
38
from vehicle_test_suite import Test
39
40
tester = None
41
42
43
def buildlogs_dirpath():
44
"""Return BUILDLOGS directory path."""
45
return os.getenv("BUILDLOGS", util.reltopdir("../buildlogs"))
46
47
48
def buildlogs_path(path):
49
"""Return a string representing path in the buildlogs directory."""
50
bits = [buildlogs_dirpath()]
51
if isinstance(path, list):
52
bits.extend(path)
53
else:
54
bits.append(path)
55
return os.path.join(*bits)
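# Illustrative usage (a sketch; assumes the BUILDLOGS environment variable is
# unset, so the default ../buildlogs directory is used):
#   buildlogs_path("autotest-output.txt")  -> .../buildlogs/autotest-output.txt
#   buildlogs_path(["docs", "index.html"]) -> .../buildlogs/docs/index.html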
56
57
58
def build_all_filepath():
59
"""Get build_all.sh path."""
60
return util.reltopdir('Tools/scripts/build_all.sh')
61
62
63
def build_all():
64
"""Run the build_all.sh script."""
65
print("Running build_all.sh")
66
if util.run_cmd(build_all_filepath(), directory=util.reltopdir('.')) != 0:
67
print("Failed build_all.sh")
68
return False
69
return True
70
71
72
def build_binaries():
73
"""Run the build_binaries.py script."""
74
print("Running build_binaries.py")
75
76
# copy the script (and various libraries used by the script) as it
77
# changes git branch, which can change the script while running
78
for thing in [
79
"board_list.py",
80
"build_binaries_history.py",
81
"build_binaries.py",
82
"build_sizes/build_sizes.py",
83
"generate_manifest.py",
84
"gen_stable.py",
85
]:
86
orig = util.reltopdir('Tools/scripts/%s' % thing)
87
copy = util.reltopdir('./%s' % os.path.basename(thing))
88
shutil.copy2(orig, copy)
89
90
if util.run_cmd("./build_binaries.py", directory=util.reltopdir('.')) != 0:
91
print("Failed build_binaries.py")
92
return False
93
return True
94
95
96
def build_examples(**kwargs):
97
"""Build examples."""
98
for target in 'Pixhawk1', 'navio', 'linux', 'sitl':
99
print("Running build.examples for %s" % target)
100
try:
101
util.build_examples(target, **kwargs)
102
except Exception as e:
103
print("Failed build_examples on board=%s" % target)
104
print(str(e))
105
return False
106
107
return True
108
109
110
def build_unit_tests(**kwargs):
111
"""Build tests."""
112
for target in ['linux', 'sitl']:
113
print("Running build.unit_tests for %s" % target)
114
try:
115
util.build_tests(target, **kwargs)
116
except Exception as e:
117
print("Failed build.unit_tests on board=%s" % target)
118
print(str(e))
119
return False
120
121
return True
122
123
124
def run_unit_test(test):
125
"""Run unit test file."""
126
print("Running (%s)" % test)
127
subprocess.check_call([test])
128
129
130
def run_unit_tests():
131
"""Run all unit tests files."""
132
success = True
133
fail_list = []
134
for target in ['linux', 'sitl']:
135
binary_dir = util.reltopdir(os.path.join('build',
136
target,
137
'tests',
138
))
139
tests = glob.glob("%s/*" % binary_dir)
140
for test in tests:
141
try:
142
run_unit_test(test)
143
except subprocess.CalledProcessError:
144
print("Exception running (%s)" % test)
145
fail_list.append(target + '/' + os.path.basename(test))
146
success = False
147
148
print("Failing tests:")
149
for failure in fail_list:
150
print(" %s" % failure)
151
return success
152
153
154
def run_clang_scan_build():
155
"""Run Clang Scan-build utility."""
156
if util.run_cmd("scan-build python3 waf configure",
157
directory=util.reltopdir('.')) != 0:
158
print("Failed scan-build-configure")
159
return False
160
161
if util.run_cmd("scan-build python3 waf clean",
162
directory=util.reltopdir('.')) != 0:
163
print("Failed scan-build-clean")
164
return False
165
166
if util.run_cmd("scan-build python3 waf build",
167
directory=util.reltopdir('.')) != 0:
168
print("Failed scan-build-build")
169
return False
170
171
return True
172
173
174
def param_parse_filepath():
175
"""Get param_parse.py script path."""
176
return util.reltopdir('Tools/autotest/param_metadata/param_parse.py')
177
178
179
def all_vehicles():
180
"""Get all vehicles name."""
181
return ('ArduPlane',
182
'ArduCopter',
183
'Rover',
184
'AntennaTracker',
185
'ArduSub',
186
'Blimp',
187
'AP_Periph',
188
)
189
190
191
def build_parameters():
192
"""Run the param_parse.py script."""
193
print("Running param_parse.py")
194
for vehicle in all_vehicles():
195
if util.run_cmd([param_parse_filepath(), '--vehicle', vehicle],
196
directory=util.reltopdir('.')) != 0:
197
print("Failed param_parse.py (%s)" % vehicle)
198
return False
199
return True
200
201
202
def mavtogpx_filepath():
203
"""Get mavtogpx script path."""
204
return util.reltopdir("modules/mavlink/pymavlink/tools/mavtogpx.py")
205
206
207
def convert_gpx():
208
"""Convert any tlog files to GPX and KML."""
209
mavlog = glob.glob(buildlogs_path("*.tlog"))
210
passed = True
211
for m in mavlog:
212
util.run_cmd(mavtogpx_filepath() + " --nofixcheck " + m)
213
gpx = m + '.gpx'
214
kml = m + '.kml'
215
try:
216
util.run_cmd('gpsbabel -i gpx -f %s '
217
'-o kml,units=m,floating=1,extrude=1 -F %s' %
218
(gpx, kml))
219
except subprocess.CalledProcessError:
220
passed = False
221
try:
222
util.run_cmd('zip %s.kmz %s.kml' % (m, m))
223
except subprocess.CalledProcessError:
224
passed = False
225
util.run_cmd("mavflightview.py --imagefile=%s.png %s" % (m, m))
226
return passed
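# Conversion pipeline for each *.tlog found in the buildlogs directory:
# mavtogpx.py produces a .gpx track, gpsbabel converts it to .kml, zip wraps
# the .kml into a .kmz, and mavflightview.py renders a .png flight track
# (gpsbabel and zip are assumed to be available on the host).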
227
228
229
def test_prerequisites():
230
"""Check we have the right directories and tools to run tests."""
231
print("Testing prerequisites")
232
util.mkdir_p(buildlogs_dirpath())
233
return True
234
235
236
def alarm_handler(signum, frame):
237
"""Handle test timeout."""
238
try:
239
print("Alarm handler called")
240
if tester is not None:
241
if tester.rc_thread is not None:
242
tester.rc_thread_should_quit = True
243
tester.rc_thread.join()
244
tester.rc_thread = None
245
results.add('TIMEOUT',
246
'<span class="failed-text">FAILED</span>',
247
opts.timeout)
248
util.pexpect_close_all()
249
convert_gpx()
250
write_fullresults()
251
os.killpg(0, signal.SIGKILL)
252
except Exception:
253
pass
254
sys.exit(1)
255
256
257
def should_run_step(step):
258
"""See if a step should be skipped."""
259
for skip in skipsteps:
260
if fnmatch.fnmatch(step.lower(), skip.lower()):
261
return False
262
return True
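# Example with a hypothetical skip list: if skipsteps contains "test.*",
# should_run_step("test.Copter") returns False (fnmatch is applied
# case-insensitively), while should_run_step("build.Copter") returns True.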
263
264
265
__bin_names = {
266
"Copter": "arducopter",
267
"CopterTests1a": "arducopter",
268
"CopterTests1b": "arducopter",
269
"CopterTests1c": "arducopter",
270
"CopterTests1d": "arducopter",
271
"CopterTests1e": "arducopter",
272
273
"CopterTests2a": "arducopter",
274
"CopterTests2b": "arducopter",
275
276
"Plane": "arduplane",
277
"PlaneTests1a": "arduplane",
278
"PlaneTests1b": "arduplane",
279
"PlaneTests1c": "arduplane",
280
281
"Rover": "ardurover",
282
"Tracker": "antennatracker",
283
"Helicopter": "arducopter-heli",
284
"QuadPlane": "arduplane",
285
"Sub": "ardusub",
286
"Blimp": "blimp",
287
"BalanceBot": "ardurover",
288
"Sailboat": "ardurover",
289
"SITLPeriphUniversal": ("sitl_periph_universal", "AP_Periph"),
290
"SITLPeriphBattMon": ("sitl_periph_battmon", "AP_Periph"),
291
"CAN": "arducopter",
292
"BattCAN": "arducopter",
293
}
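# Note: each value is either a plain binary name (built for the default
# "sitl" board) or a (board, binary) tuple; binary_path() below tells the
# two apart by attempting tuple unpacking.  For example "Copter" maps to the
# arducopter binary under build/sitl/bin/, while "SITLPeriphUniversal" maps
# to the AP_Periph binary under build/sitl_periph_universal/bin/.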
294
295
296
def binary_path(step, debug=False):
297
"""Get vehicle binary path."""
298
try:
299
vehicle = step.split(".")[1]
300
except Exception:
301
return None
302
303
if vehicle not in __bin_names:
304
# cope with builds that don't have a specific binary
305
return None
306
307
try:
308
(config_name, binary_name) = __bin_names[vehicle]
309
except ValueError:
310
config_name = "sitl"
311
binary_name = __bin_names[vehicle]
312
313
binary = util.reltopdir(os.path.join('build',
314
config_name,
315
'bin',
316
binary_name))
317
if not os.path.exists(binary):
318
if os.path.exists(binary + ".exe"):
319
binary += ".exe"
320
else:
321
raise ValueError("Binary (%s) does not exist" % (binary,))
322
323
return binary
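# Sketch of expected behaviour (assumes the matching build step has already
# produced the binary):
#   binary_path("test.Copter")    -> the arducopter binary under build/sitl/bin/
#   binary_path("build.Binaries") -> None  (no vehicle-specific binary)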
324
325
326
def split_specific_test_step(step):
327
"""Extract test from argument."""
328
print('step=%s' % str(step))
329
m = re.match("((fly|drive|dive|test)[.][^.]+)[.](.*)", step)
330
if m is None:
331
return None
332
return ((m.group(1), m.group(3)))
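# Example: "test.Sub.DiveManual" (as used in the --help epilog below) splits
# into ("test.Sub", "DiveManual"); a bare step such as "test.Sub" does not
# match the pattern and yields None.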
333
334
335
def find_specific_test_to_run(step):
336
"""Find test to run in argument."""
337
t = split_specific_test_step(step)
338
if t is None:
339
return None
340
(testname, test) = t
341
return "%s.%s" % (testname, test)
342
343
344
tester_class_map = {
345
"test.Blimp": blimp.AutoTestBlimp,
346
"test.Copter": arducopter.AutoTestCopter,
347
"test.CopterTests1a": arducopter.AutoTestCopterTests1a, # 8m43s
348
"test.CopterTests1b": arducopter.AutoTestCopterTests1b, # 8m5s
349
"test.CopterTests1c": arducopter.AutoTestCopterTests1c, # 5m17s
350
"test.CopterTests1d": arducopter.AutoTestCopterTests1d, # 8m20s
351
"test.CopterTests1e": arducopter.AutoTestCopterTests1e, # 8m32s
352
"test.CopterTests2a": arducopter.AutoTestCopterTests2a, # 8m23s
353
"test.CopterTests2b": arducopter.AutoTestCopterTests2b, # 8m18s
354
"test.Plane": arduplane.AutoTestPlane,
355
"test.PlaneTests1a": arduplane.AutoTestPlaneTests1a,
356
"test.PlaneTests1b": arduplane.AutoTestPlaneTests1b,
357
"test.PlaneTests1c": arduplane.AutoTestPlaneTests1c,
358
"test.QuadPlane": quadplane.AutoTestQuadPlane,
359
"test.Rover": rover.AutoTestRover,
360
"test.BalanceBot": balancebot.AutoTestBalanceBot,
361
"test.Sailboat": sailboat.AutoTestSailboat,
362
"test.Helicopter": helicopter.AutoTestHelicopter,
363
"test.Sub": ardusub.AutoTestSub,
364
"test.Tracker": antennatracker.AutoTestTracker,
365
"test.CAN": arducopter.AutoTestCAN,
366
"test.BattCAN": arducopter.AutoTestBattCAN,
367
}
368
369
supplementary_test_binary_map = {
370
"test.CAN": ["sitl_periph_universal:AP_Periph:0:Tools/autotest/default_params/periph.parm,Tools/autotest/default_params/quad-periph.parm", # noqa: E501
371
"sitl_periph_universal:AP_Periph:1:Tools/autotest/default_params/periph.parm"],
372
"test.BattCAN": [
373
"sitl_periph_battmon:AP_Periph:0:Tools/autotest/default_params/periph-battmon.parm,Tools/autotest/default_params/quad-periph.parm", # noqa: E501
374
],
375
}
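# Each entry is a colon-separated "board:binary:instance:param_files" string;
# run_step() below splits it into the build/<board>/bin/<binary> path, the
# SITL instance number and a comma-separated list of parameter files for
# that supplementary AP_Periph instance.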
376
377
378
def run_specific_test(step, *args, **kwargs):
379
"""Run a specific test."""
380
t = split_specific_test_step(step)
381
if t is None:
382
return []
383
(testname, test) = t
384
tests = set()
385
tests.update(test.split(","))
386
387
tester_class = tester_class_map[testname]
388
global tester
389
tester = tester_class(*args, **kwargs)
390
391
# print("Got %s" % str(tester))
392
run = []
393
for a in tester.tests():
394
if not isinstance(a, Test):
395
a = Test(a)
396
# print("Got %s" % (a.name))
397
if a.name in tests:
398
run.append(a)
399
tests.remove(a.name)
400
if len(tests):
401
print(f"Failed to find tests {tests}")
402
sys.exit(1)
403
return tester.autotest(tests=run, allow_skips=False, step_name=step), tester
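# Note: as with whole-vehicle test steps in run_step(), this returns a
# (result, tester) tuple; run_tests() unpacks it so the tester instance is
# available for failure reporting.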
404
405
406
def run_step(step):
407
"""Run one step."""
408
# remove old logs
409
util.run_cmd('rm -f logs/*.BIN logs/LASTLOG.TXT')
410
411
if step == "prerequisites":
412
return test_prerequisites()
413
414
build_opts = {
415
"j": opts.j,
416
"debug": opts.debug,
417
"clean": not opts.no_clean,
418
"configure": not opts.no_configure,
419
"math_check_indexes": opts.math_check_indexes,
420
"ekf_single": opts.ekf_single,
421
"postype_single": opts.postype_single,
422
"extra_configure_args": opts.waf_configure_args,
423
"coverage": opts.coverage,
424
"force_32bit" : opts.force_32bit,
425
"ubsan" : opts.ubsan,
426
"ubsan_abort" : opts.ubsan_abort,
427
"num_aux_imus" : opts.num_aux_imus,
428
"dronecan_tests" : opts.dronecan_tests,
429
}
430
431
if opts.Werror:
432
build_opts['extra_configure_args'].append("--Werror")
433
434
vehicle_binary = None
435
board = "sitl"
436
if step == 'build.Plane':
437
vehicle_binary = 'bin/arduplane'
438
439
if step == 'build.Rover':
440
vehicle_binary = 'bin/ardurover'
441
442
if step == 'build.Copter':
443
vehicle_binary = 'bin/arducopter'
444
445
if step == 'build.Blimp':
446
vehicle_binary = 'bin/blimp'
447
448
if step == 'build.Tracker':
449
vehicle_binary = 'bin/antennatracker'
450
451
if step == 'build.Helicopter':
452
vehicle_binary = 'bin/arducopter-heli'
453
454
if step == 'build.Sub':
455
vehicle_binary = 'bin/ardusub'
456
457
if step == 'build.SITLPeriphUniversal':
458
vehicle_binary = 'bin/AP_Periph'
459
board = 'sitl_periph_universal'
460
461
if step == 'build.SITLPeriphBattMon':
462
vehicle_binary = 'bin/AP_Periph'
463
board = 'sitl_periph_battmon'
464
465
if step == 'build.Replay':
466
return util.build_replay(board='SITL')
467
468
if vehicle_binary is not None:
469
try:
470
binary = binary_path(step, debug=opts.debug)
471
os.unlink(binary)
472
except (FileNotFoundError, ValueError):
473
pass
474
return util.build_SITL(
475
vehicle_binary,
476
board=board,
477
**build_opts
478
)
479
480
binary = binary_path(step, debug=opts.debug)
481
482
# see if we need any supplementary binaries
483
supplementary_binaries = []
484
for k in supplementary_test_binary_map.keys():
485
if step.startswith(k):
486
# this test needs to use supplementary binaries
487
for supplementary_test_binary in supplementary_test_binary_map[k]:
488
a = supplementary_test_binary.split(':')
489
if len(a) != 4:
490
raise ValueError("Bad supplementary_test_binary %s" % supplementary_test_binary)
491
config_name = a[0]
492
binary_name = a[1]
493
instance_num = int(a[2])
494
param_file = a[3].split(",")
495
bin_path = util.reltopdir(os.path.join('build', config_name, 'bin', binary_name))
496
customisation = '-I {}'.format(instance_num)
497
sup_binary = {"binary" : bin_path,
498
"customisation" : customisation,
499
"param_file" : param_file}
500
supplementary_binaries.append(sup_binary)
501
# we are running in conjunction with a supplementary app
502
# can't have speedup
503
opts.speedup = 1.0
504
break
505
506
fly_opts = {
507
"viewerip": opts.viewerip,
508
"use_map": opts.map,
509
"valgrind": opts.valgrind,
510
"callgrind": opts.callgrind,
511
"gdb": opts.gdb,
512
"gdb_no_tui": opts.gdb_no_tui,
513
"lldb": opts.lldb,
514
"strace": opts.strace,
515
"gdbserver": opts.gdbserver,
516
"breakpoints": opts.breakpoint,
517
"disable_breakpoints": opts.disable_breakpoints,
518
"_show_test_timings": opts.show_test_timings,
519
"force_ahrs_type": opts.force_ahrs_type,
520
"num_aux_imus" : opts.num_aux_imus,
521
"replay": opts.replay,
522
"logs_dir": buildlogs_dirpath(),
523
"sup_binaries": supplementary_binaries,
524
"reset_after_every_test": opts.reset_after_every_test,
525
"build_opts": copy.copy(build_opts),
526
"generate_junit": opts.junit,
527
"enable_fgview": opts.enable_fgview,
528
}
529
if opts.speedup is not None:
530
fly_opts["speedup"] = opts.speedup
531
532
fly_opts["move_logs_on_test_failure"] = opts.move_logs_on_test_failure
533
534
# handle "test.Copter" etc:
535
if step in tester_class_map:
536
# create an instance of the tester class:
537
global tester
538
tester = tester_class_map[step](binary, **fly_opts)
539
# run the test and return its result and the tester itself
540
return tester.autotest(None, step_name=step), tester
541
542
# handle "test.Copter.CPUFailsafe" etc:
543
specific_test_to_run = find_specific_test_to_run(step)
544
if specific_test_to_run is not None:
545
return run_specific_test(specific_test_to_run, binary, **fly_opts)
546
547
if step == 'build.All':
548
return build_all()
549
550
if step == 'build.Binaries':
551
return build_binaries()
552
553
if step == 'build.examples':
554
return build_examples(**build_opts)
555
556
if step == 'run.examples':
557
return examples.run_examples(debug=opts.debug, valgrind=False, gdb=False)
558
559
if step == 'build.Parameters':
560
return build_parameters()
561
562
if step == 'convertgpx':
563
return convert_gpx()
564
565
if step == 'build.unit_tests':
566
return build_unit_tests(**build_opts)
567
568
if step == 'run.unit_tests':
569
return run_unit_tests()
570
571
if step == 'clang-scan-build':
572
return run_clang_scan_build()
573
574
raise RuntimeError("Unknown step %s" % step)
575
576
577
class TestResult(object):
578
"""Test result class."""
579
580
def __init__(self, name, result, elapsed):
581
"""Init test result class."""
582
self.name = name
583
self.result = result
584
self.elapsed = "%.1f" % elapsed
585
586
587
class TestFile(object):
588
"""Test result file."""
589
590
def __init__(self, name, fname):
591
"""Init test result file."""
592
self.name = name
593
self.fname = fname
594
595
596
class TestResults(object):
597
"""Test results class."""
598
599
def __init__(self):
600
"""Init test results class."""
601
self.date = time.asctime()
602
self.githash = util.get_git_hash()
603
self.tests = []
604
self.files = []
605
self.images = []
606
607
def add(self, name, result, elapsed):
608
"""Add a result."""
609
self.tests.append(TestResult(name, result, elapsed))
610
611
def addfile(self, name, fname):
612
"""Add a result file."""
613
self.files.append(TestFile(name, fname))
614
615
def addimage(self, name, fname):
616
"""Add a result image."""
617
self.images.append(TestFile(name, fname))
618
619
def addglob(self, name, pattern):
620
"""Add a set of files."""
621
for f in glob.glob(buildlogs_path(pattern)):
622
self.addfile(name, os.path.basename(f))
623
624
def addglobimage(self, name, pattern):
625
"""Add a set of images."""
626
for f in glob.glob(buildlogs_path(pattern)):
627
self.addimage(name, os.path.basename(f))
628
629
def generate_badge(self):
630
"""Get the badge template, populates and saves the result to buildlogs path."""
631
passed_tests = len([t for t in self.tests if "PASSED" in t.result])
632
total_tests = len(self.tests)
633
badge_color = "#4c1" if passed_tests == total_tests else "#e05d44"
634
635
badge_text = "{0}/{1}".format(passed_tests, total_tests)
636
# Text length so it is not stretched by svg
637
text_length = len(badge_text) * 70
638
639
# Load template file
640
template_path = 'Tools/autotest/web/autotest-badge-template.svg'
641
with open(util.reltopdir(template_path), "r") as f:
642
template = f.read()
643
644
# Add our results to the template
645
badge = template.format(color=badge_color,
646
text=badge_text,
647
text_length=text_length)
648
with open(buildlogs_path("autotest-badge.svg"), "w") as f:
649
f.write(badge)
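# Worked example: with 42 of 45 tests passed, badge_text is "42/45", the
# badge is rendered in red (#e05d44) and text_length is 5 * 70 = 350 SVG
# units.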
650
651
652
def copy_tree(f, t, dirs_exist_ok=False):
653
shutil.copytree(f, t, dirs_exist_ok=dirs_exist_ok)
654
655
656
def write_webresults(results_to_write):
657
"""Write webpage results."""
658
t = mavtemplate.MAVTemplate()
659
for h in glob.glob(util.reltopdir('Tools/autotest/web/*.html')):
660
html = util.loadfile(h)
661
f = open(buildlogs_path(os.path.basename(h)), mode='w')
662
t.write(f, html, results_to_write)
663
f.close()
664
for f in glob.glob(util.reltopdir('Tools/autotest/web/*.png')):
665
shutil.copy(f, buildlogs_path(os.path.basename(f)))
666
copy_tree(util.reltopdir("Tools/autotest/web/css"), buildlogs_path("css"), dirs_exist_ok=True)
667
results_to_write.generate_badge()
668
669
670
def write_fullresults():
671
"""Write out full results set."""
672
results.addglob("Google Earth track", '*.kmz')
673
results.addfile('Full Logs', 'autotest-output.txt')
674
results.addglob('DataFlash Log', '*-log.bin')
675
results.addglob("MAVLink log", '*.tlog')
676
results.addglob("GPX track", '*.gpx')
677
678
# results common to all vehicles:
679
vehicle_files = [
680
('{vehicle} core', '{vehicle}.core'),
681
('{vehicle} ELF', '{vehicle}.elf'),
682
]
683
vehicle_globs = [('{vehicle} log', '{vehicle}-*.BIN'), ]
684
for vehicle in all_vehicles():
685
subs = {'vehicle': vehicle}
686
for vehicle_file in vehicle_files:
687
description = vehicle_file[0].format(**subs)
688
filename = vehicle_file[1].format(**subs)
689
results.addfile(description, filename)
690
for vehicle_glob in vehicle_globs:
691
description = vehicle_glob[0].format(**subs)
692
glob = vehicle_glob[1].format(**subs)
693
results.addglob(description, glob)
694
695
results.addglob("CopterAVC log", 'CopterAVC-*.BIN')
696
results.addfile("CopterAVC core", 'CopterAVC.core')
697
698
results.addglob('APM:Libraries documentation', 'docs/libraries/index.html')
699
results.addglob('APM:Plane documentation', 'docs/ArduPlane/index.html')
700
results.addglob('APM:Copter documentation', 'docs/ArduCopter/index.html')
701
results.addglob('APM:Rover documentation', 'docs/Rover/index.html')
702
results.addglob('APM:Sub documentation', 'docs/ArduSub/index.html')
703
results.addglob('APM:Blimp documentation', 'docs/Blimp/index.html')
704
results.addglobimage("Flight Track", '*.png')
705
706
write_webresults(results)
707
708
709
def run_tests(steps):
710
"""Run a list of steps."""
711
712
corefiles = glob.glob("core*")
713
corefiles.extend(glob.glob("ap-*.core"))
714
if corefiles:
715
print('Removing corefiles: %s' % str(corefiles))
716
for f in corefiles:
717
os.unlink(f)
718
719
diagnostic_files = []
720
for p in "dumpstack.sh_*", "dumpcore.sh_*", "autotest-*tlog":
721
diagnostic_files.extend(glob.glob(p))
722
if diagnostic_files:
723
print('Removing diagnostic files: %s' % str(diagnostic_files))
724
for f in diagnostic_files:
725
os.unlink(f)
726
727
passed = True
728
failed = []
729
failed_testinstances = dict()
730
for step in steps:
731
util.pexpect_close_all()
732
733
t1 = time.time()
734
print(">>>> RUNNING STEP: %s at %s" % (step, time.asctime()))
735
try:
736
success = run_step(step)
737
testinstance = None
738
if isinstance(success, tuple):
739
(success, testinstance) = success
740
if success:
741
results.add(step, '<span class="passed-text">PASSED</span>',
742
time.time() - t1)
743
print(">>>> PASSED STEP: %s at %s" % (step, time.asctime()))
744
else:
745
print(">>>> FAILED STEP: %s at %s" % (step, time.asctime()))
746
passed = False
747
failed.append(step)
748
if testinstance is not None:
749
if failed_testinstances.get(step) is None:
750
failed_testinstances[step] = []
751
failed_testinstances[step].append(testinstance)
752
results.add(step, '<span class="failed-text">FAILED</span>',
753
time.time() - t1)
754
except Exception as msg:
755
passed = False
756
failed.append(step)
757
print(">>>> FAILED STEP: %s at %s (%s)" %
758
(step, time.asctime(), msg))
759
traceback.print_exc(file=sys.stdout)
760
results.add(step,
761
'<span class="failed-text">FAILED</span>',
762
time.time() - t1)
763
764
if tester is not None and tester.rc_thread is not None:
765
if passed:
766
print("BAD: RC Thread still alive after run_step")
767
tester.rc_thread_should_quit = True
768
tester.rc_thread.join()
769
tester.rc_thread = None
770
771
if not passed:
772
keys = failed_testinstances.keys()
773
if len(keys):
774
print("Failure Summary:")
775
for key in keys:
776
print(" %s:" % key)
777
for testinstance in failed_testinstances[key]:
778
for failure in testinstance.fail_list:
779
print(" " + str(failure))
780
781
print("FAILED %u tests: %s" % (len(failed), failed))
782
783
util.pexpect_close_all()
784
785
write_fullresults()
786
787
return passed
788
789
790
vehicle_list = ['Sub', 'Copter', 'Plane', 'Tracker', 'Rover', 'QuadPlane', 'BalanceBot', 'Helicopter', 'Sailboat', 'Blimp']
791
792
793
def list_subtests():
794
"""Print the list of tests and tests description for each vehicle."""
795
for vehicle in sorted(vehicle_list):
796
tester_class = tester_class_map["test.%s" % vehicle]
797
tester = tester_class("/bin/true", None)
798
subtests = tester.tests()
799
sorted_list = []
800
for subtest in subtests:
801
if str(type(subtest)) == "<class 'method'>":
802
subtest = Test(subtest)
803
sorted_list.append([subtest.name, subtest.description])
804
sorted_list.sort()
805
806
print("%s:" % vehicle)
807
for subtest in sorted_list:
808
print(" %s: %s" % (subtest[0], subtest[1]))
809
print("")
810
811
812
def list_subtests_for_vehicle(vehicle_type):
813
"""Print the list of tests for a vehicle."""
814
# Check that we aren't in a sub test
815
if "Test" in vehicle_type:
816
vehicle_type = re.findall('[A-Z][a-z0-9]*', vehicle_type)[0]
817
if vehicle_type in vehicle_list:
818
tester_class = tester_class_map["test.%s" % vehicle_type]
819
tester = tester_class("/bin/true", None)
820
subtests = tester.tests()
821
sorted_list = []
822
for subtest in subtests:
823
if not isinstance(subtest, Test):
824
subtest = Test(subtest)
825
sorted_list.append([subtest.name, subtest.description])
826
sorted_list.sort()
827
for subtest in sorted_list:
828
print("%s " % subtest[0], end='')
829
print("") # needed to clear the trailing %
830
831
832
if __name__ == "__main__":
833
''' main program '''
834
os.environ['PYTHONUNBUFFERED'] = '1'
835
836
if sys.platform != "darwin":
837
os.putenv('TMPDIR', util.reltopdir('tmp'))
838
839
class MyOptionParser(optparse.OptionParser):
840
"""Custom option parse class."""
841
842
def format_epilog(self, formatter):
843
"""Return customized option parser epilog."""
844
return self.epilog
845
846
parser = MyOptionParser(
847
"autotest", epilog=""
848
"e.g. autotest.py build.Rover test.Rover # test Rover\n"
849
"e.g. autotest.py build.Rover test.Rover build.Plane test.Plane # test Rover and Plane\n"
850
"e.g. autotest.py --debug --valgrind build.Rover test.Rover # test Rover under Valgrind\n"
851
"e.g. autotest.py --debug --gdb build.Tracker test.Tracker # run Tracker under gdb\n"
852
"e.g. autotest.py --debug --gdb build.Sub test.Sub.DiveManual # do specific Sub test\n"
853
)
854
parser.add_option("--autotest-server",
855
action='store_true',
856
default=False,
857
help='Run in autotest-server mode; dangerous!')
858
parser.add_option("--move-logs-on-test-failure",
859
action='store_true',
860
default=None,
861
help='Move logs to ../buildlogs if a test fails')
862
parser.add_option("--skip",
863
type='string',
864
default='',
865
help='list of steps to skip (comma separated)')
866
parser.add_option("--list",
867
action='store_true',
868
default=False,
869
help='list the available steps')
870
parser.add_option("--list-subtests",
871
action='store_true',
872
default=False,
873
help='list available subtests e.g. test.Copter')
874
parser.add_option("--viewerip",
875
default=None,
876
help='IP address to send MAVLink and fg packets to')
877
parser.add_option("--enable-fgview",
878
action='store_true',
879
help="Enable FlightGear output")
880
parser.add_option("--map",
881
action='store_true',
882
default=False,
883
help='show map')
884
parser.add_option("--experimental",
885
default=False,
886
action='store_true',
887
help='enable experimental tests')
888
parser.add_option("--timeout",
889
default=None,
890
type='int',
891
help='maximum runtime in seconds')
892
parser.add_option("--show-test-timings",
893
action="store_true",
894
default=False,
895
help="show how long each test took to run")
896
parser.add_option("--validate-parameters",
897
action="store_true",
898
default=False,
899
help="validate vehicle parameter files")
900
parser.add_option("--Werror",
901
action='store_true',
902
default=False,
903
help='configure with --Werror')
904
parser.add_option("--junit",
905
default=False,
906
action='store_true',
907
help='Generate Junit XML tests report')
908
909
group_build = optparse.OptionGroup(parser, "Build options")
910
group_build.add_option("--no-configure",
911
default=False,
912
action='store_true',
913
help='do not configure before building',
914
dest="no_configure")
915
group_build.add_option("", "--waf-configure-args",
916
action="append",
917
dest="waf_configure_args",
918
type="string",
919
default=[],
920
help="extra arguments passed to waf in configure")
921
group_build.add_option("-j", default=None, type='int', help='build CPUs')
922
group_build.add_option("--no-clean",
923
default=False,
924
action='store_true',
925
help='do not clean before building',
926
dest="no_clean")
927
group_build.add_option("--debug",
928
default=None,
929
action='store_true',
930
help='make built SITL binaries debug binaries')
931
group_build.add_option("--no-debug",
932
default=None,
933
action='store_true',
934
help='do not make built SITL binaries debug binaries')
935
group_build.add_option("--coverage",
936
default=False,
937
action='store_true',
938
help='make built binaries coverage binaries')
939
group_build.add_option("--enable-math-check-indexes",
940
default=False,
941
action="store_true",
942
dest="math_check_indexes",
943
help="enable checking of math indexes")
944
group_build.add_option("--postype-single",
945
default=False,
946
action="store_true",
947
dest="postype_single",
948
help="force single precision copter position controller")
949
group_build.add_option("--ekf-single",
950
default=False,
951
action="store_true",
952
dest="ekf_single",
953
help="force single precision EKF")
954
group_build.add_option("--force-32bit",
955
default=False,
956
action='store_true',
957
dest="force_32bit",
958
help="compile sitl using 32-bit")
959
group_build.add_option("", "--ubsan",
960
default=False,
961
action='store_true',
962
dest="ubsan",
963
help="compile sitl with undefined behaviour sanitiser")
964
group_build.add_option("", "--ubsan-abort",
965
default=False,
966
action='store_true',
967
dest="ubsan_abort",
968
help="compile sitl with undefined behaviour sanitiser and abort on error")
969
group_build.add_option("--num-aux-imus",
970
dest="num_aux_imus",
971
default=0,
972
type='int',
973
help='number of auxiliary IMUs to simulate')
974
group_build.add_option("--enable-dronecan-tests",
975
default=False,
976
action='store_true',
977
dest="dronecan_tests",
978
help="enable dronecan tests")
979
parser.add_option_group(group_build)
980
981
group_sim = optparse.OptionGroup(parser, "Simulation options")
982
group_sim.add_option("--speedup",
983
default=None,
984
type='int',
985
help='speedup to run the simulations at')
986
group_sim.add_option("--valgrind",
987
default=False,
988
action='store_true',
989
help='run ArduPilot binaries under valgrind')
990
group_sim.add_option("", "--callgrind",
991
action='store_true',
992
default=False,
993
help="enable valgrind for performance analysis (slow!!)")
994
group_sim.add_option("--gdb",
995
default=False,
996
action='store_true',
997
help='run ArduPilot binaries under gdb')
998
group_sim.add_option("--gdb-no-tui",
999
default=False,
1000
action='store_true',
1001
help='when running under GDB do NOT start in TUI mode')
1002
group_sim.add_option("--gdbserver",
1003
default=False,
1004
action='store_true',
1005
help='run ArduPilot binaries under gdbserver')
1006
group_sim.add_option("--lldb",
1007
default=False,
1008
action='store_true',
1009
help='run ArduPilot binaries under lldb')
1010
group_sim.add_option("", "--strace",
1011
action='store_true',
1012
default=False,
1013
help="strace the ArduPilot binary")
1014
group_sim.add_option("-B", "--breakpoint",
1015
type='string',
1016
action="append",
1017
default=[],
1018
help="add a breakpoint at given location in debugger")
1019
group_sim.add_option("--disable-breakpoints",
1020
default=False,
1021
action='store_true',
1022
help="disable all breakpoints before starting")
1023
group_sim.add_option("", "--force-ahrs-type",
1024
dest="force_ahrs_type",
1025
default=None,
1026
help="force a specific AHRS type (e.g. 10 for SITL-ekf")
1027
group_sim.add_option("", "--replay",
1028
action='store_true',
1029
help="enable replay logging for tests")
1030
parser.add_option_group(group_sim)
1031
1032
group_completion = optparse.OptionGroup(parser, "Completion helpers")
1033
group_completion.add_option("--list-vehicles",
1034
action='store_true',
1035
default=False,
1036
help='list available vehicles')
1037
group_completion.add_option("--list-vehicles-test",
1038
action='store_true',
1039
default=False,
1040
help='list available vehicle testers')
1041
group_completion.add_option("--list-subtests-for-vehicle",
1042
type='string',
1043
default="",
1044
help='list available subtests for a vehicle, e.g. Copter')
1045
group_completion.add_option("--reset-after-every-test",
1046
action='store_true',
1047
default=False,
1048
help='reset everything after every test run')
1049
parser.add_option_group(group_completion)
1050
1051
opts, args = parser.parse_args()
1052
1053
# canonicalise on opts.debug:
1054
if opts.debug is None and opts.no_debug is None:
1055
# default is to create debug SITL binaries
1056
opts.debug = True
1057
elif opts.debug is not None and opts.no_debug is not None:
1058
if opts.debug == opts.no_debug:
1059
raise ValueError("no_debug != !debug")
1060
elif opts.no_debug is not None:
1061
opts.debug = not opts.no_debug
1062
1063
if opts.timeout is None:
1064
opts.timeout = 5400
1065
# adjust if we're running in a regime which may slow us down e.g. Valgrind
1066
if opts.valgrind:
1067
opts.timeout *= 10
1068
elif opts.callgrind:
1069
opts.timeout *= 10
1070
elif opts.gdb:
1071
opts.timeout = None
1072
1073
# default to moving logs when running in autotest-server mode:
1074
if opts.move_logs_on_test_failure is None:
1075
opts.move_logs_on_test_failure = opts.autotest_server
1076
1077
steps = [
1078
'prerequisites',
1079
'build.Binaries',
1080
'build.All',
1081
'build.Parameters',
1082
1083
'build.Replay',
1084
1085
'build.unit_tests',
1086
'run.unit_tests',
1087
'build.examples',
1088
'run.examples',
1089
1090
'build.Plane',
1091
'test.Plane',
1092
'test.QuadPlane',
1093
1094
'build.Rover',
1095
'test.Rover',
1096
'test.BalanceBot',
1097
'test.Sailboat',
1098
1099
'build.Copter',
1100
'test.Copter',
1101
1102
'build.Helicopter',
1103
'test.Helicopter',
1104
1105
'build.Tracker',
1106
'test.Tracker',
1107
1108
'build.Sub',
1109
'test.Sub',
1110
1111
'build.Blimp',
1112
'test.Blimp',
1113
1114
'build.SITLPeriphUniversal',
1115
'test.CAN',
1116
1117
'build.SITLPeriphBattMon',
1118
'test.BattCAN',
1119
1120
# convertgpx disabled as it takes 5 hours
1121
# 'convertgpx',
1122
]
1123
1124
moresteps = [
1125
'test.CopterTests1a',
1126
'test.CopterTests1b',
1127
'test.CopterTests1c',
1128
'test.CopterTests1d',
1129
'test.CopterTests1e',
1130
1131
'test.CopterTests2a',
1132
'test.CopterTests2b',
1133
1134
'test.PlaneTests1a',
1135
'test.PlaneTests1b',
1136
'test.PlaneTests1c',
1137
1138
'clang-scan-build',
1139
]
1140
1141
# canonicalise the step names. This allows
1142
# backwards-compatibility from the hodge-podge
1143
# fly.ArduCopter/drive.APMrover2 to the more common test.Copter
1144
# test.Rover
1145
step_mapping = {
1146
"build.ArduPlane": "build.Plane",
1147
"build.ArduCopter": "build.Copter",
1148
"build.APMrover2": "build.Rover",
1149
"build.ArduSub": "build.Sub",
1150
"build.AntennaTracker": "build.Tracker",
1151
"fly.ArduCopter": "test.Copter",
1152
"fly.ArduPlane": "test.Plane",
1153
"fly.QuadPlane": "test.QuadPlane",
1154
"dive.ArduSub": "test.Sub",
1155
"drive.APMrover2": "test.Rover",
1156
"drive.BalanceBot": "test.BalanceBot",
1157
"drive.balancebot": "test.BalanceBot",
1158
"fly.CopterAVC": "test.Helicopter",
1159
"test.AntennaTracker": "test.Tracker",
1160
"fly.ArduCopterTests1a": "test.CopterTests1a",
1161
"fly.ArduCopterTests1b": "test.CopterTests1b",
1162
"fly.ArduCopterTests1c": "test.CopterTests1c",
1163
"fly.ArduCopterTests1d": "test.CopterTests1d",
1164
"fly.ArduCopterTests1e": "test.CopterTests1e",
1165
1166
"fly.ArduCopterTests2a": "test.CopterTests2a",
1167
"fly.ArduCopterTests2b": "test.CopterTests2b",
1168
1169
}
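# Example: a legacy invocation such as "autotest.py fly.ArduCopter" is
# rewritten further below to "test.Copter" before step matching, and the
# same mapping is applied to any --skip entries.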
1170
1171
# form up a list of bits NOT to run, mapping from old step names
1172
# to new step names as appropriate.
1173
skipsteps = opts.skip.split(',')
1174
new_skipsteps = []
1175
for skipstep in skipsteps:
1176
if skipstep in step_mapping:
1177
new_skipsteps.append(step_mapping[skipstep])
1178
else:
1179
new_skipsteps.append(skipstep)
1180
skipsteps = new_skipsteps
1181
1182
# ensure we catch timeouts
1183
signal.signal(signal.SIGALRM, alarm_handler)
1184
if opts.timeout is not None:
1185
signal.alarm(opts.timeout)
1186
1187
if opts.list:
1188
for step in steps:
1189
print(step)
1190
sys.exit(0)
1191
1192
if opts.list_subtests:
1193
list_subtests()
1194
sys.exit(0)
1195
1196
if opts.list_subtests_for_vehicle:
1197
list_subtests_for_vehicle(opts.list_subtests_for_vehicle)
1198
sys.exit(0)
1199
1200
if opts.list_vehicles_test:
1201
print(' '.join(__bin_names.keys()))
1202
sys.exit(0)
1203
1204
if opts.list_vehicles:
1205
print(' '.join(vehicle_list))
1206
sys.exit(0)
1207
1208
util.mkdir_p(buildlogs_dirpath())
1209
1210
lckfile = buildlogs_path('autotest.lck')
1211
print("lckfile=%s" % repr(lckfile))
1212
lck = util.lock_file(lckfile)
1213
1214
if lck is None:
1215
print("autotest is locked - exiting. lckfile=(%s)" % (lckfile,))
1216
sys.exit(0)
1217
1218
atexit.register(util.pexpect_close_all)
1219
1220
# provide backwards-compatibility from (e.g.) drive.APMrover2 -> test.Rover
1221
newargs = []
1222
for arg in args:
1223
for _from, to in step_mapping.items():
1224
arg = re.sub("^%s" % _from, to, arg)
1225
newargs.append(arg)
1226
args = newargs
1227
1228
if len(args) == 0 and not opts.autotest_server:
1229
print("Steps must be supplied; try --list and/or --list-subtests or --help")
1230
sys.exit(1)
1231
1232
if len(args) > 0:
1233
# allow a wildcard list of steps
1234
matched = []
1235
for a in args:
1236
matches = [step for step in steps
1237
if fnmatch.fnmatch(step.lower(), a.lower())]
1238
x = find_specific_test_to_run(a)
1239
if x is not None:
1240
matches.append(x)
1241
1242
if a in moresteps:
1243
matches.append(a)
1244
1245
if not len(matches):
1246
print("No steps matched {}".format(a))
1247
sys.exit(1)
1248
matched.extend(matches)
1249
steps = matched
1250
1251
# skip steps according to --skip option:
1252
steps_to_run = [s for s in steps if should_run_step(s)]
1253
1254
results = TestResults()
1255
1256
try:
1257
if not run_tests(steps_to_run):
1258
sys.exit(1)
1259
except KeyboardInterrupt:
1260
print("KeyboardInterrupt caught; closing pexpect connections")
1261
util.pexpect_close_all()
1262
raise
1263
except Exception:
1264
# make sure we kill off any children
1265
util.pexpect_close_all()
1266
raise
1267
1268