CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutSign UpSign In
Ardupilot

Real-time collaboration for Jupyter Notebooks, Linux Terminals, LaTeX, VS Code, R IDE, and more,
all in one place. Commercial Alternative to JupyterHub.

GitHub Repository: Ardupilot/ardupilot
Path: blob/master/Tools/autotest/autotest.py
Views: 1798
1
#!/usr/bin/env python3
2
"""
3
ArduPilot automatic test suite.
4
5
Andrew Tridgell, October 2011
6
7
AP_FLAKE8_CLEAN
8
"""
9
from __future__ import print_function
10
import atexit
11
import fnmatch
12
import copy
13
import glob
14
import optparse
15
import os
16
import re
17
import shutil
18
import signal
19
import subprocess
20
import sys
21
import time
22
import traceback
23
24
import blimp
25
import rover
26
import arducopter
27
import arduplane
28
import ardusub
29
import antennatracker
30
import quadplane
31
import balancebot
32
import sailboat
33
import helicopter
34
35
import examples
36
from pysim import util
37
from pymavlink.generator import mavtemplate
38
39
from vehicle_test_suite import Test
40
41
tester = None  # currently-running test-suite instance; set by run_step()/run_specific_test()
42
43
44
def buildlogs_dirpath():
    """Return the directory used for build/test logs.

    The BUILDLOGS environment variable overrides the default of
    ../buildlogs relative to the source tree root.
    """
    default = util.reltopdir("../buildlogs")
    return os.environ.get("BUILDLOGS", default)
47
48
49
def buildlogs_path(path):
    """Return *path* joined beneath the buildlogs directory.

    *path* may be a single path component or a list of components.
    """
    if isinstance(path, list):
        components = [buildlogs_dirpath()] + path
    else:
        components = [buildlogs_dirpath(), path]
    return os.path.join(*components)
57
58
59
def build_all_filepath():
    """Return the path of the build_all.sh helper script."""
    script = 'Tools/scripts/build_all.sh'
    return util.reltopdir(script)
62
63
64
def build_all():
    """Run the build_all.sh script; return True on success, False on failure."""
    print("Running build_all.sh")
    rc = util.run_cmd(build_all_filepath(), directory=util.reltopdir('.'))
    if rc != 0:
        print("Failed build_all.sh")
        return False
    return True
71
72
73
def build_binaries():
    """Run the build_binaries.py script; return True on success.

    The script (and the libraries it uses) are first copied to the tree
    root: build_binaries.py switches git branches while it runs, which
    would otherwise change the script out from under itself.
    """
    print("Running build_binaries.py")

    # copy the script (and various libraries used by the script) as it
    # changes git branch, which can change the script while running
    for thing in [
        "board_list.py",
        "build_binaries_history.py",
        "build_binaries.py",
        "build_sizes/build_sizes.py",
        "generate_manifest.py",
        "gen_stable.py",
    ]:
        orig = util.reltopdir('Tools/scripts/%s' % thing)
        # renamed from "copy": that name shadowed the imported copy
        # module within this function
        dest = util.reltopdir('./%s' % os.path.basename(thing))
        shutil.copy2(orig, dest)

    if util.run_cmd("./build_binaries.py", directory=util.reltopdir('.')) != 0:
        print("Failed build_binaries.py")
        return False
    return True
95
96
97
def build_examples(**kwargs):
    """Build the library example sketches for a fixed set of boards.

    Returns True if every board builds; False on the first failure.
    """
    for board in ('Pixhawk1', 'navio', 'linux'):
        print("Running build.examples for %s" % board)
        try:
            util.build_examples(board, **kwargs)
        except Exception as err:
            print("Failed build_examples on board=%s" % board)
            print(str(err))
            return False

    return True
109
110
111
def build_unit_tests(**kwargs):
    """Build the unit tests for the linux and sitl boards.

    Returns True if both targets build; False on the first failure.
    """
    for board in ('linux', 'sitl'):
        print("Running build.unit_tests for %s" % board)
        try:
            util.build_tests(board, **kwargs)
        except Exception as err:
            print("Failed build.unit_tests on board=%s" % board)
            print(str(err))
            return False

    return True
123
124
125
def run_unit_test(test):
    """Execute a single unit-test binary.

    Raises subprocess.CalledProcessError if the test exits non-zero.
    """
    print("Running (%s)" % test)
    subprocess.run([test], check=True)
129
130
131
def run_unit_tests():
    """Run every built unit-test binary for the linux and sitl boards.

    Prints a summary of failing tests and returns True only when all
    tests passed.
    """
    all_ok = True
    failures = []
    for board in ('linux', 'sitl'):
        test_dir = util.reltopdir(os.path.join('build', board, 'tests'))
        for test in glob.glob("%s/*" % test_dir):
            try:
                run_unit_test(test)
            except subprocess.CalledProcessError:
                print("Exception running (%s)" % test)
                failures.append(board + '/' + os.path.basename(test))
                all_ok = False

    print("Failing tests:")
    for failure in failures:
        print(" %s" % failure)
    return all_ok
153
154
155
def run_clang_scan_build():
    """Run the Clang scan-build static analyser over a full waf cycle.

    Performs configure, clean and build phases under scan-build,
    returning False as soon as any phase fails.
    """
    for phase in ('configure', 'clean', 'build'):
        cmd = "scan-build python waf %s" % phase
        if util.run_cmd(cmd, directory=util.reltopdir('.')) != 0:
            print("Failed scan-build-%s" % phase)
            return False

    return True
173
174
175
def param_parse_filepath():
    """Return the path of the param_parse.py metadata generator."""
    script = 'Tools/autotest/param_metadata/param_parse.py'
    return util.reltopdir(script)
178
179
180
def all_vehicles():
    """Return the firmware names of every vehicle (plus AP_Periph)."""
    return (
        'ArduPlane',
        'ArduCopter',
        'Rover',
        'AntennaTracker',
        'ArduSub',
        'Blimp',
        'AP_Periph',
    )
190
191
192
def build_parameters():
    """Generate parameter metadata for every vehicle via param_parse.py.

    Returns False as soon as generation fails for any vehicle.
    """
    print("Running param_parse.py")
    topdir = util.reltopdir('.')
    for vehicle in all_vehicles():
        rc = util.run_cmd([param_parse_filepath(), '--vehicle', vehicle],
                          directory=topdir)
        if rc != 0:
            print("Failed param_parse.py (%s)" % vehicle)
            return False
    return True
201
202
203
def mavtogpx_filepath():
    """Return the path of pymavlink's mavtogpx.py conversion tool."""
    script = "modules/mavlink/pymavlink/tools/mavtogpx.py"
    return util.reltopdir(script)
206
207
208
def convert_gpx():
    """Convert every buildlogs telemetry log to GPX, KML, KMZ and a PNG track.

    Conversion failures (gpsbabel/zip) mark the run as failed but do not
    stop the remaining logs from being processed.
    """
    ok = True
    for tlog in glob.glob(buildlogs_path("*.tlog")):
        util.run_cmd(mavtogpx_filepath() + " --nofixcheck " + tlog)
        gpx = tlog + '.gpx'
        kml = tlog + '.kml'
        try:
            util.run_cmd('gpsbabel -i gpx -f %s '
                         '-o kml,units=m,floating=1,extrude=1 -F %s' %
                         (gpx, kml))
        except subprocess.CalledProcessError:
            ok = False
        try:
            util.run_cmd('zip %s.kmz %s.kml' % (tlog, tlog))
        except subprocess.CalledProcessError:
            ok = False
        util.run_cmd("mavflightview.py --imagefile=%s.png %s" % (tlog, tlog))
    return ok
228
229
230
def test_prerequisites():
    """Check we have the right directories and tools to run tests.

    Currently just ensures the buildlogs directory exists; always
    returns True.
    """
    print("Testing prerequisites")
    util.mkdir_p(buildlogs_dirpath())
    return True
235
236
237
def alarm_handler(signum, frame):
    """Handle test timeout (SIGALRM).

    Installed when a --timeout is given: tears down any running tester's
    RC thread, records a TIMEOUT failure, writes out the results pages,
    then kills the entire process group.
    """
    global results, opts, tester
    try:
        print("Alarm handler called")
        if tester is not None:
            if tester.rc_thread is not None:
                # ask the RC input thread to stop and wait for it, so we
                # don't SIGKILL while it is mid-write
                tester.rc_thread_should_quit = True
                tester.rc_thread.join()
                tester.rc_thread = None
        results.add('TIMEOUT',
                    '<span class="failed-text">FAILED</span>',
                    opts.timeout)
        util.pexpect_close_all()
        convert_gpx()
        write_fullresults()
        # kill our whole process group (all spawned children included)
        os.killpg(0, signal.SIGKILL)
    except Exception:
        # best-effort cleanup: the handler must never raise, we still
        # want the sys.exit below to run
        pass
    sys.exit(1)
257
258
259
def should_run_step(step):
    """Return False when *step* matches any pattern in the global skipsteps."""
    lowered = step.lower()
    return not any(fnmatch.fnmatch(lowered, skip.lower()) for skip in skipsteps)
265
266
267
# Map from the vehicle portion of a step name (e.g. the "Copter" in
# "test.Copter") to the SITL binary that step uses.  A plain string names
# a binary under the default "sitl" build configuration; a
# (config_name, binary_name) tuple selects a different configuration
# (see binary_path(), which unpacks these entries).
__bin_names = {
    "Copter": "arducopter",
    "CopterTests1a": "arducopter",
    "CopterTests1b": "arducopter",
    "CopterTests1c": "arducopter",
    "CopterTests1d": "arducopter",
    "CopterTests1e": "arducopter",

    "CopterTests2a": "arducopter",
    "CopterTests2b": "arducopter",

    "Plane": "arduplane",
    "PlaneTests1a": "arduplane",
    "PlaneTests1b": "arduplane",

    "Rover": "ardurover",
    "Tracker": "antennatracker",
    "Helicopter": "arducopter-heli",
    "QuadPlane": "arduplane",
    "Sub": "ardusub",
    "Blimp": "blimp",
    "BalanceBot": "ardurover",
    "Sailboat": "ardurover",
    "SITLPeriphUniversal": ("sitl_periph_universal", "AP_Periph"),
    "SITLPeriphBattMon": ("sitl_periph_battmon", "AP_Periph"),
    "CAN": "arducopter",
    "BattCAN": "arducopter",
}
295
296
297
def binary_path(step, debug=False):
    """Return the filesystem path of the vehicle binary used by *step*.

    Returns None when the step has no associated vehicle binary.
    Raises ValueError when the expected binary has not been built.
    """
    parts = step.split(".")
    if len(parts) < 2:
        return None
    vehicle = parts[1]

    if vehicle not in __bin_names:
        # cope with builds that don't have a specific binary
        return None

    entry = __bin_names[vehicle]
    if isinstance(entry, tuple):
        # entry selects a non-default build configuration
        (config_name, binary_name) = entry
    else:
        config_name = "sitl"
        binary_name = entry

    binary = util.reltopdir(os.path.join('build',
                                         config_name,
                                         'bin',
                                         binary_name))
    if os.path.exists(binary):
        return binary
    if os.path.exists(binary + ".exe"):
        # Windows/cygwin builds append .exe
        return binary + ".exe"
    raise ValueError("Binary (%s) does not exist" % (binary,))
325
326
327
def split_specific_test_step(step):
    """Split a specific-test step into (test-suite, test-name).

    e.g. "test.Copter.NavDelay" -> ("test.Copter", "NavDelay").
    Returns None when *step* does not name a specific sub-test.
    """
    print('step=%s' % str(step))
    match = re.match("((fly|drive|dive|test)[.][^.]+)[.](.*)", step)
    if match is None:
        return None
    return (match.group(1), match.group(3))
334
335
336
def find_specific_test_to_run(step):
    """Return the canonical "suite.TestName" string for *step*, or None."""
    parts = split_specific_test_step(step)
    if parts is None:
        return None
    (testname, test) = parts
    return "%s.%s" % (testname, test)
343
344
345
# Map from a "test.<Vehicle>" step name to the AutoTest class that runs
# it.  The trailing comments record approximate suite wall-clock times.
tester_class_map = {
    "test.Blimp": blimp.AutoTestBlimp,
    "test.Copter": arducopter.AutoTestCopter,
    "test.CopterTests1a": arducopter.AutoTestCopterTests1a,  # 8m43s
    "test.CopterTests1b": arducopter.AutoTestCopterTests1b,  # 8m5s
    "test.CopterTests1c": arducopter.AutoTestCopterTests1c,  # 5m17s
    "test.CopterTests1d": arducopter.AutoTestCopterTests1d,  # 8m20s
    "test.CopterTests1e": arducopter.AutoTestCopterTests1e,  # 8m32s
    "test.CopterTests2a": arducopter.AutoTestCopterTests2a,  # 8m23s
    "test.CopterTests2b": arducopter.AutoTestCopterTests2b,  # 8m18s
    "test.Plane": arduplane.AutoTestPlane,
    "test.PlaneTests1a": arduplane.AutoTestPlaneTests1a,
    "test.PlaneTests1b": arduplane.AutoTestPlaneTests1b,
    "test.QuadPlane": quadplane.AutoTestQuadPlane,
    "test.Rover": rover.AutoTestRover,
    "test.BalanceBot": balancebot.AutoTestBalanceBot,
    "test.Sailboat": sailboat.AutoTestSailboat,
    "test.Helicopter": helicopter.AutoTestHelicopter,
    "test.Sub": ardusub.AutoTestSub,
    "test.Tracker": antennatracker.AutoTestTracker,
    "test.CAN": arducopter.AutoTestCAN,
    "test.BattCAN": arducopter.AutoTestBattCAN,
}
368
369
# Extra SITL binaries some tests need alongside the main vehicle binary.
# Each entry is "config_name:binary_name:instance_num:param_file[,param_file...]"
# and is parsed in run_step().
supplementary_test_binary_map = {
    "test.CAN": ["sitl_periph_universal:AP_Periph:0:Tools/autotest/default_params/periph.parm,Tools/autotest/default_params/quad-periph.parm",  # noqa: E501
                 "sitl_periph_universal:AP_Periph:1:Tools/autotest/default_params/periph.parm"],
    "test.BattCAN": [
        "sitl_periph_battmon:AP_Periph:0:Tools/autotest/default_params/periph-battmon.parm,Tools/autotest/default_params/quad-periph.parm",  # noqa: E501
    ],
}
376
377
378
def run_specific_test(step, *args, **kwargs):
    """Run a single named test, e.g. "test.Copter.NavDelay".

    *args/**kwargs are forwarded to the tester-class constructor.
    Returns (autotest-result, tester) when the test is found; exits the
    process when it is not.
    """
    t = split_specific_test_step(step)
    if t is None:
        return []
    (testname, test) = t

    tester_class = tester_class_map[testname]
    # record the instance globally so alarm_handler/run_tests can clean up
    global tester
    tester = tester_class(*args, **kwargs)

    # print("Got %s" % str(tester))
    for a in tester.tests():
        # tests() may yield bare methods; wrap them in Test objects
        if not isinstance(a, Test):
            a = Test(a)
        print("Got %s" % (a.name))
        if a.name == test:
            return tester.autotest(tests=[a], allow_skips=False, step_name=step), tester
    print("Failed to find test %s on %s" % (test, testname))
    sys.exit(1)
398
399
400
def run_step(step):
    """Run one step of the autotest sequence.

    A step is either a build step ("build.Copter", ...), a whole test
    suite ("test.Copter"), a single test ("test.Copter.NavDelay"), or
    one of several utility steps.  Returns either a bare success flag or
    a (success, tester-instance) tuple.  Raises RuntimeError for an
    unknown step.  Reads the global command-line `opts`.
    """
    # remove old logs
    util.run_cmd('/bin/rm -f logs/*.BIN logs/LASTLOG.TXT')

    if step == "prerequisites":
        return test_prerequisites()

    # options forwarded to the waf-based build helpers
    build_opts = {
        "j": opts.j,
        "debug": opts.debug,
        "clean": not opts.no_clean,
        "configure": not opts.no_configure,
        "math_check_indexes": opts.math_check_indexes,
        "ekf_single": opts.ekf_single,
        "postype_single": opts.postype_single,
        "extra_configure_args": opts.waf_configure_args,
        "coverage": opts.coverage,
        "force_32bit" : opts.force_32bit,
        "ubsan" : opts.ubsan,
        "ubsan_abort" : opts.ubsan_abort,
        "num_aux_imus" : opts.num_aux_imus,
        "dronecan_tests" : opts.dronecan_tests,
    }

    if opts.Werror:
        build_opts['extra_configure_args'].append("--Werror")

    # map build.* steps onto the binary they produce (and, for periph
    # builds, a non-default board configuration)
    vehicle_binary = None
    board = "sitl"
    if step == 'build.Plane':
        vehicle_binary = 'bin/arduplane'

    if step == 'build.Rover':
        vehicle_binary = 'bin/ardurover'

    if step == 'build.Copter':
        vehicle_binary = 'bin/arducopter'

    if step == 'build.Blimp':
        vehicle_binary = 'bin/blimp'

    if step == 'build.Tracker':
        vehicle_binary = 'bin/antennatracker'

    if step == 'build.Helicopter':
        vehicle_binary = 'bin/arducopter-heli'

    if step == 'build.Sub':
        vehicle_binary = 'bin/ardusub'

    if step == 'build.SITLPeriphUniversal':
        vehicle_binary = 'bin/AP_Periph'
        board = 'sitl_periph_universal'

    if step == 'build.SITLPeriphBattMon':
        vehicle_binary = 'bin/AP_Periph'
        board = 'sitl_periph_battmon'

    if step == 'build.Replay':
        return util.build_replay(board='SITL')

    if vehicle_binary is not None:
        # remove any stale binary first so a failed build can't be
        # mistaken for a fresh one
        try:
            binary = binary_path(step, debug=opts.debug)
            os.unlink(binary)
        except (FileNotFoundError, ValueError):
            pass
        return util.build_SITL(
            vehicle_binary,
            board=board,
            **build_opts
        )

    # not a build step: resolve the binary the test step will run
    binary = binary_path(step, debug=opts.debug)

    # see if we need any supplementary binaries
    supplementary_binaries = []
    for k in supplementary_test_binary_map.keys():
        if step.startswith(k):
            # this test needs to use supplementary binaries
            for supplementary_test_binary in supplementary_test_binary_map[k]:
                # entry format: config:binary:instance:paramfile[,paramfile...]
                a = supplementary_test_binary.split(':')
                if len(a) != 4:
                    raise ValueError("Bad supplementary_test_binary %s" % supplementary_test_binary)
                config_name = a[0]
                binary_name = a[1]
                instance_num = int(a[2])
                param_file = a[3].split(",")
                bin_path = util.reltopdir(os.path.join('build', config_name, 'bin', binary_name))
                customisation = '-I {}'.format(instance_num)
                sup_binary = {"binary" : bin_path,
                              "customisation" : customisation,
                              "param_file" : param_file}
                supplementary_binaries.append(sup_binary)
            # we are running in conjunction with a supplementary app
            # can't have speedup
            opts.speedup = 1.0
            break

    # options forwarded to the AutoTest vehicle-suite constructors
    fly_opts = {
        "viewerip": opts.viewerip,
        "use_map": opts.map,
        "valgrind": opts.valgrind,
        "callgrind": opts.callgrind,
        "gdb": opts.gdb,
        "gdb_no_tui": opts.gdb_no_tui,
        "lldb": opts.lldb,
        "gdbserver": opts.gdbserver,
        "breakpoints": opts.breakpoint,
        "disable_breakpoints": opts.disable_breakpoints,
        "_show_test_timings": opts.show_test_timings,
        "force_ahrs_type": opts.force_ahrs_type,
        "num_aux_imus" : opts.num_aux_imus,
        "replay": opts.replay,
        "logs_dir": buildlogs_dirpath(),
        "sup_binaries": supplementary_binaries,
        "reset_after_every_test": opts.reset_after_every_test,
        "build_opts": copy.copy(build_opts),
        "generate_junit": opts.junit,
        "enable_fgview": opts.enable_fgview,
    }
    if opts.speedup is not None:
        fly_opts["speedup"] = opts.speedup

    # handle "test.Copter" etc:
    if step in tester_class_map:
        # create an instance of the tester class:
        global tester
        tester = tester_class_map[step](binary, **fly_opts)
        # run the test and return its result and the tester itself
        return tester.autotest(None, step_name=step), tester

    # handle "test.Copter.CPUFailsafe" etc:
    specific_test_to_run = find_specific_test_to_run(step)
    if specific_test_to_run is not None:
        return run_specific_test(specific_test_to_run, binary, **fly_opts)

    if step == 'build.All':
        return build_all()

    if step == 'build.Binaries':
        return build_binaries()

    if step == 'build.examples':
        return build_examples(**build_opts)

    if step == 'run.examples':
        return examples.run_examples(debug=opts.debug, valgrind=False, gdb=False)

    if step == 'build.Parameters':
        return build_parameters()

    if step == 'convertgpx':
        return convert_gpx()

    if step == 'build.unit_tests':
        return build_unit_tests(**build_opts)

    if step == 'run.unit_tests':
        return run_unit_tests()

    if step == 'clang-scan-build':
        return run_clang_scan_build()

    raise RuntimeError("Unknown step %s" % step)
566
567
568
class TestResult(object):
    """Outcome of one test step: name, result markup, elapsed time."""

    def __init__(self, name, result, elapsed):
        """Record a result; elapsed seconds are stored as a 1-dp string."""
        self.name = name
        self.result = result
        self.elapsed = "%.1f" % elapsed
576
577
578
class TestFile(object):
    """A named artifact (log file or image) attached to the results page."""

    def __init__(self, name, fname):
        """Store the display name and the artifact filename."""
        self.name = name
        self.fname = fname
585
586
587
class TestResults(object):
    """Accumulates test outcomes and artifacts for the results webpage."""

    def __init__(self):
        """Start an empty result set stamped with the date and git hash."""
        self.date = time.asctime()
        self.githash = util.get_git_hash()
        self.tests = []
        self.files = []
        self.images = []

    def add(self, name, result, elapsed):
        """Record the outcome of one test step."""
        self.tests.append(TestResult(name, result, elapsed))

    def addfile(self, name, fname):
        """Attach a single result file."""
        self.files.append(TestFile(name, fname))

    def addimage(self, name, fname):
        """Attach a single result image."""
        self.images.append(TestFile(name, fname))

    def addglob(self, name, pattern):
        """Attach every buildlogs file matching *pattern*."""
        for path in glob.glob(buildlogs_path(pattern)):
            self.addfile(name, os.path.basename(path))

    def addglobimage(self, name, pattern):
        """Attach every buildlogs image matching *pattern*."""
        for path in glob.glob(buildlogs_path(pattern)):
            self.addimage(name, os.path.basename(path))

    def generate_badge(self):
        """Render the pass/total SVG badge into the buildlogs directory."""
        total = len(self.tests)
        passed = len([t for t in self.tests if "PASSED" in t.result])
        badge_color = "#4c1" if passed == total else "#e05d44"

        badge_text = "{0}/{1}".format(passed, total)
        # reserve width proportional to the text so the svg is not stretched
        text_length = len(badge_text) * 70

        # load the template file
        template_path = 'Tools/autotest/web/autotest-badge-template.svg'
        with open(util.reltopdir(template_path), "r") as f:
            template = f.read()

        # substitute our results into the template
        badge = template.format(color=badge_color,
                                text=badge_text,
                                text_length=text_length)
        with open(buildlogs_path("autotest-badge.svg"), "w") as f:
            f.write(badge)
641
642
643
def copy_tree(f, t, dirs_exist_ok=False):
644
shutil.copytree(f, t, dirs_exist_ok=dirs_exist_ok)
645
646
647
def write_webresults(results_to_write):
    """Write webpage results.

    Expands every HTML template under Tools/autotest/web with
    *results_to_write*, copies the PNG and css assets alongside, and
    regenerates the badge.
    """
    t = mavtemplate.MAVTemplate()
    for h in glob.glob(util.reltopdir('Tools/autotest/web/*.html')):
        html = util.loadfile(h)
        # context manager ensures the output file is closed even if the
        # template expansion raises (the old open/close pair leaked it)
        with open(buildlogs_path(os.path.basename(h)), mode='w') as f:
            t.write(f, html, results_to_write)
    for f in glob.glob(util.reltopdir('Tools/autotest/web/*.png')):
        shutil.copy(f, buildlogs_path(os.path.basename(f)))
    copy_tree(util.reltopdir("Tools/autotest/web/css"), buildlogs_path("css"), dirs_exist_ok=True)
    results_to_write.generate_badge()
659
660
661
def write_fullresults():
    """Write out the full results set.

    Attaches the common logs, per-vehicle artifacts and documentation
    links to the global `results` object, then renders the web pages.
    """
    global results
    results.addglob("Google Earth track", '*.kmz')
    results.addfile('Full Logs', 'autotest-output.txt')
    results.addglob('DataFlash Log', '*-log.bin')
    results.addglob("MAVLink log", '*.tlog')
    results.addglob("GPX track", '*.gpx')

    # results common to all vehicles:
    vehicle_files = [
        ('{vehicle} core', '{vehicle}.core'),
        ('{vehicle} ELF', '{vehicle}.elf'),
    ]
    vehicle_globs = [('{vehicle} log', '{vehicle}-*.BIN'), ]
    for vehicle in all_vehicles():
        subs = {'vehicle': vehicle}
        for vehicle_file in vehicle_files:
            description = vehicle_file[0].format(**subs)
            filename = vehicle_file[1].format(**subs)
            results.addfile(description, filename)
        for vehicle_glob in vehicle_globs:
            description = vehicle_glob[0].format(**subs)
            # renamed from "glob": that name shadowed the imported glob module
            glob_pattern = vehicle_glob[1].format(**subs)
            results.addglob(description, glob_pattern)

    results.addglob("CopterAVC log", 'CopterAVC-*.BIN')
    results.addfile("CopterAVC core", 'CopterAVC.core')

    results.addglob('APM:Libraries documentation', 'docs/libraries/index.html')
    results.addglob('APM:Plane documentation', 'docs/ArduPlane/index.html')
    results.addglob('APM:Copter documentation', 'docs/ArduCopter/index.html')
    results.addglob('APM:Rover documentation', 'docs/Rover/index.html')
    results.addglob('APM:Sub documentation', 'docs/ArduSub/index.html')
    results.addglob('APM:Blimp documentation', 'docs/Blimp/index.html')
    results.addglobimage("Flight Track", '*.png')

    write_webresults(results)
699
700
701
def run_tests(steps):
    """Run a list of steps, recording each outcome in the global `results`.

    Cleans up stale core/diagnostic files first, runs each step in turn
    (a step failure or exception marks the run failed but does not stop
    later steps), prints a failure summary, writes the results pages and
    returns overall success.
    """
    global results

    # remove stale core dumps from previous runs
    corefiles = glob.glob("core*")
    corefiles.extend(glob.glob("ap-*.core"))
    if corefiles:
        print('Removing corefiles: %s' % str(corefiles))
        for f in corefiles:
            os.unlink(f)

    # remove stale crash-diagnostic artifacts
    diagnostic_files = []
    for p in "dumpstack.sh_*", "dumpcore.sh_*", "autotest-*tlog":
        diagnostic_files.extend(glob.glob(p))
    if diagnostic_files:
        print('Removing diagnostic files: %s' % str(diagnostic_files))
        for f in diagnostic_files:
            os.unlink(f)

    passed = True
    failed = []
    # step name -> list of tester instances that failed it
    failed_testinstances = dict()
    for step in steps:
        util.pexpect_close_all()

        t1 = time.time()
        print(">>>> RUNNING STEP: %s at %s" % (step, time.asctime()))
        try:
            # run_step may return a bare flag or (flag, tester) tuple
            success = run_step(step)
            testinstance = None
            if isinstance(success, tuple):
                (success, testinstance) = success
            if success:
                results.add(step, '<span class="passed-text">PASSED</span>',
                            time.time() - t1)
                print(">>>> PASSED STEP: %s at %s" % (step, time.asctime()))
            else:
                print(">>>> FAILED STEP: %s at %s" % (step, time.asctime()))
                passed = False
                failed.append(step)
                if testinstance is not None:
                    if failed_testinstances.get(step) is None:
                        failed_testinstances[step] = []
                    failed_testinstances[step].append(testinstance)
                results.add(step, '<span class="failed-text">FAILED</span>',
                            time.time() - t1)
        except Exception as msg:
            passed = False
            failed.append(step)
            print(">>>> FAILED STEP: %s at %s (%s)" %
                  (step, time.asctime(), msg))
            traceback.print_exc(file=sys.stdout)
            results.add(step,
                        '<span class="failed-text">FAILED</span>',
                        time.time() - t1)

        # make sure the tester's RC input thread is stopped between steps
        global tester
        if tester is not None and tester.rc_thread is not None:
            if passed:
                print("BAD: RC Thread still alive after run_step")
            tester.rc_thread_should_quit = True
            tester.rc_thread.join()
            tester.rc_thread = None

    if not passed:
        keys = failed_testinstances.keys()
        if len(keys):
            print("Failure Summary:")
            for key in keys:
                print(" %s:" % key)
                for testinstance in failed_testinstances[key]:
                    for failure in testinstance.fail_list:
                        print(" " + str(failure))

        print("FAILED %u tests: %s" % (len(failed), failed))

    util.pexpect_close_all()

    write_fullresults()

    return passed
782
783
784
# vehicles whose subtests can be listed via --list-subtests* options
vehicle_list = ['Sub', 'Copter', 'Plane', 'Tracker', 'Rover', 'QuadPlane', 'BalanceBot', 'Helicopter', 'Sailboat', 'Blimp']
785
786
787
def list_subtests():
    """Print the list of tests and test descriptions for each vehicle."""
    for vehicle in sorted(vehicle_list):
        tester_class = tester_class_map["test.%s" % vehicle]
        tester = tester_class("/bin/true", None)
        subtests = tester.tests()
        sorted_list = []
        for subtest in subtests:
            # wrap bare callables in a Test object, consistent with
            # list_subtests_for_vehicle; the previous
            # str(type(x)) == "<class 'method'>" check was fragile and
            # missed non-method callables
            if not isinstance(subtest, Test):
                subtest = Test(subtest)
            sorted_list.append([subtest.name, subtest.description])
        sorted_list.sort()

        print("%s:" % vehicle)
        for subtest in sorted_list:
            print(" %s: %s" % (subtest[0], subtest[1]))
        print("")
804
805
806
def list_subtests_for_vehicle(vehicle_type):
    """Print the space-separated subtest names for one vehicle."""
    # strip a sub-test suffix, e.g. "CopterTests1a" -> "Copter"
    if "Test" in vehicle_type:
        vehicle_type = re.findall('[A-Z][a-z0-9]*', vehicle_type)[0]
    if vehicle_type not in vehicle_list:
        return
    tester_class = tester_class_map["test.%s" % vehicle_type]
    tester = tester_class("/bin/true", None)
    entries = []
    for subtest in tester.tests():
        if not isinstance(subtest, Test):
            subtest = Test(subtest)
        entries.append([subtest.name, subtest.description])
    entries.sort()
    for entry in entries:
        print("%s " % entry[0], end='')
    print("")  # needed to clear the trailing %
824
825
826
if __name__ == "__main__":
827
''' main program '''
828
os.environ['PYTHONUNBUFFERED'] = '1'
829
830
if sys.platform != "darwin":
831
os.putenv('TMPDIR', util.reltopdir('tmp'))
832
833
    class MyOptionParser(optparse.OptionParser):
        """OptionParser whose epilog is printed verbatim (not re-wrapped)."""

        def format_epilog(self, formatter):
            """Return the epilog unchanged, preserving the example line breaks."""
            return self.epilog
839
840
parser = MyOptionParser(
841
"autotest", epilog=""
842
"e.g. autotest.py build.Rover test.Rover # test Rover\n"
843
"e.g. autotest.py build.Rover test.Rover build.Plane test.Plane # test Rover and Plane\n"
844
"e.g. autotest.py --debug --valgrind build.Rover test.Rover # test Rover under Valgrind\n"
845
"e.g. autotest.py --debug --gdb build.Tracker test.Tracker # run Tracker under gdb\n"
846
"e.g. autotest.py --debug --gdb build.Sub test.Sub.DiveManual # do specific Sub test\n"
847
)
848
parser.add_option("--autotest-server",
849
action='store_true',
850
default=False,
851
help='Run in autotest-server mode; dangerous!')
852
parser.add_option("--skip",
853
type='string',
854
default='',
855
help='list of steps to skip (comma separated)')
856
parser.add_option("--list",
857
action='store_true',
858
default=False,
859
help='list the available steps')
860
parser.add_option("--list-subtests",
861
action='store_true',
862
default=False,
863
help='list available subtests e.g. test.Copter')
864
parser.add_option("--viewerip",
865
default=None,
866
help='IP address to send MAVLink and fg packets to')
867
parser.add_option("--enable-fgview",
868
action='store_true',
869
help="Enable FlightGear output")
870
parser.add_option("--map",
871
action='store_true',
872
default=False,
873
help='show map')
874
parser.add_option("--experimental",
875
default=False,
876
action='store_true',
877
help='enable experimental tests')
878
parser.add_option("--timeout",
879
default=None,
880
type='int',
881
help='maximum runtime in seconds')
882
parser.add_option("--show-test-timings",
883
action="store_true",
884
default=False,
885
help="show how long each test took to run")
886
parser.add_option("--validate-parameters",
887
action="store_true",
888
default=False,
889
help="validate vehicle parameter files")
890
parser.add_option("--Werror",
891
action='store_true',
892
default=False,
893
help='configure with --Werror')
894
parser.add_option("--junit",
895
default=False,
896
action='store_true',
897
help='Generate Junit XML tests report')
898
899
group_build = optparse.OptionGroup(parser, "Build options")
900
group_build.add_option("--no-configure",
901
default=False,
902
action='store_true',
903
help='do not configure before building',
904
dest="no_configure")
905
group_build.add_option("", "--waf-configure-args",
906
action="append",
907
dest="waf_configure_args",
908
type="string",
909
default=[],
910
help="extra arguments passed to waf in configure")
911
group_build.add_option("-j", default=None, type='int', help='build CPUs')
912
group_build.add_option("--no-clean",
913
default=False,
914
action='store_true',
915
help='do not clean before building',
916
dest="no_clean")
917
group_build.add_option("--debug",
918
default=None,
919
action='store_true',
920
help='make built SITL binaries debug binaries')
921
group_build.add_option("--no-debug",
922
default=None,
923
action='store_true',
924
help='do not make built SITL binaries debug binaries')
925
group_build.add_option("--coverage",
926
default=False,
927
action='store_true',
928
help='make built binaries coverage binaries')
929
group_build.add_option("--enable-math-check-indexes",
930
default=False,
931
action="store_true",
932
dest="math_check_indexes",
933
help="enable checking of math indexes")
934
group_build.add_option("--postype-single",
935
default=False,
936
action="store_true",
937
dest="postype_single",
938
help="force single precision copter position controller")
939
group_build.add_option("--ekf-single",
940
default=False,
941
action="store_true",
942
dest="ekf_single",
943
help="force single precision EKF")
944
group_build.add_option("--force-32bit",
945
default=False,
946
action='store_true',
947
dest="force_32bit",
948
help="compile sitl using 32-bit")
949
group_build.add_option("", "--ubsan",
950
default=False,
951
action='store_true',
952
dest="ubsan",
953
help="compile sitl with undefined behaviour sanitiser")
954
group_build.add_option("", "--ubsan-abort",
955
default=False,
956
action='store_true',
957
dest="ubsan_abort",
958
help="compile sitl with undefined behaviour sanitiser and abort on error")
959
group_build.add_option("--num-aux-imus",
960
dest="num_aux_imus",
961
default=0,
962
type='int',
963
help='number of auxiliary IMUs to simulate')
964
group_build.add_option("--enable-dronecan-tests",
965
default=False,
966
action='store_true',
967
dest="dronecan_tests",
968
help="enable dronecan tests")
969
parser.add_option_group(group_build)
970
971
group_sim = optparse.OptionGroup(parser, "Simulation options")
972
group_sim.add_option("--speedup",
973
default=None,
974
type='int',
975
help='speedup to run the simulations at')
976
group_sim.add_option("--valgrind",
977
default=False,
978
action='store_true',
979
help='run ArduPilot binaries under valgrind')
980
group_sim.add_option("", "--callgrind",
981
action='store_true',
982
default=False,
983
help="enable valgrind for performance analysis (slow!!)")
984
group_sim.add_option("--gdb",
985
default=False,
986
action='store_true',
987
help='run ArduPilot binaries under gdb')
988
group_sim.add_option("--gdb-no-tui",
989
default=False,
990
action='store_true',
991
help='when running under GDB do NOT start in TUI mode')
992
group_sim.add_option("--gdbserver",
993
default=False,
994
action='store_true',
995
help='run ArduPilot binaries under gdbserver')
996
group_sim.add_option("--lldb",
997
default=False,
998
action='store_true',
999
help='run ArduPilot binaries under lldb')
1000
group_sim.add_option("-B", "--breakpoint",
1001
type='string',
1002
action="append",
1003
default=[],
1004
help="add a breakpoint at given location in debugger")
1005
group_sim.add_option("--disable-breakpoints",
1006
default=False,
1007
action='store_true',
1008
help="disable all breakpoints before starting")
1009
group_sim.add_option("", "--force-ahrs-type",
1010
dest="force_ahrs_type",
1011
default=None,
1012
help="force a specific AHRS type (e.g. 10 for SITL-ekf")
1013
group_sim.add_option("", "--replay",
1014
action='store_true',
1015
help="enable replay logging for tests")
1016
parser.add_option_group(group_sim)
1017
1018
group_completion = optparse.OptionGroup(parser, "Completion helpers")
1019
group_completion.add_option("--list-vehicles",
1020
action='store_true',
1021
default=False,
1022
help='list available vehicles')
1023
group_completion.add_option("--list-vehicles-test",
1024
action='store_true',
1025
default=False,
1026
help='list available vehicle tester')
1027
group_completion.add_option("--list-subtests-for-vehicle",
1028
type='string',
1029
default="",
1030
help='list available subtests for a vehicle e.g Copter')
1031
group_completion.add_option("--reset-after-every-test",
1032
action='store_true',
1033
default=False,
1034
help='reset everything after every test run')
1035
parser.add_option_group(group_completion)
1036
1037
opts, args = parser.parse_args()
1038
1039
# canonicalise on opts.debug:
1040
if opts.debug is None and opts.no_debug is None:
1041
# default is to create debug SITL binaries
1042
opts.debug = True
1043
elif opts.debug is not None and opts.no_debug is not None:
1044
if opts.debug == opts.no_debug:
1045
raise ValueError("no_debug != !debug")
1046
elif opts.no_debug is not None:
1047
opts.debug = not opts.no_debug
1048
1049
if opts.timeout is None:
1050
opts.timeout = 5400
1051
# adjust if we're running in a regime which may slow us down e.g. Valgrind
1052
if opts.valgrind:
1053
opts.timeout *= 10
1054
elif opts.callgrind:
1055
opts.timeout *= 10
1056
elif opts.gdb:
1057
opts.timeout = None
1058
1059
steps = [
1060
'prerequisites',
1061
'build.Binaries',
1062
'build.All',
1063
'build.Parameters',
1064
1065
'build.Replay',
1066
1067
'build.unit_tests',
1068
'run.unit_tests',
1069
'build.examples',
1070
'run.examples',
1071
1072
'build.Plane',
1073
'test.Plane',
1074
'test.QuadPlane',
1075
1076
'build.Rover',
1077
'test.Rover',
1078
'test.BalanceBot',
1079
'test.Sailboat',
1080
1081
'build.Copter',
1082
'test.Copter',
1083
1084
'build.Helicopter',
1085
'test.Helicopter',
1086
1087
'build.Tracker',
1088
'test.Tracker',
1089
1090
'build.Sub',
1091
'test.Sub',
1092
1093
'build.Blimp',
1094
'test.Blimp',
1095
1096
'build.SITLPeriphUniversal',
1097
'test.CAN',
1098
1099
'build.SITLPeriphBattMon',
1100
'test.BattCAN',
1101
1102
# convertgps disabled as it takes 5 hours
1103
# 'convertgpx',
1104
]
1105
1106
moresteps = [
1107
'test.CopterTests1a',
1108
'test.CopterTests1b',
1109
'test.CopterTests1c',
1110
'test.CopterTests1d',
1111
'test.CopterTests1e',
1112
1113
'test.CopterTests2a',
1114
'test.CopterTests2b',
1115
1116
'test.PlaneTests1a',
1117
'test.PlaneTests1b',
1118
1119
'clang-scan-build',
1120
]
1121
1122
# canonicalise the step names. This allows
1123
# backwards-compatability from the hodge-podge
1124
# fly.ArduCopter/drive.APMrover2 to the more common test.Copter
1125
# test.Rover
1126
step_mapping = {
1127
"build.ArduPlane": "build.Plane",
1128
"build.ArduCopter": "build.Copter",
1129
"build.APMrover2": "build.Rover",
1130
"build.ArduSub": "build.Sub",
1131
"build.AntennaTracker": "build.Tracker",
1132
"fly.ArduCopter": "test.Copter",
1133
"fly.ArduPlane": "test.Plane",
1134
"fly.QuadPlane": "test.QuadPlane",
1135
"dive.ArduSub": "test.Sub",
1136
"drive.APMrover2": "test.Rover",
1137
"drive.BalanceBot": "test.BalanceBot",
1138
"drive.balancebot": "test.BalanceBot",
1139
"fly.CopterAVC": "test.Helicopter",
1140
"test.AntennaTracker": "test.Tracker",
1141
"fly.ArduCopterTests1a": "test.CopterTests1a",
1142
"fly.ArduCopterTests1b": "test.CopterTests1b",
1143
"fly.ArduCopterTests1c": "test.CopterTests1c",
1144
"fly.ArduCopterTests1d": "test.CopterTests1d",
1145
"fly.ArduCopterTests1e": "test.CopterTests1e",
1146
1147
"fly.ArduCopterTests2a": "test.CopterTests2a",
1148
"fly.ArduCopterTests2b": "test.CopterTests2b",
1149
1150
}
1151
1152
# form up a list of bits NOT to run, mapping from old step names
1153
# to new step names as appropriate.
1154
skipsteps = opts.skip.split(',')
1155
new_skipsteps = []
1156
for skipstep in skipsteps:
1157
if skipstep in step_mapping:
1158
new_skipsteps.append(step_mapping[skipstep])
1159
else:
1160
new_skipsteps.append(skipstep)
1161
skipsteps = new_skipsteps
1162
1163
# ensure we catch timeouts
1164
signal.signal(signal.SIGALRM, alarm_handler)
1165
if opts.timeout is not None:
1166
signal.alarm(opts.timeout)
1167
1168
if opts.list:
1169
for step in steps:
1170
print(step)
1171
sys.exit(0)
1172
1173
if opts.list_subtests:
1174
list_subtests()
1175
sys.exit(0)
1176
1177
if opts.list_subtests_for_vehicle:
1178
list_subtests_for_vehicle(opts.list_subtests_for_vehicle)
1179
sys.exit(0)
1180
1181
if opts.list_vehicles_test:
1182
print(' '.join(__bin_names.keys()))
1183
sys.exit(0)
1184
1185
if opts.list_vehicles:
1186
print(' '.join(vehicle_list))
1187
sys.exit(0)
1188
1189
util.mkdir_p(buildlogs_dirpath())
1190
1191
lckfile = buildlogs_path('autotest.lck')
1192
print("lckfile=%s" % repr(lckfile))
1193
lck = util.lock_file(lckfile)
1194
1195
if lck is None:
1196
print("autotest is locked - exiting. lckfile=(%s)" % (lckfile,))
1197
sys.exit(0)
1198
1199
atexit.register(util.pexpect_close_all)
1200
1201
# provide backwards-compatability from (e.g.) drive.APMrover2 -> test.Rover
1202
newargs = []
1203
for arg in args:
1204
for _from, to in step_mapping.items():
1205
arg = re.sub("^%s" % _from, to, arg)
1206
newargs.append(arg)
1207
args = newargs
1208
1209
if len(args) == 0 and not opts.autotest_server:
1210
print("Steps must be supplied; try --list and/or --list-subtests or --help")
1211
sys.exit(1)
1212
1213
if len(args) > 0:
1214
# allow a wildcard list of steps
1215
matched = []
1216
for a in args:
1217
matches = [step for step in steps
1218
if fnmatch.fnmatch(step.lower(), a.lower())]
1219
x = find_specific_test_to_run(a)
1220
if x is not None:
1221
matches.append(x)
1222
1223
if a in moresteps:
1224
matches.append(a)
1225
1226
if not len(matches):
1227
print("No steps matched {}".format(a))
1228
sys.exit(1)
1229
matched.extend(matches)
1230
steps = matched
1231
1232
# skip steps according to --skip option:
1233
steps_to_run = [s for s in steps if should_run_step(s)]
1234
1235
results = TestResults()
1236
1237
try:
1238
if not run_tests(steps_to_run):
1239
sys.exit(1)
1240
except KeyboardInterrupt:
1241
print("KeyboardInterrupt caught; closing pexpect connections")
1242
util.pexpect_close_all()
1243
raise
1244
except Exception:
1245
# make sure we kill off any children
1246
util.pexpect_close_all()
1247
raise
1248
1249