Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
aws
GitHub Repository: aws/aws-cli
Path: blob/develop/tests/integration/test_cli.py
2624 views
# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import time
14
import signal
15
import os
16
import tempfile
17
import shutil
18
19
import botocore.session
20
from awscli.testutils import unittest, aws, BaseS3CLICommand
21
from awscli.testutils import temporary_file
22
from awscli.testutils import skip_if_windows
23
from awscli.testutils import random_bucket_name
24
from awscli.clidriver import create_clidriver
25
26
27
class TestBasicCommandFunctionality(unittest.TestCase):
    """
    These are a set of tests that assert high level features of
    the CLI. They don't test anything exhaustively and they're meant as a smoke
    test to verify basic CLI functionality isn't entirely broken.
    """

    def put_object(self, bucket, key, content, extra_args=None):
        """Create *bucket*, upload *content* under *key*, and register cleanup.

        Uses a direct botocore client (not the CLI under test) so fixture
        setup cannot be affected by the CLI behavior being verified.
        """
        session = botocore.session.get_session()
        client = session.create_client('s3', 'us-east-1')
        client.create_bucket(Bucket=bucket, ObjectOwnership='ObjectWriter')
        # Give the freshly created bucket a moment before mutating it.
        time.sleep(5)
        client.delete_public_access_block(Bucket=bucket)
        self.addCleanup(client.delete_bucket, Bucket=bucket)
        call_args = {
            'Bucket': bucket,
            'Key': key, 'Body': content
        }
        if extra_args is not None:
            call_args.update(extra_args)
        client.put_object(**call_args)
        self.addCleanup(client.delete_object, Bucket=bucket, Key=key)

    def test_ec2_describe_instances(self):
        # Verify we can make a call and get output.
        p = aws('ec2 describe-instances')
        self.assertEqual(p.rc, 0)
        # We don't know what instances a user might have, but we know
        # there should at least be a Reservations key.
        self.assertIn('Reservations', p.json)

    def test_help_output(self):
        p = aws('help')
        self.assertEqual(p.rc, 0)
        self.assertIn('AWS', p.stdout)
        self.assertRegex(
            p.stdout, r'The\s+AWS\s+Command\s+Line\s+Interface')

    def test_service_help_output(self):
        p = aws('ec2 help')
        self.assertEqual(p.rc, 0)
        self.assertRegex(p.stdout, r'Amazon\s+EC2')

    def test_operation_help_output(self):
        p = aws('ec2 describe-instances help')
        self.assertEqual(p.rc, 0)
        # XXX: This is a rendering bug that needs to be fixed in bcdoc. In
        # the RST version there are multiple spaces between certain words.
        # For now we're making the test less strict about formatting, but
        # we eventually should update this test to check exactly for
        # 'The describe-instances operation'.
        self.assertRegex(p.stdout, r'\s+Describes\s+the\s+specified\s+instances')

    def test_topic_list_help_output(self):
        p = aws('help topics')
        self.assertEqual(p.rc, 0)
        self.assertRegex(p.stdout, r'\s+AWS\s+CLI\s+Topic\s+Guide')
        self.assertRegex(
            p.stdout,
            r'\s+This\s+is\s+the\s+AWS\s+CLI\s+Topic\s+Guide'
        )

    def test_topic_help_output(self):
        p = aws('help return-codes')
        self.assertEqual(p.rc, 0)
        self.assertRegex(p.stdout, r'\s+AWS\s+CLI\s+Return\s+Codes')
        self.assertRegex(
            p.stdout,
            r'These\s+are\s+the\s+following\s+return\s+codes'
        )

    def test_operation_help_with_required_arg(self):
        p = aws('s3api get-object help')
        self.assertEqual(p.rc, 0, p.stderr)
        self.assertIn('get-object', p.stdout)

    def test_service_help_with_required_option(self):
        # In cloudsearchdomain, the --endpoint-url is required.
        # We want to make sure if you're just getting help text
        # that we don't trigger that validation.
        p = aws('cloudsearchdomain help')
        self.assertEqual(p.rc, 0, p.stderr)
        self.assertIn('cloudsearchdomain', p.stdout)
        # And nothing on stderr about missing options.
        self.assertEqual(p.stderr, '')

    def test_operation_help_with_required_option(self):
        p = aws('cloudsearchdomain search help')
        self.assertEqual(p.rc, 0, p.stderr)
        self.assertIn('search', p.stdout)
        # And nothing on stderr about missing options.
        self.assertEqual(p.stderr, '')

    def test_help_with_warning_blocks(self):
        p = aws('bedrock-runtime invoke-model help')
        self.assertEqual(p.rc, 0, p.stderr)
        # Check text that appears in the warning block to ensure
        # the block was actually rendered.
        self.assertRegex(p.stdout, r"To\s+deny\s+all\s+inference\s+access")

    def test_param_shorthand(self):
        p = aws(
            'ec2 describe-instances --filters Name=instance-id,Values=i-123')
        self.assertEqual(p.rc, 0)
        self.assertIn('Reservations', p.json)

    def test_param_json(self):
        p = aws(
            'ec2 describe-instances --filters '
            '\'{"Name": "instance-id", "Values": ["i-123"]}\'')
        self.assertEqual(p.rc, 0, p.stdout + p.stderr)
        self.assertIn('Reservations', p.json)

    def test_param_with_bad_json(self):
        p = aws(
            'ec2 describe-instances --filters '
            '\'{"Name": "bad-filter", "Values": ["i-123"]}\'')
        self.assertEqual(p.rc, 255)
        self.assertIn("The filter 'bad-filter' is invalid", p.stderr,
                      "stdout: %s, stderr: %s" % (p.stdout, p.stderr))

    def test_param_with_file(self):
        d = tempfile.mkdtemp()
        self.addCleanup(os.rmdir, d)
        param_file = os.path.abspath(os.path.join(d, 'params.json'))
        with open(param_file, 'w') as f:
            f.write('[{"Name": "instance-id", "Values": ["i-123"]}]')
        self.addCleanup(os.remove, param_file)
        p = aws('ec2 describe-instances --filters file://%s' % param_file)
        self.assertEqual(p.rc, 0)
        self.assertIn('Reservations', p.json)

    def test_streaming_output_operation(self):
        d = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, d)
        bucket_name = random_bucket_name('clistream')

        self.put_object(bucket=bucket_name, key='foobar',
                        content='foobar contents')
        p = aws('s3api get-object --bucket %s --key foobar %s' % (
            bucket_name, os.path.join(d, 'foobar')))
        self.assertEqual(p.rc, 0)
        with open(os.path.join(d, 'foobar')) as f:
            contents = f.read()
        self.assertEqual(contents, 'foobar contents')

    def test_no_sign_request(self):
        d = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, d)

        env_vars = os.environ.copy()
        # Deliberately bogus credentials: a signed request must fail.
        env_vars['AWS_ACCESS_KEY_ID'] = 'foo'
        env_vars['AWS_SECRET_ACCESS_KEY'] = 'bar'

        bucket_name = random_bucket_name('nosign')
        self.put_object(bucket_name, 'foo', content='bar',
                        extra_args={'ACL': 'public-read-write'})

        p = aws('s3api get-object --bucket %s --key foo %s' % (
            bucket_name, os.path.join(d, 'foo')), env_vars=env_vars)
        # Should have credential issues.
        self.assertEqual(p.rc, 255)

        p = aws('s3api get-object --bucket %s --key foo '
                '%s --no-sign-request' % (bucket_name, os.path.join(d, 'foo')),
                env_vars=env_vars)

        # Should be able to download the file when not signing.
        self.assertEqual(p.rc, 0)

        with open(os.path.join(d, 'foo')) as f:
            contents = f.read()
        self.assertEqual(contents, 'bar')

    def test_no_paginate_arg(self):
        d = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, d)
        bucket_name = random_bucket_name('nopaginate')

        self.put_object(bucket=bucket_name, key='foobar',
                        content='foobar contents')
        p = aws('s3api list-objects --bucket %s --no-paginate' % bucket_name)
        self.assertEqual(p.rc, 0, p.stdout + p.stderr)

        p = aws('s3api list-objects --bucket %s' % bucket_name)
        self.assertEqual(p.rc, 0, p.stdout + p.stderr)

    def test_top_level_options_debug(self):
        p = aws('ec2 describe-instances --debug')
        self.assertEqual(p.rc, 0)
        self.assertIn('DEBUG', p.stderr)

    def test_make_requests_to_other_region(self):
        p = aws('ec2 describe-instances --region us-west-2')
        self.assertEqual(p.rc, 0)
        self.assertIn('Reservations', p.json)

    def test_help_usage_top_level(self):
        p = aws('')
        self.assertIn('usage: aws [options] <command> '
                      '<subcommand> [<subcommand> ...] [parameters]', p.stderr)
        self.assertIn('aws: error', p.stderr)

    def test_help_usage_service_level(self):
        p = aws('ec2')
        self.assertIn('usage: aws [options] <command> '
                      '<subcommand> [<subcommand> ...] [parameters]', p.stderr)
        # python3: aws: error: the following arguments are required: operation
        # python2: aws: error: too few arguments
        # We don't care too much about the specific error message, as long
        # as it says we have a parse error.
        self.assertIn('aws: error', p.stderr)

    def test_help_usage_operation_level(self):
        p = aws('ec2 start-instances')
        self.assertIn('usage: aws [options] <command> '
                      '<subcommand> [<subcommand> ...] [parameters]', p.stderr)

    def test_unknown_argument(self):
        p = aws('ec2 describe-instances --filterss')
        self.assertEqual(p.rc, 255)
        self.assertIn('Unknown options: --filterss', p.stderr)

    def test_table_output(self):
        p = aws('ec2 describe-instances --output table --color off')
        # We're not testing the specifics of table output, we just want
        # to make sure the output looks like a table using some heuristics.
        # If this prints JSON instead of a table, for example, this test
        # should fail.
        self.assertEqual(p.rc, 0, p.stderr)
        self.assertIn('-----', p.stdout)
        self.assertIn('+-', p.stdout)
        self.assertIn('DescribeInstances', p.stdout)

    def test_version(self):
        p = aws('--version')
        self.assertEqual(p.rc, 0)
        # The version is written to standard out for Python 3.4 and
        # standard error for other Python versions.
        version_output = p.stderr.startswith('aws-cli') or \
            p.stdout.startswith('aws-cli')
        self.assertTrue(version_output, p.stderr)

    def test_traceback_printed_when_debug_on(self):
        p = aws('ec2 describe-instances --filters BADKEY=foo --debug')
        self.assertIn('Traceback (most recent call last):', p.stderr, p.stderr)
        # Also should see DEBUG statements:
        self.assertIn('DEBUG', p.stderr, p.stderr)

    def test_leftover_args_in_operation(self):
        p = aws('ec2 describe-instances BADKEY=foo')
        self.assertEqual(p.rc, 255)
        self.assertIn("Unknown option", p.stderr, p.stderr)

    def test_json_param_parsing(self):
        # This is covered by unit tests in botocore, but this is a sanity
        # check that we get a json response from a json service.
        p = aws('swf list-domains --registration-status REGISTERED')
        self.assertEqual(p.rc, 0)
        self.assertIsInstance(p.json, dict)

        p = aws('dynamodb list-tables')
        self.assertEqual(p.rc, 0)
        self.assertIsInstance(p.json, dict)

    def test_pagination_with_text_output(self):
        p = aws('iam list-users --output text')
        self.assertEqual(p.rc, 0)

    def test_bad_lc_ctype_env_var_is_handled(self):
        # Test for bad LC_CTYPE on Mac OS X.
        base_env_vars = os.environ.copy()
        base_env_vars['LC_CTYPE'] = 'UTF-8'
        p = aws('iam list-users', env_vars=base_env_vars)
        self.assertEqual(p.rc, 0)

    def test_error_msg_with_no_region_configured(self):
        environ = os.environ.copy()
        try:
            del environ['AWS_DEFAULT_REGION']
        except KeyError:
            pass
        environ['AWS_CONFIG_FILE'] = 'nowhere-foo'
        p = aws('ec2 describe-instances', env_vars=environ)
        self.assertIn('must specify a region', p.stderr)

    @skip_if_windows('Ctrl-C not supported on windows.')
    def test_ctrl_c_does_not_print_traceback(self):
        # Relying on the fact that this generally takes
        # more than 1 second to complete.
        process = aws('ec2 describe-images', wait_for_finish=False)
        time.sleep(1)
        process.send_signal(signal.SIGINT)
        stdout, stderr = process.communicate()
        self.assertNotIn(b'Traceback', stdout)
        self.assertNotIn(b'Traceback', stderr)
class TestCommandLineage(unittest.TestCase):
    """Verify that command ``lineage``/``lineage_names`` reflect the actual
    path of commands from the top-level CLI down to the named command."""

    def setUp(self):
        self.driver = create_clidriver()
        self.top_help = self.driver.create_help_command()

    def assert_lineage_names(self, ref_lineage_names):
        """Walk the command table along *ref_lineage_names* and assert the
        final command's lineage matches that path exactly."""
        command_table = self.top_help.command_table
        # Descend one level per name; after the loop ``command`` is the
        # deepest command in the requested lineage.
        for cmd_name in ref_lineage_names:
            command = command_table[cmd_name]
            help_command = command.create_help_command()
            command_table = help_command.command_table

        actual_lineage_names = [cmd.name for cmd in command.lineage]

        # Assert the actual names of each command in a lineage is as expected.
        self.assertEqual(actual_lineage_names, ref_lineage_names)

        # Assert that ``lineage_names`` for each command is in sync with what
        # is actually in the command's ``lineage``.
        self.assertEqual(command.lineage_names, actual_lineage_names)

    def test_service_level_commands(self):
        # Check a normal unchanged service command
        self.assert_lineage_names(['ec2'])

        # Check a service that had its name changed.
        self.assert_lineage_names(['s3api'])

        # Check a couple custom service level commands.
        self.assert_lineage_names(['s3'])
        self.assert_lineage_names(['configure'])

    def test_operation_level_commands(self):
        # Check a normal unchanged service and operation command
        self.assert_lineage_names(['dynamodb', 'create-table'])

        # Check an operation commands with a service that had its name changed.
        self.assert_lineage_names(['s3api', 'list-objects'])

        # Check a custom operation level command with no
        # custom service command.
        self.assert_lineage_names(['emr', 'create-cluster'])

        # Check a couple of operation level commands that
        # are based off a custom service command
        self.assert_lineage_names(['configure', 'set'])
        self.assert_lineage_names(['s3', 'cp'])

    def test_wait_commands(self):
        self.assert_lineage_names(['ec2', 'wait'])
        self.assert_lineage_names(['ec2', 'wait', 'instance-running'])
# We're using BaseS3CLICommand because we need a service to use
# for testing the global arguments. We're picking S3 here because
# the BaseS3CLICommand has a lot of utility functions that help
# with this.
class TestGlobalArgs(BaseS3CLICommand):

    def test_endpoint_url(self):
        p = aws('s3api list-objects --bucket dnscompat '
                '--endpoint-url http://localhost:51515 '
                '--debug')
        debug_logs = p.stderr
        original_hostname = 'dnscompat.s3.amazonaws.com'
        expected = 'localhost'
        self.assertNotIn(original_hostname, debug_logs,
                         '--endpoint-url is being ignored.')
        self.assertIn(expected, debug_logs)

    def test_no_pagination(self):
        bucket_name = self.create_bucket()
        self.put_object(bucket_name, 'foo.txt', contents=b'bar')
        self.put_object(bucket_name, 'foo2.txt', contents=b'bar')
        self.put_object(bucket_name, 'foo3.txt', contents=b'bar')
        p = aws('s3api list-objects --bucket %s '
                '--no-paginate --output json' % bucket_name)
        # A really simple way to check that --no-paginate was
        # honored is to see if we have all the mirrored input
        # arguments in the response json. These normally aren't
        # present when the response is paginated.
        self.assert_no_errors(p)
        response_json = p.json
        self.assertIn('IsTruncated', response_json)
        self.assertIn('Name', response_json)

    def test_no_paginate_and_original_args(self):
        bucket_name = self.create_bucket()
        self.put_object(bucket_name, 'foo.txt', contents=b'bar')
        self.put_object(bucket_name, 'foo2.txt', contents=b'bar')
        self.put_object(bucket_name, 'foo3.txt', contents=b'bar')
        p = aws('s3api list-objects --bucket %s '
                '--max-keys 1 --no-paginate --output json' % bucket_name)
        self.assert_no_errors(p)
        response_json = p.json
        self.assertEqual(len(response_json['Contents']), 1)

    def test_max_items(self):
        bucket_name = self.create_bucket()
        self.put_object(bucket_name, 'foo.txt', contents=b'bar')
        self.put_object(bucket_name, 'foo2.txt', contents=b'bar')
        self.put_object(bucket_name, 'foo3.txt', contents=b'bar')
        p = aws('s3api list-objects --bucket %s '
                '--max-items 1 --output json' % bucket_name)
        self.assert_no_errors(p)
        response_json = p.json
        self.assertEqual(len(response_json['Contents']), 1)

    def test_query(self):
        bucket_name = self.create_bucket()
        self.put_object(bucket_name, 'foo.txt', contents=b'bar')
        p = aws('s3api list-objects --bucket %s '
                '--query Contents[].Key --output json' % bucket_name)
        self.assert_no_errors(p)
        response_json = p.json
        self.assertEqual(response_json, ['foo.txt'])

    def test_no_sign_requests(self):
        bucket_name = self.create_bucket()
        self.put_object(bucket_name, 'public', contents=b'bar',
                        extra_args={'ACL': 'public-read'})
        self.put_object(bucket_name, 'private', contents=b'bar')
        env = os.environ.copy()
        # Set the env vars to bad values so if we do actually
        # try to sign the request, we'll get an auth error.
        env['AWS_ACCESS_KEY_ID'] = 'foo'
        env['AWS_SECRET_ACCESS_KEY'] = 'bar'
        p = aws('s3api head-object --bucket %s --key public --no-sign-request'
                % bucket_name, env_vars=env)
        self.assert_no_errors(p)
        self.assertIn('ETag', p.json)

        # Should fail because we're not signing the request but the object is
        # private.
        p = aws('s3api head-object --bucket %s --key private --no-sign-request'
                % bucket_name, env_vars=env)
        self.assertEqual(p.rc, 255)

    def test_profile_arg_has_precedence_over_env_vars(self):
        # At a high level, we're going to set access_key/secret_key
        # via env vars, but ensure that a --profile <foo> results
        # in creds being retrieved from the shared creds file
        # and not from env vars.
        env_vars = os.environ.copy()
        with temporary_file('w') as f:
            env_vars.pop('AWS_PROFILE', None)
            env_vars.pop('AWS_DEFAULT_PROFILE', None)
            # 'aws configure list' only shows 4 values
            # from the credentials so we'll show
            # 4 char values.
            env_vars['AWS_ACCESS_KEY_ID'] = 'enva'
            env_vars['AWS_SECRET_ACCESS_KEY'] = 'envb'
            env_vars['AWS_SHARED_CREDENTIALS_FILE'] = f.name
            env_vars['AWS_CONFIG_FILE'] = 'does-not-exist-foo'
            f.write(
                '[from_argument]\n'
                'aws_access_key_id=proa\n'
                'aws_secret_access_key=prob\n'
            )
            f.flush()
            p = aws('configure list --profile from_argument',
                    env_vars=env_vars)
            # 1. We should see the profile name being set.
            self.assertIn('from_argument', p.stdout)
            # 2. The creds should be proa/prob, which come
            # from the "from_argument" profile.
            self.assertIn('proa', p.stdout)
            self.assertIn('prob', p.stdout)
            self.assertIn('shared-credentials-file', p.stdout)

    def test_profile_arg_wins_over_profile_env_var(self):
        env_vars = os.environ.copy()
        with temporary_file('w') as f:
            # Remove existing profile related env vars.
            env_vars.pop('AWS_PROFILE', None)
            env_vars.pop('AWS_DEFAULT_PROFILE', None)
            env_vars['AWS_SHARED_CREDENTIALS_FILE'] = f.name
            env_vars['AWS_CONFIG_FILE'] = 'does-not-exist-foo'
            f.write(
                '[from_env_var]\n'
                'aws_access_key_id=enva\n'
                'aws_secret_access_key=envb\n'
                '\n'
                '[from_argument]\n'
                'aws_access_key_id=proa\n'
                'aws_secret_access_key=prob\n'
            )
            f.flush()
            # Now we set the current profile via env var:
            env_vars['AWS_PROFILE'] = 'from_env_var'
            # If we specify the --profile argument, that
            # value should win over the AWS_PROFILE env var.
            p = aws('configure list --profile from_argument',
                    env_vars=env_vars)
            # 1. We should see the profile name being set.
            self.assertIn('from_argument', p.stdout)
            # 2. The creds should be proa/prob, which come
            # from the "from_argument" profile.
            self.assertIn('proa', p.stdout)
            self.assertIn('prob', p.stdout)
529
# Allow running this module directly as a script.
if __name__ == '__main__':
    unittest.main()