GitHub Repository: aws/aws-cli
Path: blob/develop/tests/unit/customizations/s3/test_subcommands.py
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import argparse
import os
import sys

import botocore.session
from awscli.customizations.s3.s3 import S3
from awscli.customizations.s3.subcommands import CommandParameters, \
    CommandArchitecture, CpCommand, SyncCommand, ListCommand, \
    RbCommand, get_client
from awscli.customizations.s3.transferconfig import RuntimeConfig
from awscli.customizations.s3.syncstrategy.base import \
    SizeAndLastModifiedSync, NeverSync, MissingFileSync
from awscli.testutils import mock, unittest, BaseAWSHelpOutputTest, \
    BaseAWSCommandParamsTest, FileCreator
from tests.unit.customizations.s3 import make_loc_files, clean_loc_files
from awscli.compat import StringIO
class FakeArgs(object):
    def __init__(self, **kwargs):
        self.__dict__.update(kwargs)

    def __contains__(self, key):
        return key in self.__dict__
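

# Illustrative addition, not part of the original suite: a minimal check of
# the FakeArgs helper above, which stands in for an argparse.Namespace by
# exposing keyword arguments as attributes and supporting ``in`` checks
# against the attribute names.
class TestFakeArgs(unittest.TestCase):
    def test_behaves_like_a_namespace(self):
        args = FakeArgs(path='s3://mybucket/', dir_op=False)
        self.assertEqual(args.path, 's3://mybucket/')
        self.assertFalse(args.dir_op)
        self.assertIn('dir_op', args)
        self.assertNotIn('missing_attribute', args)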


class TestGetClient(unittest.TestCase):
    def test_client(self):
        session = mock.Mock()
        client = get_client(session, region='us-west-1', endpoint_url='URL',
                            verify=True)
        session.create_client.assert_called_with(
            's3', region_name='us-west-1', endpoint_url='URL', verify=True,
            config=None)
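

# Note (illustrative observation, not part of the original file): the test
# above pins down get_client's translation of CLI-level options into
# botocore's create_client parameters, e.g. ``region`` becomes
# ``region_name`` and the SSL-verification flag is passed through as
# ``verify``.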


class TestRbCommand(unittest.TestCase):
    def setUp(self):
        self.session = mock.Mock()
        self.session.get_scoped_config.return_value = {}
        self.rb_command = RbCommand(self.session)
        self.parsed_args = FakeArgs(path='s3://mybucket/',
                                    force=True, dir_op=False)
        self.parsed_globals = FakeArgs(region=None, endpoint_url=None,
                                       verify_ssl=None)
        self.cmd_name = 'awscli.customizations.s3.subcommands.RmCommand'
        self.arch_name = 'awscli.customizations.s3.subcommands.CommandArchitecture'

    def test_rb_command_with_force_deletes_objects_in_bucket(self):
        with mock.patch(self.cmd_name) as rm_command:
            with mock.patch(self.arch_name):
                # Patching RmCommand yields a mock command instance whose
                # __call__ should return the RC of the command. In this case
                # we'll have it return an RC of 0, which indicates success.
                rm_command.return_value.return_value = 0
                self.rb_command._run_main(self.parsed_args,
                                          parsed_globals=self.parsed_globals)
                # Because of --force we should have called the
                # rm_command with the --recursive option.
                rm_command.return_value.assert_called_with(
                    ['s3://mybucket/', '--recursive'], mock.ANY)

    def test_rb_command_with_force_requires_strict_path(self):
        with self.assertRaises(ValueError):
            self.parsed_args.path = 's3://mybucket/mykey'
            self.rb_command._run_main(self.parsed_args,
                                      parsed_globals=self.parsed_globals)


class TestLSCommand(unittest.TestCase):
    def setUp(self):
        self.session = mock.Mock()
        self.session.create_client.return_value.list_buckets.return_value\
            = {'Buckets': []}
        self.session.create_client.return_value.get_paginator.return_value\
            .paginate.return_value = [{'Contents': [], 'CommonPrefixes': []}]

    def _get_fake_kwargs(self, override=None):
        fake_kwargs = {
            'paths': 's3://',
            'dir_op': False,
            'human_readable': False,
            'summarize': False,
            'page_size': None,
            'request_payer': None,
            'bucket_name_prefix': None,
            'bucket_region': None,
        }
        fake_kwargs.update(override or {})

        return fake_kwargs
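
    # Illustrative addition, not part of the original suite: a minimal check
    # that ``_get_fake_kwargs`` returns the baseline ``ls`` arguments and
    # lets a test override individual entries without restating the whole
    # dictionary.
    def test_get_fake_kwargs_override(self):
        kwargs = self._get_fake_kwargs({'page_size': '10'})
        self.assertEqual(kwargs['page_size'], '10')
        self.assertEqual(kwargs['paths'], 's3://')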

    def test_ls_command_for_bucket(self):
        ls_command = ListCommand(self.session)
        parsed_args = FakeArgs(**self._get_fake_kwargs({
            'paths': 's3://mybucket/',
            'page_size': '5',
        }))
        parsed_globals = mock.Mock()
        ls_command._run_main(parsed_args, parsed_globals)
        call = self.session.create_client.return_value.list_objects_v2
        paginate = self.session.create_client.return_value.get_paginator\
            .return_value.paginate
        # We should make no operation calls.
        self.assertEqual(call.call_count, 0)
        # And only a single pagination call to ListObjectsV2.
        self.session.create_client.return_value.get_paginator.\
            assert_called_with('list_objects_v2')
        ref_call_args = {'Bucket': u'mybucket', 'Delimiter': '/',
                         'Prefix': u'',
                         'PaginationConfig': {'PageSize': u'5'}}

        paginate.assert_called_with(**ref_call_args)

    def test_ls_command_with_no_args(self):
        ls_command = ListCommand(self.session)
        parsed_global = FakeArgs(region=None, endpoint_url=None,
                                 verify_ssl=None)
        parsed_args = FakeArgs(**self._get_fake_kwargs())
        ls_command._run_main(parsed_args, parsed_global)
        call = self.session.create_client.return_value.list_buckets
        paginate = self.session.create_client.return_value.get_paginator\
            .return_value.paginate

        # We should make no operation calls.
        self.assertEqual(call.call_count, 0)
        # And only a single pagination call to ListBuckets.
        self.session.create_client.return_value.get_paginator.\
            assert_called_with('list_buckets')
        ref_call_args = {'PaginationConfig': {'PageSize': None}}

        paginate.assert_called_with(**ref_call_args)

        # Verify get_client
        get_client = self.session.create_client
        args = get_client.call_args
        self.assertEqual(args, mock.call(
            's3', region_name=None, endpoint_url=None, verify=None,
            config=None))

    def test_ls_with_bucket_name_prefix(self):
        ls_command = ListCommand(self.session)
        parsed_args = FakeArgs(**self._get_fake_kwargs({
            'bucket_name_prefix': 'myprefix',
        }))
        parsed_globals = FakeArgs(
            region=None,
            endpoint_url=None,
            verify_ssl=None,
        )
        ls_command._run_main(parsed_args, parsed_globals)
        call = self.session.create_client.return_value.list_objects
        paginate = self.session.create_client.return_value.get_paginator\
            .return_value.paginate
        # We should make no operation calls.
        self.assertEqual(call.call_count, 0)
        self.session.create_client.return_value.get_paginator.\
            assert_called_with('list_buckets')
        ref_call_args = {
            'PaginationConfig': {'PageSize': None},
            'Prefix': 'myprefix',
        }

        paginate.assert_called_with(**ref_call_args)

    def test_ls_with_bucket_region(self):
        ls_command = ListCommand(self.session)
        parsed_args = FakeArgs(**self._get_fake_kwargs({
            'bucket_region': 'us-west-1',
        }))
        parsed_globals = FakeArgs(
            region=None,
            endpoint_url=None,
            verify_ssl=None,
        )
        ls_command._run_main(parsed_args, parsed_globals)
        call = self.session.create_client.return_value.list_objects
        paginate = self.session.create_client.return_value.get_paginator\
            .return_value.paginate
        # We should make no operation calls.
        self.assertEqual(call.call_count, 0)
        self.session.create_client.return_value.get_paginator.\
            assert_called_with('list_buckets')
        ref_call_args = {
            'PaginationConfig': {'PageSize': None},
            'BucketRegion': 'us-west-1',
        }

        paginate.assert_called_with(**ref_call_args)

    def test_ls_with_verify_argument(self):
        ls_command = ListCommand(self.session)
        parsed_global = FakeArgs(region='us-west-2', endpoint_url=None,
                                 verify_ssl=False)
        parsed_args = FakeArgs(**self._get_fake_kwargs({}))
        ls_command._run_main(parsed_args, parsed_global)
        # Verify get_client
        get_client = self.session.create_client
        args = get_client.call_args
        self.assertEqual(args, mock.call(
            's3', region_name='us-west-2', endpoint_url=None, verify=False,
            config=None))

    def test_ls_with_requester_pays(self):
        ls_command = ListCommand(self.session)
        parsed_args = FakeArgs(**self._get_fake_kwargs({
            'paths': 's3://mybucket/',
            'page_size': '5',
            'request_payer': 'requester',
        }))
        parsed_globals = mock.Mock()
        ls_command._run_main(parsed_args, parsed_globals)
        call = self.session.create_client.return_value.list_objects
        paginate = self.session.create_client.return_value.get_paginator\
            .return_value.paginate
        # We should make no operation calls.
        self.assertEqual(call.call_count, 0)
        # And only a single pagination call to ListObjectsV2.
        self.session.create_client.return_value.get_paginator.\
            assert_called_with('list_objects_v2')
        ref_call_args = {
            'Bucket': u'mybucket', 'Delimiter': '/',
            'Prefix': u'', 'PaginationConfig': {'PageSize': '5'},
            'RequestPayer': 'requester',
        }

        paginate.assert_called_with(**ref_call_args)


class CommandArchitectureTest(BaseAWSCommandParamsTest):
    def setUp(self):
        super(CommandArchitectureTest, self).setUp()
        self.session = self.driver.session
        self.bucket = 'mybucket'
        self.file_creator = FileCreator()
        self.loc_files = make_loc_files(self.file_creator)
        self.output = StringIO()
        self.err_output = StringIO()
        self.saved_stdout = sys.stdout
        self.saved_stderr = sys.stderr
        sys.stdout = self.output
        sys.stderr = self.err_output

    def tearDown(self):
        self.output.close()
        self.err_output.close()
        sys.stdout = self.saved_stdout
        sys.stderr = self.saved_stderr

        super(CommandArchitectureTest, self).tearDown()
        clean_loc_files(self.file_creator)

    def _get_file_path(self, file):
        try:
            return os.path.relpath(file)
        except ValueError:
            # In some cases (usually inside a Windows-based GitHub Action)
            # the tests live on one volume and the temp folder on another;
            # there is no relative path between two volumes, so we use the
            # absolute path instead.
            return os.path.abspath(file)
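
    # Illustrative note, not part of the original file: on Windows,
    # os.path.relpath(r'D:\tmp\file.txt') raises ValueError when the current
    # working directory sits on a different drive (e.g. C:\), because no
    # relative path exists across drives; that is the case handled above.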

    def test_set_client_no_source(self):
        session = mock.Mock()
        cmd_arc = CommandArchitecture(session, 'sync',
                                      {'region': 'us-west-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None,
                                       'source_region': None})
        cmd_arc.set_clients()
        self.assertEqual(session.create_client.call_count, 2)
        self.assertEqual(
            session.create_client.call_args_list[0],
            mock.call(
                's3', region_name='us-west-1', endpoint_url=None, verify=None,
                config=None)
        )
        # A client created with the same arguments as the first should be
        # used for the source client since no source region was provided.
        self.assertEqual(
            session.create_client.call_args_list[1],
            mock.call(
                's3', region_name='us-west-1', endpoint_url=None, verify=None,
                config=None)
        )

    def test_set_client_with_source(self):
        session = mock.Mock()
        cmd_arc = CommandArchitecture(session, 'sync',
                                      {'region': 'us-west-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None,
                                       'paths_type': 's3s3',
                                       'source_region': 'us-west-2'})
        cmd_arc.set_clients()
        create_client_args = session.create_client.call_args_list
        # Assert that three clients were created in total.
        self.assertEqual(len(create_client_args), 3)
        self.assertEqual(
            create_client_args[0][1],
            {'region_name': 'us-west-1', 'verify': None, 'endpoint_url': None,
             'config': None}
        )
        self.assertEqual(
            create_client_args[1][1],
            {'region_name': 'us-west-1', 'verify': None, 'endpoint_url': None,
             'config': None}
        )
        # Assert that the second client created is then overridden by the
        # one needed for the source region.
        self.assertEqual(
            create_client_args[2][1],
            {'region_name': 'us-west-2', 'verify': None, 'endpoint_url': None,
             'config': None}
        )

    def test_set_sigv4_clients_with_sse_kms(self):
        session = mock.Mock()
        cmd_arc = CommandArchitecture(
            session, 'sync',
            {'region': 'us-west-1', 'endpoint_url': None, 'verify_ssl': None,
             'source_region': None, 'sse': 'aws:kms'})
        cmd_arc.set_clients()
        self.assertEqual(session.create_client.call_count, 2)
        create_client_call = session.create_client.call_args_list[0]
        create_source_client_call = session.create_client.call_args_list[1]

        # Make sure that both clients are using sigv4 if kms is enabled.
        self.assertEqual(
            create_client_call[1]['config'].signature_version, 's3v4')
        self.assertEqual(
            create_source_client_call[1]['config'].signature_version, 's3v4')

    def test_create_instructions(self):
        """
        This tests to make sure the instructions for any command are
        generated properly.
        """
        cmds = ['cp', 'mv', 'rm', 'sync']

        instructions = {'cp': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'mv': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'rm': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'sync': ['file_generator', 'comparator',
                                 'file_info_builder', 's3_handler']}

        params = {'filters': True, 'region': 'us-east-1', 'endpoint_url': None,
                  'verify_ssl': None, 'is_stream': False}
        for cmd in cmds:
            cmd_arc = CommandArchitecture(self.session, cmd,
                                          {'region': 'us-east-1',
                                           'endpoint_url': None,
                                           'verify_ssl': None,
                                           'is_stream': False})
            cmd_arc.create_instructions()
            self.assertEqual(cmd_arc.instructions, instructions[cmd])

        # Test if there is a filter.
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        self.assertEqual(cmd_arc.instructions, ['file_generator', 'filters',
                                                'file_info_builder',
                                                's3_handler'])
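
    # Note (illustrative observation, not part of the original file): the
    # instruction names above are the pipeline stages that
    # CommandArchitecture wires together when it runs. ``comparator``
    # appears only for ``sync`` because only sync diffs the source against
    # the destination, and ``filters`` is inserted whenever
    # --include/--exclude filters are supplied, as the final assertion
    # above shows.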

    def test_choose_sync_strategy_default(self):
        session = mock.Mock()
        cmd_arc = CommandArchitecture(session, 'sync',
                                      {'region': 'us-east-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None})
        # If no plugins return a sync strategy, the default strategies
        # should be used.
        session.emit.return_value = None
        sync_strategies = cmd_arc.choose_sync_strategies()
        self.assertEqual(
            sync_strategies['file_at_src_and_dest_sync_strategy'].__class__,
            SizeAndLastModifiedSync
        )
        self.assertEqual(
            sync_strategies['file_not_at_dest_sync_strategy'].__class__,
            MissingFileSync
        )
        self.assertEqual(
            sync_strategies['file_not_at_src_sync_strategy'].__class__,
            NeverSync
        )

    def test_choose_sync_strategy_overwrite(self):
        session = mock.Mock()
        cmd_arc = CommandArchitecture(session, 'sync',
                                      {'region': 'us-east-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None})
        # Check that the default sync strategy is overwritten if a plugin
        # returns its sync strategy.
        mock_strategy = mock.Mock()
        mock_strategy.sync_type = 'file_at_src_and_dest'

        mock_not_at_dest_sync_strategy = mock.Mock()
        mock_not_at_dest_sync_strategy.sync_type = 'file_not_at_dest'

        mock_not_at_src_sync_strategy = mock.Mock()
        mock_not_at_src_sync_strategy.sync_type = 'file_not_at_src'

        responses = [(None, mock_strategy),
                     (None, mock_not_at_dest_sync_strategy),
                     (None, mock_not_at_src_sync_strategy)]

        session.emit.return_value = responses
        sync_strategies = cmd_arc.choose_sync_strategies()
        self.assertEqual(
            sync_strategies['file_at_src_and_dest_sync_strategy'],
            mock_strategy
        )
        self.assertEqual(
            sync_strategies['file_not_at_dest_sync_strategy'],
            mock_not_at_dest_sync_strategy
        )
        self.assertEqual(
            sync_strategies['file_not_at_src_sync_strategy'],
            mock_not_at_src_sync_strategy
        )
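
    # Note (illustrative, an assumption about the plugin hook): entries in
    # the list returned by ``session.emit`` are ``(handler, return_value)``
    # pairs, which is why each mocked strategy above is wrapped in a
    # ``(None, strategy)`` tuple; ``choose_sync_strategies`` files each
    # returned strategy under its ``sync_type``.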

    def test_run_cp_put(self):
        # This ensures that the architecture sets up correctly for a ``cp``
        # put command. It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = self._get_file_path(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': local_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 'locals3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None, 'metadata': None,
                  'v2_debug': False}
        config = RuntimeConfig().build_config()
        cmd_arc = CommandArchitecture(self.session, 'cp', params, config)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_error_on_same_line_as_status(self):
        s3_file = 's3://' + 'bucket-does-not-exist' + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = self._get_file_path(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': False, 'quiet': False,
                  'src': local_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 'locals3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None, 'metadata': None,
                  'v2_debug': False}
        self.http_response.status_code = 400
        self.parsed_responses = [{'Error': {
            'Code': 'BucketNotExists',
            'Message': 'Bucket does not exist'}}]
        cmd_arc = CommandArchitecture(
            self.session, 'cp', params, RuntimeConfig().build_config())
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        # We also need to verify that the error message is on the *same*
        # line as the "upload failed" line, to make it easier to track.
        output_str = (
            "upload failed: %s to %s An error" % (
                rel_local_file, s3_file))
        self.assertIn(output_str, self.err_output.getvalue())

    def test_run_cp_get(self):
        # This ensures that the architecture sets up correctly for a ``cp``
        # get command. It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = self._get_file_path(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': local_file, 'filters': filters,
                  'paths_type': 's3local', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None, 'v2_debug': False,
                  'case_conflict': 'ignore'}
        self.parsed_responses = [{"ETag": "abcd", "ContentLength": 100,
                                  "LastModified": "2014-01-09T20:45:49.000Z"}]
        config = RuntimeConfig().build_config()
        cmd_arc = CommandArchitecture(self.session, 'cp', params, config)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) download: %s to %s" % (s3_file, rel_local_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_cp_copy(self):
        # This ensures that the architecture sets up correctly for a ``cp``
        # copy command. It is just a dry run, but all of the components
        # need to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 's3s3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None, 'v2_debug': False}
        self.parsed_responses = [{"ETag": "abcd", "ContentLength": 100,
                                  "LastModified": "2014-01-09T20:45:49.000Z"}]
        config = RuntimeConfig().build_config()
        cmd_arc = CommandArchitecture(self.session, 'cp', params, config)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) copy: %s to %s" % (s3_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_mv(self):
        # This ensures that the architecture sets up correctly for a ``mv``
        # command. It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 's3s3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None,
                  'is_move': True, 'v2_debug': False}
        self.parsed_responses = [{"ETag": "abcd", "ContentLength": 100,
                                  "LastModified": "2014-01-09T20:45:49.000Z"}]
        config = RuntimeConfig().build_config()
        cmd_arc = CommandArchitecture(self.session, 'mv', params, config)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) move: %s to %s" % (s3_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_remove(self):
        # This ensures that the architecture sets up correctly for a ``rm``
        # command. It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 's3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None,
                  'v2_debug': False}
        self.parsed_responses = [{"ETag": "abcd", "ContentLength": 100,
                                  "LastModified": "2014-01-09T20:45:49.000Z"}]
        config = RuntimeConfig().build_config()
        cmd_arc = CommandArchitecture(self.session, 'rm', params, config)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) delete: %s" % s3_file
        self.assertIn(output_str, self.output.getvalue())

    def test_run_sync(self):
        # This ensures that the architecture sets up correctly for a ``sync``
        # command. It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        s3_prefix = 's3://' + self.bucket + '/'
        local_dir = self.loc_files[3]
        rel_local_file = self._get_file_path(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': True, 'dryrun': True, 'quiet': False,
                  'src': local_dir, 'dest': s3_prefix, 'filters': filters,
                  'paths_type': 'locals3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': 'us-west-2',
                  'v2_debug': False}
        self.parsed_responses = [
            {"CommonPrefixes": [], "Contents": [
                {"Key": "text1.txt", "Size": 100,
                 "LastModified": "2014-01-09T20:45:49.000Z"}]},
            {"CommonPrefixes": [], "Contents": []}]
        config = RuntimeConfig().build_config()
        cmd_arc = CommandArchitecture(self.session, 'sync', params, config)
        cmd_arc.create_instructions()
        cmd_arc.set_clients()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_v2_debug_mv(self):
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {'dir_op': False, 'quiet': False, 'dryrun': True,
                  'src': s3_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 's3s3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None,
                  'is_move': True, 'v2_debug': True}
        self.parsed_responses = [{"ETag": "abcd", "ContentLength": 100,
                                  "LastModified": "2014-01-09T20:45:49.000Z"}]
        config = RuntimeConfig().build_config()
        cmd_arc = CommandArchitecture(self.session, 'mv', params, config)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        warning_str = (
            'AWS CLI v2 UPGRADE WARNING: In AWS CLI v2, object '
            'properties will be copied from the source in '
            'multipart copies between S3 buckets initiated via '
            '`aws s3` commands'
        )
        output_str = f"(dryrun) move: {s3_file} to {s3_file}"
        self.assertIn(warning_str, self.err_output.getvalue())
        self.assertIn(output_str, self.output.getvalue())


class CommandParametersTest(unittest.TestCase):
    def setUp(self):
        self.environ = {}
        self.environ_patch = mock.patch('os.environ', self.environ)
        self.environ_patch.start()
        self.mock = mock.MagicMock()
        self.mock.get_config = mock.MagicMock(return_value={'region': None})
        self.file_creator = FileCreator()
        self.loc_files = make_loc_files(self.file_creator)
        self.bucket = 's3testbucket'
        self.session = mock.Mock()
        self.parsed_global = FakeArgs(
            region='us-west-2',
            endpoint_url=None,
            verify_ssl=False)

    def tearDown(self):
        self.environ_patch.stop()
        clean_loc_files(self.file_creator)

    def test_check_path_type_pass(self):
        # This tests the class's ability to determine whether the correct
        # path types have been passed for a particular command. It tests
        # every possible combination that is correct for every command.
        cmds = {'cp': ['locals3', 's3s3', 's3local'],
                'mv': ['locals3', 's3s3', 's3local'],
                'rm': ['s3'], 'mb': ['s3'], 'rb': ['s3'],
                'sync': ['locals3', 's3s3', 's3local']}
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]

        combos = {'s3s3': [s3_file, s3_file],
                  's3local': [s3_file, local_file],
                  'locals3': [local_file, s3_file],
                  's3': [s3_file],
                  'local': [local_file],
                  'locallocal': [local_file, local_file]}

        for cmd in cmds.keys():
            cmd_param = CommandParameters(cmd, {}, '',
                                          self.session, self.parsed_global)
            cmd_param.add_region(mock.Mock())
            correct_paths = cmds[cmd]
            for path_args in correct_paths:
                cmd_param.check_path_type(combos[path_args])

    def test_check_path_type_fail(self):
        # This tests the class's ability to determine whether the correct
        # path types have been passed for a particular command. It tests
        # every possible combination that is incorrect for every command.
        cmds = {'cp': ['local', 'locallocal', 's3'],
                'mv': ['local', 'locallocal', 's3'],
                'rm': ['local', 'locallocal', 's3s3', 'locals3', 's3local'],
                'ls': ['local', 'locallocal', 's3s3', 'locals3', 's3local'],
                'sync': ['local', 'locallocal', 's3'],
                'mb': ['local', 'locallocal', 's3s3', 'locals3', 's3local'],
                'rb': ['local', 'locallocal', 's3s3', 'locals3', 's3local']}
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]

        combos = {'s3s3': [s3_file, s3_file],
                  's3local': [s3_file, local_file],
                  'locals3': [local_file, s3_file],
                  's3': [s3_file],
                  'local': [local_file],
                  'locallocal': [local_file, local_file]}

        for cmd in cmds.keys():
            cmd_param = CommandParameters(cmd, {}, '',
                                          self.session, self.parsed_global)
            cmd_param.add_region(mock.Mock())
            wrong_paths = cmds[cmd]
            for path_args in wrong_paths:
                with self.assertRaises(TypeError):
                    cmd_param.check_path_type(combos[path_args])

    def test_validate_streaming_paths_upload(self):
        paths = ['-', 's3://bucket']
        cmd_params = CommandParameters('cp', {}, '')
        cmd_params.add_paths(paths)
        self.assertTrue(cmd_params.parameters['is_stream'])
        self.assertTrue(cmd_params.parameters['only_show_errors'])
        self.assertFalse(cmd_params.parameters['dir_op'])

    def test_validate_streaming_paths_download(self):
        paths = ['s3://bucket/key', '-']
        cmd_params = CommandParameters('cp', {}, '')
        cmd_params.add_paths(paths)
        self.assertTrue(cmd_params.parameters['is_stream'])
        self.assertTrue(cmd_params.parameters['only_show_errors'])
        self.assertFalse(cmd_params.parameters['dir_op'])

    def test_validate_no_streaming_paths(self):
        paths = [self.file_creator.rootdir, 's3://bucket']
        cmd_params = CommandParameters('cp', {}, '')
        cmd_params.add_paths(paths)
        self.assertFalse(cmd_params.parameters['is_stream'])
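
    # Note (illustrative summary of the tests above, not part of the
    # original file): a ``-`` path designates streaming via stdin/stdout;
    # streaming transfers force ``only_show_errors`` (presumably so progress
    # output does not mix with the streamed data) and are never directory
    # operations.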

    def test_validate_checksum_algorithm_download_error(self):
        paths = ['s3://bucket/key', self.file_creator.rootdir]
        parameters = {'checksum_algorithm': 'CRC32'}
        cmd_params = CommandParameters('cp', parameters, '')
        with self.assertRaises(ValueError) as cm:
            cmd_params.add_paths(paths)
        self.assertIn('Expected checksum-algorithm parameter to be used '
                      'with one of following path formats',
                      str(cm.exception))

    def test_validate_checksum_algorithm_sync_download_error(self):
        paths = ['s3://bucket/key', self.file_creator.rootdir]
        parameters = {'checksum_algorithm': 'CRC32C'}
        cmd_params = CommandParameters('sync', parameters, '')
        with self.assertRaises(ValueError) as cm:
            cmd_params.add_paths(paths)
        self.assertIn('Expected checksum-algorithm parameter to be used '
                      'with one of following path formats',
                      str(cm.exception))

    def test_validate_checksum_mode_upload_error(self):
        paths = [self.file_creator.rootdir, 's3://bucket/key']
        parameters = {'checksum_mode': 'ENABLED'}
        cmd_params = CommandParameters('cp', parameters, '')
        with self.assertRaises(ValueError) as cm:
            cmd_params.add_paths(paths)
        self.assertIn('Expected checksum-mode parameter to be used '
                      'with one of following path formats',
                      str(cm.exception))

    def test_validate_checksum_mode_sync_upload_error(self):
        paths = [self.file_creator.rootdir, 's3://bucket/key']
        parameters = {'checksum_mode': 'ENABLED'}
        cmd_params = CommandParameters('sync', parameters, '')
        with self.assertRaises(ValueError) as cm:
            cmd_params.add_paths(paths)
        self.assertIn('Expected checksum-mode parameter to be used '
                      'with one of following path formats',
                      str(cm.exception))

    def test_validate_checksum_mode_move_error(self):
        paths = ['s3://bucket/key', 's3://bucket2/key']
        parameters = {'checksum_mode': 'ENABLED'}
        cmd_params = CommandParameters('mv', parameters, '')
        with self.assertRaises(ValueError) as cm:
            cmd_params.add_paths(paths)
        self.assertIn('Expected checksum-mode parameter to be used '
                      'with one of following path formats',
                      str(cm.exception))
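
    # Note (illustrative summary of the tests above, not part of the
    # original file): --checksum-algorithm applies when writing objects
    # (uploads), and --checksum-mode applies when reading them back
    # (downloads), so each flag is rejected for path combinations going
    # the other way.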

    def test_validate_streaming_paths_error(self):
        parameters = {'src': '-', 'dest': 's3://bucket'}
        cmd_params = CommandParameters('sync', parameters, '')
        with self.assertRaises(ValueError):
            cmd_params._validate_streaming_paths()

    def test_validate_non_existent_local_path_upload(self):
        non_existent_path = os.path.join(self.file_creator.rootdir, 'foo')
        paths = [non_existent_path, 's3://bucket/']
        cmd_param = CommandParameters('cp', {}, '')
        with self.assertRaises(RuntimeError):
            cmd_param.add_paths(paths)

    def test_add_path_for_non_existent_local_path_download(self):
        non_existent_path = os.path.join(self.file_creator.rootdir, 'foo')
        paths = ['s3://bucket', non_existent_path]
        cmd_param = CommandParameters('cp', {'dir_op': True}, '')
        cmd_param.add_paths(paths)
        self.assertTrue(os.path.exists(non_existent_path))

    def test_validate_sse_c_args_missing_sse(self):
        paths = ['s3://bucket/foo', 's3://bucket/bar']
        params = {'dir_op': False, 'sse_c_key': 'foo'}
        cmd_param = CommandParameters('cp', params, '')
        with self.assertRaisesRegex(ValueError, '--sse-c must be specified'):
            cmd_param.add_paths(paths)

    def test_validate_sse_c_args_missing_sse_c_key(self):
        paths = ['s3://bucket/foo', 's3://bucket/bar']
        params = {'dir_op': False, 'sse_c': 'AES256'}
        cmd_param = CommandParameters('cp', params, '')
        with self.assertRaisesRegex(ValueError,
                                    '--sse-c-key must be specified'):
            cmd_param.add_paths(paths)

    def test_validate_sse_c_args_missing_sse_c_copy_source(self):
        paths = ['s3://bucket/foo', 's3://bucket/bar']
        params = {'dir_op': False, 'sse_c_copy_source_key': 'foo'}
        cmd_param = CommandParameters('cp', params, '')
        with self.assertRaisesRegex(ValueError,
                                    '--sse-c-copy-source must be specified'):
            cmd_param.add_paths(paths)

    def test_validate_sse_c_args_missing_sse_c_copy_source_key(self):
        paths = ['s3://bucket/foo', 's3://bucket/bar']
        params = {'dir_op': False, 'sse_c_copy_source': 'AES256'}
        cmd_param = CommandParameters('cp', params, '')
        with self.assertRaisesRegex(
                ValueError, '--sse-c-copy-source-key must be specified'):
            cmd_param.add_paths(paths)

    def test_validate_sse_c_args_wrong_path_type(self):
        paths = ['s3://bucket/foo', self.file_creator.rootdir]
        params = {'dir_op': False, 'sse_c_copy_source': 'AES256',
                  'sse_c_copy_source_key': 'foo'}
        cmd_param = CommandParameters('cp', params, '')
        with self.assertRaisesRegex(ValueError,
                                    'only supported for copy operations'):
            cmd_param.add_paths(paths)

    def test_adds_is_move(self):
        params = {}
        CommandParameters('mv', params, '',
                          session=self.session,
                          parsed_globals=self.parsed_global)
        self.assertTrue(params.get('is_move'))

        # is_move should only be true for mv.
        params = {}
        CommandParameters('cp', params, '')
        self.assertFalse(params.get('is_move'))


class HelpDocTest(BaseAWSHelpOutputTest):
    def setUp(self):
        super(HelpDocTest, self).setUp()
        self.session = botocore.session.get_session()

    def tearDown(self):
        super(HelpDocTest, self).tearDown()

    def test_s3_help(self):
        # This tests the help command for the s3 service. It checks to make
        # sure the appropriate descriptions are added, including the
        # tutorial.
        s3 = S3(self.session)
        parser = argparse.ArgumentParser()
        parser.add_argument('--paginate', action='store_true')
        parsed_global = parser.parse_args(['--paginate'])
        help_command = s3.create_help_command()
        help_command([], parsed_global)
        self.assert_contains(
            "This section explains prominent concepts "
            "and notations in the set of high-level S3 commands provided.")
        self.assert_contains("Every command takes one or two positional")
        self.assert_contains("* rb")

    def test_s3command_help(self):
        # This tests the help command for an s3 command. It checks to make
        # sure the command prints the appropriate parts. Note the examples
        # are not included because the event was not registered.
        s3command = CpCommand(self.session)
        s3command._arg_table = s3command._build_arg_table()
        parser = argparse.ArgumentParser()
        parser.add_argument('--paginate', action='store_true')
        parsed_global = parser.parse_args(['--paginate'])
        help_command = s3command.create_help_command()
        help_command([], parsed_global)
        self.assert_contains("cp")
        self.assert_contains("[--acl <value>]")
        self.assert_contains("Displays the operations that would be")

    def test_help(self):
        # This ensures that the command appropriately redirects to the help
        # object if ``help`` is the only argument left to be parsed. The
        # generated docs should not have any extra contents.
        s3_command = SyncCommand(self.session)
        s3_command(['help'], [])
        self.assert_contains('sync')
        self.assert_contains("Synopsis")


if __name__ == "__main__":
    unittest.main()