Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
aws
GitHub Repository: aws/aws-cli
Path: blob/develop/tests/unit/customizations/s3/test_subcommands.py
1569 views
1
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2
#
3
# Licensed under the Apache License, Version 2.0 (the "License"). You
4
# may not use this file except in compliance with the License. A copy of
5
# the License is located at
6
#
7
# http://aws.amazon.com/apache2.0
8
#
9
# or in the "license" file accompanying this file. This file is
10
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11
# ANY KIND, either express or implied. See the License for the specific
12
# language governing permissions and limitations under the License.
13
import argparse
14
import os
15
import sys
16
17
import botocore.session
18
from awscli.customizations.s3.s3 import S3
19
from awscli.customizations.s3.subcommands import CommandParameters, \
20
CommandArchitecture, CpCommand, SyncCommand, ListCommand, \
21
RbCommand, get_client
22
from awscli.customizations.s3.transferconfig import RuntimeConfig
23
from awscli.customizations.s3.syncstrategy.base import \
24
SizeAndLastModifiedSync, NeverSync, MissingFileSync
25
from awscli.testutils import mock, unittest, BaseAWSHelpOutputTest, \
26
BaseAWSCommandParamsTest, FileCreator
27
from tests.unit.customizations.s3 import make_loc_files, clean_loc_files
28
from awscli.compat import StringIO
29
30
31
class FakeArgs(object):
    """Lightweight stand-in for an argparse namespace.

    Arbitrary keyword arguments become instance attributes, and
    membership tests (``'name' in args``) report whether a given
    attribute was supplied.
    """

    def __init__(self, **attributes):
        # Every keyword argument is stored directly as an attribute.
        vars(self).update(attributes)

    def __contains__(self, name):
        # Mirror attribute storage so ``in`` checks behave like the
        # lookups the s3 subcommands perform on parsed arguments.
        return name in vars(self)
38
39
class TestGetClient(unittest.TestCase):
    """Tests for the ``get_client`` helper."""

    def test_client(self):
        # get_client should delegate straight to the session's
        # create_client, translating the argument names it accepts.
        fake_session = mock.Mock()
        get_client(
            fake_session,
            region='us-west-1',
            endpoint_url='URL',
            verify=True,
        )
        fake_session.create_client.assert_called_with(
            's3', region_name='us-west-1', endpoint_url='URL', verify=True,
            config=None)
49
class TestRbCommand(unittest.TestCase):
    """Tests for the ``aws s3 rb`` (remove bucket) command."""

    def setUp(self):
        self.session = mock.Mock()
        self.session.get_scoped_config.return_value = {}
        self.rb_command = RbCommand(self.session)
        self.parsed_args = FakeArgs(path='s3://mybucket/',
                                    force=True, dir_op=False)
        self.parsed_globals = FakeArgs(region=None, endpoint_url=None,
                                       verify_ssl=None)
        self.cmd_name = 'awscli.customizations.s3.subcommands.RmCommand'
        self.arch_name = 'awscli.customizations.s3.subcommands.CommandArchitecture'

    def test_rb_command_with_force_deletes_objects_in_bucket(self):
        # The mocked RmCommand returns an instance whose __call__ yields
        # the command's return code; 0 signals success here.  With
        # --force, rb must first empty the bucket by invoking rm with
        # the --recursive option.
        with mock.patch(self.cmd_name) as mocked_rm, \
                mock.patch(self.arch_name):
            mocked_rm.return_value.return_value = 0
            self.rb_command._run_main(self.parsed_args,
                                      parsed_globals=self.parsed_globals)
            mocked_rm.return_value.assert_called_with(
                ['s3://mybucket/', '--recursive'], mock.ANY)

    def test_rb_command_with_force_requires_strict_path(self):
        # A path naming a key rather than the bucket root must be
        # rejected before anything is deleted.
        self.parsed_args.path = 's3://mybucket/mykey'
        with self.assertRaises(ValueError):
            self.rb_command._run_main(self.parsed_args,
                                      parsed_globals=self.parsed_globals)
83
class TestLSCommand(unittest.TestCase):
    """Tests for ``aws s3 ls``.

    The session and its client are fully mocked; each test drives
    ``ListCommand._run_main`` and then inspects the mock to verify that
    only paginated list calls (never direct operation calls) were made
    and that the expected pagination arguments were used.
    """

    def setUp(self):
        # Canned empty responses so _run_main can iterate results.
        self.session = mock.Mock()
        self.session.create_client.return_value.list_buckets.return_value\
            = {'Buckets': []}
        self.session.create_client.return_value.get_paginator.return_value\
            .paginate.return_value = [{'Contents': [], 'CommonPrefixes': []}]

    def _get_fake_kwargs(self, override=None):
        # Baseline parsed-args for ``ls``; tests override individual keys.
        fake_kwargs = {
            'paths': 's3://',
            'dir_op': False,
            'human_readable': False,
            'summarize': False,
            'page_size': None,
            'request_payer': None,
            'bucket_name_prefix': None,
            'bucket_region': None,
        }
        fake_kwargs.update(override or {})

        return fake_kwargs

    def test_ls_command_for_bucket(self):
        """Listing a bucket paginates ListObjectsV2 with the page size."""
        ls_command = ListCommand(self.session)
        parsed_args = FakeArgs(**self._get_fake_kwargs({
            'paths': 's3://mybucket/',
            'page_size': '5',
        }))
        parsed_globals = mock.Mock()
        ls_command._run_main(parsed_args, parsed_globals)
        call = self.session.create_client.return_value.list_objects_v2
        paginate = self.session.create_client.return_value.get_paginator\
            .return_value.paginate
        # We should make no operation calls.
        self.assertEqual(call.call_count, 0)
        # And only a single pagination call to ListObjectsV2.
        self.session.create_client.return_value.get_paginator.\
            assert_called_with('list_objects_v2')
        ref_call_args = {'Bucket': u'mybucket', 'Delimiter': '/',
                         'Prefix': u'',
                         'PaginationConfig': {'PageSize': u'5'}}

        paginate.assert_called_with(**ref_call_args)

    def test_ls_command_with_no_args(self):
        """Bare ``s3://`` paginates ListBuckets with default arguments."""
        ls_command = ListCommand(self.session)
        parsed_global = FakeArgs(region=None, endpoint_url=None,
                                 verify_ssl=None)
        parsed_args = FakeArgs(**self._get_fake_kwargs())
        ls_command._run_main(parsed_args, parsed_global)
        call = self.session.create_client.return_value.list_buckets
        paginate = self.session.create_client.return_value.get_paginator\
            .return_value.paginate

        # We should make no operation calls.
        self.assertEqual(call.call_count, 0)
        # And only a single pagination call to ListBuckets.
        self.session.create_client.return_value.get_paginator.\
            assert_called_with('list_buckets')
        ref_call_args = {'PaginationConfig': {'PageSize': None}}

        paginate.assert_called_with(**ref_call_args)

        # Verify get_client
        get_client = self.session.create_client
        args = get_client.call_args
        self.assertEqual(args, mock.call(
            's3', region_name=None, endpoint_url=None, verify=None,
            config=None))

    def test_ls_with_bucket_name_prefix(self):
        """--bucket-name-prefix is forwarded as Prefix to ListBuckets."""
        ls_command = ListCommand(self.session)
        parsed_args = FakeArgs(**self._get_fake_kwargs({
            'bucket_name_prefix': 'myprefix',
        }))
        parsed_globals = FakeArgs(
            region=None,
            endpoint_url=None,
            verify_ssl=None,
        )
        ls_command._run_main(parsed_args, parsed_globals)
        call = self.session.create_client.return_value.list_objects
        paginate = self.session.create_client.return_value.get_paginator\
            .return_value.paginate
        # We should make no operation calls.
        self.assertEqual(call.call_count, 0)
        self.session.create_client.return_value.get_paginator.\
            assert_called_with('list_buckets')
        ref_call_args = {
            'PaginationConfig': {'PageSize': None},
            'Prefix': 'myprefix',
        }

        paginate.assert_called_with(**ref_call_args)

    def test_ls_with_bucket_region(self):
        """--bucket-region is forwarded as BucketRegion to ListBuckets."""
        ls_command = ListCommand(self.session)
        parsed_args = FakeArgs(**self._get_fake_kwargs({
            'bucket_region': 'us-west-1',
        }))
        parsed_globals = FakeArgs(
            region=None,
            endpoint_url=None,
            verify_ssl=None,
        )
        ls_command._run_main(parsed_args, parsed_globals)
        call = self.session.create_client.return_value.list_objects
        paginate = self.session.create_client.return_value.get_paginator\
            .return_value.paginate
        # We should make no operation calls.
        self.assertEqual(call.call_count, 0)
        self.session.create_client.return_value.get_paginator.\
            assert_called_with('list_buckets')
        ref_call_args = {
            'PaginationConfig': {'PageSize': None},
            'BucketRegion': 'us-west-1',
        }

        paginate.assert_called_with(**ref_call_args)

    def test_ls_with_verify_argument(self):
        """Global --no-verify-ssl is passed through to create_client."""
        ls_command = ListCommand(self.session)
        parsed_global = FakeArgs(region='us-west-2', endpoint_url=None,
                                 verify_ssl=False)
        parsed_args = FakeArgs(**self._get_fake_kwargs({}))
        ls_command._run_main(parsed_args, parsed_global)
        # Verify get_client
        get_client = self.session.create_client
        args = get_client.call_args
        self.assertEqual(args, mock.call(
            's3', region_name='us-west-2', endpoint_url=None, verify=False,
            config=None))

    def test_ls_with_requester_pays(self):
        """--request-payer adds RequestPayer to the pagination call."""
        ls_command = ListCommand(self.session)
        parsed_args = FakeArgs(**self._get_fake_kwargs({
            'paths': 's3://mybucket/',
            'page_size': '5',
            'request_payer': 'requester',
        }))
        parsed_globals = mock.Mock()
        ls_command._run_main(parsed_args, parsed_globals)
        call = self.session.create_client.return_value.list_objects
        paginate = self.session.create_client.return_value.get_paginator\
            .return_value.paginate
        # We should make no operation calls.
        self.assertEqual(call.call_count, 0)
        # And only a single pagination call to ListObjectsV2.
        self.session.create_client.return_value.get_paginator.\
            assert_called_with('list_objects_v2')
        ref_call_args = {
            'Bucket': u'mybucket', 'Delimiter': '/',
            'Prefix': u'', 'PaginationConfig': {'PageSize': '5'},
            'RequestPayer': 'requester',
        }

        paginate.assert_called_with(**ref_call_args)
243
class CommandArchitectureTest(BaseAWSCommandParamsTest):
    """End-to-end wiring tests for ``CommandArchitecture``.

    These tests exercise client creation, instruction-list generation,
    sync-strategy selection, and (mostly dry-run) execution of the cp,
    mv, rm and sync commands.  stdout/stderr are captured so the
    commands' progress output can be asserted on.
    """

    def setUp(self):
        super(CommandArchitectureTest, self).setUp()
        self.session = self.driver.session
        self.bucket = 'mybucket'
        # Local scratch files created for upload/sync scenarios.
        self.file_creator = FileCreator()
        self.loc_files = make_loc_files(self.file_creator)
        # Capture stdout/stderr; the s3 commands print progress there.
        self.output = StringIO()
        self.err_output = StringIO()
        self.saved_stdout = sys.stdout
        self.saved_stderr = sys.stderr
        sys.stdout = self.output
        sys.stderr = self.err_output

    def tearDown(self):
        # Restore the real streams before the base class tears down.
        self.output.close()
        self.err_output.close()
        sys.stdout = self.saved_stdout
        sys.stderr = self.saved_stderr

        super(CommandArchitectureTest, self).tearDown()
        clean_loc_files(self.file_creator)

    def _get_file_path(self, file):
        try:
            return os.path.relpath(file)
        except ValueError:
            # In some cases (usually it happens inside Windows based GitHub
            # Action) tests are situated on one volume and temp folder on
            # another one, in such a case there is no relative path between
            # them and we use absolute path instead
            return os.path.abspath(file)

    def test_set_client_no_source(self):
        """Without a source region, both clients use the same arguments."""
        session = mock.Mock()
        cmd_arc = CommandArchitecture(session, 'sync',
                                      {'region': 'us-west-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None,
                                       'source_region': None})
        cmd_arc.set_clients()
        self.assertEqual(session.create_client.call_count, 2)
        self.assertEqual(
            session.create_client.call_args_list[0],
            mock.call(
                's3', region_name='us-west-1', endpoint_url=None, verify=None,
                config=None)
        )
        # A client created with the same arguments as the first should be used
        # for the source client since no source region was provided.
        self.assertEqual(
            session.create_client.call_args_list[1],
            mock.call(
                's3', region_name='us-west-1', endpoint_url=None, verify=None,
                config=None)
        )

    def test_set_client_with_source(self):
        """With --source-region, the source client is re-created for it."""
        session = mock.Mock()
        cmd_arc = CommandArchitecture(session, 'sync',
                                      {'region': 'us-west-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None,
                                       'paths_type': 's3s3',
                                       'source_region': 'us-west-2'})
        cmd_arc.set_clients()
        create_client_args = session.create_client.call_args_list
        # Three create_client calls are made: the destination client, the
        # initial source client, and the replacement source client built
        # for the source region.
        self.assertEqual(len(create_client_args), 3)
        self.assertEqual(
            create_client_args[0][1],
            {'region_name': 'us-west-1', 'verify': None, 'endpoint_url': None,
             'config': None}
        )
        self.assertEqual(
            create_client_args[1][1],
            {'region_name': 'us-west-1', 'verify': None, 'endpoint_url': None,
             'config': None}
        )
        # Assert override the second client created with the one needed for the
        # source region.
        self.assertEqual(
            create_client_args[2][1],
            {'region_name': 'us-west-2', 'verify': None, 'endpoint_url': None,
             'config': None}
        )

    def test_set_sigv4_clients_with_sse_kms(self):
        """SSE-KMS forces Signature Version 4 on both clients."""
        session = mock.Mock()
        cmd_arc = CommandArchitecture(
            session, 'sync',
            {'region': 'us-west-1', 'endpoint_url': None, 'verify_ssl': None,
             'source_region': None, 'sse': 'aws:kms'})
        cmd_arc.set_clients()
        self.assertEqual( session.create_client.call_count, 2)
        create_client_call = session.create_client.call_args_list[0]
        create_source_client_call = session.create_client.call_args_list[1]

        # Make sure that both clients are using sigv4 if kms is enabled.
        self.assertEqual(
            create_client_call[1]['config'].signature_version, 's3v4')
        self.assertEqual(
            create_source_client_call[1]['config'].signature_version, 's3v4')

    def test_create_instructions(self):
        """
        This tests to make sure the instructions for any command is generated
        properly.
        """
        cmds = ['cp', 'mv', 'rm', 'sync']

        instructions = {'cp': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'mv': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'rm': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'sync': ['file_generator', 'comparator',
                                 'file_info_builder', 's3_handler']}

        params = {'filters': True, 'region': 'us-east-1', 'endpoint_url': None,
                  'verify_ssl': None, 'is_stream': False}
        for cmd in cmds:
            cmd_arc = CommandArchitecture(self.session, cmd,
                                          {'region': 'us-east-1',
                                           'endpoint_url': None,
                                           'verify_ssl': None,
                                           'is_stream': False})
            cmd_arc.create_instructions()
            self.assertEqual(cmd_arc.instructions, instructions[cmd])

        # Test if there is a filter.
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        self.assertEqual(cmd_arc.instructions, ['file_generator', 'filters',
                                                'file_info_builder',
                                                's3_handler'])

    def test_choose_sync_strategy_default(self):
        """With no plugin strategies registered, the defaults are used."""
        session = mock.Mock()
        cmd_arc = CommandArchitecture(session, 'sync',
                                      {'region': 'us-east-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None})
        # Check if no plugins return their sync strategy. Should
        # result in the default strategies
        session.emit.return_value = None
        sync_strategies = cmd_arc.choose_sync_strategies()
        self.assertEqual(
            sync_strategies['file_at_src_and_dest_sync_strategy'].__class__,
            SizeAndLastModifiedSync
        )
        self.assertEqual(
            sync_strategies['file_not_at_dest_sync_strategy'].__class__,
            MissingFileSync
        )
        self.assertEqual(
            sync_strategies['file_not_at_src_sync_strategy'].__class__,
            NeverSync
        )

    def test_choose_sync_strategy_overwrite(self):
        """Plugin-provided strategies replace the defaults per sync_type."""
        session = mock.Mock()
        cmd_arc = CommandArchitecture(session, 'sync',
                                      {'region': 'us-east-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None})
        # Check that the default sync strategy is overwritten if a plugin
        # returns its sync strategy.
        mock_strategy = mock.Mock()
        mock_strategy.sync_type = 'file_at_src_and_dest'

        mock_not_at_dest_sync_strategy = mock.Mock()
        mock_not_at_dest_sync_strategy.sync_type = 'file_not_at_dest'

        mock_not_at_src_sync_strategy = mock.Mock()
        mock_not_at_src_sync_strategy.sync_type = 'file_not_at_src'

        responses = [(None, mock_strategy),
                     (None, mock_not_at_dest_sync_strategy),
                     (None, mock_not_at_src_sync_strategy)]

        session.emit.return_value = responses
        sync_strategies = cmd_arc.choose_sync_strategies()
        self.assertEqual(
            sync_strategies['file_at_src_and_dest_sync_strategy'],
            mock_strategy
        )
        self.assertEqual(
            sync_strategies['file_not_at_dest_sync_strategy'],
            mock_not_at_dest_sync_strategy
        )
        self.assertEqual(
            sync_strategies['file_not_at_src_sync_strategy'],
            mock_not_at_src_sync_strategy
        )

    def test_run_cp_put(self):
        # This ensures that the architecture sets up correctly for a ``cp`` put
        # command. It is just just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = self._get_file_path(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': local_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 'locals3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None, 'metadata': None}
        config = RuntimeConfig().build_config()
        cmd_arc = CommandArchitecture(self.session, 'cp', params, config)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_error_on_same_line_as_status(self):
        """A failed upload prints the error on the same line as its status."""
        s3_file = 's3://' + 'bucket-does-not-exist' + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = self._get_file_path(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': False, 'quiet': False,
                  'src': local_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 'locals3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None, 'metadata': None}
        # Simulate the service rejecting the request.
        self.http_response.status_code = 400
        self.parsed_responses = [{'Error': {
            'Code': 'BucketNotExists',
            'Message': 'Bucket does not exist'}}]
        cmd_arc = CommandArchitecture(
            self.session, 'cp', params, RuntimeConfig().build_config())
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        # Also, we need to verify that the error message is on the *same* line
        # as the upload failed line, to make it easier to track.
        output_str = (
            "upload failed: %s to %s An error" % (
                rel_local_file, s3_file))
        self.assertIn(output_str, self.err_output.getvalue())

    def test_run_cp_get(self):
        # This ensures that the architecture sets up correctly for a ``cp`` get
        # command. It is just just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = self._get_file_path(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': local_file, 'filters': filters,
                  'paths_type': 's3local', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None}
        self.parsed_responses = [{"ETag": "abcd", "ContentLength": 100,
                                  "LastModified": "2014-01-09T20:45:49.000Z"}]
        config = RuntimeConfig().build_config()
        cmd_arc = CommandArchitecture(self.session, 'cp', params, config)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) download: %s to %s" % (s3_file, rel_local_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_cp_copy(self):
        # This ensures that the architecture sets up correctly for a ``cp``
        # copy command. It is just just a dry run, but all of the
        # components need to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 's3s3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None}
        self.parsed_responses = [{"ETag": "abcd", "ContentLength": 100,
                                  "LastModified": "2014-01-09T20:45:49.000Z"}]
        config = RuntimeConfig().build_config()
        cmd_arc = CommandArchitecture(self.session, 'cp', params, config)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) copy: %s to %s" % (s3_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_mv(self):
        # This ensures that the architecture sets up correctly for a ``mv``
        # command. It is just just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 's3s3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None,
                  'is_move': True}
        self.parsed_responses = [{"ETag": "abcd", "ContentLength": 100,
                                  "LastModified": "2014-01-09T20:45:49.000Z"}]
        config = RuntimeConfig().build_config()
        cmd_arc = CommandArchitecture(self.session, 'mv', params, config)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) move: %s to %s" % (s3_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_remove(self):
        # This ensures that the architecture sets up correctly for a ``rm``
        # command. It is just just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 's3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': None}
        self.parsed_responses = [{"ETag": "abcd", "ContentLength": 100,
                                  "LastModified": "2014-01-09T20:45:49.000Z"}]
        config = RuntimeConfig().build_config()
        cmd_arc = CommandArchitecture(self.session, 'rm', params, config)
        cmd_arc.set_clients()
        cmd_arc.create_instructions()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) delete: %s" % s3_file
        self.assertIn(output_str, self.output.getvalue())

    def test_run_sync(self):
        # This ensures that the architecture sets up correctly for a ``sync``
        # command. It is just just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        s3_prefix = 's3://' + self.bucket + '/'
        local_dir = self.loc_files[3]
        rel_local_file = self._get_file_path(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': True, 'dryrun': True, 'quiet': False,
                  'src': local_dir, 'dest': s3_prefix, 'filters': filters,
                  'paths_type': 'locals3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False, 'source_region': 'us-west-2'}
        # First response: the bucket listing; second: an empty page.
        self.parsed_responses = [
            {"CommonPrefixes": [], "Contents": [
                {"Key": "text1.txt", "Size": 100,
                 "LastModified": "2014-01-09T20:45:49.000Z"}]},
            {"CommonPrefixes": [], "Contents": []}]
        config = RuntimeConfig().build_config()
        cmd_arc = CommandArchitecture(self.session, 'sync', params, config)
        cmd_arc.create_instructions()
        cmd_arc.set_clients()
        self.patch_make_request()
        cmd_arc.run()
        output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())
617
class CommandParametersTest(unittest.TestCase):
    """Tests for ``CommandParameters`` path/parameter validation.

    Fix applied: the checksum-related tests previously asserted against
    ``cm.msg``.  On unittest's assertRaises context manager, ``msg`` is
    only the optional user-supplied assertion message (None here); the
    raised exception itself is exposed as ``cm.exception``.  The message
    checks now run *after* the ``with`` block (statements following the
    raising call inside the block would never execute) and inspect
    ``str(cm.exception)``.
    """

    def setUp(self):
        # Isolate the environment so ambient AWS config can't leak in.
        self.environ = {}
        self.environ_patch = mock.patch('os.environ', self.environ)
        self.environ_patch.start()
        self.mock = mock.MagicMock()
        self.mock.get_config = mock.MagicMock(return_value={'region': None})
        self.file_creator = FileCreator()
        self.loc_files = make_loc_files(self.file_creator)
        self.bucket = 's3testbucket'
        self.session = mock.Mock()
        self.parsed_global = FakeArgs(
            region='us-west-2',
            endpoint_url=None,
            verify_ssl=False)

    def tearDown(self):
        self.environ_patch.stop()
        clean_loc_files(self.file_creator)

    def test_check_path_type_pass(self):
        # This tests the class's ability to determine whether the correct
        # path types have been passed for a particular command. It test every
        # possible combination that is correct for every command.
        cmds = {'cp': ['locals3', 's3s3', 's3local'],
                'mv': ['locals3', 's3s3', 's3local'],
                'rm': ['s3'], 'mb': ['s3'], 'rb': ['s3'],
                'sync': ['locals3', 's3s3', 's3local']}
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]

        combos = {'s3s3': [s3_file, s3_file],
                  's3local': [s3_file, local_file],
                  'locals3': [local_file, s3_file],
                  's3': [s3_file],
                  'local': [local_file],
                  'locallocal': [local_file, local_file]}

        for cmd in cmds.keys():
            cmd_param = CommandParameters(cmd, {}, '',
                                          self.session, self.parsed_global)
            cmd_param.add_region(mock.Mock())
            correct_paths = cmds[cmd]
            for path_args in correct_paths:
                cmd_param.check_path_type(combos[path_args])

    def test_check_path_type_fail(self):
        # This tests the class's ability to determine whether the correct
        # path types have been passed for a particular command. It test every
        # possible combination that is incorrect for every command.
        cmds = {'cp': ['local', 'locallocal', 's3'],
                'mv': ['local', 'locallocal', 's3'],
                'rm': ['local', 'locallocal', 's3s3', 'locals3', 's3local'],
                'ls': ['local', 'locallocal', 's3s3', 'locals3', 's3local'],
                'sync': ['local', 'locallocal', 's3'],
                'mb': ['local', 'locallocal', 's3s3', 'locals3', 's3local'],
                'rb': ['local', 'locallocal', 's3s3', 'locals3', 's3local']}
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]

        combos = {'s3s3': [s3_file, s3_file],
                  's3local': [s3_file, local_file],
                  'locals3': [local_file, s3_file],
                  's3': [s3_file],
                  'local': [local_file],
                  'locallocal': [local_file, local_file]}

        for cmd in cmds.keys():
            cmd_param = CommandParameters(cmd, {}, '',
                                          self.session, self.parsed_global)
            cmd_param.add_region(mock.Mock())
            wrong_paths = cmds[cmd]
            for path_args in wrong_paths:
                with self.assertRaises(TypeError):
                    cmd_param.check_path_type(combos[path_args])

    def test_validate_streaming_paths_upload(self):
        """``-`` as the source marks the operation as a stream upload."""
        paths = ['-', 's3://bucket']
        cmd_params = CommandParameters('cp', {}, '')
        cmd_params.add_paths(paths)
        self.assertTrue(cmd_params.parameters['is_stream'])
        self.assertTrue(cmd_params.parameters['only_show_errors'])
        self.assertFalse(cmd_params.parameters['dir_op'])

    def test_validate_streaming_paths_download(self):
        """``-`` as the destination marks a stream download."""
        paths = ['s3://bucket/key', '-']
        cmd_params = CommandParameters('cp', {}, '')
        cmd_params.add_paths(paths)
        self.assertTrue(cmd_params.parameters['is_stream'])
        self.assertTrue(cmd_params.parameters['only_show_errors'])
        self.assertFalse(cmd_params.parameters['dir_op'])

    def test_validate_no_streaming_paths(self):
        """Ordinary file paths are not treated as streams."""
        paths = [self.file_creator.rootdir, 's3://bucket']
        cmd_params = CommandParameters('cp', {}, '')
        cmd_params.add_paths(paths)
        self.assertFalse(cmd_params.parameters['is_stream'])

    def test_validate_checksum_algorithm_download_error(self):
        """--checksum-algorithm is rejected for cp downloads."""
        paths = ['s3://bucket/key', self.file_creator.rootdir]
        parameters = {'checksum_algorithm': 'CRC32'}
        cmd_params = CommandParameters('cp', parameters, '')
        with self.assertRaises(ValueError) as cm:
            cmd_params.add_paths(paths)
        # Check the captured exception after the block; cm.msg is only
        # the optional user-supplied assertion message.
        self.assertIn(
            'Expected checksum-algorithm parameter to be used with one of following path formats',
            str(cm.exception))

    def test_validate_checksum_algorithm_sync_download_error(self):
        """--checksum-algorithm is rejected for sync downloads."""
        paths = ['s3://bucket/key', self.file_creator.rootdir]
        parameters = {'checksum_algorithm': 'CRC32C'}
        cmd_params = CommandParameters('sync', parameters, '')
        with self.assertRaises(ValueError) as cm:
            cmd_params.add_paths(paths)
        self.assertIn(
            'Expected checksum-algorithm parameter to be used with one of following path formats',
            str(cm.exception))

    def test_validate_checksum_mode_upload_error(self):
        """--checksum-mode is rejected for cp uploads."""
        paths = [self.file_creator.rootdir, 's3://bucket/key']
        parameters = {'checksum_mode': 'ENABLED'}
        cmd_params = CommandParameters('cp', parameters, '')
        with self.assertRaises(ValueError) as cm:
            cmd_params.add_paths(paths)
        self.assertIn(
            'Expected checksum-mode parameter to be used with one of following path formats',
            str(cm.exception))

    def test_validate_checksum_mode_sync_upload_error(self):
        """--checksum-mode is rejected for sync uploads."""
        paths = [self.file_creator.rootdir, 's3://bucket/key']
        parameters = {'checksum_mode': 'ENABLED'}
        cmd_params = CommandParameters('sync', parameters, '')
        with self.assertRaises(ValueError) as cm:
            cmd_params.add_paths(paths)
        self.assertIn(
            'Expected checksum-mode parameter to be used with one of following path formats',
            str(cm.exception))

    def test_validate_checksum_mode_move_error(self):
        """--checksum-mode is rejected for S3-to-S3 moves."""
        paths = ['s3://bucket/key', 's3://bucket2/key']
        parameters = {'checksum_mode': 'ENABLED'}
        cmd_params = CommandParameters('mv', parameters, '')
        with self.assertRaises(ValueError) as cm:
            cmd_params.add_paths(paths)
        self.assertIn(
            'Expected checksum-mode parameter to be used with one of following path formats',
            str(cm.exception))

    def test_validate_streaming_paths_error(self):
        """Streaming (``-``) paths are not supported for sync."""
        parameters = {'src': '-', 'dest': 's3://bucket'}
        cmd_params = CommandParameters('sync', parameters, '')
        with self.assertRaises(ValueError):
            cmd_params._validate_streaming_paths()

    def test_validate_non_existent_local_path_upload(self):
        """Uploading a path that does not exist locally is an error."""
        non_existent_path = os.path.join(self.file_creator.rootdir, 'foo')
        paths = [non_existent_path, 's3://bucket/']
        cmd_param = CommandParameters('cp', {}, '')
        with self.assertRaises(RuntimeError):
            cmd_param.add_paths(paths)

    def test_add_path_for_non_existsent_local_path_download(self):
        """A missing local destination directory is created for downloads."""
        non_existent_path = os.path.join(self.file_creator.rootdir, 'foo')
        paths = ['s3://bucket', non_existent_path]
        cmd_param = CommandParameters('cp', {'dir_op': True}, '')
        cmd_param.add_paths(paths)
        self.assertTrue(os.path.exists(non_existent_path))

    def test_validate_sse_c_args_missing_sse(self):
        """--sse-c-key without --sse-c is rejected."""
        paths = ['s3://bucket/foo', 's3://bucket/bar']
        params = {'dir_op': False, 'sse_c_key': 'foo'}
        cmd_param = CommandParameters('cp', params, '')
        with self.assertRaisesRegex(ValueError, '--sse-c must be specified'):
            cmd_param.add_paths(paths)

    def test_validate_sse_c_args_missing_sse_c_key(self):
        """--sse-c without --sse-c-key is rejected."""
        paths = ['s3://bucket/foo', 's3://bucket/bar']
        params = {'dir_op': False, 'sse_c': 'AES256'}
        cmd_param = CommandParameters('cp', params, '')
        with self.assertRaisesRegex(ValueError,
                                    '--sse-c-key must be specified'):
            cmd_param.add_paths(paths)

    def test_validate_sse_c_args_missing_sse_c_copy_source(self):
        """--sse-c-copy-source-key without --sse-c-copy-source is rejected."""
        paths = ['s3://bucket/foo', 's3://bucket/bar']
        params = {'dir_op': False, 'sse_c_copy_source_key': 'foo'}
        cmd_param = CommandParameters('cp', params, '')
        with self.assertRaisesRegex(ValueError,
                                    '--sse-c-copy-source must be specified'):
            cmd_param.add_paths(paths)

    def test_validate_sse_c_args_missing_sse_c_copy_source_key(self):
        """--sse-c-copy-source without its key is rejected."""
        paths = ['s3://bucket/foo', 's3://bucket/bar']
        params = {'dir_op': False, 'sse_c_copy_source': 'AES256'}
        cmd_param = CommandParameters('cp', params, '')
        with self.assertRaisesRegex(ValueError,
                                    '--sse-c-copy-source-key must be specified'):
            cmd_param.add_paths(paths)

    def test_validate_sse_c_args_wrong_path_type(self):
        """--sse-c-copy-source only applies to S3-to-S3 copies."""
        paths = ['s3://bucket/foo', self.file_creator.rootdir]
        params = {'dir_op': False, 'sse_c_copy_source': 'AES256',
                  'sse_c_copy_source_key': 'foo'}
        cmd_param = CommandParameters('cp', params, '')
        with self.assertRaisesRegex(ValueError,
                                    'only supported for copy operations'):
            cmd_param.add_paths(paths)

    def test_adds_is_move(self):
        """Constructing CommandParameters sets is_move only for ``mv``."""
        params = {}
        CommandParameters('mv', params, '',
                          session=self.session,
                          parsed_globals=self.parsed_global)
        self.assertTrue(params.get('is_move'))

        # is_move should only be true for mv
        params = {}
        CommandParameters('cp', params, '')
        self.assertFalse(params.get('is_move'))
828
class HelpDocTest(BaseAWSHelpOutputTest):
    """Checks the rendered help output of the high-level s3 commands."""

    def setUp(self):
        super(HelpDocTest, self).setUp()
        self.session = botocore.session.get_session()

    def tearDown(self):
        super(HelpDocTest, self).tearDown()

    def _paginate_globals(self):
        # Build a minimal parsed-globals namespace with --paginate set,
        # mirroring what the CLI driver hands to a help command.
        arg_parser = argparse.ArgumentParser()
        arg_parser.add_argument('--paginate', action='store_true')
        return arg_parser.parse_args(['--paginate'])

    def test_s3_help(self):
        # The top-level ``aws s3 help`` output must carry the service
        # description, the tutorial introduction, and the command list.
        s3_service = S3(self.session)
        help_command = s3_service.create_help_command()
        help_command([], self._paginate_globals())
        self.assert_contains(
            "This section explains prominent concepts "
            "and notations in the set of high-level S3 commands provided.")
        self.assert_contains("Every command takes one or two positional")
        self.assert_contains("* rb")

    def test_s3command_help(self):
        # Help for an individual s3 command should show its name, its
        # synopsis arguments, and option descriptions.  Examples are
        # absent because the doc event was never registered.
        cp_command = CpCommand(self.session)
        cp_command._arg_table = cp_command._build_arg_table()
        help_command = cp_command.create_help_command()
        help_command([], self._paginate_globals())
        self.assert_contains("cp")
        self.assert_contains("[--acl <value>]")
        self.assert_contains("Displays the operations that would be")

    def test_help(self):
        # Passing only ``help`` must redirect into the help object and
        # render the command's doc sections.
        sync_command = SyncCommand(self.session)
        sync_command(['help'], [])
        self.assert_contains('sync')
        self.assert_contains("Synopsis")
878
if __name__ == "__main__":
    # Allow running this test module directly, outside a test runner.
    unittest.main()
880
881