GitHub Repository: aws/aws-cli
Path: blob/develop/tests/functional/s3/test_cp_command.py

#!/usr/bin/env python
# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os

from awscli.testutils import BaseAWSCommandParamsTest, skip_if_windows
from awscli.testutils import capture_input
from awscli.testutils import mock
from awscli.compat import BytesIO
from tests.functional.s3 import BaseS3TransferCommandTest
from tests.functional.s3.test_sync_command import TestSyncCaseConflict
from tests import requires_crt
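

# A minimal stand-in for sys.stdin used by the streaming tests below: the
# CLI reads binary data from sys.stdin.buffer, so exposing the stream
# itself via .buffer lets tests patch sys.stdin with an in-memory BytesIO.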
class BufferedBytesIO(BytesIO):
    @property
    def buffer(self):
        return self


class BaseCPCommandTest(BaseS3TransferCommandTest):
    prefix = 's3 cp '


class TestCPCommand(BaseCPCommandTest):
    def test_operations_used_in_upload(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = '%s %s s3://bucket/key.txt' % (self.prefix, full_path)
        self.parsed_responses = [{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        self.run_cmd(cmdline, expected_rc=0)
        # The only operation we should have called is PutObject.
        self.assertEqual(len(self.operations_called), 1, self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')

    def test_key_name_added_when_only_bucket_provided(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = '%s %s s3://bucket/' % (self.prefix, full_path)
        self.parsed_responses = [{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        self.run_cmd(cmdline, expected_rc=0)
        # The only operation we should have called is PutObject.
        self.assertEqual(len(self.operations_called), 1, self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        self.assertEqual(self.operations_called[0][1]['Key'], 'foo.txt')
        self.assertEqual(self.operations_called[0][1]['Bucket'], 'bucket')

    def test_trailing_slash_appended(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        # Here we're saying s3://bucket instead of s3://bucket/.
        # This should still work the same as if we added the trailing slash.
        cmdline = '%s %s s3://bucket' % (self.prefix, full_path)
        self.parsed_responses = [{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        self.run_cmd(cmdline, expected_rc=0)
        # The only operation we should have called is PutObject.
        self.assertEqual(len(self.operations_called), 1, self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        self.assertEqual(self.operations_called[0][1]['Key'], 'foo.txt')
        self.assertEqual(self.operations_called[0][1]['Bucket'], 'bucket')
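
    # --grants maps the shorthand permissions (read, full, readacl,
    # writeacl) onto the corresponding Grant* parameters of PutObject,
    # as the assertions below show.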
    def test_upload_grants(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = ('%s %s s3://bucket/key.txt --grants read=id=foo '
                   'full=id=bar readacl=id=biz writeacl=id=baz' %
                   (self.prefix, full_path))
        self.parsed_responses = \
            [{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        self.run_cmd(cmdline, expected_rc=0)
        # The only operation we should have called is PutObject.
        self.assertEqual(len(self.operations_called), 1,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        self.assertDictEqual(
            self.operations_called[0][1],
            {'Key': u'key.txt', 'Bucket': u'bucket', 'GrantRead': u'id=foo',
             'GrantFullControl': u'id=bar', 'GrantReadACP': u'id=biz',
             'GrantWriteACP': u'id=baz', 'ContentType': u'text/plain',
             'Body': mock.ANY, 'ChecksumAlgorithm': 'CRC32'}
        )
    def test_upload_expires(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = ('%s %s s3://bucket/key.txt --expires 90' %
                   (self.prefix, full_path))
        self.parsed_responses = \
            [{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        self.run_cmd(cmdline, expected_rc=0)
        # The only operation we should have called is PutObject.
        self.assertEqual(len(self.operations_called), 1,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        self.assertEqual(self.operations_called[0][1]['Key'], 'key.txt')
        self.assertEqual(self.operations_called[0][1]['Bucket'], 'bucket')
        self.assertEqual(self.operations_called[0][1]['Expires'], '90')
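
    # The storage-class tests below all follow the same pattern: upload a
    # small file with --storage-class <value> and assert that the value is
    # forwarded unchanged as the StorageClass parameter of PutObject.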
    def test_upload_standard_ia(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = ('%s %s s3://bucket/key.txt --storage-class STANDARD_IA' %
                   (self.prefix, full_path))
        self.parsed_responses = \
            [{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        args = self.operations_called[0][1]
        self.assertEqual(args['Key'], 'key.txt')
        self.assertEqual(args['Bucket'], 'bucket')
        self.assertEqual(args['StorageClass'], 'STANDARD_IA')

    def test_upload_onezone_ia(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = ('%s %s s3://bucket/key.txt --storage-class ONEZONE_IA' %
                   (self.prefix, full_path))
        self.parsed_responses = \
            [{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        args = self.operations_called[0][1]
        self.assertEqual(args['Key'], 'key.txt')
        self.assertEqual(args['Bucket'], 'bucket')
        self.assertEqual(args['StorageClass'], 'ONEZONE_IA')

    def test_upload_intelligent_tiering(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = ('%s %s s3://bucket/key.txt --storage-class INTELLIGENT_TIERING' %
                   (self.prefix, full_path))
        self.parsed_responses = \
            [{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        args = self.operations_called[0][1]
        self.assertEqual(args['Key'], 'key.txt')
        self.assertEqual(args['Bucket'], 'bucket')
        self.assertEqual(args['StorageClass'], 'INTELLIGENT_TIERING')

    def test_upload_glacier(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = ('%s %s s3://bucket/key.txt --storage-class GLACIER' %
                   (self.prefix, full_path))
        self.parsed_responses = \
            [{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        args = self.operations_called[0][1]
        self.assertEqual(args['Key'], 'key.txt')
        self.assertEqual(args['Bucket'], 'bucket')
        self.assertEqual(args['StorageClass'], 'GLACIER')

    def test_upload_deep_archive(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = ('%s %s s3://bucket/key.txt --storage-class DEEP_ARCHIVE' %
                   (self.prefix, full_path))
        self.parsed_responses = \
            [{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        args = self.operations_called[0][1]
        self.assertEqual(args['Key'], 'key.txt')
        self.assertEqual(args['Bucket'], 'bucket')
        self.assertEqual(args['StorageClass'], 'DEEP_ARCHIVE')
    def test_operations_used_in_download_file(self):
        self.parsed_responses = [
            {
                "ContentLength": "100",
                "LastModified": "00:00:00Z",
                "ETag": '"foo-1"'
            },
            {'ETag': '"foo-1"', 'Body': BytesIO(b'foo')},
        ]
        cmdline = '%s s3://bucket/key.txt %s' % (self.prefix,
                                                 self.files.rootdir)
        self.run_cmd(cmdline, expected_rc=0)
        # The only operations we should have called are HeadObject/GetObject.
        self.assertEqual(len(self.operations_called), 2, self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'HeadObject')
        self.assertEqual(self.operations_called[1][0].name, 'GetObject')

    def test_operations_used_in_recursive_download(self):
        self.parsed_responses = [
            {'ETag': '"foo-1"', 'Contents': [], 'CommonPrefixes': []},
        ]
        cmdline = '%s s3://bucket/key.txt %s --recursive' % (
            self.prefix, self.files.rootdir)
        self.run_cmd(cmdline, expected_rc=0)
        # We called ListObjectsV2 but had no objects to download, so
        # only a single ListObjectsV2 operation should have been made.
        self.assertEqual(len(self.operations_called), 1, self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'ListObjectsV2')
    def test_website_redirect_ignore_paramfile(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = '%s %s s3://bucket/key.txt --website-redirect %s' % \
            (self.prefix, full_path, 'http://someserver')
        self.parsed_responses = [{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        self.run_cmd(cmdline, expected_rc=0)
        # Make sure the specified web address is used as-is, rather than
        # being treated as a paramfile whose contents should be fetched.
        self.assertEqual(
            self.operations_called[0][1]['WebsiteRedirectLocation'],
            'http://someserver'
        )
    def test_metadata_copy(self):
        self.parsed_responses = [
            {
                "ContentLength": "100",
                "LastModified": "00:00:00Z",
                'ETag': '"foo-1"',
            },
            {'ETag': '"foo-1"'},
        ]
        cmdline = ('%s s3://bucket/key.txt s3://bucket/key2.txt'
                   ' --metadata KeyName=Value' % self.prefix)
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 2,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'HeadObject')
        self.assertEqual(self.operations_called[1][0].name, 'CopyObject')
        self.assertEqual(self.operations_called[1][1]['Metadata'],
                         {'KeyName': 'Value'})

    def test_metadata_copy_with_put_object(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        self.parsed_responses = [
            {"ContentLength": "100", "LastModified": "00:00:00Z"},
            {'ETag': '"foo-1"'},
        ]
        cmdline = ('%s %s s3://bucket/key2.txt'
                   ' --metadata KeyName=Value' % (self.prefix, full_path))
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        self.assertEqual(self.operations_called[0][1]['Metadata'],
                         {'KeyName': 'Value'})

    def test_metadata_copy_with_multipart_upload(self):
        full_path = self.files.create_file('foo.txt', 'a' * 10 * (1024 ** 2))
        self.parsed_responses = [
            {'UploadId': 'foo'},
            {'ETag': '"foo-1"'},
            {'ETag': '"foo-2"'},
            {}
        ]
        cmdline = ('%s %s s3://bucket/key2.txt'
                   ' --metadata KeyName=Value' % (self.prefix, full_path))
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 4,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name,
                         'CreateMultipartUpload')
        self.assertEqual(self.operations_called[0][1]['Metadata'],
                         {'KeyName': 'Value'})
    def test_metadata_directive_copy(self):
        self.parsed_responses = [
            {
                "ContentLength": "100",
                "LastModified": "00:00:00Z",
                "ETag": '"foo-1"',
            },
            {'ETag': '"foo-1"'},
        ]
        cmdline = ('%s s3://bucket/key.txt s3://bucket/key2.txt'
                   ' --metadata-directive REPLACE' % self.prefix)
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 2,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'HeadObject')
        self.assertEqual(self.operations_called[1][0].name, 'CopyObject')
        self.assertEqual(self.operations_called[1][1]['MetadataDirective'],
                         'REPLACE')

    def test_no_metadata_directive_for_non_copy(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = '%s %s s3://bucket --metadata-directive REPLACE' % \
            (self.prefix, full_path)
        self.parsed_responses = \
            [{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        self.assertNotIn('MetadataDirective', self.operations_called[0][1])
    def test_cp_succeeds_with_mimetype_errors(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = '%s %s s3://bucket/key.txt' % (self.prefix, full_path)
        self.parsed_responses = [
            {'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        with mock.patch('mimetypes.guess_type') as mock_guess_type:
            # This should raise a UnicodeDecodeError.
            mock_guess_type.side_effect = lambda x: b'\xe2'.decode('ascii')
            self.run_cmd(cmdline, expected_rc=0)
        # Despite the decoding error the command should have succeeded;
        # there was just no content type added.
        self.assertNotIn('ContentType', self.last_kwargs)

    def test_cp_fails_with_utime_errors_but_continues(self):
        full_path = self.files.create_file('foo.txt', '')
        cmdline = '%s s3://bucket/key.txt %s' % (self.prefix, full_path)
        self.parsed_responses = [
            {
                "ContentLength": "100",
                "LastModified": "00:00:00Z",
                "ETag": '"foo-1"'
            },
            {'ETag': '"foo-1"', 'Body': BytesIO(b'foo')}
        ]
        with mock.patch('os.utime') as mock_utime:
            mock_utime.side_effect = OSError(1, '')
            _, err, _ = self.run_cmd(cmdline, expected_rc=2)
            self.assertIn('attempting to modify the utime', err)
    def test_recursive_glacier_download_with_force_glacier(self):
        self.parsed_responses = [
            {
                'Contents': [
                    {
                        'Key': 'foo/bar.txt', 'ContentLength': '100',
                        'LastModified': '00:00:00Z',
                        'StorageClass': 'GLACIER',
                        'Size': 100,
                        'ETag': '"foo-1"',
                    },
                ],
                'CommonPrefixes': []
            },
            {'ETag': '"foo-1"', 'Body': BytesIO(b'foo')},
        ]
        cmdline = '%s s3://bucket/foo %s --recursive --force-glacier-transfer'\
            % (self.prefix, self.files.rootdir)
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 2, self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'ListObjectsV2')
        self.assertEqual(self.operations_called[1][0].name, 'GetObject')

    def test_recursive_glacier_download_without_force_glacier(self):
        self.parsed_responses = [
            {
                'Contents': [
                    {'Key': 'foo/bar.txt', 'ContentLength': '100',
                     'LastModified': '00:00:00Z',
                     'StorageClass': 'GLACIER',
                     'Size': 100},
                ],
                'CommonPrefixes': []
            }
        ]
        cmdline = '%s s3://bucket/foo %s --recursive' % (
            self.prefix, self.files.rootdir)
        _, stderr, _ = self.run_cmd(cmdline, expected_rc=2)
        self.assertEqual(len(self.operations_called), 1, self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'ListObjectsV2')
        self.assertIn('GLACIER', stderr)
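
    # Objects in the GLACIER and DEEP_ARCHIVE storage classes cannot be
    # downloaded directly, so cp warns and skips them unless
    # --force-glacier-transfer or --ignore-glacier-warnings is given;
    # the tests below cover both warning paths.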
    def test_warns_on_glacier_incompatible_operation(self):
        self.parsed_responses = [
            {'ContentLength': '100', 'LastModified': '00:00:00Z',
             'StorageClass': 'GLACIER'},
        ]
        cmdline = ('%s s3://bucket/key.txt .' % self.prefix)
        _, stderr, _ = self.run_cmd(cmdline, expected_rc=2)
        # No download should have been attempted because the operation
        # was skipped as glacier-incompatible.
        self.assertEqual(len(self.operations_called), 1)
        self.assertEqual(self.operations_called[0][0].name, 'HeadObject')
        self.assertIn('GLACIER', stderr)

    def test_warns_on_deep_archive_incompatible_operation(self):
        self.parsed_responses = [
            {'ContentLength': '100', 'LastModified': '00:00:00Z',
             'StorageClass': 'DEEP_ARCHIVE'},
        ]
        cmdline = ('%s s3://bucket/key.txt .' % self.prefix)
        _, stderr, _ = self.run_cmd(cmdline, expected_rc=2)
        # No download should have been attempted because the operation
        # was skipped as deep-archive-incompatible.
        self.assertEqual(len(self.operations_called), 1)
        self.assertEqual(self.operations_called[0][0].name, 'HeadObject')
        self.assertIn('GLACIER', stderr)

    def test_warns_on_glacier_incompatible_operation_for_multipart_file(self):
        self.parsed_responses = [
            {'ContentLength': str(20 * (1024 ** 2)),
             'LastModified': '00:00:00Z',
             'StorageClass': 'GLACIER'},
        ]
        cmdline = ('%s s3://bucket/key.txt .' % self.prefix)
        _, stderr, _ = self.run_cmd(cmdline, expected_rc=2)
        # No download should have been attempted because the operation
        # was skipped as glacier-incompatible.
        self.assertEqual(len(self.operations_called), 1)
        self.assertEqual(self.operations_called[0][0].name, 'HeadObject')
        self.assertIn('GLACIER', stderr)

    def test_warns_on_deep_archive_incompatible_op_for_multipart_file(self):
        self.parsed_responses = [
            {'ContentLength': str(20 * (1024 ** 2)),
             'LastModified': '00:00:00Z',
             'StorageClass': 'DEEP_ARCHIVE'},
        ]
        cmdline = ('%s s3://bucket/key.txt .' % self.prefix)
        _, stderr, _ = self.run_cmd(cmdline, expected_rc=2)
        # No download should have been attempted because the operation
        # was skipped as deep-archive-incompatible.
        self.assertEqual(len(self.operations_called), 1)
        self.assertEqual(self.operations_called[0][0].name, 'HeadObject')
        self.assertIn('GLACIER', stderr)

    def test_turn_off_glacier_warnings(self):
        self.parsed_responses = [
            {'ContentLength': str(20 * (1024 ** 2)),
             'LastModified': '00:00:00Z',
             'StorageClass': 'GLACIER'},
        ]
        cmdline = (
            '%s s3://bucket/key.txt . --ignore-glacier-warnings' % self.prefix)
        _, stderr, _ = self.run_cmd(cmdline, expected_rc=0)
        # The download is still skipped as glacier-incompatible, but no
        # warning should be emitted.
        self.assertEqual(len(self.operations_called), 1)
        self.assertEqual(self.operations_called[0][0].name, 'HeadObject')
        self.assertEqual('', stderr)

    def test_turn_off_glacier_warnings_for_deep_archive(self):
        self.parsed_responses = [
            {'ContentLength': str(20 * (1024 ** 2)),
             'LastModified': '00:00:00Z',
             'StorageClass': 'DEEP_ARCHIVE'},
        ]
        cmdline = (
            '%s s3://bucket/key.txt . --ignore-glacier-warnings' % self.prefix)
        _, stderr, _ = self.run_cmd(cmdline, expected_rc=0)
        # The download is still skipped as glacier-incompatible, but no
        # warning should be emitted.
        self.assertEqual(len(self.operations_called), 1)
        self.assertEqual(self.operations_called[0][0].name, 'HeadObject')
        self.assertEqual('', stderr)
    def test_cp_with_sse_flag(self):
        full_path = self.files.create_file('foo.txt', 'contents')
        cmdline = (
            '%s %s s3://bucket/key.txt --sse' % (
                self.prefix, full_path))
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        self.assertDictEqual(
            self.operations_called[0][1],
            {'Key': 'key.txt', 'Bucket': 'bucket',
             'ChecksumAlgorithm': 'CRC32',
             'ContentType': 'text/plain', 'Body': mock.ANY,
             'ServerSideEncryption': 'AES256'}
        )

    def test_cp_with_sse_c_flag(self):
        full_path = self.files.create_file('foo.txt', 'contents')
        cmdline = (
            '%s %s s3://bucket/key.txt --sse-c --sse-c-key foo' % (
                self.prefix, full_path))
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        self.assertDictEqual(
            self.operations_called[0][1],
            {'Key': 'key.txt', 'Bucket': 'bucket',
             'ChecksumAlgorithm': 'CRC32',
             'ContentType': 'text/plain', 'Body': mock.ANY,
             'SSECustomerAlgorithm': 'AES256', 'SSECustomerKey': 'foo'}
        )

    def test_cp_with_sse_c_fileb(self):
        file_path = self.files.create_file('foo.txt', 'contents')
        key_path = self.files.create_file('foo.key', '')
        key_contents = (
            b'K\xc9G\xe1\xf9&\xee\xd1\x03\xf3\xd4\x10\x18o9E\xc2\xaeD'
            b'\x89(\x18\xea\xda\xf6\x81\xc3\xd2\x9d\\\xa8\xe6'
        )
        with open(key_path, 'wb') as f:
            f.write(key_contents)
        cmdline = (
            '%s %s s3://bucket/key.txt --sse-c --sse-c-key fileb://%s' % (
                self.prefix, file_path, key_path
            )
        )
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')

        expected_args = {
            'Key': 'key.txt', 'Bucket': 'bucket',
            'ChecksumAlgorithm': 'CRC32',
            'ContentType': 'text/plain',
            'Body': mock.ANY,
            'SSECustomerAlgorithm': 'AES256',
            'SSECustomerKey': key_contents,
        }
        self.assertDictEqual(self.operations_called[0][1], expected_args)
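
    # fileb:// makes the CLI read the parameter value as raw bytes, which
    # is what allows a binary SSE-C key to be supplied from a file.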
    def test_cp_with_sse_c_copy_source_fileb(self):
        self.parsed_responses = [
            {
                "AcceptRanges": "bytes",
                "LastModified": "Tue, 12 Jul 2016 21:26:07 GMT",
                "ContentLength": 4,
                "ETag": '"d3b07384d113edec49eaa6238ad5ff00"',
                "Metadata": {},
                "ContentType": "binary/octet-stream"
            },
            {
                "AcceptRanges": "bytes",
                "Metadata": {},
                "ContentType": "binary/octet-stream",
                "ContentLength": 4,
                "ETag": '"d3b07384d113edec49eaa6238ad5ff00"',
                "LastModified": "Tue, 12 Jul 2016 21:26:07 GMT",
                "Body": BytesIO(b'foo\n')
            },
            {}
        ]

        file_path = self.files.create_file('foo.txt', '')
        key_path = self.files.create_file('foo.key', '')
        key_contents = (
            b'K\xc9G\xe1\xf9&\xee\xd1\x03\xf3\xd4\x10\x18o9E\xc2\xaeD'
            b'\x89(\x18\xea\xda\xf6\x81\xc3\xd2\x9d\\\xa8\xe6'
        )
        with open(key_path, 'wb') as f:
            f.write(key_contents)
        cmdline = (
            '%s s3://bucket-one/key.txt s3://bucket/key.txt '
            '--sse-c-copy-source --sse-c-copy-source-key fileb://%s' % (
                self.prefix, key_path
            )
        )
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 2)
        self.assertEqual(self.operations_called[0][0].name, 'HeadObject')
        self.assertEqual(self.operations_called[1][0].name, 'CopyObject')

        expected_args = {
            'Key': 'key.txt', 'Bucket': 'bucket',
            'ContentType': 'text/plain',
            'CopySource': {
                'Bucket': 'bucket-one',
                'Key': 'key.txt'
            },
            'CopySourceSSECustomerAlgorithm': 'AES256',
            'CopySourceSSECustomerKey': key_contents,
        }
        self.assertDictEqual(self.operations_called[1][1], expected_args)

    # Note: ideally the KMS SSE tests with a key id would be integration
    # tests. However, you cannot delete KMS keys, so there would be no way
    # to clean up after the tests.
    def test_cp_upload_with_sse_kms_and_key_id(self):
        full_path = self.files.create_file('foo.txt', 'contents')
        cmdline = (
            '%s %s s3://bucket/key.txt --sse aws:kms --sse-kms-key-id foo' % (
                self.prefix, full_path))
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        self.assertDictEqual(
            self.operations_called[0][1],
            {'Key': 'key.txt', 'Bucket': 'bucket',
             'ChecksumAlgorithm': 'CRC32',
             'ContentType': 'text/plain', 'Body': mock.ANY,
             'SSEKMSKeyId': 'foo', 'ServerSideEncryption': 'aws:kms'}
        )

    def test_cp_upload_large_file_with_sse_kms_and_key_id(self):
        self.parsed_responses = [
            {'UploadId': 'foo'},  # CreateMultipartUpload
            {'ETag': '"foo"'},  # UploadPart
            {'ETag': '"foo"'},  # UploadPart
            {}  # CompleteMultipartUpload
        ]
        full_path = self.files.create_file('foo.txt', 'a' * 10 * (1024 ** 2))
        cmdline = (
            '%s %s s3://bucket/key.txt --sse aws:kms --sse-kms-key-id foo' % (
                self.prefix, full_path))
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 4)

        # We are only really concerned that the CreateMultipartUpload
        # used the KMS key id.
        self.assertEqual(
            self.operations_called[0][0].name, 'CreateMultipartUpload')
        self.assertDictEqual(
            self.operations_called[0][1],
            {'Key': 'key.txt', 'Bucket': 'bucket',
             'ChecksumAlgorithm': 'CRC32',
             'ContentType': 'text/plain',
             'SSEKMSKeyId': 'foo', 'ServerSideEncryption': 'aws:kms'}
        )

    def test_cp_copy_with_sse_kms_and_key_id(self):
        self.parsed_responses = [
            {
                'ContentLength': 5,
                'LastModified': '00:00:00Z',
                'ETag': '"foo"',
            },  # HeadObject
            {}  # CopyObject
        ]
        cmdline = (
            '%s s3://bucket/key1.txt s3://bucket/key2.txt '
            '--sse aws:kms --sse-kms-key-id foo' % self.prefix)
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 2)
        self.assertEqual(self.operations_called[1][0].name, 'CopyObject')
        self.assertDictEqual(
            self.operations_called[1][1],
            {
                'Key': 'key2.txt',
                'Bucket': 'bucket',
                'ContentType': 'text/plain',
                'CopySource': {
                    'Bucket': 'bucket',
                    'Key': 'key1.txt'
                },
                'SSEKMSKeyId': 'foo',
                'ServerSideEncryption': 'aws:kms'
            }
        )

    def test_cp_copy_large_file_with_sse_kms_and_key_id(self):
        self.parsed_responses = [
            {
                'ContentLength': 10 * (1024 ** 2),
                'LastModified': '00:00:00Z',
                'ETag': '"foo"',
            },  # HeadObject
            {'UploadId': 'foo'},  # CreateMultipartUpload
            {'CopyPartResult': {'ETag': '"foo"'}},  # UploadPartCopy
            {'CopyPartResult': {'ETag': '"foo"'}},  # UploadPartCopy
            {}  # CompleteMultipartUpload
        ]
        cmdline = (
            '%s s3://bucket/key1.txt s3://bucket/key2.txt '
            '--sse aws:kms --sse-kms-key-id foo' % self.prefix)
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 5)

        # We are only really concerned that the CreateMultipartUpload
        # used the KMS key id.
        self.assertEqual(
            self.operations_called[1][0].name, 'CreateMultipartUpload')
        self.assertDictEqual(
            self.operations_called[1][1],
            {'Key': 'key2.txt', 'Bucket': 'bucket',
             'ContentType': 'text/plain',
             'SSEKMSKeyId': 'foo', 'ServerSideEncryption': 'aws:kms'}
        )
    def test_cannot_use_recursive_with_stream(self):
        cmdline = '%s - s3://bucket/key.txt --recursive' % self.prefix
        _, stderr, _ = self.run_cmd(cmdline, expected_rc=255)
        self.assertIn(
            'Streaming currently is only compatible with non-recursive cp '
            'commands', stderr)

    def test_upload_unicode_path(self):
        self.parsed_responses = [
            {
                'ContentLength': 10,
                'LastModified': '00:00:00Z',
                'ETag': '"foo"',
            },  # HeadObject
            {'ETag': '"foo"'}  # PutObject
        ]
        command = u's3 cp s3://bucket/\u2603 s3://bucket/\u2713'
        stdout, stderr, rc = self.run_cmd(command, expected_rc=0)

        success_message = (
            u'copy: s3://bucket/\u2603 to s3://bucket/\u2713'
        )
        self.assertIn(success_message, stdout)

        progress_message = 'Completed 10 Bytes'
        self.assertIn(progress_message, stdout)
    def test_cp_with_error_and_warning_permissions(self):
        command = "s3 cp %s s3://bucket/foo.txt"
        self.parsed_responses = [{
            'Error': {
                'Code': 'NoSuchBucket',
                'Message': 'The specified bucket does not exist',
                'BucketName': 'bucket'
            }
        }]
        self.http_response.status_code = 404

        full_path = self.files.create_file('foo.txt', 'bar')

        # Patch get_file_stat to return a value indicating that an invalid
        # timestamp was loaded. It is impossible to set an invalid timestamp
        # on all OSes, so it has to be patched.
        # TODO: find another method to test this behavior without patching.
        with mock.patch(
            'awscli.customizations.s3.filegenerator.get_file_stat',
            return_value=(None, None)
        ):
            _, stderr, rc = self.run_cmd(command % full_path, expected_rc=1)
            self.assertIn('upload failed', stderr)
            self.assertIn('warning: File has an invalid timestamp.', stderr)

    def test_upload_with_checksum_algorithm_crc32(self):
        full_path = self.files.create_file('foo.txt', 'contents')
        cmdline = f'{self.prefix} {full_path} s3://bucket/key.txt --checksum-algorithm CRC32'
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        self.assertEqual(self.operations_called[0][1]['ChecksumAlgorithm'], 'CRC32')
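
    # The CRC32C and CRC64NVME algorithms below are only available when
    # the AWS CRT is installed, hence the @requires_crt decorators.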
    @requires_crt
    def test_upload_with_checksum_algorithm_crc32c(self):
        full_path = self.files.create_file('foo.txt', 'contents')
        cmdline = f'{self.prefix} {full_path} s3://bucket/key.txt --checksum-algorithm CRC32C'
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        self.assertEqual(self.operations_called[0][1]['ChecksumAlgorithm'], 'CRC32C')

    @requires_crt
    def test_upload_with_checksum_algorithm_crc64nvme(self):
        full_path = self.files.create_file('foo.txt', 'contents')
        cmdline = f'{self.prefix} {full_path} s3://bucket/key.txt --checksum-algorithm CRC64NVME'
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        self.assertEqual(self.operations_called[0][1]['ChecksumAlgorithm'], 'CRC64NVME')

    def test_multipart_upload_with_checksum_algorithm_crc32(self):
        full_path = self.files.create_file('foo.txt', 'a' * 10 * (1024 ** 2))
        self.parsed_responses = [
            {'UploadId': 'foo'},
            {'ETag': 'foo-e1', 'ChecksumCRC32': 'foo-1'},
            {'ETag': 'foo-e2', 'ChecksumCRC32': 'foo-2'},
            {}
        ]
        cmdline = ('%s %s s3://bucket/key2.txt'
                   ' --checksum-algorithm CRC32' % (self.prefix, full_path))
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 4, self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'CreateMultipartUpload')
        self.assertEqual(self.operations_called[0][1]['ChecksumAlgorithm'], 'CRC32')
        self.assertEqual(self.operations_called[1][0].name, 'UploadPart')
        self.assertEqual(self.operations_called[1][1]['ChecksumAlgorithm'], 'CRC32')
        self.assertEqual(self.operations_called[3][0].name, 'CompleteMultipartUpload')
        self.assertIn({'ETag': 'foo-e1', 'ChecksumCRC32': 'foo-1', 'PartNumber': mock.ANY},
                      self.operations_called[3][1]['MultipartUpload']['Parts'])
        self.assertIn({'ETag': 'foo-e2', 'ChecksumCRC32': 'foo-2', 'PartNumber': mock.ANY},
                      self.operations_called[3][1]['MultipartUpload']['Parts'])

    def test_copy_with_checksum_algorithm_crc32(self):
        self.parsed_responses = [
            self.head_object_response(),
            # Mocked CopyObject response with a CRC32 checksum specified
            {
                'ETag': 'foo-1',
                'ChecksumCRC32': 'Tq0H4g=='
            }
        ]
        cmdline = f'{self.prefix} s3://bucket1/key.txt s3://bucket2/key.txt --checksum-algorithm CRC32'
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(self.operations_called[1][0].name, 'CopyObject')
        self.assertEqual(self.operations_called[1][1]['ChecksumAlgorithm'], 'CRC32')

    def test_download_with_checksum_mode_crc32(self):
        self.parsed_responses = [
            self.head_object_response(),
            # Mocked GetObject response with a checksum algorithm specified
            {
                'ETag': 'foo-1',
                'ChecksumCRC32': 'Tq0H4g==',
                'Body': BytesIO(b'foo')
            }
        ]
        cmdline = f'{self.prefix} s3://bucket/foo {self.files.rootdir} --checksum-mode ENABLED'
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(self.operations_called[1][0].name, 'GetObject')
        self.assertEqual(self.operations_called[1][1]['ChecksumMode'], 'ENABLED')

    def test_download_with_checksum_mode_crc32c(self):
        self.parsed_responses = [
            self.head_object_response(),
            # Mocked GetObject response with a checksum algorithm specified
            {
                'ETag': 'foo-1',
                'ChecksumCRC32C': 'checksum',
                'Body': BytesIO(b'foo')
            }
        ]
        cmdline = f'{self.prefix} s3://bucket/foo {self.files.rootdir} --checksum-mode ENABLED'
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(self.operations_called[1][0].name, 'GetObject')
        self.assertEqual(self.operations_called[1][1]['ChecksumMode'], 'ENABLED')
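

# Streaming transfers use '-' in place of a local path: uploads read from
# stdin (patched here with BufferedBytesIO) and downloads write to stdout.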
class TestStreamingCPCommand(BaseAWSCommandParamsTest):
    def test_streaming_upload(self):
        command = "s3 cp - s3://bucket/streaming.txt"
        self.parsed_responses = [{
            'ETag': '"c8afdb36c52cf4727836669019e69222"'
        }]

        binary_stdin = BufferedBytesIO(b'foo\n')
        with mock.patch('sys.stdin', binary_stdin):
            self.run_cmd(command)

        self.assertEqual(len(self.operations_called), 1)
        model, args = self.operations_called[0]
        expected_args = {
            'Bucket': 'bucket',
            'Key': 'streaming.txt',
            'ChecksumAlgorithm': 'CRC32',
            'Body': mock.ANY
        }

        self.assertEqual(model.name, 'PutObject')
        self.assertEqual(args, expected_args)

    def test_streaming_upload_with_expected_size(self):
        command = "s3 cp - s3://bucket/streaming.txt --expected-size 4"
        self.parsed_responses = [{
            'ETag': '"c8afdb36c52cf4727836669019e69222"'
        }]

        binary_stdin = BufferedBytesIO(b'foo\n')
        with mock.patch('sys.stdin', binary_stdin):
            self.run_cmd(command)

        self.assertEqual(len(self.operations_called), 1)
        model, args = self.operations_called[0]
        expected_args = {
            'Bucket': 'bucket',
            'Key': 'streaming.txt',
            'ChecksumAlgorithm': 'CRC32',
            'Body': mock.ANY
        }

        self.assertEqual(model.name, 'PutObject')
        self.assertEqual(args, expected_args)

    def test_streaming_upload_error(self):
        command = "s3 cp - s3://bucket/streaming.txt"
        self.parsed_responses = [{
            'Error': {
                'Code': 'NoSuchBucket',
                'Message': 'The specified bucket does not exist',
                'BucketName': 'bucket'
            }
        }]
        self.http_response.status_code = 404

        binary_stdin = BufferedBytesIO(b'foo\n')
        with mock.patch('sys.stdin', binary_stdin):
            _, stderr, _ = self.run_cmd(command, expected_rc=1)

        error_message = (
            'An error occurred (NoSuchBucket) when calling '
            'the PutObject operation: The specified bucket does not exist'
        )
        self.assertIn(error_message, stderr)

    def test_streaming_upload_when_stdin_unavailable(self):
        command = "s3 cp - s3://bucket/streaming.txt"
        self.parsed_responses = [{
            'ETag': '"c8afdb36c52cf4727836669019e69222"'
        }]

        with mock.patch('sys.stdin', None):
            _, stderr, _ = self.run_cmd(command, expected_rc=1)

        expected_message = (
            'stdin is required for this operation, but is not available'
        )
        self.assertIn(expected_message, stderr)
    def test_streaming_download(self):
        command = "s3 cp s3://bucket/streaming.txt -"
        self.parsed_responses = [
            {
                "AcceptRanges": "bytes",
                "LastModified": "Tue, 12 Jul 2016 21:26:07 GMT",
                "ContentLength": 4,
                "ETag": '"d3b07384d113edec49eaa6238ad5ff00"',
                "Metadata": {},
                "ContentType": "binary/octet-stream"
            },
            {
                "AcceptRanges": "bytes",
                "Metadata": {},
                "ContentType": "binary/octet-stream",
                "ContentLength": 4,
                "ETag": '"d3b07384d113edec49eaa6238ad5ff00"',
                "LastModified": "Tue, 12 Jul 2016 21:26:07 GMT",
                "Body": BytesIO(b'foo\n')
            }
        ]

        stdout, stderr, rc = self.run_cmd(command)
        self.assertEqual(stdout, 'foo\n')

        # Ensures no extra operations were called
        self.assertEqual(len(self.operations_called), 2)
        ops = [op[0].name for op in self.operations_called]
        expected_ops = ['HeadObject', 'GetObject']
        self.assertEqual(ops, expected_ops)

    def test_streaming_download_error(self):
        command = "s3 cp s3://bucket/streaming.txt -"
        self.parsed_responses = [{
            'Error': {
                'Code': 'NoSuchBucket',
                'Message': 'The specified bucket does not exist',
                'BucketName': 'bucket'
            }
        }]
        self.http_response.status_code = 404

        _, stderr, _ = self.run_cmd(command, expected_rc=1)
        error_message = (
            'An error occurred (NoSuchBucket) when calling '
            'the HeadObject operation: The specified bucket does not exist'
        )
        self.assertIn(error_message, stderr)
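

# --request-payer should add RequestPayer='requester' to every underlying
# S3 call; these tests check that it is propagated across single,
# multipart, ranged, and recursive transfers.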
class TestCpCommandWithRequesterPayer(BaseCPCommandTest):
    def test_single_upload(self):
        full_path = self.files.create_file('myfile', 'mycontent')
        cmdline = (
            '%s %s s3://mybucket/mykey --request-payer' % (
                self.prefix, full_path
            )
        )
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                ('PutObject', {
                    'Bucket': 'mybucket',
                    'Key': 'mykey',
                    'ChecksumAlgorithm': 'CRC32',
                    'RequestPayer': 'requester',
                    'Body': mock.ANY,
                })
            ]
        )

    def test_multipart_upload(self):
        full_path = self.files.create_file('myfile', 'a' * 10 * (1024 ** 2))
        cmdline = (
            '%s %s s3://mybucket/mykey --request-payer' % (
                self.prefix, full_path))

        self.parsed_responses = [
            {'UploadId': 'myid'},  # CreateMultipartUpload
            {'ETag': '"myetag"'},  # UploadPart
            {'ETag': '"myetag"'},  # UploadPart
            {}  # CompleteMultipartUpload
        ]
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                ('CreateMultipartUpload', {
                    'Bucket': 'mybucket',
                    'Key': 'mykey',
                    'ChecksumAlgorithm': 'CRC32',
                    'RequestPayer': 'requester',
                }),
                ('UploadPart', {
                    'Bucket': 'mybucket',
                    'Key': 'mykey',
                    'ChecksumAlgorithm': 'CRC32',
                    'RequestPayer': 'requester',
                    'UploadId': 'myid',
                    'PartNumber': mock.ANY,
                    'Body': mock.ANY,
                }),
                ('UploadPart', {
                    'Bucket': 'mybucket',
                    'Key': 'mykey',
                    'ChecksumAlgorithm': 'CRC32',
                    'RequestPayer': 'requester',
                    'UploadId': 'myid',
                    'PartNumber': mock.ANY,
                    'Body': mock.ANY,
                }),
                ('CompleteMultipartUpload', {
                    'Bucket': 'mybucket',
                    'Key': 'mykey',
                    'RequestPayer': 'requester',
                    'UploadId': 'myid',
                    'MultipartUpload': {'Parts': [
                        {'ETag': '"myetag"', 'PartNumber': 1},
                        {'ETag': '"myetag"', 'PartNumber': 2}]
                    }
                })
            ]
        )

    def test_recursive_upload(self):
        self.files.create_file('myfile', 'mycontent')
        cmdline = (
            '%s %s s3://mybucket/ --request-payer --recursive' % (
                self.prefix, self.files.rootdir
            )
        )
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                ('PutObject', {
                    'Bucket': 'mybucket',
                    'Key': 'myfile',
                    'ChecksumAlgorithm': 'CRC32',
                    'RequestPayer': 'requester',
                    'Body': mock.ANY,
                })
            ]
        )
    def test_single_download(self):
        cmdline = '%s s3://mybucket/mykey %s --request-payer' % (
            self.prefix, self.files.rootdir)
        self.parsed_responses = [
            self.head_object_response(),
            self.get_object_response()
        ]

        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.head_object_request(
                    'mybucket', 'mykey', RequestPayer='requester'),
                self.get_object_request(
                    'mybucket', 'mykey', RequestPayer='requester'),
            ]
        )

    def test_ranged_download(self):
        cmdline = '%s s3://mybucket/mykey %s --request-payer' % (
            self.prefix, self.files.rootdir)
        self.parsed_responses = [
            self.head_object_response(ContentLength=10 * (1024 ** 2)),
            self.get_object_response(),
            self.get_object_response()
        ]

        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.head_object_request(
                    'mybucket', 'mykey', RequestPayer='requester'),
                self.get_object_request(
                    'mybucket', 'mykey', Range=mock.ANY,
                    RequestPayer='requester',
                    IfMatch='"foo-1"',
                ),
                self.get_object_request(
                    'mybucket', 'mykey', Range=mock.ANY,
                    RequestPayer='requester',
                    IfMatch='"foo-1"',
                ),
            ]
        )

    def test_recursive_download(self):
        cmdline = '%s s3://mybucket/ %s --request-payer --recursive' % (
            self.prefix, self.files.rootdir)
        self.parsed_responses = [
            self.list_objects_response(['mykey']),
            self.get_object_response()
        ]
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.list_objects_request(
                    'mybucket', RequestPayer='requester'),
                self.get_object_request(
                    'mybucket', 'mykey', RequestPayer='requester')
            ]
        )
    def test_single_copy(self):
        cmdline = self.prefix
        cmdline += ' s3://sourcebucket/sourcekey s3://mybucket/mykey'
        cmdline += ' --request-payer'
        self.parsed_responses = [
            self.head_object_response(),
            self.copy_object_response(),
        ]
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.head_object_request(
                    'sourcebucket', 'sourcekey', RequestPayer='requester'
                ),
                self.copy_object_request(
                    'sourcebucket', 'sourcekey', 'mybucket', 'mykey',
                    RequestPayer='requester'
                )
            ]
        )

    def test_multipart_copy(self):
        cmdline = self.prefix
        cmdline += ' s3://sourcebucket/sourcekey s3://mybucket/mykey'
        cmdline += ' --request-payer'
        upload_id = 'id'
        self.parsed_responses = [
            self.head_object_response(ContentLength=10 * (1024 ** 2)),
            self.create_mpu_response(upload_id),
            self.upload_part_copy_response(),
            self.upload_part_copy_response(),
            self.complete_mpu_response(),
        ]
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.head_object_request(
                    'sourcebucket', 'sourcekey', RequestPayer='requester'),
                self.create_mpu_request(
                    'mybucket', 'mykey', RequestPayer='requester'),
                self.upload_part_copy_request(
                    'sourcebucket', 'sourcekey', 'mybucket', 'mykey',
                    upload_id, PartNumber=mock.ANY, RequestPayer='requester',
                    CopySourceRange=mock.ANY, CopySourceIfMatch='"foo-1"'),
                self.upload_part_copy_request(
                    'sourcebucket', 'sourcekey', 'mybucket', 'mykey',
                    upload_id, PartNumber=mock.ANY, RequestPayer='requester',
                    CopySourceRange=mock.ANY, CopySourceIfMatch='"foo-1"'),
                self.complete_mpu_request(
                    'mybucket', 'mykey', upload_id, num_parts=2,
                    RequestPayer='requester')
            ]
        )

    def test_recursive_copy(self):
        cmdline = self.prefix
        cmdline += ' s3://sourcebucket/ s3://mybucket/'
        cmdline += ' --request-payer'
        cmdline += ' --recursive'
        self.parsed_responses = [
            self.list_objects_response(['mykey']),
            self.copy_object_response()
        ]
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.list_objects_request(
                    'sourcebucket', RequestPayer='requester'),
                self.copy_object_request(
                    'sourcebucket', 'mykey', 'mybucket', 'mykey',
                    RequestPayer='requester')
            ]
        )
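

# An S3 access point ARN can stand in for a bucket name in an s3:// URI;
# these tests exercise uploads, downloads, and copies addressed that way.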
class TestAccesspointCPCommand(BaseCPCommandTest):
    def setUp(self):
        self.accesspoint_arn = (
            'arn:aws:s3:us-west-2:123456789012:accesspoint/endpoint'
        )
        super(TestAccesspointCPCommand, self).setUp()

    def test_upload(self):
        filename = self.files.create_file('myfile', 'mycontent')
        cmdline = self.prefix
        cmdline += ' %s' % filename
        cmdline += ' s3://%s/mykey' % self.accesspoint_arn
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.put_object_request(self.accesspoint_arn, 'mykey')
            ]
        )

    def test_recursive_upload(self):
        self.files.create_file('myfile', 'mycontent')
        cmdline = self.prefix
        cmdline += ' %s' % self.files.rootdir
        cmdline += ' s3://%s/' % self.accesspoint_arn
        cmdline += ' --recursive'
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.put_object_request(self.accesspoint_arn, 'myfile')
            ]
        )

    def test_download(self):
        cmdline = self.prefix
        cmdline += ' s3://%s/mykey' % self.accesspoint_arn
        cmdline += ' %s' % self.files.rootdir
        self.parsed_responses = [
            self.head_object_response(),
            self.get_object_response(),
        ]
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.head_object_request(self.accesspoint_arn, 'mykey'),
                self.get_object_request(self.accesspoint_arn, 'mykey'),
            ]
        )

    def test_recursive_download(self):
        cmdline = self.prefix
        cmdline += ' s3://%s' % self.accesspoint_arn
        cmdline += ' %s' % self.files.rootdir
        cmdline += ' --recursive'
        self.parsed_responses = [
            self.list_objects_response(['mykey']),
            self.get_object_response(),
        ]
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.list_objects_request(self.accesspoint_arn),
                self.get_object_request(self.accesspoint_arn, 'mykey'),
            ]
        )
    def test_copy(self):
        cmdline = self.prefix
        cmdline += ' s3://%s/mykey' % self.accesspoint_arn
        accesspoint_arn_dest = self.accesspoint_arn + '-dest'
        cmdline += ' s3://%s' % accesspoint_arn_dest
        self.parsed_responses = [
            self.head_object_response(),
            self.copy_object_response(),
        ]
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.head_object_request(self.accesspoint_arn, 'mykey'),
                self.copy_object_request(
                    self.accesspoint_arn, 'mykey', accesspoint_arn_dest,
                    'mykey'),
            ]
        )

    def test_recursive_copy(self):
        cmdline = self.prefix
        cmdline += ' s3://%s' % self.accesspoint_arn
        accesspoint_arn_dest = self.accesspoint_arn + '-dest'
        cmdline += ' s3://%s' % accesspoint_arn_dest
        cmdline += ' --recursive'
        self.parsed_responses = [
            self.list_objects_response(['mykey']),
            self.copy_object_response(),
        ]
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.list_objects_request(self.accesspoint_arn),
                self.copy_object_request(
                    self.accesspoint_arn, 'mykey', accesspoint_arn_dest,
                    'mykey'),
            ]
        )

    @requires_crt()
    def test_accepts_mrap_arns(self):
        mrap_arn = (
            'arn:aws:s3::123456789012:accesspoint:mfzwi23gnjvgw.mrap'
        )
        filename = self.files.create_file('myfile', 'mycontent')
        cmdline = self.prefix
        cmdline += ' %s' % filename
        cmdline += ' s3://%s/mykey' % mrap_arn
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.put_object_request(mrap_arn, 'mykey')
            ]
        )

    @requires_crt()
    def test_accepts_mrap_arns_with_slash(self):
        mrap_arn = (
            'arn:aws:s3::123456789012:accesspoint/mfzwi23gnjvgw.mrap'
        )
        filename = self.files.create_file('myfile', 'mycontent')
        cmdline = self.prefix
        cmdline += ' %s' % filename
        cmdline += ' s3://%s/mykey' % mrap_arn
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.put_object_request(mrap_arn, 'mykey')
            ]
        )
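

# Recursive cp shares its case-conflict handling with sync, so these tests
# reuse TestSyncCaseConflict with the cp prefix.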
class TestCpRecursiveCaseConflict(TestSyncCaseConflict):
    prefix = 's3 cp --recursive '

    def test_ignore_by_default(self):
        self.files.create_file(self.lower_key, 'mycontent')
        # Note there's no --case-conflict param.
        cmd = f"{self.prefix} s3://bucket {self.files.rootdir}"
        self.parsed_responses = [
            self.list_objects_response([self.upper_key]),
            self.get_object_response(),
        ]
        # Expect success, so not error mode.
        _, stderr, _ = self.run_cmd(cmd, expected_rc=0)
        # No warnings in stderr, so not warn or skip mode.
        assert not stderr
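

# As the tests below assert, S3 Express One Zone directory buckets (names
# ending in '--x-s3') only accept the 'warn' case-conflict mode; 'error'
# and 'skip' are rejected as invalid values.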
class TestS3ExpressCpRecursive(BaseCPCommandTest):
    prefix = 's3 cp --recursive '

    def test_s3_express_error_raises_exception(self):
        cmd = (
            f"{self.prefix} s3://bucket--usw2-az1--x-s3 {self.files.rootdir} "
            "--case-conflict error"
        )
        _, stderr, _ = self.run_cmd(cmd, expected_rc=255)
        assert "`error` is not a valid value" in stderr

    def test_s3_express_skip_raises_exception(self):
        cmd = (
            f"{self.prefix} s3://bucket--usw2-az1--x-s3 {self.files.rootdir} "
            "--case-conflict skip"
        )
        _, stderr, _ = self.run_cmd(cmd, expected_rc=255)
        assert "`skip` is not a valid value" in stderr

    @skip_if_windows("Can't rename to same file")
    def test_s3_express_warn_emits_warning(self):
        cmd = (
            f"{self.prefix} s3://bucket--usw2-az1--x-s3 {self.files.rootdir} "
            "--case-conflict warn"
        )
        self.parsed_responses = [
            self.list_objects_response(['a.txt', 'A.txt']),
            self.get_object_response(),
            self.get_object_response(),
        ]

        _, stderr, _ = self.run_cmd(cmd, expected_rc=0)
        assert "warning: Recursive copies/moves" in stderr