GitHub Repository: aws/aws-cli
Path: blob/develop/tests/functional/s3/test_cp_command.py

#!/usr/bin/env python
# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os

from awscli.testutils import BaseAWSCommandParamsTest
from awscli.testutils import capture_input
from awscli.testutils import mock
from awscli.compat import BytesIO
from tests.functional.s3 import BaseS3TransferCommandTest
from tests import requires_crt


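# Test double for sys.stdin: the streaming tests below patch sys.stdin with
# an instance of this class. The CLI reads raw bytes from sys.stdin.buffer,
# so `buffer` simply returns the BytesIO instance itself.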
class BufferedBytesIO(BytesIO):
    @property
    def buffer(self):
        return self


class BaseCPCommandTest(BaseS3TransferCommandTest):
    prefix = 's3 cp '


class TestCPCommand(BaseCPCommandTest):
    def test_operations_used_in_upload(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = '%s %s s3://bucket/key.txt' % (self.prefix, full_path)
        self.parsed_responses = [{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        self.run_cmd(cmdline, expected_rc=0)
        # The only operation we should have called is PutObject.
        self.assertEqual(len(self.operations_called), 1, self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')

    def test_key_name_added_when_only_bucket_provided(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = '%s %s s3://bucket/' % (self.prefix, full_path)
        self.parsed_responses = [{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        self.run_cmd(cmdline, expected_rc=0)
        # The only operation we should have called is PutObject.
        self.assertEqual(len(self.operations_called), 1, self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        self.assertEqual(self.operations_called[0][1]['Key'], 'foo.txt')
        self.assertEqual(self.operations_called[0][1]['Bucket'], 'bucket')

    def test_trailing_slash_appended(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        # Here we're saying s3://bucket instead of s3://bucket/
        # This should still work the same as if we added the trailing slash.
        cmdline = '%s %s s3://bucket' % (self.prefix, full_path)
        self.parsed_responses = [{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        self.run_cmd(cmdline, expected_rc=0)
        # The only operation we should have called is PutObject.
        self.assertEqual(len(self.operations_called), 1, self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        self.assertEqual(self.operations_called[0][1]['Key'], 'foo.txt')
        self.assertEqual(self.operations_called[0][1]['Bucket'], 'bucket')

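    # `--grants` takes permission=grantee pairs; each permission maps to the
    # matching PutObject parameter (read -> GrantRead, full ->
    # GrantFullControl, readacl -> GrantReadACP, writeacl -> GrantWriteACP),
    # as the assertions below verify.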
    def test_upload_grants(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = ('%s %s s3://bucket/key.txt --grants read=id=foo '
                   'full=id=bar readacl=id=biz writeacl=id=baz' %
                   (self.prefix, full_path))
        self.parsed_responses = \
            [{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        self.run_cmd(cmdline, expected_rc=0)
        # The only operation we should have called is PutObject.
        self.assertEqual(len(self.operations_called), 1,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        self.assertDictEqual(
            self.operations_called[0][1],
            {'Key': u'key.txt', 'Bucket': u'bucket', 'GrantRead': u'id=foo',
             'GrantFullControl': u'id=bar', 'GrantReadACP': u'id=biz',
             'GrantWriteACP': u'id=baz', 'ContentType': u'text/plain',
             'Body': mock.ANY, 'ChecksumAlgorithm': 'CRC32'}
        )

    def test_upload_expires(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = ('%s %s s3://bucket/key.txt --expires 90' %
                   (self.prefix, full_path))
        self.parsed_responses = \
            [{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        self.run_cmd(cmdline, expected_rc=0)
        # The only operation we should have called is PutObject.
        self.assertEqual(len(self.operations_called), 1,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        self.assertEqual(self.operations_called[0][1]['Key'], 'key.txt')
        self.assertEqual(self.operations_called[0][1]['Bucket'], 'bucket')
        self.assertEqual(self.operations_called[0][1]['Expires'], '90')

    def test_upload_standard_ia(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = ('%s %s s3://bucket/key.txt --storage-class STANDARD_IA' %
                   (self.prefix, full_path))
        self.parsed_responses = \
            [{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        args = self.operations_called[0][1]
        self.assertEqual(args['Key'], 'key.txt')
        self.assertEqual(args['Bucket'], 'bucket')
        self.assertEqual(args['StorageClass'], 'STANDARD_IA')

    def test_upload_onezone_ia(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = ('%s %s s3://bucket/key.txt --storage-class ONEZONE_IA' %
                   (self.prefix, full_path))
        self.parsed_responses = \
            [{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        args = self.operations_called[0][1]
        self.assertEqual(args['Key'], 'key.txt')
        self.assertEqual(args['Bucket'], 'bucket')
        self.assertEqual(args['StorageClass'], 'ONEZONE_IA')

    def test_upload_intelligent_tiering(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = ('%s %s s3://bucket/key.txt --storage-class INTELLIGENT_TIERING' %
                   (self.prefix, full_path))
        self.parsed_responses = \
            [{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        args = self.operations_called[0][1]
        self.assertEqual(args['Key'], 'key.txt')
        self.assertEqual(args['Bucket'], 'bucket')
        self.assertEqual(args['StorageClass'], 'INTELLIGENT_TIERING')

    def test_upload_glacier(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = ('%s %s s3://bucket/key.txt --storage-class GLACIER' %
                   (self.prefix, full_path))
        self.parsed_responses = \
            [{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        args = self.operations_called[0][1]
        self.assertEqual(args['Key'], 'key.txt')
        self.assertEqual(args['Bucket'], 'bucket')
        self.assertEqual(args['StorageClass'], 'GLACIER')

    def test_upload_deep_archive(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = ('%s %s s3://bucket/key.txt --storage-class DEEP_ARCHIVE' %
                   (self.prefix, full_path))
        self.parsed_responses = \
            [{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        args = self.operations_called[0][1]
        self.assertEqual(args['Key'], 'key.txt')
        self.assertEqual(args['Bucket'], 'bucket')
        self.assertEqual(args['StorageClass'], 'DEEP_ARCHIVE')

    def test_operations_used_in_download_file(self):
        self.parsed_responses = [
            {
                "ContentLength": "100",
                "LastModified": "00:00:00Z",
                "ETag": '"foo-1"'
            },
            {'ETag': '"foo-1"', 'Body': BytesIO(b'foo')},
        ]
        cmdline = '%s s3://bucket/key.txt %s' % (self.prefix,
                                                 self.files.rootdir)
        self.run_cmd(cmdline, expected_rc=0)
        # The only operations we should have called are HeadObject/GetObject.
        self.assertEqual(len(self.operations_called), 2, self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'HeadObject')
        self.assertEqual(self.operations_called[1][0].name, 'GetObject')

    def test_operations_used_in_recursive_download(self):
        self.parsed_responses = [
            {'ETag': '"foo-1"', 'Contents': [], 'CommonPrefixes': []},
        ]
        cmdline = '%s s3://bucket/key.txt %s --recursive' % (
            self.prefix, self.files.rootdir)
        self.run_cmd(cmdline, expected_rc=0)
        # We called ListObjectsV2 but had no objects to download, so
        # we only have a single ListObjectsV2 operation being called.
        self.assertEqual(len(self.operations_called), 1, self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'ListObjectsV2')

    def test_website_redirect_ignore_paramfile(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = '%s %s s3://bucket/key.txt --website-redirect %s' % \
            (self.prefix, full_path, 'http://someserver')
        self.parsed_responses = [{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        self.run_cmd(cmdline, expected_rc=0)
        # Make sure the redirect value is used as a literal URL rather than
        # being treated as a paramfile whose contents would be fetched.
        self.assertEqual(
            self.operations_called[0][1]['WebsiteRedirectLocation'],
            'http://someserver'
        )

    def test_metadata_copy(self):
        self.parsed_responses = [
            {"ContentLength": "100", "LastModified": "00:00:00Z"},
            {'ETag': '"foo-1"'},
        ]
        cmdline = ('%s s3://bucket/key.txt s3://bucket/key2.txt'
                   ' --metadata KeyName=Value' % self.prefix)
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 2,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'HeadObject')
        self.assertEqual(self.operations_called[1][0].name, 'CopyObject')
        self.assertEqual(self.operations_called[1][1]['Metadata'],
                         {'KeyName': 'Value'})

    def test_metadata_copy_with_put_object(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        self.parsed_responses = [
            {"ContentLength": "100", "LastModified": "00:00:00Z"},
            {'ETag': '"foo-1"'},
        ]
        cmdline = ('%s %s s3://bucket/key2.txt'
                   ' --metadata KeyName=Value' % (self.prefix, full_path))
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        self.assertEqual(self.operations_called[0][1]['Metadata'],
                         {'KeyName': 'Value'})

    def test_metadata_copy_with_multipart_upload(self):
        full_path = self.files.create_file('foo.txt', 'a' * 10 * (1024 ** 2))
        self.parsed_responses = [
            {'UploadId': 'foo'},
            {'ETag': '"foo-1"'},
            {'ETag': '"foo-2"'},
            {}
        ]
        cmdline = ('%s %s s3://bucket/key2.txt'
                   ' --metadata KeyName=Value' % (self.prefix, full_path))
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 4,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name,
                         'CreateMultipartUpload')
        self.assertEqual(self.operations_called[0][1]['Metadata'],
                         {'KeyName': 'Value'})

    def test_metadata_directive_copy(self):
        self.parsed_responses = [
            {"ContentLength": "100", "LastModified": "00:00:00Z"},
            {'ETag': '"foo-1"'},
        ]
        cmdline = ('%s s3://bucket/key.txt s3://bucket/key2.txt'
                   ' --metadata-directive REPLACE' % self.prefix)
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 2,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'HeadObject')
        self.assertEqual(self.operations_called[1][0].name, 'CopyObject')
        self.assertEqual(self.operations_called[1][1]['MetadataDirective'],
                         'REPLACE')

    def test_no_metadata_directive_for_non_copy(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = '%s %s s3://bucket --metadata-directive REPLACE' % \
            (self.prefix, full_path)
        self.parsed_responses = \
            [{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        self.assertNotIn('MetadataDirective', self.operations_called[0][1])

    def test_cp_succeeds_with_mimetype_errors(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = '%s %s s3://bucket/key.txt' % (self.prefix, full_path)
        self.parsed_responses = [
            {'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        with mock.patch('mimetypes.guess_type') as mock_guess_type:
            # This should raise a UnicodeDecodeError.
            mock_guess_type.side_effect = lambda x: b'\xe2'.decode('ascii')
            self.run_cmd(cmdline, expected_rc=0)
        # Despite the decoding error the command should still succeed;
        # it just does not add a content type.
        self.assertNotIn('ContentType', self.last_kwargs)

    def test_cp_fails_with_utime_errors_but_continues(self):
        full_path = self.files.create_file('foo.txt', '')
        cmdline = '%s s3://bucket/key.txt %s' % (self.prefix, full_path)
        self.parsed_responses = [
            {
                "ContentLength": "100",
                "LastModified": "00:00:00Z",
                "ETag": '"foo-1"'
            },
            {'ETag': '"foo-1"', 'Body': BytesIO(b'foo')}
        ]
        with mock.patch('os.utime') as mock_utime:
            mock_utime.side_effect = OSError(1, '')
            _, err, _ = self.run_cmd(cmdline, expected_rc=2)
        self.assertIn('attempting to modify the utime', err)

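    # Objects in the GLACIER storage class cannot be downloaded directly
    # unless they have been restored; `--force-glacier-transfer` makes the
    # CLI attempt the transfer anyway instead of skipping with a warning.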
    def test_recursive_glacier_download_with_force_glacier(self):
        self.parsed_responses = [
            {
                'Contents': [
                    {
                        'Key': 'foo/bar.txt', 'ContentLength': '100',
                        'LastModified': '00:00:00Z',
                        'StorageClass': 'GLACIER',
                        'Size': 100,
                        'ETag': '"foo-1"',
                    },
                ],
                'CommonPrefixes': []
            },
            {'ETag': '"foo-1"', 'Body': BytesIO(b'foo')},
        ]
        cmdline = '%s s3://bucket/foo %s --recursive --force-glacier-transfer'\
                  % (self.prefix, self.files.rootdir)
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 2, self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'ListObjectsV2')
        self.assertEqual(self.operations_called[1][0].name, 'GetObject')

    def test_recursive_glacier_download_without_force_glacier(self):
        self.parsed_responses = [
            {
                'Contents': [
                    {'Key': 'foo/bar.txt', 'ContentLength': '100',
                     'LastModified': '00:00:00Z',
                     'StorageClass': 'GLACIER',
                     'Size': 100},
                ],
                'CommonPrefixes': []
            }
        ]
        cmdline = '%s s3://bucket/foo %s --recursive' % (
            self.prefix, self.files.rootdir)
        _, stderr, _ = self.run_cmd(cmdline, expected_rc=2)
        self.assertEqual(len(self.operations_called), 1, self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'ListObjectsV2')
        self.assertIn('GLACIER', stderr)

    def test_warns_on_glacier_incompatible_operation(self):
        self.parsed_responses = [
            {'ContentLength': '100', 'LastModified': '00:00:00Z',
             'StorageClass': 'GLACIER'},
        ]
        cmdline = ('%s s3://bucket/key.txt .' % self.prefix)
        _, stderr, _ = self.run_cmd(cmdline, expected_rc=2)
        # No download should have been attempted because the operation
        # was skipped as Glacier-incompatible.
        self.assertEqual(len(self.operations_called), 1)
        self.assertEqual(self.operations_called[0][0].name, 'HeadObject')
        self.assertIn('GLACIER', stderr)

    def test_warns_on_deep_archive_incompatible_operation(self):
        self.parsed_responses = [
            {'ContentLength': '100', 'LastModified': '00:00:00Z',
             'StorageClass': 'DEEP_ARCHIVE'},
        ]
        cmdline = ('%s s3://bucket/key.txt .' % self.prefix)
        _, stderr, _ = self.run_cmd(cmdline, expected_rc=2)
        # No download should have been attempted because the operation
        # was skipped as deep-archive-incompatible.
        self.assertEqual(len(self.operations_called), 1)
        self.assertEqual(self.operations_called[0][0].name, 'HeadObject')
        self.assertIn('GLACIER', stderr)

    def test_warns_on_glacier_incompatible_operation_for_multipart_file(self):
        self.parsed_responses = [
            {'ContentLength': str(20 * (1024 ** 2)),
             'LastModified': '00:00:00Z',
             'StorageClass': 'GLACIER'},
        ]
        cmdline = ('%s s3://bucket/key.txt .' % self.prefix)
        _, stderr, _ = self.run_cmd(cmdline, expected_rc=2)
        # No download should have been attempted because the operation
        # was skipped as Glacier-incompatible.
        self.assertEqual(len(self.operations_called), 1)
        self.assertEqual(self.operations_called[0][0].name, 'HeadObject')
        self.assertIn('GLACIER', stderr)

    def test_warns_on_deep_archive_incompatible_op_for_multipart_file(self):
        self.parsed_responses = [
            {'ContentLength': str(20 * (1024 ** 2)),
             'LastModified': '00:00:00Z',
             'StorageClass': 'DEEP_ARCHIVE'},
        ]
        cmdline = ('%s s3://bucket/key.txt .' % self.prefix)
        _, stderr, _ = self.run_cmd(cmdline, expected_rc=2)
        # No download should have been attempted because the operation
        # was skipped as deep-archive-incompatible.
        self.assertEqual(len(self.operations_called), 1)
        self.assertEqual(self.operations_called[0][0].name, 'HeadObject')
        self.assertIn('GLACIER', stderr)

    def test_turn_off_glacier_warnings(self):
        self.parsed_responses = [
            {'ContentLength': str(20 * (1024 ** 2)),
             'LastModified': '00:00:00Z',
             'StorageClass': 'GLACIER'},
        ]
        cmdline = (
            '%s s3://bucket/key.txt . --ignore-glacier-warnings' % self.prefix)
        _, stderr, _ = self.run_cmd(cmdline, expected_rc=0)
        # No download should have been attempted because the operation
        # was skipped as Glacier-incompatible, and the warning suppressed.
        self.assertEqual(len(self.operations_called), 1)
        self.assertEqual(self.operations_called[0][0].name, 'HeadObject')
        self.assertEqual('', stderr)

    def test_turn_off_glacier_warnings_for_deep_archive(self):
        self.parsed_responses = [
            {'ContentLength': str(20 * (1024 ** 2)),
             'LastModified': '00:00:00Z',
             'StorageClass': 'DEEP_ARCHIVE'},
        ]
        cmdline = (
            '%s s3://bucket/key.txt . --ignore-glacier-warnings' % self.prefix)
        _, stderr, _ = self.run_cmd(cmdline, expected_rc=0)
        # No download should have been attempted because the operation
        # was skipped as Glacier-incompatible, and the warning suppressed.
        self.assertEqual(len(self.operations_called), 1)
        self.assertEqual(self.operations_called[0][0].name, 'HeadObject')
        self.assertEqual('', stderr)

    def test_cp_with_sse_flag(self):
        full_path = self.files.create_file('foo.txt', 'contents')
        cmdline = (
            '%s %s s3://bucket/key.txt --sse' % (
                self.prefix, full_path))
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        self.assertDictEqual(
            self.operations_called[0][1],
            {'Key': 'key.txt', 'Bucket': 'bucket',
             'ChecksumAlgorithm': 'CRC32',
             'ContentType': 'text/plain', 'Body': mock.ANY,
             'ServerSideEncryption': 'AES256'}
        )

    def test_cp_with_sse_c_flag(self):
        full_path = self.files.create_file('foo.txt', 'contents')
        cmdline = (
            '%s %s s3://bucket/key.txt --sse-c --sse-c-key foo' % (
                self.prefix, full_path))
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        self.assertDictEqual(
            self.operations_called[0][1],
            {'Key': 'key.txt', 'Bucket': 'bucket',
             'ChecksumAlgorithm': 'CRC32',
             'ContentType': 'text/plain', 'Body': mock.ANY,
             'SSECustomerAlgorithm': 'AES256', 'SSECustomerKey': 'foo'}
        )

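    # The fileb:// prefix reads the argument as raw bytes, so a binary
    # SSE-C key round-trips without any text decoding.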
    def test_cp_with_sse_c_fileb(self):
        file_path = self.files.create_file('foo.txt', 'contents')
        key_path = self.files.create_file('foo.key', '')
        key_contents = (
            b'K\xc9G\xe1\xf9&\xee\xd1\x03\xf3\xd4\x10\x18o9E\xc2\xaeD'
            b'\x89(\x18\xea\xda\xf6\x81\xc3\xd2\x9d\\\xa8\xe6'
        )
        with open(key_path, 'wb') as f:
            f.write(key_contents)
        cmdline = (
            '%s %s s3://bucket/key.txt --sse-c --sse-c-key fileb://%s' % (
                self.prefix, file_path, key_path
            )
        )
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')

        expected_args = {
            'Key': 'key.txt', 'Bucket': 'bucket',
            'ChecksumAlgorithm': 'CRC32',
            'ContentType': 'text/plain',
            'Body': mock.ANY,
            'SSECustomerAlgorithm': 'AES256',
            'SSECustomerKey': key_contents,
        }
        self.assertDictEqual(self.operations_called[0][1], expected_args)

    def test_cp_with_sse_c_copy_source_fileb(self):
        self.parsed_responses = [
            {
                "AcceptRanges": "bytes",
                "LastModified": "Tue, 12 Jul 2016 21:26:07 GMT",
                "ContentLength": 4,
                "ETag": '"d3b07384d113edec49eaa6238ad5ff00"',
                "Metadata": {},
                "ContentType": "binary/octet-stream"
            },
            {
                "AcceptRanges": "bytes",
                "Metadata": {},
                "ContentType": "binary/octet-stream",
                "ContentLength": 4,
                "ETag": '"d3b07384d113edec49eaa6238ad5ff00"',
                "LastModified": "Tue, 12 Jul 2016 21:26:07 GMT",
                "Body": BytesIO(b'foo\n')
            },
            {}
        ]

        file_path = self.files.create_file('foo.txt', '')
        key_path = self.files.create_file('foo.key', '')
        key_contents = (
            b'K\xc9G\xe1\xf9&\xee\xd1\x03\xf3\xd4\x10\x18o9E\xc2\xaeD'
            b'\x89(\x18\xea\xda\xf6\x81\xc3\xd2\x9d\\\xa8\xe6'
        )
        with open(key_path, 'wb') as f:
            f.write(key_contents)
        cmdline = (
            '%s s3://bucket-one/key.txt s3://bucket/key.txt '
            '--sse-c-copy-source --sse-c-copy-source-key fileb://%s' % (
                self.prefix, key_path
            )
        )
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 2)
        self.assertEqual(self.operations_called[0][0].name, 'HeadObject')
        self.assertEqual(self.operations_called[1][0].name, 'CopyObject')

        expected_args = {
            'Key': 'key.txt', 'Bucket': 'bucket',
            'ContentType': 'text/plain',
            'CopySource': {
                'Bucket': 'bucket-one',
                'Key': 'key.txt'
            },
            'CopySourceSSECustomerAlgorithm': 'AES256',
            'CopySourceSSECustomerKey': key_contents,
        }
        self.assertDictEqual(self.operations_called[1][1], expected_args)


    # Note: ideally the KMS SSE tests with a key id would be integration
    # tests. However, KMS keys cannot be deleted, so there would be no
    # way to clean up after the tests.
    def test_cp_upload_with_sse_kms_and_key_id(self):
        full_path = self.files.create_file('foo.txt', 'contents')
        cmdline = (
            '%s %s s3://bucket/key.txt --sse aws:kms --sse-kms-key-id foo' % (
                self.prefix, full_path))
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        self.assertDictEqual(
            self.operations_called[0][1],
            {'Key': 'key.txt', 'Bucket': 'bucket',
             'ChecksumAlgorithm': 'CRC32',
             'ContentType': 'text/plain', 'Body': mock.ANY,
             'SSEKMSKeyId': 'foo', 'ServerSideEncryption': 'aws:kms'}
        )

    def test_cp_upload_large_file_with_sse_kms_and_key_id(self):
        self.parsed_responses = [
            {'UploadId': 'foo'},  # CreateMultipartUpload
            {'ETag': '"foo"'},  # UploadPart
            {'ETag': '"foo"'},  # UploadPart
            {}  # CompleteMultipartUpload
        ]
        full_path = self.files.create_file('foo.txt', 'a' * 10 * (1024 ** 2))
        cmdline = (
            '%s %s s3://bucket/key.txt --sse aws:kms --sse-kms-key-id foo' % (
                self.prefix, full_path))
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 4)

        # We are only really concerned that the CreateMultipartUpload
        # used the KMS key id.
        self.assertEqual(
            self.operations_called[0][0].name, 'CreateMultipartUpload')
        self.assertDictEqual(
            self.operations_called[0][1],
            {'Key': 'key.txt', 'Bucket': 'bucket',
             'ChecksumAlgorithm': 'CRC32',
             'ContentType': 'text/plain',
             'SSEKMSKeyId': 'foo', 'ServerSideEncryption': 'aws:kms'}
        )

    def test_cp_copy_with_sse_kms_and_key_id(self):
        self.parsed_responses = [
            {'ContentLength': 5, 'LastModified': '00:00:00Z'},  # HeadObject
            {}  # CopyObject
        ]
        cmdline = (
            '%s s3://bucket/key1.txt s3://bucket/key2.txt '
            '--sse aws:kms --sse-kms-key-id foo' % self.prefix)
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 2)
        self.assertEqual(self.operations_called[1][0].name, 'CopyObject')
        self.assertDictEqual(
            self.operations_called[1][1],
            {
                'Key': 'key2.txt',
                'Bucket': 'bucket',
                'ContentType': 'text/plain',
                'CopySource': {
                    'Bucket': 'bucket',
                    'Key': 'key1.txt'
                },
                'SSEKMSKeyId': 'foo',
                'ServerSideEncryption': 'aws:kms'
            }
        )

    def test_cp_copy_large_file_with_sse_kms_and_key_id(self):
        self.parsed_responses = [
            {'ContentLength': 10 * (1024 ** 2),
             'LastModified': '00:00:00Z'},  # HeadObject
            {'UploadId': 'foo'},  # CreateMultipartUpload
            {'CopyPartResult': {'ETag': '"foo"'}},  # UploadPartCopy
            {'CopyPartResult': {'ETag': '"foo"'}},  # UploadPartCopy
            {}  # CompleteMultipartUpload
        ]
        cmdline = (
            '%s s3://bucket/key1.txt s3://bucket/key2.txt '
            '--sse aws:kms --sse-kms-key-id foo' % self.prefix)
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 5)

        # We are only really concerned that the CreateMultipartUpload
        # used the KMS key id.
        self.assertEqual(
            self.operations_called[1][0].name, 'CreateMultipartUpload')
        self.assertDictEqual(
            self.operations_called[1][1],
            {'Key': 'key2.txt', 'Bucket': 'bucket',
             'ContentType': 'text/plain',
             'SSEKMSKeyId': 'foo', 'ServerSideEncryption': 'aws:kms'}
        )

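    # Streaming transfers (`-`) cannot be combined with --recursive; the
    # command should fail up front with rc 255.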
    def test_cannot_use_recursive_with_stream(self):
        cmdline = '%s - s3://bucket/key.txt --recursive' % self.prefix
        _, stderr, _ = self.run_cmd(cmdline, expected_rc=255)
        self.assertIn(
            'Streaming currently is only compatible with non-recursive cp '
            'commands', stderr)

    def test_upload_unicode_path(self):
        self.parsed_responses = [
            {'ContentLength': 10,
             'LastModified': '00:00:00Z'},  # HeadObject
            {'ETag': '"foo"'}  # CopyObject
        ]
        command = u's3 cp s3://bucket/\u2603 s3://bucket/\u2713'
        stdout, stderr, rc = self.run_cmd(command, expected_rc=0)

        success_message = (
            u'copy: s3://bucket/\u2603 to s3://bucket/\u2713'
        )
        self.assertIn(success_message, stdout)

        progress_message = 'Completed 10 Bytes'
        self.assertIn(progress_message, stdout)

    def test_cp_with_error_and_warning_permissions(self):
        command = "s3 cp %s s3://bucket/foo.txt"
        self.parsed_responses = [{
            'Error': {
                'Code': 'NoSuchBucket',
                'Message': 'The specified bucket does not exist',
                'BucketName': 'bucket'
            }
        }]
        self.http_response.status_code = 404

        full_path = self.files.create_file('foo.txt', 'bar')

        # Patch get_file_stat to return a value indicating that an invalid
        # timestamp was loaded. It is impossible to set an invalid timestamp
        # on all OSes so it has to be patched.
        # TODO: find another method to test this behavior without patching.
        with mock.patch(
            'awscli.customizations.s3.filegenerator.get_file_stat',
            return_value=(None, None)
        ):
            _, stderr, rc = self.run_cmd(command % full_path, expected_rc=1)
        self.assertIn('upload failed', stderr)
        self.assertIn('warning: File has an invalid timestamp.', stderr)

    def test_upload_with_checksum_algorithm_crc32(self):
        full_path = self.files.create_file('foo.txt', 'contents')
        cmdline = f'{self.prefix} {full_path} s3://bucket/key.txt --checksum-algorithm CRC32'
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        self.assertEqual(self.operations_called[0][1]['ChecksumAlgorithm'], 'CRC32')

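    # CRC32C and CRC64NVME checksum support comes from the optional AWS CRT,
    # so these tests only run when the CRT is available.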
    @requires_crt
    def test_upload_with_checksum_algorithm_crc32c(self):
        full_path = self.files.create_file('foo.txt', 'contents')
        cmdline = f'{self.prefix} {full_path} s3://bucket/key.txt --checksum-algorithm CRC32C'
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        self.assertEqual(self.operations_called[0][1]['ChecksumAlgorithm'], 'CRC32C')

    @requires_crt
    def test_upload_with_checksum_algorithm_crc64nvme(self):
        full_path = self.files.create_file('foo.txt', 'contents')
        cmdline = f'{self.prefix} {full_path} s3://bucket/key.txt --checksum-algorithm CRC64NVME'
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        self.assertEqual(self.operations_called[0][1]['ChecksumAlgorithm'], 'CRC64NVME')

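    # For multipart uploads the checksum algorithm must be sent on both
    # CreateMultipartUpload and every UploadPart, and the per-part checksums
    # are echoed back in the CompleteMultipartUpload part list.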
    def test_multipart_upload_with_checksum_algorithm_crc32(self):
        full_path = self.files.create_file('foo.txt', 'a' * 10 * (1024 ** 2))
        self.parsed_responses = [
            {'UploadId': 'foo'},
            {'ETag': 'foo-e1', 'ChecksumCRC32': 'foo-1'},
            {'ETag': 'foo-e2', 'ChecksumCRC32': 'foo-2'},
            {}
        ]
        cmdline = ('%s %s s3://bucket/key2.txt'
                   ' --checksum-algorithm CRC32' % (self.prefix, full_path))
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 4, self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'CreateMultipartUpload')
        self.assertEqual(self.operations_called[0][1]['ChecksumAlgorithm'], 'CRC32')
        self.assertEqual(self.operations_called[1][0].name, 'UploadPart')
        self.assertEqual(self.operations_called[1][1]['ChecksumAlgorithm'], 'CRC32')
        self.assertEqual(self.operations_called[3][0].name, 'CompleteMultipartUpload')
        self.assertIn({'ETag': 'foo-e1', 'ChecksumCRC32': 'foo-1', 'PartNumber': mock.ANY},
                      self.operations_called[3][1]['MultipartUpload']['Parts'])
        self.assertIn({'ETag': 'foo-e2', 'ChecksumCRC32': 'foo-2', 'PartNumber': mock.ANY},
                      self.operations_called[3][1]['MultipartUpload']['Parts'])

    def test_copy_with_checksum_algorithm_crc32(self):
        self.parsed_responses = [
            self.head_object_response(),
            # Mocked CopyObject response with a CRC32 checksum specified
            {
                'ETag': 'foo-1',
                'ChecksumCRC32': 'Tq0H4g=='
            }
        ]
        cmdline = f'{self.prefix} s3://bucket1/key.txt s3://bucket2/key.txt --checksum-algorithm CRC32'
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(self.operations_called[1][0].name, 'CopyObject')
        self.assertEqual(self.operations_called[1][1]['ChecksumAlgorithm'], 'CRC32')

    def test_download_with_checksum_mode_crc32(self):
        self.parsed_responses = [
            self.head_object_response(),
            # Mocked GetObject response with a checksum algorithm specified
            {
                'ETag': 'foo-1',
                'ChecksumCRC32': 'Tq0H4g==',
                'Body': BytesIO(b'foo')
            }
        ]
        cmdline = f'{self.prefix} s3://bucket/foo {self.files.rootdir} --checksum-mode ENABLED'
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(self.operations_called[1][0].name, 'GetObject')
        self.assertEqual(self.operations_called[1][1]['ChecksumMode'], 'ENABLED')

    def test_download_with_checksum_mode_crc32c(self):
        self.parsed_responses = [
            self.head_object_response(),
            # Mocked GetObject response with a checksum algorithm specified
            {
                'ETag': 'foo-1',
                'ChecksumCRC32C': 'checksum',
                'Body': BytesIO(b'foo')
            }
        ]
        cmdline = f'{self.prefix} s3://bucket/foo {self.files.rootdir} --checksum-mode ENABLED'
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(self.operations_called[1][0].name, 'GetObject')
        self.assertEqual(self.operations_called[1][1]['ChecksumMode'], 'ENABLED')


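# Streaming transfers use `-` in place of a local path: uploads read raw
# bytes from sys.stdin.buffer and downloads write the object to stdout.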
class TestStreamingCPCommand(BaseAWSCommandParamsTest):
    def test_streaming_upload(self):
        command = "s3 cp - s3://bucket/streaming.txt"
        self.parsed_responses = [{
            'ETag': '"c8afdb36c52cf4727836669019e69222"'
        }]

        binary_stdin = BufferedBytesIO(b'foo\n')
        with mock.patch('sys.stdin', binary_stdin):
            self.run_cmd(command)

        self.assertEqual(len(self.operations_called), 1)
        model, args = self.operations_called[0]
        expected_args = {
            'Bucket': 'bucket',
            'Key': 'streaming.txt',
            'ChecksumAlgorithm': 'CRC32',
            'Body': mock.ANY
        }

        self.assertEqual(model.name, 'PutObject')
        self.assertEqual(args, expected_args)

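    # `--expected-size` is a sizing hint for streams too large to buffer;
    # a small stream like this one still goes through a single PutObject.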
    def test_streaming_upload_with_expected_size(self):
        command = "s3 cp - s3://bucket/streaming.txt --expected-size 4"
        self.parsed_responses = [{
            'ETag': '"c8afdb36c52cf4727836669019e69222"'
        }]

        binary_stdin = BufferedBytesIO(b'foo\n')
        with mock.patch('sys.stdin', binary_stdin):
            self.run_cmd(command)

        self.assertEqual(len(self.operations_called), 1)
        model, args = self.operations_called[0]
        expected_args = {
            'Bucket': 'bucket',
            'Key': 'streaming.txt',
            'ChecksumAlgorithm': 'CRC32',
            'Body': mock.ANY
        }

        self.assertEqual(model.name, 'PutObject')
        self.assertEqual(args, expected_args)

    def test_streaming_upload_error(self):
        command = "s3 cp - s3://bucket/streaming.txt"
        self.parsed_responses = [{
            'Error': {
                'Code': 'NoSuchBucket',
                'Message': 'The specified bucket does not exist',
                'BucketName': 'bucket'
            }
        }]
        self.http_response.status_code = 404

        binary_stdin = BufferedBytesIO(b'foo\n')
        with mock.patch('sys.stdin', binary_stdin):
            _, stderr, _ = self.run_cmd(command, expected_rc=1)

        error_message = (
            'An error occurred (NoSuchBucket) when calling '
            'the PutObject operation: The specified bucket does not exist'
        )
        self.assertIn(error_message, stderr)

    def test_streaming_upload_when_stdin_unavailable(self):
        command = "s3 cp - s3://bucket/streaming.txt"
        self.parsed_responses = [{
            'ETag': '"c8afdb36c52cf4727836669019e69222"'
        }]

        with mock.patch('sys.stdin', None):
            _, stderr, _ = self.run_cmd(command, expected_rc=1)

        expected_message = (
            'stdin is required for this operation, but is not available'
        )
        self.assertIn(expected_message, stderr)

    def test_streaming_download(self):
        command = "s3 cp s3://bucket/streaming.txt -"
        self.parsed_responses = [
            {
                "AcceptRanges": "bytes",
                "LastModified": "Tue, 12 Jul 2016 21:26:07 GMT",
                "ContentLength": 4,
                "ETag": '"d3b07384d113edec49eaa6238ad5ff00"',
                "Metadata": {},
                "ContentType": "binary/octet-stream"
            },
            {
                "AcceptRanges": "bytes",
                "Metadata": {},
                "ContentType": "binary/octet-stream",
                "ContentLength": 4,
                "ETag": '"d3b07384d113edec49eaa6238ad5ff00"',
                "LastModified": "Tue, 12 Jul 2016 21:26:07 GMT",
                "Body": BytesIO(b'foo\n')
            }
        ]

        stdout, stderr, rc = self.run_cmd(command)
        self.assertEqual(stdout, 'foo\n')

        # Ensures no extra operations were called
        self.assertEqual(len(self.operations_called), 2)
        ops = [op[0].name for op in self.operations_called]
        expected_ops = ['HeadObject', 'GetObject']
        self.assertEqual(ops, expected_ops)

    def test_streaming_download_error(self):
        command = "s3 cp s3://bucket/streaming.txt -"
        self.parsed_responses = [{
            'Error': {
                'Code': 'NoSuchBucket',
                'Message': 'The specified bucket does not exist',
                'BucketName': 'bucket'
            }
        }]
        self.http_response.status_code = 404

        _, stderr, _ = self.run_cmd(command, expected_rc=1)
        error_message = (
            'An error occurred (NoSuchBucket) when calling '
            'the HeadObject operation: The specified bucket does not exist'
        )
        self.assertIn(error_message, stderr)


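# `--request-payer` should thread RequestPayer='requester' through every
# request a transfer makes, including each call of a multipart transfer.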
class TestCpCommandWithRequesterPayer(BaseCPCommandTest):
    def test_single_upload(self):
        full_path = self.files.create_file('myfile', 'mycontent')
        cmdline = (
            '%s %s s3://mybucket/mykey --request-payer' % (
                self.prefix, full_path
            )
        )
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                ('PutObject', {
                    'Bucket': 'mybucket',
                    'Key': 'mykey',
                    'ChecksumAlgorithm': 'CRC32',
                    'RequestPayer': 'requester',
                    'Body': mock.ANY,
                })
            ]
        )

    def test_multipart_upload(self):
        full_path = self.files.create_file('myfile', 'a' * 10 * (1024 ** 2))
        cmdline = (
            '%s %s s3://mybucket/mykey --request-payer' % (
                self.prefix, full_path))

        self.parsed_responses = [
            {'UploadId': 'myid'},  # CreateMultipartUpload
            {'ETag': '"myetag"'},  # UploadPart
            {'ETag': '"myetag"'},  # UploadPart
            {}  # CompleteMultipartUpload
        ]
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                ('CreateMultipartUpload', {
                    'Bucket': 'mybucket',
                    'Key': 'mykey',
                    'ChecksumAlgorithm': 'CRC32',
                    'RequestPayer': 'requester',
                }),
                ('UploadPart', {
                    'Bucket': 'mybucket',
                    'Key': 'mykey',
                    'ChecksumAlgorithm': 'CRC32',
                    'RequestPayer': 'requester',
                    'UploadId': 'myid',
                    'PartNumber': mock.ANY,
                    'Body': mock.ANY,
                }),
                ('UploadPart', {
                    'Bucket': 'mybucket',
                    'Key': 'mykey',
                    'ChecksumAlgorithm': 'CRC32',
                    'RequestPayer': 'requester',
                    'UploadId': 'myid',
                    'PartNumber': mock.ANY,
                    'Body': mock.ANY,
                }),
                ('CompleteMultipartUpload', {
                    'Bucket': 'mybucket',
                    'Key': 'mykey',
                    'RequestPayer': 'requester',
                    'UploadId': 'myid',
                    'MultipartUpload': {'Parts': [
                        {'ETag': '"myetag"', 'PartNumber': 1},
                        {'ETag': '"myetag"', 'PartNumber': 2}]
                    }
                })
            ]
        )

    def test_recursive_upload(self):
        self.files.create_file('myfile', 'mycontent')
        cmdline = (
            '%s %s s3://mybucket/ --request-payer --recursive' % (
                self.prefix, self.files.rootdir
            )
        )
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                ('PutObject', {
                    'Bucket': 'mybucket',
                    'Key': 'myfile',
                    'ChecksumAlgorithm': 'CRC32',
                    'RequestPayer': 'requester',
                    'Body': mock.ANY,
                })
            ]
        )

    def test_single_download(self):
        cmdline = '%s s3://mybucket/mykey %s --request-payer' % (
            self.prefix, self.files.rootdir)
        self.parsed_responses = [
            self.head_object_response(),
            self.get_object_response()
        ]

        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.head_object_request(
                    'mybucket', 'mykey', RequestPayer='requester'),
                self.get_object_request(
                    'mybucket', 'mykey', RequestPayer='requester'),
            ]
        )

    def test_ranged_download(self):
        cmdline = '%s s3://mybucket/mykey %s --request-payer' % (
            self.prefix, self.files.rootdir)
        self.parsed_responses = [
            self.head_object_response(ContentLength=10 * (1024 ** 2)),
            self.get_object_response(),
            self.get_object_response()
        ]

        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.head_object_request(
                    'mybucket', 'mykey', RequestPayer='requester'),
                self.get_object_request(
                    'mybucket', 'mykey', Range=mock.ANY,
                    RequestPayer='requester',
                    IfMatch='"foo-1"',
                ),
                self.get_object_request(
                    'mybucket', 'mykey', Range=mock.ANY,
                    RequestPayer='requester',
                    IfMatch='"foo-1"',
                ),
            ]
        )

    def test_recursive_download(self):
        cmdline = '%s s3://mybucket/ %s --request-payer --recursive' % (
            self.prefix, self.files.rootdir)
        self.parsed_responses = [
            self.list_objects_response(['mykey']),
            self.get_object_response()
        ]
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.list_objects_request(
                    'mybucket', RequestPayer='requester'),
                self.get_object_request(
                    'mybucket', 'mykey', RequestPayer='requester')
            ]
        )

    def test_single_copy(self):
        cmdline = self.prefix
        cmdline += ' s3://sourcebucket/sourcekey s3://mybucket/mykey'
        cmdline += ' --request-payer'
        self.parsed_responses = [
            self.head_object_response(),
            self.copy_object_response(),
        ]
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.head_object_request(
                    'sourcebucket', 'sourcekey', RequestPayer='requester'
                ),
                self.copy_object_request(
                    'sourcebucket', 'sourcekey', 'mybucket', 'mykey',
                    RequestPayer='requester'
                )
            ]
        )

    def test_multipart_copy(self):
        cmdline = self.prefix
        cmdline += ' s3://sourcebucket/sourcekey s3://mybucket/mykey'
        cmdline += ' --request-payer'
        upload_id = 'id'
        self.parsed_responses = [
            self.head_object_response(ContentLength=10 * (1024 ** 2)),
            self.create_mpu_response(upload_id),
            self.upload_part_copy_response(),
            self.upload_part_copy_response(),
            self.complete_mpu_response(),
        ]
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.head_object_request(
                    'sourcebucket', 'sourcekey', RequestPayer='requester'),
                self.create_mpu_request(
                    'mybucket', 'mykey', RequestPayer='requester'),
                self.upload_part_copy_request(
                    'sourcebucket', 'sourcekey', 'mybucket', 'mykey',
                    upload_id, PartNumber=mock.ANY, RequestPayer='requester',
                    CopySourceRange=mock.ANY),
                self.upload_part_copy_request(
                    'sourcebucket', 'sourcekey', 'mybucket', 'mykey',
                    upload_id, PartNumber=mock.ANY, RequestPayer='requester',
                    CopySourceRange=mock.ANY),
                self.complete_mpu_request(
                    'mybucket', 'mykey', upload_id, num_parts=2,
                    RequestPayer='requester')
            ]
        )

    def test_recursive_copy(self):
        cmdline = self.prefix
        cmdline += ' s3://sourcebucket/ s3://mybucket/'
        cmdline += ' --request-payer'
        cmdline += ' --recursive'
        self.parsed_responses = [
            self.list_objects_response(['mykey']),
            self.copy_object_response()
        ]
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.list_objects_request(
                    'sourcebucket', RequestPayer='requester'),
                self.copy_object_request(
                    'sourcebucket', 'mykey', 'mybucket', 'mykey',
                    RequestPayer='requester')
            ]
        )


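# S3 access point ARNs can be used in place of bucket names in s3:// URIs;
# these tests verify the ARN is passed straight through as the Bucket
# parameter.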
class TestAccesspointCPCommand(BaseCPCommandTest):
    def setUp(self):
        self.accesspoint_arn = (
            'arn:aws:s3:us-west-2:123456789012:accesspoint/endpoint'
        )
        super(TestAccesspointCPCommand, self).setUp()

    def test_upload(self):
        filename = self.files.create_file('myfile', 'mycontent')
        cmdline = self.prefix
        cmdline += ' %s' % filename
        cmdline += ' s3://%s/mykey' % self.accesspoint_arn
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.put_object_request(self.accesspoint_arn, 'mykey')
            ]
        )

    def test_recursive_upload(self):
        self.files.create_file('myfile', 'mycontent')
        cmdline = self.prefix
        cmdline += ' %s' % self.files.rootdir
        cmdline += ' s3://%s/' % self.accesspoint_arn
        cmdline += ' --recursive'
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.put_object_request(self.accesspoint_arn, 'myfile')
            ]
        )

    def test_download(self):
        cmdline = self.prefix
        cmdline += ' s3://%s/mykey' % self.accesspoint_arn
        cmdline += ' %s' % self.files.rootdir
        self.parsed_responses = [
            self.head_object_response(),
            self.get_object_response(),
        ]
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.head_object_request(self.accesspoint_arn, 'mykey'),
                self.get_object_request(self.accesspoint_arn, 'mykey'),
            ]
        )

    def test_recursive_download(self):
        cmdline = self.prefix
        cmdline += ' s3://%s' % self.accesspoint_arn
        cmdline += ' %s' % self.files.rootdir
        cmdline += ' --recursive'
        self.parsed_responses = [
            self.list_objects_response(['mykey']),
            self.get_object_response(),
        ]
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.list_objects_request(self.accesspoint_arn),
                self.get_object_request(self.accesspoint_arn, 'mykey'),
            ]
        )

    def test_copy(self):
        cmdline = self.prefix
        cmdline += ' s3://%s/mykey' % self.accesspoint_arn
        accesspoint_arn_dest = self.accesspoint_arn + '-dest'
        cmdline += ' s3://%s' % accesspoint_arn_dest
        self.parsed_responses = [
            self.head_object_response(),
            self.copy_object_response(),
        ]
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.head_object_request(self.accesspoint_arn, 'mykey'),
                self.copy_object_request(
                    self.accesspoint_arn, 'mykey', accesspoint_arn_dest,
                    'mykey'),
            ]
        )

    def test_recursive_copy(self):
        cmdline = self.prefix
        cmdline += ' s3://%s' % self.accesspoint_arn
        accesspoint_arn_dest = self.accesspoint_arn + '-dest'
        cmdline += ' s3://%s' % accesspoint_arn_dest
        cmdline += ' --recursive'
        self.parsed_responses = [
            self.list_objects_response(['mykey']),
            self.copy_object_response(),
        ]
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.list_objects_request(self.accesspoint_arn),
                self.copy_object_request(
                    self.accesspoint_arn, 'mykey', accesspoint_arn_dest,
                    'mykey'),
            ]
        )

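    # Multi-Region Access Point ARNs require SigV4A signing, which is only
    # available when the AWS CRT is installed.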
    @requires_crt()
    def test_accepts_mrap_arns(self):
        mrap_arn = (
            'arn:aws:s3::123456789012:accesspoint:mfzwi23gnjvgw.mrap'
        )
        filename = self.files.create_file('myfile', 'mycontent')
        cmdline = self.prefix
        cmdline += ' %s' % filename
        cmdline += ' s3://%s/mykey' % mrap_arn
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.put_object_request(mrap_arn, 'mykey')
            ]
        )

    @requires_crt()
    def test_accepts_mrap_arns_with_slash(self):
        mrap_arn = (
            'arn:aws:s3::123456789012:accesspoint/mfzwi23gnjvgw.mrap'
        )
        filename = self.files.create_file('myfile', 'mycontent')
        cmdline = self.prefix
        cmdline += ' %s' % filename
        cmdline += ' s3://%s/mykey' % mrap_arn
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.put_object_request(mrap_arn, 'mykey')
            ]
        )