Path: tests/functional/s3/test_sync_command.py (develop branch)
# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os

from awscli.testutils import mock, cd
from awscli.compat import BytesIO
from tests.functional.s3 import BaseS3TransferCommandTest


class TestSyncCommand(BaseS3TransferCommandTest):

    prefix = 's3 sync '

    def test_website_redirect_ignore_paramfile(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = '%s %s s3://bucket/key.txt --website-redirect %s' % \
            (self.prefix, self.files.rootdir, 'http://someserver')
        self.parsed_responses = [
            {"CommonPrefixes": [], "Contents": []},
            {'ETag': '"c8afdb36c52cf4727836669019e69222"'}
        ]
        self.run_cmd(cmdline, expected_rc=0)

        # The only operations we should have called are ListObjectsV2/PutObject.
        self.assertEqual(len(self.operations_called), 2, self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'ListObjectsV2')
        self.assertEqual(self.operations_called[1][0].name, 'PutObject')
        # Make sure that the specified web address is used as opposed to the
        # contents of the web address when uploading the object
        self.assertEqual(
            self.operations_called[1][1]['WebsiteRedirectLocation'],
            'http://someserver'
        )

    def test_no_recursive_option(self):
        cmdline = '. s3://mybucket --recursive'
        # Return code will be 2 for invalid parameter ``--recursive``
        self.run_cmd(cmdline, expected_rc=2)

    def test_sync_from_non_existant_directory(self):
        non_existant_directory = os.path.join(self.files.rootdir, 'fakedir')
        cmdline = '%s %s s3://bucket/' % (self.prefix, non_existant_directory)
        self.parsed_responses = [
            {"CommonPrefixes": [], "Contents": []}
        ]
        _, stderr, _ = self.run_cmd(cmdline, expected_rc=255)
        self.assertIn('does not exist', stderr)

    def test_sync_to_non_existant_directory(self):
        key = 'foo.txt'
        non_existant_directory = os.path.join(self.files.rootdir, 'fakedir')
        cmdline = '%s s3://bucket/ %s' % (self.prefix, non_existant_directory)
        self.parsed_responses = [
            {"CommonPrefixes": [], "Contents": [
                {"Key": key, "Size": 3,
                 "LastModified": "2014-01-09T20:45:49.000Z",
                 "ETag": '"c8afdb36c52cf4727836669019e69222-"'}]},
            {'ETag': '"c8afdb36c52cf4727836669019e69222-"',
             'Body': BytesIO(b'foo')}
        ]
        self.run_cmd(cmdline, expected_rc=0)
        # Make sure the file now exists.
        self.assertTrue(
            os.path.exists(os.path.join(non_existant_directory, key)))

    def test_glacier_sync_with_force_glacier(self):
        self.parsed_responses = [
            {
                'Contents': [
                    {'Key': 'foo/bar.txt', 'ContentLength': '100',
                     'LastModified': '00:00:00Z',
                     'StorageClass': 'GLACIER',
                     'Size': 100, 'ETag': '"foo-1"'},
                ],
                'CommonPrefixes': []
            },
            {'ETag': '"foo-1"', 'Body': BytesIO(b'foo')},
        ]
        cmdline = '%s s3://bucket/foo %s --force-glacier-transfer' % (
            self.prefix, self.files.rootdir)
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 2, self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'ListObjectsV2')
        self.assertEqual(self.operations_called[1][0].name, 'GetObject')

    def test_handles_glacier_incompatible_operations(self):
        self.parsed_responses = [
            {'Contents': [
                {'Key': 'foo', 'Size': 100,
                 'LastModified': '00:00:00Z', 'StorageClass': 'GLACIER'},
                {'Key': 'bar', 'Size': 100,
                 'LastModified': '00:00:00Z', 'StorageClass': 'DEEP_ARCHIVE'}
            ]}
        ]
        cmdline = '%s s3://bucket/ %s' % (
            self.prefix, self.files.rootdir)
        _, stderr, _ = self.run_cmd(cmdline, expected_rc=2)
        # There should not have been a download attempted because the
        # operation was skipped because it is glacier and glacier
        # deep archive incompatible.
        self.assertEqual(len(self.operations_called), 1)
        self.assertEqual(self.operations_called[0][0].name, 'ListObjectsV2')
        self.assertIn('GLACIER', stderr)
        self.assertIn('s3://bucket/foo', stderr)
        self.assertIn('s3://bucket/bar', stderr)

    def test_turn_off_glacier_warnings(self):
        self.parsed_responses = [
            {'Contents': [
                {'Key': 'foo', 'Size': 100,
                 'LastModified': '00:00:00Z', 'StorageClass': 'GLACIER'},
                {'Key': 'bar', 'Size': 100,
                 'LastModified': '00:00:00Z', 'StorageClass': 'DEEP_ARCHIVE'}
            ]}
        ]
        cmdline = '%s s3://bucket/ %s --ignore-glacier-warnings' % (
            self.prefix, self.files.rootdir)
        _, stderr, _ = self.run_cmd(cmdline, expected_rc=0)
        # There should not have been a download attempted because the
        # operation was skipped because it is glacier incompatible.
        self.assertEqual(len(self.operations_called), 1)
        self.assertEqual(self.operations_called[0][0].name, 'ListObjectsV2')
        self.assertEqual('', stderr)

    def test_warning_on_invalid_timestamp(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')

        cmdline = '%s %s s3://bucket/key.txt' % \
            (self.prefix, self.files.rootdir)
        self.parsed_responses = [
            {"CommonPrefixes": [], "Contents": []},
            {'ETag': '"c8afdb36c52cf4727836669019e69222"'}
        ]
        # Patch get_file_stat to return a value indicating that an invalid
        # timestamp was loaded. It is impossible to set an invalid timestamp
        # on all OSes so it has to be patched.
        # TODO: find another method to test this behavior without patching.
        with mock.patch(
            'awscli.customizations.s3.filegenerator.get_file_stat',
            return_value=(None, None)
        ):
            self.run_cmd(cmdline, expected_rc=2)

        # We should still have put the object
        self.assertEqual(len(self.operations_called), 2, self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'ListObjectsV2')
        self.assertEqual(self.operations_called[1][0].name, 'PutObject')

    def test_sync_with_delete_on_downloads(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = '%s s3://bucket %s --delete' % (
            self.prefix, self.files.rootdir)
        self.parsed_responses = [
            {"CommonPrefixes": [], "Contents": []},
            {'ETag': '"c8afdb36c52cf4727836669019e69222"'}
        ]
        self.run_cmd(cmdline, expected_rc=0)

        # The only operations we should have called are ListObjectsV2.
        self.assertEqual(len(self.operations_called), 1, self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'ListObjectsV2')

        self.assertFalse(os.path.exists(full_path))

    # When a file has been deleted after listing,
    # awscli.customizations.s3.utils.get_file_stat may raise either some kind
    # of OSError, or a ValueError, depending on the environment. In both cases,
    # the behaviour should be the same: skip the file and emit a warning.
    #
    # This test covers the case where a ValueError is emitted.
    def test_sync_skips_over_files_deleted_between_listing_and_transfer_valueerror(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = '%s %s s3://bucket/' % (
            self.prefix, self.files.rootdir)

        # FileGenerator.list_files should skip over files that cause an
        # IOError to be raised because they are missing when we try to
        # get their stats. This IOError is translated to a ValueError in
        # awscli.customizations.s3.utils.get_file_stat.
        def side_effect(_):
            os.remove(full_path)
            raise ValueError()
        with mock.patch(
            'awscli.customizations.s3.filegenerator.get_file_stat',
            side_effect=side_effect
        ):
            self.run_cmd(cmdline, expected_rc=2)

        # We should not call PutObject because the file was deleted
        # before we could transfer it
        self.assertEqual(len(self.operations_called), 1, self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'ListObjectsV2')

    # This test covers the case where an OSError is emitted.
    def test_sync_skips_over_files_deleted_between_listing_and_transfer_oserror(self):
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = '%s %s s3://bucket/' % (
            self.prefix, self.files.rootdir)

        # FileGenerator.list_files should skip over files that cause an
        # OSError to be raised because they are missing when we try to
        # get their stats.
        def side_effect(_):
            os.remove(full_path)
            raise OSError()
        with mock.patch(
            'awscli.customizations.s3.filegenerator.get_file_stat',
            side_effect=side_effect
        ):
            self.run_cmd(cmdline, expected_rc=2)

        # We should not call PutObject because the file was deleted
        # before we could transfer it
        self.assertEqual(len(self.operations_called), 1, self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'ListObjectsV2')

    def test_request_payer(self):
        cmdline = '%s s3://sourcebucket/ s3://mybucket --request-payer' % (
            self.prefix)
        self.parsed_responses = [
            # Response for ListObjects on source bucket
            self.list_objects_response(['mykey']),
            # Response for ListObjects on destination bucket
            self.list_objects_response([]),
            self.copy_object_response(),
        ]
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.list_objects_request(
                    'sourcebucket', RequestPayer='requester'),
                self.list_objects_request(
                    'mybucket', RequestPayer='requester'),
                self.copy_object_request(
                    'sourcebucket', 'mykey', 'mybucket', 'mykey',
                    RequestPayer='requester')
            ]
        )

    def test_request_payer_with_deletes(self):
        cmdline = '%s s3://sourcebucket/ s3://mybucket' % self.prefix
        cmdline += ' --request-payer'
        cmdline += ' --delete'
        self.parsed_responses = [
            # Response for ListObjects on source bucket
            self.list_objects_response([]),
            # Response for ListObjects on destination bucket
            self.list_objects_response(['key-to-delete']),
            self.delete_object_response()
        ]
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.list_objects_request(
                    'sourcebucket', RequestPayer='requester'),
                self.list_objects_request(
                    'mybucket', RequestPayer='requester'),
                self.delete_object_request(
                    'mybucket', 'key-to-delete', RequestPayer='requester'),
            ]
        )

    def test_with_accesspoint_arn(self):
        accesspoint_arn = (
            'arn:aws:s3:us-west-2:123456789012:accesspoint/endpoint'
        )
        cmdline = self.prefix
        cmdline += 's3://%s' % accesspoint_arn
        cmdline += ' %s' % self.files.rootdir
        self.parsed_responses = [
            self.list_objects_response(['mykey']),
            self.get_object_response(),
        ]
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.list_objects_request(accesspoint_arn),
                self.get_object_request(accesspoint_arn, 'mykey')
            ]
        )

    def test_upload_with_checksum_algorithm_sha1(self):
        self.files.create_file('foo.txt', 'contents')
        cmdline = f'{self.prefix} {self.files.rootdir} s3://bucket/ --checksum-algorithm SHA1'
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(self.operations_called[1][0].name, 'PutObject')
        self.assertEqual(self.operations_called[1][1]['ChecksumAlgorithm'], 'SHA1')

    def test_copy_with_checksum_algorithm_update_sha1(self):
        cmdline = f'{self.prefix} s3://src-bucket/ s3://dest-bucket/ --checksum-algorithm SHA1'
        self.parsed_responses = [
            # Response for ListObjects on source bucket
            {
                'Contents': [
                    {
                        'Key': 'mykey',
                        'LastModified': '00:00:00Z',
                        'Size': 100,
                        'ChecksumAlgorithm': 'SHA1'
                    }
                ],
                'CommonPrefixes': []
            },
            # Response for ListObjects on destination bucket
            self.list_objects_response([]),
            # Response for CopyObject
            {
                'ChecksumSHA1': 'sha1-checksum'
            }
        ]
        self.run_cmd(cmdline, expected_rc=0)
        self.assert_operations_called(
            [
                self.list_objects_request('src-bucket'),
                self.list_objects_request('dest-bucket'),
                (
                    'CopyObject', {
                        'CopySource': {
                            'Bucket': 'src-bucket',
                            'Key': 'mykey'
                        },
                        'Bucket': 'dest-bucket',
                        'Key': 'mykey',
                        'ChecksumAlgorithm': 'SHA1'
                    }
                )
            ]
        )

    def test_upload_with_checksum_algorithm_sha256(self):
        self.files.create_file('foo.txt', 'contents')
        cmdline = f'{self.prefix} {self.files.rootdir} s3://bucket/ --checksum-algorithm SHA256'
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(self.operations_called[1][0].name, 'PutObject')
        self.assertEqual(self.operations_called[1][1]['ChecksumAlgorithm'], 'SHA256')

    def test_download_with_checksum_mode_sha1(self):
        self.parsed_responses = [
            self.list_objects_response(['bucket']),
            # Mocked GetObject response with a checksum algorithm specified
            {
                'ETag': 'foo-1',
                'ChecksumSHA1': 'checksum',
                'Body': BytesIO(b'foo')
            }
        ]
        cmdline = f'{self.prefix} s3://bucket/foo {self.files.rootdir} --checksum-mode ENABLED'
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(self.operations_called[0][0].name, 'ListObjectsV2')
        self.assertEqual(self.operations_called[1][0].name, 'GetObject')
        self.assertIn(('ChecksumMode', 'ENABLED'), self.operations_called[1][1].items())

    def test_download_with_checksum_mode_sha256(self):
        self.parsed_responses = [
            self.list_objects_response(['bucket']),
            # Mocked GetObject response with a checksum algorithm specified
            {
                'ETag': 'foo-1',
                'ChecksumSHA256': 'checksum',
                'Body': BytesIO(b'foo')
            }
        ]
        cmdline = f'{self.prefix} s3://bucket/foo {self.files.rootdir} --checksum-mode ENABLED'
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(self.operations_called[0][0].name, 'ListObjectsV2')
        self.assertEqual(self.operations_called[1][0].name, 'GetObject')
        self.assertIn(('ChecksumMode', 'ENABLED'), self.operations_called[1][1].items())

    def test_download_with_checksum_mode_crc64nvme(self):
        self.parsed_responses = [
            self.list_objects_response(['bucket']),
            # Mocked GetObject response with a checksum algorithm specified
            {
                'ETag': 'foo-1',
                'ChecksumCRC64NVME': 'checksum',
                'Body': BytesIO(b'foo')
            }
        ]
        cmdline = f'{self.prefix} s3://bucket/foo {self.files.rootdir} --checksum-mode ENABLED'
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(self.operations_called[0][0].name, 'ListObjectsV2')
        self.assertEqual(self.operations_called[1][0].name, 'GetObject')
        self.assertIn(('ChecksumMode', 'ENABLED'), self.operations_called[1][1].items())


class TestSyncCommandWithS3Express(BaseS3TransferCommandTest):

    prefix = 's3 sync '

    def test_incompatible_with_sync_upload(self):
        cmdline = '%s %s s3://testdirectorybucket--usw2-az1--x-s3/' % (self.prefix, self.files.rootdir)
        stderr = self.run_cmd(cmdline, expected_rc=255)[1]
        self.assertIn('Cannot use sync command with a directory bucket.', stderr)

    def test_incompatible_with_sync_download(self):
        cmdline = '%s s3://testdirectorybucket--usw2-az1--x-s3/ %s' % (self.prefix, self.files.rootdir)
        stderr = self.run_cmd(cmdline, expected_rc=255)[1]
        self.assertIn('Cannot use sync command with a directory bucket.', stderr)

    def test_incompatible_with_sync_copy(self):
        cmdline = '%s s3://bucket/ s3://testdirectorybucket--usw2-az1--x-s3/' % self.prefix
        stderr = self.run_cmd(cmdline, expected_rc=255)[1]
        self.assertIn('Cannot use sync command with a directory bucket.', stderr)

    def test_incompatible_with_sync_with_delete(self):
        cmdline = '%s s3://bucket/ s3://testdirectorybucket--usw2-az1--x-s3/ --delete' % self.prefix
        stderr = self.run_cmd(cmdline, expected_rc=255)[1]
        self.assertIn('Cannot use sync command with a directory bucket.', stderr)

    def test_compatible_with_sync_with_local_directory_like_directory_bucket(self):
        self.parsed_responses = [
            {'Contents': []}
        ]

        cmdline = '%s s3://bucket/ testdirectorybucket--usw2-az1--x-s3/' % self.prefix
        with cd(self.files.rootdir):
            _, stderr, _ = self.run_cmd(cmdline)

        # Just asserting that command validated and made an API call
        self.assertEqual(len(self.operations_called), 1)
        self.assertEqual(self.operations_called[0][0].name, 'ListObjectsV2')