Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
aws
GitHub Repository: aws/aws-cli
Path: blob/develop/tests/integration/customizations/s3/test_filegenerator.py
1567 views
1
# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2
#
3
# Licensed under the Apache License, Version 2.0 (the "License"). You
4
# may not use this file except in compliance with the License. A copy of
5
# the License is located at
6
#
7
# http://aws.amazon.com/apache2.0/
8
#
9
# or in the "license" file accompanying this file. This file is
10
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11
# ANY KIND, either express or implied. See the License for the specific
12
# language governing permissions and limitations under the License.
13
14
15
# Note that all of these functions can be found in the unit tests.
16
# The only difference is that these tests use botocore's actual session
17
# variables to communicate with s3 as these are integration tests. Therefore,
18
# only tests that use sessions are included as integration tests.
19
20
import unittest
21
import os
22
import itertools
23
24
import botocore.session
25
from awscli import EnvironmentVariables
26
from awscli.customizations.s3.filegenerator import FileGenerator, FileStat
27
from tests.unit.customizations.s3 import compare_files
28
from tests.integration.customizations.s3 import make_s3_files, s3_cleanup
29
30
31
class S3FileGeneratorIntTest(unittest.TestCase):
    """Integration tests for ``FileGenerator`` against a live S3 bucket.

    Mirrors the unit tests of the same name; the difference is that these
    use botocore's real session to talk to S3, so only session-backed
    scenarios appear here.  Sizes and last-update timestamps originate in
    S3, so ``last_update`` is always taken from the generated results.
    """

    def setUp(self):
        # Session honoring the CLI's environment-variable mapping.
        self.session = botocore.session.get_session(EnvironmentVariables)
        # Match the CLI's response parsing: leave blobs and timestamps raw.
        parser_factory = self.session.get_component('response_parser_factory')
        parser_factory.set_parser_defaults(
            blob_parser=lambda raw: raw,
            timestamp_parser=lambda raw: raw)
        self.client = self.session.create_client('s3', region_name='us-west-2')
        # Helper provisions a bucket with text1.txt and
        # another_directory/text2.txt; tearDown removes it.
        self.bucket = make_s3_files(self.session)
        self.file1 = self.bucket + '/' + 'text1.txt'
        self.file2 = self.bucket + '/' + 'another_directory/text2.txt'

    def tearDown(self):
        s3_cleanup(self.bucket, self.session)

    def test_s3_file(self):
        # A single S3 object (non-directory op) yields exactly one FileStat.
        call_args = {'src': {'path': self.file1, 'type': 's3'},
                     'dest': {'path': 'text1.txt', 'type': 'local'},
                     'dir_op': False, 'use_src_name': False}
        generated = list(
            FileGenerator(self.client, '').call(call_args))
        expected = FileStat(src=self.file1, dest='text1.txt',
                            compare_key='text1.txt',
                            size=15,
                            last_update=generated[0].last_update,
                            src_type='s3',
                            dest_type='local', operation_name='')
        self.assertEqual(len(generated), 1)
        compare_files(self, generated[0], expected)

    def test_s3_directory(self):
        # Listing under a common prefix yields every non-empty object;
        # zero-size "directory" markers are skipped for non-delete ops.
        call_args = {'src': {'path': self.bucket + '/', 'type': 's3'},
                     'dest': {'path': '', 'type': 'local'},
                     'dir_op': True, 'use_src_name': True}
        generated = list(
            FileGenerator(self.client, '').call(call_args))
        expected = [
            FileStat(src=self.file2,
                     dest='another_directory' + os.sep + 'text2.txt',
                     compare_key='another_directory/text2.txt',
                     size=21,
                     last_update=generated[0].last_update,
                     src_type='s3',
                     dest_type='local', operation_name=''),
            FileStat(src=self.file1,
                     dest='text1.txt',
                     compare_key='text1.txt',
                     size=15,
                     last_update=generated[1].last_update,
                     src_type='s3',
                     dest_type='local', operation_name=''),
        ]
        self.assertEqual(len(generated), 2)
        for actual, wanted in zip(generated, expected):
            compare_files(self, actual, wanted)

    def test_s3_delete_directory(self):
        # For a delete operation the zero-size directory marker itself
        # is included in the generated results.
        call_args = {'src': {'path': self.bucket + '/', 'type': 's3'},
                     'dest': {'path': '', 'type': 'local'},
                     'dir_op': True, 'use_src_name': True}
        generated = list(
            FileGenerator(self.client, 'delete').call(call_args))
        expected = [
            FileStat(
                src=self.bucket + '/another_directory/',
                dest='another_directory' + os.sep,
                compare_key='another_directory/',
                size=0,
                last_update=generated[0].last_update,
                src_type='s3',
                dest_type='local', operation_name='delete'),
            FileStat(
                src=self.file2,
                dest='another_directory' + os.sep + 'text2.txt',
                compare_key='another_directory/text2.txt',
                size=21,
                last_update=generated[1].last_update,
                src_type='s3',
                dest_type='local', operation_name='delete'),
            FileStat(
                src=self.file1,
                dest='text1.txt',
                compare_key='text1.txt',
                size=15,
                last_update=generated[2].last_update,
                src_type='s3',
                dest_type='local', operation_name='delete'),
        ]
        self.assertEqual(len(generated), 3)
        for actual, wanted in zip(generated, expected):
            compare_files(self, actual, wanted)

    def test_page_size(self):
        # With page_size=1, consuming one item means only one object was
        # requested from ``ListObjects``.
        call_args = {'src': {'path': self.bucket + '/', 'type': 's3'},
                     'dest': {'path': '', 'type': 'local'},
                     'dir_op': True, 'use_src_name': True}
        paged_gen = FileGenerator(self.client, '',
                                  page_size=1).call(call_args)
        generated = list(itertools.islice(paged_gen, 1))
        expected = FileStat(src=self.file2,
                            dest='another_directory' + os.sep + 'text2.txt',
                            compare_key='another_directory/text2.txt',
                            size=21,
                            last_update=generated[0].last_update,
                            src_type='s3',
                            dest_type='local', operation_name='')
        self.assertEqual(len(generated), 1)
        compare_files(self, generated[0], expected)
161
162
163
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()
165
166