Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
aws
GitHub Repository: aws/aws-cli
Path: blob/develop/tests/functional/s3/test_ls_command.py
1567 views
1
#!/usr/bin/env python
2
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
3
#
4
# Licensed under the Apache License, Version 2.0 (the "License"). You
5
# may not use this file except in compliance with the License. A copy of
6
# the License is located at
7
#
8
# http://aws.amazon.com/apache2.0/
9
#
10
# or in the "license" file accompanying this file. This file is
11
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
12
# ANY KIND, either express or implied. See the License for the specific
13
# language governing permissions and limitations under the License.
14
from dateutil import parser, tz
15
16
from tests.functional.s3 import BaseS3TransferCommandTest
17
18
class TestLSCommand(BaseS3TransferCommandTest):

    def test_operations_used_in_recursive_list(self):
        """A recursive listing must not send any Delimiter parameter."""
        time_utc = "2014-01-09T20:45:49.000Z"
        self.parsed_responses = [{"CommonPrefixes": [], "Contents": [
            {"Key": "foo/bar.txt", "Size": 100,
             "LastModified": time_utc}]}]
        stdout, _, _ = self.run_cmd(
            's3 ls s3://bucket/ --recursive', expected_rc=0)
        call_args = self.operations_called[0][1]
        # We should not be calling the args with any delimiter because we
        # want a recursive listing.
        self.assertEqual(call_args['Prefix'], '')
        self.assertEqual(call_args['Bucket'], 'bucket')
        # BUGFIX: the request parameter is 'Delimiter' (capitalized, as in
        # the S3 model and as asserted in the recursive page-size test);
        # checking lowercase 'delimiter' was vacuously true.
        self.assertNotIn('Delimiter', call_args)
        # Time is stored in UTC timezone, but the actual time displayed
        # is specific to your tzinfo, so shift the timezone to your local's.
        time_local = parser.parse(time_utc).astimezone(tz.tzlocal())
        self.assertEqual(
            stdout,
            '%s 100 foo/bar.txt\n' % time_local.strftime('%Y-%m-%d %H:%M:%S'))
37
38
def test_errors_out_with_extra_arguments(self):
39
stderr = self.run_cmd('s3 ls --extra-argument-foo', expected_rc=255)[1]
40
self.assertIn('Unknown options', stderr)
41
self.assertIn('--extra-argument-foo', stderr)
42
43
def test_list_buckets_use_page_size(self):
44
stdout, _, _ = self.run_cmd('s3 ls --page-size 8', expected_rc=0)
45
call_args = self.operations_called[0][1]
46
# The page size gets translated to ``MaxBuckets`` in the s3 model
47
self.assertEqual(call_args['MaxBuckets'], 8)
48
49
def test_operations_use_page_size(self):
50
time_utc = "2014-01-09T20:45:49.000Z"
51
self.parsed_responses = [{"CommonPrefixes": [], "Contents": [
52
{"Key": "foo/bar.txt", "Size": 100,
53
"LastModified": time_utc}]}]
54
stdout, _, _ = self.run_cmd('s3 ls s3://bucket/ --page-size 8', expected_rc=0)
55
call_args = self.operations_called[0][1]
56
# We should not be calling the args with any delimiter because we
57
# want a recursive listing.
58
self.assertEqual(call_args['Prefix'], '')
59
self.assertEqual(call_args['Bucket'], 'bucket')
60
# The page size gets translated to ``MaxKeys`` in the s3 model
61
self.assertEqual(call_args['MaxKeys'], 8)
62
63
def test_operations_use_page_size_recursive(self):
64
time_utc = "2014-01-09T20:45:49.000Z"
65
self.parsed_responses = [{"CommonPrefixes": [], "Contents": [
66
{"Key": "foo/bar.txt", "Size": 100,
67
"LastModified": time_utc}]}]
68
stdout, _, _ = self.run_cmd('s3 ls s3://bucket/ --page-size 8 --recursive', expected_rc=0)
69
call_args = self.operations_called[0][1]
70
# We should not be calling the args with any delimiter because we
71
# want a recursive listing.
72
self.assertEqual(call_args['Prefix'], '')
73
self.assertEqual(call_args['Bucket'], 'bucket')
74
# The page size gets translated to ``MaxKeys`` in the s3 model
75
self.assertEqual(call_args['MaxKeys'], 8)
76
self.assertNotIn('Delimiter', call_args)
77
78
def test_success_rc_has_prefixes_and_objects(self):
79
time_utc = "2014-01-09T20:45:49.000Z"
80
self.parsed_responses = [
81
{"CommonPrefixes": [{"Prefix": "foo/"}],
82
"Contents": [{"Key": "foo/bar.txt", "Size": 100,
83
"LastModified": time_utc}]}
84
]
85
self.run_cmd('s3 ls s3://bucket/foo', expected_rc=0)
86
87
def test_success_rc_has_only_prefixes(self):
88
self.parsed_responses = [
89
{"CommonPrefixes": [{"Prefix": "foo/"}]}
90
]
91
self.run_cmd('s3 ls s3://bucket/foo', expected_rc=0)
92
93
def test_success_rc_has_only_objects(self):
94
time_utc = "2014-01-09T20:45:49.000Z"
95
self.parsed_responses = [
96
{"Contents": [{"Key": "foo/bar.txt", "Size": 100,
97
"LastModified": time_utc}]}
98
]
99
self.run_cmd('s3 ls s3://bucket/foo', expected_rc=0)
100
101
def test_success_rc_with_pagination(self):
102
time_utc = "2014-01-09T20:45:49.000Z"
103
# Pagination should not affect a successful return code of zero, even
104
# if there are no results on the second page because there were
105
# results in previous pages.
106
self.parsed_responses = [
107
{"CommonPrefixes": [{"Prefix": "foo/"}],
108
"Contents": [{"Key": "foo/bar.txt", "Size": 100,
109
"LastModified": time_utc}]},
110
{}
111
]
112
self.run_cmd('s3 ls s3://bucket/foo', expected_rc=0)
113
114
def test_success_rc_empty_bucket_no_key_given(self):
115
# If no key has been provided and the bucket is empty, it should
116
# still return an rc of 0 since the user is not looking for an actual
117
# object.
118
self.parsed_responses = [{}]
119
self.run_cmd('s3 ls s3://bucket', expected_rc=0)
120
121
def test_fail_rc_no_objects_nor_prefixes(self):
122
self.parsed_responses = [{}]
123
self.run_cmd('s3 ls s3://bucket/foo', expected_rc=1)
124
125
def test_human_readable_file_size(self):
126
time_utc = "2014-01-09T20:45:49.000Z"
127
self.parsed_responses = [{"CommonPrefixes": [], "Contents": [
128
{"Key": "onebyte.txt", "Size": 1, "LastModified": time_utc},
129
{"Key": "onekilobyte.txt", "Size": 1024, "LastModified": time_utc},
130
{"Key": "onemegabyte.txt", "Size": 1024 ** 2, "LastModified": time_utc},
131
{"Key": "onegigabyte.txt", "Size": 1024 ** 3, "LastModified": time_utc},
132
{"Key": "oneterabyte.txt", "Size": 1024 ** 4, "LastModified": time_utc},
133
{"Key": "onepetabyte.txt", "Size": 1024 ** 5, "LastModified": time_utc} ]}]
134
stdout, _, _ = self.run_cmd('s3 ls s3://bucket/ --human-readable',
135
expected_rc=0)
136
call_args = self.operations_called[0][1]
137
# Time is stored in UTC timezone, but the actual time displayed
138
# is specific to your tzinfo, so shift the timezone to your local's.
139
time_local = parser.parse(time_utc).astimezone(tz.tzlocal())
140
time_fmt = time_local.strftime('%Y-%m-%d %H:%M:%S')
141
self.assertIn('%s 1 Byte onebyte.txt\n' % time_fmt, stdout)
142
self.assertIn('%s 1.0 KiB onekilobyte.txt\n' % time_fmt, stdout)
143
self.assertIn('%s 1.0 MiB onemegabyte.txt\n' % time_fmt, stdout)
144
self.assertIn('%s 1.0 GiB onegigabyte.txt\n' % time_fmt, stdout)
145
self.assertIn('%s 1.0 TiB oneterabyte.txt\n' % time_fmt, stdout)
146
self.assertIn('%s 1.0 PiB onepetabyte.txt\n' % time_fmt, stdout)
147
148
def test_summarize(self):
149
time_utc = "2014-01-09T20:45:49.000Z"
150
self.parsed_responses = [{"CommonPrefixes": [], "Contents": [
151
{"Key": "onebyte.txt", "Size": 1, "LastModified": time_utc},
152
{"Key": "onekilobyte.txt", "Size": 1024, "LastModified": time_utc},
153
{"Key": "onemegabyte.txt", "Size": 1024 ** 2, "LastModified": time_utc},
154
{"Key": "onegigabyte.txt", "Size": 1024 ** 3, "LastModified": time_utc},
155
{"Key": "oneterabyte.txt", "Size": 1024 ** 4, "LastModified": time_utc},
156
{"Key": "onepetabyte.txt", "Size": 1024 ** 5, "LastModified": time_utc} ]}]
157
stdout, _, _ = self.run_cmd('s3 ls s3://bucket/ --summarize', expected_rc=0)
158
call_args = self.operations_called[0][1]
159
# Time is stored in UTC timezone, but the actual time displayed
160
# is specific to your tzinfo, so shift the timezone to your local's.
161
time_local = parser.parse(time_utc).astimezone(tz.tzlocal())
162
time_fmt = time_local.strftime('%Y-%m-%d %H:%M:%S')
163
self.assertIn('Total Objects: 6\n', stdout)
164
self.assertIn('Total Size: 1127000493261825\n', stdout)
165
166
def test_summarize_with_human_readable(self):
167
time_utc = "2014-01-09T20:45:49.000Z"
168
self.parsed_responses = [{"CommonPrefixes": [], "Contents": [
169
{"Key": "onebyte.txt", "Size": 1, "LastModified": time_utc},
170
{"Key": "onekilobyte.txt", "Size": 1024, "LastModified": time_utc},
171
{"Key": "onemegabyte.txt", "Size": 1024 ** 2, "LastModified": time_utc},
172
{"Key": "onegigabyte.txt", "Size": 1024 ** 3, "LastModified": time_utc},
173
{"Key": "oneterabyte.txt", "Size": 1024 ** 4, "LastModified": time_utc},
174
{"Key": "onepetabyte.txt", "Size": 1024 ** 5, "LastModified": time_utc} ]}]
175
stdout, _, _ = self.run_cmd('s3 ls s3://bucket/ --human-readable --summarize', expected_rc=0)
176
call_args = self.operations_called[0][1]
177
# Time is stored in UTC timezone, but the actual time displayed
178
# is specific to your tzinfo, so shift the timezone to your local's.
179
time_local = parser.parse(time_utc).astimezone(tz.tzlocal())
180
time_fmt = time_local.strftime('%Y-%m-%d %H:%M:%S')
181
self.assertIn('Total Objects: 6\n', stdout)
182
self.assertIn('Total Size: 1.0 PiB\n', stdout)
183
184
def test_requester_pays(self):
185
time_utc = "2014-01-09T20:45:49.000Z"
186
self.parsed_responses = [{"CommonPrefixes": [], "Contents": [
187
{"Key": "onebyte.txt", "Size": 1, "LastModified": time_utc},
188
]}]
189
command = 's3 ls s3://mybucket/foo/ --request-payer requester'
190
self.assert_params_for_cmd(command, {
191
'Bucket': 'mybucket', 'Delimiter': '/',
192
'RequestPayer': 'requester', 'EncodingType': 'url',
193
'Prefix': 'foo/'
194
})
195
196
def test_requester_pays_with_no_args(self):
197
time_utc = "2014-01-09T20:45:49.000Z"
198
self.parsed_responses = [{"CommonPrefixes": [], "Contents": [
199
{"Key": "onebyte.txt", "Size": 1, "LastModified": time_utc},
200
]}]
201
command = 's3 ls s3://mybucket/foo/ --request-payer'
202
self.assert_params_for_cmd(command, {
203
'Bucket': 'mybucket', 'Delimiter': '/',
204
'RequestPayer': 'requester', 'EncodingType': 'url',
205
'Prefix': 'foo/'
206
})
207
208
def test_accesspoint_arn(self):
209
self.parsed_responses = [
210
self.list_objects_response(['bar.txt'])
211
]
212
arn = (
213
'arn:aws:s3:us-west-2:123456789012:accesspoint/endpoint'
214
)
215
self.run_cmd('s3 ls s3://%s' % arn, expected_rc=0)
216
call_args = self.operations_called[0][1]
217
self.assertEqual(call_args['Bucket'], arn)
218
219
def test_list_buckets_uses_bucket_name_prefix(self):
220
stdout, _, _ = self.run_cmd('s3 ls --bucket-name-prefix myprefix', expected_rc=0)
221
call_args = self.operations_called[0][1]
222
self.assertEqual(call_args['Prefix'], 'myprefix')
223
224
def test_list_buckets_uses_bucket_region(self):
225
stdout, _, _ = self.run_cmd('s3 ls --bucket-region us-west-1', expected_rc=0)
226
call_args = self.operations_called[0][1]
227
self.assertEqual(call_args['BucketRegion'], 'us-west-1')
228
229
def test_list_objects_ignores_bucket_name_prefix(self):
230
stdout, _, _ = self.run_cmd('s3 ls s3://mybucket --bucket-name-prefix myprefix', expected_rc=0)
231
call_args = self.operations_called[0][1]
232
self.assertEqual(call_args['Prefix'], '')
233
234
def test_list_objects_ignores_bucket_region(self):
235
stdout, _, _ = self.run_cmd('s3 ls s3://mybucket --bucket-region us-west-1', expected_rc=0)
236
call_args = self.operations_called[0][1]
237
self.assertNotIn('BucketRegion', call_args)
238
239