GitHub Repository: aws/aws-cli
Path: blob/develop/tests/unit/customizations/emr/test_schedule_hbase_backup.py
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import unittest  # needed for the unittest.main() entry point at the bottom

from awscli.testutils import mock

from tests.unit.customizations.emr import EMRBaseAWSCommandParamsTest as \
    BaseAWSCommandParamsTest
from copy import deepcopy
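

# Tests for the 'emr schedule-hbase-backup' command customization. Each test
# builds a command line, runs it through the shared EMR test harness, and
# asserts either the resulting request parameters (JobFlowId plus the HBase
# backup step) or the error output returned by run_cmd().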
class TestScheduleHBaseBackup(BaseAWSCommandParamsTest):
    prefix = 'emr schedule-hbase-backup'
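    # Expected step for a scheduled full backup with the default start time
    # ('now'); individual tests deepcopy this and tweak the Args list.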
    default_steps = [{
        'HadoopJarStep': {
            'Args': [
                'emr.hbase.backup.Main',
                '--set-scheduled-backup',
                'true',
                '--backup-dir',
                's3://abc/',
                '--full-backup-time-interval',
                '10',
                '--full-backup-time-unit',
                'minutes',
                '--start-time',
                'now'
            ],
            'Jar': '/home/hadoop/lib/hbase.jar'
        },
        'Name': 'Modify Backup Schedule',
        'ActionOnFailure': 'CANCEL_AND_WAIT'
    }]
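
    # A lowercase --type full produces the default step unchanged.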
    def test_schedule_hbase_backup_full(self):
        args = ' --cluster-id j-ABCD --dir s3://abc/ --type full' +\
               ' --interval 10 --unit minutes'
        cmdline = self.prefix + args
        result = {'JobFlowId': 'j-ABCD', 'Steps': self.default_steps}

        self.assert_params_for_cmd(cmdline, result)
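
    # --type is case-insensitive: FULL yields the same parameters as full.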
    def test_schedule_hbase_backup_full_upper_case(self):
        args = ' --cluster-id j-ABCD --dir s3://abc/ --type FULL' +\
               ' --interval 10 --unit minutes'
        cmdline = self.prefix + args
        result = {'JobFlowId': 'j-ABCD', 'Steps': self.default_steps}

        self.assert_params_for_cmd(cmdline, result)
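
    # INCREMENTAL with an uppercase --unit swaps in the incremental
    # interval/unit arguments and lowercases the unit value.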
    def test_schedule_hbase_backup_incremental_upper_case(self):
        args = ' --cluster-id j-ABCD --dir s3://abc/ --type INCREMENTAL' +\
               ' --interval 10 --unit HOURS'
        cmdline = self.prefix + args

        steps = deepcopy(self.default_steps)
        args = steps[0]['HadoopJarStep']['Args']
        args[5] = '--incremental-backup-time-interval'
        args[7] = '--incremental-backup-time-unit'
        args[8] = 'hours'
        steps[0]['HadoopJarStep']['Args'] = args
        result = {'JobFlowId': 'j-ABCD', 'Steps': steps}

        self.assert_params_for_cmd(cmdline, result)
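
    # Lowercase incremental type: only the interval/unit argument names change.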
    def test_schedule_hbase_backup_incremental(self):
        args = ' --cluster-id j-ABCD --dir s3://abc/ --type incremental' +\
               ' --interval 10 --unit minutes'
        cmdline = self.prefix + args

        steps = deepcopy(self.default_steps)
        args = steps[0]['HadoopJarStep']['Args']
        args[5] = '--incremental-backup-time-interval'
        args[7] = '--incremental-backup-time-unit'
        steps[0]['HadoopJarStep']['Args'] = args

        result = {'JobFlowId': 'j-ABCD', 'Steps': steps}

        self.assert_params_for_cmd(cmdline, result)
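
    # An unrecognized --type value should fail with exit code 255 and a
    # descriptive error message.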
    def test_schedule_hbase_backup_wrong_type(self):
        args = ' --cluster-id j-ABCD --dir s3://abc/ --type wrong_type' +\
               ' --interval 10 --unit minutes'
        cmdline = self.prefix + args
        expected_error_msg = '\naws: error: invalid type. type should be' +\
                             ' either full or incremental.\n'
        result = self.run_cmd(cmdline, 255)

        self.assertEqual(expected_error_msg, result[1])
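
    # An unrecognized --unit value should likewise fail with exit code 255.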
    def test_schedule_hbase_backup_wrong_unit(self):
        args = ' --cluster-id j-ABCD --dir s3://abc/ --type full' +\
               ' --interval 10 --unit wrong_unit'
        cmdline = self.prefix + args
        expected_error_msg = '\naws: error: invalid unit. unit should be' +\
                             ' one of the following values: minutes,' +\
                             ' hours or days.\n'
        result = self.run_cmd(cmdline, 255)

        self.assertEqual(expected_error_msg, result[1])
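
    # --consistent adds the '--consistent' flag to the step arguments.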
    def test_schedule_hbase_backup_consistent(self):
        args = ' --cluster-id j-ABCD --dir s3://abc/ --type full' +\
               ' --interval 10 --unit minutes --consistent'
        cmdline = self.prefix + args

        steps = deepcopy(self.default_steps)
        steps[0]['HadoopJarStep']['Args'].insert(5, '--consistent')

        result = {'JobFlowId': 'j-ABCD', 'Steps': steps}
        self.assert_params_for_cmd(cmdline, result)
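
    # An explicit --start-time replaces the default 'now' value in the step.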
    def test_schedule_hbase_backup_start_time(self):
        args = ' --cluster-id j-ABCD --dir s3://abc/ --type full --interval' +\
               ' 10 --unit minutes --start-time 2014-04-18T10:43:24-07:00'
        cmdline = self.prefix + args

        steps = deepcopy(self.default_steps)
        steps[0]['HadoopJarStep']['Args'][10] = '2014-04-18T10:43:24-07:00'

        result = {'JobFlowId': 'j-ABCD', 'Steps': steps}
        self.assert_params_for_cmd(cmdline, result)
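
    # schedule-hbase-backup is not supported on release-label clusters; with
    # get_release_label mocked to return 'emr-4.0', the command should fail
    # with an explanatory error.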
    @mock.patch('awscli.customizations.emr.'
                'emrutils.get_release_label')
    def test_unsupported_command_on_release_based_cluster_error(
            self, grl_patch):
        grl_patch.return_value = 'emr-4.0'
        args = ' --cluster-id j-ABCD --dir s3://abc/ --type full' +\
               ' --interval 10 --unit minutes'
        cmdline = self.prefix + args
        expected_error_msg = ("\naws: error: schedule-hbase-backup"
                              " is not supported with 'emr-4.0' release.\n")
        result = self.run_cmd(cmdline, 255)

        self.assertEqual(result[1], expected_error_msg)


if __name__ == "__main__":
    unittest.main()