GitHub Repository: aws/aws-cli
Path: blob/develop/awscli/customizations/emr/constants.py
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

# Declare all the constants used by EMR in this file.

EC2_ROLE_NAME = "EMR_EC2_DefaultRole"
EMR_ROLE_NAME = "EMR_DefaultRole"
EMR_AUTOSCALING_ROLE_NAME = "EMR_AutoScaling_DefaultRole"
ROLE_ARN_PATTERN = "arn:{{region_suffix}}:iam::aws:policy/service-role/{{policy_name}}"
EC2_ROLE_POLICY_NAME = "AmazonElasticMapReduceforEC2Role"
EMR_ROLE_POLICY_NAME = "AmazonElasticMapReduceRole"
EMR_AUTOSCALING_ROLE_POLICY_NAME = "AmazonElasticMapReduceforAutoScalingRole"
EMR_AUTOSCALING_SERVICE_NAME = "application-autoscaling"
EMR_AUTOSCALING_SERVICE_PRINCIPAL = "application-autoscaling.amazonaws.com"
EC2_SERVICE_PRINCIPAL = "ec2.amazonaws.com"
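
# Illustrative sketch (not part of the original module): ROLE_ARN_PATTERN uses
# mustache-style placeholders, so a plain str.replace is enough to expand it.
# The "aws" partition suffix below is an assumption for the standard
# partition; other partitions (e.g. "aws-cn") would substitute their own.
_EXAMPLE_EMR_POLICY_ARN = (
    ROLE_ARN_PATTERN
    .replace("{{region_suffix}}", "aws")
    .replace("{{policy_name}}", EMR_ROLE_POLICY_NAME)
)
# -> "arn:aws:iam::aws:policy/service-role/AmazonElasticMapReduceRole"
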
# Action on failure
CONTINUE = 'CONTINUE'
CANCEL_AND_WAIT = 'CANCEL_AND_WAIT'
TERMINATE_CLUSTER = 'TERMINATE_CLUSTER'
DEFAULT_FAILURE_ACTION = CONTINUE

# Market type
SPOT = 'SPOT'
ON_DEMAND = 'ON_DEMAND'

SCRIPT_RUNNER_PATH = '/libs/script-runner/script-runner.jar'
COMMAND_RUNNER = 'command-runner.jar'
DEBUGGING_PATH = '/libs/state-pusher/0.1/fetch'
DEBUGGING_COMMAND = 'state-pusher-script'
DEBUGGING_NAME = 'Setup Hadoop Debugging'
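
# Illustrative sketch (an assumption, not part of the original module): on
# release-label clusters the debugging constants above could combine into a
# step in the shape the EMR AddJobFlowSteps API accepts, with
# command-runner.jar invoking the state-pusher script.
_EXAMPLE_DEBUGGING_STEP = {
    'Name': DEBUGGING_NAME,
    'ActionOnFailure': TERMINATE_CLUSTER,
    'HadoopJarStep': {
        'Jar': COMMAND_RUNNER,
        'Args': [DEBUGGING_COMMAND],
    },
}
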
CONFIG_HADOOP_PATH = '/bootstrap-actions/configure-hadoop'

# S3 copy bootstrap action
S3_GET_BA_NAME = 'S3 get'
S3_GET_BA_SRC = '-s'
S3_GET_BA_DEST = '-d'
S3_GET_BA_FORCE = '-f'

# EMRFS
EMRFS_BA_NAME = 'Setup EMRFS'
EMRFS_BA_ARG_KEY = '-e'
EMRFS_CONSISTENT_KEY = 'fs.s3.consistent'
EMRFS_SSE_KEY = 'fs.s3.enableServerSideEncryption'
EMRFS_RETRY_COUNT_KEY = 'fs.s3.consistent.retryCount'
EMRFS_RETRY_PERIOD_KEY = 'fs.s3.consistent.retryPeriodSeconds'
EMRFS_CSE_KEY = 'fs.s3.cse.enabled'
EMRFS_CSE_KMS_KEY_ID_KEY = 'fs.s3.cse.kms.keyId'
EMRFS_CSE_ENCRYPTION_MATERIALS_PROVIDER_KEY = \
    'fs.s3.cse.encryptionMaterialsProvider'
EMRFS_CSE_CUSTOM_PROVIDER_URI_KEY = 'fs.s3.cse.encryptionMaterialsProvider.uri'

EMRFS_CSE_KMS_PROVIDER_FULL_CLASS_NAME = ('com.amazon.ws.emr.hadoop.fs.cse.'
                                          'KMSEncryptionMaterialsProvider')
EMRFS_CSE_CUSTOM_S3_GET_BA_PATH = 'file:/usr/share/aws/emr/scripts/s3get'
EMRFS_CUSTOM_DEST_PATH = '/usr/share/aws/emr/auxlib'

EMRFS_SERVER_SIDE = 'SERVERSIDE'
EMRFS_CLIENT_SIDE = 'CLIENTSIDE'
EMRFS_KMS = 'KMS'
EMRFS_CUSTOM = 'CUSTOM'

EMRFS_SITE = 'emrfs-site'
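
# Illustrative sketch (hypothetical values, not part of the original module):
# the EMRFS bootstrap action passes each emrfs-site setting as its own
# '-e key=value' pair, so enabling consistent view with a custom retry policy
# could be encoded roughly as:
_EXAMPLE_EMRFS_BA_ARGS = [
    EMRFS_BA_ARG_KEY, '%s=%s' % (EMRFS_CONSISTENT_KEY, 'true'),
    EMRFS_BA_ARG_KEY, '%s=%s' % (EMRFS_RETRY_COUNT_KEY, '5'),
    EMRFS_BA_ARG_KEY, '%s=%s' % (EMRFS_RETRY_PERIOD_KEY, '30'),
]
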
MAX_BOOTSTRAP_ACTION_NUMBER = 16
BOOTSTRAP_ACTION_NAME = 'Bootstrap action'

HIVE_BASE_PATH = '/libs/hive'
HIVE_SCRIPT_PATH = '/libs/hive/hive-script'
HIVE_SCRIPT_COMMAND = 'hive-script'

PIG_BASE_PATH = '/libs/pig'
PIG_SCRIPT_PATH = '/libs/pig/pig-script'
PIG_SCRIPT_COMMAND = 'pig-script'

GANGLIA_INSTALL_BA_PATH = '/bootstrap-actions/install-ganglia'

# HBase
HBASE_INSTALL_BA_PATH = '/bootstrap-actions/setup-hbase'
HBASE_PATH_HADOOP1_INSTALL_JAR = '/home/hadoop/lib/hbase-0.92.0.jar'
HBASE_PATH_HADOOP2_INSTALL_JAR = '/home/hadoop/lib/hbase.jar'
HBASE_INSTALL_ARG = ['emr.hbase.backup.Main', '--start-master']
HBASE_JAR_PATH = '/home/hadoop/lib/hbase.jar'
HBASE_MAIN = 'emr.hbase.backup.Main'

# HBase commands
HBASE_RESTORE = '--restore'
HBASE_BACKUP_DIR_FOR_RESTORE = '--backup-dir-to-restore'
HBASE_BACKUP_VERSION_FOR_RESTORE = '--backup-version'
HBASE_BACKUP = '--backup'
HBASE_SCHEDULED_BACKUP = '--set-scheduled-backup'
HBASE_BACKUP_DIR = '--backup-dir'
HBASE_INCREMENTAL_BACKUP_INTERVAL = '--incremental-backup-time-interval'
HBASE_INCREMENTAL_BACKUP_INTERVAL_UNIT = '--incremental-backup-time-unit'
HBASE_FULL_BACKUP_INTERVAL = '--full-backup-time-interval'
HBASE_FULL_BACKUP_INTERVAL_UNIT = '--full-backup-time-unit'
HBASE_DISABLE_FULL_BACKUP = '--disable-full-backups'
HBASE_DISABLE_INCREMENTAL_BACKUP = '--disable-incremental-backups'
HBASE_BACKUP_STARTTIME = '--start-time'
HBASE_BACKUP_CONSISTENT = '--consistent'
HBASE_BACKUP_STEP_NAME = 'Backup HBase'
HBASE_RESTORE_STEP_NAME = 'Restore HBase'
HBASE_SCHEDULE_BACKUP_STEP_NAME = 'Modify Backup Schedule'
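
# Illustrative sketch (hypothetical bucket and values, not part of the
# original module): a schedule-backup step drives emr.hbase.backup.Main with
# the flag constants above; a weekly full backup to S3 might be expressed as:
_EXAMPLE_HBASE_SCHEDULE_ARGS = [
    HBASE_MAIN,
    HBASE_SCHEDULED_BACKUP, 'true',
    HBASE_BACKUP_DIR, 's3://mybucket/hbase-backups',
    HBASE_FULL_BACKUP_INTERVAL, '7',
    HBASE_FULL_BACKUP_INTERVAL_UNIT, 'days',
]
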
IMPALA_INSTALL_PATH = '/libs/impala/setup-impala'

# Step
HADOOP_STREAMING_PATH = '/home/hadoop/contrib/streaming/hadoop-streaming.jar'
HADOOP_STREAMING_COMMAND = 'hadoop-streaming'

CUSTOM_JAR = 'custom_jar'
HIVE = 'hive'
PIG = 'pig'
IMPALA = 'impala'
STREAMING = 'streaming'
GANGLIA = 'ganglia'
HBASE = 'hbase'
SPARK = 'spark'

DEFAULT_CUSTOM_JAR_STEP_NAME = 'Custom JAR'
DEFAULT_STREAMING_STEP_NAME = 'Streaming program'
DEFAULT_HIVE_STEP_NAME = 'Hive program'
DEFAULT_PIG_STEP_NAME = 'Pig program'
DEFAULT_IMPALA_STEP_NAME = 'Impala program'
DEFAULT_SPARK_STEP_NAME = 'Spark application'

ARGS = '--args'
RUN_HIVE_SCRIPT = '--run-hive-script'
HIVE_VERSIONS = '--hive-versions'
HIVE_STEP_CONFIG = 'HiveStepConfig'
RUN_PIG_SCRIPT = '--run-pig-script'
PIG_VERSIONS = '--pig-versions'
PIG_STEP_CONFIG = 'PigStepConfig'
RUN_IMPALA_SCRIPT = '--run-impala-script'
SPARK_SUBMIT_PATH = '/home/hadoop/spark/bin/spark-submit'
SPARK_SUBMIT_COMMAND = 'spark-submit'
IMPALA_STEP_CONFIG = 'ImpalaStepConfig'
SPARK_STEP_CONFIG = 'SparkStepConfig'
STREAMING_STEP_CONFIG = 'StreamingStepConfig'
CUSTOM_JAR_STEP_CONFIG = 'CustomJARStepConfig'
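
# Illustrative sketch (hypothetical application path, not part of the original
# module): on release-label clusters a Spark step typically runs through
# command-runner.jar, pairing SPARK_SUBMIT_COMMAND with the caller's
# arguments, e.g.:
_EXAMPLE_SPARK_STEP = {
    'Name': DEFAULT_SPARK_STEP_NAME,
    'ActionOnFailure': DEFAULT_FAILURE_ACTION,
    'HadoopJarStep': {
        'Jar': COMMAND_RUNNER,
        'Args': [SPARK_SUBMIT_COMMAND, 's3://mybucket/app.py'],
    },
}
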
INSTALL_PIG_ARG = '--install-pig'
INSTALL_PIG_NAME = 'Install Pig'
INSTALL_HIVE_ARG = '--install-hive'
INSTALL_HIVE_NAME = 'Install Hive'
HIVE_SITE_KEY = '--hive-site'
INSTALL_HIVE_SITE_ARG = '--install-hive-site'
INSTALL_HIVE_SITE_NAME = 'Install Hive Site Configuration'
BASE_PATH_ARG = '--base-path'
INSTALL_GANGLIA_NAME = 'Install Ganglia'
INSTALL_HBASE_NAME = 'Install HBase'
START_HBASE_NAME = 'Start HBase'
INSTALL_IMPALA_NAME = 'Install Impala'
IMPALA_VERSION = '--impala-version'
IMPALA_CONF = '--impala-conf'

FULL = 'full'
INCREMENTAL = 'incremental'

MINUTES = 'minutes'
HOURS = 'hours'
DAYS = 'days'
NOW = 'now'

TRUE = 'true'
FALSE = 'false'

EC2 = 'ec2'
EMR = 'elasticmapreduce'
APPLICATION_AUTOSCALING = 'application-autoscaling'
LATEST = 'latest'

APPLICATIONS = ["HIVE", "PIG", "HBASE", "GANGLIA", "IMPALA", "SPARK", "MAPR",
                "MAPR_M3", "MAPR_M5", "MAPR_M7"]

SSH_USER = 'hadoop'
STARTING_STATES = ['STARTING', 'BOOTSTRAPPING']
TERMINATED_STATES = ['TERMINATED', 'TERMINATING', 'TERMINATED_WITH_ERRORS']

# list-clusters
LIST_CLUSTERS_ACTIVE_STATES = ['STARTING', 'BOOTSTRAPPING', 'RUNNING',
                               'WAITING', 'TERMINATING']
LIST_CLUSTERS_TERMINATED_STATES = ['TERMINATED']
LIST_CLUSTERS_FAILED_STATES = ['TERMINATED_WITH_ERRORS']
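
# Illustrative sketch (boto3 is used here purely for illustration and is not a
# dependency of this module): the state lists above map directly to the
# ClusterStates filter of the EMR ListClusters API. Guarded so nothing runs on
# import.
if __name__ == '__main__':
    import boto3
    emr = boto3.client('emr')
    active = emr.list_clusters(ClusterStates=LIST_CLUSTERS_ACTIVE_STATES)
    print([cluster['Name'] for cluster in active['Clusters']])
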
INSTANCE_FLEET_TYPE = 'INSTANCE_FLEET'