# Source: blob/develop/awscli/customizations/cloudformation/artifact_exporter.py
# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

import logging
import os
import tempfile
import zipfile
import contextlib
import uuid
import shutil
from botocore.utils import set_value_from_jmespath

from awscli.compat import urlparse
from contextlib import contextmanager
from awscli.customizations.cloudformation import exceptions
from awscli.customizations.cloudformation.yamlhelper import yaml_dump, \
    yaml_parse
import jmespath


LOG = logging.getLogger(__name__)


def is_path_value_valid(path):
    """Return True if ``path`` is a usable path value (i.e. a string)."""
    return isinstance(path, str)


def make_abs_path(directory, path):
    """Resolve ``path`` relative to ``directory``; absolute paths pass through."""
    if is_path_value_valid(path) and not os.path.isabs(path):
        return os.path.normpath(os.path.join(directory, path))
    else:
        return path


def is_s3_url(url):
    """Return True if ``url`` parses as a valid ``s3://bucket/key`` URL."""
    try:
        parse_s3_url(url)
        return True
    except ValueError:
        return False


def is_local_folder(path):
    """Return True if ``path`` is a string naming an existing directory."""
    return is_path_value_valid(path) and os.path.isdir(path)


def is_local_file(path):
    """Return True if ``path`` is a string naming an existing regular file."""
    return is_path_value_valid(path) and os.path.isfile(path)


def is_zip_file(path):
    """Return True if ``path`` is a string naming an existing zip archive."""
    return (
        is_path_value_valid(path) and
        zipfile.is_zipfile(path))


def parse_s3_url(url,
                 bucket_name_property="Bucket",
                 object_key_property="Key",
                 version_property=None):
    """
    Parse an ``s3://bucket/key?versionId=...`` URL into a dict.

    :param url: Value to parse; must be a string starting with ``s3://``
    :param bucket_name_property: Key under which the bucket name is returned
    :param object_key_property: Key under which the object key is returned
    :param version_property: When not None, key under which a single
        ``versionId`` query parameter is returned
    :return: Dict with the bucket/key (and optionally version) entries
    :raise ValueError: if ``url`` is not a valid S3 URL
    """

    if isinstance(url, str) \
            and url.startswith("s3://"):

        # Python < 2.7.10 don't parse query parameters from URI with custom
        # scheme such as s3://blah/blah. As a workaround, remove scheme
        # altogether to trigger the parser "s3://foo/bar?v=1" => "//foo/bar?v=1"
        parsed = urlparse.urlparse(url[3:])
        query = urlparse.parse_qs(parsed.query)

        if parsed.netloc and parsed.path:
            result = dict()
            result[bucket_name_property] = parsed.netloc
            result[object_key_property] = parsed.path.lstrip('/')

            # If there is a query string that has a single versionId field,
            # set the object version and return
            if version_property is not None \
                    and 'versionId' in query \
                    and len(query['versionId']) == 1:
                result[version_property] = query['versionId'][0]

            return result

    raise ValueError("URL given to the parse method is not a valid S3 url "
                     "{0}".format(url))


def upload_local_artifacts(resource_id, resource_dict, property_name,
                           parent_dir, uploader):
    """
    Upload local artifacts referenced by the property at given resource and
    return S3 URL of the uploaded object. It is the responsibility of callers
    to ensure property value is a valid string.

    If path refers to a file, this method will upload the file. If path refers
    to a folder, this method will zip the folder and upload the zip to S3.
    If path is omitted, this method will zip the current working folder and
    upload.

    If path is already a path to S3 object, this method does nothing.

    :param resource_id: Id of the CloudFormation resource
    :param resource_dict: Dictionary containing resource definition
    :param property_name: Property name of CloudFormation resource where this
        local path is present
    :param parent_dir: Resolve all relative paths with respect to this
        directory
    :param uploader: Method to upload files to S3

    :return: S3 URL of the uploaded object
    :raise: ValueError if path is not a S3 URL or a local path
    """

    local_path = jmespath.search(property_name, resource_dict)

    if local_path is None:
        # Build the root directory and upload to S3
        local_path = parent_dir

    if is_s3_url(local_path):
        # A valid CloudFormation template will specify artifacts as S3 URLs.
        # This check is supporting the case where your resource does not
        # refer to local artifacts.
        # Nothing to do if property value is an S3 URL
        LOG.debug("Property {0} of {1} is already a S3 URL"
                  .format(property_name, resource_id))
        return local_path

    local_path = make_abs_path(parent_dir, local_path)

    # Or, pointing to a folder. Zip the folder and upload
    if is_local_folder(local_path):
        return zip_and_upload(local_path, uploader)

    # Path could be pointing to a file. Upload the file
    elif is_local_file(local_path):
        return uploader.upload_with_dedup(local_path)

    raise exceptions.InvalidLocalPathError(
        resource_id=resource_id,
        property_name=property_name,
        local_path=local_path)


def zip_and_upload(local_path, uploader):
    """Zip the folder at ``local_path`` and upload the archive to S3."""
    with zip_folder(local_path) as zip_file_path:
        return uploader.upload_with_dedup(zip_file_path)


@contextmanager
def zip_folder(folder_path):
    """
    Zip the entire folder and return a file to the zip. Use this inside
    a "with" statement to cleanup the zipfile after it is used.

    :param folder_path: Folder to archive
    :return: Name of the zipfile
    """

    filename = os.path.join(
        tempfile.gettempdir(), "data-" + uuid.uuid4().hex)

    zipfile_name = make_zip(filename, folder_path)
    try:
        yield zipfile_name
    finally:
        if os.path.exists(zipfile_name):
            os.remove(zipfile_name)


def make_zip(filename, source_root):
    """
    Create ``<filename>.zip`` containing everything under ``source_root``.

    Symlinks are followed; archive member names are relative to
    ``source_root``.

    :param filename: Path prefix for the zip file (without extension)
    :param source_root: Directory whose contents are archived
    :return: Path of the created zip file
    """
    zipfile_name = "{0}.zip".format(filename)
    source_root = os.path.abspath(source_root)
    with open(zipfile_name, 'wb') as f:
        zip_file = zipfile.ZipFile(f, 'w', zipfile.ZIP_DEFLATED)
        with contextlib.closing(zip_file) as zf:
            for root, dirs, files in os.walk(source_root, followlinks=True):
                for name in files:
                    full_path = os.path.join(root, name)
                    relative_path = os.path.relpath(
                        full_path, source_root)
                    zf.write(full_path, relative_path)

    return zipfile_name


@contextmanager
def mktempfile():
    """Yield a writable temp file handle; the file is removed on exit."""
    directory = tempfile.gettempdir()
    filename = os.path.join(directory, uuid.uuid4().hex)

    try:
        with open(filename, "w+") as handle:
            yield handle
    finally:
        if os.path.exists(filename):
            os.remove(filename)


def copy_to_temp_dir(filepath):
    """
    Copy ``filepath`` into a brand-new temp directory and return that
    directory. Callers are responsible for removing the directory.
    """
    tmp_dir = tempfile.mkdtemp()
    dst = os.path.join(tmp_dir, os.path.basename(filepath))
    shutil.copy(filepath, dst)
    return tmp_dir


class Resource(object):
    """
    Base class representing a CloudFormation resource that can be exported
    """

    RESOURCE_TYPE = None
    PROPERTY_NAME = None
    PACKAGE_NULL_PROPERTY = True
    # Set this property to True in base class if you want the exporter to zip
    # up the file before uploading. This is useful for Lambda functions.
    FORCE_ZIP = False

    def __init__(self, uploader):
        self.uploader = uploader

    def export(self, resource_id, resource_dict, parent_dir):
        """
        Export the artifact referenced by ``PROPERTY_NAME`` on this resource,
        replacing the local path in ``resource_dict`` with an S3 location.

        :raise exceptions.ExportFailedError: wraps any failure from do_export
        """
        if resource_dict is None:
            return

        property_value = jmespath.search(self.PROPERTY_NAME, resource_dict)

        if not property_value and not self.PACKAGE_NULL_PROPERTY:
            return

        if isinstance(property_value, dict):
            LOG.debug("Property {0} of {1} resource is not a URL"
                      .format(self.PROPERTY_NAME, resource_id))
            return

        # If property is a file but not a zip file, place file in temp
        # folder and send the temp folder to be zipped
        temp_dir = None
        if is_local_file(property_value) and not \
                is_zip_file(property_value) and self.FORCE_ZIP:
            temp_dir = copy_to_temp_dir(property_value)
            set_value_from_jmespath(resource_dict, self.PROPERTY_NAME,
                                    temp_dir)

        try:
            self.do_export(resource_id, resource_dict, parent_dir)

        except Exception as ex:
            LOG.debug("Unable to export", exc_info=ex)
            raise exceptions.ExportFailedError(
                resource_id=resource_id,
                property_name=self.PROPERTY_NAME,
                property_value=property_value,
                ex=ex)
        finally:
            if temp_dir:
                shutil.rmtree(temp_dir)

    def do_export(self, resource_id, resource_dict, parent_dir):
        """
        Default export action is to upload artifacts and set the property to
        S3 URL of the uploaded object
        """
        uploaded_url = upload_local_artifacts(resource_id, resource_dict,
                                              self.PROPERTY_NAME,
                                              parent_dir, self.uploader)
        set_value_from_jmespath(resource_dict, self.PROPERTY_NAME,
                                uploaded_url)


class ResourceWithS3UrlDict(Resource):
    """
    Represents CloudFormation resources that need the S3 URL to be specified
    as a dict like {Bucket: "", Key: "", Version: ""}
    """

    BUCKET_NAME_PROPERTY = None
    OBJECT_KEY_PROPERTY = None
    VERSION_PROPERTY = None

    def __init__(self, uploader):
        super(ResourceWithS3UrlDict, self).__init__(uploader)

    def do_export(self, resource_id, resource_dict, parent_dir):
        """
        Upload to S3 and set property to a dict representing the S3 url
        of the uploaded object
        """

        artifact_s3_url = \
            upload_local_artifacts(resource_id, resource_dict,
                                   self.PROPERTY_NAME,
                                   parent_dir, self.uploader)

        parsed_url = parse_s3_url(
            artifact_s3_url,
            bucket_name_property=self.BUCKET_NAME_PROPERTY,
            object_key_property=self.OBJECT_KEY_PROPERTY,
            version_property=self.VERSION_PROPERTY)
        set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, parsed_url)


class ServerlessFunctionResource(Resource):
    RESOURCE_TYPE = "AWS::Serverless::Function"
    PROPERTY_NAME = "CodeUri"
    FORCE_ZIP = True


class ServerlessApiResource(Resource):
    RESOURCE_TYPE = "AWS::Serverless::Api"
    PROPERTY_NAME = "DefinitionUri"
    # Don't package the directory if DefinitionUri is omitted.
    # Necessary to support DefinitionBody
    PACKAGE_NULL_PROPERTY = False


class GraphQLSchemaResource(Resource):
    RESOURCE_TYPE = "AWS::AppSync::GraphQLSchema"
    PROPERTY_NAME = "DefinitionS3Location"
    # Don't package the directory if DefinitionS3Location is omitted.
    # Necessary to support Definition
    PACKAGE_NULL_PROPERTY = False


class AppSyncResolverRequestTemplateResource(Resource):
    RESOURCE_TYPE = "AWS::AppSync::Resolver"
    PROPERTY_NAME = "RequestMappingTemplateS3Location"
    # Don't package the directory if RequestMappingTemplateS3Location is
    # omitted. Necessary to support RequestMappingTemplate
    PACKAGE_NULL_PROPERTY = False


class AppSyncResolverResponseTemplateResource(Resource):
    RESOURCE_TYPE = "AWS::AppSync::Resolver"
    PROPERTY_NAME = "ResponseMappingTemplateS3Location"
    # Don't package the directory if ResponseMappingTemplateS3Location is
    # omitted. Necessary to support ResponseMappingTemplate
    PACKAGE_NULL_PROPERTY = False


class AppSyncFunctionConfigurationRequestTemplateResource(Resource):
    RESOURCE_TYPE = "AWS::AppSync::FunctionConfiguration"
    PROPERTY_NAME = "RequestMappingTemplateS3Location"
    # Don't package the directory if RequestMappingTemplateS3Location is
    # omitted. Necessary to support RequestMappingTemplate
    PACKAGE_NULL_PROPERTY = False


class AppSyncFunctionConfigurationResponseTemplateResource(Resource):
    RESOURCE_TYPE = "AWS::AppSync::FunctionConfiguration"
    PROPERTY_NAME = "ResponseMappingTemplateS3Location"
    # Don't package the directory if ResponseMappingTemplateS3Location is
    # omitted. Necessary to support ResponseMappingTemplate
    PACKAGE_NULL_PROPERTY = False


class LambdaFunctionResource(ResourceWithS3UrlDict):
    RESOURCE_TYPE = "AWS::Lambda::Function"
    PROPERTY_NAME = "Code"
    BUCKET_NAME_PROPERTY = "S3Bucket"
    OBJECT_KEY_PROPERTY = "S3Key"
    VERSION_PROPERTY = "S3ObjectVersion"
    FORCE_ZIP = True


class ApiGatewayRestApiResource(ResourceWithS3UrlDict):
    RESOURCE_TYPE = "AWS::ApiGateway::RestApi"
    PROPERTY_NAME = "BodyS3Location"
    PACKAGE_NULL_PROPERTY = False
    BUCKET_NAME_PROPERTY = "Bucket"
    OBJECT_KEY_PROPERTY = "Key"
    VERSION_PROPERTY = "Version"


class ElasticBeanstalkApplicationVersion(ResourceWithS3UrlDict):
    RESOURCE_TYPE = "AWS::ElasticBeanstalk::ApplicationVersion"
    PROPERTY_NAME = "SourceBundle"
    BUCKET_NAME_PROPERTY = "S3Bucket"
    OBJECT_KEY_PROPERTY = "S3Key"
    VERSION_PROPERTY = None


class LambdaLayerVersionResource(ResourceWithS3UrlDict):
    RESOURCE_TYPE = "AWS::Lambda::LayerVersion"
    PROPERTY_NAME = "Content"
    BUCKET_NAME_PROPERTY = "S3Bucket"
    OBJECT_KEY_PROPERTY = "S3Key"
    VERSION_PROPERTY = "S3ObjectVersion"
    FORCE_ZIP = True


class ServerlessLayerVersionResource(Resource):
    RESOURCE_TYPE = "AWS::Serverless::LayerVersion"
    PROPERTY_NAME = "ContentUri"
    FORCE_ZIP = True


class ServerlessRepoApplicationReadme(Resource):
    RESOURCE_TYPE = "AWS::ServerlessRepo::Application"
    PROPERTY_NAME = "ReadmeUrl"
    PACKAGE_NULL_PROPERTY = False


class ServerlessRepoApplicationLicense(Resource):
    RESOURCE_TYPE = "AWS::ServerlessRepo::Application"
    PROPERTY_NAME = "LicenseUrl"
    PACKAGE_NULL_PROPERTY = False


class StepFunctionsStateMachineDefinitionResource(ResourceWithS3UrlDict):
    RESOURCE_TYPE = "AWS::StepFunctions::StateMachine"
    PROPERTY_NAME = "DefinitionS3Location"
    BUCKET_NAME_PROPERTY = "Bucket"
    OBJECT_KEY_PROPERTY = "Key"
    VERSION_PROPERTY = "Version"
    PACKAGE_NULL_PROPERTY = False


class ServerlessStateMachineDefinitionResource(ResourceWithS3UrlDict):
    RESOURCE_TYPE = "AWS::Serverless::StateMachine"
    PROPERTY_NAME = "DefinitionUri"
    BUCKET_NAME_PROPERTY = "Bucket"
    OBJECT_KEY_PROPERTY = "Key"
    VERSION_PROPERTY = "Version"
    PACKAGE_NULL_PROPERTY = False


class CloudFormationStackResource(Resource):
    """
    Represents CloudFormation::Stack resource that can refer to a nested
    stack template via TemplateURL property.
    """
    RESOURCE_TYPE = "AWS::CloudFormation::Stack"
    PROPERTY_NAME = "TemplateURL"

    def __init__(self, uploader):
        super(CloudFormationStackResource, self).__init__(uploader)

    def do_export(self, resource_id, resource_dict, parent_dir):
        """
        If the nested stack template is valid, this method will
        export on the nested template, upload the exported template to S3
        and set property to URL of the uploaded S3 template
        """

        template_path = resource_dict.get(self.PROPERTY_NAME, None)

        if template_path is None or is_s3_url(template_path) or \
                template_path.startswith("http://") or \
                template_path.startswith("https://"):
            # Nothing to do
            return

        abs_template_path = make_abs_path(parent_dir, template_path)
        if not is_local_file(abs_template_path):
            raise exceptions.InvalidTemplateUrlParameterError(
                property_name=self.PROPERTY_NAME,
                resource_id=resource_id,
                template_path=abs_template_path)

        exported_template_dict = \
            Template(template_path, parent_dir, self.uploader).export()

        exported_template_str = yaml_dump(exported_template_dict)

        with mktempfile() as temporary_file:
            temporary_file.write(exported_template_str)
            temporary_file.flush()

            url = self.uploader.upload_with_dedup(
                temporary_file.name, "template")

            # TemplateUrl property requires S3 URL to be in path-style format
            parts = parse_s3_url(url, version_property="Version")
            s3_path_url = self.uploader.to_path_style_s3_url(
                parts["Key"], parts.get("Version", None))
            set_value_from_jmespath(resource_dict, self.PROPERTY_NAME,
                                    s3_path_url)


class ServerlessApplicationResource(CloudFormationStackResource):
    """
    Represents Serverless::Application resource that can refer to a nested
    app template via Location property.
    """
    RESOURCE_TYPE = "AWS::Serverless::Application"
    PROPERTY_NAME = "Location"


class GlueJobCommandScriptLocationResource(Resource):
    """
    Represents Glue::Job resource.
    """
    RESOURCE_TYPE = "AWS::Glue::Job"
    # Note the PROPERTY_NAME includes a '.' implying it's nested.
    PROPERTY_NAME = "Command.ScriptLocation"


class CodeCommitRepositoryS3Resource(ResourceWithS3UrlDict):
    """
    Represents CodeCommit::Repository resource.
    """
    RESOURCE_TYPE = "AWS::CodeCommit::Repository"
    PROPERTY_NAME = "Code.S3"
    BUCKET_NAME_PROPERTY = "Bucket"
    OBJECT_KEY_PROPERTY = "Key"
    VERSION_PROPERTY = "ObjectVersion"
    # Don't package the directory if S3 is omitted.
    PACKAGE_NULL_PROPERTY = False
    FORCE_ZIP = True


RESOURCES_EXPORT_LIST = [
    ServerlessFunctionResource,
    ServerlessApiResource,
    GraphQLSchemaResource,
    AppSyncResolverRequestTemplateResource,
    AppSyncResolverResponseTemplateResource,
    AppSyncFunctionConfigurationRequestTemplateResource,
    AppSyncFunctionConfigurationResponseTemplateResource,
    ApiGatewayRestApiResource,
    LambdaFunctionResource,
    ElasticBeanstalkApplicationVersion,
    CloudFormationStackResource,
    ServerlessApplicationResource,
    ServerlessLayerVersionResource,
    LambdaLayerVersionResource,
    GlueJobCommandScriptLocationResource,
    StepFunctionsStateMachineDefinitionResource,
    ServerlessStateMachineDefinitionResource,
    CodeCommitRepositoryS3Resource
]

METADATA_EXPORT_LIST = [
    ServerlessRepoApplicationReadme,
    ServerlessRepoApplicationLicense
]


def include_transform_export_handler(template_dict, uploader, parent_dir):
    """
    Upload the local file referenced by an ``AWS::Include`` transform and
    rewrite its ``Location`` parameter to the uploaded S3 URI.

    :param template_dict: The ``Fn::Transform`` value dict
    :param uploader: S3 uploader used to upload the included file
    :param parent_dir: Directory against which relative locations resolve
    :return: The (possibly modified) ``template_dict``
    :raise exceptions.InvalidLocalPathError: if the local path doesn't exist
    """
    if template_dict.get("Name", None) != "AWS::Include":
        return template_dict

    include_location = template_dict.get("Parameters", {}).get("Location",
                                                               None)
    if not include_location \
            or not is_path_value_valid(include_location) \
            or is_s3_url(include_location):
        # `include_location` is either empty, or not a string, or an S3 URI
        return template_dict

    # We are confident at this point that `include_location` is a string
    # containing the local path
    abs_include_location = os.path.join(parent_dir, include_location)
    if is_local_file(abs_include_location):
        template_dict["Parameters"]["Location"] = \
            uploader.upload_with_dedup(abs_include_location)
    else:
        raise exceptions.InvalidLocalPathError(
            resource_id="AWS::Include",
            property_name="Location",
            local_path=abs_include_location)

    return template_dict


GLOBAL_EXPORT_DICT = {
    "Fn::Transform": include_transform_export_handler
}


class Template(object):
    """
    Class to export a CloudFormation template
    """

    def __init__(self, template_path, parent_dir, uploader,
                 resources_to_export=RESOURCES_EXPORT_LIST,
                 metadata_to_export=METADATA_EXPORT_LIST):
        """
        Reads the template and makes it ready for export
        """

        if not (is_local_folder(parent_dir) and os.path.isabs(parent_dir)):
            raise ValueError("parent_dir parameter must be "
                             "an absolute path to a folder {0}"
                             .format(parent_dir))

        abs_template_path = make_abs_path(parent_dir, template_path)
        template_dir = os.path.dirname(abs_template_path)

        with open(abs_template_path, "r") as handle:
            template_str = handle.read()

        self.template_dict = yaml_parse(template_str)
        self.template_dir = template_dir
        self.resources_to_export = resources_to_export
        self.metadata_to_export = metadata_to_export
        self.uploader = uploader

    def export_global_artifacts(self, template_dict):
        """
        Template params such as AWS::Include transforms are not specific to
        any resource type but contain artifacts that should be exported,
        here we iterate through the template dict and export params with a
        handler defined in GLOBAL_EXPORT_DICT
        """
        for key, val in template_dict.items():
            if key in GLOBAL_EXPORT_DICT:
                template_dict[key] = GLOBAL_EXPORT_DICT[key](
                    val, self.uploader, self.template_dir)
            elif isinstance(val, dict):
                self.export_global_artifacts(val)
            elif isinstance(val, list):
                for item in val:
                    if isinstance(item, dict):
                        self.export_global_artifacts(item)
        return template_dict

    def export_metadata(self, template_dict):
        """
        Exports the local artifacts referenced by the metadata section in
        the given template to an s3 bucket.

        :return: The template with references to artifacts that have been
            exported to s3.
        """
        if "Metadata" not in template_dict:
            return template_dict

        for metadata_type, metadata_dict in template_dict["Metadata"].items():
            for exporter_class in self.metadata_to_export:
                if exporter_class.RESOURCE_TYPE != metadata_type:
                    continue

                exporter = exporter_class(self.uploader)
                exporter.export(metadata_type, metadata_dict,
                                self.template_dir)

        return template_dict

    def export(self):
        """
        Exports the local artifacts referenced by the given template to an
        s3 bucket.

        :return: The template with references to artifacts that have been
            exported to s3.
        """
        self.template_dict = self.export_metadata(self.template_dict)

        if "Resources" not in self.template_dict:
            return self.template_dict

        self.template_dict = self.export_global_artifacts(self.template_dict)

        self.export_resources(self.template_dict["Resources"])

        return self.template_dict

    def export_resources(self, resource_dict):
        """
        Export every supported resource in ``resource_dict``, recursing into
        ``Fn::ForEach::`` intrinsic blocks (whose third element is a nested
        resource map).
        """
        for resource_id, resource in resource_dict.items():

            if resource_id.startswith("Fn::ForEach::"):
                if not isinstance(resource, list) or len(resource) != 3:
                    raise exceptions.InvalidForEachIntrinsicFunctionError(
                        resource_id=resource_id)
                self.export_resources(resource[2])
                continue

            resource_type = resource.get("Type", None)
            resource_dict = resource.get("Properties", None)

            for exporter_class in self.resources_to_export:
                if exporter_class.RESOURCE_TYPE != resource_type:
                    continue

                # Export code resources
                exporter = exporter_class(self.uploader)
                exporter.export(resource_id, resource_dict, self.template_dir)