GitHub Repository: aws/aws-cli
Path: blob/develop/awscli/customizations/cloudformation/artifact_exporter.py
# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

import logging
import os
import tempfile
import zipfile
import contextlib
import uuid
import shutil
from botocore.utils import set_value_from_jmespath

from awscli.compat import urlparse
from contextlib import contextmanager
from awscli.customizations.cloudformation import exceptions
from awscli.customizations.cloudformation.yamlhelper import yaml_dump, \
    yaml_parse
import jmespath


LOG = logging.getLogger(__name__)


def is_path_value_valid(path):
    return isinstance(path, str)


def make_abs_path(directory, path):
    if is_path_value_valid(path) and not os.path.isabs(path):
        return os.path.normpath(os.path.join(directory, path))
    else:
        return path


def is_s3_url(url):
    try:
        parse_s3_url(url)
        return True
    except ValueError:
        return False


def is_local_folder(path):
    return is_path_value_valid(path) and os.path.isdir(path)


def is_local_file(path):
    return is_path_value_valid(path) and os.path.isfile(path)


def is_zip_file(path):
    return (
        is_path_value_valid(path) and
        zipfile.is_zipfile(path))

def parse_s3_url(url,
                 bucket_name_property="Bucket",
                 object_key_property="Key",
                 version_property=None):

    if isinstance(url, str) \
            and url.startswith("s3://"):

        # Python < 2.7.10 doesn't parse query parameters from URIs with a
        # custom scheme such as s3://blah/blah. As a workaround, remove the
        # scheme altogether to trigger the parser: "s3://foo/bar?v=1" => "//foo/bar?v=1"
        parsed = urlparse.urlparse(url[3:])
        query = urlparse.parse_qs(parsed.query)

        if parsed.netloc and parsed.path:
            result = dict()
            result[bucket_name_property] = parsed.netloc
            result[object_key_property] = parsed.path.lstrip('/')

            # If there is a query string that has a single versionId field,
            # set the object version and return
            if version_property is not None \
                    and 'versionId' in query \
                    and len(query['versionId']) == 1:
                result[version_property] = query['versionId'][0]

            return result

    raise ValueError("URL given to the parse method is not a valid S3 url "
                     "{0}".format(url))

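# Example (illustrative; the bucket and key below are made up): given
# "s3://my-bucket/path/code.zip?versionId=abc123", parse_s3_url() strips the
# scheme, reads the netloc, path and query, and returns a dict keyed by the
# requested property names:
#
#   parse_s3_url("s3://my-bucket/path/code.zip?versionId=abc123",
#                bucket_name_property="S3Bucket",
#                object_key_property="S3Key",
#                version_property="S3ObjectVersion")
#   # => {"S3Bucket": "my-bucket", "S3Key": "path/code.zip",
#   #     "S3ObjectVersion": "abc123"}
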
def upload_local_artifacts(resource_id, resource_dict, property_name,
                           parent_dir, uploader):
    """
    Upload local artifacts referenced by the property at the given resource
    and return the S3 URL of the uploaded object. It is the responsibility of
    callers to ensure the property value is a valid string.

    If path refers to a file, this method will upload the file. If path refers
    to a folder, this method will zip the folder and upload the zip to S3.
    If path is omitted, this method will zip the current working folder and
    upload.

    If path already points to an S3 object, this method does nothing.

    :param resource_id: Id of the CloudFormation resource
    :param resource_dict: Dictionary containing resource definition
    :param property_name: Property name of CloudFormation resource where this
        local path is present
    :param parent_dir: Resolve all relative paths with respect to this
        directory
    :param uploader: Method to upload files to S3

    :return: S3 URL of the uploaded object
    :raise: ValueError if path is not an S3 URL or a local path
    """

    local_path = jmespath.search(property_name, resource_dict)

    if local_path is None:
        # Build the root directory and upload to S3
        local_path = parent_dir

    if is_s3_url(local_path):
        # A valid CloudFormation template will specify artifacts as S3 URLs.
        # This check supports the case where your resource does not refer to
        # local artifacts. Nothing to do if the property value is already an
        # S3 URL.
        LOG.debug("Property {0} of {1} is already a S3 URL"
                  .format(property_name, resource_id))
        return local_path

    local_path = make_abs_path(parent_dir, local_path)

    # The path may point to a folder. Zip the folder and upload
    if is_local_folder(local_path):
        return zip_and_upload(local_path, uploader)

    # Path could be pointing to a file. Upload the file
    elif is_local_file(local_path):
        return uploader.upload_with_dedup(local_path)

    raise exceptions.InvalidLocalPathError(
        resource_id=resource_id,
        property_name=property_name,
        local_path=local_path)

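# Example (illustrative; names are hypothetical): for a resource dict such as
# {"CodeUri": "./src"} with property_name="CodeUri", upload_local_artifacts()
# resolves "./src" against parent_dir, zips and uploads it if it is a folder
# (or uploads the file directly), and returns the "s3://bucket/key"-style URL
# produced by uploader.upload_with_dedup(). Callers such as
# Resource.do_export() below then write that URL back into the resource dict.
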
def zip_and_upload(local_path, uploader):
    with zip_folder(local_path) as zipfile:
        return uploader.upload_with_dedup(zipfile)


@contextmanager
def zip_folder(folder_path):
    """
    Zip the entire folder and return the path to the zip file. Use this inside
    a "with" statement to clean up the zip file after it is used.

    :param folder_path:
    :return: Name of the zip file
    """

    filename = os.path.join(
        tempfile.gettempdir(), "data-" + uuid.uuid4().hex)

    zipfile_name = make_zip(filename, folder_path)
    try:
        yield zipfile_name
    finally:
        if os.path.exists(zipfile_name):
            os.remove(zipfile_name)


def make_zip(filename, source_root):
    zipfile_name = "{0}.zip".format(filename)
    source_root = os.path.abspath(source_root)
    with open(zipfile_name, 'wb') as f:
        zip_file = zipfile.ZipFile(f, 'w', zipfile.ZIP_DEFLATED)
        with contextlib.closing(zip_file) as zf:
            for root, dirs, files in os.walk(source_root, followlinks=True):
                for filename in files:
                    full_path = os.path.join(root, filename)
                    relative_path = os.path.relpath(
                        full_path, source_root)
                    zf.write(full_path, relative_path)

    return zipfile_name

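# Example (illustrative): zip_folder() builds a temporary archive under
# tempfile.gettempdir() and removes it when the "with" block exits:
#
#   with zip_folder("/path/to/build-dir") as zip_path:
#       s3_url = uploader.upload_with_dedup(zip_path)
#   # zip_path has been deleted at this point
#
# "/path/to/build-dir" and "uploader" are placeholders for a real folder and
# an S3 uploader instance.
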
@contextmanager
def mktempfile():
    directory = tempfile.gettempdir()
    filename = os.path.join(directory, uuid.uuid4().hex)

    try:
        with open(filename, "w+") as handle:
            yield handle
    finally:
        if os.path.exists(filename):
            os.remove(filename)


def copy_to_temp_dir(filepath):
    tmp_dir = tempfile.mkdtemp()
    dst = os.path.join(tmp_dir, os.path.basename(filepath))
    shutil.copy(filepath, dst)
    return tmp_dir


class Resource(object):
    """
    Base class representing a CloudFormation resource that can be exported
    """

    RESOURCE_TYPE = None
    PROPERTY_NAME = None
    PACKAGE_NULL_PROPERTY = True
    # Set this property to True in a subclass if you want the exporter to zip
    # up the file before uploading. This is useful for Lambda functions.
    FORCE_ZIP = False

    def __init__(self, uploader):
        self.uploader = uploader

    def export(self, resource_id, resource_dict, parent_dir):
        if resource_dict is None:
            return

        property_value = jmespath.search(self.PROPERTY_NAME, resource_dict)

        if not property_value and not self.PACKAGE_NULL_PROPERTY:
            return

        if isinstance(property_value, dict):
            LOG.debug("Property {0} of {1} resource is not a URL"
                      .format(self.PROPERTY_NAME, resource_id))
            return

        # If property is a file but not a zip file, place file in temp
        # folder and send the temp folder to be zipped
        temp_dir = None
        if is_local_file(property_value) and not \
                is_zip_file(property_value) and self.FORCE_ZIP:
            temp_dir = copy_to_temp_dir(property_value)
            set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, temp_dir)

        try:
            self.do_export(resource_id, resource_dict, parent_dir)

        except Exception as ex:
            LOG.debug("Unable to export", exc_info=ex)
            raise exceptions.ExportFailedError(
                resource_id=resource_id,
                property_name=self.PROPERTY_NAME,
                property_value=property_value,
                ex=ex)
        finally:
            if temp_dir:
                shutil.rmtree(temp_dir)

    def do_export(self, resource_id, resource_dict, parent_dir):
        """
        Default export action is to upload artifacts and set the property to
        the S3 URL of the uploaded object
        """
        uploaded_url = upload_local_artifacts(resource_id, resource_dict,
                                              self.PROPERTY_NAME,
                                              parent_dir, self.uploader)
        set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, uploaded_url)

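# Example (illustrative): a subclass only declares which resource type and
# property it handles; Resource.export() does the rest. For a hypothetical
# resource dict {"CodeUri": "./handler"} handled by ServerlessFunctionResource
# (defined below), export() uploads the artifact and rewrites the dict in
# place so that "CodeUri" holds the returned S3 URL.
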
class ResourceWithS3UrlDict(Resource):
    """
    Represents CloudFormation resources that need the S3 URL to be specified
    as a dict like {Bucket: "", Key: "", Version: ""}
    """

    BUCKET_NAME_PROPERTY = None
    OBJECT_KEY_PROPERTY = None
    VERSION_PROPERTY = None

    def __init__(self, uploader):
        super(ResourceWithS3UrlDict, self).__init__(uploader)

    def do_export(self, resource_id, resource_dict, parent_dir):
        """
        Upload to S3 and set the property to a dict representing the S3 URL
        of the uploaded object
        """

        artifact_s3_url = \
            upload_local_artifacts(resource_id, resource_dict,
                                   self.PROPERTY_NAME,
                                   parent_dir, self.uploader)

        parsed_url = parse_s3_url(
            artifact_s3_url,
            bucket_name_property=self.BUCKET_NAME_PROPERTY,
            object_key_property=self.OBJECT_KEY_PROPERTY,
            version_property=self.VERSION_PROPERTY)
        set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, parsed_url)

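# Example (illustrative): for LambdaFunctionResource (defined below), a local
# path such as {"Code": "./lambda-src"} is uploaded and then replaced with the
# dict form CloudFormation expects, e.g.
# {"Code": {"S3Bucket": "<bucket>", "S3Key": "<key>"}}, plus "S3ObjectVersion"
# when the uploaded URL carries a versionId query parameter.
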
class ServerlessFunctionResource(Resource):
    RESOURCE_TYPE = "AWS::Serverless::Function"
    PROPERTY_NAME = "CodeUri"
    FORCE_ZIP = True


class ServerlessApiResource(Resource):
    RESOURCE_TYPE = "AWS::Serverless::Api"
    PROPERTY_NAME = "DefinitionUri"
    # Don't package the directory if DefinitionUri is omitted.
    # Necessary to support DefinitionBody
    PACKAGE_NULL_PROPERTY = False


class GraphQLSchemaResource(Resource):
    RESOURCE_TYPE = "AWS::AppSync::GraphQLSchema"
    PROPERTY_NAME = "DefinitionS3Location"
    # Don't package the directory if DefinitionS3Location is omitted.
    # Necessary to support Definition
    PACKAGE_NULL_PROPERTY = False


class AppSyncResolverRequestTemplateResource(Resource):
    RESOURCE_TYPE = "AWS::AppSync::Resolver"
    PROPERTY_NAME = "RequestMappingTemplateS3Location"
    # Don't package the directory if RequestMappingTemplateS3Location is omitted.
    # Necessary to support RequestMappingTemplate
    PACKAGE_NULL_PROPERTY = False


class AppSyncResolverResponseTemplateResource(Resource):
    RESOURCE_TYPE = "AWS::AppSync::Resolver"
    PROPERTY_NAME = "ResponseMappingTemplateS3Location"
    # Don't package the directory if ResponseMappingTemplateS3Location is omitted.
    # Necessary to support ResponseMappingTemplate
    PACKAGE_NULL_PROPERTY = False


class AppSyncFunctionConfigurationRequestTemplateResource(Resource):
    RESOURCE_TYPE = "AWS::AppSync::FunctionConfiguration"
    PROPERTY_NAME = "RequestMappingTemplateS3Location"
    # Don't package the directory if RequestMappingTemplateS3Location is omitted.
    # Necessary to support RequestMappingTemplate
    PACKAGE_NULL_PROPERTY = False


class AppSyncFunctionConfigurationResponseTemplateResource(Resource):
    RESOURCE_TYPE = "AWS::AppSync::FunctionConfiguration"
    PROPERTY_NAME = "ResponseMappingTemplateS3Location"
    # Don't package the directory if ResponseMappingTemplateS3Location is omitted.
    # Necessary to support ResponseMappingTemplate
    PACKAGE_NULL_PROPERTY = False


class LambdaFunctionResource(ResourceWithS3UrlDict):
    RESOURCE_TYPE = "AWS::Lambda::Function"
    PROPERTY_NAME = "Code"
    BUCKET_NAME_PROPERTY = "S3Bucket"
    OBJECT_KEY_PROPERTY = "S3Key"
    VERSION_PROPERTY = "S3ObjectVersion"
    FORCE_ZIP = True


class ApiGatewayRestApiResource(ResourceWithS3UrlDict):
    RESOURCE_TYPE = "AWS::ApiGateway::RestApi"
    PROPERTY_NAME = "BodyS3Location"
    PACKAGE_NULL_PROPERTY = False
    BUCKET_NAME_PROPERTY = "Bucket"
    OBJECT_KEY_PROPERTY = "Key"
    VERSION_PROPERTY = "Version"


class ElasticBeanstalkApplicationVersion(ResourceWithS3UrlDict):
    RESOURCE_TYPE = "AWS::ElasticBeanstalk::ApplicationVersion"
    PROPERTY_NAME = "SourceBundle"
    BUCKET_NAME_PROPERTY = "S3Bucket"
    OBJECT_KEY_PROPERTY = "S3Key"
    VERSION_PROPERTY = None


class LambdaLayerVersionResource(ResourceWithS3UrlDict):
    RESOURCE_TYPE = "AWS::Lambda::LayerVersion"
    PROPERTY_NAME = "Content"
    BUCKET_NAME_PROPERTY = "S3Bucket"
    OBJECT_KEY_PROPERTY = "S3Key"
    VERSION_PROPERTY = "S3ObjectVersion"
    FORCE_ZIP = True


class ServerlessLayerVersionResource(Resource):
    RESOURCE_TYPE = "AWS::Serverless::LayerVersion"
    PROPERTY_NAME = "ContentUri"
    FORCE_ZIP = True


class ServerlessRepoApplicationReadme(Resource):
    RESOURCE_TYPE = "AWS::ServerlessRepo::Application"
    PROPERTY_NAME = "ReadmeUrl"
    PACKAGE_NULL_PROPERTY = False


class ServerlessRepoApplicationLicense(Resource):
    RESOURCE_TYPE = "AWS::ServerlessRepo::Application"
    PROPERTY_NAME = "LicenseUrl"
    PACKAGE_NULL_PROPERTY = False


class StepFunctionsStateMachineDefinitionResource(ResourceWithS3UrlDict):
    RESOURCE_TYPE = "AWS::StepFunctions::StateMachine"
    PROPERTY_NAME = "DefinitionS3Location"
    BUCKET_NAME_PROPERTY = "Bucket"
    OBJECT_KEY_PROPERTY = "Key"
    VERSION_PROPERTY = "Version"
    PACKAGE_NULL_PROPERTY = False


class ServerlessStateMachineDefinitionResource(ResourceWithS3UrlDict):
    RESOURCE_TYPE = "AWS::Serverless::StateMachine"
    PROPERTY_NAME = "DefinitionUri"
    BUCKET_NAME_PROPERTY = "Bucket"
    OBJECT_KEY_PROPERTY = "Key"
    VERSION_PROPERTY = "Version"
    PACKAGE_NULL_PROPERTY = False

class CloudFormationStackResource(Resource):
    """
    Represents a CloudFormation::Stack resource that can refer to a nested
    stack template via the TemplateURL property.
    """
    RESOURCE_TYPE = "AWS::CloudFormation::Stack"
    PROPERTY_NAME = "TemplateURL"

    def __init__(self, uploader):
        super(CloudFormationStackResource, self).__init__(uploader)

    def do_export(self, resource_id, resource_dict, parent_dir):
        """
        If the nested stack template is valid, this method will run export
        on the nested template, upload the exported template to S3, and set
        the property to the URL of the uploaded S3 template
        """

        template_path = resource_dict.get(self.PROPERTY_NAME, None)

        if template_path is None or is_s3_url(template_path) or \
                template_path.startswith("http://") or \
                template_path.startswith("https://"):
            # Nothing to do
            return

        abs_template_path = make_abs_path(parent_dir, template_path)
        if not is_local_file(abs_template_path):
            raise exceptions.InvalidTemplateUrlParameterError(
                property_name=self.PROPERTY_NAME,
                resource_id=resource_id,
                template_path=abs_template_path)

        exported_template_dict = \
            Template(template_path, parent_dir, self.uploader).export()

        exported_template_str = yaml_dump(exported_template_dict)

        with mktempfile() as temporary_file:
            temporary_file.write(exported_template_str)
            temporary_file.flush()

            url = self.uploader.upload_with_dedup(
                temporary_file.name, "template")

            # TemplateUrl property requires the S3 URL to be in path-style format
            parts = parse_s3_url(url, version_property="Version")
            s3_path_url = self.uploader.to_path_style_s3_url(
                parts["Key"], parts.get("Version", None))
            set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, s3_path_url)

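# Example (illustrative): a nested stack whose properties contain
# {"TemplateURL": "./nested/template.yaml"} is exported recursively (a new
# Template object is created for the nested file), the exported YAML is
# uploaded, and TemplateURL is replaced with the path-style S3 URL returned
# by uploader.to_path_style_s3_url(). Values that are already s3://, http://
# or https:// URLs are left untouched.
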
class ServerlessApplicationResource(CloudFormationStackResource):
    """
    Represents a Serverless::Application resource that can refer to a nested
    app template via the Location property.
    """
    RESOURCE_TYPE = "AWS::Serverless::Application"
    PROPERTY_NAME = "Location"


class GlueJobCommandScriptLocationResource(Resource):
    """
    Represents a Glue::Job resource.
    """
    RESOURCE_TYPE = "AWS::Glue::Job"
    # Note the PROPERTY_NAME includes a '.' implying it's nested.
    PROPERTY_NAME = "Command.ScriptLocation"


class CodeCommitRepositoryS3Resource(ResourceWithS3UrlDict):
    """
    Represents a CodeCommit::Repository resource.
    """
    RESOURCE_TYPE = "AWS::CodeCommit::Repository"
    PROPERTY_NAME = "Code.S3"
    BUCKET_NAME_PROPERTY = "Bucket"
    OBJECT_KEY_PROPERTY = "Key"
    VERSION_PROPERTY = "ObjectVersion"
    # Don't package the directory if S3 is omitted.
    PACKAGE_NULL_PROPERTY = False
    FORCE_ZIP = True


RESOURCES_EXPORT_LIST = [
    ServerlessFunctionResource,
    ServerlessApiResource,
    GraphQLSchemaResource,
    AppSyncResolverRequestTemplateResource,
    AppSyncResolverResponseTemplateResource,
    AppSyncFunctionConfigurationRequestTemplateResource,
    AppSyncFunctionConfigurationResponseTemplateResource,
    ApiGatewayRestApiResource,
    LambdaFunctionResource,
    ElasticBeanstalkApplicationVersion,
    CloudFormationStackResource,
    ServerlessApplicationResource,
    ServerlessLayerVersionResource,
    LambdaLayerVersionResource,
    GlueJobCommandScriptLocationResource,
    StepFunctionsStateMachineDefinitionResource,
    ServerlessStateMachineDefinitionResource,
    CodeCommitRepositoryS3Resource
]

METADATA_EXPORT_LIST = [
    ServerlessRepoApplicationReadme,
    ServerlessRepoApplicationLicense
]


def include_transform_export_handler(template_dict, uploader, parent_dir):
    if template_dict.get("Name", None) != "AWS::Include":
        return template_dict

    include_location = template_dict.get("Parameters", {}).get("Location", None)
    if not include_location \
            or not is_path_value_valid(include_location) \
            or is_s3_url(include_location):
        # `include_location` is either empty, or not a string, or an S3 URI
        return template_dict

    # We are confident at this point that `include_location` is a string
    # containing the local path
    abs_include_location = os.path.join(parent_dir, include_location)
    if is_local_file(abs_include_location):
        template_dict["Parameters"]["Location"] = uploader.upload_with_dedup(abs_include_location)
    else:
        raise exceptions.InvalidLocalPathError(
            resource_id="AWS::Include",
            property_name="Location",
            local_path=abs_include_location)

    return template_dict

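# Example (illustrative): a transform value such as
#   {"Name": "AWS::Include", "Parameters": {"Location": "swagger.yaml"}}
# gets its Location uploaded and rewritten to the returned S3 URL, while
# transforms with any other Name, or whose Location is already an S3 URI,
# are returned unchanged.
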
GLOBAL_EXPORT_DICT = {
    "Fn::Transform": include_transform_export_handler
}


class Template(object):
    """
    Class to export a CloudFormation template
    """

    def __init__(self, template_path, parent_dir, uploader,
                 resources_to_export=RESOURCES_EXPORT_LIST,
                 metadata_to_export=METADATA_EXPORT_LIST):
        """
        Reads the template and makes it ready for export
        """

        if not (is_local_folder(parent_dir) and os.path.isabs(parent_dir)):
            raise ValueError("parent_dir parameter must be "
                             "an absolute path to a folder {0}"
                             .format(parent_dir))

        abs_template_path = make_abs_path(parent_dir, template_path)
        template_dir = os.path.dirname(abs_template_path)

        with open(abs_template_path, "r") as handle:
            template_str = handle.read()

        self.template_dict = yaml_parse(template_str)
        self.template_dir = template_dir
        self.resources_to_export = resources_to_export
        self.metadata_to_export = metadata_to_export
        self.uploader = uploader

    def export_global_artifacts(self, template_dict):
        """
        Template params such as AWS::Include transforms are not specific to
        any resource type but may contain artifacts that should be exported.
        Here we iterate through the template dict and export params with a
        handler defined in GLOBAL_EXPORT_DICT
        """
        for key, val in template_dict.items():
            if key in GLOBAL_EXPORT_DICT:
                template_dict[key] = GLOBAL_EXPORT_DICT[key](val, self.uploader, self.template_dir)
            elif isinstance(val, dict):
                self.export_global_artifacts(val)
            elif isinstance(val, list):
                for item in val:
                    if isinstance(item, dict):
                        self.export_global_artifacts(item)
        return template_dict

    def export_metadata(self, template_dict):
        """
        Exports the local artifacts referenced by the metadata section in
        the given template to an S3 bucket.

        :return: The template with references to artifacts that have been
        exported to S3.
        """
        if "Metadata" not in template_dict:
            return template_dict

        for metadata_type, metadata_dict in template_dict["Metadata"].items():
            for exporter_class in self.metadata_to_export:
                if exporter_class.RESOURCE_TYPE != metadata_type:
                    continue

                exporter = exporter_class(self.uploader)
                exporter.export(metadata_type, metadata_dict, self.template_dir)

        return template_dict

    def export(self):
        """
        Exports the local artifacts referenced by the given template to an
        S3 bucket.

        :return: The template with references to artifacts that have been
        exported to S3.
        """
        self.template_dict = self.export_metadata(self.template_dict)

        if "Resources" not in self.template_dict:
            return self.template_dict

        self.template_dict = self.export_global_artifacts(self.template_dict)

        self.export_resources(self.template_dict["Resources"])

        return self.template_dict

    def export_resources(self, resource_dict):
        for resource_id, resource in resource_dict.items():

            if resource_id.startswith("Fn::ForEach::"):
                if not isinstance(resource, list) or len(resource) != 3:
                    raise exceptions.InvalidForEachIntrinsicFunctionError(resource_id=resource_id)
                self.export_resources(resource[2])
                continue

            resource_type = resource.get("Type", None)
            resource_dict = resource.get("Properties", None)

            for exporter_class in self.resources_to_export:
                if exporter_class.RESOURCE_TYPE != resource_type:
                    continue

                # Export code resources
                exporter = exporter_class(self.uploader)
                exporter.export(resource_id, resource_dict, self.template_dir)
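# Example (illustrative sketch of how this module is typically driven, for
# instance by "aws cloudformation package"; the uploader construction below
# is assumed and simplified, not taken from this file):
#
#   from awscli.customizations.s3uploader import S3Uploader
#
#   uploader = S3Uploader(s3_client, bucket_name)   # assumed constructor args
#   template = Template("template.yaml", os.getcwd(), uploader)
#   exported = template.export()
#   print(yaml_dump(exported))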