
import os
from urllib.parse import urlparse

import boto3

from .abstract_deployer import AbstractDeployer
from spark_etl import Build
from spark_etl.exceptions import SparkETLDeploymentFailure


class S3Deployer(AbstractDeployer):
    """
    This deployer deploys the application to an AWS S3 bucket.
    """
    def __init__(self, config):
        super().__init__(config)

    def deploy(self, build_dir, deployment_location):
        # deployment_location must look like s3://<bucket>/<prefix>
        o = urlparse(deployment_location)
        if o.scheme != 's3':
            raise SparkETLDeploymentFailure("deployment_location must be in s3")

        build = Build(build_dir)

        s3_client = boto3.client(
            's3',
            aws_access_key_id=self.config['aws_access_key_id'],
            aws_secret_access_key=self.config['aws_secret_access_key']
        )
        bucket_name = o.netloc
        # Strip the leading "/" from the URL path and namespace the upload
        # by build version: objects land under <prefix>/<version>/
        s3_dirname = os.path.join(o.path[1:], build.version)

        print(f"Upload to AWS S3, bucket name = {bucket_name}")
        # Upload every artifact listed in the build manifest
        for artifact in build.artifacts:
            local_filename = os.path.join(build.build_dir, artifact)
            object_name = os.path.join(s3_dirname, artifact)

            print(f"{local_filename} ==> {object_name}")
            s3_client.upload_file(local_filename, bucket_name, object_name)

        # Also ship job_loader.py, the bootstrap script that lives alongside
        # this module, into the same versioned S3 prefix
        local_filename = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'job_loader.py')
        object_name = os.path.join(s3_dirname, "job_loader.py")
        print(f"{local_filename} ==> {object_name}")
        s3_client.upload_file(local_filename, bucket_name, object_name)
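A minimal usage sketch follows, assuming the class is importable from the package. The import path, bucket name, prefix, and credentials below are hypothetical; the config keys are the ones deploy() reads above.

from spark_etl.deployers import S3Deployer  # hypothetical import path

deployer = S3Deployer({
    "aws_access_key_id": "AKIA...",     # placeholder credential
    "aws_secret_access_key": "...",     # placeholder credential
})

# Uploads every build artifact, plus job_loader.py, under
# s3://my-bucket/apps/myapp/<build version>/
deployer.deploy(".builds/myapp", "s3://my-bucket/apps/myapp")

Passing a non-s3 URL (e.g. hdfs://...) raises SparkETLDeploymentFailure, since deploy() accepts only the s3 scheme.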