# Deploying Airflow With Turbine CloudFormation Steps

Dependencies:

1. AWS CLI - https://aws.amazon.com/cli/
2. jq - https://stedolan.github.io/jq/

```bash
# Set these to the S3 bucket and region you want to deploy into
export DEPLOYMENT_BUCKET=
export DEPLOYMENT_REGION=

aws configure set default.region $DEPLOYMENT_REGION
aws s3 mb s3://$DEPLOYMENT_BUCKET

# Fetch a fresh copy of the stack sources
rm -rf aws-airflow-stack && git clone --recursive git@github.com:villasv/aws-airflow-stack.git

# Package the Lambda function; -j stores load_metric.py at the zip root (no directory prefix)
zip -j aws-airflow-stack/functions/package.zip aws-airflow-stack/functions/load_metric.py

# Upload the stack sources to the deployment bucket
aws s3 cp aws-airflow-stack s3://$DEPLOYMENT_BUCKET/quickstart-turbine-airflow --recursive

# Create the stack with a unique name so repeated deployments don't collide
export STACK_NAME=airflow-$(python -c "import uuid; print(uuid.uuid4())")
aws cloudformation create-stack \
  --stack-name $STACK_NAME \
  --template-url https://$DEPLOYMENT_BUCKET.s3.amazonaws.com/quickstart-turbine-airflow/templates/turbine-master.template \
  --parameters \
    ParameterKey=QSS3BucketName,ParameterValue=$DEPLOYMENT_BUCKET \
    ParameterKey=WorkerInstanceType,ParameterValue=t3.2xlarge \
    ParameterKey=SchedulerInstanceType,ParameterValue=t3.small \
  --capabilities CAPABILITY_IAM CAPABILITY_NAMED_IAM

# Poll until the root stack leaves CREATE_IN_PROGRESS
STACK_PROGRESS="CREATE_IN_PROGRESS"
printf 'Creating Stack'
while [ "$STACK_PROGRESS" = "CREATE_IN_PROGRESS" ]; do
  printf '.'
  sleep 5
  STACK_PROGRESS=$(aws cloudformation describe-stacks \
    | jq -r '.Stacks[] | select(.StackId | test("airflow-")) | select(.ParentId == null and .DeletionTime == null) | .StackStatus')
done

# Look up the public DNS name of the running webserver instance (state code 16 = running)
export AIRFLOW_UI=$(aws ec2 describe-instances \
  --filters Name=tag:Name,Values=turbine-webserver Name=instance-state-code,Values=16 \
  --query 'Reservations[].Instances[].PublicDnsName' \
  | jq -r '.[0]')

echo
echo "opening http://$AIRFLOW_UI:8080"
open http://$AIRFLOW_UI:8080  # 'open' is the macOS opener; use xdg-open on Linux

echo "Create a Pipeline here https://$DEPLOYMENT_REGION.console.aws.amazon.com/codesuite/codepipeline/pipelines"
open https://$DEPLOYMENT_REGION.console.aws.amazon.com/codesuite/codepipeline/pipelines
```

# Create Password User Inside the EC2

```bash
export $(xargs
```
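Once on the webserver instance, the sketch below is one way to create a web UI login, assuming the stack runs Airflow 1.10 with the `password_auth` backend (`airflow.contrib.auth.backends.password_auth`) enabled in `airflow.cfg`; the username, email, and password are placeholders:

```bash
# Assumes Airflow 1.10 with the password_auth backend enabled.
# Replace the placeholder username, email, and password with your own values.
python - <<'EOF'
from airflow import models, settings
from airflow.contrib.auth.backends.password_auth import PasswordUser

user = PasswordUser(models.User())
user.username = 'admin'              # placeholder
user.email = 'admin@example.com'     # placeholder
user.password = 'change-me'          # placeholder

session = settings.Session()
session.add(user)
session.commit()
session.close()
EOF
```

If the deployment uses the RBAC web UI instead, the equivalent CLI route is `airflow create_user` on Airflow 1.10 or `airflow users create` on Airflow 2.x.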