{#
  Jinja2 template for loading a CSV into Redshift.
  After rendering the template you will still need to replace %s with AWS credentials.
  This is done so the credentials don't get printed in logs.
#}
BEGIN TRANSACTION;

-- Load the CSV into a fresh staging table that mirrors the target table.
DROP TABLE IF EXISTS {{table}}_staging;
CREATE TEMPORARY TABLE {{table}}_staging (LIKE {{schema}}.{{table}});

COPY {{table}}_staging
FROM 's3://data-ilum/pipeline_dumps/{{filename}}'
CREDENTIALS %s
FORMAT CSV
DELIMITER ','
TRUNCATECOLUMNS;

{% if truncate %}
TRUNCATE TABLE {{schema}}.{{table}};
{% else %}
-- Back up the target rows that match the staging data, then delete them so the
-- insert below acts as an upsert on the primary key.
UNLOAD ('SELECT * FROM {{schema}}.{{table}} JOIN {{table}}_staging ON {% for key in primary_key %}{% if loop.index > 1 %} AND {% endif %}{{table}}.{{key}} = {{table}}_staging.{{key}}{% endfor %}')
TO 's3://data-ilum/redshift_dumps/{{file_prefix}}'
WITH CREDENTIALS %s
MANIFEST
PARALLEL OFF;

DELETE FROM {{schema}}.{{table}}
USING {{table}}_staging
WHERE {% for key in primary_key %}{% if loop.index > 1 %} AND {% endif %}{{table}}.{{key}} = {{table}}_staging.{{key}}{% endfor %};
{% endif %}

INSERT INTO {{schema}}.{{table}} SELECT * FROM {{table}}_staging;

END TRANSACTION;

DROP TABLE {{table}}_staging;
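{#
  Minimal usage sketch (an assumption about how this template is consumed, not
  part of the pipeline code): render with Jinja2, then execute the rendered SQL
  through a DB-API driver such as psycopg2, passing the AWS credentials string
  as a bind parameter for every remaining %s so it never appears in the rendered
  (and possibly logged) SQL. The file name load_csv.sql, the connection string,
  and the render values below are hypothetical.

  from jinja2 import Template
  import psycopg2

  with open("load_csv.sql") as f:
      sql = Template(f.read()).render(
          schema="analytics",
          table="events",
          filename="events.csv",
          file_prefix="events_backup/",
          truncate=False,
          primary_key=["event_id"],
      )

  creds = "aws_access_key_id=...;aws_secret_access_key=..."
  conn = psycopg2.connect("host=... port=5439 dbname=... user=... password=...")
  conn.autocommit = True  # the script manages its own BEGIN/END TRANSACTION
  with conn.cursor() as cur:
      # One bind parameter per %s left in the rendered SQL: one for COPY,
      # plus one for UNLOAD when truncate is False.
      cur.execute(sql, (creds,) * sql.count("%s"))
  conn.close()
#}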