require 'fog/aws'

# One-shot S3 backup script: uploads a single local file (SOURCE_FILE_PATH)
# into TARGET_DIRECTORY inside BUCKET_NAME using Fog's multipart upload.
# All configuration comes from environment variables; AWS credentials are
# read from the standard AWS_* variables.
BUCKET_NAME = ENV['BUCKET_NAME']
SOURCE_FILE_PATH = ENV['SOURCE_FILE_PATH']
TARGET_DIRECTORY = ENV['TARGET_DIRECTORY']
STORAGE_REGION = ENV['STORAGE_REGION'] || 'eu-central-1'
# BUG FIX: the original `ENV['CHUNK_SIZE'].to_i || 104_857_600` never used the
# default — `nil.to_i` is 0 and 0 is truthy in Ruby, so a missing variable
# produced a multipart chunk size (and concurrency) of 0. Apply the default
# BEFORE converting to an integer.
CHUNK_SIZE = (ENV['CHUNK_SIZE'] || 104_857_600).to_i # 100 MiB per part
CONCURRENCY = (ENV['CONCURRENCY'] || 10).to_i        # parallel part uploads

class S3Backup
  # Opens the Fog S3 connection with static credentials (optionally including
  # a session token for temporary credentials) and resolves the target bucket,
  # creating it if it does not already exist.
  def initialize
    @connection = Fog::Storage.new(
      provider: 'AWS',
      use_iam_profile: false,
      aws_access_key_id: ENV['AWS_ACCESS_KEY_ID'],
      aws_secret_access_key: ENV['AWS_SECRET_ACCESS_KEY'],
      aws_session_token: ENV['AWS_SESSION_TOKEN'],
      aws_credentials_expire_at: ENV['AWS_CREDENTIAL_EXPIRATION'],
      region: STORAGE_REGION,
    )
    @directory = @connection.directories.get(BUCKET_NAME) ||
                 @connection.directories.create(key: BUCKET_NAME)
  end

  # Streams SOURCE_FILE_PATH to s3://BUCKET_NAME/TARGET_DIRECTORY/<basename>
  # as a private object, using multipart upload with CHUNK_SIZE-byte parts and
  # CONCURRENCY parallel part uploads. The file handle is closed by the block
  # form of File.open even if the upload raises.
  def upload
    file = File.basename(SOURCE_FILE_PATH)
    target_path = File.join(TARGET_DIRECTORY, file)

    File.open(SOURCE_FILE_PATH, 'r') do |local_file|
      options = {
        key: target_path,
        body: local_file,
        public: false,
        multipart_chunk_size: CHUNK_SIZE,
        concurrency: CONCURRENCY,
      }
      @directory.files.create(options)
    end
    puts 'Upload completed successfully.'
  end
end

backup = S3Backup.new
backup.upload