class Datapimp::Sync::S3Bucket

Public Instance Methods

asset_fingerprints() click to toggle source
# File lib/datapimp/sync/s3_bucket.rb, line 71
# Map of destination path => MD5 fingerprint, persisted inside the
# deploy manifest so unchanged assets can be skipped on later pushes.
# Lazily initialized to an empty hash on first access.
def asset_fingerprints
  fingerprints = deploy_manifest['asset_fingerprints']
  fingerprints = deploy_manifest['asset_fingerprints'] = {} unless fingerprints
  fingerprints
end
build_deploy_manifest_from_remote() click to toggle source
# File lib/datapimp/sync/s3_bucket.rb, line 51
# Placeholder: intended to reconstruct the local deploy manifest from the
# remote bucket's current contents. Not implemented yet — currently a
# no-op that returns nil.
def build_deploy_manifest_from_remote
  # TODO
  # Implement
end
cloudfront() click to toggle source
# File lib/datapimp/sync/s3_bucket.rb, line 21
# Memoized CloudFront distribution wrapper pointing at this bucket.
def cloudfront
  return @cloudfront if @cloudfront

  @cloudfront = Datapimp::Sync::CloudfrontDistribution.new(bucket: remote)
end
deploy_manifest() click to toggle source
# File lib/datapimp/sync/s3_bucket.rb, line 47
# Parsed deploy manifest for this bucket, memoized in @deploy_manifest.
# Falls back to an empty hash when the manifest file is missing or holds
# invalid JSON; any other error propagates instead of being silently
# swallowed by a blanket rescue.
def deploy_manifest
  @deploy_manifest ||= begin
    # `|| {}` guards a file containing bare `null`, which parses to nil.
    JSON.parse(deploy_manifest_path.read) || {}
  rescue JSON::ParserError, SystemCallError
    {}
  end
end
deploy_manifest_path() click to toggle source
# File lib/datapimp/sync/s3_bucket.rb, line 41
# Path of this bucket's JSON deploy manifest:
# <deploy_manifests_path>/<parameterized-bucket-name>.json
# Ensures the manifests directory exists before returning.
def deploy_manifest_path
  base = Datapimp.config.deploy_manifests_path
  FileUtils.mkdir_p(base)
  base.join("#{remote.to_s.parameterize}.json")
end
local_path() click to toggle source
# File lib/datapimp/sync/s3_bucket.rb, line 37
# The local sync directory, wrapped as a Pathname.
def local_path
  Pathname.new(local)
end
prepare_manifest_for(entries) click to toggle source

Builds a manifest of MD5 hashes for each file so that we aren't redeploying files that are unchanged since the last deploy.

# File lib/datapimp/sync/s3_bucket.rb, line 59
# Intended to build per-file MD5 fingerprints so that files unchanged
# since the last deploy are not re-uploaded.
# NOTE(review): the body currently only loads and returns the cached
# deploy manifest — +entries+ is unused, and the fingerprints are in
# fact recorded inside run_push_action. Confirm whether this is meant
# to pre-populate anything beyond warming the manifest cache.
def prepare_manifest_for(entries)
   deploy_manifest
end
run(action, options={}) click to toggle source
# File lib/datapimp/sync/s3_bucket.rb, line 169
# Dispatches a sync action to its run_*_action implementation.
#
# action  - :push, :create, :update_acl, :pull or :reset (string or symbol)
# options - passed through to the action method
#
# Unknown actions are a silent no-op (returns nil).
def run(action, options={})
  case action.to_sym
  when :push       then run_push_action(options)
  when :create     then run_create_action(options)
  when :update_acl then run_update_acl_action(options)
  when :pull       then run_pull_action(options)
  # Fixed: previously called run_reset_options, which does not exist —
  # the defined method is run_reset_action.
  when :reset      then run_reset_action(options)
  end
end
run_create_action(options={}) click to toggle source
# File lib/datapimp/sync/s3_bucket.rb, line 153
# Creates the S3 bucket named by +remote+ unless it already exists, then
# enables static-website hosting on it. Pass make_private: true to create
# the bucket without public access. Returns the (new or existing) bucket.
def run_create_action(options={})
  directories = Datapimp::Sync.amazon.storage.directories
  make_private = !!options[:make_private]

  existing = directories.get(remote)
  bucket = existing || directories.create(key:remote, public: !make_private)

  storage.put_bucket_website(remote, :IndexDocument => 'index.html', :ErrorDocument => 'error.html')

  bucket
end
run_pull_action(options={}) click to toggle source
# File lib/datapimp/sync/s3_bucket.rb, line 129
# Downloads every object in the remote bucket into local_path, skipping
# files whose local MD5 already matches the remote etag. Passing
# reset: true (mash) wipes the local directory before pulling.
def run_pull_action(options={})
  options = options.to_mash
  bucket = Datapimp::Sync.amazon.storage.directories.get(remote)

  if options.reset == true
    FileUtils.rm_rf(local_path)
    FileUtils.mkdir_p(local_path)
  end

  bucket.files.each do |remote_file|
    target = local_path.join(remote_file.key)

    # NOTE: the S3 etag equals the body MD5 only for simple (non-multipart)
    # uploads — matching files are treated as unchanged and skipped.
    if target.exist? && remote_file.etag == Digest::MD5.hexdigest(target.read)
      log "Skipping #{ remote_file.key }"
      next
    end

    FileUtils.mkdir_p(target.dirname)
    target.open("w+") do |fh|
      fh.write(remote_file.body)
      log("Updated #{ remote_file.key }")
    end
  end
end
run_push_action(options={}) click to toggle source
# File lib/datapimp/sync/s3_bucket.rb, line 75
# Pushes the contents of local_path to the S3 bucket.
#
# Files whose MD5 fingerprint matches the recorded manifest entry or the
# remote object's etag are skipped. Uploaded keys are collected under
# deploy_manifest['uploaded'], and the manifest is written back to disk
# only when something was actually uploaded.
#
# options[:acl] - ACL applied to created/updated objects (default 'public-read')
def run_push_action(options={})
  require 'rack' unless defined?(::Rack) # Rack::Mime supplies content types

  entries = Dir[local_path.join('**/*')].map(&:to_pathname)
  prepare_manifest_for(entries)

  # Never deploy OS cruft or bare directories.
  entries.reject! { |entry| entry.to_s.match(/\.DS_Store/) }
  entries.reject!(&:directory?)

  uploaded = deploy_manifest['uploaded'] = []

  entries.each do |entry|
    destination = entry.relative_path_from(local_path).to_s.without_leading_slash
    fingerprint = Digest::MD5.hexdigest(entry.read)

    # Identical content already recorded in the manifest — nothing to do.
    next if asset_fingerprints[destination] == fingerprint

    content_type = Rack::Mime.mime_type(File.extname(destination.split("/").last))

    if existing = s3.files.get(destination)
      if existing.etag == fingerprint
        log "Skipping #{ destination }: similar etag"
      else
        existing.body = entry.read
        # [] works for both plain Hashes and mashes; the previous
        # `options.acl` raised NoMethodError on the default Hash.
        existing.acl = options[:acl] || 'public-read'
        existing.content_type = content_type
        log "Updated #{ destination }; content-type: #{ content_type }"
        uploaded << destination
        existing.save
      end
    else
      log "Uploaded #{ destination }; content-type: #{ content_type }"
      # Honors options[:acl] like the update branch above (was hard-coded).
      s3.files.create(key: destination, body: entry.read, acl: options[:acl] || 'public-read', content_type: content_type)
      uploaded << destination
    end

    asset_fingerprints[destination] = fingerprint
  end

  # Nothing uploaded: the manifest on disk is already current.
  # (Was `if count == 0` with an undefined local, raising NameError.)
  return if uploaded.empty?

  log "Saving deploy manifest. #{ deploy_manifest.keys.length } entries"
  deploy_manifest_path.open("w+") {|fh| fh.write(deploy_manifest.to_json) }
end
run_reset_action(options={}) click to toggle source
# File lib/datapimp/sync/s3_bucket.rb, line 124
# Deletes every object in the remote bucket. Each delete is best-effort:
# provider errors on individual objects are swallowed so one failure
# does not abort the rest of the reset.
def run_reset_action(options={})
  # Fixed: previously used a bare `directories`, which is undefined in
  # this class — siblings obtain it via storage.directories.
  bucket = storage.directories.get(remote)

  bucket.files.each do |file|
    key = file.key
    file.delete rescue nil
    file.destroy rescue nil
    log "Deleting #{ key }"
  end
end
run_update_acl_action(options={}) click to toggle source
# File lib/datapimp/sync/s3_bucket.rb, line 63
# Marks every object in the bucket world-readable ('public-read').
def run_update_acl_action(options={})
  s3.files.each do |remote_file|
    remote_file.acl = 'public-read'
    remote_file.save
    log "Updated acl for #{ remote_file.key } to public-read"
  end
end
s3() click to toggle source

Returns the S3 bucket (a Fog directory) via the Fog storage connection.

# File lib/datapimp/sync/s3_bucket.rb, line 7
# Returns the Fog directory (S3 bucket) named by +remote+, memoized in @s3.
#
# Side effects run once, on first access:
# * when setup_website == true the bucket is made public, saved, and
#   bucket-website hosting is configured.
#   NOTE(review): this put_bucket_website call passes
#   ('index.html', key: 'error.html') while run_create_action passes
#   (:IndexDocument => ..., :ErrorDocument => ...) — the two forms look
#   inconsistent; confirm which matches the fog-aws signature in use.
# * when redirect == true nothing happens yet beyond a log line.
def s3
  @s3 ||= storage.directories.get(remote).tap do |bucket|
    if setup_website == true
      bucket.public = true
      bucket.save
      storage.put_bucket_website(remote, 'index.html', key: 'error.html')
    end

    if redirect == true
      log "Should be creating a redirect bucket"
    end
  end
end
storage() click to toggle source
# File lib/datapimp/sync/s3_bucket.rb, line 25
# Fog storage handle from the configured Amazon connection.
# Not memoized: delegates to Datapimp::Sync.amazon on every call.
def storage
  amazon = Datapimp::Sync.amazon
  amazon.storage
end
website_hostname() click to toggle source
# File lib/datapimp/sync/s3_bucket.rb, line 29
# S3 static-website endpoint hostname for this bucket, built from the
# bucket key and its region/location.
def website_hostname
  format("%s.s3-website-%s.amazonaws.com", s3.key, s3.location)
end
website_url(proto="http") click to toggle source
# File lib/datapimp/sync/s3_bucket.rb, line 33
# Full website URL for this bucket; proto defaults to plain "http".
def website_url(proto="http")
  format("%s://%s", proto, website_hostname)
end