class GoodDataEloqua::Request

Attributes

asset_type[RW]

Public Class Methods

new(config = {}) click to toggle source
# File lib/gooddata_eloqua/helpers/request.rb, line 5
# Builds a new request helper.
#
# config[:client] - optional Eloqua client to use; previously the
#                   config hash was accepted but silently ignored.
#                   Falls back to the global $client for backward
#                   compatibility.
def initialize(config = {})

  @client = config[:client] || $client
  @asset_type_plural = nil
  @asset_type_singular = nil

end

Public Instance Methods

clean() click to toggle source
# File lib/gooddata_eloqua/helpers/request.rb, line 480
# Removes the session's ids CSV from the local downloads directory.
# Uses rm_f, so a missing file is silently ignored.
def clean
  # Was "/downloads/..." (filesystem root); every other method in this
  # class reads and writes under the relative "./downloads" directory.
  FileUtils.rm_f("./downloads/#{@session_csv}")
end
download_all_ids_return_csv(file_name, config={}) click to toggle source
# File lib/gooddata_eloqua/helpers/request.rb, line 32
# Downloads every record id for the configured asset type and writes
# them, one id per row, to ./downloads/<file_name>.
#
# file_name - output CSV name under ./downloads. NOTE(review): when
#             file_name is falsy, `fd` is never assigned and the
#             IO.new(fd) call below blows up -- confirm callers always
#             pass a name.
# config    - unused; kept for interface symmetry with sibling methods.
#
# Returns file_name so the caller can feed it to
# #get_complete_profile_from_ids_csv.
def download_all_ids_return_csv(file_name, config={})

  puts "#{Time.now} => Downloading IDs..."
  if file_name
    fd = IO.sysopen("./downloads/#{file_name}", 'w+')
  end

  csv = CSV.new(IO.new(fd))

  # Fetch page 0 eagerly so we can read the total record count.
  first_download = self.get_all_by_page(0)
  pages = first_download['total'].to_i
  # 1000 records per page. NOTE(review): the names look swapped --
  # `pages` holds the record total and `total` the page count.
  total = (pages/1000).round

  total = 1 if total == 0 || total < 1

  if total == 1

    # Everything fit in the page we already fetched.
    count = 0
    first_download['elements'].each { |record|
      count += 1
      csv << [record['id']]
    }

    puts "#{Time.now} => #{count} IDs extracted."
    puts "#{Time.now} => Flushing IO to \"./downloads/#{file_name}\"\n"
    csv.flush

  else

    # Build the page index list 0..total inclusive.
    iterations = []
    total.times { |i| iterations << i }
    iterations << total

    puts "#{Time.now} => #{iterations.length} pages queued for download."
    count = iterations.length
    # Serializes CSV appends when pages download in parallel.
    mutex = Mutex.new

    if $threading

      # Parallel download; pmap is presumably a gem-supplied Enumerable
      # extension -- TODO confirm which library provides it.
      iterations.pmap { |page|

        response = self.get_all_by_page(page)
        print "\r#{Time.now} => Extracting IDs - Remaining Pages: #{count}\s\s\s"

        mutex.synchronize {
          count -= 1
          response['elements'].each { |element|
            csv << [element['id']]
          }
        }

      }
    else
      # Serial fallback, identical to the branch above apart from each
      # vs pmap.
      iterations.each { |page|

        response = self.get_all_by_page(page)
        print "\r#{Time.now} => Extracting IDs - Remaining Pages: #{count}\s\s\s"

        mutex.synchronize {
          count -= 1
          response['elements'].each { |element|
            csv << [element['id']]
          }
        }

      }
    end

    csv.flush
    # NOTE(review): count has been decremented to 0 by this point, so
    # this always reports "0 IDs extracted".
    puts "#{Time.now} => #{count} IDs extracted."

    puts "\n#{Time.now} => Flushing IO to \"./downloads/#{file_name}\"\n"

  end

  file_name

end
get_all(config = {}) click to toggle source
# File lib/gooddata_eloqua/helpers/request.rb, line 13
# Performs a paged GET against the collection endpoint.
#
# The configured plural asset type takes priority; explicit
# config[:end_point] / config[:url] overrides and the @parent fallback
# are consulted in turn. The page comes from config[:page] or
# config[:page_number].
#
# Raises a RuntimeError when either piece is missing.
def get_all(config = {})
  target = @asset_type_plural
  target ||= config[:end_point]
  target ||= config[:url]
  target ||= @parent

  page_number = config[:page] || config[:page_number]

  if target && page_number
    @client.get(target, "?page=#{page_number}")
  else
    raise ':end_point and :page_number must be defined.'
  end
end
get_all_by_page(page, config = {}) click to toggle source
# File lib/gooddata_eloqua/helpers/request.rb, line 23
# Convenience wrapper around #get_all that bakes the page number into
# the options hash. config[:end_point] is honored only when no plural
# asset type has been configured.
def get_all_by_page(page, config ={})

  options = {
    end_point: @asset_type_plural || config[:end_point],
    page: page
  }
  self.get_all(options)

end
Also aliased as: get_all_from_page
get_all_distributed(num_iterations, csv) click to toggle source
# File lib/gooddata_eloqua/helpers/request.rb, line 356
# Fetches all pages (0..num_iterations inclusive) in parallel via pmap
# and appends each record's id to the supplied CSV writer.
#
# num_iterations - number of pages to request.
# csv            - CSV writer the ids are appended to (flushed here).
#
# NOTE(review): the Pool-based while loop below never fetches anything
# -- its worker threads only print a progress line -- and it mutates
# `iterations` after pmap has already consumed it. Looks like leftover
# scaffolding; confirm before relying on this method.
def get_all_distributed num_iterations, csv

  iterations = []
  num_iterations.times { |i| iterations << i }
  iterations << num_iterations

  # Guards CSV appends from concurrent pmap workers.
  mutex = Mutex.new

  iterations.pmap { |page|
    response = self.get_all_by_page(page)
    puts "#{Time.now} => Extracting IDs from page: #{page}"
    mutex.synchronize {
      response['elements'].each { |element|
        csv << [element['id']]
      }

    }

  }

  csv.flush

  pool = Pool.new

  while pool.running do

    # Takes up to 20 page indices per batch; index 0 is never taken.
    batch = iterations.slice!(1..20)

    break unless batch

    batch.each { |i|
      pool.thread {


        print "\r#{Time.now} => Pages #{iterations.length} of #{num_iterations} - Workers: #{pool.active_threads}\s"
      }
    }

    # Back off while the pool is saturated.
    if pool.active_threads > 20
      sleep 2
    end

    if iterations.length == 0 && pool.active_threads == 0
      pool.running = false
    else
      next
    end

  end

end
get_all_from_page(page, config = {})
Alias for: get_all_by_page
get_complete_profile_from_ids_csv(csv, config = {}) click to toggle source
# File lib/gooddata_eloqua/helpers/request.rb, line 111
# Reads record ids from ./downloads/<csv> (one id per row), fetches each
# record's complete profile via #get_one_by_id, and writes flattened
# rows to ./downloads/<@session_id>_complete_profile.csv.
#
# csv    - file name (under ./downloads) of an ids CSV, typically the
#          return value of #download_all_ids_return_csv.
# config - unused; kept for interface symmetry with sibling methods.
#
# Returns the output file path, or nil when the input file is missing
# or holds no ids.
#
# NOTE(review): the $threading and serial branches below are identical
# except for pmap vs each; `have_not_set_headers` is assigned but never
# read; the rescue branch assigns a local `headers` that is discarded.
def get_complete_profile_from_ids_csv(csv, config = {})

  # NOTE(review): File.exists? is deprecated in modern Ruby
  # (File.exist? is the supported spelling).
  return unless File.exists? "./downloads/#{csv}"

  puts "#{Time.now} => Extracting complete profile from IDs..."
  if csv
    fd = IO.sysopen("./downloads/#{csv}", 'r')
  end

  file_name = "./downloads/#{@session_id}_complete_profile.csv"
  write_fd = IO.sysopen(file_name, 'w+')
  @csv = CSV.new(IO.new(write_fd))

  read_csv = CSV.open(fd)

  # The first column of each row is the record id.
  ids = read_csv.map { |row|
    row[0]
  }

  return if ids.empty?

  # cache buffers finished rows before they are written; set_headers is
  # a one-shot queue that carries the header row into a later iteration.
  cache = []
  @headers = nil
  set_headers = []
  have_not_set_headers = false
  count = 0

  if $threading
    ids.pmap { |id| # <- Change this back to parallel

      # Emit the header row once it has been derived from a response.
      unless set_headers.empty?
        puts "\nWAS ABLE TO SET HEADER\n"
        @csv << set_headers.pop
      end

      # Spill part of the cache to disk so memory stays bounded.
      if cache.length > 2000
        batch = cache.slice!(1..500)
        batch.map { |row| @csv << row }
      end

      count += 1
      response = self.get_one_by_id(id)

      # Pick the response key holding the nested per-record elements.
      case response['type']
        when "Contact"
          keys = 'fieldValues'
        when "Campaign"
          keys = 'elements'
        when "Email"
          keys = 'htmlContent'
        when "Form"
          keys = 'elements'
        else
          keys = 'elements'
      end

      if response.empty?

        # No data returned: emit a row containing just the id.
        payload = [[id]]

      else

        # Derive the header row from the first usable response.
        if @headers == nil

          begin
            unless response[keys].empty?

              element_keys = response[keys][0].keys.map { |key|
                "element_#{key}"
              }

            else
              element_keys = []
            end

            @headers = response.keys + element_keys
            set_headers << response.keys + element_keys

          rescue NoMethodError
            headers = [id]
          end

        end

        # Build one row per nested element, each prefixed with the
        # record's top-level values.
        if response[keys].is_a? Array
          if response[keys].empty?
            payload = [response.values]
          else
            payload = []

            response[keys].each { |element|
              payload << response.values + element.values
            }
          end
        else
          payload = [response]
        end

      end

      ######### EDITING PAYLOAD TO REMOVE KEY ROW VALUES #####

      # Serialize nested structures so CSV cells stay flat.
      # NOTE(review): only payload[0] is kept -- any extra element rows
      # built above are dropped here; confirm this is intended.
      payload = payload[0].map { |key_value|
        if key_value.is_a? Array or key_value.is_a? Hash
          key_value.to_json
        else
          key_value
        end
      }

      cache << payload

      print "\r#{Time.now} => Extracting Profiles - IDs Remaining: #{ids.length-count} Cache: #{cache.length}\s\s\s"

    }
  else
    # Serial fallback: identical to the branch above, minus pmap.
    ids.each { |id| # <- Change this back to parallel

      unless set_headers.empty?
        puts "\nWAS ABLE TO SET HEADER\n"
        @csv << set_headers.pop
      end

      if cache.length > 2000
        batch = cache.slice!(1..500)
        batch.map { |row| @csv << row }
      end

      count += 1
      response = self.get_one_by_id(id)

      case response['type']
        when "Contact"
          keys = 'fieldValues'
        when "Campaign"
          keys = 'elements'
        when "Email"
          keys = 'htmlContent'
        when "Form"
          keys = 'elements'
        else
          keys = 'elements'
      end

      if response.empty?

        payload = [[id]]

      else

        if @headers == nil

          begin
            unless response[keys].empty?

              element_keys = response[keys][0].keys.map { |key|
                "element_#{key}"
              }

            else
              element_keys = []
            end

            @headers = response.keys + element_keys
            set_headers << response.keys + element_keys

          rescue NoMethodError
            headers = [id]
          end

        end

        if response[keys].is_a? Array
          if response[keys].empty?
            payload = [response.values]
          else
            payload = []

            response[keys].each { |element|
              payload << response.values + element.values
            }
          end
        else
          payload = [response]
        end

      end

      ######### EDITING PAYLOAD TO REMOVE KEY ROW VALUES #####

      payload = payload[0].map { |key_value|
        if key_value.is_a? Array or key_value.is_a? Hash
          key_value.to_json
        else
          key_value
        end
      }

      cache << payload

      print "\r#{Time.now} => Extracting Profiles - IDs Remaining: #{ids.length-count} Cache: #{cache.length}\s\s\s"

    }

  end

  puts "\n#{Time.now} => Threads complete!\n"
  count = cache.length

  # Drain whatever is still cached out to the CSV.
  if $threading

    cache.pmap { |row|
      print "\r#{Time.now} => Dumping pool to CSV #{count}\s\s\s\s "
      count -= 1
      @csv << row

    }

  else

    cache.each { |row|
      print "\r#{Time.now} => Dumping pool to CSV #{count}\s\s\s\s "
      count -= 1
      @csv << row

    }

  end


  @csv.flush

  puts "\n#{Time.now} => Flushing IO to \"#{file_name}\"\n"

  puts "\n#{Time.now} => Complete: #{file_name}\n\n"

  file_name




end
get_one(config={}) click to toggle source
# File lib/gooddata_eloqua/helpers/request.rb, line 465
# Fetches a single asset by id.
#
# config[:id]    - asset id (String or Integer).
# config[:depth] - Eloqua response depth; defaults to 'complete'.
def get_one(config={})
  #lookup_key = @asset_type_singular.gsub("/assets/","").downcase.to_sym
  id = config[:id]
  depth = config[:depth] || 'complete'
  # Interpolation (rather than String#+) avoids a TypeError when the id
  # arrives as an Integer instead of a String.
  end_point = "#{@asset_type_singular}/#{id}"
  @client.get(end_point, depth)
end
get_one_by_id(id, config={}) click to toggle source
# File lib/gooddata_eloqua/helpers/request.rb, line 473
# Convenience wrapper: fetches one record by id via #get_one, with the
# depth defaulting to 'complete' unless overridden in config.
def get_one_by_id(id, config={})
  self.get_one(depth: config[:depth] || 'complete', id: id)
end
set_asset_type(asset) click to toggle source
# File lib/gooddata_eloqua/helpers/request.rb, line 408
# Configures @asset_type_plural / @asset_type_singular endpoint paths
# for the given asset name, accepting either singular or plural form.
#
# Contacts live under the /data namespace; every other asset type
# (email, campaign, form, ...) lives under /assets.
#
# asset - asset type name as a String, e.g. 'contact' or 'emails'.
def set_asset_type(asset)

  # Only contacts use the /data namespace; everything else is /assets.
  prefix = case asset
           when 'contact', 'contacts' then '/data'
           else '/assets'
           end

  # Derive both forms from whichever was passed in. The original
  # per-asset branches were byte-identical apart from the prefix, and
  # their trailing raise was unreachable (if/elsif covered all cases);
  # a non-String argument still fails loudly here via NoMethodError.
  if asset[-1] == 's'
    @asset_type_plural = "#{prefix}/#{asset}"
    @asset_type_singular = "#{prefix}/#{asset[0..-2]}"
  else
    @asset_type_plural = "#{prefix}/#{asset}s"
    @asset_type_singular = "#{prefix}/#{asset}"
  end

end