class Dbox::Syncer::Pull
Public Class Methods
new(database, api)
Calls superclass method Dbox::Syncer::Operation::new
# File lib/dbox/syncer.rb, line 168
def initialize(database, api)
  super(database, api)
end
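A minimal construction sketch. How database and api are obtained depends on the rest of the gem; Database.load and API.authorize below are assumed stand-ins, not confirmed entry points:

# Assumed setup -- Database.load and API.authorize are placeholders for
# however the surrounding code actually obtains these two objects.
database = Dbox::Database.load("/path/to/local/clone")
api      = Dbox::API.authorize
syncer   = Dbox::Syncer::Pull.new(database, api)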
Public Instance Methods
calculate_changes(dir, operation = :update)
# File lib/dbox/syncer.rb, line 258
def calculate_changes(dir, operation = :update)
  raise(ArgumentError, "Not a directory: #{dir.inspect}") unless dir[:is_dir]

  out = []
  recur_dirs = []

  # grab the metadata for the current dir (either off the filesystem or from Dropbox)
  res = gather_remote_info(dir)
  if res == :not_modified
    # directory itself was not modified, but we still need to
    # recur on subdirectories
    recur_dirs += database.subdirs(dir[:id]).map {|d| [:update, d] }
  else
    raise(ArgumentError, "Not a directory: #{res.inspect}") unless res[:is_dir]

    # dir may have changed -- calculate changes on contents
    contents = res.delete(:contents)
    if operation == :create || modified?(dir, res)
      res[:parent_id] = dir[:parent_id] if dir[:parent_id]
      res[:parent_path] = dir[:parent_path] if dir[:parent_path]
      out << [operation, res]
    end
    found_paths = []
    existing_entries = current_dir_entries_as_hash(dir)

    # process each entry that came back from dropbox/filesystem
    contents.each do |c|
      found_paths << c[:path]
      if entry = existing_entries[c[:path]]
        c[:id] = entry[:id]
        c[:modified] = parse_time(c[:modified])
        if c[:is_dir]
          # queue dir for later
          c[:remote_hash] = entry[:remote_hash]
          recur_dirs << [:update, c]
        else
          # update iff modified
          out << [:update, c] if modified?(entry, c)
        end
      else
        # create
        c[:modified] = parse_time(c[:modified])
        if c[:is_dir]
          # queue dir for later
          recur_dirs << [:create, c]
        else
          out << [:create, c]
        end
      end
    end

    # add any deletions
    out += case_insensitive_difference(existing_entries.keys, found_paths).map do |p|
      [:delete, existing_entries[p]]
    end
  end

  # recursively process new & existing subdirectories
  recur_dirs.each do |operation, dir|
    begin
      out += calculate_changes(dir, operation)
    rescue => e
      log.error "Error while calculating changes for #{operation} on #{dir[:path]}: #{e.inspect}\n#{e.backtrace.join("\n")}"
      out += [[:failed, dir.merge({ :operation => operation, :error => e })]]
    end
  end

  out
end
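The return value is a flat array of [operation, entry] pairs, where the operation is one of :create, :update, :delete, or :failed and the entry is a metadata hash. An illustrative sketch of the shape (paths and values are made up, not from a real run):

changes = [
  [:create, { :path => "photos/new.jpg", :is_dir => false }],
  [:update, { :path => "docs/notes.txt", :is_dir => false }],
  [:delete, { :path => "old_dir",        :is_dir => true  }],
  [:failed, { :path => "broken_dir", :operation => :update, :error => RuntimeError.new }]
]
changes.each { |op, entry| puts "#{op}: #{entry[:path]}" }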
create_dir(dir)
# File lib/dbox/syncer.rb, line 336
def create_dir(dir)
  local_path = dir[:local_path]
  log.info "Creating #{local_path}"
  saving_parent_timestamp(dir) do
    FileUtils.mkdir_p(local_path)
    update_file_timestamp(dir)
  end
end
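The dir and file hashes passed to this and the following helpers are database entries augmented with a :local_path. A sketch of the minimal keys create_dir reads, with illustrative values (a full entry carries more fields, e.g. :revision and :remote_hash):

dir = {
  :path       => "photos",                 # remote-relative path
  :local_path => "/home/me/clone/photos",  # absolute path on disk
  :modified   => Time.now                  # used by update_file_timestamp
}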
create_file(file)
# File lib/dbox/syncer.rb, line 357
def create_file(file)
  saving_parent_timestamp(file) do
    download_file(file)
  end
end
delete_dir(dir)
# File lib/dbox/syncer.rb, line 349
def delete_dir(dir)
  local_path = dir[:local_path]
  log.info "Deleting #{local_path}"
  saving_parent_timestamp(dir) do
    FileUtils.rm_r(local_path)
  end
end
delete_file(file)
# File lib/dbox/syncer.rb, line 367
def delete_file(file)
  local_path = file[:local_path]
  log.info "Deleting file: #{local_path}"
  saving_parent_timestamp(file) do
    FileUtils.rm_rf(local_path)
  end
end
download_file(file)
# File lib/dbox/syncer.rb, line 375
def download_file(file)
  local_path = file[:local_path]
  remote_path = file[:remote_path]

  # check to ensure we aren't overwriting an untracked file or a
  # file with local modifications
  clobbering = false
  if entry = database.find_by_path(file[:path])
    clobbering = calculate_hash(local_path) != entry[:local_hash]
  else
    clobbering = File.exists?(local_path)
  end

  # stream files larger than the minimum
  stream = file[:size] && file[:size] > MIN_BYTES_TO_STREAM_DOWNLOAD

  # download to temp file
  tmp = generate_tmpfilename(file[:path])
  File.open(tmp, "wb") do |f|
    api.get_file(remote_path, f, stream)
  end

  # rename old file if clobbering
  if clobbering && File.exists?(local_path)
    backup_path = find_nonconflicting_path(local_path)
    FileUtils.mv(local_path, backup_path)
    backup_relpath = local_to_relative_path(backup_path)
    log.warn "#{file[:path]} had a conflict and the existing copy was renamed to #{backup_relpath} locally"
  end

  # atomic move over to the real file, and update the timestamp
  FileUtils.mv(tmp, local_path)
  update_file_timestamp(file)

  if backup_relpath
    [:conflict, { :original => file[:path], :renamed => backup_relpath }]
  else
    true
  end
end
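download_file returns true on a clean download, or a [:conflict, details] pair when an untracked or locally modified file had to be moved aside first. A sketch of consuming that return value, which mirrors what execute does internally:

res = download_file(file)
if res.kind_of?(Array) && res[0] == :conflict
  details = res[1]
  log.warn "kept both copies: #{details[:original]} and #{details[:renamed]}"
end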
execute()
# File lib/dbox/syncer.rb, line 178
def execute
  remove_tmpfiles
  dir = database.root_dir
  changes = calculate_changes(dir)
  log.debug "Executing changes:\n" + changes.map {|c| c.inspect }.join("\n")
  parent_ids_of_failed_entries = []
  changelist = { :created => [], :deleted => [], :updated => [], :failed => [] }

  changes.each do |op, c|
    case op
    when :create
      c[:parent_id] ||= lookup_id_by_path(c[:parent_path])
      if c[:is_dir]
        # create the local directory
        create_dir(c)
        database.add_entry(c[:path], true, c[:parent_id], c[:modified], c[:revision], c[:remote_hash], nil)
        changelist[:created] << c[:path]
      else
        # download the new file
        begin
          res = create_file(c)
          local_hash = calculate_hash(c[:local_path])
          database.add_entry(c[:path], false, c[:parent_id], c[:modified], c[:revision], c[:remote_hash], local_hash)
          changelist[:created] << c[:path]
          if res.kind_of?(Array) && res[0] == :conflict
            changelist[:conflicts] ||= []
            changelist[:conflicts] << res[1]
          end
        rescue => e
          log.error "Error while downloading #{c[:path]}: #{e.inspect}\n#{e.backtrace.join("\n")}"
          parent_ids_of_failed_entries << c[:parent_id]
          changelist[:failed] << { :operation => :create, :path => c[:path], :error => e }
        end
      end
    when :update
      if c[:is_dir]
        # update the local directory
        update_dir(c)
        database.update_entry_by_path(c[:path], :modified => c[:modified], :revision => c[:revision], :remote_hash => c[:remote_hash])
        changelist[:updated] << c[:path]
      else
        # download updates to the file
        begin
          res = update_file(c)
          local_hash = calculate_hash(c[:local_path])
          database.update_entry_by_path(c[:path], :modified => c[:modified], :revision => c[:revision], :remote_hash => c[:remote_hash], :local_hash => local_hash)
          changelist[:updated] << c[:path]
          if res.kind_of?(Array) && res[0] == :conflict
            changelist[:conflicts] ||= []
            changelist[:conflicts] << res[1]
          end
        rescue => e
          log.error "Error while downloading #{c[:path]}: #{e.inspect}\n#{e.backtrace.join("\n")}"
          parent_ids_of_failed_entries << c[:parent_id]
          changelist[:failed] << { :operation => :update, :path => c[:path], :error => e }
        end
      end
    when :delete
      # delete the local directory/file
      c[:is_dir] ? delete_dir(c) : delete_file(c)
      database.delete_entry_by_path(c[:path])
      changelist[:deleted] << c[:path]
    when :failed
      parent_ids_of_failed_entries << c[:parent_id]
      changelist[:failed] << { :operation => c[:operation], :path => c[:path], :error => c[:error] }
    else
      raise(RuntimeError, "Unknown operation type: #{op}")
    end
  end

  # clear hashes on any dirs with children that failed so that
  # they are processed again on next pull
  parent_ids_of_failed_entries.uniq.each do |id|
    database.update_entry_by_id(id, :remote_hash => nil)
  end

  # sort & return output
  sort_changelist(changelist)
end
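The sorted changelist that execute returns maps each outcome to an array: :created, :updated, and :deleted hold paths; :failed holds hashes carrying the operation and error; :conflicts appears only when a download had to rename an existing file. Illustrative shape (values made up):

changelist = {
  :created   => ["photos/new.jpg"],
  :updated   => ["docs/notes.txt"],
  :deleted   => ["old_dir"],
  :failed    => [{ :operation => :update, :path => "broken.txt", :error => RuntimeError.new }],
  :conflicts => [{ :original => "report.doc", :renamed => "report (1).doc" }]
}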
modified?(entry, res)
# File lib/dbox/syncer.rb, line 328
def modified?(entry, res)
  out = (entry[:revision] != res[:revision]) || !times_equal?(entry[:modified], res[:modified])
  out ||= (entry[:remote_hash] != res[:remote_hash]) if res.has_key?(:remote_hash)
  log.debug "#{entry[:path]} modified? r#{entry[:revision]} vs. r#{res[:revision]}, h#{entry[:remote_hash]} vs. h#{res[:remote_hash]}, t#{time_to_s(entry[:modified])} vs. t#{time_to_s(res[:modified])} => #{out}"
  out
end
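An entry counts as modified when its revision or timestamp differs from the remote metadata, or, for entries that carry one, when the :remote_hash differs. For example (values illustrative):

entry = { :path => "a.txt", :revision => 10, :modified => Time.at(1_000_000) }
res   = { :path => "a.txt", :revision => 11, :modified => Time.at(1_000_060) }
modified?(entry, res)   #=> true, because the revisions differ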
practice()
# File lib/dbox/syncer.rb, line 172
def practice
  dir = database.root_dir
  changes = calculate_changes(dir)
  log.debug "Changes that would be executed:\n" + changes.map {|c| c.inspect }.join("\n")
end
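practice is a dry run of execute: it calculates the same changes but only logs them at debug level, without touching the filesystem or database. For example:

syncer = Dbox::Syncer::Pull.new(database, api)
syncer.practice   # logs the pending changes, applies nothing
syncer.execute    # applies them and returns the sorted changelist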
update_dir(dir)
# File lib/dbox/syncer.rb, line 345
def update_dir(dir)
  update_file_timestamp(dir)
end
update_file(file)
# File lib/dbox/syncer.rb, line 363
def update_file(file)
  download_file(file)
end