class XRBP::Model::Node
Constants
- DEFAULT_CRAWL_PORT
Attributes
addr[RW]
ip[RW]
ledgers[RW]
port[RW]
type[RW]
uptime[RW]
version[RW]
Public Class Methods
crawl(start, opts={})
Crawl nodes via WebClient::Connection
@param start [Node, String] node, or url of a node, to start the crawl from
@param opts [Hash] options to crawl nodes with
@option opts [WebClient::Connection] :connection Connection to use to crawl nodes
@option opts [Integer] :delay optional delay to wait between crawl iterations
# File lib/xrbp/model/node.rb, line 86
def self.crawl(start, opts={})
  set_opts(opts)
  delay = opts[:delay] || 1

  queue = Array.new
  queue << start

  connection.add_plugin :result_parser     unless connection.plugin?(:result_parser)
  connection.add_plugin Parsers::NodePeers unless connection.plugin?(Parsers::NodePeers)
  connection.ssl_verify_peer = false
  connection.ssl_verify_host = false

  until connection.force_quit?
    # resolve the next node to crawl
    node = queue.shift
    node = parse_url node unless node.is_a?(Node)
    connection.emit :precrawl, node

    # request the node's peers
    connection.url = node.url
    peers = connection.perform

    # on error, requeue the node and retry after the delay
    if peers.nil? || peers.empty?
      queue << node
      connection.emit :crawlerr, node
      connection.rsleep(delay) unless connection.force_quit?
      next
    end

    connection.emit :peers, node, peers

    peers.each { |peer|
      break if connection.force_quit?

      peer = Node.from_peer peer
      next unless peer.valid? # skip unless valid
      connection.emit :peer, node, peer

      # enqueue newly discovered peers exactly once
      queue << peer unless queue.include?(peer)
    }

    # requeue the current node so it is eventually recrawled
    queue << node
    connection.emit :postcrawl, node
    connection.rsleep(delay) unless connection.force_quit?
  end
end
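A minimal usage sketch (it assumes WebClient::Connection exposes an on method for subscribing to the events emitted above; the seed URL and handler are illustrative):

  require 'xrbp'

  connection = XRBP::WebClient::Connection.new

  # log each unique peer as it is discovered
  connection.on(:peer) do |node, peer|
    puts "#{node.id} -> #{peer.id} (#{peer.version})"
  end

  XRBP::Model::Node.crawl("https://s1.ripple.com:51235/crawl",
                          :connection => connection,
                          :delay      => 2)

The crawl runs until force_quit? becomes true on the connection, revisiting known nodes round-robin since each crawled node is pushed back onto the queue.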
from_peer(p)
Return new node from the specified peer object
@param p [Hash] peer data
@return [Node] new node instance
# File lib/xrbp/model/node.rb, line 65
def self.from_peer(p)
  n = new
  n.addr    = p["public_key"]
  n.ip      = p["ip"]&.gsub("::ffff:", "")  # strip IPv4-mapped IPv6 prefix
  n.port    = p["port"] || DEFAULT_CRAWL_PORT
  n.version = p["version"].split("-").last  # e.g. "rippled-1.9.4" -> "1.9.4"
  n.uptime  = p["uptime"]
  n.type    = p["type"]
  n.ledgers = p["complete_ledgers"]
  n
end
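As a rough illustration of the field mapping (the hash below mimics a /crawl peer entry using the keys read above; all values are made up):

  peer = {
    "public_key"       => "n9ExamplePublicKey",
    "ip"               => "::ffff:203.0.113.7",
    "port"             => 51235,
    "version"          => "rippled-1.9.4",
    "uptime"           => 12345,
    "type"             => "out",
    "complete_ledgers" => "32570-75443032"
  }

  node = XRBP::Model::Node.from_peer(peer)
  node.ip      # => "203.0.113.7" ("::ffff:" prefix stripped)
  node.version # => "1.9.4"       (leading "rippled-" dropped)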
parse_url(url)
Return new node from the specified url
@param url [String] node url
@return [Node] new node instance
# File lib/xrbp/model/node.rb, line 51
def self.parse_url(url)
  n = new
  uri = URI.parse(url)
  n.ip = Resolv.getaddress(uri.host)
  n.port = uri.port
  n
end
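For example (the resolved address depends on DNS at call time):

  node = XRBP::Model::Node.parse_url("https://s1.ripple.com:51235/crawl")
  node.port # => 51235
  node.ip   # => resolved address of s1.ripple.com

Only ip and port are populated here; the remaining attributes are filled in later when the node shows up as a peer in a crawl response.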
Public Instance Methods
==(o)
# File lib/xrbp/model/node.rb, line 43
def ==(o)
  ip == o.ip && port == o.port
end
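Equality considers only the endpoint, which is what lets the crawl queue above deduplicate peers with queue.include?(peer):

  a = XRBP::Model::Node.new
  a.ip, a.port = "203.0.113.7", 51235

  b = XRBP::Model::Node.new
  b.ip, b.port = "203.0.113.7", 51235
  b.version = "1.9.4" # differing metadata is ignored

  a == b # => true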
complete_ledgers(opts={})
Retrieve the range of ledgers which this server has available
# File lib/xrbp/model/node.rb, line 138
def complete_ledgers(opts={})
  server_info(opts)["result"]["info"]["complete_ledgers"].split("-").collect { |l| l.to_i }
end
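A sketch of the expected shape, given a WebSocket::Connection ws (see server_info below):

  # if the node reports "complete_ledgers" => "32570-75443032"
  node.complete_ledgers(:connection => ws) # => [32570, 75443032]

Note the parsing assumes a single contiguous "first-last" range; rippled can also report disjoint sets (e.g. "32570-500000,600000-700000"), which this simple split does not handle.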
id()
Return unique node id
# File lib/xrbp/model/node.rb, line 17
def id
  "#{ip}:#{port}"
end
server_info(opts={}, &bl)
Retrieve server info via WebSocket::Connection
# File lib/xrbp/model/node.rb, line 132
def server_info(opts={}, &bl)
  set_opts(opts)
  connection.cmd(WebSocket::Cmds::ServerInfo.new, &bl)
end
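A hedged usage sketch (it assumes set_opts honors a :connection option here just as crawl does, with a WebSocket::Connection in place of the WebClient one, and that the connection constructor takes a url; the endpoint is illustrative):

  ws = XRBP::WebSocket::Connection.new "wss://s2.ripple.com:443"
  node.server_info(:connection => ws) do |info|
    puts info["result"]["info"]["build_version"]
  end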
url()
Return node url
# File lib/xrbp/model/node.rb, line 22
def url
  "https://#{ip}:#{port}/crawl"
end
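Both id and url are derived purely from the endpoint fields:

  node = XRBP::Model::Node.new
  node.ip, node.port = "203.0.113.7", 51235

  node.id  # => "203.0.113.7:51235"
  node.url # => "https://203.0.113.7:51235/crawl"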
valid?()
Return bool indicating if this node is valid for crawling
# File lib/xrbp/model/node.rb, line 27
def valid?
  return false unless ip && port

  # ensure no parsing errs
  begin
    # FIXME URI.parse is limiting our ability to traverse entire node-set,
    #       some nodes are represented as IPv6 addresses which is throwing
    #       things off.
    URI.parse(url)
  rescue
    return false
  end

  true
end
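Quick cases:

  node = XRBP::Model::Node.new
  node.valid? # => false, no ip/port set yet

  node.ip, node.port = "203.0.113.7", 51235
  node.valid? # => true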