ResilientLink Ruby SDK

Official Ruby gem for the ResilientLink Web Scraping API.

Installation

gem install resilientlink

Or add to your Gemfile:

gem 'resilientlink'

Quick Start

require 'resilientlink'

client = Resilientlink::Client.new(api_key: 'YOUR_API_KEY')

result = client.scrape('https://example.com')

puts result['data']['title']        # "Example Domain"
puts result['data']['description']  # meta description
puts result['data']['image']        # OG image URL

Options

result = client.scrape('https://example.com',
  return_html:       true,         # include raw HTML
  screenshot:        true,         # base64 PNG (Pro/Enterprise)
  pdf:               true,         # base64 PDF (Pro/Enterprise)
  pdf_format:        'A4',
  bypass_cache:      true,         # force fresh scrape
  js_render:         true,         # JS rendering (Pro/Enterprise)
  wait_for_selector: '#app',       # wait for CSS selector
  wait_ms:           2000,         # wait 2s before scraping
  custom_headers:    { 'Accept-Language' => 'en-US' },
  timeout:           30_000        # ms (max 60_000)
)

Response

{
  'success'      => true,
  'cached'       => false,
  'tier'         => '...',
  'responseTime' => 412,
  'data'         => {
    'url'         => 'https://example.com',
    'title'       => 'Example Domain',
    'description' => '...',
    'image'       => '...',
    'domain'      => 'example.com',
    'og'          => { 'title' => '...', 'description' => '...' },
    'content'     => { 'wordCount' => 423, 'readTimeMinutes' => 2 },
    'scrapedAt'   => '2026-...'
  }
}

Error Handling

begin
  result = client.scrape('https://example.com')
rescue Resilientlink::Error => e
  puts e.message     # human-readable error
  puts e.status_code # 429 = rate limit, 401 = bad key, 451 = blocked
  puts e.body        # full response hash
end

Get Your API Key

Sign up on the ResilientLink website, then navigate to Dashboard → API Key to copy your key.