A reusable `Net::HTTP` client that opens one persistent connection, applies configurable timeouts, and retries transient network failures with exponential backoff:

```ruby
require 'net/http'
require 'uri'
require 'json'

class HTTPClient
  def initialize(base_url, options = {})
    @uri = URI(base_url)
    @options = { open_timeout: 10, read_timeout: 30, max_retries: 3 }.merge(options)

    # Open one persistent connection and reuse it for every request.
    @http = Net::HTTP.new(@uri.host, @uri.port)
    @http.use_ssl = @uri.scheme == 'https'
    @http.open_timeout = @options[:open_timeout]
    @http.read_timeout = @options[:read_timeout]
    @http.start
  end

  def get(path, params = {})
    uri = @uri.dup
    uri.path = path
    uri.query = URI.encode_www_form(params) unless params.empty?
    request = Net::HTTP::Get.new(uri.request_uri)
    execute_request(request)
  end

  def post(path, data, content_type = 'application/json')
    request = Net::HTTP::Post.new(path)
    request['Content-Type'] = content_type
    request.body = data.is_a?(String) ? data : data.to_json
    execute_request(request)
  end

  def close
    @http.finish if @http.started?
  end

  private

  def execute_request(request)
    retries = 0
    begin
      response = @http.request(request)
      case response
      when Net::HTTPSuccess
        response.body
      when Net::HTTPRedirection
        raise "Redirection not supported in this client"
      else
        raise "HTTP Error: #{response.code} #{response.message}"
      end
    rescue Net::OpenTimeout, Net::ReadTimeout, Errno::ECONNRESET => e
      retries += 1
      if retries <= @options[:max_retries]
        sleep(2**retries) # Exponential backoff: 2s, 4s, 8s...
        retry
      else
        raise "Max retries exceeded: #{e.message}"
      end
    end
  end
end

# Usage
client = HTTPClient.new('https://httpbin.org')
result = client.get('/get', { key: 'value' })
puts result
client.close
```

Note that `Net::TimeoutError` does not exist in the standard library; the rescue clause above catches `Net::OpenTimeout` and `Net::ReadTimeout` instead, and `require 'json'` is needed for the `to_json` call in `post`.
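For completeness, here is a minimal sketch of a POST round trip with the same client. It assumes httpbin.org is reachable; that service echoes the parsed JSON payload back under a `"json"` key, so it works as a quick smoke test (the endpoint and payload are purely illustrative):

```ruby
client = HTTPClient.new('https://httpbin.org', read_timeout: 15)
begin
  body = client.post('/post', { name: 'test', value: 42 })
  puts JSON.parse(body)['json'] # httpbin echoes the request payload back
ensure
  client.close                  # always release the persistent connection
end
```

Wrapping the calls in `begin`/`ensure` guarantees the connection is closed even if a request raises after exhausting its retries.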
A Nokogiri-based scraper that follows redirects, retries failed requests with linear backoff, and extracts the title, links, and headings from a page:

```ruby
require 'net/http'
require 'nokogiri'
require 'uri'

class WebScraper
  def initialize(options = {})
    @max_retries = options[:max_retries] || 3
    @retry_delay = options[:retry_delay] || 1
    @user_agent  = options[:user_agent]  || 'Ruby WebScraper 1.0'
  end

  def scrape(url)
    retries = 0
    begin
      uri = URI(url)
      http = Net::HTTP.new(uri.host, uri.port)
      http.use_ssl = uri.scheme == 'https'

      # request_uri falls back to "/" for bare domains, unlike uri.path,
      # which is empty for URLs like https://example.com and would raise.
      request = Net::HTTP::Get.new(uri.request_uri)
      request['User-Agent'] = @user_agent
      response = http.request(request)

      case response
      when Net::HTTPSuccess
        parse_content(response.body)
      when Net::HTTPRedirection
        # The Location header may be relative; resolve it against the current URL.
        new_url = URI.join(url, response['location']).to_s
        scrape(new_url)
      else
        raise "HTTP Error: #{response.code}"
      end
    rescue StandardError => e
      retries += 1
      if retries <= @max_retries
        puts "Retry #{retries}/#{@max_retries} for #{url}: #{e.message}"
        sleep(@retry_delay * retries) # Linear backoff between attempts
        retry
      else
        raise "Failed to scrape #{url} after #{@max_retries} retries: #{e.message}"
      end
    end
  end

  private

  def parse_content(html)
    doc = Nokogiri::HTML(html)
    {
      title:    doc.css('title').text.strip,
      links:    doc.css('a').map { |link| link['href'] }.compact,
      headings: doc.css('h1, h2, h3').map(&:text).map(&:strip)
    }
  end
end

# Usage
scraper = WebScraper.new(max_retries: 5)
result = scraper.scrape('https://example.com')
puts result
```

Because the rescue catches `StandardError`, HTTP error responses (the `raise` in the `else` branch) are retried along with network failures; redirect chains are followed recursively without a depth cap, so a redirect loop would eventually exhaust the retry budget rather than terminate cleanly.
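A minimal batch-usage sketch, assuming the URLs below are reachable (they are illustrative). Rescuing per URL keeps one bad page from aborting the whole run; `scrape` raises a plain `RuntimeError` once its retries are exhausted, which is what the rescue clause catches:

```ruby
scraper = WebScraper.new(max_retries: 2, retry_delay: 2)

['https://example.com', 'https://www.ruby-lang.org/en/'].each do |url|
  begin
    data = scraper.scrape(url)
    puts "#{url}: #{data[:title]} (#{data[:links].size} links)"
  rescue RuntimeError => e
    warn "Skipping #{url}: #{e.message}" # log to stderr and move on
  end
end
```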