使用以下工具轻松测试和构建 Crawling API 调用
# Test a Crawling API call from the shell.
# FIX: the target URL is now percent-encoded via --data-urlencode (matching the
# other language samples); passing it raw breaks as soon as it contains & or ?.
curl -G "https://api.crawlbase.com/" \
  --data-urlencode "token=_USER_TOKEN_" \
  --data-urlencode "url=https://httpbin.org/headers"
# Fetch https://httpbin.org/headers through the Crawlbase Crawling API
# and print the status code, the upstream status header, and the body.
require 'net/http'

uri = URI('https://api.crawlbase.com')
uri.query = URI.encode_www_form({ token: '_USER_TOKEN_', url: 'https://httpbin.org/headers' })

res = Net::HTTP.get_response(uri)

# BUG FIX: the original escaped every interpolation ("\#{...}"), so the script
# printed the literal text #{res.code} instead of the actual values.
puts "Response HTTP Status Code: #{res.code}"
puts "Response HTTP Header Original Status: #{res['original_status']}"
puts "Response HTTP Response Body: #{res.body}"
// Fetch https://httpbin.org/headers through the Crawlbase Crawling API
// and print the raw response body.
const https = require('https');

const url = encodeURIComponent('https://httpbin.org/headers');

const options = {
  hostname: 'api.crawlbase.com',
  path: '/?token=_USER_TOKEN_&url=' + url,
};

https
  .request(options, (response) => {
    let body = '';
    response.on('data', (chunk) => (body += chunk)).on('end', () => console.log(body));
  })
  // FIX: without an 'error' listener, any network failure (DNS, reset, timeout)
  // raises an unhandled 'error' event and crashes the process.
  .on('error', (err) => console.error('Request failed:', err.message))
  .end();
// Call the Crawlbase Crawling API for https://httpbin.org/headers
// and dump the raw response body.
$targetUrl = urlencode('https://httpbin.org/headers');

// curl_init() accepts the URL directly, replacing the separate CURLOPT_URL call.
$curl = curl_init('https://api.crawlbase.com/?token=_USER_TOKEN_&url=' . $targetUrl);
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);

$response = curl_exec($curl);
curl_close($curl);

var_dump($response);
# Fetch https://httpbin.org/headers through the Crawlbase Crawling API
# and print the raw response body.
# FIX: ported from Python 2 (urllib2 + print statement) to Python 3 —
# the original is a syntax error on every supported Python version.
from urllib.request import urlopen
from urllib.parse import quote_plus

url = quote_plus('https://httpbin.org/headers')
handler = urlopen('https://api.crawlbase.com/?token=_USER_TOKEN_&url=' + url)
print(handler.read())
rpackage main import ( "net/http" "fmt" "io/ioutil" "net/url" ) func main() { url := url.QueryEscape("https://httpbin.org/headers") resp, _ := http.Get("https://api.crawlbase.com/?token=_USER_TOKEN_&url=" + url) defer resp.Body.Close() body, _ := ioutil.ReadAll(resp.Body) fmt.Println("response Body: ", string(body))
← 代理模式 数分钟内的 Scraper API →
©2023 Crawlbase