Code Examples
Below you will find sample scraping requests in the following programming languages: PHP, Python, Node.js, Go and Ruby.
PHP
<?php
// Scrape https://apple.com through the scrape.fun API and echo the result.
$queryString = http_build_query([
'token' => 'YOUR_API_ACCESS_KEY',
'url' => 'https://apple.com',
]);
$ch = curl_init(sprintf('%s?%s', 'https://api.scrape.fun/scrape', $queryString));
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
// Fail fast instead of hanging indefinitely on a dead connection.
curl_setopt($ch, CURLOPT_TIMEOUT, 30);
$website_content = curl_exec($ch);
// curl_exec() returns false on failure; report the error instead of
// silently echoing an empty result.
if ($website_content === false) {
echo 'cURL error: ' . curl_error($ch) . PHP_EOL;
} else {
echo $website_content;
}
curl_close($ch);
Python
import requests

# Query-string parameters for the scrape.fun API.
params = {
'token': 'YOUR_API_ACCESS_KEY',
'url': 'https://apple.com'
}
# Pass params by keyword and set a timeout: requests has NO default
# timeout, so a stalled connection would otherwise block forever.
api_result = requests.get('https://api.scrape.fun/scrape', params=params, timeout=30)
# Raise on HTTP 4xx/5xx instead of silently printing an error page.
api_result.raise_for_status()
website_content = api_result.content  # raw bytes of the scraped page
print(website_content)
Node.js
const axios = require('axios');

// Query-string parameters for the scrape.fun API.
const requestParams = {
token: 'YOUR_API_ACCESS_KEY',
url: 'https://apple.com'
};

// Fetch the scraped page and print it. axios rejects the promise on any
// network failure or non-2xx status, so errors land in the catch handler.
axios
.get('https://api.scrape.fun/scrape', { params: requestParams })
.then((response) => {
console.log(response.data);
})
.catch((err) => {
console.log(err);
});
Go
package main

import (
	"fmt"
	"io"
	"net/http"
	"time"
)

// main fetches https://apple.com through the scrape.fun API and prints
// the scraped page body to stdout.
func main() {
	// Always set a client timeout so a stalled connection cannot hang forever.
	httpClient := http.Client{Timeout: 30 * time.Second}

	req, err := http.NewRequest("GET", "https://api.scrape.fun/scrape", nil)
	if err != nil {
		panic(err)
	}

	// Build the query string (?token=...&url=...) safely via url.Values.
	q := req.URL.Query()
	q.Add("token", "YOUR_API_ACCESS_KEY")
	q.Add("url", "https://apple.com")
	req.URL.RawQuery = q.Encode()

	res, err := httpClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer res.Body.Close()

	if res.StatusCode != http.StatusOK {
		// Previously a non-200 response was silently ignored and the
		// program printed nothing; surface it instead.
		fmt.Printf("unexpected status: %s\n", res.Status)
		return
	}

	// io.ReadAll replaces the deprecated ioutil.ReadAll (Go 1.16+).
	bodyBytes, err := io.ReadAll(res.Body)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(bodyBytes))
}
Ruby
require 'net/http'
require 'json'

# Query-string parameters for the scrape.fun API.
# NOTE: every other example on this page sends the key as 'token';
# the previous :access_key key was inconsistent with the API contract.
params = {
:token => "YOUR_API_ACCESS_KEY",
:url => "https://apple.com"
}
uri = URI('https://api.scrape.fun/scrape')
uri.query = URI.encode_www_form(params)
# Net::HTTP.get returns the response body as a String.
website_content = Net::HTTP.get(uri)
print(website_content)