Web Crawl HTML API
API health status
Healthy Available Limited Mostly unavailable No data
Loading health status...
Get webpage crawl data. Returns the full raw HTML content of the page; fast, cost-efficient, and optimized for static page crawling — suited to scraping, metadata extraction, and page structure analysis.
Tags: Web Crawling
Parameters
| Name | Required | Type | Default | Description |
|---|---|---|---|---|
| url | Yes | string | — | The full URL of the webpage to crawl (e.g., 'https://www.example.com'). |
Copy Request
bash
# GET the raw HTML of a page; replace YOUR_VALUE (URL-encoded) and YOUR_API_KEY.
curl -X GET "https://api.justserpapi.com/api/v1/web/html?url=YOUR_VALUE" \
-H "X-API-Key: YOUR_API_KEY"

js
// Fetch crawl data for the page; replace YOUR_VALUE and YOUR_API_KEY.
const res = await fetch("https://api.justserpapi.com/api/v1/web/html?url=YOUR_VALUE", {
headers: { "X-API-Key": "YOUR_API_KEY" }
});
// NOTE(review): parsed as JSON here — confirm the endpoint wraps the HTML in a JSON envelope (no response schema is documented below).
const data = await res.json();
console.log(data);

python
# Fetch crawl data for the page; replace YOUR_VALUE and YOUR_API_KEY.
import requests
url = "https://api.justserpapi.com/api/v1/web/html"
headers = { "X-API-Key": "YOUR_API_KEY" }  # API key is sent via this header
params = {
"url": "YOUR_VALUE"  # full URL of the page to crawl; requests URL-encodes it
}
response = requests.get(url, headers=headers, params=params)
print(response.json())

php
<?php
// Fetch crawl data for the page; replace YOUR_VALUE and YOUR_API_KEY.
$url = "https://api.justserpapi.com/api/v1/web/html?url=YOUR_VALUE";
$ch = curl_init($url);
// Return the body from curl_exec() instead of printing it directly.
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
curl_setopt($ch, CURLOPT_HTTPHEADER, [
"X-API-Key: YOUR_API_KEY"
]);
$response = curl_exec($ch);
curl_close($ch);
echo $response;

go
package main

import (
	"fmt"
	"io"
	"net/http"
	"os"
)

// main fetches crawl data for one page from the Web Crawl HTML API and
// prints the raw response body. Replace YOUR_VALUE (the URL-encoded page
// URL) and YOUR_API_KEY before running.
func main() {
	client := &http.Client{}

	req, err := http.NewRequest("GET", "https://api.justserpapi.com/api/v1/web/html?url=YOUR_VALUE", nil)
	if err != nil {
		fmt.Fprintln(os.Stderr, "building request:", err)
		os.Exit(1)
	}
	req.Header.Set("X-API-Key", "YOUR_API_KEY")

	resp, err := client.Do(req)
	if err != nil {
		// Checking this error first also avoids a nil-pointer panic that the
		// unconditional `defer resp.Body.Close()` would hit when Do fails.
		fmt.Fprintln(os.Stderr, "request failed:", err)
		os.Exit(1)
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		fmt.Fprintln(os.Stderr, "reading response:", err)
		os.Exit(1)
	}
	fmt.Println(string(body))
}

Extra
- x-highlights:
["Returns full raw HTML content","Fast and cost-efficient","Optimized for static page crawling"]
Response
No response schema/example provided.
