AkamaiWeb
Get Sensor Data
This task type uses an independent endpoint, `/akamaiweb/invoke`, to obtain sensor data.
Task Object Structure
| Param | Type | Required | Note |
|---|---|---|---|
| type | String | Required | AntiAkamaiWebTask |
| version | String | Required | Akamai version: 2.0 or 3.0 |
| url | String | Required | the url of the target website |
| abck | String | Required | Akamai’s cookie |
| bmsz | String | Required | Akamai’s cookie |
| script | String | Required | Akamai’s script |
| sensorUrl | String | Required | The url that POST sensor_data, Note that this url changes frequently, so you have to dynamically obtain it. |
| userAgent | String | Optional | support the userAgent of Windows Chrome |
| sensorCount | integer | Required | number of sensors required, currently only 3 are supported |
Request Example
POST https://api.capsolver.com/akamaiweb/invoke
Host: api.capsolver.com
Content-Type: application/json

{
"clientKey": "YOUR_API_KEY",
"task": {
"type": "AntiAkamaiWebTask",
"version": "3.0",
"url": "https://www.xxxx.com",
"abck": "339491ECBC4E77145D39E852D7F96F4B~-1~YAAQHpHI...",
"bmsz": "B5B30C9F97742E5934EB6068F5F98DE1~YAAQHpHIFde...",
"script": "(function(){if(typeof Array.proto...",
"sensorUrl": "https://www.xxxx.com/AIhKchUpNaetNjPaitrYqE1ee11/aVb7D1cVJw1J6rz1c1/SAsP1w/OE/VGfC3-KTV",
"sensorCount": 3,
"userAgent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/141.0.0.0 Safari/537.36"
}
}
Response Example
{
"errorId": 0,
"success": true,
"status": "ready",
"solution": {
"sensorList": [
"3;0;1;0;3486...",
"3;0;1;0;3486...",
"3;0;1;0;3486..."
]
},
"type": "AntiAkamaiWebTask"
}
Sample Code
# pip install curl_cffi
import re
import json
from urllib.parse import urlparse, urljoin
from curl_cffi import requests
# --- Configuration: fill these in before running ---
API_KEY = "..."  # TODO: Your CapSolver API key
website_url = "https://www.xxxxx.com/"  # TODO: The url of the target website
proxy = "http://name:[email protected]:9999"  # TODO: Your proxy
# Windows Chrome user agent; keep consistent with the sec-ch-ua hints below.
user_agent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/141.0.0.0 Safari/537.36"
sec_user_agent = '"Google Chrome";v="141", "Not?A_Brand";v="8", "Chromium";v="141"'
# curl_cffi session that impersonates a Chrome TLS fingerprint.
# NOTE: verify=False disables TLS certificate checks — fine for a demo,
# not for production use.
session = requests.Session(impersonate="chrome133a", verify=False)
# Route all session traffic through the proxy for both schemes.
session.proxies = {
    "http": proxy,
    "https": proxy,
}
def get_sensor_list(abck, bmsz, js_script, sensor_url):
    """Ask the CapSolver /akamaiweb/invoke endpoint for Akamai sensor data.

    Args:
        abck: current value of the Akamai ``_abck`` cookie.
        bmsz: current value of the Akamai ``bm_sz`` cookie.
        js_script: the Akamai challenge script body fetched from the site.
        sensor_url: the site's sensor-POST endpoint URL.

    Returns:
        The ``sensorList`` array from the API solution, or ``None`` when
        the response carries no solution.
    """
    task = {
        "type": "AntiAkamaiWebTask",
        "version": "3.0",
        "url": website_url,
        "abck": abck,
        "bmsz": bmsz,
        "script": js_script,
        "sensorUrl": sensor_url,
        "userAgent": user_agent,
        "sensorCount": 3,
    }
    # The API call goes direct (module-level requests), not through the
    # proxied site session.
    response = requests.post(
        "https://api.capsolver.com/akamaiweb/invoke",
        json={"clientKey": API_KEY, "task": task},
    )
    print(response.text)
    body = response.json()
    return body.get("solution", {}).get("sensorList")
def get_akamai_url(html, base=None):
    """Extract the Akamai sensor-script URL from a page's HTML.

    Args:
        html: HTML of the target page.
        base: base URL used to resolve the script path; defaults to the
            module-level ``website_url`` when omitted (backward compatible).

    Returns:
        A ``(base_url, akamai_url)`` tuple, or ``(None, None)`` when no
        matching script tag is found.

    BUG FIX: the original returned a bare ``None`` on failure, which made
    the caller's tuple unpacking (``base_url, akamai_url = ...``) raise
    TypeError instead of reaching its error message.
    """
    match = re.search(
        r'<script type="text/javascript".*?src="((/[0-9A-Za-z\-\_]+)+)">', html
    )
    if not match:
        return None, None
    if base is None:
        base = website_url  # fall back to the module-level target URL
    parsed_url = urlparse(base)
    base_url = f"{parsed_url.scheme}://{parsed_url.netloc}"
    return base_url, urljoin(base_url, match.group(1))
def main():
    """Fetch the target page, obtain sensors from CapSolver, and replay
    them against the site's sensor endpoint to generate a valid ``_abck``
    cookie.

    Returns:
        The final ``_abck`` cookie value on success, otherwise ``None``.

    BUG FIX: the original passed the undefined name ``akamai_uri`` to
    ``get_sensor_list`` (NameError); it must be ``akamai_url``.
    """
    session.headers.clear()
    # Browser-like document-navigation headers for the initial page load.
    headers = {
        "sec-ch-ua": sec_user_agent,
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": '"Windows"',
        "upgrade-insecure-requests": "1",
        "user-agent": user_agent,
        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
        "sec-fetch-site": "none",
        "sec-fetch-mode": "navigate",
        "sec-fetch-user": "?1",
        "sec-fetch-dest": "document",
        "accept-language": "en-US,en;q=0.9",
        "priority": "u=0, i",
    }
    resp = session.get(website_url, headers=headers, verify=False)
    if resp.status_code != 200:
        print(resp.status_code, resp.text)
        return
    # Guard before unpacking: get_akamai_url may signal failure.
    result = get_akamai_url(resp.text)
    if not result:
        print("failed to parse akamai url")
        return
    base_url, akamai_url = result
    if not akamai_url:
        print("failed to parse akamai url")
        return
    print("akamai url:", akamai_url)
    if not resp.cookies.get("_abck"):
        # Without an initial _abck cookie there is nothing to regenerate.
        return
    # Switch to XHR-style headers for the script fetch and sensor POSTs.
    headers["referer"] = website_url
    headers["origin"] = base_url
    headers["accept"] = "*/*"
    headers["content-type"] = "application/json"
    headers["sec-fetch-site"] = "same-origin"
    headers["sec-fetch-dest"] = "empty"
    headers["sec-fetch-mode"] = "cors"
    # Fetch the Akamai challenge script; its body is sent to the solver.
    resp = session.get(akamai_url, headers=headers, verify=False)
    abck = resp.cookies.get("_abck")
    bm_sz = session.cookies.get("bm_sz")
    print("bm_sz:", bm_sz)
    if not bm_sz:
        print("bm_sz cookie not found")
        return
    # FIX: was `akamai_uri` (undefined name) in the original.
    sensor_list = get_sensor_list(abck, bm_sz, resp.text, akamai_url)
    if not sensor_list:
        print("failed to get sensor list")
        return
    print("sensor list:", len(sensor_list))
    # Replay each sensor payload; every accepted POST refreshes _abck.
    for sensor in sensor_list:
        payload = json.dumps({"sensor_data": sensor}, separators=(",", ":"))
        resp = session.post(
            url=akamai_url,
            headers=headers,
            data=payload,
            verify=False,
        )
        abck = resp.cookies.get("_abck")
        print(resp.status_code, resp.text, abck)
        # NOTE(review): compares "success" to the STRING "false" — confirm
        # this endpoint returns strings rather than JSON booleans.
        if resp.status_code != 201 or resp.json().get("success") == "false":
            print("failed to gen cookie", resp.text)
            return
    if not abck:
        print("failed to gen cookie")
        return
    return abck


if __name__ == "__main__":
    main()