mirror of
https://github.com/violettoolssite/CFspider.git
synced 2026-04-05 19:39:01 +08:00
添加 Token 鉴权功能
- Workers 端:添加 token 验证逻辑,支持从环境变量读取 token 列表 - Python 客户端:所有 API 函数和 Session 类添加 token 参数支持 - 文档:更新 README.md 和 pages/api.html,添加 token 配置说明
This commit is contained in:
@@ -166,7 +166,7 @@ class CFSpiderResponse:
|
||||
|
||||
def request(method, url, cf_proxies=None, cf_workers=True, http2=False, impersonate=None,
|
||||
map_output=False, map_file="cfspider_map.html",
|
||||
stealth=False, stealth_browser='chrome', delay=None, **kwargs):
|
||||
stealth=False, stealth_browser='chrome', delay=None, token=None, **kwargs):
|
||||
"""
|
||||
发送 HTTP 请求
|
||||
|
||||
@@ -201,6 +201,10 @@ def request(method, url, cf_proxies=None, cf_workers=True, http2=False, imperson
|
||||
delay (tuple, optional): 请求前的随机延迟范围(秒)
|
||||
- 如 (1, 3) 表示请求前随机等待 1-3 秒
|
||||
- 用于模拟人类行为,避免被反爬系统检测
|
||||
token (str, optional): Workers API 鉴权 token
|
||||
- 当使用 Workers API(cf_workers=True)时,将 token 添加到查询参数
|
||||
- 如果 Workers 端配置了 TOKEN 环境变量,必须提供有效的 token
|
||||
- 格式:从查询参数 ?token=xxx 传递
|
||||
**kwargs: 其他参数,与 requests 库完全兼容
|
||||
- params (dict): URL 查询参数
|
||||
- headers (dict): 自定义请求头(会与隐身模式头合并)
|
||||
@@ -284,7 +288,7 @@ def request(method, url, cf_proxies=None, cf_workers=True, http2=False, imperson
|
||||
method, url, cf_proxies, cf_workers, impersonate,
|
||||
params=params, headers=headers, data=data,
|
||||
json_data=json_data, cookies=cookies, timeout=timeout,
|
||||
**kwargs
|
||||
token=token, **kwargs
|
||||
)
|
||||
_handle_map_output(response, url, start_time, map_output, map_file)
|
||||
return response
|
||||
@@ -295,7 +299,7 @@ def request(method, url, cf_proxies=None, cf_workers=True, http2=False, imperson
|
||||
method, url, cf_proxies, cf_workers,
|
||||
params=params, headers=headers, data=data,
|
||||
json_data=json_data, cookies=cookies, timeout=timeout,
|
||||
**kwargs
|
||||
token=token, **kwargs
|
||||
)
|
||||
_handle_map_output(response, url, start_time, map_output, map_file)
|
||||
return response
|
||||
@@ -356,7 +360,10 @@ def request(method, url, cf_proxies=None, cf_workers=True, http2=False, imperson
|
||||
if params:
|
||||
target_url = f"{url}?{urlencode(params)}"
|
||||
|
||||
# 构建代理 URL,添加 token 参数(如果提供)
|
||||
proxy_url = f"{cf_proxies_url}/proxy?url={quote(target_url, safe='')}&method={method.upper()}"
|
||||
if token:
|
||||
proxy_url += f"&token={quote(token, safe='')}"
|
||||
|
||||
request_headers = {}
|
||||
if headers:
|
||||
@@ -408,7 +415,7 @@ def _handle_map_output(response, url, start_time, map_output, map_file):
|
||||
|
||||
def _request_impersonate(method, url, cf_proxies, cf_workers, impersonate,
|
||||
params=None, headers=None, data=None, json_data=None,
|
||||
cookies=None, timeout=30, **kwargs):
|
||||
cookies=None, timeout=30, token=None, **kwargs):
|
||||
"""使用 curl_cffi 发送请求(支持 TLS 指纹模拟)"""
|
||||
curl_requests = _get_curl_cffi()
|
||||
|
||||
@@ -460,6 +467,8 @@ def _request_impersonate(method, url, cf_proxies, cf_workers, impersonate,
|
||||
target_url = f"{url}?{urlencode(params)}"
|
||||
|
||||
proxy_url = f"{cf_proxies}/proxy?url={quote(target_url, safe='')}&method={method.upper()}"
|
||||
if token:
|
||||
proxy_url += f"&token={quote(token, safe='')}"
|
||||
|
||||
request_headers = {}
|
||||
if headers:
|
||||
@@ -487,7 +496,7 @@ def _request_impersonate(method, url, cf_proxies, cf_workers, impersonate,
|
||||
|
||||
|
||||
def _request_httpx(method, url, cf_proxies, cf_workers, params=None, headers=None,
|
||||
data=None, json_data=None, cookies=None, timeout=30, **kwargs):
|
||||
data=None, json_data=None, cookies=None, timeout=30, token=None, **kwargs):
|
||||
"""使用 httpx 发送请求(支持 HTTP/2)"""
|
||||
httpx = _get_httpx()
|
||||
|
||||
@@ -536,6 +545,8 @@ def _request_httpx(method, url, cf_proxies, cf_workers, params=None, headers=Non
|
||||
target_url = f"{url}?{urlencode(params)}"
|
||||
|
||||
proxy_url = f"{cf_proxies}/proxy?url={quote(target_url, safe='')}&method={method.upper()}"
|
||||
if token:
|
||||
proxy_url += f"&token={quote(token, safe='')}"
|
||||
|
||||
request_headers = {}
|
||||
if headers:
|
||||
|
||||
@@ -174,6 +174,7 @@ async def arequest(
|
||||
cf_proxies: Optional[str] = None,
|
||||
cf_workers: bool = True,
|
||||
http2: bool = True,
|
||||
token: Optional[str] = None,
|
||||
**kwargs
|
||||
) -> AsyncCFSpiderResponse:
|
||||
"""
|
||||
@@ -244,6 +245,8 @@ async def arequest(
|
||||
target_url = f"{url}?{urlencode(params)}"
|
||||
|
||||
proxy_url = f"{cf_proxies}/proxy?url={quote(target_url, safe='')}&method={method.upper()}"
|
||||
if token:
|
||||
proxy_url += f"&token={quote(token, safe='')}"
|
||||
|
||||
request_headers = {}
|
||||
if headers:
|
||||
@@ -276,6 +279,7 @@ async def astream(
|
||||
cf_proxies: Optional[str] = None,
|
||||
cf_workers: bool = True,
|
||||
http2: bool = True,
|
||||
token: Optional[str] = None,
|
||||
**kwargs
|
||||
) -> AsyncIterator[AsyncStreamResponse]:
|
||||
"""
|
||||
@@ -351,6 +355,8 @@ async def astream(
|
||||
target_url = f"{url}?{urlencode(params)}"
|
||||
|
||||
proxy_endpoint = f"{cf_proxies_url}/proxy?url={quote(target_url, safe='')}&method={method.upper()}"
|
||||
if token:
|
||||
proxy_endpoint += f"&token={quote(token, safe='')}"
|
||||
|
||||
request_headers = {}
|
||||
if headers:
|
||||
|
||||
@@ -51,6 +51,7 @@ class AsyncSession:
|
||||
self.timeout = timeout
|
||||
self.headers = headers or {}
|
||||
self.cookies = cookies or {}
|
||||
self.token = token
|
||||
self._client_kwargs = kwargs
|
||||
self._client: Optional[httpx.AsyncClient] = None
|
||||
|
||||
@@ -143,6 +144,8 @@ class AsyncSession:
|
||||
target_url = f"{url}?{urlencode(params)}"
|
||||
|
||||
proxy_url = f"{cf_proxies_url}/proxy?url={quote(target_url, safe='')}&method={method.upper()}"
|
||||
if self.token:
|
||||
proxy_url += f"&token={quote(self.token, safe='')}"
|
||||
|
||||
request_headers = {}
|
||||
for key, value in merged_headers.items():
|
||||
@@ -223,6 +226,8 @@ class AsyncSession:
|
||||
target_url = f"{url}?{urlencode(params)}"
|
||||
|
||||
proxy_url = f"{cf_proxies_url}/proxy?url={quote(target_url, safe='')}&method={method.upper()}"
|
||||
if self.token:
|
||||
proxy_url += f"&token={quote(self.token, safe='')}"
|
||||
|
||||
request_headers = {}
|
||||
for key, value in merged_headers.items():
|
||||
|
||||
@@ -87,6 +87,7 @@ def impersonate_request(
|
||||
impersonate: str = "chrome131",
|
||||
cf_proxies: Optional[str] = None,
|
||||
cf_workers: bool = True,
|
||||
token: Optional[str] = None,
|
||||
**kwargs
|
||||
) -> ImpersonateResponse:
|
||||
"""
|
||||
@@ -171,6 +172,8 @@ def impersonate_request(
|
||||
target_url = f"{url}?{urlencode(params)}"
|
||||
|
||||
proxy_url = f"{cf_proxies}/proxy?url={quote(target_url, safe='')}&method={method.upper()}"
|
||||
if token:
|
||||
proxy_url += f"&token={quote(token, safe='')}"
|
||||
|
||||
request_headers = {}
|
||||
if headers:
|
||||
|
||||
@@ -39,19 +39,21 @@ class Session:
|
||||
请使用 cfspider.StealthSession。
|
||||
"""
|
||||
|
||||
def __init__(self, cf_proxies=None):
|
||||
def __init__(self, cf_proxies=None, token=None):
|
||||
"""
|
||||
初始化会话
|
||||
|
||||
Args:
|
||||
cf_proxies (str): Workers 代理地址(必填)
|
||||
例如:"https://your-workers.dev"
|
||||
token (str, optional): Workers API 鉴权 token
|
||||
当 Workers 端配置了 TOKEN 环境变量时,必须提供有效的 token
|
||||
|
||||
Raises:
|
||||
ValueError: 当 cf_proxies 为空时
|
||||
|
||||
Example:
|
||||
>>> session = cfspider.Session(cf_proxies="https://your-workers.dev")
|
||||
>>> session = cfspider.Session(cf_proxies="https://your-workers.dev", token="your-token")
|
||||
"""
|
||||
if not cf_proxies:
|
||||
raise ValueError(
|
||||
@@ -62,6 +64,7 @@ class Session:
|
||||
"如果需要隐身模式会话,请使用 cfspider.StealthSession。"
|
||||
)
|
||||
self.cf_proxies = cf_proxies.rstrip("/")
|
||||
self.token = token
|
||||
self.headers = {}
|
||||
self.cookies = {}
|
||||
|
||||
@@ -91,6 +94,7 @@ class Session:
|
||||
method,
|
||||
url,
|
||||
cf_proxies=self.cf_proxies,
|
||||
token=self.token,
|
||||
headers=headers,
|
||||
cookies=cookies,
|
||||
**kwargs
|
||||
|
||||
@@ -280,6 +280,7 @@ class StealthSession:
|
||||
cf_workers: bool = True,
|
||||
delay: Tuple[float, float] = None,
|
||||
auto_referer: bool = True,
|
||||
token: str = None,
|
||||
**kwargs
|
||||
):
|
||||
"""
|
||||
@@ -319,6 +320,7 @@ class StealthSession:
|
||||
self.cf_workers = cf_workers
|
||||
self.delay = delay
|
||||
self.auto_referer = auto_referer
|
||||
self.token = token
|
||||
self.last_url = None
|
||||
self.request_count = 0
|
||||
self._extra_kwargs = kwargs
|
||||
@@ -389,6 +391,7 @@ class StealthSession:
|
||||
url,
|
||||
cf_proxies=self.cf_proxies,
|
||||
cf_workers=self.cf_workers,
|
||||
token=self.token,
|
||||
headers=headers,
|
||||
cookies=cookies,
|
||||
**kwargs
|
||||
@@ -419,6 +422,7 @@ class StealthSession:
|
||||
url,
|
||||
cf_proxies=self.cf_proxies,
|
||||
cf_workers=self.cf_workers,
|
||||
token=self.token,
|
||||
headers=headers,
|
||||
cookies=cookies,
|
||||
**kwargs
|
||||
@@ -442,6 +446,7 @@ class StealthSession:
|
||||
url,
|
||||
cf_proxies=self.cf_proxies,
|
||||
cf_workers=self.cf_workers,
|
||||
token=self.token,
|
||||
headers=headers,
|
||||
cookies=cookies,
|
||||
**kwargs
|
||||
@@ -463,6 +468,7 @@ class StealthSession:
|
||||
url,
|
||||
cf_proxies=self.cf_proxies,
|
||||
cf_workers=self.cf_workers,
|
||||
token=self.token,
|
||||
headers=headers,
|
||||
cookies=cookies,
|
||||
**kwargs
|
||||
@@ -484,6 +490,7 @@ class StealthSession:
|
||||
url,
|
||||
cf_proxies=self.cf_proxies,
|
||||
cf_workers=self.cf_workers,
|
||||
token=self.token,
|
||||
headers=headers,
|
||||
cookies=cookies,
|
||||
**kwargs
|
||||
|
||||
Reference in New Issue
Block a user