feat: TLS/HTTP2/async test

This commit is contained in:
test01
2026-01-04 10:24:03 +08:00
parent 5b66549629
commit 0884a07db0
8 changed files with 1355 additions and 2 deletions

3
.gitignore vendored
View File

@@ -10,6 +10,5 @@ build/
*.egg-info/
__pycache__/
# 测试文件
test*.py
# 临时文件
edgetunnel_proxy.py

9
test.py Normal file
View File

@@ -0,0 +1,9 @@
"""Smoke test: compare the egress IP seen via the cfspider Workers proxy
with the IP seen by a direct requests call."""
import cfspider
import requests

worker_url = "ip.kami666.xyz"

# Same endpoint, two paths: proxied through the Worker vs. direct.
cf_response = cfspider.get("https://httpbin.org/ip", cf_proxies=worker_url)
req_response = requests.get("https://httpbin.org/ip")

for resp in (cf_response, req_response):
    print(resp.text)

502
test_antibot.py Normal file
View File

@@ -0,0 +1,502 @@
"""
测试 cfspider 反爬绕过能力
验证 TLS 指纹模拟、Workers 代理等功能的实际效果
"""
import sys
sys.path.insert(0, '.')
import cfspider
import json
CF_WORKERS = "https://ip.kami666.xyz"
def test_tls_fingerprint():
    """Probe tls.browserleaks.com three ways and print the reported fingerprints.

    [1.1] plain request (Python's default TLS stack), [1.2] Chrome 131
    impersonation, [1.3] Safari 18 impersonation. JA3/JA4/Akamai hashes are
    truncated for display. All errors are printed, never raised.
    """
    print("\n" + "="*70)
    print("测试 1: TLS 指纹检测 (browserleaks.com)")
    print("="*70)
    # 1. Plain request (Python's default TLS fingerprint) — easily flagged as a bot.
    print("\n[1.1] 普通请求(无指纹模拟):")
    try:
        response = cfspider.get("https://tls.browserleaks.com/json")
        data = response.json()
        print(f" JA3 Hash: {data.get('ja3_hash', 'N/A')[:20]}...")
        print(f" User Agent: {data.get('user_agent', 'N/A')[:50]}...")
        print(f" 状态: 可能被识别为 Python 爬虫")
    except Exception as e:
        print(f" 错误: {e}")
    # 2. Chrome 131 fingerprint.
    print("\n[1.2] Chrome 131 指纹模拟:")
    try:
        response = cfspider.get(
            "https://tls.browserleaks.com/json",
            impersonate="chrome131"
        )
        data = response.json()
        print(f" JA3 Hash: {data.get('ja3_hash', 'N/A')[:20]}...")
        print(f" JA4: {data.get('ja4', 'N/A')[:30]}...")
        print(f" Akamai Hash: {data.get('akamai_hash', 'N/A')[:20]}...")
        print(f" 状态: ✓ 模拟真实 Chrome 浏览器")
    except Exception as e:
        print(f" 错误: {e}")
    # 3. Safari 18 fingerprint.
    print("\n[1.3] Safari 18 指纹模拟:")
    try:
        response = cfspider.get(
            "https://tls.browserleaks.com/json",
            impersonate="safari18_0"
        )
        data = response.json()
        print(f" JA3 Hash: {data.get('ja3_hash', 'N/A')[:20]}...")
        print(f" JA4: {data.get('ja4', 'N/A')[:30]}...")
        print(f" 状态: ✓ 模拟真实 Safari 浏览器")
    except Exception as e:
        print(f" 错误: {e}")
def test_cloudflare_detection():
    """Fetch Cloudflare's cdn-cgi/trace endpoint with a Chrome fingerprint.

    Echoes the interesting trace fields (ip/loc/colo/warp) plus the status
    code; any exception is printed rather than raised.
    """
    print("\n" + "="*70)
    print("测试 2: Cloudflare 反爬检测")
    print("="*70)
    # Hit the CDN trace endpoint.
    print("\n[2.1] Cloudflare CDN Trace:")
    interesting = ('ip=', 'loc=', 'colo=', 'warp=')
    try:
        response = cfspider.get(
            "https://www.cloudflare.com/cdn-cgi/trace",
            impersonate="chrome131"
        )
        for trace_line in response.text.strip().split('\n'):
            if any(prefix in trace_line for prefix in interesting):
                print(f" {trace_line}")
        print(f" 状态码: {response.status_code}")
        print(f" 状态: ✓ 成功访问 Cloudflare")
    except Exception as e:
        print(f" 错误: {e}")
def test_nowsecure():
    """Probe nowsecure.nl (an anti-bot demo site) with a Chrome 131 fingerprint.

    Classifies the body into: passed the check, JavaScript challenge required,
    or generic response. Errors are printed, never raised.
    """
    print("\n" + "="*70)
    print("测试 3: NowSecure 反爬检测 (nowsecure.nl)")
    print("="*70)
    print("\n[3.1] 使用 Chrome 131 指纹:")
    try:
        response = cfspider.get(
            "https://nowsecure.nl/",
            impersonate="chrome131",
            headers={"Accept-Language": "en-US,en;q=0.9"}
        )
        print(f" 状态码: {response.status_code}")
        if response.status_code == 200:
            # Heuristic body markers — the site's copy may change; TODO confirm.
            if "You are not a bot" in response.text or "passed" in response.text.lower():
                print(f" 状态: ✓ 通过反爬检测!")
            elif "challenge" in response.text.lower():
                print(f" 状态: ⚠ 需要 JavaScript 挑战")
            else:
                print(f" 状态: 已获取响应 ({len(response.text)} 字节)")
        else:
            print(f" 状态: HTTP {response.status_code}")
    except Exception as e:
        print(f" 错误: {e}")
def test_httpbin_with_workers():
    """Combine the Workers proxy with a Chrome 131 TLS fingerprint.

    [4.1] checks the egress IP seen by httpbin; [4.2] checks that custom
    request headers survive the proxy hop. Errors are printed, never raised.
    """
    print("\n" + "="*70)
    print("测试 4: Workers 代理 + TLS 指纹组合")
    print("="*70)
    print("\n[4.1] Workers 代理 + Chrome 指纹:")
    try:
        response = cfspider.get(
            "https://httpbin.org/ip",
            cf_proxies=CF_WORKERS,
            impersonate="chrome131"
        )
        data = response.json()
        print(f" 出口 IP: {data.get('origin', 'N/A')}")
        print(f" CF Colo: {response.cf_colo}")
        print(f" 状态: ✓ 使用 Cloudflare IP + Chrome 指纹")
    except Exception as e:
        print(f" 错误: {e}")
    print("\n[4.2] Workers 代理检测请求头:")
    try:
        response = cfspider.get(
            "https://httpbin.org/headers",
            cf_proxies=CF_WORKERS,
            impersonate="chrome131",
            headers={
                "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
                "Accept-Encoding": "gzip, deflate, br"
            }
        )
        data = response.json()
        headers = data.get('headers', {})
        print(f" User-Agent: {headers.get('User-Agent', 'N/A')[:60]}...")
        print(f" Accept-Language: {headers.get('Accept-Language', 'N/A')}")
        print(f" 状态: ✓ 请求头正确传递")
    except Exception as e:
        print(f" 错误: {e}")
def test_async_with_fingerprint():
    """Async request suite: one Workers-proxied GET, then three concurrent GETs.

    NOTE(review): despite the name, no TLS fingerprint is applied here —
    only the Workers proxy is exercised. Drives a local coroutine with
    asyncio.run(); failures are printed rather than raised.
    """
    print("\n" + "="*70)
    print("测试 5: 异步请求功能")
    print("="*70)
    import asyncio
    async def async_test():
        # [5.1] single async GET through the Workers proxy
        print("\n[5.1] 异步 GET 请求:")
        try:
            response = await cfspider.aget(
                "https://httpbin.org/ip",
                cf_proxies=CF_WORKERS
            )
            data = response.json()
            print(f" 出口 IP: {data.get('origin', 'N/A')}")
            print(f" CF Colo: {response.cf_colo}")
            print(f" 状态: ✓ 异步请求成功")
        except Exception as e:
            print(f" 错误: {e}")
        # [5.2] three concurrent 1-second-delay requests — wall time should be ~1s
        print("\n[5.2] 并发异步请求:")
        try:
            import time
            start = time.time()
            tasks = [
                cfspider.aget("https://httpbin.org/delay/1", cf_proxies=CF_WORKERS),
                cfspider.aget("https://httpbin.org/delay/1", cf_proxies=CF_WORKERS),
                cfspider.aget("https://httpbin.org/delay/1", cf_proxies=CF_WORKERS)
            ]
            responses = await asyncio.gather(*tasks)
            elapsed = time.time() - start
            print(f" 3个并发请求完成")
            print(f" 总耗时: {elapsed:.2f}s (串行约需 3s)")
            print(f" 状态: ✓ 并发请求有效")
        except Exception as e:
            print(f" 错误: {e}")
    asyncio.run(async_test())
def test_fingerprint_comparison():
    """Print a small table comparing JA3/JA4 hashes across four browser profiles.

    Each profile is queried against tls.browserleaks.com; per-profile errors
    produce an error row instead of aborting the table.
    """
    print("\n" + "="*70)
    print("测试 6: 不同浏览器指纹对比")
    print("="*70)
    profiles = (
        ("chrome131", "Chrome 131"),
        ("safari18_0", "Safari 18"),
        ("firefox133", "Firefox 133"),
        ("edge101", "Edge 101"),
    )
    print("\n 浏览器 | JA3 Hash (前16字符) | JA4 (前20字符)")
    print(" " + "-"*70)
    for profile_id, label in profiles:
        try:
            payload = cfspider.get(
                "https://tls.browserleaks.com/json",
                impersonate=profile_id
            ).json()
            row_ja3 = payload.get('ja3_hash', 'N/A')[:16]
            row_ja4 = payload.get('ja4', 'N/A')[:20]
            print(f" {label:14} | {row_ja3:22} | {row_ja4}")
        except Exception as e:
            print(f" {label:14} | 错误: {e}")
def test_real_websites():
    """Fetch four production sites with a Chrome fingerprint and report status.

    Prints status code and body size per site; a non-200 is reported, not
    treated as a failure. Per-site exceptions are printed and the loop continues.
    """
    print("\n" + "="*70)
    print("测试 7: 访问真实网站")
    print("="*70)
    targets = (
        ("https://www.google.com", "Google"),
        ("https://www.amazon.com", "Amazon"),
        ("https://www.github.com", "GitHub"),
        ("https://www.cloudflare.com", "Cloudflare"),
    )
    for site_url, site_name in targets:
        print(f"\n[{site_name}]")
        try:
            response = cfspider.get(
                site_url,
                impersonate="chrome131",
                headers={
                    "Accept-Language": "en-US,en;q=0.9",
                    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"
                },
                timeout=15
            )
            print(f" 状态码: {response.status_code}")
            print(f" 响应大小: {len(response.text):,} 字节")
            if response.status_code == 200:
                print(f" 状态: ✓ 成功访问")
            else:
                print(f" 状态: ⚠ HTTP {response.status_code}")
        except Exception as e:
            print(f" 错误: {e}")
def test_async_http2():
    """Async + HTTP/2 suite: GET, POST, session reuse, concurrency, streaming.

    Drives a local coroutine with asyncio.run(); every sub-test catches and
    prints its own exceptions so the suite always runs to completion.
    """
    print("\n" + "="*70)
    print("测试 8: 异步请求 + HTTP/2")
    print("="*70)
    import asyncio
    async def async_http2_test():
        # [8.1] single async GET with query params through the Workers proxy
        print("\n[8.1] 异步 HTTP/2 GET 请求:")
        try:
            response = await cfspider.aget(
                "https://httpbin.org/get",
                cf_proxies=CF_WORKERS,
                params={"async": "true", "http2": "enabled"}
            )
            data = response.json()
            print(f" 状态码: {response.status_code}")
            print(f" CF Colo: {response.cf_colo}")
            # http_version may be absent on some response types, hence getattr
            print(f" HTTP 版本: {getattr(response, 'http_version', 'N/A')}")
            print(f" URL 参数: {data.get('args', {})}")
            print(f" 状态: OK 异步 HTTP/2 请求成功")
        except Exception as e:
            print(f" 错误: {e}")
        # [8.2] async POST with a JSON body
        print("\n[8.2] 异步 HTTP/2 POST 请求:")
        try:
            response = await cfspider.apost(
                "https://httpbin.org/post",
                cf_proxies=CF_WORKERS,
                json={"async": True, "http2": True, "test": "cfspider"},
                headers={"Content-Type": "application/json"}
            )
            data = response.json()
            print(f" 状态码: {response.status_code}")
            print(f" CF Colo: {response.cf_colo}")
            print(f" POST JSON: {data.get('json', {})}")
            print(f" 状态: OK 异步 POST 成功")
        except Exception as e:
            print(f" 错误: {e}")
        # [8.3] three sequential requests sharing one AsyncSession
        print("\n[8.3] 异步 Session + HTTP/2:")
        try:
            async with cfspider.AsyncSession(cf_proxies=CF_WORKERS) as session:
                r1 = await session.get("https://httpbin.org/ip")
                r2 = await session.get("https://httpbin.org/headers")
                r3 = await session.post("https://httpbin.org/post", json={"session": "test"})
                print(f" 请求 1 状态码: {r1.status_code}")
                print(f" 请求 2 状态码: {r2.status_code}")
                print(f" 请求 3 状态码: {r3.status_code}")
                print(f" CF Colo: {r1.cf_colo}")
                print(f" 状态: OK 异步 Session 正常")
        except Exception as e:
            print(f" 错误: {e}")
        # [8.4] five concurrent 1s-delay requests — wall time should be ~1s
        print("\n[8.4] 异步并发 HTTP/2 请求:")
        try:
            import time
            start = time.time()
            # 5 concurrent requests
            tasks = [
                cfspider.aget(f"https://httpbin.org/delay/1?id={i}", cf_proxies=CF_WORKERS)
                for i in range(5)
            ]
            responses = await asyncio.gather(*tasks)
            elapsed = time.time() - start
            print(f" 5 个并发请求完成")
            print(f" 总耗时: {elapsed:.2f}s (串行约需 5s+)")
            print(f" 所有状态码: {[r.status_code for r in responses]}")
            print(f" 状态: OK 并发请求有效 (节省 {5 - elapsed:.1f}s)")
        except Exception as e:
            print(f" 错误: {e}")
        # [8.5] chunked streaming download via astream (counts bytes only)
        print("\n[8.5] 异步流式下载 (astream):")
        try:
            total_bytes = 0
            async with cfspider.astream("GET", "https://httpbin.org/bytes/10240", cf_proxies=CF_WORKERS) as response:
                async for chunk in response.aiter_bytes(chunk_size=1024):
                    total_bytes += len(chunk)
            print(f" 下载字节数: {total_bytes}")
            print(f" 状态: OK 流式下载成功")
        except Exception as e:
            print(f" 错误: {e}")
    asyncio.run(async_http2_test())
def test_all_parameters():
    """Exercise every supported parameter of cfspider.get()/post().

    Covers Workers-proxy mode, plain (no-proxy) mode, HTTP/2 mode, and the
    full set of requests-compatible keyword arguments, then prints a
    coverage table. Fix: the section labels printed "[8.x]" (copied from
    test 8) although the banner says 测试 9 — renumbered to "[9.x]".
    """
    print("\n" + "="*70)
    print("测试 9: .get() 方法所有参数组合")
    print("="*70)
    print("\n[9.1] 所有参数组合测试:")
    try:
        response = cfspider.get(
            # basic
            url="https://httpbin.org/get",
            # cfspider-specific
            cf_proxies=CF_WORKERS,       # Workers proxy endpoint
            cf_workers=True,             # use the Workers API mode
            http2=False,                 # HTTP/2 off (incompatible with impersonate)
            impersonate="chrome131",     # TLS fingerprint to mimic
            # requests-compatible
            params={"key1": "value1", "key2": "中文参数"},  # URL query params
            headers={
                "User-Agent": "CFspider-Test/1.0",
                "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
                "X-Custom-Header": "custom-value"
            },
            cookies={"session": "test123", "user": "cfspider"},
            timeout=30
        )
        data = response.json()
        print(f" 状态码: {response.status_code}")
        print(f" CF Colo: {response.cf_colo}")
        print(f" CF Ray: {response.cf_ray}")
        print(f"\n 请求参数验证:")
        print(f" URL 参数: {data.get('args', {})}")
        print(f" Headers 数量: {len(data.get('headers', {}))}")
        print(f" Origin (IP): {data.get('origin', 'N/A')}")
        print(f"\n 状态: ✓ 所有参数正确传递")
    except Exception as e:
        print(f" 错误: {e}")
        import traceback
        traceback.print_exc()
    print("\n[9.2] POST 请求所有参数:")
    try:
        response = cfspider.post(
            url="https://httpbin.org/post",
            cf_proxies=CF_WORKERS,
            cf_workers=True,
            impersonate="safari18_0",
            params={"action": "test"},
            headers={
                "Content-Type": "application/json",
                "Accept": "application/json"
            },
            cookies={"auth": "token123"},
            json={"name": "cfspider", "version": "1.4.1", "features": ["proxy", "fingerprint", "async"]},
            timeout=30
        )
        data = response.json()
        print(f" 状态码: {response.status_code}")
        print(f" CF Colo: {response.cf_colo}")
        print(f" POST JSON: {data.get('json', {})}")
        print(f" 状态: ✓ POST 请求所有参数正确")
    except Exception as e:
        print(f" 错误: {e}")
    print("\n[9.3] 普通代理模式 (cf_workers=False):")
    try:
        # Direct request (no proxy) — only verifies parameter passing.
        response = cfspider.get(
            url="https://httpbin.org/get",
            cf_proxies=None,             # no proxy
            cf_workers=False,            # plain mode
            impersonate="firefox133",    # Firefox fingerprint
            params={"test": "direct"},
            headers={"X-Test": "no-proxy"},
            timeout=15
        )
        data = response.json()
        print(f" 状态码: {response.status_code}")
        print(f" URL 参数: {data.get('args', {})}")
        print(f" 状态: ✓ 普通模式正常工作")
    except Exception as e:
        print(f" 错误: {e}")
    print("\n[9.4] HTTP/2 模式 (无指纹):")
    try:
        response = cfspider.get(
            url="https://httpbin.org/get",
            cf_proxies=CF_WORKERS,
            cf_workers=True,
            http2=True,          # enable HTTP/2
            impersonate=None,    # no fingerprint (HTTP/2 and impersonate are mutually exclusive)
            params={"http2": "enabled"},
            headers={"Accept": "application/json"},
            timeout=15
        )
        print(f" 状态码: {response.status_code}")
        print(f" CF Colo: {response.cf_colo}")
        print(f" 状态: ✓ HTTP/2 模式正常工作")
    except Exception as e:
        print(f" 错误: {e}")
    print("\n[9.5] 参数覆盖率统计:")
    print(" ┌─────────────────────┬──────────┐")
    print(" │ 参数 │ 状态 │")
    print(" ├─────────────────────┼──────────┤")
    print(" │ url │ ✓ 已测试 │")
    print(" │ cf_proxies │ ✓ 已测试 │")
    print(" │ cf_workers=True │ ✓ 已测试 │")
    print(" │ cf_workers=False │ ✓ 已测试 │")
    print(" │ http2=True │ ✓ 已测试 │")
    print(" │ impersonate │ ✓ 已测试 │")
    print(" │ params │ ✓ 已测试 │")
    print(" │ headers │ ✓ 已测试 │")
    print(" │ cookies │ ✓ 已测试 │")
    print(" │ timeout │ ✓ 已测试 │")
    print(" │ json (POST) │ ✓ 已测试 │")
    print(" └─────────────────────┴──────────┘")
def main():
    """Entry point: print a banner, then run all nine anti-bot test groups."""
    rule = "="*70
    print(rule)
    print("CFspider 反爬绕过能力测试")
    print(rule)
    print(f"版本: {cfspider.__version__}")
    print(f"Workers: {CF_WORKERS}")
    # Run the suites in their numbered order.
    for suite in (
        test_tls_fingerprint,
        test_cloudflare_detection,
        test_nowsecure,
        test_httpbin_with_workers,
        test_async_with_fingerprint,
        test_fingerprint_comparison,
        test_real_websites,
        test_async_http2,
        test_all_parameters,
    ):
        suite()
    print("\n" + rule)
    print("测试完成!")
    print(rule)


if __name__ == "__main__":
    main()

17
test_api.py Normal file
View File

@@ -0,0 +1,17 @@
"""Quick API smoke test: fetch the egress IP in three proxy modes."""
import uuid
import cfspider

IP_URL = "https://httpbin.org/ip"


def _show_ip(resp):
    # Print the caller IP reported by httpbin for the given response.
    print(f" IP: {resp.json()['origin']}")


print("1. 无代理:")
_show_ip(cfspider.get(IP_URL))
print("2. Workers代理 (cf_workers=True):")
_show_ip(cfspider.get(IP_URL, cf_proxies="cfspider.violetqqcom.workers.dev"))
print("3. 普通代理 (cf_workers=False):")
_show_ip(cfspider.get(IP_URL, cf_proxies="127.0.0.1:9674", cf_workers=False))
# NOTE(review): instantiated for side effects only, result discarded — confirm intent.
cfspider.Browser()
print("\nDone!")

260
test_download.py Normal file
View File

@@ -0,0 +1,260 @@
"""
测试 cfspider 文件下载功能(流式响应)
"""
import asyncio
import os
import sys
sys.path.insert(0, '.')
import cfspider
# Workers proxy endpoint used by all proxied download tests.
CF_WORKERS = "https://ip.kami666.xyz"
# Candidate download targets.
# NOTE(review): this table is not referenced by any test in this file —
# each test hard-codes its own URL; kept for reference / future use.
TEST_FILES = [
    {
        "name": "小文件 (JSON)",
        "url": "https://httpbin.org/json",
        "filename": "test_json.json"
    },
    {
        "name": "中等文件 (robots.txt)",
        "url": "https://www.google.com/robots.txt",
        "filename": "test_robots.txt"
    },
    {
        "name": "图片文件 (PNG)",
        "url": "https://httpbin.org/image/png",
        "filename": "test_image.png"
    }
]
async def test_stream_download_no_proxy():
    """Stream-download 10 KB of random bytes without a proxy.

    Writes the stream to a temp file in 1 KB chunks, reports the size,
    removes the file, and returns True on success / False on any error.
    """
    print("\n" + "="*60)
    print("测试 1: 流式下载 - 无代理模式")
    print("="*60)
    url = "https://httpbin.org/bytes/10240"  # 10 KB of random bytes
    filename = "test_bytes_no_proxy.bin"
    try:
        async with cfspider.astream("GET", url) as response:
            print(f"状态码: {response.status_code}")
            print(f"HTTP 版本: {response.http_version}")
            with open(filename, "wb") as f:
                async for chunk in response.aiter_bytes(chunk_size=1024):
                    f.write(chunk)
        file_size = os.path.getsize(filename)
        # Fix: this line previously had no f-string placeholder and printed a
        # literal "(unknown)" instead of the downloaded file's name. Also
        # dropped the never-read total_bytes accumulator.
        print(f"下载完成: {filename}")
        print(f"文件大小: {file_size} bytes")
        os.remove(filename)
        print("✓ 测试通过")
        return True
    except Exception as e:
        print(f"✗ 测试失败: {e}")
        return False
async def test_stream_download_workers():
    """Stream-download 10 KB of random bytes through the Workers proxy.

    Same as the no-proxy variant but routed via CF_WORKERS; additionally
    prints the Cloudflare colo that served the request.
    Returns True on success, False on any error.
    """
    print("\n" + "="*60)
    print("测试 2: 流式下载 - Workers API 代理")
    print("="*60)
    url = "https://httpbin.org/bytes/10240"  # 10 KB of random bytes
    filename = "test_bytes_workers.bin"
    try:
        async with cfspider.astream("GET", url, cf_proxies=CF_WORKERS) as response:
            print(f"状态码: {response.status_code}")
            print(f"HTTP 版本: {response.http_version}")
            print(f"CF Colo: {response.cf_colo}")
            with open(filename, "wb") as f:
                async for chunk in response.aiter_bytes(chunk_size=1024):
                    f.write(chunk)
        file_size = os.path.getsize(filename)
        # Fix: print the real file name (line previously printed a literal
        # "(unknown)"); dropped the never-read total_bytes accumulator.
        print(f"下载完成: {filename}")
        print(f"文件大小: {file_size} bytes")
        os.remove(filename)
        print("✓ 测试通过")
        return True
    except Exception as e:
        print(f"✗ 测试失败: {e}")
        return False
async def test_download_image():
    """Download a PNG through the Workers proxy and validate its signature.

    Returns True on success, False on any error. The PNG check only looks
    at the first 4 magic bytes (b"\\x89PNG").
    """
    print("\n" + "="*60)
    print("测试 3: 下载图片文件 - Workers API 代理")
    print("="*60)
    url = "https://httpbin.org/image/png"
    filename = "test_image.png"
    try:
        async with cfspider.astream("GET", url, cf_proxies=CF_WORKERS) as response:
            print(f"状态码: {response.status_code}")
            print(f"HTTP 版本: {response.http_version}")
            print(f"Content-Type: {response.headers.get('content-type', 'N/A')}")
            with open(filename, "wb") as f:
                async for chunk in response.aiter_bytes():
                    f.write(chunk)
        file_size = os.path.getsize(filename)
        # Fix: print the real file name (line previously printed "(unknown)").
        print(f"下载完成: {filename}")
        print(f"文件大小: {file_size} bytes")
        # Validate the PNG magic number.
        with open(filename, "rb") as f:
            header = f.read(8)
        if header[:4] == b'\x89PNG':
            print("文件类型验证: ✓ 有效的 PNG 文件")
        else:
            print("文件类型验证: ✗ 无效的 PNG 文件")
        os.remove(filename)
        print("✓ 测试通过")
        return True
    except Exception as e:
        print(f"✗ 测试失败: {e}")
        return False
async def test_download_text():
    """Stream google.com/robots.txt line-by-line through the Workers proxy.

    Echoes the first five lines, then prints the total line count.
    Returns True on success, False on any error.
    """
    print("\n" + "="*60)
    print("测试 4: 下载文本文件 - Workers API 代理")
    print("="*60)
    url = "https://www.google.com/robots.txt"
    # NOTE(review): unused — nothing is written to disk in this test.
    filename = "test_robots.txt"
    try:
        collected = []
        async with cfspider.astream("GET", url, cf_proxies=CF_WORKERS) as response:
            print(f"状态码: {response.status_code}")
            print(f"HTTP 版本: {response.http_version}")
            async for text_line in response.aiter_lines():
                collected.append(text_line)
                if len(collected) <= 5:
                    print(f" {text_line}")
        print(f"...")
        print(f"总行数: {len(collected)}")
        print("✓ 测试通过")
        return True
    except Exception as e:
        print(f"✗ 测试失败: {e}")
        return False
async def test_large_download():
    """Download 100 KB without a proxy and report throughput.

    Returns True on success, False on any error.
    """
    print("\n" + "="*60)
    print("测试 5: 大文件下载 (100KB) - 无代理")
    print("="*60)
    url = "https://httpbin.org/bytes/102400"  # 100 KB
    filename = "test_large.bin"
    try:
        import time
        start = time.time()
        async with cfspider.astream("GET", url) as response:
            print(f"状态码: {response.status_code}")
            with open(filename, "wb") as f:
                async for chunk in response.aiter_bytes(chunk_size=8192):
                    f.write(chunk)
        elapsed = time.time() - start
        file_size = os.path.getsize(filename)
        speed = file_size / elapsed / 1024  # KB/s
        # Fix: print the real file name (line previously printed "(unknown)").
        print(f"下载完成: {filename}")
        print(f"文件大小: {file_size / 1024:.1f} KB")
        print(f"耗时: {elapsed:.2f}s")
        print(f"速度: {speed:.1f} KB/s")
        os.remove(filename)
        print("✓ 测试通过")
        return True
    except Exception as e:
        print(f"✗ 测试失败: {e}")
        return False
async def test_session_stream():
    """Stream 5 KB via an AsyncSession bound to the Workers proxy.

    Reads the whole body with aread() and prints its size.
    Returns True on success, False on any error.
    """
    print("\n" + "="*60)
    print("测试 6: Session 流式下载 - Workers API 代理")
    print("="*60)
    payload_url = "https://httpbin.org/bytes/5120"
    try:
        async with cfspider.AsyncSession(cf_proxies=CF_WORKERS) as session, \
                session.stream("GET", payload_url) as response:
            print(f"状态码: {response.status_code}")
            print(f"HTTP 版本: {response.http_version}")
            body = await response.aread()
            print(f"数据大小: {len(body)} bytes")
        print("✓ 测试通过")
        return True
    except Exception as e:
        print(f"✗ 测试失败: {e}")
        return False
async def main():
    """Run every download test sequentially and print a numbered summary."""
    print("="*60)
    print("CFspider 文件下载测试")
    print("="*60)
    print(f"Workers 地址: {CF_WORKERS}")
    # Run sequentially so each test's console output stays grouped.
    runners = [
        test_stream_download_no_proxy,
        test_stream_download_workers,
        test_download_image,
        test_download_text,
        test_large_download,
        test_session_stream,
    ]
    results = []
    for runner in runners:
        results.append(await runner())
    # Summary
    print("\n" + "="*60)
    print("测试结果汇总")
    print("="*60)
    labels = [
        "流式下载 - 无代理",
        "流式下载 - Workers API",
        "下载图片文件",
        "下载文本文件",
        "大文件下载 (100KB)",
        "Session 流式下载"
    ]
    passed = sum(1 for ok in results if ok)
    failed = len(results) - passed
    for idx, (label, ok) in enumerate(zip(labels, results), start=1):
        print(f"{idx}. {label}: {'✓ 通过' if ok else '✗ 失败'}")
    print(f"\n总计: {passed} 通过, {failed} 失败")


if __name__ == "__main__":
    asyncio.run(main())

239
test_httpx.py Normal file
View File

@@ -0,0 +1,239 @@
"""
测试 cfspider httpx 异步功能
"""
import asyncio
import sys
sys.path.insert(0, '.')
import cfspider
# Workers 地址
CF_WORKERS = "https://ip.kami666.xyz"
async def test_async_no_proxy():
    """Async GET without any proxy configured.

    Prints the status code, negotiated HTTP version and response body.
    Returns True on success, False if anything raises.
    """
    rule = "="*60
    print("\n" + rule)
    print("测试 1: 异步请求 - 无代理模式")
    print(rule)
    try:
        resp = await cfspider.aget("https://httpbin.org/ip")
        for label, value in (
            ("状态码", resp.status_code),
            ("HTTP 版本", resp.http_version),
            ("响应", resp.text),
        ):
            print(f"{label}: {value}")
        print("✓ 测试通过")
        return True
    except Exception as e:
        print(f"✗ 测试失败: {e}")
        return False
async def test_async_workers_proxy():
    """Async GET routed through the Cloudflare Workers proxy.

    Prints status, HTTP version, CF colo/ray metadata and the body.
    Returns True on success, False on any exception.
    """
    print("\n" + "="*60)
    print("测试 2: 异步请求 - Workers API 代理")
    print("="*60)
    try:
        response = await cfspider.aget(
            "https://httpbin.org/ip",
            cf_proxies=CF_WORKERS
        )
        print(f"状态码: {response.status_code}")
        print(f"HTTP 版本: {response.http_version}")
        print(f"CF Colo: {response.cf_colo}")
        print(f"CF Ray: {response.cf_ray}")
        print(f"响应: {response.text}")
        print("✓ 测试通过")
        return True
    except Exception as e:
        print(f"✗ 测试失败: {e}")
        return False
async def test_async_post():
    """Async POST with a JSON body through the Workers proxy.

    Echoes the JSON that httpbin received back.
    Returns True on success, False on any exception.
    """
    print("\n" + "="*60)
    print("测试 3: 异步 POST 请求 - Workers API 代理")
    print("="*60)
    try:
        response = await cfspider.apost(
            "https://httpbin.org/post",
            cf_proxies=CF_WORKERS,
            json={"name": "cfspider", "version": "1.3.0", "feature": "httpx"}
        )
        print(f"状态码: {response.status_code}")
        print(f"HTTP 版本: {response.http_version}")
        data = response.json()
        print(f"发送的 JSON: {data.get('json', {})}")
        print("✓ 测试通过")
        return True
    except Exception as e:
        print(f"✗ 测试失败: {e}")
        return False
async def test_async_session():
    """Exercise one Workers-proxied AsyncSession across three requests.

    GET, POST and GET again on the same session, printing status code and
    HTTP version for each. Returns True on success, False on any exception.
    """
    print("\n" + "="*60)
    print("测试 4: 异步 Session - Workers API 代理")
    print("="*60)
    try:
        async with cfspider.AsyncSession(cf_proxies=CF_WORKERS) as session:
            first = await session.get("https://httpbin.org/ip")
            print(f"请求 1 - 状态码: {first.status_code}, HTTP 版本: {first.http_version}")
            second = await session.post("https://httpbin.org/post", json={"test": 1})
            print(f"请求 2 - 状态码: {second.status_code}, HTTP 版本: {second.http_version}")
            third = await session.get("https://httpbin.org/headers")
            print(f"请求 3 - 状态码: {third.status_code}, HTTP 版本: {third.http_version}")
        print("✓ 测试通过")
        return True
    except Exception as e:
        print(f"✗ 测试失败: {e}")
        return False
async def test_async_session_no_proxy():
    """AsyncSession without a proxy: one GET, print status/version/body.

    Returns True on success, False on any exception.
    """
    print("\n" + "="*60)
    print("测试 5: 异步 Session - 无代理模式")
    print("="*60)
    try:
        async with cfspider.AsyncSession() as session:
            r1 = await session.get("https://httpbin.org/ip")
            print(f"请求 1 - 状态码: {r1.status_code}, HTTP 版本: {r1.http_version}")
            print(f"响应: {r1.text}")
        print("✓ 测试通过")
        return True
    except Exception as e:
        print(f"✗ 测试失败: {e}")
        return False
async def test_concurrent_requests():
    """Fire three Workers-proxied GETs concurrently and time them.

    Prints per-request status/HTTP version and the wall-clock time for the
    whole batch. Returns True on success, False on any exception.
    """
    print("\n" + "="*60)
    print("测试 6: 并发请求 - Workers API 代理")
    print("="*60)
    import time
    endpoints = (
        "https://httpbin.org/ip",
        "https://httpbin.org/headers",
        "https://httpbin.org/user-agent",
    )
    try:
        started = time.time()
        responses = await asyncio.gather(
            *(cfspider.aget(u, cf_proxies=CF_WORKERS) for u in endpoints)
        )
        elapsed = time.time() - started
        for n, r in enumerate(responses, start=1):
            print(f"请求 {n} - 状态码: {r.status_code}, HTTP 版本: {r.http_version}")
        print(f"并发 3 个请求耗时: {elapsed:.2f}s")
        print("✓ 测试通过")
        return True
    except Exception as e:
        print(f"✗ 测试失败: {e}")
        return False
def test_sync_http2():
    """Synchronous GET with http2=True and no proxy.

    Returns True on success, False on any exception.
    """
    print("\n" + "="*60)
    print("测试 7: 同步请求 HTTP/2 - 无代理")
    print("="*60)
    try:
        response = cfspider.get(
            "https://httpbin.org/ip",
            http2=True
        )
        print(f"状态码: {response.status_code}")
        print(f"响应: {response.text}")
        print("✓ 测试通过")
        return True
    except Exception as e:
        print(f"✗ 测试失败: {e}")
        return False
def test_sync_http2_workers():
    """Synchronous GET with http2=True routed through the Workers proxy.

    Also prints the Cloudflare colo that served the request.
    Returns True on success, False on any exception.
    """
    print("\n" + "="*60)
    print("测试 8: 同步请求 HTTP/2 - Workers API 代理")
    print("="*60)
    try:
        response = cfspider.get(
            "https://httpbin.org/ip",
            cf_proxies=CF_WORKERS,
            http2=True
        )
        print(f"状态码: {response.status_code}")
        print(f"CF Colo: {response.cf_colo}")
        print(f"响应: {response.text}")
        print("✓ 测试通过")
        return True
    except Exception as e:
        print(f"✗ 测试失败: {e}")
        return False
async def main():
    """Run the full httpx-mode suite (6 async + 2 sync tests) and summarize."""
    print("="*60)
    print("CFspider httpx 功能测试")
    print("="*60)
    print(f"Workers 地址: {CF_WORKERS}")
    results = []
    # Async tests
    results.append(await test_async_no_proxy())
    results.append(await test_async_workers_proxy())
    results.append(await test_async_post())
    results.append(await test_async_session())
    results.append(await test_async_session_no_proxy())
    results.append(await test_concurrent_requests())
    # Sync HTTP/2 tests
    results.append(test_sync_http2())
    results.append(test_sync_http2_workers())
    # Summary — labels must stay in the same order as the calls above.
    print("\n" + "="*60)
    print("测试结果汇总")
    print("="*60)
    tests = [
        "异步请求 - 无代理",
        "异步请求 - Workers API",
        "异步 POST - Workers API",
        "异步 Session - Workers API",
        "异步 Session - 无代理",
        "并发请求 - Workers API",
        "同步 HTTP/2 - 无代理",
        "同步 HTTP/2 - Workers API"
    ]
    passed = 0
    failed = 0
    for i, (test, result) in enumerate(zip(tests, results)):
        status = "✓ 通过" if result else "✗ 失败"
        print(f"{i+1}. {test}: {status}")
        if result:
            passed += 1
        else:
            failed += 1
    print(f"\n总计: {passed} 通过, {failed} 失败")


if __name__ == "__main__":
    asyncio.run(main())

190
test_impersonate.py Normal file
View File

@@ -0,0 +1,190 @@
"""
测试 cfspider TLS 指纹模拟功能
"""
import sys
sys.path.insert(0, '.')
import cfspider
# Workers 地址
CF_WORKERS = "https://ip.kami666.xyz"
def test_impersonate_get():
    """GET the browserleaks TLS JSON impersonating Chrome 131.

    Prints JA3/JA4/Akamai hashes. Returns True on success, False on error.
    """
    print("\n" + "="*60)
    print("测试 1: TLS 指纹模拟 GET 请求 - Chrome 131")
    print("="*60)
    try:
        response = cfspider.impersonate_get(
            "https://tls.browserleaks.com/json",
            impersonate="chrome131"
        )
        print(f"状态码: {response.status_code}")
        data = response.json()
        print(f"JA3 Hash: {data.get('ja3_hash', 'N/A')}")
        print(f"JA4: {data.get('ja4', 'N/A')}")
        print(f"Akamai Hash: {data.get('akamai_hash', 'N/A')}")
        print("✓ 测试通过")
        return True
    except Exception as e:
        print(f"✗ 测试失败: {e}")
        return False
def test_impersonate_safari():
    """GET the browserleaks TLS JSON impersonating Safari 18; print JA3/JA4.

    Returns True on success, False on any exception.
    """
    print("\n" + "="*60)
    print("测试 2: TLS 指纹模拟 GET 请求 - Safari 18")
    print("="*60)
    try:
        resp = cfspider.impersonate_get(
            "https://tls.browserleaks.com/json",
            impersonate="safari18_0"
        )
        print(f"状态码: {resp.status_code}")
        payload = resp.json()
        for key, label in (("ja3_hash", "JA3 Hash"), ("ja4", "JA4")):
            print(f"{label}: {payload.get(key, 'N/A')}")
        print("✓ 测试通过")
        return True
    except Exception as e:
        print(f"✗ 测试失败: {e}")
        return False
def test_impersonate_firefox():
    """GET the browserleaks TLS JSON impersonating Firefox 133; print JA3/JA4.

    Returns True on success, False on any exception.
    """
    print("\n" + "="*60)
    print("测试 3: TLS 指纹模拟 GET 请求 - Firefox 133")
    print("="*60)
    try:
        response = cfspider.impersonate_get(
            "https://tls.browserleaks.com/json",
            impersonate="firefox133"
        )
        print(f"状态码: {response.status_code}")
        data = response.json()
        print(f"JA3 Hash: {data.get('ja3_hash', 'N/A')}")
        print(f"JA4: {data.get('ja4', 'N/A')}")
        print("✓ 测试通过")
        return True
    except Exception as e:
        print(f"✗ 测试失败: {e}")
        return False
def test_impersonate_workers():
    """Chrome 131 fingerprint combined with the Workers proxy.

    Prints the status, CF colo and body from httpbin.org/ip.
    Returns True on success, False on any exception.
    """
    print("\n" + "="*60)
    print("测试 4: TLS 指纹 + Workers 代理")
    print("="*60)
    try:
        response = cfspider.impersonate_get(
            "https://httpbin.org/ip",
            impersonate="chrome131",
            cf_proxies=CF_WORKERS
        )
        print(f"状态码: {response.status_code}")
        print(f"CF Colo: {response.cf_colo}")
        print(f"响应: {response.text}")
        print("✓ 测试通过")
        return True
    except Exception as e:
        print(f"✗ 测试失败: {e}")
        return False
def test_impersonate_session():
    """Make three sequential requests on one Chrome-131 ImpersonateSession.

    Each request's status code is printed as soon as it completes.
    Returns True on success, False on any exception.
    """
    print("\n" + "="*60)
    print("测试 5: TLS 指纹会话")
    print("="*60)
    try:
        with cfspider.ImpersonateSession(impersonate="chrome131") as sess:
            # Thunks so each call happens (and can fail) right before its print.
            calls = (
                lambda: sess.get("https://httpbin.org/ip"),
                lambda: sess.post("https://httpbin.org/post", json={"test": 1}),
                lambda: sess.get("https://httpbin.org/headers"),
            )
            for n, make_request in enumerate(calls, start=1):
                print(f"请求 {n} - 状态码: {make_request().status_code}")
        print("✓ 测试通过")
        return True
    except Exception as e:
        print(f"✗ 测试失败: {e}")
        return False
def test_supported_browsers():
    """List the supported impersonation targets, grouped by browser family.

    Returns True on success, False on any exception.
    """
    print("\n" + "="*60)
    print("测试 6: 支持的浏览器列表")
    print("="*60)
    try:
        browsers = cfspider.get_supported_browsers()
        print(f"支持的浏览器数量: {len(browsers)}")
        for family in ("Chrome", "Safari", "Firefox", "Edge"):
            matches = [b for b in browsers if family.lower() in b]
            print(f"{family}: {matches}")
        print("✓ 测试通过")
        return True
    except Exception as e:
        print(f"✗ 测试失败: {e}")
        return False
def main():
    """Run all six impersonation tests and print a numbered pass/fail summary."""
    print("="*60)
    print("CFspider TLS 指纹模拟功能测试")
    print("="*60)
    print(f"Workers 地址: {CF_WORKERS}")
    results = []
    results.append(test_impersonate_get())
    results.append(test_impersonate_safari())
    results.append(test_impersonate_firefox())
    results.append(test_impersonate_workers())
    results.append(test_impersonate_session())
    results.append(test_supported_browsers())
    # Summary — labels must stay in the same order as the calls above.
    print("\n" + "="*60)
    print("测试结果汇总")
    print("="*60)
    tests = [
        "Chrome 131 指纹",
        "Safari 18 指纹",
        "Firefox 133 指纹",
        "指纹 + Workers 代理",
        "指纹会话",
        "支持的浏览器列表"
    ]
    passed = 0
    failed = 0
    for i, (test, result) in enumerate(zip(tests, results)):
        status = "✓ 通过" if result else "✗ 失败"
        print(f"{i+1}. {test}: {status}")
        if result:
            passed += 1
        else:
            failed += 1
    print(f"\n总计: {passed} 通过, {failed} 失败")


if __name__ == "__main__":
    main()

137
test_impersonate_new.py Normal file
View File

@@ -0,0 +1,137 @@
"""
测试 cfspider.get() 直接使用 impersonate 参数
"""
import sys
sys.path.insert(0, '.')
import cfspider
CF_WORKERS = "https://ip.kami666.xyz"
def test_get_impersonate():
    """cfspider.get() with the impersonate parameter passed directly.

    Prints status and JA3 hash from browserleaks.
    Returns True on success, False on any exception.
    """
    print("\n" + "="*60)
    print("测试 1: cfspider.get() + impersonate='chrome131'")
    print("="*60)
    try:
        response = cfspider.get(
            "https://tls.browserleaks.com/json",
            impersonate="chrome131"
        )
        print(f"状态码: {response.status_code}")
        data = response.json()
        print(f"JA3 Hash: {data.get('ja3_hash', 'N/A')}")
        print("✓ 测试通过")
        return True
    except Exception as e:
        print(f"✗ 测试失败: {e}")
        return False
def test_get_impersonate_workers():
    """cfspider.get() with both impersonate and cf_proxies set.

    Prints the status, CF colo and body from httpbin.org/ip.
    Returns True on success, False on any exception.
    """
    print("\n" + "="*60)
    print("测试 2: cfspider.get() + impersonate + cf_proxies")
    print("="*60)
    try:
        resp = cfspider.get(
            "https://httpbin.org/ip",
            impersonate="chrome131",
            cf_proxies=CF_WORKERS
        )
        for line in (
            f"状态码: {resp.status_code}",
            f"CF Colo: {resp.cf_colo}",
            f"响应: {resp.text}",
            "✓ 测试通过",
        ):
            print(line)
        return True
    except Exception as e:
        print(f"✗ 测试失败: {e}")
        return False
def test_post_impersonate():
    """cfspider.post() with a Safari 18 fingerprint and a JSON body.

    Echoes back the JSON httpbin received.
    Returns True on success, False on any exception.
    """
    print("\n" + "="*60)
    print("测试 3: cfspider.post() + impersonate='safari18_0'")
    print("="*60)
    try:
        response = cfspider.post(
            "https://httpbin.org/post",
            impersonate="safari18_0",
            json={"test": "data"}
        )
        print(f"状态码: {response.status_code}")
        data = response.json()
        print(f"POST 数据: {data.get('json')}")
        print("✓ 测试通过")
        return True
    except Exception as e:
        print(f"✗ 测试失败: {e}")
        return False
def test_different_browsers():
    """Compare truncated JA3 hashes across three impersonation targets.

    A single exception aborts the comparison and returns False; otherwise
    prints one line per target and returns True.
    """
    print("\n" + "="*60)
    print("测试 4: 不同浏览器指纹对比")
    print("="*60)
    targets = ("chrome131", "safari18_0", "firefox133")
    try:
        for target in targets:
            payload = cfspider.get(
                "https://tls.browserleaks.com/json",
                impersonate=target
            ).json()
            print(f"{target}: JA3={payload.get('ja3_hash', 'N/A')[:16]}...")
        print("✓ 测试通过")
        return True
    except Exception as e:
        print(f"✗ 测试失败: {e}")
        return False
def main():
    """Run the four impersonate-parameter tests and print a summary."""
    print("="*60)
    print("cfspider.get() impersonate 参数测试")
    print("="*60)
    # List literal evaluates the calls left-to-right, preserving test order.
    outcomes = [
        test_get_impersonate(),
        test_get_impersonate_workers(),
        test_post_impersonate(),
        test_different_browsers(),
    ]
    # Summary
    print("\n" + "="*60)
    print("测试结果汇总")
    print("="*60)
    labels = [
        "get() + impersonate",
        "get() + impersonate + Workers",
        "post() + impersonate",
        "不同浏览器指纹"
    ]
    ok_count = sum(outcomes)
    bad_count = len(outcomes) - ok_count
    for idx, (label, ok) in enumerate(zip(labels, outcomes), start=1):
        print(f"{idx}. {label}: {'✓ 通过' if ok else '✗ 失败'}")
    print(f"\n总计: {ok_count} 通过, {bad_count} 失败")


if __name__ == "__main__":
    main()