
Commit a83afc1

Add requestType option for browser vs request mode
- Add requestType parameter to types.py RequestOptions (browser | request)
- Add requestType to Scrappey client methods (get, post, put, delete, patch)
- Add requestType to AsyncScrappey client methods
- Add request_type to requests-compatible API functions and Session
- Document requestType in README with cost/speed comparison table
- Add tests for requestType parameter (107 tests passing)

Browser mode (default): Headless browser, 1 balance, more powerful
Request mode: HTTP library with TLS, 0.2 balance, faster and cheaper
1 parent 39a3b5f commit a83afc1
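
For quick reference, a minimal sketch of the new option against the synchronous client, based on the README examples added in this commit; the import path and `YOUR_API_KEY` are placeholders, not confirmed by this diff.

```python
# Sketch based on the README examples below; import path and API key are placeholders.
from scrappey import Scrappey

with Scrappey(api_key="YOUR_API_KEY") as scrappey:
    # Default: browser mode (headless browser, 1 balance per request)
    browser_result = scrappey.get(url="https://protected-site.com")

    # requestType="request": HTTP library with TLS fingerprinting, 0.2 balance, faster
    api_result = scrappey.get(url="https://api.example.com", requestType="request")
    print(api_result["solution"]["statusCode"])
```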

File tree

7 files changed: +199 additions, -18 deletions


README.md

Lines changed: 42 additions & 0 deletions
@@ -50,6 +50,46 @@ with Scrappey(api_key="YOUR_API_KEY") as scrappey:
     print(result["solution"]["statusCode"])
 ```

+## Request Types
+
+Scrappey supports two request modes with different trade-offs:
+
+| Mode | Description | Cost | Speed |
+|------|-------------|------|-------|
+| `browser` | Headless browser (default) | 1 balance | Slower, more powerful |
+| `request` | HTTP library with TLS | 0.2 balance | Faster, cheaper |
+
+### Browser Mode (Default)
+
+Uses a real headless browser. Best for:
+- Sites with JavaScript rendering
+- Cloudflare, Datadome, and other antibot protection
+- Browser actions and screenshots
+
+```python
+# Browser mode is the default
+result = scrappey.get(url="https://protected-site.com")
+```
+
+### Request Mode
+
+Uses an HTTP library with TLS fingerprinting. Best for:
+- Simple API calls
+- High-volume scraping
+- When you need speed and low cost
+
+```python
+# Request mode - 5x cheaper and faster
+result = scrappey.get(url="https://api.example.com", requestType="request")
+
+# Works with all HTTP methods
+result = scrappey.post(
+    url="https://api.example.com/data",
+    postData={"key": "value"},
+    requestType="request",
+)
+```
+
 ## Async Usage

 ```python
@@ -160,6 +200,7 @@ with requests.Session() as session:
 | `cookies` | Yes | Request cookies |
 | `timeout` | Yes | Request timeout |
 | `proxies` | Yes | Proxy configuration |
+| `request_type` | Yes | "browser" (default) or "request" (faster) |
 | `allow_redirects` | Warn | Handled by browser |
 | `verify` | Warn | SSL handled by service |
 | `stream` | Warn | Not supported |
@@ -316,6 +357,7 @@ Scrappey(

 | Option | Type | Description |
 |--------|------|-------------|
+| `requestType` | str | "browser" (default) or "request" (faster, cheaper) |
 | `session` | str | Session ID for state persistence |
 | `proxy` | str | Custom proxy (http://user:pass@ip:port) |
 | `proxyCountry` | str | Proxy country (e.g., "UnitedStates") |
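
The `request_type` row above belongs to the requests-compatible layer mentioned in the commit message. A rough usage sketch, assuming the drop-in Session from the hunk header is exposed as `scrappey.requests`; that import path is not visible in this diff.

```python
# Hypothetical import path -- only the Session usage and the request_type kwarg
# are shown in this hunk; the shim's actual module name may differ.
from scrappey import requests

with requests.Session() as session:
    # request_type corresponds to the API's requestType field:
    # "browser" (default) or "request" (faster, cheaper HTTP mode).
    resp = session.get("https://api.example.com", request_type="request")
    print(resp.status_code)  # assumes the shim mirrors requests' Response interface
```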

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

 [project]
 name = "scrappey"
-version = "1.0.1"
+version = "1.0.2"
 description = "Official Python wrapper for Scrappey.com - Web scraping API with Cloudflare bypass, antibot solving, and browser automation"
 readme = {file = "README.md", content-type = "text/markdown"}
 license = {text = "MIT"}

src/scrappey/async_client.py

Lines changed: 33 additions & 4 deletions
@@ -5,7 +5,7 @@
 For synchronous usage, see client.py.
 """

-from typing import Any, Dict, List, Optional, Union
+from typing import Any, Dict, List, Literal, Optional, Union

 import httpx

@@ -153,6 +153,7 @@ async def get(
         self,
         url: str,
         *,
+        requestType: Optional[Literal["browser", "request"]] = None,
         session: Optional[str] = None,
         proxy: Optional[str] = None,
         proxyCountry: Optional[str] = None,
@@ -174,6 +175,8 @@ async def get(

         Args:
             url: The target URL to scrape
+            requestType: Request mode - "browser" (default, headless browser, 1 balance)
+                or "request" (HTTP library with TLS, faster, 0.2 balance)
             session: Session ID for cookie/state persistence
             proxy: Custom proxy (format: http://user:pass@ip:port)
             proxyCountry: Request proxy from specific country (e.g., "UnitedStates")
@@ -195,15 +198,20 @@ async def get(

         Example:
             ```python
+            # Browser mode (default)
+            result = await scrappey.get(url="https://example.com")
+
+            # Request mode (faster, cheaper)
             result = await scrappey.get(
                 url="https://example.com",
-                cloudflareBypass=True,
+                requestType="request",
             )
-            html = result["solution"]["response"]
             ```
         """
         data: Dict[str, Any] = {"url": url}

+        if requestType is not None:
+            data["requestType"] = requestType
         if session is not None:
             data["session"] = session
         if proxy is not None:
@@ -242,6 +250,7 @@ async def post(
         url: str,
         *,
         postData: Optional[Union[str, Dict[str, Any]]] = None,
+        requestType: Optional[Literal["browser", "request"]] = None,
         session: Optional[str] = None,
         customHeaders: Optional[Dict[str, str]] = None,
         **kwargs: Any,
@@ -252,6 +261,7 @@ async def post(
         Args:
             url: The target URL
             postData: Data to send (string or dict for JSON)
+            requestType: Request mode - "browser" (default) or "request" (faster, cheaper)
             session: Session ID for cookie/state persistence
             customHeaders: Custom HTTP headers
             **kwargs: Additional API options
@@ -263,6 +273,8 @@ async def post(

         if postData is not None:
             data["postData"] = postData
+        if requestType is not None:
+            data["requestType"] = requestType
         if session is not None:
             data["session"] = session
         if customHeaders is not None:
@@ -277,6 +289,7 @@ async def put(
         url: str,
         *,
         postData: Optional[Union[str, Dict[str, Any]]] = None,
+        requestType: Optional[Literal["browser", "request"]] = None,
         **kwargs: Any,
     ) -> ScrappeyResponse:
         """
@@ -285,6 +298,7 @@ async def put(
         Args:
             url: The target URL
             postData: Data to send
+            requestType: Request mode - "browser" (default) or "request" (faster, cheaper)
             **kwargs: Additional API options

         Returns:
@@ -294,23 +308,34 @@ async def put(

         if postData is not None:
             data["postData"] = postData
+        if requestType is not None:
+            data["requestType"] = requestType

         data.update(kwargs)

         return await self._request("request.put", data)

-    async def delete(self, url: str, **kwargs: Any) -> ScrappeyResponse:
+    async def delete(
+        self,
+        url: str,
+        *,
+        requestType: Optional[Literal["browser", "request"]] = None,
+        **kwargs: Any,
+    ) -> ScrappeyResponse:
         """
         Perform a DELETE request to the specified URL.

         Args:
             url: The target URL
+            requestType: Request mode - "browser" (default) or "request" (faster, cheaper)
             **kwargs: Additional API options

         Returns:
             API response containing the result
         """
         data: Dict[str, Any] = {"url": url}
+        if requestType is not None:
+            data["requestType"] = requestType
         data.update(kwargs)

         return await self._request("request.delete", data)
@@ -320,6 +345,7 @@ async def patch(
         url: str,
         *,
         postData: Optional[Union[str, Dict[str, Any]]] = None,
+        requestType: Optional[Literal["browser", "request"]] = None,
         **kwargs: Any,
     ) -> ScrappeyResponse:
         """
@@ -328,6 +354,7 @@ async def patch(
         Args:
             url: The target URL
             postData: Data to send
+            requestType: Request mode - "browser" (default) or "request" (faster, cheaper)
             **kwargs: Additional API options

         Returns:
@@ -337,6 +364,8 @@

         if postData is not None:
             data["postData"] = postData
+        if requestType is not None:
+            data["requestType"] = requestType

         data.update(kwargs)

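
Putting the async diff together, a usage sketch: the `AsyncScrappey` name comes from the commit message, while the constructor arguments and async context-manager support are assumed to mirror the sync client.

```python
# Sketch: constructor arguments and "async with" support are assumptions;
# only the get()/post()/put()/delete()/patch() signatures appear in this diff.
import asyncio

from scrappey import AsyncScrappey


async def main() -> None:
    async with AsyncScrappey(api_key="YOUR_API_KEY") as scrappey:
        # Request mode: 0.2 balance, faster -- good for plain API endpoints
        result = await scrappey.get(
            url="https://api.example.com",
            requestType="request",
        )
        print(result["solution"]["statusCode"])


asyncio.run(main())
```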

src/scrappey/client.py

Lines changed: 38 additions & 4 deletions
@@ -5,7 +5,7 @@
 web scraping API. For async support, see async_client.py.
 """

-from typing import Any, Dict, List, Optional, Union
+from typing import Any, Dict, List, Literal, Optional, Union

 import httpx

@@ -154,6 +154,7 @@ def get(
         self,
         url: str,
         *,
+        requestType: Optional[Literal["browser", "request"]] = None,
         session: Optional[str] = None,
         proxy: Optional[str] = None,
         proxyCountry: Optional[str] = None,
@@ -175,6 +176,8 @@ def get(

         Args:
             url: The target URL to scrape
+            requestType: Request mode - "browser" (default, headless browser, 1 balance)
+                or "request" (HTTP library with TLS, faster, 0.2 balance)
             session: Session ID for cookie/state persistence
             proxy: Custom proxy (format: http://user:pass@ip:port)
             proxyCountry: Request proxy from specific country (e.g., "UnitedStates")
@@ -196,10 +199,16 @@ def get(

         Example:
             ```python
-            # Simple request
+            # Simple request (browser mode, default)
             result = scrappey.get(url="https://example.com")
             html = result["solution"]["response"]

+            # Fast request mode (cheaper, faster)
+            result = scrappey.get(
+                url="https://example.com",
+                requestType="request",
+            )
+
             # With Cloudflare bypass and screenshot
             result = scrappey.get(
                 url="https://protected-site.com",
@@ -211,6 +220,9 @@ def get(
         """
         data: Dict[str, Any] = {"url": url}

+        if requestType is not None:
+            data["requestType"] = requestType
+
         # Add optional parameters
         if session is not None:
             data["session"] = session
@@ -251,6 +263,7 @@ def post(
         url: str,
         *,
         postData: Optional[Union[str, Dict[str, Any]]] = None,
+        requestType: Optional[Literal["browser", "request"]] = None,
         session: Optional[str] = None,
         customHeaders: Optional[Dict[str, str]] = None,
         **kwargs: Any,
@@ -261,6 +274,7 @@ def post(
         Args:
             url: The target URL
             postData: Data to send (string or dict for JSON)
+            requestType: Request mode - "browser" (default) or "request" (faster, cheaper)
             session: Session ID for cookie/state persistence
             customHeaders: Custom HTTP headers (e.g., {"content-type": "application/json"})
             **kwargs: Additional API options
@@ -276,18 +290,21 @@ def post(
                 postData="username=user&password=pass",
             )

-            # JSON data
+            # JSON data with request mode (faster)
             result = scrappey.post(
                 url="https://api.example.com/data",
                 postData={"key": "value"},
                 customHeaders={"content-type": "application/json"},
+                requestType="request",
             )
             ```
         """
         data: Dict[str, Any] = {"url": url}

         if postData is not None:
             data["postData"] = postData
+        if requestType is not None:
+            data["requestType"] = requestType
         if session is not None:
             data["session"] = session
         if customHeaders is not None:
@@ -302,6 +319,7 @@ def put(
         url: str,
         *,
         postData: Optional[Union[str, Dict[str, Any]]] = None,
+        requestType: Optional[Literal["browser", "request"]] = None,
         **kwargs: Any,
     ) -> ScrappeyResponse:
         """
@@ -310,6 +328,7 @@ def put(
         Args:
             url: The target URL
             postData: Data to send
+            requestType: Request mode - "browser" (default) or "request" (faster, cheaper)
             **kwargs: Additional API options

         Returns:
@@ -319,23 +338,34 @@ def put(

         if postData is not None:
             data["postData"] = postData
+        if requestType is not None:
+            data["requestType"] = requestType

         data.update(kwargs)

         return self._request("request.put", data)

-    def delete(self, url: str, **kwargs: Any) -> ScrappeyResponse:
+    def delete(
+        self,
+        url: str,
+        *,
+        requestType: Optional[Literal["browser", "request"]] = None,
+        **kwargs: Any,
+    ) -> ScrappeyResponse:
         """
         Perform a DELETE request to the specified URL.

         Args:
             url: The target URL
+            requestType: Request mode - "browser" (default) or "request" (faster, cheaper)
             **kwargs: Additional API options

         Returns:
             API response containing the result
         """
         data: Dict[str, Any] = {"url": url}
+        if requestType is not None:
+            data["requestType"] = requestType
         data.update(kwargs)

         return self._request("request.delete", data)
@@ -345,6 +375,7 @@ def patch(
         url: str,
         *,
         postData: Optional[Union[str, Dict[str, Any]]] = None,
+        requestType: Optional[Literal["browser", "request"]] = None,
         **kwargs: Any,
     ) -> ScrappeyResponse:
         """
@@ -353,6 +384,7 @@ def patch(
         Args:
             url: The target URL
             postData: Data to send
+            requestType: Request mode - "browser" (default) or "request" (faster, cheaper)
             **kwargs: Additional API options

         Returns:
@@ -362,6 +394,8 @@

         if postData is not None:
             data["postData"] = postData
+        if requestType is not None:
+            data["requestType"] = requestType

         data.update(kwargs)

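
Because every method only sends `requestType` when it is not `None`, leaving the argument unset keeps the service-side default (browser mode). A small illustrative helper, not part of this commit, that routes plain API endpoints to the cheaper request mode:

```python
# Illustrative only -- the helper and host list are hypothetical; the pass-through
# behaviour (requestType omitted when None) is what the diff above implements.
from typing import Any, Dict, Literal, Optional

from scrappey import Scrappey  # import path assumed

PROTECTED_HOSTS = {"protected-site.com"}  # sites that need the full browser


def fetch(scrappey: Scrappey, url: str) -> Dict[str, Any]:
    mode: Optional[Literal["browser", "request"]] = (
        None if any(host in url for host in PROTECTED_HOSTS) else "request"
    )
    # None -> field omitted -> API default (browser); "request" -> cheaper HTTP mode
    return scrappey.get(url=url, requestType=mode)
```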
