From 2d59e04973c45d9fdc54b8a78311e491dfb7694e Mon Sep 17 00:00:00 2001 From: fern-api <115122769+fern-api[bot]@users.noreply.github.com> Date: Wed, 16 Apr 2025 15:26:58 +0000 Subject: [PATCH 01/13] SDK regeneration --- poetry.lock | 12 +- pyproject.toml | 2 +- reference.md | 510 ++++++++++++++++++ src/scrapybara/__init__.py | 15 +- src/scrapybara/base_client.py | 147 +++++ src/scrapybara/beta_vm_management/__init__.py | 2 + src/scrapybara/beta_vm_management/client.py | 396 ++++++++++++++ src/scrapybara/browser/client.py | 143 ++++- src/scrapybara/core/client_wrapper.py | 2 +- src/scrapybara/instance/client.py | 300 +++++++++++ .../types/browser_get_stream_url_response.py | 19 + .../types/delete_browser_auth_response.py | 20 + src/scrapybara/types/expose_port_response.py | 20 + .../types/netlify_deploy_response.py | 25 + src/scrapybara/types/snapshot_response.py | 19 + .../types/start_browser_response.py | 3 +- src/scrapybara/types/success_response.py | 20 + 17 files changed, 1643 insertions(+), 12 deletions(-) create mode 100644 src/scrapybara/beta_vm_management/__init__.py create mode 100644 src/scrapybara/beta_vm_management/client.py create mode 100644 src/scrapybara/types/browser_get_stream_url_response.py create mode 100644 src/scrapybara/types/delete_browser_auth_response.py create mode 100644 src/scrapybara/types/expose_port_response.py create mode 100644 src/scrapybara/types/netlify_deploy_response.py create mode 100644 src/scrapybara/types/snapshot_response.py create mode 100644 src/scrapybara/types/success_response.py diff --git a/poetry.lock b/poetry.lock index d1637e3..b284569 100644 --- a/poetry.lock +++ b/poetry.lock @@ -85,13 +85,13 @@ files = [ [[package]] name = "httpcore" -version = "1.0.7" +version = "1.0.8" description = "A minimal low-level HTTP client." 
optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, - {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, + {file = "httpcore-1.0.8-py3-none-any.whl", hash = "sha256:5254cf149bcb5f75e9d1b2b9f729ea4a4b883d1ad7379fc632b727cec23674be"}, + {file = "httpcore-1.0.8.tar.gz", hash = "sha256:86e94505ed24ea06514883fd44d2bc02d90e77e7979c8eb71b90f41d364a1bad"}, ] [package.dependencies] @@ -525,13 +525,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.13.1" +version = "4.13.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.13.1-py3-none-any.whl", hash = "sha256:4b6cf02909eb5495cfbc3f6e8fd49217e6cc7944e145cdda8caa3734777f9e69"}, - {file = "typing_extensions-4.13.1.tar.gz", hash = "sha256:98795af00fb9640edec5b8e31fc647597b4691f099ad75f469a2616be1a76dff"}, + {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, + {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, ] [metadata] diff --git a/pyproject.toml b/pyproject.toml index da689cf..1a215b3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ name = "scrapybara" [tool.poetry] name = "scrapybara" -version = "2.5.0" +version = "2.6.0-beta.0" description = "" readme = "README.md" authors = [] diff --git a/reference.md b/reference.md index ffb2ff0..4aaa995 100644 --- a/reference.md +++ b/reference.md @@ -65,6 +65,22 @@ client.start()
+**backend:** `typing.Optional[str]` + +
+
+ +
+
+

**snapshot_id:** `typing.Optional[str]` — ID of a snapshot to start the instance from (see the BetaVmManagement endpoints below)
+
+ +
+
+ **request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
@@ -221,6 +237,62 @@ client.get_auth_states()
+ + + + +
client.delete_auth_state(...) +
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from scrapybara import Scrapybara + +client = Scrapybara( + api_key="YOUR_API_KEY", +) +client.delete_auth_state( + auth_state_id="auth_state_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**auth_state_id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ +
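+A minimal sketch of inspecting the result (field names taken from `DeleteBrowserAuthResponse`, added in this same change):
+
+```python
+response = client.delete_auth_state(
+    auth_state_id="auth_state_id",
+)
+# The response echoes the deletion status and the deleted auth state's ID
+print(response.status, response.auth_state_id)
+```
+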
@@ -1070,6 +1142,169 @@ client.instance.resume( + + + + +
client.instance.expose_port(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Expose a port on the instance with a public-facing URL. + +This endpoint creates a temporary public URL that routes traffic to the specified port on the instance. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from scrapybara import Scrapybara + +client = Scrapybara( + api_key="YOUR_API_KEY", +) +client.instance.expose_port( + instance_id="instance_id", + port=1, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**instance_id:** `str` + +
+
+ +
+
+ +**port:** `int` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
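+A minimal sketch of reading the result (field names taken from `ExposePortResponse`, added in this same change; the port value is illustrative):
+
+```python
+response = client.instance.expose_port(
+    instance_id="instance_id",
+    port=8000,
+)
+# public_url is the temporary public URL that routes to the exposed port
+print(response.public_url)
+```
+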
client.instance.deploy_to_netlify(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+

+Deploy a directory from the instance to Netlify.
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from scrapybara import Scrapybara + +client = Scrapybara( + api_key="YOUR_API_KEY", +) +client.instance.deploy_to_netlify( + instance_id="instance_id", + directory_path="directory_path", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**instance_id:** `str` + +
+
+ +
+
+

**directory_path:** `str` — Path to the directory on the instance to deploy
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ +
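+A minimal sketch of handling the result (field names taken from `NetlifyDeployResponse`, added in this same change; all three fields are optional, and the path is hypothetical):
+
+```python
+response = client.instance.deploy_to_netlify(
+    instance_id="instance_id",
+    directory_path="/home/user/site",  # hypothetical directory on the instance
+)
+if response.error:
+    print("Deploy failed:", response.error)
+else:
+    print("Deployed to:", response.site_url)
+```
+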
@@ -1119,6 +1354,14 @@ client.browser.start(
+**separate_stream:** `typing.Optional[bool]` + +
+
+ +
+
+ **request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
@@ -1183,6 +1426,62 @@ client.browser.get_cdp_url(
+ + + + +
client.browser.get_stream_url(...) +
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from scrapybara import Scrapybara + +client = Scrapybara( + api_key="YOUR_API_KEY", +) +client.browser.get_stream_url( + instance_id="instance_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**instance_id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ +
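+A minimal sketch combining the new `separate_stream` flag on `client.browser.start(...)` with this endpoint (that the flag yields the separate `stream_url` is an assumption from its name):
+
+```python
+start_response = client.browser.start(
+    instance_id="instance_id",
+    separate_stream=True,
+)
+# StartBrowserResponse.stream_url is Optional[str]; it can also be fetched later
+print(start_response.stream_url)
+
+stream = client.browser.get_stream_url(instance_id="instance_id")
+print(stream.stream_url)
+```
+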
@@ -2276,3 +2575,214 @@ client.env.delete( +## BetaVmManagement +
client.beta_vm_management.take_snapshot(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Take a snapshot of an instance +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from scrapybara import Scrapybara + +client = Scrapybara( + api_key="YOUR_API_KEY", +) +client.beta_vm_management.take_snapshot( + instance_id="instance_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**instance_id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.beta_vm_management.warmup_snapshot(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+

+Warm up a snapshot so it's ready for faster instance creation
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from scrapybara import Scrapybara + +client = Scrapybara( + api_key="YOUR_API_KEY", +) +client.beta_vm_management.warmup_snapshot( + snapshot_id="snapshot_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**snapshot_id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.beta_vm_management.delete_snapshot(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a snapshot +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from scrapybara import Scrapybara + +client = Scrapybara( + api_key="YOUR_API_KEY", +) +client.beta_vm_management.delete_snapshot( + snapshot_id="snapshot_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**snapshot_id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
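+A minimal sketch of the full snapshot lifecycle, combining these endpoints with the new `snapshot_id` parameter on `client.start()` (which backends support snapshots is not covered here):
+
+```python
+from scrapybara import Scrapybara
+
+client = Scrapybara(
+    api_key="YOUR_API_KEY",
+)
+
+snapshot = client.beta_vm_management.take_snapshot(instance_id="instance_id")
+
+# Optionally warm the snapshot so instances start faster from it
+client.beta_vm_management.warmup_snapshot(snapshot_id=snapshot.snapshot_id)
+
+# Start a fresh instance from the snapshot, then clean up
+restored = client.start(snapshot_id=snapshot.snapshot_id)
+client.beta_vm_management.delete_snapshot(snapshot_id=snapshot.snapshot_id)
+```
+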
+ diff --git a/src/scrapybara/__init__.py b/src/scrapybara/__init__.py index 0005fcd..2b09b27 100644 --- a/src/scrapybara/__init__.py +++ b/src/scrapybara/__init__.py @@ -6,17 +6,20 @@ BrowserAuthenticateResponse, BrowserGetCdpUrlResponse, BrowserGetCurrentUrlResponse, + BrowserGetStreamUrlResponse, Button, CellType, ClickMouseAction, ClickMouseActionClickType, ComputerResponse, + DeleteBrowserAuthResponse, DeploymentConfigInstanceType, DragMouseAction, EditResponse, EnvGetResponse, EnvResponse, ExecuteCellRequest, + ExposePortResponse, FileResponse, GetCursorPositionAction, GetInstanceResponse, @@ -27,15 +30,18 @@ KernelInfo, ModifyBrowserAuthResponse, MoveMouseAction, + NetlifyDeployResponse, Notebook, NotebookCell, PressKeyAction, SaveBrowserAuthResponse, ScrollAction, + SnapshotResponse, StartBrowserResponse, Status, StopBrowserResponse, StopInstanceResponse, + SuccessResponse, TakeScreenshotAction, TypeTextAction, UploadResponse, @@ -44,7 +50,7 @@ WaitAction, ) from .errors import UnprocessableEntityError -from . import browser, code, env, instance, notebook +from . import beta_vm_management, browser, code, env, instance, notebook from .client import AsyncScrapybara, Scrapybara from .environment import ScrapybaraEnvironment from .instance import ( @@ -69,18 +75,21 @@ "BrowserAuthenticateResponse", "BrowserGetCdpUrlResponse", "BrowserGetCurrentUrlResponse", + "BrowserGetStreamUrlResponse", "Button", "CellType", "ClickMouseAction", "ClickMouseActionClickType", "Command", "ComputerResponse", + "DeleteBrowserAuthResponse", "DeploymentConfigInstanceType", "DragMouseAction", "EditResponse", "EnvGetResponse", "EnvResponse", "ExecuteCellRequest", + "ExposePortResponse", "FileResponse", "GetCursorPositionAction", "GetInstanceResponse", @@ -91,6 +100,7 @@ "KernelInfo", "ModifyBrowserAuthResponse", "MoveMouseAction", + "NetlifyDeployResponse", "Notebook", "NotebookCell", "PressKeyAction", @@ -108,10 +118,12 @@ "Scrapybara", "ScrapybaraEnvironment", "ScrollAction", + "SnapshotResponse", "StartBrowserResponse", "Status", "StopBrowserResponse", "StopInstanceResponse", + "SuccessResponse", "TakeScreenshotAction", "TypeTextAction", "UnprocessableEntityError", @@ -120,6 +132,7 @@ "ValidationErrorLocItem", "WaitAction", "__version__", + "beta_vm_management", "browser", "code", "env", diff --git a/src/scrapybara/base_client.py b/src/scrapybara/base_client.py index 9661a10..9befaee 100644 --- a/src/scrapybara/base_client.py +++ b/src/scrapybara/base_client.py @@ -11,6 +11,7 @@ from .code.client import CodeClient from .notebook.client import NotebookClient from .env.client import EnvClient +from .beta_vm_management.client import BetaVmManagementClient from .types.deployment_config_instance_type import DeploymentConfigInstanceType from .core.request_options import RequestOptions from .types.get_instance_response import GetInstanceResponse @@ -20,12 +21,14 @@ from json.decoder import JSONDecodeError from .core.jsonable_encoder import jsonable_encoder from .types.auth_state_response import AuthStateResponse +from .types.delete_browser_auth_response import DeleteBrowserAuthResponse from .core.client_wrapper import AsyncClientWrapper from .instance.client import AsyncInstanceClient from .browser.client import AsyncBrowserClient from .code.client import AsyncCodeClient from .notebook.client import AsyncNotebookClient from .env.client import AsyncEnvClient +from .beta_vm_management.client import AsyncBetaVmManagementClient # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, 
...) @@ -98,6 +101,7 @@ def __init__( self.code = CodeClient(client_wrapper=self._client_wrapper) self.notebook = NotebookClient(client_wrapper=self._client_wrapper) self.env = EnvClient(client_wrapper=self._client_wrapper) + self.beta_vm_management = BetaVmManagementClient(client_wrapper=self._client_wrapper) def start( self, @@ -106,6 +110,8 @@ def start( timeout_hours: typing.Optional[float] = OMIT, blocked_domains: typing.Optional[typing.Sequence[str]] = OMIT, resolution: typing.Optional[typing.Sequence[int]] = OMIT, + backend: typing.Optional[str] = OMIT, + snapshot_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> GetInstanceResponse: """ @@ -119,6 +125,10 @@ def start( resolution : typing.Optional[typing.Sequence[int]] + backend : typing.Optional[str] + + snapshot_id : typing.Optional[str] + request_options : typing.Optional[RequestOptions] Request-specific configuration. @@ -144,6 +154,8 @@ def start( "timeout_hours": timeout_hours, "blocked_domains": blocked_domains, "resolution": resolution, + "backend": backend, + "snapshot_id": snapshot_id, }, headers={ "content-type": "application/json", @@ -313,6 +325,65 @@ def get_auth_states( raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) + def delete_auth_state( + self, *, auth_state_id: str, request_options: typing.Optional[RequestOptions] = None + ) -> DeleteBrowserAuthResponse: + """ + Parameters + ---------- + auth_state_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + DeleteBrowserAuthResponse + Successful Response + + Examples + -------- + from scrapybara import Scrapybara + + client = Scrapybara( + api_key="YOUR_API_KEY", + ) + client.delete_auth_state( + auth_state_id="auth_state_id", + ) + """ + _response = self._client_wrapper.httpx_client.request( + "v1/delete_auth_state", + method="POST", + params={ + "auth_state_id": auth_state_id, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + DeleteBrowserAuthResponse, + parse_obj_as( + type_=DeleteBrowserAuthResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 422: + raise UnprocessableEntityError( + typing.cast( + HttpValidationError, + parse_obj_as( + type_=HttpValidationError, # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + class AsyncBaseClient: """ @@ -381,6 +452,7 @@ def __init__( self.code = AsyncCodeClient(client_wrapper=self._client_wrapper) self.notebook = AsyncNotebookClient(client_wrapper=self._client_wrapper) self.env = AsyncEnvClient(client_wrapper=self._client_wrapper) + self.beta_vm_management = AsyncBetaVmManagementClient(client_wrapper=self._client_wrapper) async def start( self, @@ -389,6 +461,8 @@ async def start( timeout_hours: typing.Optional[float] = OMIT, blocked_domains: typing.Optional[typing.Sequence[str]] = OMIT, resolution: typing.Optional[typing.Sequence[int]] = OMIT, + backend: typing.Optional[str] = OMIT, + snapshot_id: typing.Optional[str] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> GetInstanceResponse: """ @@ -402,6 +476,10 @@ async def start( resolution : typing.Optional[typing.Sequence[int]] + backend 
: typing.Optional[str] + + snapshot_id : typing.Optional[str] + request_options : typing.Optional[RequestOptions] Request-specific configuration. @@ -435,6 +513,8 @@ async def main() -> None: "timeout_hours": timeout_hours, "blocked_domains": blocked_domains, "resolution": resolution, + "backend": backend, + "snapshot_id": snapshot_id, }, headers={ "content-type": "application/json", @@ -630,6 +710,73 @@ async def main() -> None: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) + async def delete_auth_state( + self, *, auth_state_id: str, request_options: typing.Optional[RequestOptions] = None + ) -> DeleteBrowserAuthResponse: + """ + Parameters + ---------- + auth_state_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + DeleteBrowserAuthResponse + Successful Response + + Examples + -------- + import asyncio + + from scrapybara import AsyncScrapybara + + client = AsyncScrapybara( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.delete_auth_state( + auth_state_id="auth_state_id", + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + "v1/delete_auth_state", + method="POST", + params={ + "auth_state_id": auth_state_id, + }, + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + DeleteBrowserAuthResponse, + parse_obj_as( + type_=DeleteBrowserAuthResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 422: + raise UnprocessableEntityError( + typing.cast( + HttpValidationError, + parse_obj_as( + type_=HttpValidationError, # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + def _get_base_url(*, base_url: typing.Optional[str] = None, environment: ScrapybaraEnvironment) -> str: if base_url is not None: diff --git a/src/scrapybara/beta_vm_management/__init__.py b/src/scrapybara/beta_vm_management/__init__.py new file mode 100644 index 0000000..f3ea265 --- /dev/null +++ b/src/scrapybara/beta_vm_management/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated by Fern from our API Definition. + diff --git a/src/scrapybara/beta_vm_management/client.py b/src/scrapybara/beta_vm_management/client.py new file mode 100644 index 0000000..7c626c3 --- /dev/null +++ b/src/scrapybara/beta_vm_management/client.py @@ -0,0 +1,396 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from ..core.client_wrapper import SyncClientWrapper +import typing +from ..core.request_options import RequestOptions +from ..types.snapshot_response import SnapshotResponse +from ..core.jsonable_encoder import jsonable_encoder +from ..core.pydantic_utilities import parse_obj_as +from ..errors.unprocessable_entity_error import UnprocessableEntityError +from ..types.http_validation_error import HttpValidationError +from json.decoder import JSONDecodeError +from ..core.api_error import ApiError +from ..types.success_response import SuccessResponse +from ..core.client_wrapper import AsyncClientWrapper + + +class BetaVmManagementClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def take_snapshot( + self, instance_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> SnapshotResponse: + """ + Take a snapshot of an instance + + Parameters + ---------- + instance_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + SnapshotResponse + Successful Response + + Examples + -------- + from scrapybara import Scrapybara + + client = Scrapybara( + api_key="YOUR_API_KEY", + ) + client.beta_vm_management.take_snapshot( + instance_id="instance_id", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/beta/instances/{jsonable_encoder(instance_id)}/snapshot", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + SnapshotResponse, + parse_obj_as( + type_=SnapshotResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 422: + raise UnprocessableEntityError( + typing.cast( + HttpValidationError, + parse_obj_as( + type_=HttpValidationError, # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def warmup_snapshot( + self, snapshot_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> SuccessResponse: + """ + Warmup a snapshot + + Parameters + ---------- + snapshot_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + SuccessResponse + Successful Response + + Examples + -------- + from scrapybara import Scrapybara + + client = Scrapybara( + api_key="YOUR_API_KEY", + ) + client.beta_vm_management.warmup_snapshot( + snapshot_id="snapshot_id", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/beta/snapshots/{jsonable_encoder(snapshot_id)}/warmup", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + SuccessResponse, + parse_obj_as( + type_=SuccessResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 422: + raise UnprocessableEntityError( + typing.cast( + HttpValidationError, + parse_obj_as( + type_=HttpValidationError, # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def delete_snapshot( + self, snapshot_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> SuccessResponse: + """ + Delete a snapshot + + Parameters + ---------- + snapshot_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + SuccessResponse + Successful Response + + Examples + -------- + from scrapybara import Scrapybara + + client = Scrapybara( + api_key="YOUR_API_KEY", + ) + client.beta_vm_management.delete_snapshot( + snapshot_id="snapshot_id", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/beta/snapshots/{jsonable_encoder(snapshot_id)}/delete", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + SuccessResponse, + parse_obj_as( + type_=SuccessResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 422: + raise UnprocessableEntityError( + typing.cast( + HttpValidationError, + parse_obj_as( + type_=HttpValidationError, # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + +class AsyncBetaVmManagementClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def take_snapshot( + self, instance_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> SnapshotResponse: + """ + Take a snapshot of an instance + + Parameters + ---------- + instance_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + SnapshotResponse + Successful Response + + Examples + -------- + import asyncio + + from scrapybara import AsyncScrapybara + + client = AsyncScrapybara( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.beta_vm_management.take_snapshot( + instance_id="instance_id", + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/beta/instances/{jsonable_encoder(instance_id)}/snapshot", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + SnapshotResponse, + parse_obj_as( + type_=SnapshotResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 422: + raise UnprocessableEntityError( + typing.cast( + HttpValidationError, + parse_obj_as( + type_=HttpValidationError, # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def warmup_snapshot( + self, snapshot_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> SuccessResponse: + """ + Warmup a snapshot + + Parameters + ---------- + snapshot_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + SuccessResponse + Successful Response + + Examples + -------- + import asyncio + + from scrapybara import AsyncScrapybara + + client = AsyncScrapybara( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.beta_vm_management.warmup_snapshot( + snapshot_id="snapshot_id", + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/beta/snapshots/{jsonable_encoder(snapshot_id)}/warmup", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + SuccessResponse, + parse_obj_as( + type_=SuccessResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 422: + raise UnprocessableEntityError( + typing.cast( + HttpValidationError, + parse_obj_as( + type_=HttpValidationError, # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def delete_snapshot( + self, snapshot_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> SuccessResponse: + """ + Delete a snapshot + + Parameters + ---------- + snapshot_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + SuccessResponse + Successful Response + + Examples + -------- + import asyncio + + from scrapybara import AsyncScrapybara + + client = AsyncScrapybara( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.beta_vm_management.delete_snapshot( + snapshot_id="snapshot_id", + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/beta/snapshots/{jsonable_encoder(snapshot_id)}/delete", + method="POST", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + SuccessResponse, + parse_obj_as( + type_=SuccessResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 422: + raise UnprocessableEntityError( + typing.cast( + HttpValidationError, + parse_obj_as( + type_=HttpValidationError, # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/scrapybara/browser/client.py b/src/scrapybara/browser/client.py index 1d6fe94..71b1049 100644 --- a/src/scrapybara/browser/client.py +++ b/src/scrapybara/browser/client.py @@ -11,6 +11,7 @@ from json.decoder import JSONDecodeError from ..core.api_error import ApiError from ..types.browser_get_cdp_url_response import BrowserGetCdpUrlResponse +from ..types.browser_get_stream_url_response import BrowserGetStreamUrlResponse from ..types.browser_get_current_url_response import BrowserGetCurrentUrlResponse from ..types.save_browser_auth_response import SaveBrowserAuthResponse from ..types.modify_browser_auth_response import ModifyBrowserAuthResponse @@ -24,13 +25,19 @@ def __init__(self, *, client_wrapper: SyncClientWrapper): self._client_wrapper = client_wrapper def start( - self, instance_id: str, *, request_options: typing.Optional[RequestOptions] = None + self, + instance_id: str, + *, + separate_stream: typing.Optional[bool] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> StartBrowserResponse: """ Parameters ---------- instance_id : str + separate_stream : typing.Optional[bool] + request_options : typing.Optional[RequestOptions] Request-specific configuration. @@ -53,6 +60,9 @@ def start( _response = self._client_wrapper.httpx_client.request( f"v1/instance/{jsonable_encoder(instance_id)}/browser/start", method="POST", + params={ + "separate_stream": separate_stream, + }, request_options=request_options, ) try: @@ -135,6 +145,62 @@ def get_cdp_url( raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) + def get_stream_url( + self, instance_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> BrowserGetStreamUrlResponse: + """ + Parameters + ---------- + instance_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + BrowserGetStreamUrlResponse + Successful Response + + Examples + -------- + from scrapybara import Scrapybara + + client = Scrapybara( + api_key="YOUR_API_KEY", + ) + client.browser.get_stream_url( + instance_id="instance_id", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/instance/{jsonable_encoder(instance_id)}/browser/stream_url", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + BrowserGetStreamUrlResponse, + parse_obj_as( + type_=BrowserGetStreamUrlResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 422: + raise UnprocessableEntityError( + typing.cast( + HttpValidationError, + parse_obj_as( + type_=HttpValidationError, # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + def get_current_url( self, instance_id: str, *, request_options: typing.Optional[RequestOptions] = None ) -> BrowserGetCurrentUrlResponse: @@ -448,13 +514,19 @@ def __init__(self, *, client_wrapper: AsyncClientWrapper): self._client_wrapper = client_wrapper async def start( - self, instance_id: str, *, request_options: typing.Optional[RequestOptions] = None + self, + instance_id: str, + *, + separate_stream: typing.Optional[bool] = None, + request_options: typing.Optional[RequestOptions] = None, ) -> StartBrowserResponse: """ Parameters ---------- instance_id : str + separate_stream : typing.Optional[bool] + request_options : typing.Optional[RequestOptions] Request-specific configuration. @@ -485,6 +557,9 @@ async def main() -> None: _response = await self._client_wrapper.httpx_client.request( f"v1/instance/{jsonable_encoder(instance_id)}/browser/start", method="POST", + params={ + "separate_stream": separate_stream, + }, request_options=request_options, ) try: @@ -575,6 +650,70 @@ async def main() -> None: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) + async def get_stream_url( + self, instance_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> BrowserGetStreamUrlResponse: + """ + Parameters + ---------- + instance_id : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + BrowserGetStreamUrlResponse + Successful Response + + Examples + -------- + import asyncio + + from scrapybara import AsyncScrapybara + + client = AsyncScrapybara( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.browser.get_stream_url( + instance_id="instance_id", + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/instance/{jsonable_encoder(instance_id)}/browser/stream_url", + method="GET", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + BrowserGetStreamUrlResponse, + parse_obj_as( + type_=BrowserGetStreamUrlResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 422: + raise UnprocessableEntityError( + typing.cast( + HttpValidationError, + parse_obj_as( + type_=HttpValidationError, # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + async def get_current_url( self, instance_id: str, *, request_options: typing.Optional[RequestOptions] = None ) -> BrowserGetCurrentUrlResponse: diff --git a/src/scrapybara/core/client_wrapper.py b/src/scrapybara/core/client_wrapper.py index 2ea88cd..e858297 100644 --- a/src/scrapybara/core/client_wrapper.py +++ b/src/scrapybara/core/client_wrapper.py @@ -16,7 +16,7 @@ def get_headers(self) -> typing.Dict[str, str]: headers: typing.Dict[str, str] = { "X-Fern-Language": "Python", "X-Fern-SDK-Name": "scrapybara", - "X-Fern-SDK-Version": "2.5.0", + "X-Fern-SDK-Version": "2.6.0-beta.0", } headers["x-api-key"] = self.api_key return headers diff --git a/src/scrapybara/instance/client.py b/src/scrapybara/instance/client.py index 5af25d7..55cb773 100644 --- a/src/scrapybara/instance/client.py +++ b/src/scrapybara/instance/client.py @@ -22,6 +22,8 @@ from ..types.upload_response import UploadResponse from ..types.stop_instance_response import StopInstanceResponse from ..types.get_instance_response import GetInstanceResponse +from ..types.expose_port_response import ExposePortResponse +from ..types.netlify_deploy_response import NetlifyDeployResponse from ..core.client_wrapper import AsyncClientWrapper # this is used as the default value for optional parameters @@ -770,6 +772,147 @@ def resume( raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) + def expose_port( + self, instance_id: str, *, port: int, request_options: typing.Optional[RequestOptions] = None + ) -> ExposePortResponse: + """ + Expose a port on the instance with a public-facing URL. + + This endpoint creates a temporary public URL that routes traffic to the specified port on the instance. + + Parameters + ---------- + instance_id : str + + port : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Returns + ------- + ExposePortResponse + Successful Response + + Examples + -------- + from scrapybara import Scrapybara + + client = Scrapybara( + api_key="YOUR_API_KEY", + ) + client.instance.expose_port( + instance_id="instance_id", + port=1, + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/instance/{jsonable_encoder(instance_id)}/expose_port", + method="POST", + json={ + "port": port, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + ExposePortResponse, + parse_obj_as( + type_=ExposePortResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 422: + raise UnprocessableEntityError( + typing.cast( + HttpValidationError, + parse_obj_as( + type_=HttpValidationError, # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def deploy_to_netlify( + self, instance_id: str, *, directory_path: str, request_options: typing.Optional[RequestOptions] = None + ) -> NetlifyDeployResponse: + """ + Deploy a directory from the instance to Netlify. + + Args: + directory_path: Path to the directory on the instance to deploy + + Parameters + ---------- + instance_id : str + + directory_path : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + NetlifyDeployResponse + Successful Response + + Examples + -------- + from scrapybara import Scrapybara + + client = Scrapybara( + api_key="YOUR_API_KEY", + ) + client.instance.deploy_to_netlify( + instance_id="instance_id", + directory_path="directory_path", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"v1/instance/{jsonable_encoder(instance_id)}/deploy_to_netlify", + method="POST", + json={ + "directory_path": directory_path, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + NetlifyDeployResponse, + parse_obj_as( + type_=NetlifyDeployResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 422: + raise UnprocessableEntityError( + typing.cast( + HttpValidationError, + parse_obj_as( + type_=HttpValidationError, # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + class AsyncInstanceClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): @@ -1592,3 +1735,160 @@ async def main() -> None: except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) + + async def expose_port( + self, instance_id: str, *, port: int, request_options: typing.Optional[RequestOptions] = None + ) -> ExposePortResponse: + """ + Expose a port on the instance with a public-facing URL. + + This endpoint creates a temporary public URL that routes traffic to the specified port on the instance. 
+ + Parameters + ---------- + instance_id : str + + port : int + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + ExposePortResponse + Successful Response + + Examples + -------- + import asyncio + + from scrapybara import AsyncScrapybara + + client = AsyncScrapybara( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.instance.expose_port( + instance_id="instance_id", + port=1, + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/instance/{jsonable_encoder(instance_id)}/expose_port", + method="POST", + json={ + "port": port, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + ExposePortResponse, + parse_obj_as( + type_=ExposePortResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 422: + raise UnprocessableEntityError( + typing.cast( + HttpValidationError, + parse_obj_as( + type_=HttpValidationError, # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def deploy_to_netlify( + self, instance_id: str, *, directory_path: str, request_options: typing.Optional[RequestOptions] = None + ) -> NetlifyDeployResponse: + """ + Deploy a directory from the instance to Netlify. + + Args: + directory_path: Path to the directory on the instance to deploy + + Parameters + ---------- + instance_id : str + + directory_path : str + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + NetlifyDeployResponse + Successful Response + + Examples + -------- + import asyncio + + from scrapybara import AsyncScrapybara + + client = AsyncScrapybara( + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.instance.deploy_to_netlify( + instance_id="instance_id", + directory_path="directory_path", + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"v1/instance/{jsonable_encoder(instance_id)}/deploy_to_netlify", + method="POST", + json={ + "directory_path": directory_path, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return typing.cast( + NetlifyDeployResponse, + parse_obj_as( + type_=NetlifyDeployResponse, # type: ignore + object_=_response.json(), + ), + ) + if _response.status_code == 422: + raise UnprocessableEntityError( + typing.cast( + HttpValidationError, + parse_obj_as( + type_=HttpValidationError, # type: ignore + object_=_response.json(), + ), + ) + ) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/scrapybara/types/browser_get_stream_url_response.py b/src/scrapybara/types/browser_get_stream_url_response.py new file mode 100644 index 0000000..7b24ee4 --- /dev/null +++ b/src/scrapybara/types/browser_get_stream_url_response.py @@ -0,0 +1,19 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from ..core.pydantic_utilities import UniversalBaseModel +from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import typing +import pydantic + + +class BrowserGetStreamUrlResponse(UniversalBaseModel): + stream_url: str + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/scrapybara/types/delete_browser_auth_response.py b/src/scrapybara/types/delete_browser_auth_response.py new file mode 100644 index 0000000..2f28daa --- /dev/null +++ b/src/scrapybara/types/delete_browser_auth_response.py @@ -0,0 +1,20 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import typing +import pydantic + + +class DeleteBrowserAuthResponse(UniversalBaseModel): + status: str + auth_state_id: str + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/scrapybara/types/expose_port_response.py b/src/scrapybara/types/expose_port_response.py new file mode 100644 index 0000000..9ffabda --- /dev/null +++ b/src/scrapybara/types/expose_port_response.py @@ -0,0 +1,20 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import typing +import pydantic + + +class ExposePortResponse(UniversalBaseModel): + status: str + public_url: str + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/scrapybara/types/netlify_deploy_response.py b/src/scrapybara/types/netlify_deploy_response.py new file mode 100644 index 0000000..f4914f2 --- /dev/null +++ b/src/scrapybara/types/netlify_deploy_response.py @@ -0,0 +1,25 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +import typing +from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import pydantic + + +class NetlifyDeployResponse(UniversalBaseModel): + """ + Response model for netlify deployment. + """ + + output: typing.Optional[str] = None + error: typing.Optional[str] = None + site_url: typing.Optional[str] = None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/scrapybara/types/snapshot_response.py b/src/scrapybara/types/snapshot_response.py new file mode 100644 index 0000000..7cea13e --- /dev/null +++ b/src/scrapybara/types/snapshot_response.py @@ -0,0 +1,19 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from ..core.pydantic_utilities import UniversalBaseModel +from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import typing +import pydantic + + +class SnapshotResponse(UniversalBaseModel): + snapshot_id: str + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/scrapybara/types/start_browser_response.py b/src/scrapybara/types/start_browser_response.py index c17f656..e73b7e0 100644 --- a/src/scrapybara/types/start_browser_response.py +++ b/src/scrapybara/types/start_browser_response.py @@ -1,13 +1,14 @@ # This file was auto-generated by Fern from our API Definition. from ..core.pydantic_utilities import UniversalBaseModel -from ..core.pydantic_utilities import IS_PYDANTIC_V2 import typing +from ..core.pydantic_utilities import IS_PYDANTIC_V2 import pydantic class StartBrowserResponse(UniversalBaseModel): cdp_url: str + stream_url: typing.Optional[str] = None if IS_PYDANTIC_V2: model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 diff --git a/src/scrapybara/types/success_response.py b/src/scrapybara/types/success_response.py new file mode 100644 index 0000000..a8a5e10 --- /dev/null +++ b/src/scrapybara/types/success_response.py @@ -0,0 +1,20 @@ +# This file was auto-generated by Fern from our API Definition. + +from ..core.pydantic_utilities import UniversalBaseModel +from ..core.pydantic_utilities import IS_PYDANTIC_V2 +import typing +import pydantic + + +class SuccessResponse(UniversalBaseModel): + success: bool + message: str + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow From 2032dd66fa243a24c65af856c45554293b16ee0b Mon Sep 17 00:00:00 2001 From: Cooper Miller Date: Wed, 16 Apr 2025 08:33:51 -0700 Subject: [PATCH 02/13] fix types --- src/scrapybara/types/__init__.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/src/scrapybara/types/__init__.py b/src/scrapybara/types/__init__.py index 8961e8b..6eaec0b 100644 --- a/src/scrapybara/types/__init__.py +++ b/src/scrapybara/types/__init__.py @@ -4,17 +4,20 @@ from .browser_authenticate_response import BrowserAuthenticateResponse from .browser_get_cdp_url_response import BrowserGetCdpUrlResponse from .browser_get_current_url_response import BrowserGetCurrentUrlResponse +from .browser_get_stream_url_response import BrowserGetStreamUrlResponse from .button import Button from .cell_type import CellType from .click_mouse_action import ClickMouseAction from .click_mouse_action_click_type import ClickMouseActionClickType from .computer_response import ComputerResponse +from .delete_browser_auth_response import DeleteBrowserAuthResponse from .deployment_config_instance_type import DeploymentConfigInstanceType from .drag_mouse_action import DragMouseAction from .edit_response import EditResponse from .env_get_response import EnvGetResponse from .env_response import EnvResponse from .execute_cell_request import ExecuteCellRequest +from .expose_port_response import ExposePortResponse from .file_response import FileResponse from .upload_response import UploadResponse from .get_cursor_position_action import GetCursorPositionAction @@ -28,13 +31,16 @@ 
from .move_mouse_action import MoveMouseAction from .notebook import Notebook from .notebook_cell import NotebookCell +from .netlify_deploy_response import NetlifyDeployResponse from .press_key_action import PressKeyAction from .save_browser_auth_response import SaveBrowserAuthResponse from .scroll_action import ScrollAction from .start_browser_response import StartBrowserResponse from .status import Status +from .success_response import SuccessResponse from .stop_browser_response import StopBrowserResponse from .stop_instance_response import StopInstanceResponse +from .snapshot_response import SnapshotResponse from .take_screenshot_action import TakeScreenshotAction from .type_text_action import TypeTextAction from .validation_error import ValidationError @@ -80,18 +86,22 @@ "BrowserAuthenticateResponse", "BrowserGetCdpUrlResponse", "BrowserGetCurrentUrlResponse", + "BrowserGetStreamUrlResponse", "Button", "CellType", "ClickMouseAction", "ClickMouseActionClickType", "ComputerResponse", "DeploymentConfigInstanceType", + "DeleteBrowserAuthResponse", "DragMouseAction", "EditResponse", "EnvGetResponse", "EnvResponse", "ExecuteCellRequest", + "ExposePortResponse", "FileResponse", + "UploadResponse", "GetCursorPositionAction", "GetInstanceResponse", "GetInstanceResponseInstanceType", @@ -106,6 +116,7 @@ "MoveMouseAction", "Notebook", "NotebookCell", + "NetlifyDeployResponse", "PressKeyAction", "SaveBrowserAuthResponse", "ScrollAction", @@ -113,9 +124,11 @@ "SingleActResponse", "StartBrowserResponse", "Status", + "SuccessResponse", "Step", "StopBrowserResponse", "StopInstanceResponse", + "SnapshotResponse", "TakeScreenshotAction", "TextPart", "Tool", From aa39aa5f9fb45d68dba645b444466e2bae9c3983 Mon Sep 17 00:00:00 2001 From: Cooper Miller Date: Wed, 16 Apr 2025 08:43:58 -0700 Subject: [PATCH 03/13] get_stream_url --- src/scrapybara/client.py | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/src/scrapybara/client.py b/src/scrapybara/client.py index 604f88e..bdd553d 100644 --- a/src/scrapybara/client.py +++ b/src/scrapybara/client.py @@ -33,6 +33,7 @@ BrowserAuthenticateResponse, BrowserGetCdpUrlResponse, BrowserGetCurrentUrlResponse, + BrowserGetStreamUrlResponse, Button, ClickMouseActionClickType, ComputerResponse, @@ -186,6 +187,12 @@ def stop( self.instance_id, request_options=request_options ) + def get_stream_url( + self, request_options: Optional[RequestOptions] = None + ) -> BrowserGetStreamUrlResponse: + return self._client.browser.get_stream_url( + instance_id=self.instance_id, request_options=request_options + ) class AsyncBrowser: def __init__(self, instance_id: str, client: AsyncBaseClient): @@ -253,6 +260,12 @@ async def stop( self.instance_id, request_options=request_options ) + async def get_stream_url( + self, request_options: Optional[RequestOptions] = None + ) -> BrowserGetStreamUrlResponse: + return await self._client.browser.get_stream_url( + instance_id=self.instance_id, request_options=request_options + ) class Code: def __init__(self, instance_id: str, client: BaseClient): @@ -1602,6 +1615,8 @@ def start_ubuntu( timeout_hours: Optional[float] = OMIT, blocked_domains: Optional[Sequence[str]] = OMIT, resolution: Optional[Sequence[int]] = OMIT, + backend: Optional[str] = OMIT, + snapshot_id: Optional[str] = OMIT, request_options: Optional[RequestOptions] = None, ) -> UbuntuInstance: response = self._base_client.start( @@ -1609,6 +1624,8 @@ def start_ubuntu( timeout_hours=timeout_hours, blocked_domains=blocked_domains, resolution=resolution, + 
backend=backend, + snapshot_id=snapshot_id, request_options=request_options, ) return UbuntuInstance( @@ -1624,6 +1641,8 @@ def start_browser( timeout_hours: Optional[float] = OMIT, blocked_domains: Optional[Sequence[str]] = OMIT, resolution: Optional[Sequence[int]] = OMIT, + backend: Optional[str] = OMIT, + snapshot_id: Optional[str] = OMIT, request_options: Optional[RequestOptions] = None, ) -> BrowserInstance: response = self._base_client.start( @@ -1631,6 +1650,8 @@ def start_browser( timeout_hours=timeout_hours, blocked_domains=blocked_domains, resolution=resolution, + backend=backend, + snapshot_id=snapshot_id, request_options=request_options, ) return BrowserInstance( @@ -2062,6 +2083,8 @@ async def start_ubuntu( timeout_hours: Optional[float] = OMIT, blocked_domains: Optional[Sequence[str]] = OMIT, resolution: Optional[Sequence[int]] = OMIT, + backend: Optional[str] = OMIT, + snapshot_id: Optional[str] = OMIT, request_options: Optional[RequestOptions] = None, ) -> AsyncUbuntuInstance: response = await self._base_client.start( @@ -2069,6 +2092,8 @@ async def start_ubuntu( timeout_hours=timeout_hours, blocked_domains=blocked_domains, resolution=resolution, + backend=backend, + snapshot_id=snapshot_id, request_options=request_options, ) return AsyncUbuntuInstance( @@ -2084,6 +2109,8 @@ async def start_browser( timeout_hours: Optional[float] = OMIT, blocked_domains: Optional[Sequence[str]] = OMIT, resolution: Optional[Sequence[int]] = OMIT, + backend: Optional[str] = OMIT, + snapshot_id: Optional[str] = OMIT, request_options: Optional[RequestOptions] = None, ) -> AsyncBrowserInstance: response = await self._base_client.start( @@ -2091,6 +2118,8 @@ async def start_browser( timeout_hours=timeout_hours, blocked_domains=blocked_domains, resolution=resolution, + backend=backend, + snapshot_id=snapshot_id, request_options=request_options, ) return AsyncBrowserInstance( From 75b268f73d6050b58d6d4f4fd0d50188c9c60191 Mon Sep 17 00:00:00 2001 From: Cooper Miller Date: Wed, 16 Apr 2025 08:53:05 -0700 Subject: [PATCH 04/13] tests --- tests/custom/test_client.py | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/tests/custom/test_client.py b/tests/custom/test_client.py index fd5e253..01f0ac2 100644 --- a/tests/custom/test_client.py +++ b/tests/custom/test_client.py @@ -282,12 +282,43 @@ def test_upload_download() -> None: ubuntu_instance.stop() +def test_beta_vm_management() -> None: + _check_api_key() + client = Scrapybara() + + # Start an rodent instance + instance = client.start_ubuntu(backend="rodent") + assert instance.id is not None + + try: + # Take a snapshot + snapshot_response = client.beta_vm_management.take_snapshot(instance_id=instance.id) + assert snapshot_response is not None + assert snapshot_response.snapshot_id is not None + snapshot_id = snapshot_response.snapshot_id + print(f"Created snapshot with ID: {snapshot_id}") + + # Warmup the snapshot + warmup_response = client.beta_vm_management.warmup_snapshot(snapshot_id=snapshot_id) + assert warmup_response is not None + assert warmup_response.success is True + + # Delete the snapshot + delete_response = client.beta_vm_management.delete_snapshot(snapshot_id=snapshot_id) + assert delete_response is not None + assert delete_response.success is True + + finally: + instance.stop() + + if __name__ == "__main__": test_ubuntu() test_browser() test_ubuntu_openai() test_browser_openai() test_upload_download() + test_beta_vm_management() # test_ubuntu_thinking() # test_browser_thinking() # test_windows() From 
From 314c9a69699d81a90378cff66a591d334324c228 Mon Sep 17 00:00:00 2001
From: Cooper Miller
Date: Wed, 16 Apr 2025 09:48:00 -0700
Subject: [PATCH 05/13] beta property

---
 src/scrapybara/client.py    | 182 +++++++++++++++++++++++++++++++++++-
 tests/custom/test_client.py |   8 +-
 2 files changed, 185 insertions(+), 5 deletions(-)

diff --git a/src/scrapybara/client.py b/src/scrapybara/client.py
index bdd553d..3c727a7 100644
--- a/src/scrapybara/client.py
+++ b/src/scrapybara/client.py
@@ -1604,6 +1604,22 @@ def __init__(
             follow_redirects=follow_redirects,
             httpx_client=httpx_client,
         )
+        self._beta = Beta(self._base_client)
+
+    @property
+    def beta(self) -> "Beta":
+        """
+        Access beta functionality.
+
+        This property provides access to beta features that may change
+        or be removed in future versions.
+
+        Returns
+        -------
+        Beta
+            Beta features wrapper
+        """
+        return self._beta
 
     @property
     def httpx_client(self) -> HttpClient:
@@ -2072,6 +2088,22 @@ def __init__(
             follow_redirects=follow_redirects,
             httpx_client=httpx_client,
         )
+        self._beta = AsyncBeta(self._base_client)
+
+    @property
+    def beta(self) -> "AsyncBeta":
+        """
+        Access beta functionality.
+
+        This property provides access to beta features that may change
+        or be removed in future versions.
+
+        Returns
+        -------
+        AsyncBeta
+            Beta features wrapper
+        """
+        return self._beta
 
     @property
     def httpx_client(self) -> AsyncHttpClient:
@@ -2619,4 +2651,152 @@ def _filter_images(messages: List[Message], images_to_keep: int):
                 if images_kept < images_to_keep:
                     images_kept += 1
                 else:
-                    del tool_result.result["base_64_image"]
\ No newline at end of file
+                    del tool_result.result["base_64_image"]
+
+
+class Beta:
+    """
+    Class that provides access to beta functionality in Scrapybara.
+
+    Includes:
+    - VM management: snapshot operations on VM instances
+    """
+    def __init__(self, base_client: BaseClient):
+        self._base_client = base_client
+
+    def take_snapshot(self, instance_id: str, *, request_options: Optional[RequestOptions] = None):
+        """
+        Take a snapshot of an instance
+
+        Parameters
+        ----------
+        instance_id : str
+            ID of the instance to snapshot
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        SnapshotResponse
+            Contains the snapshot_id
+        """
+        return self._base_client.beta_vm_management.take_snapshot(
+            instance_id=instance_id,
+            request_options=request_options
+        )
+
+    def warmup_snapshot(self, snapshot_id: str, *, request_options: Optional[RequestOptions] = None):
+        """
+        Warmup a snapshot so it's ready for faster instance creation
+
+        Parameters
+        ----------
+        snapshot_id : str
+            ID of the snapshot to warm up
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        SuccessResponse
+            Indicates if the operation was successful
+        """
+        return self._base_client.beta_vm_management.warmup_snapshot(
+            snapshot_id=snapshot_id,
+            request_options=request_options
+        )
+
+    def delete_snapshot(self, snapshot_id: str, *, request_options: Optional[RequestOptions] = None):
+        """
+        Delete a snapshot
+
+        Parameters
+        ----------
+        snapshot_id : str
+            ID of the snapshot to delete
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        SuccessResponse
+            Indicates if the operation was successful
+        """
+        return self._base_client.beta_vm_management.delete_snapshot(
+            snapshot_id=snapshot_id,
+            request_options=request_options
+        )
+
+
+class AsyncBeta:
+    """
+    Class that provides access to beta functionality in AsyncScrapybara.
+
+    Includes:
+    - VM management: snapshot operations on VM instances
+    """
+    def __init__(self, base_client: AsyncBaseClient):
+        self._base_client = base_client
+
+    async def take_snapshot(self, instance_id: str, *, request_options: Optional[RequestOptions] = None):
+        """
+        Take a snapshot of an instance
+
+        Parameters
+        ----------
+        instance_id : str
+            ID of the instance to snapshot
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        SnapshotResponse
+            Contains the snapshot_id
+        """
+        return await self._base_client.beta_vm_management.take_snapshot(
+            instance_id=instance_id,
+            request_options=request_options
+        )
+
+    async def warmup_snapshot(self, snapshot_id: str, *, request_options: Optional[RequestOptions] = None):
+        """
+        Warmup a snapshot so it's ready for faster instance creation
+
+        Parameters
+        ----------
+        snapshot_id : str
+            ID of the snapshot to warm up
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        SuccessResponse
+            Indicates if the operation was successful
+        """
+        return await self._base_client.beta_vm_management.warmup_snapshot(
+            snapshot_id=snapshot_id,
+            request_options=request_options
+        )
+
+    async def delete_snapshot(self, snapshot_id: str, *, request_options: Optional[RequestOptions] = None):
+        """
+        Delete a snapshot
+
+        Parameters
+        ----------
+        snapshot_id : str
+            ID of the snapshot to delete
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        SuccessResponse
+            Indicates if the operation was successful
+        """
+        return await self._base_client.beta_vm_management.delete_snapshot(
+            snapshot_id=snapshot_id,
+            request_options=request_options
+        )
\ No newline at end of file
diff --git a/tests/custom/test_client.py b/tests/custom/test_client.py
index 01f0ac2..445fdaa 100644
--- a/tests/custom/test_client.py
+++ b/tests/custom/test_client.py
@@ -286,25 +286,25 @@ def test_beta_vm_management() -> None:
     _check_api_key()
     client = Scrapybara()
 
-    # Start an rodent instance
+    # Start a rodent instance
     instance = client.start_ubuntu(backend="rodent")
     assert instance.id is not None
 
     try:
         # Take a snapshot
-        snapshot_response = client.beta_vm_management.take_snapshot(instance_id=instance.id)
+        snapshot_response = client.beta.take_snapshot(instance_id=instance.id)
         assert snapshot_response is not None
         assert snapshot_response.snapshot_id is not None
         snapshot_id = snapshot_response.snapshot_id
         print(f"Created snapshot with ID: {snapshot_id}")
 
         # Warmup the snapshot
-        warmup_response = client.beta_vm_management.warmup_snapshot(snapshot_id=snapshot_id)
+        warmup_response = client.beta.warmup_snapshot(snapshot_id=snapshot_id)
         assert warmup_response is not None
         assert warmup_response.success is True
 
         # Delete the snapshot
-        delete_response = client.beta_vm_management.delete_snapshot(snapshot_id=snapshot_id)
+        delete_response = client.beta.delete_snapshot(snapshot_id=snapshot_id)
         assert delete_response is not None
         assert delete_response.success is True
 

From a99c9c54414bc500c8e5c668015e4798d98349c9 Mon Sep 17 00:00:00 2001
From: Cooper Miller
Date: Wed, 16 Apr 2025 10:00:09 -0700
Subject: [PATCH 06/13] restore from snapshot test

---
 tests/custom/test_client.py | 63 +++++++++++++++++++++++++++++++
 1 file changed, 63 insertions(+)

diff --git a/tests/custom/test_client.py b/tests/custom/test_client.py
index 445fdaa..7f73e55 100644
--- a/tests/custom/test_client.py
+++ b/tests/custom/test_client.py
@@ -312,6 +312,68 @@ def test_beta_vm_management() -> None:
         instance.stop()
 
 
+def test_restore_from_snapshot() -> None:
+    _check_api_key()
+    client = Scrapybara()
+
+    # Start original instance
+    original_instance = client.start_ubuntu(backend="rodent")
+    assert original_instance.id is not None
+    print(f"Started original instance: {original_instance.id}")
+
+    snapshot_id = None
+    restored_instance = None
+
+    try:
+        # Create a file to verify restoration later
+        test_marker = f"test-marker-{uuid.uuid4()}"
+        original_instance.bash(command=f"echo '{test_marker}' > /tmp/snapshot-test-file")
+
+        # Take a snapshot
+        snapshot_response = client.beta.take_snapshot(instance_id=original_instance.id)
+        assert snapshot_response is not None
+        assert snapshot_response.snapshot_id is not None
+
+        snapshot_id = snapshot_response.snapshot_id
+        print(f"Created snapshot with ID: {snapshot_id}")
+
+        # Warmup the snapshot (optional but recommended)
+        client.beta.warmup_snapshot(snapshot_id=snapshot_id)
+
+        # Stop the original instance
+        original_instance.stop()
+
+        # Start a new instance from the snapshot
+        restored_instance = client.start_ubuntu(snapshot_id=snapshot_id, backend="rodent")
+        assert restored_instance.id is not None
+        print(f"Started restored instance: {restored_instance.id}")
+
+        # Verify the test file exists with our marker
+        file_content = restored_instance.bash(command="cat /tmp/snapshot-test-file")
+        assert test_marker in str(file_content)
+        print("Successfully verified snapshot restoration!")
+
+    finally:
+        # Clean up resources
+        if original_instance:
+            try:
+                original_instance.stop()
+            except:
+                pass
+
+        if restored_instance:
+            try:
+                restored_instance.stop()
+            except:
+                pass
+
+        if snapshot_id:
+            try:
+                client.beta.delete_snapshot(snapshot_id=snapshot_id)
+            except:
+                pass
+
+
 if __name__ == "__main__":
     test_ubuntu()
     test_browser()
@@ -319,6 +381,7 @@ def test_beta_vm_management() -> None:
     test_browser_openai()
     test_upload_download()
     test_beta_vm_management()
+    test_restore_from_snapshot()
     # test_ubuntu_thinking()
     # test_browser_thinking()
     # test_windows()

From 9b53f80c42128f5df154080d9bac11fc0c98a976 Mon Sep 17 00:00:00 2001
From: Cooper Miller
Date: Wed, 16 Apr 2025 10:52:17 -0700
Subject: [PATCH 07/13] separate stream

---
 src/scrapybara/client.py | 16 ++++++++++++----
 1 file changed, 12 insertions(+), 4 deletions(-)

diff --git a/src/scrapybara/client.py b/src/scrapybara/client.py
index 3c727a7..54cd6c0 100644
--- a/src/scrapybara/client.py
+++ b/src/scrapybara/client.py
@@ -127,10 +127,14 @@ def __init__(self, instance_id: str, client: BaseClient):
         self._client = client
 
     def start(
-        self, request_options: Optional[RequestOptions] = None
+        self,
+        separate_stream: Optional[bool] = None,
+        request_options: Optional[RequestOptions] = None
     ) -> StartBrowserResponse:
         return self._client.browser.start(
-            self.instance_id, request_options=request_options
+            self.instance_id,
+            separate_stream=separate_stream,
+            request_options=request_options
         )
 
     def get_cdp_url(
@@ -200,10 +204,14 @@ def __init__(self, instance_id: str, client: AsyncBaseClient):
         self._client = client
 
     async def start(
-        self, request_options: Optional[RequestOptions] = None
+        self,
+        separate_stream: Optional[bool] = None,
+        request_options: Optional[RequestOptions] = None
    ) -> StartBrowserResponse:
         return await self._client.browser.start(
-            self.instance_id, request_options=request_options
+            self.instance_id,
+            separate_stream=separate_stream,
+            request_options=request_options
         )
 
     async def get_cdp_url(

From e7b0802b1f7fb40ddef36c8370fb15b5ee25d65d Mon Sep 17 00:00:00 2001
From: Cooper Miller
Date: Wed, 16 Apr 2025 12:48:31 -0700
Subject: [PATCH 08/13] skip all but snapshotting

---
 tests/custom/test_client.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/tests/custom/test_client.py b/tests/custom/test_client.py
index 7f73e55..7fd6834 100644
--- a/tests/custom/test_client.py
+++ b/tests/custom/test_client.py
@@ -29,7 +29,7 @@ def _check_api_key() -> None:
     if os.getenv("SCRAPYBARA_API_KEY") is None:
         raise ValueError("SCRAPYBARA_API_KEY is not set")
 
-
+@pytest.mark.skip()
 def test_ubuntu() -> None:
     _check_api_key()
     client = Scrapybara()
@@ -63,6 +63,7 @@ def test_ubuntu() -> None:
     ubuntu_instance.browser.stop()
     ubuntu_instance.stop()
 
+@pytest.mark.skip()
 def test_ubuntu_openai() -> None:
     _check_api_key()
     client = Scrapybara()
@@ -96,7 +97,7 @@ def test_ubuntu_openai() -> None:
     ubuntu_instance.browser.stop()
     ubuntu_instance.stop()
 
-
+@pytest.mark.skip()
 def test_browser() -> None:
     _check_api_key()
     client = Scrapybara()
@@ -124,6 +125,7 @@ def test_browser() -> None:
     assert response.output.combined_valuation is not None
     browser_instance.stop()
 
+@pytest.mark.skip()
 def test_browser_openai() -> None:
     _check_api_key()
     client = Scrapybara()
@@ -243,6 +245,7 @@ def test_browser_thinking() -> None:
 
     browser_instance.stop()
 
+@pytest.mark.skip()
 def test_upload_download() -> None:
     _check_api_key()
     client = Scrapybara()
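Patch 07 above threads a `separate_stream` flag through `browser.start`, but nothing in the series exercises it. A minimal usage sketch, assuming the flag requests a browser stream distinct from the desktop stream; the diff does not show how the `StartBrowserResponse` surfaces the stream URL, so the response is simply printed:

```python
from scrapybara import Scrapybara

client = Scrapybara()
instance = client.start_ubuntu()

try:
    # Request a browser stream separate from the desktop stream
    # (separate_stream comes from patch 07; the shape of the response
    # is an assumption, so we print it rather than pick fields).
    response = instance.browser.start(separate_stream=True)
    print(response)

    # get_cdp_url is visible as unchanged context in the same hunk.
    print(instance.browser.get_cdp_url())
finally:
    instance.browser.stop()
    instance.stop()
```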
From 9b8672cef762aaa6230ddb300e0a0fb2564e6670 Mon Sep 17 00:00:00 2001
From: Cooper Miller
Date: Fri, 18 Apr 2025 14:58:32 -0700
Subject: [PATCH 09/13] add expose and deploy

---
 src/scrapybara/client.py | 38 ++++++++++++++++++++++++++++++------
 1 file changed, 32 insertions(+), 6 deletions(-)

diff --git a/src/scrapybara/client.py b/src/scrapybara/client.py
index 06794a7..c2e1c21 100644
--- a/src/scrapybara/client.py
+++ b/src/scrapybara/client.py
@@ -53,6 +53,8 @@
     ModifyBrowserAuthResponse,
     UploadResponse,
     FileResponse,
+    ExposePortResponse,
+    NetlifyDeployResponse,
 )
 
 from .types.act import (
@@ -880,9 +882,21 @@ def resume(
         request_options: Optional[RequestOptions] = None,
     ) -> GetInstanceResponse:
         return self._client.instance.resume(
-            self.id,
-            timeout_hours=timeout_hours,
-            request_options=request_options,
+            self.id, timeout_hours=timeout_hours, request_options=request_options
         )
 
+    def expose_port(
+        self, *, port: int, request_options: Optional[RequestOptions] = None
+    ) -> ExposePortResponse:
+        return self._client.instance.expose_port(
+            self.id, port=port, request_options=request_options
+        )
+
+    def deploy_to_netlify(
+        self, *, directory_path: str, request_options: Optional[RequestOptions] = None
+    ) -> NetlifyDeployResponse:
+        return self._client.instance.deploy_to_netlify(
+            self.id, directory_path=directory_path, request_options=request_options
+        )
 
@@ -1404,9 +1418,21 @@ async def resume(
         request_options: Optional[RequestOptions] = None,
     ) -> GetInstanceResponse:
         return await self._client.instance.resume(
-            self.id,
-            timeout_hours=timeout_hours,
-            request_options=request_options,
+            self.id, timeout_hours=timeout_hours, request_options=request_options
         )
 
+    async def expose_port(
+        self, *, port: int, request_options: Optional[RequestOptions] = None
+    ) -> ExposePortResponse:
+        return await self._client.instance.expose_port(
+            self.id, port=port, request_options=request_options
+        )
+
+    async def deploy_to_netlify(
+        self, *, directory_path: str, request_options: Optional[RequestOptions] = None
+    ) -> NetlifyDeployResponse:
+        return await self._client.instance.deploy_to_netlify(
+            self.id, directory_path=directory_path, request_options=request_options
+        )
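These wrappers are thin pass-throughs to `client.instance.expose_port` and `client.instance.deploy_to_netlify`. A usage sketch, assuming they land on the instance wrapper alongside `resume`; the paths and commands are hypothetical, and since the diff does not show the fields on `ExposePortResponse` or `NetlifyDeployResponse`, the responses are just printed:

```python
from scrapybara import Scrapybara

client = Scrapybara()
instance = client.start_ubuntu()

try:
    # Serve something inside the instance, then expose the port publicly.
    instance.bash(command="python3 -m http.server 8000 &")
    exposed = instance.expose_port(port=8000)
    print(exposed)  # ExposePortResponse; field names are not shown in the diff

    # Deploy a static directory to Netlify ("/tmp/site" is hypothetical).
    instance.bash(command="mkdir -p /tmp/site && echo '<h1>hi</h1>' > /tmp/site/index.html")
    deployed = instance.deploy_to_netlify(directory_path="/tmp/site")
    print(deployed)  # NetlifyDeployResponse
finally:
    instance.stop()
```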
From 34235981651eb3b66af1f8b16d7ae9d762edbf57 Mon Sep 17 00:00:00 2001
From: fern-api <115122769+fern-api[bot]@users.noreply.github.com>
Date: Tue, 27 May 2025 22:13:27 +0000
Subject: [PATCH 10/13] SDK regeneration

---
 poetry.lock                           | 45 +++++++++++++-------------
 pyproject.toml                        |  2 +-
 reference.md                          |  8 +++++
 src/scrapybara/core/client_wrapper.py |  2 +-
 src/scrapybara/instance/client.py     |  8 +++++
 5 files changed, 42 insertions(+), 23 deletions(-)

diff --git a/poetry.lock b/poetry.lock
index b284569..f13c117 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -38,13 +38,13 @@ trio = ["trio (>=0.26.1)"]
 
 [[package]]
 name = "certifi"
-version = "2025.1.31"
+version = "2025.4.26"
 description = "Python package for providing Mozilla's CA Bundle."
 optional = false
 python-versions = ">=3.6"
 files = [
-    {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"},
-    {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"},
+    {file = "certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3"},
+    {file = "certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6"},
 ]
 
 [[package]]
@@ -60,43 +60,46 @@ files = [
 
 [[package]]
 name = "exceptiongroup"
-version = "1.2.2"
+version = "1.3.0"
 description = "Backport of PEP 654 (exception groups)"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
-    {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
+    {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"},
+    {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"},
 ]
 
+[package.dependencies]
+typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""}
+
 [package.extras]
 test = ["pytest (>=6)"]
 
 [[package]]
 name = "h11"
-version = "0.14.0"
+version = "0.16.0"
 description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
-    {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
+    {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"},
+    {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"},
 ]
 
 [[package]]
 name = "httpcore"
-version = "1.0.8"
+version = "1.0.9"
 description = "A minimal low-level HTTP client."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "httpcore-1.0.8-py3-none-any.whl", hash = "sha256:5254cf149bcb5f75e9d1b2b9f729ea4a4b883d1ad7379fc632b727cec23674be"},
-    {file = "httpcore-1.0.8.tar.gz", hash = "sha256:86e94505ed24ea06514883fd44d2bc02d90e77e7979c8eb71b90f41d364a1bad"},
+    {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"},
+    {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"},
 ]
 
 [package.dependencies]
 certifi = "*"
-h11 = ">=0.13,<0.15"
+h11 = ">=0.16"
 
 [package.extras]
 asyncio = ["anyio (>=4.0,<5.0)"]
@@ -201,24 +204,24 @@ reports = ["lxml"]
 
 [[package]]
 name = "mypy-extensions"
-version = "1.0.0"
+version = "1.1.0"
 description = "Type system extensions for programs checked with the mypy type checker."
 optional = false
-python-versions = ">=3.5"
+python-versions = ">=3.8"
 files = [
-    {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
-    {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
+    {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"},
+    {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"},
 ]
 
 [[package]]
 name = "packaging"
-version = "24.2"
+version = "25.0"
 description = "Core utilities for Python packages"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"},
-    {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"},
+    {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"},
+    {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"},
 ]
 
 [[package]]
diff --git a/pyproject.toml b/pyproject.toml
index dfe0372..a54d352 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -3,7 +3,7 @@ name = "scrapybara"
 
 [tool.poetry]
 name = "scrapybara"
-version = "2.5.1"
+version = "2.5.2"
 description = ""
 readme = "README.md"
 authors = []
diff --git a/reference.md b/reference.md
index 624134c..cad20ce 100644
--- a/reference.md
+++ b/reference.md
@@ -490,6 +490,14 @@ client.instance.bash(
 
+**timeout:** `typing.Optional[float]`
+
 
 **request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
 
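The `timeout` parameter documented above flows straight into the bash request body (see the `instance/client.py` hunks that follow) and, after patch 11 below, is also accepted by the instance wrapper's `bash`. A quick sketch; the command and the 30-second value are illustrative only:

```python
from scrapybara import Scrapybara

client = Scrapybara()
instance = client.start_ubuntu()

try:
    # Give the command up to 30 seconds before the API stops waiting on it
    # (timeout is the new float parameter added in this patch).
    result = instance.bash(command="sleep 5 && echo done", timeout=30.0)
    print(result)
finally:
    instance.stop()
```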
diff --git a/src/scrapybara/core/client_wrapper.py b/src/scrapybara/core/client_wrapper.py
index 7f289d0..7121f16 100644
--- a/src/scrapybara/core/client_wrapper.py
+++ b/src/scrapybara/core/client_wrapper.py
@@ -16,7 +16,7 @@ def get_headers(self) -> typing.Dict[str, str]:
         headers: typing.Dict[str, str] = {
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "scrapybara",
-            "X-Fern-SDK-Version": "2.5.1",
+            "X-Fern-SDK-Version": "2.5.2",
         }
         headers["x-api-key"] = self.api_key
         return headers
diff --git a/src/scrapybara/instance/client.py b/src/scrapybara/instance/client.py
index 8a964c1..e656524 100644
--- a/src/scrapybara/instance/client.py
+++ b/src/scrapybara/instance/client.py
@@ -217,6 +217,7 @@ def bash(
         restart: typing.Optional[bool] = OMIT,
         list_sessions: typing.Optional[bool] = OMIT,
         check_session: typing.Optional[int] = OMIT,
+        timeout: typing.Optional[float] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> BashResponse:
         """
@@ -234,6 +235,8 @@
 
         check_session : typing.Optional[int]
 
+        timeout : typing.Optional[float]
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
@@ -262,6 +265,7 @@
                 "restart": restart,
                 "list_sessions": list_sessions,
                 "check_session": check_session,
+                "timeout": timeout,
             },
             headers={
                 "content-type": "application/json",
@@ -988,6 +992,7 @@ async def bash(
         restart: typing.Optional[bool] = OMIT,
         list_sessions: typing.Optional[bool] = OMIT,
         check_session: typing.Optional[int] = OMIT,
+        timeout: typing.Optional[float] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> BashResponse:
         """
@@ -1005,6 +1010,8 @@
 
         check_session : typing.Optional[int]
 
+        timeout : typing.Optional[float]
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
@@ -1041,6 +1048,7 @@ async def main() -> None:
                 "restart": restart,
                 "list_sessions": list_sessions,
                 "check_session": check_session,
+                "timeout": timeout,
             },
             headers={
                 "content-type": "application/json",

From 14f8c3a8304fb99cd4cc141850ae993f7a8996fb Mon Sep 17 00:00:00 2001
From: Cooper Miller
Date: Tue, 27 May 2025 16:26:11 -0700
Subject: [PATCH 11/13] add timeout

---
 src/scrapybara/client.py         | 4 ++++
 src/scrapybara/tools/__init__.py | 4 +++-
 2 files changed, 7 insertions(+), 1 deletion(-)

diff --git a/src/scrapybara/client.py b/src/scrapybara/client.py
index 71c1358..29d433e 100644
--- a/src/scrapybara/client.py
+++ b/src/scrapybara/client.py
@@ -887,6 +887,7 @@ def bash(
         restart: Optional[bool] = OMIT,
         list_sessions: Optional[bool] = OMIT,
         check_session: Optional[int] = OMIT,
+        timeout: Optional[float] = None,
         request_options: Optional[RequestOptions] = None,
     ) -> Optional[Any]:
         return self._client.instance.bash(
@@ -896,6 +897,7 @@
             restart=restart,
             list_sessions=list_sessions,
             check_session=check_session,
+            timeout=timeout,
             request_options=request_options
         )
 
@@ -1411,6 +1413,7 @@ async def bash(
         restart: Optional[bool] = OMIT,
         list_sessions: Optional[bool] = OMIT,
         check_session: Optional[int] = OMIT,
+        timeout: Optional[float] = None,
         request_options: Optional[RequestOptions] = None,
     ) -> Optional[Any]:
         return await self._client.instance.bash(
@@ -1420,6 +1423,7 @@
             restart=restart,
             list_sessions=list_sessions,
             check_session=check_session,
+            timeout=timeout,
             request_options=request_options
         )
 
diff --git a/src/scrapybara/tools/__init__.py b/src/scrapybara/tools/__init__.py
index 63eb214..980f5e9 100644
--- a/src/scrapybara/tools/__init__.py
+++ b/src/scrapybara/tools/__init__.py
@@ -169,6 +169,7 @@ class BashToolParameters(BaseModel):
     restart: Optional[bool] = Field(False, description="Whether to restart the shell")
     list_sessions: Optional[bool] = Field(None, description="Whether to list all bash sessions")
     check_session: Optional[int] = Field(None, description="Session ID to check status")
+    timeout: Optional[float] = Field(None, description="Timeout for the command")
 
 
 class BashTool(Tool):
@@ -193,6 +194,7 @@ def __call__(self, **kwargs: Any) -> Any:
             session=params.session,
             restart=params.restart,
             list_sessions=params.list_sessions,
-            check_session=params.check_session
+            check_session=params.check_session,
+            timeout=params.timeout,
         )

From 174a49ef11582b2b74277f31e633ebc05729a174 Mon Sep 17 00:00:00 2001
From: Cooper Miller
Date: Mon, 8 Sep 2025 09:01:38 -0700
Subject: [PATCH 12/13] rm tests

---
 tests/custom/test_client.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/tests/custom/test_client.py b/tests/custom/test_client.py
index 1106286..60463a1 100644
--- a/tests/custom/test_client.py
+++ b/tests/custom/test_client.py
@@ -382,9 +382,9 @@ def test_restore_from_snapshot() -> None:
     test_browser()
     # test_ubuntu_openai()
     # test_browser_openai()
-    test_upload_download()
-    test_beta_vm_management()
-    test_restore_from_snapshot()
+    # test_upload_download()
+    # test_beta_vm_management()
+    # test_restore_from_snapshot()
     # test_ubuntu_thinking()
     # test_browser_thinking()
     # test_windows()

From 391ed79cf9c3128a0170eee5136f20cce9b848c4 Mon Sep 17 00:00:00 2001
From: Cooper Miller
Date: Mon, 8 Sep 2025 09:04:09 -0700
Subject: [PATCH 13/13] skip

---
 tests/custom/test_client.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/tests/custom/test_client.py b/tests/custom/test_client.py
index 60463a1..902f89e 100644
--- a/tests/custom/test_client.py
+++ b/tests/custom/test_client.py
@@ -284,7 +284,7 @@ def test_upload_download() -> None:
     # Always stop the instance
     ubuntu_instance.stop()
 
-
+@pytest.mark.skip()
 def test_beta_vm_management() -> None:
     _check_api_key()
     client = Scrapybara()
@@ -315,6 +315,7 @@ def test_beta_vm_management() -> None:
         instance.stop()
 
 
+@pytest.mark.skip()
 def test_restore_from_snapshot() -> None:
     _check_api_key()
     client = Scrapybara()
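Nothing in the series exercises the async half of the beta surface. A minimal sketch of the same snapshot round-trip via `AsyncBeta`, assuming `AsyncScrapybara` is exported from the package root and that the async instance wrapper mirrors the sync `stop()` surface:

```python
import asyncio

from scrapybara import AsyncScrapybara  # export from the package root is assumed


async def main() -> None:
    client = AsyncScrapybara()
    instance = await client.start_ubuntu(backend="rodent")
    snapshot_id = None
    try:
        # Same take/warmup round-trip as test_beta_vm_management, awaited.
        snapshot = await client.beta.take_snapshot(instance_id=instance.id)
        snapshot_id = snapshot.snapshot_id
        await client.beta.warmup_snapshot(snapshot_id=snapshot_id)
    finally:
        await instance.stop()
        if snapshot_id:
            await client.beta.delete_snapshot(snapshot_id=snapshot_id)


asyncio.run(main())
```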