diff --git a/pyproject.toml b/pyproject.toml
index 40d1be3..4dae390 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -3,7 +3,7 @@ name = "scrapybara"
[tool.poetry]
name = "scrapybara"
-version = "2.5.3"
+version = "2.6.0-beta.1"
description = ""
readme = "README.md"
authors = []
diff --git a/reference.md b/reference.md
index cad20ce..c44a159 100644
--- a/reference.md
+++ b/reference.md
@@ -65,6 +65,22 @@ client.start()
-
+**backend:** `typing.Optional[str]`
+
+
+
+
+
+-
+
+**snapshot_id:** `typing.Optional[str]`
+
+
+
+
+
+-
+
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
@@ -221,6 +237,62 @@ client.get_auth_states()
+
+
+
+
+client.delete_auth_state(...)
+
+-
+
+#### 🔌 Usage
+
+
+-
+
+
+-
+
+```python
+from scrapybara import Scrapybara
+
+client = Scrapybara(
+ api_key="YOUR_API_KEY",
+)
+client.delete_auth_state(
+ auth_state_id="auth_state_id",
+)
+
+```
+
+
+
+
+
+#### ⚙️ Parameters
+
+
+-
+
+
+-
+
+**auth_state_id:** `str`
+
+
+
+
+
+-
+
+**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
+
+
+
+
+
+
+
@@ -1086,6 +1158,169 @@ client.instance.resume(
+
+
+
+
+client.instance.expose_port(...)
+
+-
+
+#### 📝 Description
+
+
+-
+
+
+-
+
+Expose a port on the instance with a public-facing URL.
+
+This endpoint creates a temporary public URL that routes traffic to the specified port on the instance.
+
+
+
+
+
+#### 🔌 Usage
+
+
+-
+
+
+-
+
+```python
+from scrapybara import Scrapybara
+
+client = Scrapybara(
+ api_key="YOUR_API_KEY",
+)
+client.instance.expose_port(
+ instance_id="instance_id",
+ port=1,
+)
+
+```
+
+
+
+
+
+#### ⚙️ Parameters
+
+
+-
+
+
+-
+
+**instance_id:** `str`
+
+
+
+
+
+-
+
+**port:** `int`
+
+
+
+
+
+-
+
+**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
+
+
+
+
+
+
+
+
+
+
+
+client.instance.deploy_to_netlify(...)
+
+-
+
+#### 📝 Description
+
+
+-
+
+
+-
+
+Deploy a directory from the instance to Netlify.
+
+Args:
+ directory_path: Path to the directory on the instance to deploy
+
+
+
+
+
+#### 🔌 Usage
+
+
+-
+
+
+-
+
+```python
+from scrapybara import Scrapybara
+
+client = Scrapybara(
+ api_key="YOUR_API_KEY",
+)
+client.instance.deploy_to_netlify(
+ instance_id="instance_id",
+ directory_path="directory_path",
+)
+
+```
+
+
+
+
+
+#### ⚙️ Parameters
+
+
+-
+
+
+-
+
+**instance_id:** `str`
+
+
+
+
+
+-
+
+**directory_path:** `str`
+
+
+
+
+
+-
+
+**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
+
+
+
+
+
+
+
@@ -1135,6 +1370,14 @@ client.browser.start(
-
+**separate_stream:** `typing.Optional[bool]`
+
+
+
+
+
+-
+
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
@@ -1199,6 +1442,62 @@ client.browser.get_cdp_url(
+
+
+
+
+client.browser.get_stream_url(...)
+
+-
+
+#### 🔌 Usage
+
+
+-
+
+
+-
+
+```python
+from scrapybara import Scrapybara
+
+client = Scrapybara(
+ api_key="YOUR_API_KEY",
+)
+client.browser.get_stream_url(
+ instance_id="instance_id",
+)
+
+```
+
+
+
+
+
+#### ⚙️ Parameters
+
+
+-
+
+
+-
+
+**instance_id:** `str`
+
+
+
+
+
+-
+
+**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
+
+
+
+
+
+
+
@@ -2292,3 +2591,214 @@ client.env.delete(
+## BetaVmManagement
+client.beta_vm_management.take_snapshot(...)
+
+-
+
+#### 📝 Description
+
+
+-
+
+
+-
+
+Take a snapshot of an instance
+
+
+
+
+
+#### 🔌 Usage
+
+
+-
+
+
+-
+
+```python
+from scrapybara import Scrapybara
+
+client = Scrapybara(
+ api_key="YOUR_API_KEY",
+)
+client.beta_vm_management.take_snapshot(
+ instance_id="instance_id",
+)
+
+```
+
+
+
+
+
+#### ⚙️ Parameters
+
+
+-
+
+
+-
+
+**instance_id:** `str`
+
+
+
+
+
+-
+
+**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
+
+
+
+
+
+
+
+
+
+
+
+client.beta_vm_management.warmup_snapshot(...)
+
+-
+
+#### 📝 Description
+
+
+-
+
+
+-
+
+Warmup a snapshot
+
+
+
+
+
+#### 🔌 Usage
+
+
+-
+
+
+-
+
+```python
+from scrapybara import Scrapybara
+
+client = Scrapybara(
+ api_key="YOUR_API_KEY",
+)
+client.beta_vm_management.warmup_snapshot(
+ snapshot_id="snapshot_id",
+)
+
+```
+
+
+
+
+
+#### ⚙️ Parameters
+
+
+-
+
+
+-
+
+**snapshot_id:** `str`
+
+
+
+
+
+-
+
+**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
+
+
+
+
+
+
+
+
+
+
+
+client.beta_vm_management.delete_snapshot(...)
+
+-
+
+#### 📝 Description
+
+
+-
+
+
+-
+
+Delete a snapshot
+
+
+
+
+
+#### 🔌 Usage
+
+
+-
+
+
+-
+
+```python
+from scrapybara import Scrapybara
+
+client = Scrapybara(
+ api_key="YOUR_API_KEY",
+)
+client.beta_vm_management.delete_snapshot(
+ snapshot_id="snapshot_id",
+)
+
+```
+
+
+
+
+
+#### ⚙️ Parameters
+
+
+-
+
+
+-
+
+**snapshot_id:** `str`
+
+
+
+
+
+-
+
+**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/scrapybara/__init__.py b/src/scrapybara/__init__.py
index 0005fcd..2b09b27 100644
--- a/src/scrapybara/__init__.py
+++ b/src/scrapybara/__init__.py
@@ -6,17 +6,20 @@
BrowserAuthenticateResponse,
BrowserGetCdpUrlResponse,
BrowserGetCurrentUrlResponse,
+ BrowserGetStreamUrlResponse,
Button,
CellType,
ClickMouseAction,
ClickMouseActionClickType,
ComputerResponse,
+ DeleteBrowserAuthResponse,
DeploymentConfigInstanceType,
DragMouseAction,
EditResponse,
EnvGetResponse,
EnvResponse,
ExecuteCellRequest,
+ ExposePortResponse,
FileResponse,
GetCursorPositionAction,
GetInstanceResponse,
@@ -27,15 +30,18 @@
KernelInfo,
ModifyBrowserAuthResponse,
MoveMouseAction,
+ NetlifyDeployResponse,
Notebook,
NotebookCell,
PressKeyAction,
SaveBrowserAuthResponse,
ScrollAction,
+ SnapshotResponse,
StartBrowserResponse,
Status,
StopBrowserResponse,
StopInstanceResponse,
+ SuccessResponse,
TakeScreenshotAction,
TypeTextAction,
UploadResponse,
@@ -44,7 +50,7 @@
WaitAction,
)
from .errors import UnprocessableEntityError
-from . import browser, code, env, instance, notebook
+from . import beta_vm_management, browser, code, env, instance, notebook
from .client import AsyncScrapybara, Scrapybara
from .environment import ScrapybaraEnvironment
from .instance import (
@@ -69,18 +75,21 @@
"BrowserAuthenticateResponse",
"BrowserGetCdpUrlResponse",
"BrowserGetCurrentUrlResponse",
+ "BrowserGetStreamUrlResponse",
"Button",
"CellType",
"ClickMouseAction",
"ClickMouseActionClickType",
"Command",
"ComputerResponse",
+ "DeleteBrowserAuthResponse",
"DeploymentConfigInstanceType",
"DragMouseAction",
"EditResponse",
"EnvGetResponse",
"EnvResponse",
"ExecuteCellRequest",
+ "ExposePortResponse",
"FileResponse",
"GetCursorPositionAction",
"GetInstanceResponse",
@@ -91,6 +100,7 @@
"KernelInfo",
"ModifyBrowserAuthResponse",
"MoveMouseAction",
+ "NetlifyDeployResponse",
"Notebook",
"NotebookCell",
"PressKeyAction",
@@ -108,10 +118,12 @@
"Scrapybara",
"ScrapybaraEnvironment",
"ScrollAction",
+ "SnapshotResponse",
"StartBrowserResponse",
"Status",
"StopBrowserResponse",
"StopInstanceResponse",
+ "SuccessResponse",
"TakeScreenshotAction",
"TypeTextAction",
"UnprocessableEntityError",
@@ -120,6 +132,7 @@
"ValidationErrorLocItem",
"WaitAction",
"__version__",
+ "beta_vm_management",
"browser",
"code",
"env",
diff --git a/src/scrapybara/base_client.py b/src/scrapybara/base_client.py
index 9661a10..9befaee 100644
--- a/src/scrapybara/base_client.py
+++ b/src/scrapybara/base_client.py
@@ -11,6 +11,7 @@
from .code.client import CodeClient
from .notebook.client import NotebookClient
from .env.client import EnvClient
+from .beta_vm_management.client import BetaVmManagementClient
from .types.deployment_config_instance_type import DeploymentConfigInstanceType
from .core.request_options import RequestOptions
from .types.get_instance_response import GetInstanceResponse
@@ -20,12 +21,14 @@
from json.decoder import JSONDecodeError
from .core.jsonable_encoder import jsonable_encoder
from .types.auth_state_response import AuthStateResponse
+from .types.delete_browser_auth_response import DeleteBrowserAuthResponse
from .core.client_wrapper import AsyncClientWrapper
from .instance.client import AsyncInstanceClient
from .browser.client import AsyncBrowserClient
from .code.client import AsyncCodeClient
from .notebook.client import AsyncNotebookClient
from .env.client import AsyncEnvClient
+from .beta_vm_management.client import AsyncBetaVmManagementClient
# this is used as the default value for optional parameters
OMIT = typing.cast(typing.Any, ...)
@@ -98,6 +101,7 @@ def __init__(
self.code = CodeClient(client_wrapper=self._client_wrapper)
self.notebook = NotebookClient(client_wrapper=self._client_wrapper)
self.env = EnvClient(client_wrapper=self._client_wrapper)
+ self.beta_vm_management = BetaVmManagementClient(client_wrapper=self._client_wrapper)
def start(
self,
@@ -106,6 +110,8 @@ def start(
timeout_hours: typing.Optional[float] = OMIT,
blocked_domains: typing.Optional[typing.Sequence[str]] = OMIT,
resolution: typing.Optional[typing.Sequence[int]] = OMIT,
+ backend: typing.Optional[str] = OMIT,
+ snapshot_id: typing.Optional[str] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> GetInstanceResponse:
"""
@@ -119,6 +125,10 @@ def start(
resolution : typing.Optional[typing.Sequence[int]]
+ backend : typing.Optional[str]
+
+ snapshot_id : typing.Optional[str]
+
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
@@ -144,6 +154,8 @@ def start(
"timeout_hours": timeout_hours,
"blocked_domains": blocked_domains,
"resolution": resolution,
+ "backend": backend,
+ "snapshot_id": snapshot_id,
},
headers={
"content-type": "application/json",
@@ -313,6 +325,65 @@ def get_auth_states(
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
+ def delete_auth_state(
+ self, *, auth_state_id: str, request_options: typing.Optional[RequestOptions] = None
+ ) -> DeleteBrowserAuthResponse:
+ """
+ Parameters
+ ----------
+ auth_state_id : str
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ DeleteBrowserAuthResponse
+ Successful Response
+
+ Examples
+ --------
+ from scrapybara import Scrapybara
+
+ client = Scrapybara(
+ api_key="YOUR_API_KEY",
+ )
+ client.delete_auth_state(
+ auth_state_id="auth_state_id",
+ )
+ """
+ _response = self._client_wrapper.httpx_client.request(
+ "v1/delete_auth_state",
+ method="POST",
+ params={
+ "auth_state_id": auth_state_id,
+ },
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return typing.cast(
+ DeleteBrowserAuthResponse,
+ parse_obj_as(
+ type_=DeleteBrowserAuthResponse, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(
+ typing.cast(
+ HttpValidationError,
+ parse_obj_as(
+ type_=HttpValidationError, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
class AsyncBaseClient:
"""
@@ -381,6 +452,7 @@ def __init__(
self.code = AsyncCodeClient(client_wrapper=self._client_wrapper)
self.notebook = AsyncNotebookClient(client_wrapper=self._client_wrapper)
self.env = AsyncEnvClient(client_wrapper=self._client_wrapper)
+ self.beta_vm_management = AsyncBetaVmManagementClient(client_wrapper=self._client_wrapper)
async def start(
self,
@@ -389,6 +461,8 @@ async def start(
timeout_hours: typing.Optional[float] = OMIT,
blocked_domains: typing.Optional[typing.Sequence[str]] = OMIT,
resolution: typing.Optional[typing.Sequence[int]] = OMIT,
+ backend: typing.Optional[str] = OMIT,
+ snapshot_id: typing.Optional[str] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> GetInstanceResponse:
"""
@@ -402,6 +476,10 @@ async def start(
resolution : typing.Optional[typing.Sequence[int]]
+ backend : typing.Optional[str]
+
+ snapshot_id : typing.Optional[str]
+
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
@@ -435,6 +513,8 @@ async def main() -> None:
"timeout_hours": timeout_hours,
"blocked_domains": blocked_domains,
"resolution": resolution,
+ "backend": backend,
+ "snapshot_id": snapshot_id,
},
headers={
"content-type": "application/json",
@@ -630,6 +710,73 @@ async def main() -> None:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
+ async def delete_auth_state(
+ self, *, auth_state_id: str, request_options: typing.Optional[RequestOptions] = None
+ ) -> DeleteBrowserAuthResponse:
+ """
+ Parameters
+ ----------
+ auth_state_id : str
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ DeleteBrowserAuthResponse
+ Successful Response
+
+ Examples
+ --------
+ import asyncio
+
+ from scrapybara import AsyncScrapybara
+
+ client = AsyncScrapybara(
+ api_key="YOUR_API_KEY",
+ )
+
+
+ async def main() -> None:
+ await client.delete_auth_state(
+ auth_state_id="auth_state_id",
+ )
+
+
+ asyncio.run(main())
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ "v1/delete_auth_state",
+ method="POST",
+ params={
+ "auth_state_id": auth_state_id,
+ },
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return typing.cast(
+ DeleteBrowserAuthResponse,
+ parse_obj_as(
+ type_=DeleteBrowserAuthResponse, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(
+ typing.cast(
+ HttpValidationError,
+ parse_obj_as(
+ type_=HttpValidationError, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
def _get_base_url(*, base_url: typing.Optional[str] = None, environment: ScrapybaraEnvironment) -> str:
if base_url is not None:
diff --git a/src/scrapybara/beta_vm_management/__init__.py b/src/scrapybara/beta_vm_management/__init__.py
new file mode 100644
index 0000000..f3ea265
--- /dev/null
+++ b/src/scrapybara/beta_vm_management/__init__.py
@@ -0,0 +1,2 @@
+# This file was auto-generated by Fern from our API Definition.
+
diff --git a/src/scrapybara/beta_vm_management/client.py b/src/scrapybara/beta_vm_management/client.py
new file mode 100644
index 0000000..7c626c3
--- /dev/null
+++ b/src/scrapybara/beta_vm_management/client.py
@@ -0,0 +1,396 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from ..core.client_wrapper import SyncClientWrapper
+import typing
+from ..core.request_options import RequestOptions
+from ..types.snapshot_response import SnapshotResponse
+from ..core.jsonable_encoder import jsonable_encoder
+from ..core.pydantic_utilities import parse_obj_as
+from ..errors.unprocessable_entity_error import UnprocessableEntityError
+from ..types.http_validation_error import HttpValidationError
+from json.decoder import JSONDecodeError
+from ..core.api_error import ApiError
+from ..types.success_response import SuccessResponse
+from ..core.client_wrapper import AsyncClientWrapper
+
+
+class BetaVmManagementClient:
+ def __init__(self, *, client_wrapper: SyncClientWrapper):
+ self._client_wrapper = client_wrapper
+
+ def take_snapshot(
+ self, instance_id: str, *, request_options: typing.Optional[RequestOptions] = None
+ ) -> SnapshotResponse:
+ """
+ Take a snapshot of an instance
+
+ Parameters
+ ----------
+ instance_id : str
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ SnapshotResponse
+ Successful Response
+
+ Examples
+ --------
+ from scrapybara import Scrapybara
+
+ client = Scrapybara(
+ api_key="YOUR_API_KEY",
+ )
+ client.beta_vm_management.take_snapshot(
+ instance_id="instance_id",
+ )
+ """
+ _response = self._client_wrapper.httpx_client.request(
+ f"v1/beta/instances/{jsonable_encoder(instance_id)}/snapshot",
+ method="POST",
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return typing.cast(
+ SnapshotResponse,
+ parse_obj_as(
+ type_=SnapshotResponse, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(
+ typing.cast(
+ HttpValidationError,
+ parse_obj_as(
+ type_=HttpValidationError, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ def warmup_snapshot(
+ self, snapshot_id: str, *, request_options: typing.Optional[RequestOptions] = None
+ ) -> SuccessResponse:
+ """
+ Warmup a snapshot
+
+ Parameters
+ ----------
+ snapshot_id : str
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ SuccessResponse
+ Successful Response
+
+ Examples
+ --------
+ from scrapybara import Scrapybara
+
+ client = Scrapybara(
+ api_key="YOUR_API_KEY",
+ )
+ client.beta_vm_management.warmup_snapshot(
+ snapshot_id="snapshot_id",
+ )
+ """
+ _response = self._client_wrapper.httpx_client.request(
+ f"v1/beta/snapshots/{jsonable_encoder(snapshot_id)}/warmup",
+ method="POST",
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return typing.cast(
+ SuccessResponse,
+ parse_obj_as(
+ type_=SuccessResponse, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(
+ typing.cast(
+ HttpValidationError,
+ parse_obj_as(
+ type_=HttpValidationError, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ def delete_snapshot(
+ self, snapshot_id: str, *, request_options: typing.Optional[RequestOptions] = None
+ ) -> SuccessResponse:
+ """
+ Delete a snapshot
+
+ Parameters
+ ----------
+ snapshot_id : str
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ SuccessResponse
+ Successful Response
+
+ Examples
+ --------
+ from scrapybara import Scrapybara
+
+ client = Scrapybara(
+ api_key="YOUR_API_KEY",
+ )
+ client.beta_vm_management.delete_snapshot(
+ snapshot_id="snapshot_id",
+ )
+ """
+ _response = self._client_wrapper.httpx_client.request(
+ f"v1/beta/snapshots/{jsonable_encoder(snapshot_id)}/delete",
+ method="POST",
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return typing.cast(
+ SuccessResponse,
+ parse_obj_as(
+ type_=SuccessResponse, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(
+ typing.cast(
+ HttpValidationError,
+ parse_obj_as(
+ type_=HttpValidationError, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+
+class AsyncBetaVmManagementClient:
+ def __init__(self, *, client_wrapper: AsyncClientWrapper):
+ self._client_wrapper = client_wrapper
+
+ async def take_snapshot(
+ self, instance_id: str, *, request_options: typing.Optional[RequestOptions] = None
+ ) -> SnapshotResponse:
+ """
+ Take a snapshot of an instance
+
+ Parameters
+ ----------
+ instance_id : str
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ SnapshotResponse
+ Successful Response
+
+ Examples
+ --------
+ import asyncio
+
+ from scrapybara import AsyncScrapybara
+
+ client = AsyncScrapybara(
+ api_key="YOUR_API_KEY",
+ )
+
+
+ async def main() -> None:
+ await client.beta_vm_management.take_snapshot(
+ instance_id="instance_id",
+ )
+
+
+ asyncio.run(main())
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ f"v1/beta/instances/{jsonable_encoder(instance_id)}/snapshot",
+ method="POST",
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return typing.cast(
+ SnapshotResponse,
+ parse_obj_as(
+ type_=SnapshotResponse, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(
+ typing.cast(
+ HttpValidationError,
+ parse_obj_as(
+ type_=HttpValidationError, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ async def warmup_snapshot(
+ self, snapshot_id: str, *, request_options: typing.Optional[RequestOptions] = None
+ ) -> SuccessResponse:
+ """
+ Warmup a snapshot
+
+ Parameters
+ ----------
+ snapshot_id : str
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ SuccessResponse
+ Successful Response
+
+ Examples
+ --------
+ import asyncio
+
+ from scrapybara import AsyncScrapybara
+
+ client = AsyncScrapybara(
+ api_key="YOUR_API_KEY",
+ )
+
+
+ async def main() -> None:
+ await client.beta_vm_management.warmup_snapshot(
+ snapshot_id="snapshot_id",
+ )
+
+
+ asyncio.run(main())
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ f"v1/beta/snapshots/{jsonable_encoder(snapshot_id)}/warmup",
+ method="POST",
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return typing.cast(
+ SuccessResponse,
+ parse_obj_as(
+ type_=SuccessResponse, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(
+ typing.cast(
+ HttpValidationError,
+ parse_obj_as(
+ type_=HttpValidationError, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ async def delete_snapshot(
+ self, snapshot_id: str, *, request_options: typing.Optional[RequestOptions] = None
+ ) -> SuccessResponse:
+ """
+ Delete a snapshot
+
+ Parameters
+ ----------
+ snapshot_id : str
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ SuccessResponse
+ Successful Response
+
+ Examples
+ --------
+ import asyncio
+
+ from scrapybara import AsyncScrapybara
+
+ client = AsyncScrapybara(
+ api_key="YOUR_API_KEY",
+ )
+
+
+ async def main() -> None:
+ await client.beta_vm_management.delete_snapshot(
+ snapshot_id="snapshot_id",
+ )
+
+
+ asyncio.run(main())
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ f"v1/beta/snapshots/{jsonable_encoder(snapshot_id)}/delete",
+ method="POST",
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return typing.cast(
+ SuccessResponse,
+ parse_obj_as(
+ type_=SuccessResponse, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(
+ typing.cast(
+ HttpValidationError,
+ parse_obj_as(
+ type_=HttpValidationError, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
diff --git a/src/scrapybara/browser/client.py b/src/scrapybara/browser/client.py
index 1d6fe94..71b1049 100644
--- a/src/scrapybara/browser/client.py
+++ b/src/scrapybara/browser/client.py
@@ -11,6 +11,7 @@
from json.decoder import JSONDecodeError
from ..core.api_error import ApiError
from ..types.browser_get_cdp_url_response import BrowserGetCdpUrlResponse
+from ..types.browser_get_stream_url_response import BrowserGetStreamUrlResponse
from ..types.browser_get_current_url_response import BrowserGetCurrentUrlResponse
from ..types.save_browser_auth_response import SaveBrowserAuthResponse
from ..types.modify_browser_auth_response import ModifyBrowserAuthResponse
@@ -24,13 +25,19 @@ def __init__(self, *, client_wrapper: SyncClientWrapper):
self._client_wrapper = client_wrapper
def start(
- self, instance_id: str, *, request_options: typing.Optional[RequestOptions] = None
+ self,
+ instance_id: str,
+ *,
+ separate_stream: typing.Optional[bool] = None,
+ request_options: typing.Optional[RequestOptions] = None,
) -> StartBrowserResponse:
"""
Parameters
----------
instance_id : str
+ separate_stream : typing.Optional[bool]
+
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
@@ -53,6 +60,9 @@ def start(
_response = self._client_wrapper.httpx_client.request(
f"v1/instance/{jsonable_encoder(instance_id)}/browser/start",
method="POST",
+ params={
+ "separate_stream": separate_stream,
+ },
request_options=request_options,
)
try:
@@ -135,6 +145,62 @@ def get_cdp_url(
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
+ def get_stream_url(
+ self, instance_id: str, *, request_options: typing.Optional[RequestOptions] = None
+ ) -> BrowserGetStreamUrlResponse:
+ """
+ Parameters
+ ----------
+ instance_id : str
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ BrowserGetStreamUrlResponse
+ Successful Response
+
+ Examples
+ --------
+ from scrapybara import Scrapybara
+
+ client = Scrapybara(
+ api_key="YOUR_API_KEY",
+ )
+ client.browser.get_stream_url(
+ instance_id="instance_id",
+ )
+ """
+ _response = self._client_wrapper.httpx_client.request(
+ f"v1/instance/{jsonable_encoder(instance_id)}/browser/stream_url",
+ method="GET",
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return typing.cast(
+ BrowserGetStreamUrlResponse,
+ parse_obj_as(
+ type_=BrowserGetStreamUrlResponse, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(
+ typing.cast(
+ HttpValidationError,
+ parse_obj_as(
+ type_=HttpValidationError, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
def get_current_url(
self, instance_id: str, *, request_options: typing.Optional[RequestOptions] = None
) -> BrowserGetCurrentUrlResponse:
@@ -448,13 +514,19 @@ def __init__(self, *, client_wrapper: AsyncClientWrapper):
self._client_wrapper = client_wrapper
async def start(
- self, instance_id: str, *, request_options: typing.Optional[RequestOptions] = None
+ self,
+ instance_id: str,
+ *,
+ separate_stream: typing.Optional[bool] = None,
+ request_options: typing.Optional[RequestOptions] = None,
) -> StartBrowserResponse:
"""
Parameters
----------
instance_id : str
+ separate_stream : typing.Optional[bool]
+
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
@@ -485,6 +557,9 @@ async def main() -> None:
_response = await self._client_wrapper.httpx_client.request(
f"v1/instance/{jsonable_encoder(instance_id)}/browser/start",
method="POST",
+ params={
+ "separate_stream": separate_stream,
+ },
request_options=request_options,
)
try:
@@ -575,6 +650,70 @@ async def main() -> None:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
+ async def get_stream_url(
+ self, instance_id: str, *, request_options: typing.Optional[RequestOptions] = None
+ ) -> BrowserGetStreamUrlResponse:
+ """
+ Parameters
+ ----------
+ instance_id : str
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ BrowserGetStreamUrlResponse
+ Successful Response
+
+ Examples
+ --------
+ import asyncio
+
+ from scrapybara import AsyncScrapybara
+
+ client = AsyncScrapybara(
+ api_key="YOUR_API_KEY",
+ )
+
+
+ async def main() -> None:
+ await client.browser.get_stream_url(
+ instance_id="instance_id",
+ )
+
+
+ asyncio.run(main())
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ f"v1/instance/{jsonable_encoder(instance_id)}/browser/stream_url",
+ method="GET",
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return typing.cast(
+ BrowserGetStreamUrlResponse,
+ parse_obj_as(
+ type_=BrowserGetStreamUrlResponse, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(
+ typing.cast(
+ HttpValidationError,
+ parse_obj_as(
+ type_=HttpValidationError, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
async def get_current_url(
self, instance_id: str, *, request_options: typing.Optional[RequestOptions] = None
) -> BrowserGetCurrentUrlResponse:
diff --git a/src/scrapybara/client.py b/src/scrapybara/client.py
index 29d433e..b345bf6 100644
--- a/src/scrapybara/client.py
+++ b/src/scrapybara/client.py
@@ -33,6 +33,7 @@
BrowserAuthenticateResponse,
BrowserGetCdpUrlResponse,
BrowserGetCurrentUrlResponse,
+ BrowserGetStreamUrlResponse,
Button,
ClickMouseActionClickType,
ComputerResponse,
@@ -52,6 +53,8 @@
ModifyBrowserAuthResponse,
UploadResponse,
FileResponse,
+ ExposePortResponse,
+ NetlifyDeployResponse,
)
from .types.act import (
@@ -126,10 +129,14 @@ def __init__(self, instance_id: str, client: BaseClient):
self._client = client
def start(
- self, request_options: Optional[RequestOptions] = None
+ self,
+ separate_stream: Optional[bool] = None,
+ request_options: Optional[RequestOptions] = None
) -> StartBrowserResponse:
return self._client.browser.start(
- self.instance_id, request_options=request_options
+ self.instance_id,
+ separate_stream=separate_stream,
+ request_options=request_options
)
def get_cdp_url(
@@ -186,6 +193,12 @@ def stop(
self.instance_id, request_options=request_options
)
+ def get_stream_url(
+ self, request_options: Optional[RequestOptions] = None
+ ) -> BrowserGetStreamUrlResponse:
+ return self._client.browser.get_stream_url(
+ instance_id=self.instance_id, request_options=request_options
+ )
class AsyncBrowser:
def __init__(self, instance_id: str, client: AsyncBaseClient):
@@ -193,10 +206,14 @@ def __init__(self, instance_id: str, client: AsyncBaseClient):
self._client = client
async def start(
- self, request_options: Optional[RequestOptions] = None
+ self,
+ separate_stream: Optional[bool] = None,
+ request_options: Optional[RequestOptions] = None
) -> StartBrowserResponse:
return await self._client.browser.start(
- self.instance_id, request_options=request_options
+ self.instance_id,
+ separate_stream=separate_stream,
+ request_options=request_options
)
async def get_cdp_url(
@@ -253,6 +270,12 @@ async def stop(
self.instance_id, request_options=request_options
)
+ async def get_stream_url(
+ self, request_options: Optional[RequestOptions] = None
+ ) -> BrowserGetStreamUrlResponse:
+ return await self._client.browser.get_stream_url(
+ instance_id=self.instance_id, request_options=request_options
+ )
class Code:
def __init__(self, instance_id: str, client: BaseClient):
@@ -859,9 +882,21 @@ def resume(
request_options: Optional[RequestOptions] = None,
) -> GetInstanceResponse:
return self._client.instance.resume(
- self.id,
- timeout_hours=timeout_hours,
- request_options=request_options,
+ self.id, timeout_hours=timeout_hours, request_options=request_options
+ )
+
+ def expose_port(
+ self, *, port: int, request_options: Optional[RequestOptions] = None
+ ) -> ExposePortResponse:
+ return self._client.instance.expose_port(
+ self.id, port=port, request_options=request_options
+ )
+
+ def deploy_to_netlify(
+ self, *, directory_path: str, request_options: Optional[RequestOptions] = None
+ ) -> NetlifyDeployResponse:
+ return self._client.instance.deploy_to_netlify(
+ self.id, directory_path=directory_path, request_options=request_options
)
@@ -1385,9 +1420,21 @@ async def resume(
request_options: Optional[RequestOptions] = None,
) -> GetInstanceResponse:
return await self._client.instance.resume(
- self.id,
- timeout_hours=timeout_hours,
- request_options=request_options,
+ self.id, timeout_hours=timeout_hours, request_options=request_options
+ )
+
+ async def expose_port(
+ self, *, port: int, request_options: Optional[RequestOptions] = None
+ ) -> ExposePortResponse:
+ return await self._client.instance.expose_port(
+ self.id, port=port, request_options=request_options
+ )
+
+ async def deploy_to_netlify(
+ self, *, directory_path: str, request_options: Optional[RequestOptions] = None
+ ) -> NetlifyDeployResponse:
+ return await self._client.instance.deploy_to_netlify(
+ self.id, directory_path=directory_path, request_options=request_options
)
@@ -1599,6 +1646,22 @@ def __init__(
follow_redirects=follow_redirects,
httpx_client=httpx_client,
)
+ self._beta = Beta(self._base_client)
+
+ @property
+ def beta(self) -> "Beta":
+ """
+ Access beta functionality.
+
+ This property provides access to beta features that may change
+ or be removed in future versions.
+
+ Returns
+ -------
+ Beta
+ Beta features wrapper
+ """
+ return self._beta
@property
def httpx_client(self) -> HttpClient:
@@ -1610,6 +1673,8 @@ def start_ubuntu(
timeout_hours: Optional[float] = OMIT,
blocked_domains: Optional[Sequence[str]] = OMIT,
resolution: Optional[Sequence[int]] = OMIT,
+ backend: Optional[str] = OMIT,
+ snapshot_id: Optional[str] = OMIT,
request_options: Optional[RequestOptions] = None,
) -> UbuntuInstance:
response = self._base_client.start(
@@ -1617,6 +1682,8 @@ def start_ubuntu(
timeout_hours=timeout_hours,
blocked_domains=blocked_domains,
resolution=resolution,
+ backend=backend,
+ snapshot_id=snapshot_id,
request_options=request_options,
)
return UbuntuInstance(
@@ -1632,6 +1699,8 @@ def start_browser(
timeout_hours: Optional[float] = OMIT,
blocked_domains: Optional[Sequence[str]] = OMIT,
resolution: Optional[Sequence[int]] = OMIT,
+ backend: Optional[str] = OMIT,
+ snapshot_id: Optional[str] = OMIT,
request_options: Optional[RequestOptions] = None,
) -> BrowserInstance:
response = self._base_client.start(
@@ -1639,6 +1708,8 @@ def start_browser(
timeout_hours=timeout_hours,
blocked_domains=blocked_domains,
resolution=resolution,
+ backend=backend,
+ snapshot_id=snapshot_id,
request_options=request_options,
)
return BrowserInstance(
@@ -2059,6 +2130,22 @@ def __init__(
follow_redirects=follow_redirects,
httpx_client=httpx_client,
)
+ self._beta = AsyncBeta(self._base_client)
+
+ @property
+ def beta(self) -> "AsyncBeta":
+ """
+ Access beta functionality.
+
+ This property provides access to beta features that may change
+ or be removed in future versions.
+
+ Returns
+ -------
+ AsyncBeta
+ Beta features wrapper
+ """
+ return self._beta
@property
def httpx_client(self) -> AsyncHttpClient:
@@ -2070,6 +2157,8 @@ async def start_ubuntu(
timeout_hours: Optional[float] = OMIT,
blocked_domains: Optional[Sequence[str]] = OMIT,
resolution: Optional[Sequence[int]] = OMIT,
+ backend: Optional[str] = OMIT,
+ snapshot_id: Optional[str] = OMIT,
request_options: Optional[RequestOptions] = None,
) -> AsyncUbuntuInstance:
response = await self._base_client.start(
@@ -2077,6 +2166,8 @@ async def start_ubuntu(
timeout_hours=timeout_hours,
blocked_domains=blocked_domains,
resolution=resolution,
+ backend=backend,
+ snapshot_id=snapshot_id,
request_options=request_options,
)
return AsyncUbuntuInstance(
@@ -2092,6 +2183,8 @@ async def start_browser(
timeout_hours: Optional[float] = OMIT,
blocked_domains: Optional[Sequence[str]] = OMIT,
resolution: Optional[Sequence[int]] = OMIT,
+ backend: Optional[str] = OMIT,
+ snapshot_id: Optional[str] = OMIT,
request_options: Optional[RequestOptions] = None,
) -> AsyncBrowserInstance:
response = await self._base_client.start(
@@ -2099,6 +2192,8 @@ async def start_browser(
timeout_hours=timeout_hours,
blocked_domains=blocked_domains,
resolution=resolution,
+ backend=backend,
+ snapshot_id=snapshot_id,
request_options=request_options,
)
return AsyncBrowserInstance(
@@ -2598,4 +2693,152 @@ def _filter_images(messages: List[Message], images_to_keep: int):
if images_kept < images_to_keep:
images_kept += 1
else:
- del tool_result.result["base_64_image"]
\ No newline at end of file
+ del tool_result.result["base_64_image"]
+
+
+class Beta:
+ """
+ Class that provides access to beta functionality in Scrapybara.
+
+ Includes:
+ - VM management: snapshot operations on VM instances
+ """
+ def __init__(self, base_client: BaseClient):
+ self._base_client = base_client
+
+ def take_snapshot(self, instance_id: str, *, request_options: Optional[RequestOptions] = None):
+ """
+ Take a snapshot of an instance
+
+ Parameters
+ ----------
+ instance_id : str
+ ID of the instance to snapshot
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ SnapshotResponse
+ Contains the snapshot_id
+ """
+ return self._base_client.beta_vm_management.take_snapshot(
+ instance_id=instance_id,
+ request_options=request_options
+ )
+
+ def warmup_snapshot(self, snapshot_id: str, *, request_options: Optional[RequestOptions] = None):
+ """
+ Warmup a snapshot so it's ready for faster instance creation
+
+ Parameters
+ ----------
+ snapshot_id : str
+ ID of the snapshot to warm up
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ SuccessResponse
+ Indicates if the operation was successful
+ """
+ return self._base_client.beta_vm_management.warmup_snapshot(
+ snapshot_id=snapshot_id,
+ request_options=request_options
+ )
+
+ def delete_snapshot(self, snapshot_id: str, *, request_options: Optional[RequestOptions] = None):
+ """
+ Delete a snapshot
+
+ Parameters
+ ----------
+ snapshot_id : str
+ ID of the snapshot to delete
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ SuccessResponse
+ Indicates if the operation was successful
+ """
+ return self._base_client.beta_vm_management.delete_snapshot(
+ snapshot_id=snapshot_id,
+ request_options=request_options
+ )
+
+
+class AsyncBeta:
+ """
+ Class that provides access to beta functionality in AsyncScrapybara.
+
+ Includes:
+ - VM management: snapshot operations on VM instances
+ """
+ def __init__(self, base_client: AsyncBaseClient):
+ self._base_client = base_client
+
+ async def take_snapshot(self, instance_id: str, *, request_options: Optional[RequestOptions] = None):
+ """
+ Take a snapshot of an instance
+
+ Parameters
+ ----------
+ instance_id : str
+ ID of the instance to snapshot
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ SnapshotResponse
+ Contains the snapshot_id
+ """
+ return await self._base_client.beta_vm_management.take_snapshot(
+ instance_id=instance_id,
+ request_options=request_options
+ )
+
+ async def warmup_snapshot(self, snapshot_id: str, *, request_options: Optional[RequestOptions] = None):
+ """
+ Warmup a snapshot so it's ready for faster instance creation
+
+ Parameters
+ ----------
+ snapshot_id : str
+ ID of the snapshot to warm up
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ SuccessResponse
+ Indicates if the operation was successful
+ """
+ return await self._base_client.beta_vm_management.warmup_snapshot(
+ snapshot_id=snapshot_id,
+ request_options=request_options
+ )
+
+ async def delete_snapshot(self, snapshot_id: str, *, request_options: Optional[RequestOptions] = None):
+ """
+ Delete a snapshot
+
+ Parameters
+ ----------
+ snapshot_id : str
+ ID of the snapshot to delete
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ SuccessResponse
+ Indicates if the operation was successful
+ """
+ return await self._base_client.beta_vm_management.delete_snapshot(
+ snapshot_id=snapshot_id,
+ request_options=request_options
+ )
diff --git a/src/scrapybara/core/client_wrapper.py b/src/scrapybara/core/client_wrapper.py
index 168b9e9..de8000b 100644
--- a/src/scrapybara/core/client_wrapper.py
+++ b/src/scrapybara/core/client_wrapper.py
@@ -16,7 +16,7 @@ def get_headers(self) -> typing.Dict[str, str]:
headers: typing.Dict[str, str] = {
"X-Fern-Language": "Python",
"X-Fern-SDK-Name": "scrapybara",
- "X-Fern-SDK-Version": "2.5.3",
+ "X-Fern-SDK-Version": "2.6.0-beta.1",
}
headers["x-api-key"] = self.api_key
return headers
diff --git a/src/scrapybara/instance/client.py b/src/scrapybara/instance/client.py
index e656524..a52128f 100644
--- a/src/scrapybara/instance/client.py
+++ b/src/scrapybara/instance/client.py
@@ -22,6 +22,8 @@
from ..types.upload_response import UploadResponse
from ..types.stop_instance_response import StopInstanceResponse
from ..types.get_instance_response import GetInstanceResponse
+from ..types.expose_port_response import ExposePortResponse
+from ..types.netlify_deploy_response import NetlifyDeployResponse
from ..core.client_wrapper import AsyncClientWrapper
# this is used as the default value for optional parameters
@@ -778,6 +780,147 @@ def resume(
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
+ def expose_port(
+ self, instance_id: str, *, port: int, request_options: typing.Optional[RequestOptions] = None
+ ) -> ExposePortResponse:
+ """
+ Expose a port on the instance with a public-facing URL.
+
+ This endpoint creates a temporary public URL that routes traffic to the specified port on the instance.
+
+ Parameters
+ ----------
+ instance_id : str
+
+ port : int
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ ExposePortResponse
+ Successful Response
+
+ Examples
+ --------
+ from scrapybara import Scrapybara
+
+ client = Scrapybara(
+ api_key="YOUR_API_KEY",
+ )
+ client.instance.expose_port(
+ instance_id="instance_id",
+ port=1,
+ )
+ """
+ _response = self._client_wrapper.httpx_client.request(
+ f"v1/instance/{jsonable_encoder(instance_id)}/expose_port",
+ method="POST",
+ json={
+ "port": port,
+ },
+ headers={
+ "content-type": "application/json",
+ },
+ request_options=request_options,
+ omit=OMIT,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return typing.cast(
+ ExposePortResponse,
+ parse_obj_as(
+ type_=ExposePortResponse, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(
+ typing.cast(
+ HttpValidationError,
+ parse_obj_as(
+ type_=HttpValidationError, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ def deploy_to_netlify(
+ self, instance_id: str, *, directory_path: str, request_options: typing.Optional[RequestOptions] = None
+ ) -> NetlifyDeployResponse:
+ """
+ Deploy a directory from the instance to Netlify.
+
+ Args:
+ directory_path: Path to the directory on the instance to deploy
+
+ Parameters
+ ----------
+ instance_id : str
+
+ directory_path : str
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ NetlifyDeployResponse
+ Successful Response
+
+ Examples
+ --------
+ from scrapybara import Scrapybara
+
+ client = Scrapybara(
+ api_key="YOUR_API_KEY",
+ )
+ client.instance.deploy_to_netlify(
+ instance_id="instance_id",
+ directory_path="directory_path",
+ )
+ """
+ _response = self._client_wrapper.httpx_client.request(
+ f"v1/instance/{jsonable_encoder(instance_id)}/deploy_to_netlify",
+ method="POST",
+ json={
+ "directory_path": directory_path,
+ },
+ headers={
+ "content-type": "application/json",
+ },
+ request_options=request_options,
+ omit=OMIT,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return typing.cast(
+ NetlifyDeployResponse,
+ parse_obj_as(
+ type_=NetlifyDeployResponse, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(
+ typing.cast(
+ HttpValidationError,
+ parse_obj_as(
+ type_=HttpValidationError, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
class AsyncInstanceClient:
def __init__(self, *, client_wrapper: AsyncClientWrapper):
@@ -1608,3 +1751,160 @@ async def main() -> None:
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ async def expose_port(
+ self, instance_id: str, *, port: int, request_options: typing.Optional[RequestOptions] = None
+ ) -> ExposePortResponse:
+ """
+ Expose a port on the instance with a public-facing URL.
+
+ This endpoint creates a temporary public URL that routes traffic to the specified port on the instance.
+
+ Parameters
+ ----------
+ instance_id : str
+
+ port : int
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ ExposePortResponse
+ Successful Response
+
+ Examples
+ --------
+ import asyncio
+
+ from scrapybara import AsyncScrapybara
+
+ client = AsyncScrapybara(
+ api_key="YOUR_API_KEY",
+ )
+
+
+ async def main() -> None:
+ await client.instance.expose_port(
+ instance_id="instance_id",
+ port=1,
+ )
+
+
+ asyncio.run(main())
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ f"v1/instance/{jsonable_encoder(instance_id)}/expose_port",
+ method="POST",
+ json={
+ "port": port,
+ },
+ headers={
+ "content-type": "application/json",
+ },
+ request_options=request_options,
+ omit=OMIT,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return typing.cast(
+ ExposePortResponse,
+ parse_obj_as(
+ type_=ExposePortResponse, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(
+ typing.cast(
+ HttpValidationError,
+ parse_obj_as(
+ type_=HttpValidationError, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ async def deploy_to_netlify(
+ self, instance_id: str, *, directory_path: str, request_options: typing.Optional[RequestOptions] = None
+ ) -> NetlifyDeployResponse:
+ """
+ Deploy a directory from the instance to Netlify.
+
+ Args:
+ directory_path: Path to the directory on the instance to deploy
+
+ Parameters
+ ----------
+ instance_id : str
+
+ directory_path : str
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ NetlifyDeployResponse
+ Successful Response
+
+ Examples
+ --------
+ import asyncio
+
+ from scrapybara import AsyncScrapybara
+
+ client = AsyncScrapybara(
+ api_key="YOUR_API_KEY",
+ )
+
+
+ async def main() -> None:
+ await client.instance.deploy_to_netlify(
+ instance_id="instance_id",
+ directory_path="directory_path",
+ )
+
+
+ asyncio.run(main())
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ f"v1/instance/{jsonable_encoder(instance_id)}/deploy_to_netlify",
+ method="POST",
+ json={
+ "directory_path": directory_path,
+ },
+ headers={
+ "content-type": "application/json",
+ },
+ request_options=request_options,
+ omit=OMIT,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return typing.cast(
+ NetlifyDeployResponse,
+ parse_obj_as(
+ type_=NetlifyDeployResponse, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(
+ typing.cast(
+ HttpValidationError,
+ parse_obj_as(
+ type_=HttpValidationError, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
diff --git a/src/scrapybara/types/__init__.py b/src/scrapybara/types/__init__.py
index 8961e8b..6eaec0b 100644
--- a/src/scrapybara/types/__init__.py
+++ b/src/scrapybara/types/__init__.py
@@ -4,17 +4,20 @@
from .browser_authenticate_response import BrowserAuthenticateResponse
from .browser_get_cdp_url_response import BrowserGetCdpUrlResponse
from .browser_get_current_url_response import BrowserGetCurrentUrlResponse
+from .browser_get_stream_url_response import BrowserGetStreamUrlResponse
from .button import Button
from .cell_type import CellType
from .click_mouse_action import ClickMouseAction
from .click_mouse_action_click_type import ClickMouseActionClickType
from .computer_response import ComputerResponse
+from .delete_browser_auth_response import DeleteBrowserAuthResponse
from .deployment_config_instance_type import DeploymentConfigInstanceType
from .drag_mouse_action import DragMouseAction
from .edit_response import EditResponse
from .env_get_response import EnvGetResponse
from .env_response import EnvResponse
from .execute_cell_request import ExecuteCellRequest
+from .expose_port_response import ExposePortResponse
from .file_response import FileResponse
from .upload_response import UploadResponse
from .get_cursor_position_action import GetCursorPositionAction
@@ -28,13 +31,16 @@
from .move_mouse_action import MoveMouseAction
+from .netlify_deploy_response import NetlifyDeployResponse
from .notebook import Notebook
from .notebook_cell import NotebookCell
from .press_key_action import PressKeyAction
from .save_browser_auth_response import SaveBrowserAuthResponse
from .scroll_action import ScrollAction
+from .snapshot_response import SnapshotResponse
from .start_browser_response import StartBrowserResponse
from .status import Status
from .stop_browser_response import StopBrowserResponse
from .stop_instance_response import StopInstanceResponse
+from .success_response import SuccessResponse
from .take_screenshot_action import TakeScreenshotAction
from .type_text_action import TypeTextAction
from .validation_error import ValidationError
@@ -80,18 +86,22 @@
"BrowserAuthenticateResponse",
"BrowserGetCdpUrlResponse",
"BrowserGetCurrentUrlResponse",
+ "BrowserGetStreamUrlResponse",
"Button",
"CellType",
"ClickMouseAction",
"ClickMouseActionClickType",
"ComputerResponse",
+ "DeleteBrowserAuthResponse",
"DeploymentConfigInstanceType",
"DragMouseAction",
"EditResponse",
"EnvGetResponse",
"EnvResponse",
"ExecuteCellRequest",
+ "ExposePortResponse",
"FileResponse",
+ "UploadResponse",
"GetCursorPositionAction",
"GetInstanceResponse",
"GetInstanceResponseInstanceType",
@@ -106,6 +116,7 @@
"MoveMouseAction",
"Notebook",
"NotebookCell",
+ "NetlifyDeployResponse",
"PressKeyAction",
"SaveBrowserAuthResponse",
"ScrollAction",
@@ -113,9 +124,11 @@
"SingleActResponse",
+ "SnapshotResponse",
"StartBrowserResponse",
"Status",
+ "SuccessResponse",
"Step",
"StopBrowserResponse",
"StopInstanceResponse",
"TakeScreenshotAction",
"TextPart",
"Tool",
diff --git a/src/scrapybara/types/browser_get_stream_url_response.py b/src/scrapybara/types/browser_get_stream_url_response.py
new file mode 100644
index 0000000..7b24ee4
--- /dev/null
+++ b/src/scrapybara/types/browser_get_stream_url_response.py
@@ -0,0 +1,19 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from ..core.pydantic_utilities import UniversalBaseModel
+from ..core.pydantic_utilities import IS_PYDANTIC_V2
+import typing
+import pydantic
+
+
+class BrowserGetStreamUrlResponse(UniversalBaseModel):
+ stream_url: str
+
+ if IS_PYDANTIC_V2:
+ model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
+ else:
+
+ class Config:
+ frozen = True
+ smart_union = True
+ extra = pydantic.Extra.allow
diff --git a/src/scrapybara/types/delete_browser_auth_response.py b/src/scrapybara/types/delete_browser_auth_response.py
new file mode 100644
index 0000000..2f28daa
--- /dev/null
+++ b/src/scrapybara/types/delete_browser_auth_response.py
@@ -0,0 +1,20 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from ..core.pydantic_utilities import UniversalBaseModel
+from ..core.pydantic_utilities import IS_PYDANTIC_V2
+import typing
+import pydantic
+
+
+class DeleteBrowserAuthResponse(UniversalBaseModel):
+ status: str
+ auth_state_id: str
+
+ if IS_PYDANTIC_V2:
+ model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
+ else:
+
+ class Config:
+ frozen = True
+ smart_union = True
+ extra = pydantic.Extra.allow
diff --git a/src/scrapybara/types/expose_port_response.py b/src/scrapybara/types/expose_port_response.py
new file mode 100644
index 0000000..9ffabda
--- /dev/null
+++ b/src/scrapybara/types/expose_port_response.py
@@ -0,0 +1,20 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from ..core.pydantic_utilities import UniversalBaseModel
+from ..core.pydantic_utilities import IS_PYDANTIC_V2
+import typing
+import pydantic
+
+
+class ExposePortResponse(UniversalBaseModel):
+ status: str
+ public_url: str
+
+ if IS_PYDANTIC_V2:
+ model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
+ else:
+
+ class Config:
+ frozen = True
+ smart_union = True
+ extra = pydantic.Extra.allow
diff --git a/src/scrapybara/types/netlify_deploy_response.py b/src/scrapybara/types/netlify_deploy_response.py
new file mode 100644
index 0000000..f4914f2
--- /dev/null
+++ b/src/scrapybara/types/netlify_deploy_response.py
@@ -0,0 +1,25 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from ..core.pydantic_utilities import UniversalBaseModel
+import typing
+from ..core.pydantic_utilities import IS_PYDANTIC_V2
+import pydantic
+
+
+class NetlifyDeployResponse(UniversalBaseModel):
+ """
+ Response model for netlify deployment.
+ """
+
+ output: typing.Optional[str] = None
+ error: typing.Optional[str] = None
+ site_url: typing.Optional[str] = None
+
+ if IS_PYDANTIC_V2:
+ model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
+ else:
+
+ class Config:
+ frozen = True
+ smart_union = True
+ extra = pydantic.Extra.allow
diff --git a/src/scrapybara/types/snapshot_response.py b/src/scrapybara/types/snapshot_response.py
new file mode 100644
index 0000000..7cea13e
--- /dev/null
+++ b/src/scrapybara/types/snapshot_response.py
@@ -0,0 +1,19 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from ..core.pydantic_utilities import UniversalBaseModel
+from ..core.pydantic_utilities import IS_PYDANTIC_V2
+import typing
+import pydantic
+
+
+class SnapshotResponse(UniversalBaseModel):
+ snapshot_id: str
+
+ if IS_PYDANTIC_V2:
+ model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
+ else:
+
+ class Config:
+ frozen = True
+ smart_union = True
+ extra = pydantic.Extra.allow
diff --git a/src/scrapybara/types/start_browser_response.py b/src/scrapybara/types/start_browser_response.py
index c17f656..e73b7e0 100644
--- a/src/scrapybara/types/start_browser_response.py
+++ b/src/scrapybara/types/start_browser_response.py
@@ -1,13 +1,14 @@
# This file was auto-generated by Fern from our API Definition.
from ..core.pydantic_utilities import UniversalBaseModel
-from ..core.pydantic_utilities import IS_PYDANTIC_V2
import typing
+from ..core.pydantic_utilities import IS_PYDANTIC_V2
import pydantic
class StartBrowserResponse(UniversalBaseModel):
cdp_url: str
+ stream_url: typing.Optional[str] = None
if IS_PYDANTIC_V2:
model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
diff --git a/src/scrapybara/types/success_response.py b/src/scrapybara/types/success_response.py
new file mode 100644
index 0000000..a8a5e10
--- /dev/null
+++ b/src/scrapybara/types/success_response.py
@@ -0,0 +1,20 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from ..core.pydantic_utilities import UniversalBaseModel
+from ..core.pydantic_utilities import IS_PYDANTIC_V2
+import typing
+import pydantic
+
+
+class SuccessResponse(UniversalBaseModel):
+ success: bool
+ message: str
+
+ if IS_PYDANTIC_V2:
+ model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
+ else:
+
+ class Config:
+ frozen = True
+ smart_union = True
+ extra = pydantic.Extra.allow
diff --git a/tests/custom/test_client.py b/tests/custom/test_client.py
index 4e76f56..902f89e 100644
--- a/tests/custom/test_client.py
+++ b/tests/custom/test_client.py
@@ -29,7 +29,7 @@ def _check_api_key() -> None:
if os.getenv("SCRAPYBARA_API_KEY") is None:
raise ValueError("SCRAPYBARA_API_KEY is not set")
-
+@pytest.mark.skip()
def test_ubuntu() -> None:
_check_api_key()
client = Scrapybara()
@@ -97,7 +97,7 @@ def test_ubuntu_openai() -> None:
ubuntu_instance.browser.stop()
ubuntu_instance.stop()
-
+@pytest.mark.skip()
def test_browser() -> None:
_check_api_key()
client = Scrapybara()
@@ -245,6 +245,7 @@ def test_browser_thinking() -> None:
browser_instance.stop()
+@pytest.mark.skip()
def test_upload_download() -> None:
_check_api_key()
client = Scrapybara()
@@ -283,13 +284,108 @@ def test_upload_download() -> None:
# Always stop the instance
ubuntu_instance.stop()
+@pytest.mark.skip()
+def test_beta_vm_management() -> None:
+ _check_api_key()
+ client = Scrapybara()
+
+ # Start a rodent instance
+ instance = client.start_ubuntu(backend="rodent")
+ assert instance.id is not None
+
+ try:
+ # Take a snapshot
+ snapshot_response = client.beta.take_snapshot(instance_id=instance.id)
+ assert snapshot_response is not None
+ assert snapshot_response.snapshot_id is not None
+ snapshot_id = snapshot_response.snapshot_id
+ print(f"Created snapshot with ID: {snapshot_id}")
+
+ # Warmup the snapshot
+ warmup_response = client.beta.warmup_snapshot(snapshot_id=snapshot_id)
+ assert warmup_response is not None
+ assert warmup_response.success is True
+
+ # Delete the snapshot
+ delete_response = client.beta.delete_snapshot(snapshot_id=snapshot_id)
+ assert delete_response is not None
+ assert delete_response.success is True
+
+ finally:
+ instance.stop()
+
+
+@pytest.mark.skip()
+def test_restore_from_snapshot() -> None:
+ _check_api_key()
+ client = Scrapybara()
+
+ # Start original instance
+ original_instance = client.start_ubuntu(backend="rodent")
+ assert original_instance.id is not None
+ print(f"Started original instance: {original_instance.id}")
+
+ snapshot_id = None
+ restored_instance = None
+
+ try:
+ # Create a file to verify restoration later
+ test_marker = f"test-marker-{uuid.uuid4()}"
+ original_instance.bash(command=f"echo '{test_marker}' > /tmp/snapshot-test-file")
+
+ # Take a snapshot
+ snapshot_response = client.beta.take_snapshot(instance_id=original_instance.id)
+ assert snapshot_response is not None
+ assert snapshot_response.snapshot_id is not None
+
+ snapshot_id = snapshot_response.snapshot_id
+ print(f"Created snapshot with ID: {snapshot_id}")
+
+ # Warmup the snapshot (optional but recommended)
+ client.beta.warmup_snapshot(snapshot_id=snapshot_id)
+
+ # Stop the original instance
+ original_instance.stop()
+
+ # Start a new instance from the snapshot
+ restored_instance = client.start_ubuntu(snapshot_id=snapshot_id, backend="rodent")
+ assert restored_instance.id is not None
+ print(f"Started restored instance: {restored_instance.id}")
+
+ # Verify the test file exists with our marker
+ file_content = restored_instance.bash(command="cat /tmp/snapshot-test-file")
+ assert test_marker in str(file_content)
+ print("Successfully verified snapshot restoration!")
+
+ finally:
+ # Clean up resources
+ if original_instance:
+ try:
+ original_instance.stop()
+ except Exception:
+ pass
+
+ if restored_instance:
+ try:
+ restored_instance.stop()
+ except Exception:
+ pass
+
+ if snapshot_id:
+ try:
+ client.beta.delete_snapshot(snapshot_id=snapshot_id)
+ except Exception:
+ pass
+
if __name__ == "__main__":
test_ubuntu()
test_browser()
# test_ubuntu_openai()
# test_browser_openai()
- test_upload_download()
+ # test_upload_download()
+ # test_beta_vm_management()
+ # test_restore_from_snapshot()
# test_ubuntu_thinking()
# test_browser_thinking()
# test_windows()