diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 21782ad1d8f1..3da4c3ee88d2 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,12 +1,8 @@ # CODEOWNERS file for OpenHands repository # See https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners -# Frontend code owners -/frontend/ @amanape -/openhands-ui/ @amanape - -# Evaluation code owners +/frontend/ @amanape @hieptl +/openhands-ui/ @amanape @hieptl +/openhands/ @tofarr @malhotra5 @hieptl +/enterprise/ @chuckbutkus @tofarr @malhotra5 /evaluation/ @xingyaoww @neubig - -# Documentation code owners -/docs/ @mamoodi diff --git a/.github/workflows/fe-e2e-tests.yml b/.github/workflows/fe-e2e-tests.yml new file mode 100644 index 000000000000..7ee79e63fc02 --- /dev/null +++ b/.github/workflows/fe-e2e-tests.yml @@ -0,0 +1,47 @@ +# Workflow that runs frontend e2e tests with Playwright +name: Run Frontend E2E Tests + +on: + push: + branches: + - main + pull_request: + paths: + - "frontend/**" + - ".github/workflows/fe-e2e-tests.yml" + +concurrency: + group: ${{ github.workflow }}-${{ (github.head_ref && github.ref) || github.run_id }} + cancel-in-progress: true + +jobs: + fe-e2e-test: + name: FE E2E Tests + runs-on: blacksmith-4vcpu-ubuntu-2204 + strategy: + matrix: + node-version: [22] + fail-fast: true + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Set up Node.js + uses: useblacksmith/setup-node@v5 + with: + node-version: ${{ matrix.node-version }} + - name: Install dependencies + working-directory: ./frontend + run: npm ci + - name: Install Playwright browsers + working-directory: ./frontend + run: npx playwright install --with-deps chromium + - name: Run Playwright tests + working-directory: ./frontend + run: npx playwright test --project=chromium + - name: Upload Playwright report + uses: actions/upload-artifact@v4 + if: always() + with: + name: playwright-report + path: frontend/playwright-report/ + retention-days: 30 diff --git a/.openhands/microagents/repo.md b/.openhands/microagents/repo.md index ceb87bc2f7c5..cd3ef330745a 100644 --- a/.openhands/microagents/repo.md +++ b/.openhands/microagents/repo.md @@ -63,7 +63,7 @@ Frontend: - We use TanStack Query (fka React Query) for data fetching and cache management - Data Access Layer: API client methods are located in `frontend/src/api` and should never be called directly from UI components - they must always be wrapped with TanStack Query - Custom hooks are located in `frontend/src/hooks/query/` and `frontend/src/hooks/mutation/` - - Query hooks should follow the pattern use[Resource] (e.g., `useConversationMicroagents`) + - Query hooks should follow the pattern use[Resource] (e.g., `useConversationSkills`) - Mutation hooks should follow the pattern use[Action] (e.g., `useDeleteConversation`) - Architecture rule: UI components → TanStack Query hooks → Data Access Layer (`frontend/src/api`) → API endpoints diff --git a/Development.md b/Development.md index bfa057efc1c7..421959a5ec83 100644 --- a/Development.md +++ b/Development.md @@ -161,7 +161,7 @@ poetry run pytest ./tests/unit/test_*.py To reduce build time (e.g., if no changes were made to the client-runtime component), you can use an existing Docker container image by setting the SANDBOX_RUNTIME_CONTAINER_IMAGE environment variable to the desired Docker image. 
-Example: `export SANDBOX_RUNTIME_CONTAINER_IMAGE=ghcr.io/openhands/runtime:0.62-nikolaik`
+Example: `export SANDBOX_RUNTIME_CONTAINER_IMAGE=ghcr.io/openhands/runtime:1.0-nikolaik`
 ## Develop inside Docker container
diff --git a/README.md b/README.md
index 89ba6ae98d34..3928ed32d9f4 100644
--- a/README.md
+++ b/README.md
@@ -1,14 +1,14 @@
- Logo
+ Logo

 OpenHands: AI-Driven Development

 MIT License
- Benchmark Score
+ Benchmark Score
Check out the documentation Tech Report @@ -35,9 +35,9 @@ There are a few ways to work with OpenHands: ### OpenHands Software Agent SDK The SDK is a composable Python library that contains all of our agentic tech. It's the engine that powers everything else below. -Define agents in code, then run them locally, or scale to 1000s of agents in the cloud +Define agents in code, then run them locally, or scale to 1000s of agents in the cloud. -[Check out the docs](https://docs.openhands.dev/sdk) or [view the source](https://github.com/All-Hands-AI/agent-sdk/) +[Check out the docs](https://docs.openhands.dev/sdk) or [view the source](https://github.com/OpenHands/software-agent-sdk/) ### OpenHands CLI The CLI is the easiest way to start using OpenHands. The experience will be familiar to anyone who has worked @@ -52,18 +52,15 @@ The experience will be familiar to anyone who has used Devin or Jules. [Check out the docs](https://docs.openhands.dev/openhands/usage/run-openhands/local-setup) or view the source in this repo. ### OpenHands Cloud -This is a commercial deployment of OpenHands GUI, running on hosted infrastructure. +This is a deployment of OpenHands GUI, running on hosted infrastructure. You can try it with a free $10 credit by [signing in with your GitHub account](https://app.all-hands.dev). OpenHands Cloud comes with source-available features and integrations: -- Deeper integrations with GitHub, GitLab, and Bitbucket - Integrations with Slack, Jira, and Linear - Multi-user support - RBAC and permissions - Collaboration features (e.g., conversation sharing) -- Usage reporting -- Budgeting enforcement ### OpenHands Enterprise Large enterprises can work with us to self-host OpenHands Cloud in their own VPC, via Kubernetes. diff --git a/containers/dev/compose.yml b/containers/dev/compose.yml index c6168b094f59..7ff504208136 100644 --- a/containers/dev/compose.yml +++ b/containers/dev/compose.yml @@ -12,7 +12,7 @@ services: - SANDBOX_API_HOSTNAME=host.docker.internal - DOCKER_HOST_ADDR=host.docker.internal # - - SANDBOX_RUNTIME_CONTAINER_IMAGE=${SANDBOX_RUNTIME_CONTAINER_IMAGE:-ghcr.io/openhands/runtime:0.62-nikolaik} + - SANDBOX_RUNTIME_CONTAINER_IMAGE=${SANDBOX_RUNTIME_CONTAINER_IMAGE:-ghcr.io/openhands/runtime:1.0-nikolaik} - SANDBOX_USER_ID=${SANDBOX_USER_ID:-1234} - WORKSPACE_MOUNT_PATH=${WORKSPACE_BASE:-$PWD/workspace} ports: diff --git a/docker-compose.yml b/docker-compose.yml index b66332462538..d4aef552c0dd 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -7,7 +7,7 @@ services: image: openhands:latest container_name: openhands-app-${DATE:-} environment: - - SANDBOX_RUNTIME_CONTAINER_IMAGE=${SANDBOX_RUNTIME_CONTAINER_IMAGE:-docker.openhands.dev/openhands/runtime:0.62-nikolaik} + - SANDBOX_RUNTIME_CONTAINER_IMAGE=${SANDBOX_RUNTIME_CONTAINER_IMAGE:-docker.openhands.dev/openhands/runtime:1.0-nikolaik} #- SANDBOX_USER_ID=${SANDBOX_USER_ID:-1234} # enable this only if you want a specific non-root sandbox user but you will have to manually adjust permissions of ~/.openhands for this user - WORKSPACE_MOUNT_PATH=${WORKSPACE_BASE:-$PWD/workspace} ports: diff --git a/enterprise/Dockerfile b/enterprise/Dockerfile index b0ca56a7f693..7b50748ffb1f 100644 --- a/enterprise/Dockerfile +++ b/enterprise/Dockerfile @@ -31,9 +31,8 @@ RUN pip install alembic psycopg2-binary cloud-sql-python-connector pg8000 gsprea "pillow>=11.3.0" WORKDIR /app -COPY enterprise . +COPY --chown=openhands:openhands --chmod=770 enterprise . 
-RUN chown -R openhands:openhands /app && chmod -R 770 /app USER openhands # Command will be overridden by Kubernetes deployment template diff --git a/enterprise/allhands-realm-github-provider.json.tmpl b/enterprise/allhands-realm-github-provider.json.tmpl index 6cdaa343835b..35ff5f0afc79 100644 --- a/enterprise/allhands-realm-github-provider.json.tmpl +++ b/enterprise/allhands-realm-github-provider.json.tmpl @@ -721,6 +721,7 @@ "https://$WEB_HOST/oauth/keycloak/callback", "https://$WEB_HOST/oauth/keycloak/offline/callback", "https://$WEB_HOST/slack/keycloak-callback", + "https://$WEB_HOST/oauth/device/keycloak-callback", "https://$WEB_HOST/api/email/verified", "/realms/$KEYCLOAK_REALM_NAME/$KEYCLOAK_CLIENT_ID/*" ], diff --git a/enterprise/enterprise_local/convert_to_env.py b/enterprise/enterprise_local/convert_to_env.py index 54e2c5d71b8f..cbd04b644938 100644 --- a/enterprise/enterprise_local/convert_to_env.py +++ b/enterprise/enterprise_local/convert_to_env.py @@ -116,7 +116,7 @@ def convert_yaml_to_env(yaml_file, target_parameters, output_env_file, prefix): lines.append('ENABLE_PROACTIVE_CONVERSATION_STARTERS=true') lines.append('MAX_CONCURRENT_CONVERSATIONS=10') lines.append('LITE_LLM_API_URL=https://llm-proxy.eval.all-hands.dev') -lines.append('LITELLM_DEFAULT_MODEL=litellm_proxy/claude-sonnet-4-20250514') +lines.append('LITELLM_DEFAULT_MODEL=litellm_proxy/claude-opus-4-5-20251101') lines.append(f'LITE_LLM_API_KEY={lite_llm_api_key}') lines.append('LOCAL_DEPLOYMENT=true') lines.append('DB_HOST=localhost') diff --git a/enterprise/integrations/github/github_manager.py b/enterprise/integrations/github/github_manager.py index 1d16dd40d7f3..00ad5124cebd 100644 --- a/enterprise/integrations/github/github_manager.py +++ b/enterprise/integrations/github/github_manager.py @@ -22,6 +22,7 @@ HOST_URL, OPENHANDS_RESOLVER_TEMPLATES_DIR, ) +from integrations.v1_utils import get_saas_user_auth from jinja2 import Environment, FileSystemLoader from pydantic import SecretStr from server.auth.constants import GITHUB_APP_CLIENT_ID, GITHUB_APP_PRIVATE_KEY @@ -164,8 +165,13 @@ async def receive_message(self, message: Message): ) if await self.is_job_requested(message): + payload = message.message.get('payload', {}) + user_id = payload['sender']['id'] + keycloak_user_id = await self.token_manager.get_user_id_from_idp_user_id( + user_id, ProviderType.GITHUB + ) github_view = await GithubFactory.create_github_view_from_payload( - message, self.token_manager + message, keycloak_user_id ) logger.info( f'[GitHub] Creating job for {github_view.user_info.username} in {github_view.full_repo_name}#{github_view.issue_number}' @@ -282,8 +288,15 @@ async def start_job(self, github_view: ResolverViewInterface): f'[Github]: Error summarizing issue solvability: {str(e)}' ) + saas_user_auth = await get_saas_user_auth( + github_view.user_info.keycloak_user_id, self.token_manager + ) + await github_view.create_new_conversation( - self.jinja_env, secret_store.provider_tokens, convo_metadata + self.jinja_env, + secret_store.provider_tokens, + convo_metadata, + saas_user_auth, ) conversation_id = github_view.conversation_id @@ -292,18 +305,19 @@ async def start_job(self, github_view: ResolverViewInterface): f'[GitHub] Created conversation {conversation_id} for user {user_info.username}' ) - # Create a GithubCallbackProcessor - processor = GithubCallbackProcessor( - github_view=github_view, - send_summary_instruction=True, - ) + if not github_view.v1: + # Create a GithubCallbackProcessor + processor = GithubCallbackProcessor( + 
github_view=github_view, + send_summary_instruction=True, + ) - # Register the callback processor - register_callback_processor(conversation_id, processor) + # Register the callback processor + register_callback_processor(conversation_id, processor) - logger.info( - f'[Github] Registered callback processor for conversation {conversation_id}' - ) + logger.info( + f'[Github] Registered callback processor for conversation {conversation_id}' + ) # Send message with conversation link conversation_link = CONVERSATION_URL.format(conversation_id) diff --git a/enterprise/integrations/github/github_view.py b/enterprise/integrations/github/github_view.py index 435dec8b3f60..a01457f88c1c 100644 --- a/enterprise/integrations/github/github_view.py +++ b/enterprise/integrations/github/github_view.py @@ -1,4 +1,5 @@ -from uuid import uuid4 +from dataclasses import dataclass +from uuid import UUID, uuid4 from github import Github, GithubIntegration from github.Issue import Issue @@ -8,16 +9,17 @@ WorkflowRunStatus, ) from integrations.models import Message +from integrations.resolver_context import ResolverUserContext from integrations.types import ResolverViewInterface, UserData from integrations.utils import ( ENABLE_PROACTIVE_CONVERSATION_STARTERS, + ENABLE_V1_GITHUB_RESOLVER, HOST, HOST_URL, get_oh_labels, has_exact_mention, ) from jinja2 import Environment -from pydantic.dataclasses import dataclass from server.auth.constants import GITHUB_APP_CLIENT_ID, GITHUB_APP_PRIVATE_KEY from server.auth.token_manager import TokenManager from server.config import get_config @@ -26,14 +28,24 @@ from storage.saas_secrets_store import SaasSecretsStore from storage.saas_settings_store import SaasSettingsStore +from openhands.agent_server.models import SendMessageRequest +from openhands.app_server.app_conversation.app_conversation_models import ( + AppConversationStartRequest, + AppConversationStartTaskStatus, +) +from openhands.app_server.config import get_app_conversation_service +from openhands.app_server.services.injector import InjectorState +from openhands.app_server.user.specifiy_user_context import USER_CONTEXT_ATTR from openhands.core.logger import openhands_logger as logger from openhands.integrations.github.github_service import GithubServiceImpl from openhands.integrations.provider import PROVIDER_TOKEN_TYPE, ProviderType from openhands.integrations.service_types import Comment +from openhands.sdk import TextContent from openhands.server.services.conversation_service import ( initialize_conversation, start_conversation, ) +from openhands.server.user_auth.user_auth import UserAuth from openhands.storage.data_models.conversation_metadata import ( ConversationMetadata, ConversationTrigger, @@ -76,6 +88,38 @@ async def get_user_proactive_conversation_setting(user_id: str | None) -> bool: return settings.enable_proactive_conversation_starters +async def get_user_v1_enabled_setting(user_id: str) -> bool: + """Get the user's V1 conversation API setting. + + Args: + user_id: The keycloak user ID + + Returns: + True if V1 conversations are enabled for this user, False otherwise + + Note: + This function checks both the global environment variable kill switch AND + the user's individual setting. Both must be true for the function to return true. 
+ """ + # Check the global environment variable first + if not ENABLE_V1_GITHUB_RESOLVER: + return False + + config = get_config() + settings_store = SaasSettingsStore( + user_id=user_id, session_maker=session_maker, config=config + ) + + settings = await call_sync_from_async( + settings_store.get_user_settings_by_keycloak_id, user_id + ) + + if not settings or settings.v1_enabled is None: + return False + + return settings.v1_enabled + + # ================================================= # SECTION: Github view types # ================================================= @@ -96,6 +140,7 @@ class GithubIssue(ResolverViewInterface): title: str description: str previous_comments: list[Comment] + v1: bool async def _load_resolver_context(self): github_service = GithubServiceImpl( @@ -142,6 +187,19 @@ async def _get_user_secrets(self): async def initialize_new_conversation(self) -> ConversationMetadata: # FIXME: Handle if initialize_conversation returns None + + v1_enabled = await get_user_v1_enabled_setting(self.user_info.keycloak_user_id) + logger.info( + f'[GitHub V1]: User flag found for {self.user_info.keycloak_user_id} is {v1_enabled}' + ) + if v1_enabled: + # Create dummy conversationm metadata + # Don't save to conversation store + # V1 conversations are stored in a separate table + return ConversationMetadata( + conversation_id=uuid4().hex, selected_repository=self.full_repo_name + ) + conversation_metadata: ConversationMetadata = await initialize_conversation( # type: ignore[assignment] user_id=self.user_info.keycloak_user_id, conversation_id=None, @@ -158,7 +216,36 @@ async def create_new_conversation( jinja_env: Environment, git_provider_tokens: PROVIDER_TOKEN_TYPE, conversation_metadata: ConversationMetadata, + saas_user_auth: UserAuth, + ): + v1_enabled = await get_user_v1_enabled_setting(self.user_info.keycloak_user_id) + logger.info( + f'[GitHub V1]: User flag found for {self.user_info.keycloak_user_id} is {v1_enabled}' + ) + if v1_enabled: + try: + # Use V1 app conversation service + await self._create_v1_conversation( + jinja_env, saas_user_auth, conversation_metadata + ) + return + + except Exception as e: + logger.warning(f'Error checking V1 settings, falling back to V0: {e}') + + # Use existing V0 conversation service + await self._create_v0_conversation( + jinja_env, git_provider_tokens, conversation_metadata + ) + + async def _create_v0_conversation( + self, + jinja_env: Environment, + git_provider_tokens: PROVIDER_TOKEN_TYPE, + conversation_metadata: ConversationMetadata, ): + """Create conversation using the legacy V0 system.""" + logger.info('[GitHub]: Creating V0 conversation') custom_secrets = await self._get_user_secrets() user_instructions, conversation_instructions = await self._get_instructions( @@ -177,6 +264,78 @@ async def create_new_conversation( conversation_instructions=conversation_instructions, ) + async def _create_v1_conversation( + self, + jinja_env: Environment, + saas_user_auth: UserAuth, + conversation_metadata: ConversationMetadata, + ): + """Create conversation using the new V1 app conversation system.""" + logger.info('[GitHub V1]: Creating V1 conversation') + + user_instructions, conversation_instructions = await self._get_instructions( + jinja_env + ) + + # Create the initial message request + initial_message = SendMessageRequest( + role='user', content=[TextContent(text=user_instructions)] + ) + + # Create the GitHub V1 callback processor + github_callback_processor = self._create_github_v1_callback_processor() + + # Get the app conversation 
service and start the conversation + injector_state = InjectorState() + + # Create the V1 conversation start request with the callback processor + start_request = AppConversationStartRequest( + conversation_id=UUID(conversation_metadata.conversation_id), + system_message_suffix=conversation_instructions, + initial_message=initial_message, + selected_repository=self.full_repo_name, + git_provider=ProviderType.GITHUB, + title=f'GitHub Issue #{self.issue_number}: {self.title}', + trigger=ConversationTrigger.RESOLVER, + processors=[ + github_callback_processor + ], # Pass the callback processor directly + ) + + # Set up the GitHub user context for the V1 system + github_user_context = ResolverUserContext(saas_user_auth=saas_user_auth) + setattr(injector_state, USER_CONTEXT_ATTR, github_user_context) + + async with get_app_conversation_service( + injector_state + ) as app_conversation_service: + async for task in app_conversation_service.start_app_conversation( + start_request + ): + if task.status == AppConversationStartTaskStatus.ERROR: + logger.error(f'Failed to start V1 conversation: {task.detail}') + raise RuntimeError( + f'Failed to start V1 conversation: {task.detail}' + ) + + self.v1 = True + + def _create_github_v1_callback_processor(self): + """Create a V1 callback processor for GitHub integration.""" + from openhands.app_server.event_callback.github_v1_callback_processor import ( + GithubV1CallbackProcessor, + ) + + # Create and return the GitHub V1 callback processor + return GithubV1CallbackProcessor( + github_view_data={ + 'issue_number': self.issue_number, + 'full_repo_name': self.full_repo_name, + 'installation_id': self.installation_id, + }, + send_summary_instruction=self.send_summary_instruction, + ) + @dataclass class GithubIssueComment(GithubIssue): @@ -232,7 +391,18 @@ async def _get_instructions(self, jinja_env: Environment) -> tuple[str, str]: return user_instructions, conversation_instructions async def initialize_new_conversation(self) -> ConversationMetadata: - # FIXME: Handle if initialize_conversation returns None + v1_enabled = await get_user_v1_enabled_setting(self.user_info.keycloak_user_id) + logger.info( + f'[GitHub V1]: User flag found for {self.user_info.keycloak_user_id} is {v1_enabled}' + ) + if v1_enabled: + # Create dummy conversationm metadata + # Don't save to conversation store + # V1 conversations are stored in a separate table + return ConversationMetadata( + conversation_id=uuid4().hex, selected_repository=self.full_repo_name + ) + conversation_metadata: ConversationMetadata = await initialize_conversation( # type: ignore[assignment] user_id=self.user_info.keycloak_user_id, conversation_id=None, @@ -292,6 +462,24 @@ async def _get_instructions(self, jinja_env: Environment) -> tuple[str, str]: return user_instructions, conversation_instructions + def _create_github_v1_callback_processor(self): + """Create a V1 callback processor for GitHub integration.""" + from openhands.app_server.event_callback.github_v1_callback_processor import ( + GithubV1CallbackProcessor, + ) + + # Create and return the GitHub V1 callback processor + return GithubV1CallbackProcessor( + github_view_data={ + 'issue_number': self.issue_number, + 'full_repo_name': self.full_repo_name, + 'installation_id': self.installation_id, + 'comment_id': self.comment_id, + }, + inline_pr_comment=True, + send_summary_instruction=self.send_summary_instruction, + ) + @dataclass class GithubFailingAction: @@ -605,7 +793,7 @@ def get_full_repo_name(repo_obj: dict) -> str: @staticmethod async def 
create_github_view_from_payload( - message: Message, token_manager: TokenManager + message: Message, keycloak_user_id: str ) -> ResolverViewInterface: """Create the appropriate class (GithubIssue or GithubPRComment) based on the payload. Also return metadata about the event (e.g., action type). @@ -615,17 +803,10 @@ async def create_github_view_from_payload( user_id = payload['sender']['id'] username = payload['sender']['login'] - keyloak_user_id = await token_manager.get_user_id_from_idp_user_id( - user_id, ProviderType.GITHUB - ) - - if keyloak_user_id is None: - logger.warning(f'Got invalid keyloak user id for GitHub User {user_id} ') - selected_repo = GithubFactory.get_full_repo_name(repo_obj) is_public_repo = not repo_obj.get('private', True) user_info = UserData( - user_id=user_id, username=username, keycloak_user_id=keyloak_user_id + user_id=user_id, username=username, keycloak_user_id=keycloak_user_id ) installation_id = message.message['installation'] @@ -649,6 +830,7 @@ async def create_github_view_from_payload( title='', description='', previous_comments=[], + v1=False, ) elif GithubFactory.is_issue_comment(message): @@ -674,6 +856,7 @@ async def create_github_view_from_payload( title='', description='', previous_comments=[], + v1=False, ) elif GithubFactory.is_pr_comment(message): @@ -715,6 +898,7 @@ async def create_github_view_from_payload( title='', description='', previous_comments=[], + v1=False, ) elif GithubFactory.is_inline_pr_comment(message): @@ -748,6 +932,7 @@ async def create_github_view_from_payload( title='', description='', previous_comments=[], + v1=False, ) else: diff --git a/enterprise/integrations/resolver_context.py b/enterprise/integrations/resolver_context.py new file mode 100644 index 000000000000..5829122e6f9d --- /dev/null +++ b/enterprise/integrations/resolver_context.py @@ -0,0 +1,63 @@ +from openhands.app_server.user.user_context import UserContext +from openhands.app_server.user.user_models import UserInfo +from openhands.integrations.provider import PROVIDER_TOKEN_TYPE +from openhands.integrations.service_types import ProviderType +from openhands.sdk.secret import SecretSource, StaticSecret +from openhands.server.user_auth.user_auth import UserAuth + + +class ResolverUserContext(UserContext): + """User context for resolver operations that inherits from UserContext.""" + + def __init__( + self, + saas_user_auth: UserAuth, + ): + self.saas_user_auth = saas_user_auth + + async def get_user_id(self) -> str | None: + return await self.saas_user_auth.get_user_id() + + async def get_user_info(self) -> UserInfo: + user_settings = await self.saas_user_auth.get_user_settings() + user_id = await self.saas_user_auth.get_user_id() + if user_settings: + return UserInfo( + id=user_id, + **user_settings.model_dump(context={'expose_secrets': True}), + ) + + return UserInfo(id=user_id) + + async def get_authenticated_git_url(self, repository: str) -> str: + # This would need to be implemented based on the git provider tokens + # For now, return a basic HTTPS URL + return f'https://github.com/{repository}.git' + + async def get_latest_token(self, provider_type: ProviderType) -> str | None: + # Return the appropriate token from git_provider_tokens + + provider_tokens = await self.saas_user_auth.get_provider_tokens() + if provider_tokens: + return provider_tokens.get(provider_type) + return None + + async def get_provider_tokens(self) -> PROVIDER_TOKEN_TYPE | None: + return await self.saas_user_auth.get_provider_tokens() + + async def get_secrets(self) -> dict[str, 
SecretSource]: + """Get secrets for the user, including custom secrets.""" + secrets = await self.saas_user_auth.get_secrets() + if secrets: + # Convert custom secrets to StaticSecret objects for SDK compatibility + # secrets.custom_secrets is of type Mapping[str, CustomSecret] + converted_secrets = {} + for key, custom_secret in secrets.custom_secrets.items(): + # Extract the secret value from CustomSecret and convert to StaticSecret + secret_value = custom_secret.secret.get_secret_value() + converted_secrets[key] = StaticSecret(value=secret_value) + return converted_secrets + return {} + + async def get_mcp_api_key(self) -> str | None: + return await self.saas_user_auth.get_mcp_api_key() diff --git a/enterprise/integrations/types.py b/enterprise/integrations/types.py index dcbcc9b7d3ce..0b8d79228c18 100644 --- a/enterprise/integrations/types.py +++ b/enterprise/integrations/types.py @@ -19,7 +19,7 @@ class PRStatus(Enum): class UserData(BaseModel): user_id: int username: str - keycloak_user_id: str | None + keycloak_user_id: str @dataclass diff --git a/enterprise/integrations/utils.py b/enterprise/integrations/utils.py index ffe4f81360c2..a4721d9ab2c8 100644 --- a/enterprise/integrations/utils.py +++ b/enterprise/integrations/utils.py @@ -51,6 +51,11 @@ os.getenv('ENABLE_SOLVABILITY_ANALYSIS', 'false').lower() == 'true' ) +# Toggle for V1 GitHub resolver feature +ENABLE_V1_GITHUB_RESOLVER = ( + os.getenv('ENABLE_V1_GITHUB_RESOLVER', 'false').lower() == 'true' +) + OPENHANDS_RESOLVER_TEMPLATES_DIR = 'openhands/integrations/templates/resolver/' jinja_env = Environment(loader=FileSystemLoader(OPENHANDS_RESOLVER_TEMPLATES_DIR)) diff --git a/enterprise/integrations/v1_utils.py b/enterprise/integrations/v1_utils.py new file mode 100644 index 000000000000..78953e4e93dc --- /dev/null +++ b/enterprise/integrations/v1_utils.py @@ -0,0 +1,20 @@ +from pydantic import SecretStr +from server.auth.saas_user_auth import SaasUserAuth +from server.auth.token_manager import TokenManager + +from openhands.core.logger import openhands_logger as logger +from openhands.server.user_auth.user_auth import UserAuth + + +async def get_saas_user_auth( + keycloak_user_id: str, token_manager: TokenManager +) -> UserAuth: + offline_token = await token_manager.load_offline_token(keycloak_user_id) + if offline_token is None: + logger.info('no_offline_token_found') + + user_auth = SaasUserAuth( + user_id=keycloak_user_id, + refresh_token=SecretStr(offline_token), + ) + return user_auth diff --git a/enterprise/migrations/versions/083_add_v1_enabled_to_user_settings.py b/enterprise/migrations/versions/083_add_v1_enabled_to_user_settings.py new file mode 100644 index 000000000000..33fdb470af9e --- /dev/null +++ b/enterprise/migrations/versions/083_add_v1_enabled_to_user_settings.py @@ -0,0 +1,35 @@ +"""Add v1_enabled column to user_settings + +Revision ID: 083 +Revises: 082 +Create Date: 2025-11-18 00:00:00.000000 + +""" + +from typing import Sequence, Union + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. 
+revision: str = '083' +down_revision: Union[str, None] = '082' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Add v1_enabled column to user_settings table.""" + op.add_column( + 'user_settings', + sa.Column( + 'v1_enabled', + sa.Boolean(), + nullable=True, + ), + ) + + +def downgrade() -> None: + """Remove v1_enabled column from user_settings table.""" + op.drop_column('user_settings', 'v1_enabled') diff --git a/enterprise/migrations/versions/084_create_device_codes_table.py b/enterprise/migrations/versions/084_create_device_codes_table.py new file mode 100644 index 000000000000..0898e09ef58b --- /dev/null +++ b/enterprise/migrations/versions/084_create_device_codes_table.py @@ -0,0 +1,49 @@ +"""Create device_codes table for OAuth 2.0 Device Flow + +Revision ID: 084 +Revises: 083 +Create Date: 2024-12-10 12:00:00.000000 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = '084' +down_revision = '083' +branch_labels = None +depends_on = None + + +def upgrade(): + """Create device_codes table for OAuth 2.0 Device Flow.""" + op.create_table( + 'device_codes', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('device_code', sa.String(length=128), nullable=False), + sa.Column('user_code', sa.String(length=16), nullable=False), + sa.Column('status', sa.String(length=32), nullable=False), + sa.Column('keycloak_user_id', sa.String(length=255), nullable=True), + sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('authorized_at', sa.DateTime(timezone=True), nullable=True), + # Rate limiting fields for RFC 8628 section 3.5 compliance + sa.Column('last_poll_time', sa.DateTime(timezone=True), nullable=True), + sa.Column('current_interval', sa.Integer(), nullable=False, default=5), + sa.PrimaryKeyConstraint('id'), + ) + + # Create indexes for efficient lookups + op.create_index( + 'ix_device_codes_device_code', 'device_codes', ['device_code'], unique=True + ) + op.create_index( + 'ix_device_codes_user_code', 'device_codes', ['user_code'], unique=True + ) + + +def downgrade(): + """Drop device_codes table.""" + op.drop_index('ix_device_codes_user_code', table_name='device_codes') + op.drop_index('ix_device_codes_device_code', table_name='device_codes') + op.drop_table('device_codes') diff --git a/enterprise/poetry.lock b/enterprise/poetry.lock index c359147ba673..bd2c55c31750 100644 --- a/enterprise/poetry.lock +++ b/enterprise/poetry.lock @@ -201,14 +201,14 @@ files = [ [[package]] name = "anthropic" -version = "0.72.0" +version = "0.75.0" description = "The official Python library for the anthropic API" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "anthropic-0.72.0-py3-none-any.whl", hash = "sha256:0e9f5a7582f038cab8efbb4c959e49ef654a56bfc7ba2da51b5a7b8a84de2e4d"}, - {file = "anthropic-0.72.0.tar.gz", hash = "sha256:8971fe76dcffc644f74ac3883069beb1527641115ae0d6eb8fa21c1ce4082f7a"}, + {file = "anthropic-0.75.0-py3-none-any.whl", hash = "sha256:ea8317271b6c15d80225a9f3c670152746e88805a7a61e14d4a374577164965b"}, + {file = "anthropic-0.75.0.tar.gz", hash = "sha256:e8607422f4ab616db2ea5baacc215dd5f028da99ce2f022e33c7c535b29f3dfb"}, ] [package.dependencies] @@ -682,37 +682,37 @@ crt = ["awscrt (==0.27.6)"] [[package]] name = "browser-use" -version = "0.9.5" +version = "0.10.1" description = "Make websites accessible for AI agents" 
optional = false python-versions = "<4.0,>=3.11" groups = ["main"] files = [ - {file = "browser_use-0.9.5-py3-none-any.whl", hash = "sha256:4a2e92847204d1ded269026a99cb0cc0e60e38bd2751fa3f58aedd78f00b4e67"}, - {file = "browser_use-0.9.5.tar.gz", hash = "sha256:f8285fe253b149d01769a7084883b4cf4db351e2f38e26302c157bcbf14a703f"}, + {file = "browser_use-0.10.1-py3-none-any.whl", hash = "sha256:96e603bfc71098175342cdcb0592519e6f244412e740f0254e4389fdd82a977f"}, + {file = "browser_use-0.10.1.tar.gz", hash = "sha256:5f211ecfdf1f9fd186160f10df70dedd661821231e30f1bce40939787abab223"}, ] [package.dependencies] aiohttp = "3.12.15" -anthropic = ">=0.68.1,<1.0.0" +anthropic = ">=0.72.1,<1.0.0" anyio = ">=4.9.0" authlib = ">=1.6.0" bubus = ">=1.5.6" -cdp-use = ">=1.4.0" +cdp-use = ">=1.4.4" click = ">=8.1.8" cloudpickle = ">=3.1.1" google-api-core = ">=2.25.0" google-api-python-client = ">=2.174.0" google-auth = ">=2.40.3" google-auth-oauthlib = ">=1.2.2" -google-genai = ">=1.29.0,<2.0.0" +google-genai = ">=1.50.0,<2.0.0" groq = ">=0.30.0" httpx = ">=0.28.1" inquirerpy = ">=0.3.4" markdownify = ">=1.2.0" mcp = ">=1.10.1" ollama = ">=0.5.1" -openai = ">=1.99.2,<2.0.0" +openai = ">=2.7.2,<3.0.0" pillow = ">=11.2.1" portalocker = ">=2.7.0,<3.0.0" posthog = ">=3.7.0" @@ -721,6 +721,7 @@ pydantic = ">=2.11.5" pyobjc = {version = ">=11.0", markers = "platform_system == \"darwin\""} pyotp = ">=2.9.0" pypdf = ">=5.7.0" +python-docx = ">=1.2.0" python-dotenv = ">=1.0.1" reportlab = ">=4.0.0" requests = ">=2.32.3" @@ -850,14 +851,14 @@ files = [ [[package]] name = "cdp-use" -version = "1.4.3" +version = "1.4.4" description = "Type safe generator/client library for CDP" optional = false python-versions = ">=3.11" groups = ["main"] files = [ - {file = "cdp_use-1.4.3-py3-none-any.whl", hash = "sha256:c48664604470c2579aa1e677c3e3e7e24c4f300c54804c093d935abb50479ecd"}, - {file = "cdp_use-1.4.3.tar.gz", hash = "sha256:9029c04bdc49fbd3939d2bf1988ad8d88e260729c7d5e35c2f6c87591f5a10e9"}, + {file = "cdp_use-1.4.4-py3-none-any.whl", hash = "sha256:e37e80e067db2653d6fdf953d4ff9e5d80d75daa27b7c6d48c0261cccbef73e1"}, + {file = "cdp_use-1.4.4.tar.gz", hash = "sha256:330a848b517006eb9ad1dc468aa6434d913cf0c6918610760c36c3fdfdba0fab"}, ] [package.dependencies] @@ -2978,28 +2979,29 @@ testing = ["pytest"] [[package]] name = "google-genai" -version = "1.32.0" +version = "1.53.0" description = "GenAI Python SDK" optional = false -python-versions = ">=3.9" +python-versions = ">=3.10" groups = ["main"] files = [ - {file = "google_genai-1.32.0-py3-none-any.whl", hash = "sha256:c0c4b1d45adf3aa99501050dd73da2f0dea09374002231052d81a6765d15e7f6"}, - {file = "google_genai-1.32.0.tar.gz", hash = "sha256:349da3f5ff0e981066bd508585fcdd308d28fc4646f318c8f6d1aa6041f4c7e3"}, + {file = "google_genai-1.53.0-py3-none-any.whl", hash = "sha256:65a3f99e5c03c372d872cda7419f5940e723374bb12a2f3ffd5e3e56e8eb2094"}, + {file = "google_genai-1.53.0.tar.gz", hash = "sha256:938a26d22f3fd32c6eeeb4276ef204ef82884e63af9842ce3eac05ceb39cbd8d"}, ] [package.dependencies] anyio = ">=4.8.0,<5.0.0" -google-auth = ">=2.14.1,<3.0.0" +google-auth = {version = ">=2.14.1,<3.0.0", extras = ["requests"]} httpx = ">=0.28.1,<1.0.0" -pydantic = ">=2.0.0,<3.0.0" +pydantic = ">=2.9.0,<3.0.0" requests = ">=2.28.1,<3.0.0" tenacity = ">=8.2.3,<9.2.0" typing-extensions = ">=4.11.0,<5.0.0" websockets = ">=13.0.0,<15.1.0" [package.extras] -aiohttp = ["aiohttp (<4.0.0)"] +aiohttp = ["aiohttp (<3.13.3)"] +local-tokenizer = ["protobuf", "sentencepiece (>=0.2.0)"] [[package]] name = 
"google-resumable-media" @@ -3055,6 +3057,8 @@ files = [ {file = "greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d"}, {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5"}, {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f"}, + {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f47617f698838ba98f4ff4189aef02e7343952df3a615f847bb575c3feb177a7"}, + {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af41be48a4f60429d5cad9d22175217805098a9ef7c40bfef44f7669fb9d74d8"}, {file = "greenlet-3.2.4-cp310-cp310-win_amd64.whl", hash = "sha256:73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c"}, {file = "greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2"}, {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246"}, @@ -3064,6 +3068,8 @@ files = [ {file = "greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8"}, {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52"}, {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa"}, + {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c"}, + {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5"}, {file = "greenlet-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9"}, {file = "greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd"}, {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb"}, @@ -3073,6 +3079,8 @@ files = [ {file = "greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0"}, {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0"}, {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f"}, + {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0"}, + {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d"}, {file = "greenlet-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02"}, {file = "greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = 
"sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31"}, {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945"}, @@ -3082,6 +3090,8 @@ files = [ {file = "greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671"}, {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b"}, {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae"}, + {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b"}, + {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929"}, {file = "greenlet-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b"}, {file = "greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0"}, {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f"}, @@ -3089,6 +3099,8 @@ files = [ {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1"}, {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735"}, {file = "greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337"}, + {file = "greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269"}, + {file = "greenlet-3.2.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:015d48959d4add5d6c9f6c5210ee3803a830dce46356e3bc326d6776bde54681"}, {file = "greenlet-3.2.4-cp314-cp314-win_amd64.whl", hash = "sha256:e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01"}, {file = "greenlet-3.2.4-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:b6a7c19cf0d2742d0809a4c05975db036fdff50cd294a93632d6a310bf9ac02c"}, {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:27890167f55d2387576d1f41d9487ef171849ea0359ce1510ca6e06c8bece11d"}, @@ -3098,6 +3110,8 @@ files = [ {file = "greenlet-3.2.4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9913f1a30e4526f432991f89ae263459b1c64d1608c0d22a5c79c287b3c70df"}, {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b90654e092f928f110e0007f572007c9727b5265f7632c2fa7415b4689351594"}, {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:81701fd84f26330f0d5f4944d4e92e61afe6319dcd9775e39396e39d7c3e5f98"}, + {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:28a3c6b7cd72a96f61b0e4b2a36f681025b60ae4779cc73c1535eb5f29560b10"}, + {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:52206cd642670b0b320a1fd1cbfd95bca0e043179c1d8a045f2c6109dfe973be"}, 
{file = "greenlet-3.2.4-cp39-cp39-win32.whl", hash = "sha256:65458b409c1ed459ea899e939f0e1cdb14f58dbc803f2f93c5eab5694d32671b"}, {file = "greenlet-3.2.4-cp39-cp39-win_amd64.whl", hash = "sha256:d2e685ade4dafd447ede19c31277a224a239a0a1a4eca4e6390efedf20260cfb"}, {file = "greenlet-3.2.4.tar.gz", hash = "sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d"}, @@ -3166,83 +3180,87 @@ protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4 [[package]] name = "grpcio" -version = "1.74.0" +version = "1.67.1" description = "HTTP/2-based RPC framework" optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" groups = ["main"] files = [ - {file = "grpcio-1.74.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:85bd5cdf4ed7b2d6438871adf6afff9af7096486fcf51818a81b77ef4dd30907"}, - {file = "grpcio-1.74.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:68c8ebcca945efff9d86d8d6d7bfb0841cf0071024417e2d7f45c5e46b5b08eb"}, - {file = "grpcio-1.74.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:e154d230dc1bbbd78ad2fdc3039fa50ad7ffcf438e4eb2fa30bce223a70c7486"}, - {file = "grpcio-1.74.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8978003816c7b9eabe217f88c78bc26adc8f9304bf6a594b02e5a49b2ef9c11"}, - {file = "grpcio-1.74.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3d7bd6e3929fd2ea7fbc3f562e4987229ead70c9ae5f01501a46701e08f1ad9"}, - {file = "grpcio-1.74.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:136b53c91ac1d02c8c24201bfdeb56f8b3ac3278668cbb8e0ba49c88069e1bdc"}, - {file = "grpcio-1.74.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fe0f540750a13fd8e5da4b3eaba91a785eea8dca5ccd2bc2ffe978caa403090e"}, - {file = "grpcio-1.74.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4e4181bfc24413d1e3a37a0b7889bea68d973d4b45dd2bc68bb766c140718f82"}, - {file = "grpcio-1.74.0-cp310-cp310-win32.whl", hash = "sha256:1733969040989f7acc3d94c22f55b4a9501a30f6aaacdbccfaba0a3ffb255ab7"}, - {file = "grpcio-1.74.0-cp310-cp310-win_amd64.whl", hash = "sha256:9e912d3c993a29df6c627459af58975b2e5c897d93287939b9d5065f000249b5"}, - {file = "grpcio-1.74.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:69e1a8180868a2576f02356565f16635b99088da7df3d45aaa7e24e73a054e31"}, - {file = "grpcio-1.74.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8efe72fde5500f47aca1ef59495cb59c885afe04ac89dd11d810f2de87d935d4"}, - {file = "grpcio-1.74.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a8f0302f9ac4e9923f98d8e243939a6fb627cd048f5cd38595c97e38020dffce"}, - {file = "grpcio-1.74.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f609a39f62a6f6f05c7512746798282546358a37ea93c1fcbadf8b2fed162e3"}, - {file = "grpcio-1.74.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c98e0b7434a7fa4e3e63f250456eaef52499fba5ae661c58cc5b5477d11e7182"}, - {file = "grpcio-1.74.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:662456c4513e298db6d7bd9c3b8df6f75f8752f0ba01fb653e252ed4a59b5a5d"}, - {file = "grpcio-1.74.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3d14e3c4d65e19d8430a4e28ceb71ace4728776fd6c3ce34016947474479683f"}, - {file = "grpcio-1.74.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bf949792cee20d2078323a9b02bacbbae002b9e3b9e2433f2741c15bdeba1c4"}, - {file = "grpcio-1.74.0-cp311-cp311-win32.whl", hash = "sha256:55b453812fa7c7ce2f5c88be3018fb4a490519b6ce80788d5913f3f9d7da8c7b"}, - {file = 
"grpcio-1.74.0-cp311-cp311-win_amd64.whl", hash = "sha256:86ad489db097141a907c559988c29718719aa3e13370d40e20506f11b4de0d11"}, - {file = "grpcio-1.74.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:8533e6e9c5bd630ca98062e3a1326249e6ada07d05acf191a77bc33f8948f3d8"}, - {file = "grpcio-1.74.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:2918948864fec2a11721d91568effffbe0a02b23ecd57f281391d986847982f6"}, - {file = "grpcio-1.74.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:60d2d48b0580e70d2e1954d0d19fa3c2e60dd7cbed826aca104fff518310d1c5"}, - {file = "grpcio-1.74.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3601274bc0523f6dc07666c0e01682c94472402ac2fd1226fd96e079863bfa49"}, - {file = "grpcio-1.74.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:176d60a5168d7948539def20b2a3adcce67d72454d9ae05969a2e73f3a0feee7"}, - {file = "grpcio-1.74.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e759f9e8bc908aaae0412642afe5416c9f983a80499448fcc7fab8692ae044c3"}, - {file = "grpcio-1.74.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9e7c4389771855a92934b2846bd807fc25a3dfa820fd912fe6bd8136026b2707"}, - {file = "grpcio-1.74.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cce634b10aeab37010449124814b05a62fb5f18928ca878f1bf4750d1f0c815b"}, - {file = "grpcio-1.74.0-cp312-cp312-win32.whl", hash = "sha256:885912559974df35d92219e2dc98f51a16a48395f37b92865ad45186f294096c"}, - {file = "grpcio-1.74.0-cp312-cp312-win_amd64.whl", hash = "sha256:42f8fee287427b94be63d916c90399ed310ed10aadbf9e2e5538b3e497d269bc"}, - {file = "grpcio-1.74.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:2bc2d7d8d184e2362b53905cb1708c84cb16354771c04b490485fa07ce3a1d89"}, - {file = "grpcio-1.74.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:c14e803037e572c177ba54a3e090d6eb12efd795d49327c5ee2b3bddb836bf01"}, - {file = "grpcio-1.74.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f6ec94f0e50eb8fa1744a731088b966427575e40c2944a980049798b127a687e"}, - {file = "grpcio-1.74.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:566b9395b90cc3d0d0c6404bc8572c7c18786ede549cdb540ae27b58afe0fb91"}, - {file = "grpcio-1.74.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1ea6176d7dfd5b941ea01c2ec34de9531ba494d541fe2057c904e601879f249"}, - {file = "grpcio-1.74.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:64229c1e9cea079420527fa8ac45d80fc1e8d3f94deaa35643c381fa8d98f362"}, - {file = "grpcio-1.74.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:0f87bddd6e27fc776aacf7ebfec367b6d49cad0455123951e4488ea99d9b9b8f"}, - {file = "grpcio-1.74.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3b03d8f2a07f0fea8c8f74deb59f8352b770e3900d143b3d1475effcb08eec20"}, - {file = "grpcio-1.74.0-cp313-cp313-win32.whl", hash = "sha256:b6a73b2ba83e663b2480a90b82fdae6a7aa6427f62bf43b29912c0cfd1aa2bfa"}, - {file = "grpcio-1.74.0-cp313-cp313-win_amd64.whl", hash = "sha256:fd3c71aeee838299c5887230b8a1822795325ddfea635edd82954c1eaa831e24"}, - {file = "grpcio-1.74.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:4bc5fca10aaf74779081e16c2bcc3d5ec643ffd528d9e7b1c9039000ead73bae"}, - {file = "grpcio-1.74.0-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:6bab67d15ad617aff094c382c882e0177637da73cbc5532d52c07b4ee887a87b"}, - {file = "grpcio-1.74.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:655726919b75ab3c34cdad39da5c530ac6fa32696fb23119e36b64adcfca174a"}, - {file = 
"grpcio-1.74.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a2b06afe2e50ebfd46247ac3ba60cac523f54ec7792ae9ba6073c12daf26f0a"}, - {file = "grpcio-1.74.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f251c355167b2360537cf17bea2cf0197995e551ab9da6a0a59b3da5e8704f9"}, - {file = "grpcio-1.74.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8f7b5882fb50632ab1e48cb3122d6df55b9afabc265582808036b6e51b9fd6b7"}, - {file = "grpcio-1.74.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:834988b6c34515545b3edd13e902c1acdd9f2465d386ea5143fb558f153a7176"}, - {file = "grpcio-1.74.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:22b834cef33429ca6cc28303c9c327ba9a3fafecbf62fae17e9a7b7163cc43ac"}, - {file = "grpcio-1.74.0-cp39-cp39-win32.whl", hash = "sha256:7d95d71ff35291bab3f1c52f52f474c632db26ea12700c2ff0ea0532cb0b5854"}, - {file = "grpcio-1.74.0-cp39-cp39-win_amd64.whl", hash = "sha256:ecde9ab49f58433abe02f9ed076c7b5be839cf0153883a6d23995937a82392fa"}, - {file = "grpcio-1.74.0.tar.gz", hash = "sha256:80d1f4fbb35b0742d3e3d3bb654b7381cd5f015f8497279a1e9c21ba623e01b1"}, -] - -[package.extras] -protobuf = ["grpcio-tools (>=1.74.0)"] + {file = "grpcio-1.67.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:8b0341d66a57f8a3119b77ab32207072be60c9bf79760fa609c5609f2deb1f3f"}, + {file = "grpcio-1.67.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:f5a27dddefe0e2357d3e617b9079b4bfdc91341a91565111a21ed6ebbc51b22d"}, + {file = "grpcio-1.67.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:43112046864317498a33bdc4797ae6a268c36345a910de9b9c17159d8346602f"}, + {file = "grpcio-1.67.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9b929f13677b10f63124c1a410994a401cdd85214ad83ab67cc077fc7e480f0"}, + {file = "grpcio-1.67.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7d1797a8a3845437d327145959a2c0c47c05947c9eef5ff1a4c80e499dcc6fa"}, + {file = "grpcio-1.67.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0489063974d1452436139501bf6b180f63d4977223ee87488fe36858c5725292"}, + {file = "grpcio-1.67.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9fd042de4a82e3e7aca44008ee2fb5da01b3e5adb316348c21980f7f58adc311"}, + {file = "grpcio-1.67.1-cp310-cp310-win32.whl", hash = "sha256:638354e698fd0c6c76b04540a850bf1db27b4d2515a19fcd5cf645c48d3eb1ed"}, + {file = "grpcio-1.67.1-cp310-cp310-win_amd64.whl", hash = "sha256:608d87d1bdabf9e2868b12338cd38a79969eaf920c89d698ead08f48de9c0f9e"}, + {file = "grpcio-1.67.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:7818c0454027ae3384235a65210bbf5464bd715450e30a3d40385453a85a70cb"}, + {file = "grpcio-1.67.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ea33986b70f83844cd00814cee4451055cd8cab36f00ac64a31f5bb09b31919e"}, + {file = "grpcio-1.67.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:c7a01337407dd89005527623a4a72c5c8e2894d22bead0895306b23c6695698f"}, + {file = "grpcio-1.67.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80b866f73224b0634f4312a4674c1be21b2b4afa73cb20953cbbb73a6b36c3cc"}, + {file = "grpcio-1.67.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9fff78ba10d4250bfc07a01bd6254a6d87dc67f9627adece85c0b2ed754fa96"}, + {file = "grpcio-1.67.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8a23cbcc5bb11ea7dc6163078be36c065db68d915c24f5faa4f872c573bb400f"}, + {file = "grpcio-1.67.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:1a65b503d008f066e994f34f456e0647e5ceb34cfcec5ad180b1b44020ad4970"}, + {file = "grpcio-1.67.1-cp311-cp311-win32.whl", hash = "sha256:e29ca27bec8e163dca0c98084040edec3bc49afd10f18b412f483cc68c712744"}, + {file = "grpcio-1.67.1-cp311-cp311-win_amd64.whl", hash = "sha256:786a5b18544622bfb1e25cc08402bd44ea83edfb04b93798d85dca4d1a0b5be5"}, + {file = "grpcio-1.67.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:267d1745894200e4c604958da5f856da6293f063327cb049a51fe67348e4f953"}, + {file = "grpcio-1.67.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:85f69fdc1d28ce7cff8de3f9c67db2b0ca9ba4449644488c1e0303c146135ddb"}, + {file = "grpcio-1.67.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f26b0b547eb8d00e195274cdfc63ce64c8fc2d3e2d00b12bf468ece41a0423a0"}, + {file = "grpcio-1.67.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4422581cdc628f77302270ff839a44f4c24fdc57887dc2a45b7e53d8fc2376af"}, + {file = "grpcio-1.67.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d7616d2ded471231c701489190379e0c311ee0a6c756f3c03e6a62b95a7146e"}, + {file = "grpcio-1.67.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8a00efecde9d6fcc3ab00c13f816313c040a28450e5e25739c24f432fc6d3c75"}, + {file = "grpcio-1.67.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:699e964923b70f3101393710793289e42845791ea07565654ada0969522d0a38"}, + {file = "grpcio-1.67.1-cp312-cp312-win32.whl", hash = "sha256:4e7b904484a634a0fff132958dabdb10d63e0927398273917da3ee103e8d1f78"}, + {file = "grpcio-1.67.1-cp312-cp312-win_amd64.whl", hash = "sha256:5721e66a594a6c4204458004852719b38f3d5522082be9061d6510b455c90afc"}, + {file = "grpcio-1.67.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:aa0162e56fd10a5547fac8774c4899fc3e18c1aa4a4759d0ce2cd00d3696ea6b"}, + {file = "grpcio-1.67.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:beee96c8c0b1a75d556fe57b92b58b4347c77a65781ee2ac749d550f2a365dc1"}, + {file = "grpcio-1.67.1-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:a93deda571a1bf94ec1f6fcda2872dad3ae538700d94dc283c672a3b508ba3af"}, + {file = "grpcio-1.67.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e6f255980afef598a9e64a24efce87b625e3e3c80a45162d111a461a9f92955"}, + {file = "grpcio-1.67.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e838cad2176ebd5d4a8bb03955138d6589ce9e2ce5d51c3ada34396dbd2dba8"}, + {file = "grpcio-1.67.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a6703916c43b1d468d0756c8077b12017a9fcb6a1ef13faf49e67d20d7ebda62"}, + {file = "grpcio-1.67.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:917e8d8994eed1d86b907ba2a61b9f0aef27a2155bca6cbb322430fc7135b7bb"}, + {file = "grpcio-1.67.1-cp313-cp313-win32.whl", hash = "sha256:e279330bef1744040db8fc432becc8a727b84f456ab62b744d3fdb83f327e121"}, + {file = "grpcio-1.67.1-cp313-cp313-win_amd64.whl", hash = "sha256:fa0c739ad8b1996bd24823950e3cb5152ae91fca1c09cc791190bf1627ffefba"}, + {file = "grpcio-1.67.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:178f5db771c4f9a9facb2ab37a434c46cb9be1a75e820f187ee3d1e7805c4f65"}, + {file = "grpcio-1.67.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0f3e49c738396e93b7ba9016e153eb09e0778e776df6090c1b8c91877cc1c426"}, + {file = "grpcio-1.67.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:24e8a26dbfc5274d7474c27759b54486b8de23c709d76695237515bc8b5baeab"}, + {file = "grpcio-1.67.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3b6c16489326d79ead41689c4b84bc40d522c9a7617219f4ad94bc7f448c5085"}, + {file = "grpcio-1.67.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e6a4dcf5af7bbc36fd9f81c9f372e8ae580870a9e4b6eafe948cd334b81cf3"}, + {file = "grpcio-1.67.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:95b5f2b857856ed78d72da93cd7d09b6db8ef30102e5e7fe0961fe4d9f7d48e8"}, + {file = "grpcio-1.67.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b49359977c6ec9f5d0573ea4e0071ad278ef905aa74e420acc73fd28ce39e9ce"}, + {file = "grpcio-1.67.1-cp38-cp38-win32.whl", hash = "sha256:f5b76ff64aaac53fede0cc93abf57894ab2a7362986ba22243d06218b93efe46"}, + {file = "grpcio-1.67.1-cp38-cp38-win_amd64.whl", hash = "sha256:804c6457c3cd3ec04fe6006c739579b8d35c86ae3298ffca8de57b493524b771"}, + {file = "grpcio-1.67.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:a25bdea92b13ff4d7790962190bf6bf5c4639876e01c0f3dda70fc2769616335"}, + {file = "grpcio-1.67.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cdc491ae35a13535fd9196acb5afe1af37c8237df2e54427be3eecda3653127e"}, + {file = "grpcio-1.67.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:85f862069b86a305497e74d0dc43c02de3d1d184fc2c180993aa8aa86fbd19b8"}, + {file = "grpcio-1.67.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec74ef02010186185de82cc594058a3ccd8d86821842bbac9873fd4a2cf8be8d"}, + {file = "grpcio-1.67.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01f616a964e540638af5130469451cf580ba8c7329f45ca998ab66e0c7dcdb04"}, + {file = "grpcio-1.67.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:299b3d8c4f790c6bcca485f9963b4846dd92cf6f1b65d3697145d005c80f9fe8"}, + {file = "grpcio-1.67.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:60336bff760fbb47d7e86165408126f1dded184448e9a4c892189eb7c9d3f90f"}, + {file = "grpcio-1.67.1-cp39-cp39-win32.whl", hash = "sha256:5ed601c4c6008429e3d247ddb367fe8c7259c355757448d7c1ef7bd4a6739e8e"}, + {file = "grpcio-1.67.1-cp39-cp39-win_amd64.whl", hash = "sha256:5db70d32d6703b89912af16d6d45d78406374a8b8ef0d28140351dd0ec610e98"}, + {file = "grpcio-1.67.1.tar.gz", hash = "sha256:3dc2ed4cabea4dc14d5e708c2b426205956077cc5de419b4d4079315017e9732"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.67.1)"] [[package]] name = "grpcio-status" -version = "1.71.2" +version = "1.67.1" description = "Status proto mapping for gRPC" optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" groups = ["main"] files = [ - {file = "grpcio_status-1.71.2-py3-none-any.whl", hash = "sha256:803c98cb6a8b7dc6dbb785b1111aed739f241ab5e9da0bba96888aa74704cfd3"}, - {file = "grpcio_status-1.71.2.tar.gz", hash = "sha256:c7a97e176df71cdc2c179cd1847d7fc86cca5832ad12e9798d7fed6b7a1aab50"}, + {file = "grpcio_status-1.67.1-py3-none-any.whl", hash = "sha256:16e6c085950bdacac97c779e6a502ea671232385e6e37f258884d6883392c2bd"}, + {file = "grpcio_status-1.67.1.tar.gz", hash = "sha256:2bf38395e028ceeecfd8866b081f61628114b384da7d51ae064ddc8d766a5d11"}, ] [package.dependencies] googleapis-common-protos = ">=1.5.5" -grpcio = ">=1.71.2" +grpcio = ">=1.67.1" protobuf = ">=5.26.1,<6.0dev" [[package]] @@ -4540,42 +4558,39 @@ valkey = ["valkey (>=6)"] [[package]] name = "litellm" -version = "1.77.7" +version = "1.80.7" description = "Library to easily interface with LLM API providers" optional = false -python-versions = ">=3.8.1,<4.0, !=3.9.7" +python-versions = "<4.0,>=3.9" groups = ["main"] -files = [] -develop = false +files = [ + {file = "litellm-1.80.7-py3-none-any.whl", 
hash = "sha256:f7d993f78c1e0e4e1202b2a925cc6540b55b6e5fb055dd342d88b145ab3102ed"}, + {file = "litellm-1.80.7.tar.gz", hash = "sha256:3977a8d195aef842d01c18bf9e22984829363c6a4b54daf9a43c9dd9f190b42c"}, +] [package.dependencies] aiohttp = ">=3.10" click = "*" fastuuid = ">=0.13.0" +grpcio = ">=1.62.3,<1.68.0" httpx = ">=0.23.0" importlib-metadata = ">=6.8.0" -jinja2 = "^3.1.2" -jsonschema = "^4.22.0" -openai = ">=1.99.5" -pydantic = "^2.5.0" +jinja2 = ">=3.1.2,<4.0.0" +jsonschema = ">=4.22.0,<5.0.0" +openai = ">=2.8.0" +pydantic = ">=2.5.0,<3.0.0" python-dotenv = ">=0.2.0" tiktoken = ">=0.7.0" tokenizers = "*" [package.extras] caching = ["diskcache (>=5.6.1,<6.0.0)"] -extra-proxy = ["azure-identity (>=1.15.0,<2.0.0)", "azure-keyvault-secrets (>=4.8.0,<5.0.0)", "google-cloud-iam (>=2.19.1,<3.0.0)", "google-cloud-kms (>=2.21.3,<3.0.0)", "prisma (==0.11.0)", "redisvl (>=0.4.1,<0.5.0) ; python_version >= \"3.9\" and python_version < \"3.14\"", "resend (>=0.8.0,<0.9.0)"] +extra-proxy = ["azure-identity (>=1.15.0,<2.0.0) ; python_version >= \"3.9\"", "azure-keyvault-secrets (>=4.8.0,<5.0.0)", "google-cloud-iam (>=2.19.1,<3.0.0)", "google-cloud-kms (>=2.21.3,<3.0.0)", "prisma (==0.11.0)", "redisvl (>=0.4.1,<0.5.0) ; python_version >= \"3.9\" and python_version < \"3.14\"", "resend (>=0.8.0)"] mlflow = ["mlflow (>3.1.4) ; python_version >= \"3.10\""] -proxy = ["PyJWT (>=2.8.0,<3.0.0)", "apscheduler (>=3.10.4,<4.0.0)", "azure-identity (>=1.15.0,<2.0.0)", "azure-storage-blob (>=12.25.1,<13.0.0)", "backoff", "boto3 (==1.36.0)", "cryptography", "fastapi (>=0.115.5,<0.116.0)", "fastapi-sso (>=0.16.0,<0.17.0)", "gunicorn (>=23.0.0,<24.0.0)", "litellm-enterprise (==0.1.20)", "litellm-proxy-extras (==0.2.25)", "mcp (>=1.10.0,<2.0.0) ; python_version >= \"3.10\"", "orjson (>=3.9.7,<4.0.0)", "polars (>=1.31.0,<2.0.0) ; python_version >= \"3.10\"", "pynacl (>=1.5.0,<2.0.0)", "python-multipart (>=0.0.18,<0.0.19)", "pyyaml (>=6.0.1,<7.0.0)", "rich (==13.7.1)", "rq", "uvicorn (>=0.29.0,<0.30.0)", "uvloop (>=0.21.0,<0.22.0) ; sys_platform != \"win32\"", "websockets (>=13.1.0,<14.0.0)"] -semantic-router = ["semantic-router ; python_version >= \"3.9\""] +proxy = ["PyJWT (>=2.10.1,<3.0.0) ; python_version >= \"3.9\"", "apscheduler (>=3.10.4,<4.0.0)", "azure-identity (>=1.15.0,<2.0.0) ; python_version >= \"3.9\"", "azure-storage-blob (>=12.25.1,<13.0.0)", "backoff", "boto3 (==1.36.0)", "cryptography", "fastapi (>=0.120.1)", "fastapi-sso (>=0.16.0,<0.17.0)", "gunicorn (>=23.0.0,<24.0.0)", "litellm-enterprise (==0.1.22)", "litellm-proxy-extras (==0.4.9)", "mcp (>=1.21.2,<2.0.0) ; python_version >= \"3.10\"", "orjson (>=3.9.7,<4.0.0)", "polars (>=1.31.0,<2.0.0) ; python_version >= \"3.10\"", "pynacl (>=1.5.0,<2.0.0)", "python-multipart (>=0.0.18,<0.0.19)", "pyyaml (>=6.0.1,<7.0.0)", "rich (==13.7.1)", "rq", "soundfile (>=0.12.1,<0.13.0)", "uvicorn (>=0.31.1,<0.32.0)", "uvloop (>=0.21.0,<0.22.0) ; sys_platform != \"win32\"", "websockets (>=15.0.1,<16.0.0)"] +semantic-router = ["semantic-router (>=0.1.12) ; python_version >= \"3.9\" and python_version < \"3.14\""] utils = ["numpydoc"] -[package.source] -type = "git" -url = "https://github.com/BerriAI/litellm.git" -reference = "v1.77.7.dev9" -resolved_reference = "763d2f8ccdd8412dbe6d4ac0e136d9ac34dcd4c0" - [[package]] name = "llvmlite" version = "0.44.0" @@ -4609,14 +4624,14 @@ files = [ [[package]] name = "lmnr" -version = "0.7.20" +version = "0.7.24" description = "Python SDK for Laminar" optional = false python-versions = "<4,>=3.10" groups = ["main"] files = [ - {file 
= "lmnr-0.7.20-py3-none-any.whl", hash = "sha256:5f9fa7444e6f96c25e097f66484ff29e632bdd1de0e9346948bf5595f4a8af38"}, - {file = "lmnr-0.7.20.tar.gz", hash = "sha256:1f484cd618db2d71af65f90a0b8b36d20d80dc91a5138b811575c8677bf7c4fd"}, + {file = "lmnr-0.7.24-py3-none-any.whl", hash = "sha256:ad780d4a62ece897048811f3368639c240a9329ab31027da8c96545137a3a08a"}, + {file = "lmnr-0.7.24.tar.gz", hash = "sha256:aa6973f46fc4ba95c9061c1feceb58afc02eb43c9376c21e32545371ff6123d7"}, ] [package.dependencies] @@ -4639,14 +4654,15 @@ tqdm = ">=4.0" [package.extras] alephalpha = ["opentelemetry-instrumentation-alephalpha (>=0.47.1)"] -all = ["opentelemetry-instrumentation-alephalpha (>=0.47.1)", "opentelemetry-instrumentation-bedrock (>=0.47.1)", "opentelemetry-instrumentation-chromadb (>=0.47.1)", "opentelemetry-instrumentation-cohere (>=0.47.1)", "opentelemetry-instrumentation-crewai (>=0.47.1)", "opentelemetry-instrumentation-haystack (>=0.47.1)", "opentelemetry-instrumentation-lancedb (>=0.47.1)", "opentelemetry-instrumentation-langchain (>=0.47.1)", "opentelemetry-instrumentation-llamaindex (>=0.47.1)", "opentelemetry-instrumentation-marqo (>=0.47.1)", "opentelemetry-instrumentation-mcp (>=0.47.1)", "opentelemetry-instrumentation-milvus (>=0.47.1)", "opentelemetry-instrumentation-mistralai (>=0.47.1)", "opentelemetry-instrumentation-ollama (>=0.47.1)", "opentelemetry-instrumentation-pinecone (>=0.47.1)", "opentelemetry-instrumentation-qdrant (>=0.47.1)", "opentelemetry-instrumentation-replicate (>=0.47.1)", "opentelemetry-instrumentation-sagemaker (>=0.47.1)", "opentelemetry-instrumentation-together (>=0.47.1)", "opentelemetry-instrumentation-transformers (>=0.47.1)", "opentelemetry-instrumentation-vertexai (>=0.47.1)", "opentelemetry-instrumentation-watsonx (>=0.47.1)", "opentelemetry-instrumentation-weaviate (>=0.47.1)"] +all = ["opentelemetry-instrumentation-alephalpha (>=0.47.1)", "opentelemetry-instrumentation-bedrock (>=0.47.1)", "opentelemetry-instrumentation-chromadb (>=0.47.1)", "opentelemetry-instrumentation-cohere (>=0.47.1)", "opentelemetry-instrumentation-crewai (>=0.47.1)", "opentelemetry-instrumentation-haystack (>=0.47.1)", "opentelemetry-instrumentation-lancedb (>=0.47.1)", "opentelemetry-instrumentation-langchain (>=0.47.1,<0.48.0)", "opentelemetry-instrumentation-llamaindex (>=0.47.1)", "opentelemetry-instrumentation-marqo (>=0.47.1)", "opentelemetry-instrumentation-mcp (>=0.47.1)", "opentelemetry-instrumentation-milvus (>=0.47.1)", "opentelemetry-instrumentation-mistralai (>=0.47.1)", "opentelemetry-instrumentation-ollama (>=0.47.1)", "opentelemetry-instrumentation-pinecone (>=0.47.1)", "opentelemetry-instrumentation-qdrant (>=0.47.1)", "opentelemetry-instrumentation-replicate (>=0.47.1)", "opentelemetry-instrumentation-sagemaker (>=0.47.1)", "opentelemetry-instrumentation-together (>=0.47.1)", "opentelemetry-instrumentation-transformers (>=0.47.1)", "opentelemetry-instrumentation-vertexai (>=0.47.1)", "opentelemetry-instrumentation-watsonx (>=0.47.1)", "opentelemetry-instrumentation-weaviate (>=0.47.1)"] bedrock = ["opentelemetry-instrumentation-bedrock (>=0.47.1)"] chromadb = ["opentelemetry-instrumentation-chromadb (>=0.47.1)"] +claude-agent-sdk = ["lmnr-claude-code-proxy (>=0.1.0a5)"] cohere = ["opentelemetry-instrumentation-cohere (>=0.47.1)"] crewai = ["opentelemetry-instrumentation-crewai (>=0.47.1)"] haystack = ["opentelemetry-instrumentation-haystack (>=0.47.1)"] lancedb = ["opentelemetry-instrumentation-lancedb (>=0.47.1)"] -langchain = 
["opentelemetry-instrumentation-langchain (>=0.47.1)"] +langchain = ["opentelemetry-instrumentation-langchain (>=0.47.1,<0.48.0)"] llamaindex = ["opentelemetry-instrumentation-llamaindex (>=0.47.1)"] marqo = ["opentelemetry-instrumentation-marqo (>=0.47.1)"] mcp = ["opentelemetry-instrumentation-mcp (>=0.47.1)"] @@ -5644,28 +5660,28 @@ pydantic = ">=2.9" [[package]] name = "openai" -version = "1.99.9" +version = "2.8.0" description = "The official Python library for the openai API" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main", "test"] files = [ - {file = "openai-1.99.9-py3-none-any.whl", hash = "sha256:9dbcdb425553bae1ac5d947147bebbd630d91bbfc7788394d4c4f3a35682ab3a"}, - {file = "openai-1.99.9.tar.gz", hash = "sha256:f2082d155b1ad22e83247c3de3958eb4255b20ccf4a1de2e6681b6957b554e92"}, + {file = "openai-2.8.0-py3-none-any.whl", hash = "sha256:ba975e347f6add2fe13529ccb94d54a578280e960765e5224c34b08d7e029ddf"}, + {file = "openai-2.8.0.tar.gz", hash = "sha256:4851908f6d6fcacbd47ba659c5ac084f7725b752b6bfa1e948b6fbfc111a6bad"}, ] [package.dependencies] anyio = ">=3.5.0,<5" distro = ">=1.7.0,<2" httpx = ">=0.23.0,<1" -jiter = ">=0.4.0,<1" +jiter = ">=0.10.0,<1" pydantic = ">=1.9.0,<3" sniffio = "*" tqdm = ">4" typing-extensions = ">=4.11,<5" [package.extras] -aiohttp = ["aiohttp", "httpx-aiohttp (>=0.1.8)"] +aiohttp = ["aiohttp", "httpx-aiohttp (>=0.1.9)"] datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] realtime = ["websockets (>=13,<16)"] voice-helpers = ["numpy (>=2.0.2)", "sounddevice (>=0.5.1)"] @@ -5820,38 +5836,31 @@ llama = ["llama-index (>=0.12.29,<0.13.0)", "llama-index-core (>=0.12.29,<0.13.0 [[package]] name = "openhands-agent-server" -version = "1.1.0" +version = "1.6.0" description = "OpenHands Agent Server - REST/WebSocket interface for OpenHands AI Agent" optional = false python-versions = ">=3.12" groups = ["main"] files = [ - {file = "openhands_agent_server-1.1.0-py3-none-any.whl", hash = "sha256:59a856883df23488c0723e47655ef21649a321fcd4709a25a4690866eff6ac88"}, - {file = "openhands_agent_server-1.1.0.tar.gz", hash = "sha256:e39bebd39afd45cfcfd765005e7c4e5409e46678bd7612ae20bae79f7057b935"}, + {file = "openhands_agent_server-1.6.0-py3-none-any.whl", hash = "sha256:e6ae865ac3e7a96b234e10a0faad23f6210e025bbf7721cb66bc7a71d160848c"}, + {file = "openhands_agent_server-1.6.0.tar.gz", hash = "sha256:44ce7694ae2d4bb0666d318ef13e6618bd4dc73022c60354839fe6130e67d02a"}, ] -develop = false [package.dependencies] aiosqlite = ">=0.19" alembic = ">=1.13" docker = ">=7.1,<8" fastapi = ">=0.104" +openhands-sdk = "*" pydantic = ">=2" sqlalchemy = ">=2" uvicorn = ">=0.31.1" websockets = ">=12" wsproto = ">=1.2.0" -[package.source] -type = "git" -url = "https://github.com/OpenHands/agent-sdk.git" -reference = "15f565b8ac38876e40dc05c08e2b04ccaae4a66d" -resolved_reference = "15f565b8ac38876e40dc05c08e2b04ccaae4a66d" -subdirectory = "openhands-agent-server" - [[package]] name = "openhands-ai" -version = "0.0.0-post.5576+ed2ac6040" +version = "0.0.0-post.5687+7853b41ad" description = "OpenHands: Code Less, Make More" optional = false python-versions = "^3.12,<3.14" @@ -5887,15 +5896,15 @@ json-repair = "*" jupyter_kernel_gateway = "*" kubernetes = "^33.1.0" libtmux = ">=0.46.2" -litellm = ">=1.74.3, <1.78.0, !=1.64.4, !=1.67.*" +litellm = ">=1.74.3, <=1.80.7, !=1.64.4, !=1.67.*" lmnr = "^0.7.20" memory-profiler = "^0.61.0" numpy = "*" -openai = "1.99.9" +openai = "2.8.0" openhands-aci = "0.3.2" -openhands-agent-server = "1.1.0" 
-openhands-sdk = "1.1.0" -openhands-tools = "1.1.0" +openhands-agent-server = "1.6.0" +openhands-sdk = "1.6.0" +openhands-tools = "1.6.0" opentelemetry-api = "^1.33.1" opentelemetry-exporter-otlp-proto-grpc = "^1.33.1" pathspec = "^0.12.1" @@ -5951,23 +5960,22 @@ url = ".." [[package]] name = "openhands-sdk" -version = "1.1.0" +version = "1.6.0" description = "OpenHands SDK - Core functionality for building AI agents" optional = false python-versions = ">=3.12" groups = ["main"] files = [ - {file = "openhands_sdk-1.1.0-py3-none-any.whl", hash = "sha256:4a984ce1687a48cf99a67fdf3d37b116f8b2840743d4807810b5024af6a1d57e"}, - {file = "openhands_sdk-1.1.0.tar.gz", hash = "sha256:855e0d8f3657205e4119e50520c17e65b3358b1a923f7a051a82512a54bf426c"}, + {file = "openhands_sdk-1.6.0-py3-none-any.whl", hash = "sha256:94d2f87fb35406373da6728ae2d88584137f9e9b67fa0e940444c72f2e44e7d3"}, + {file = "openhands_sdk-1.6.0.tar.gz", hash = "sha256:f45742350e3874a7f5b08befc4a9d5adc7e4454f7ab5f8391c519eee3116090f"}, ] -develop = false [package.dependencies] deprecation = ">=2.1.0" fastmcp = ">=2.11.3" httpx = ">=0.27.0" -litellm = ">=1.77.7.dev9" -lmnr = ">=0.7.20" +litellm = ">=1.80.7" +lmnr = ">=0.7.24" pydantic = ">=2.11.7" python-frontmatter = ">=1.1.0" python-json-logger = ">=3.3.0" @@ -5977,25 +5985,17 @@ websockets = ">=12" [package.extras] boto3 = ["boto3 (>=1.35.0)"] -[package.source] -type = "git" -url = "https://github.com/OpenHands/agent-sdk.git" -reference = "15f565b8ac38876e40dc05c08e2b04ccaae4a66d" -resolved_reference = "15f565b8ac38876e40dc05c08e2b04ccaae4a66d" -subdirectory = "openhands-sdk" - [[package]] name = "openhands-tools" -version = "1.1.0" +version = "1.6.0" description = "OpenHands Tools - Runtime tools for AI agents" optional = false python-versions = ">=3.12" groups = ["main"] files = [ - {file = "openhands_tools-1.1.0-py3-none-any.whl", hash = "sha256:767d6746f05edade49263aa24450a037485a3dc23379f56917ef19aad22033f9"}, - {file = "openhands_tools-1.1.0.tar.gz", hash = "sha256:c2fadaa4f4e16e9a3df5781ea847565dcae7171584f09ef7c0e1d97c8dfc83f6"}, + {file = "openhands_tools-1.6.0-py3-none-any.whl", hash = "sha256:176556d44186536751b23fe052d3505492cc2afb8d52db20fb7a2cc0169cd57a"}, + {file = "openhands_tools-1.6.0.tar.gz", hash = "sha256:d07ba31050fd4a7891a4c48388aa53ce9f703e17064ddbd59146d6c77e5980b3"}, ] -develop = false [package.dependencies] bashlex = ">=0.18" @@ -6006,13 +6006,7 @@ func-timeout = ">=4.3.5" libtmux = ">=0.46.2" openhands-sdk = "*" pydantic = ">=2.11.7" - -[package.source] -type = "git" -url = "https://github.com/OpenHands/agent-sdk.git" -reference = "15f565b8ac38876e40dc05c08e2b04ccaae4a66d" -resolved_reference = "15f565b8ac38876e40dc05c08e2b04ccaae4a66d" -subdirectory = "openhands-tools" +tom-swe = ">=1.0.3" [[package]] name = "openpyxl" @@ -13329,6 +13323,31 @@ dev = ["tokenizers[testing]"] docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] testing = ["black (==22.3)", "datasets", "numpy", "pytest", "pytest-asyncio", "requests", "ruff"] +[[package]] +name = "tom-swe" +version = "1.0.3" +description = "Theory of Mind modeling for Software Engineering assistants" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "tom_swe-1.0.3-py3-none-any.whl", hash = "sha256:7b1172b29eb5c8fb7f1975016e7b6a238511b9ac2a7a980bd400dcb4e29773f2"}, + {file = "tom_swe-1.0.3.tar.gz", hash = "sha256:57c97d0104e563f15bd39edaf2aa6ac4c3e9444afd437fb92458700d22c6c0f5"}, +] + +[package.dependencies] +jinja2 = ">=3.0.0" +json-repair = ">=0.1.0" +litellm = 
">=1.0.0" +pydantic = ">=2.0.0" +python-dotenv = ">=1.0.0" +tiktoken = ">=0.8.0" +tqdm = ">=4.65.0" + +[package.extras] +dev = ["aiofiles (>=23.0.0)", "black (>=22.0.0)", "datasets (>=2.0.0)", "fastapi (>=0.104.0)", "httpx (>=0.25.0)", "huggingface-hub (>=0.0.0)", "isort (>=5.0.0)", "mypy (>=1.0.0)", "numpy (>=1.24.0)", "pandas (>=2.0.0)", "pre-commit (>=3.6.0)", "pytest (>=7.0.0)", "pytest-cov (>=6.2.1)", "rich (>=13.0.0)", "ruff (>=0.3.0)", "typing-extensions (>=4.0.0)", "uvicorn (>=0.24.0)"] +search = ["bm25s (>=0.2.0)", "pystemmer (>=2.2.0)"] + [[package]] name = "toml" version = "0.10.2" diff --git a/enterprise/saas_server.py b/enterprise/saas_server.py index 4c3c7c49ba87..96e19a981518 100644 --- a/enterprise/saas_server.py +++ b/enterprise/saas_server.py @@ -34,6 +34,7 @@ from server.routes.integration.linear import linear_integration_router # noqa: E402 from server.routes.integration.slack import slack_router # noqa: E402 from server.routes.mcp_patch import patch_mcp_server # noqa: E402 +from server.routes.oauth_device import oauth_device_router # noqa: E402 from server.routes.readiness import readiness_router # noqa: E402 from server.routes.user import saas_user_router # noqa: E402 @@ -60,6 +61,7 @@ def is_saas(): base_app.include_router(readiness_router) # Add routes for readiness checks base_app.include_router(api_router) # Add additional route for github auth base_app.include_router(oauth_router) # Add additional route for oauth callback +base_app.include_router(oauth_device_router) # Add OAuth 2.0 Device Flow routes base_app.include_router(saas_user_router) # Add additional route SAAS user calls base_app.include_router( billing_router diff --git a/enterprise/server/auth/saas_user_auth.py b/enterprise/server/auth/saas_user_auth.py index eafb7c5b742b..2f399a74cfad 100644 --- a/enterprise/server/auth/saas_user_auth.py +++ b/enterprise/server/auth/saas_user_auth.py @@ -203,6 +203,15 @@ async def get_user_settings_store(self) -> SettingsStore: self.settings_store = settings_store return settings_store + async def get_mcp_api_key(self) -> str: + api_key_store = ApiKeyStore.get_instance() + mcp_api_key = api_key_store.retrieve_mcp_api_key(self.user_id) + if not mcp_api_key: + mcp_api_key = api_key_store.create_api_key( + self.user_id, 'MCP_API_KEY', None + ) + return mcp_api_key + @classmethod async def get_instance(cls, request: Request) -> UserAuth: logger.debug('saas_user_auth_get_instance') @@ -243,7 +252,12 @@ def get_api_key_from_header(request: Request): # This is a temp hack # Streamable HTTP MCP Client works via redirect requests, but drops the Authorization header for reason # We include `X-Session-API-Key` header by default due to nested runtimes, so it used as a drop in replacement here - return request.headers.get('X-Session-API-Key') + session_api_key = request.headers.get('X-Session-API-Key') + if session_api_key: + return session_api_key + + # Fallback to X-Access-Token header as an additional option + return request.headers.get('X-Access-Token') async def saas_user_auth_from_bearer(request: Request) -> SaasUserAuth | None: diff --git a/enterprise/server/constants.py b/enterprise/server/constants.py index 74176d19a104..e2020ba2b9ac 100644 --- a/enterprise/server/constants.py +++ b/enterprise/server/constants.py @@ -25,6 +25,7 @@ 2: 'claude-3-7-sonnet-20250219', 3: 'claude-sonnet-4-20250514', 4: 'claude-sonnet-4-20250514', + 5: 'claude-opus-4-5-20251101', } LITELLM_DEFAULT_MODEL = os.getenv('LITELLM_DEFAULT_MODEL') diff --git 
a/enterprise/server/legacy_conversation_manager.py b/enterprise/server/legacy_conversation_manager.py deleted file mode 100644 index 5c82b5b42042..000000000000 --- a/enterprise/server/legacy_conversation_manager.py +++ /dev/null @@ -1,331 +0,0 @@ -from __future__ import annotations - -import time -from dataclasses import dataclass, field - -import socketio -from server.clustered_conversation_manager import ClusteredConversationManager -from server.saas_nested_conversation_manager import SaasNestedConversationManager - -from openhands.core.config import LLMConfig, OpenHandsConfig -from openhands.events.action import MessageAction -from openhands.server.config.server_config import ServerConfig -from openhands.server.conversation_manager.conversation_manager import ( - ConversationManager, -) -from openhands.server.data_models.agent_loop_info import AgentLoopInfo -from openhands.server.monitoring import MonitoringListener -from openhands.server.session.conversation import ServerConversation -from openhands.storage.data_models.settings import Settings -from openhands.storage.files import FileStore -from openhands.utils.async_utils import wait_all - -_LEGACY_ENTRY_TIMEOUT_SECONDS = 3600 - - -@dataclass -class LegacyCacheEntry: - """Cache entry for legacy mode status.""" - - is_legacy: bool - timestamp: float - - -@dataclass -class LegacyConversationManager(ConversationManager): - """ - Conversation manager for use while migrating - since existing conversations are not nested! - Separate class from SaasNestedConversationManager so it can be easliy removed in a few weeks. - (As of 2025-07-23) - """ - - sio: socketio.AsyncServer - config: OpenHandsConfig - server_config: ServerConfig - file_store: FileStore - conversation_manager: SaasNestedConversationManager - legacy_conversation_manager: ClusteredConversationManager - _legacy_cache: dict[str, LegacyCacheEntry] = field(default_factory=dict) - - async def __aenter__(self): - await wait_all( - [ - self.conversation_manager.__aenter__(), - self.legacy_conversation_manager.__aenter__(), - ] - ) - return self - - async def __aexit__(self, exc_type, exc_value, traceback): - await wait_all( - [ - self.conversation_manager.__aexit__(exc_type, exc_value, traceback), - self.legacy_conversation_manager.__aexit__( - exc_type, exc_value, traceback - ), - ] - ) - - async def request_llm_completion( - self, - sid: str, - service_id: str, - llm_config: LLMConfig, - messages: list[dict[str, str]], - ) -> str: - session = self.get_agent_session(sid) - llm_registry = session.llm_registry - return llm_registry.request_extraneous_completion( - service_id, llm_config, messages - ) - - async def attach_to_conversation( - self, sid: str, user_id: str | None = None - ) -> ServerConversation | None: - if await self.should_start_in_legacy_mode(sid): - return await self.legacy_conversation_manager.attach_to_conversation( - sid, user_id - ) - return await self.conversation_manager.attach_to_conversation(sid, user_id) - - async def detach_from_conversation(self, conversation: ServerConversation): - if await self.should_start_in_legacy_mode(conversation.sid): - return await self.legacy_conversation_manager.detach_from_conversation( - conversation - ) - return await self.conversation_manager.detach_from_conversation(conversation) - - async def join_conversation( - self, - sid: str, - connection_id: str, - settings: Settings, - user_id: str | None, - ) -> AgentLoopInfo: - if await self.should_start_in_legacy_mode(sid): - return await 
self.legacy_conversation_manager.join_conversation( - sid, connection_id, settings, user_id - ) - return await self.conversation_manager.join_conversation( - sid, connection_id, settings, user_id - ) - - def get_agent_session(self, sid: str): - session = self.legacy_conversation_manager.get_agent_session(sid) - if session is None: - session = self.conversation_manager.get_agent_session(sid) - return session - - async def get_running_agent_loops( - self, user_id: str | None = None, filter_to_sids: set[str] | None = None - ) -> set[str]: - if filter_to_sids and len(filter_to_sids) == 1: - sid = next(iter(filter_to_sids)) - if await self.should_start_in_legacy_mode(sid): - return await self.legacy_conversation_manager.get_running_agent_loops( - user_id, filter_to_sids - ) - return await self.conversation_manager.get_running_agent_loops( - user_id, filter_to_sids - ) - - # Get all running agent loops from both managers - agent_loops, legacy_agent_loops = await wait_all( - [ - self.conversation_manager.get_running_agent_loops( - user_id, filter_to_sids - ), - self.legacy_conversation_manager.get_running_agent_loops( - user_id, filter_to_sids - ), - ] - ) - - # Combine the results - result = set() - for sid in legacy_agent_loops: - if await self.should_start_in_legacy_mode(sid): - result.add(sid) - - for sid in agent_loops: - if not await self.should_start_in_legacy_mode(sid): - result.add(sid) - - return result - - async def is_agent_loop_running(self, sid: str) -> bool: - return bool(await self.get_running_agent_loops(filter_to_sids={sid})) - - async def get_connections( - self, user_id: str | None = None, filter_to_sids: set[str] | None = None - ) -> dict[str, str]: - if filter_to_sids and len(filter_to_sids) == 1: - sid = next(iter(filter_to_sids)) - if await self.should_start_in_legacy_mode(sid): - return await self.legacy_conversation_manager.get_connections( - user_id, filter_to_sids - ) - return await self.conversation_manager.get_connections( - user_id, filter_to_sids - ) - agent_loops, legacy_agent_loops = await wait_all( - [ - self.conversation_manager.get_connections(user_id, filter_to_sids), - self.legacy_conversation_manager.get_connections( - user_id, filter_to_sids - ), - ] - ) - legacy_agent_loops.update(agent_loops) - return legacy_agent_loops - - async def maybe_start_agent_loop( - self, - sid: str, - settings: Settings, - user_id: str, # type: ignore[override] - initial_user_msg: MessageAction | None = None, - replay_json: str | None = None, - ) -> AgentLoopInfo: - if await self.should_start_in_legacy_mode(sid): - return await self.legacy_conversation_manager.maybe_start_agent_loop( - sid, settings, user_id, initial_user_msg, replay_json - ) - return await self.conversation_manager.maybe_start_agent_loop( - sid, settings, user_id, initial_user_msg, replay_json - ) - - async def send_to_event_stream(self, connection_id: str, data: dict): - return await self.legacy_conversation_manager.send_to_event_stream( - connection_id, data - ) - - async def send_event_to_conversation(self, sid: str, data: dict): - if await self.should_start_in_legacy_mode(sid): - await self.legacy_conversation_manager.send_event_to_conversation(sid, data) - await self.conversation_manager.send_event_to_conversation(sid, data) - - async def disconnect_from_session(self, connection_id: str): - return await self.legacy_conversation_manager.disconnect_from_session( - connection_id - ) - - async def close_session(self, sid: str): - if await self.should_start_in_legacy_mode(sid): - await 
self.legacy_conversation_manager.close_session(sid) - await self.conversation_manager.close_session(sid) - - async def get_agent_loop_info( - self, user_id: str | None = None, filter_to_sids: set[str] | None = None - ) -> list[AgentLoopInfo]: - if filter_to_sids and len(filter_to_sids) == 1: - sid = next(iter(filter_to_sids)) - if await self.should_start_in_legacy_mode(sid): - return await self.legacy_conversation_manager.get_agent_loop_info( - user_id, filter_to_sids - ) - return await self.conversation_manager.get_agent_loop_info( - user_id, filter_to_sids - ) - agent_loops, legacy_agent_loops = await wait_all( - [ - self.conversation_manager.get_agent_loop_info(user_id, filter_to_sids), - self.legacy_conversation_manager.get_agent_loop_info( - user_id, filter_to_sids - ), - ] - ) - - # Combine results - result = [] - legacy_sids = set() - - # Add legacy agent loops - for agent_loop in legacy_agent_loops: - if await self.should_start_in_legacy_mode(agent_loop.conversation_id): - result.append(agent_loop) - legacy_sids.add(agent_loop.conversation_id) - - # Add non-legacy agent loops - for agent_loop in agent_loops: - if ( - agent_loop.conversation_id not in legacy_sids - and not await self.should_start_in_legacy_mode( - agent_loop.conversation_id - ) - ): - result.append(agent_loop) - - return result - - def _cleanup_expired_cache_entries(self): - """Remove expired entries from the local cache.""" - current_time = time.time() - expired_keys = [ - key - for key, entry in self._legacy_cache.items() - if current_time - entry.timestamp > _LEGACY_ENTRY_TIMEOUT_SECONDS - ] - for key in expired_keys: - del self._legacy_cache[key] - - async def should_start_in_legacy_mode(self, conversation_id: str) -> bool: - """ - Check if a conversation should run in legacy mode by directly checking the runtime. - The /list method does not include stopped conversations even though the PVC for these - may not yet have been deleted, so we need to check /sessions/{session_id} directly. - """ - # Clean up expired entries periodically - self._cleanup_expired_cache_entries() - - # First check the local cache - if conversation_id in self._legacy_cache: - cached_entry = self._legacy_cache[conversation_id] - # Check if the cached value is still valid - if time.time() - cached_entry.timestamp <= _LEGACY_ENTRY_TIMEOUT_SECONDS: - return cached_entry.is_legacy - - # If not in cache or expired, check the runtime directly - runtime = await self.conversation_manager._get_runtime(conversation_id) - is_legacy = self.is_legacy_runtime(runtime) - - # Cache the result with current timestamp - self._legacy_cache[conversation_id] = LegacyCacheEntry(is_legacy, time.time()) - - return is_legacy - - def is_legacy_runtime(self, runtime: dict | None) -> bool: - """ - Determine if a runtime is a legacy runtime based on its command. 
- - Args: - runtime: The runtime dictionary or None if not found - - Returns: - bool: True if this is a legacy runtime, False otherwise - """ - if runtime is None: - return False - return 'openhands.server' not in runtime['command'] - - @classmethod - def get_instance( - cls, - sio: socketio.AsyncServer, - config: OpenHandsConfig, - file_store: FileStore, - server_config: ServerConfig, - monitoring_listener: MonitoringListener, - ) -> ConversationManager: - return LegacyConversationManager( - sio=sio, - config=config, - server_config=server_config, - file_store=file_store, - conversation_manager=SaasNestedConversationManager.get_instance( - sio, config, file_store, server_config, monitoring_listener - ), - legacy_conversation_manager=ClusteredConversationManager.get_instance( - sio, config, file_store, server_config, monitoring_listener - ), - ) diff --git a/enterprise/server/middleware.py b/enterprise/server/middleware.py index 2972c1ec3808..54e3319595f3 100644 --- a/enterprise/server/middleware.py +++ b/enterprise/server/middleware.py @@ -152,17 +152,22 @@ def _should_attach(self, request: Request) -> bool: return False path = request.url.path - is_api_that_should_attach = path.startswith('/api') and path not in ( + ignore_paths = ( '/api/options/config', '/api/keycloak/callback', '/api/billing/success', '/api/billing/cancel', '/api/billing/customer-setup-success', '/api/billing/stripe-webhook', + '/oauth/device/authorize', + '/oauth/device/token', ) + if path in ignore_paths: + return False is_mcp = path.startswith('/mcp') - return is_api_that_should_attach or is_mcp + is_api_route = path.startswith('/api') + return is_api_route or is_mcp async def _logout(self, request: Request): # Log out of keycloak - this prevents issues where you did not log in with the idp you believe you used diff --git a/enterprise/server/routes/auth.py b/enterprise/server/routes/auth.py index c9e92d54f7f5..ba7aadb88316 100644 --- a/enterprise/server/routes/auth.py +++ b/enterprise/server/routes/auth.py @@ -31,7 +31,6 @@ from openhands.server.shared import config from openhands.server.user_auth import get_access_token from openhands.server.user_auth.user_auth import get_user_auth -from openhands.utils.posthog_tracker import track_user_signup_completed with warnings.catch_warnings(): warnings.simplefilter('ignore') @@ -370,12 +369,6 @@ async def accept_tos(request: Request): logger.info(f'User {user_id} accepted TOS') - # Track user signup completion in PostHog - track_user_signup_completed( - user_id=user_id, - signup_timestamp=user_settings.accepted_tos.isoformat(), - ) - response = JSONResponse( status_code=status.HTTP_200_OK, content={'redirect_url': redirect_url} ) diff --git a/enterprise/server/routes/billing.py b/enterprise/server/routes/billing.py index f1c0c5376bec..5a8b59e2d76b 100644 --- a/enterprise/server/routes/billing.py +++ b/enterprise/server/routes/billing.py @@ -28,7 +28,6 @@ from openhands.server.user_auth import get_user_id from openhands.utils.http_session import httpx_verify_option -from openhands.utils.posthog_tracker import track_credits_purchased stripe.api_key = STRIPE_API_KEY billing_router = APIRouter(prefix='/api/billing') @@ -458,20 +457,6 @@ async def success_callback(session_id: str, request: Request): ) session.commit() - # Track credits purchased in PostHog - try: - track_credits_purchased( - user_id=billing_session.user_id, - amount_usd=amount_subtotal / 100, # Convert cents to dollars - credits_added=add_credits, - stripe_session_id=session_id, - ) - except Exception as e: - 
logger.warning( - f'Failed to track credits purchase: {e}', - extra={'user_id': billing_session.user_id, 'error': str(e)}, - ) - return RedirectResponse( f'{request.base_url}settings/billing?checkout=success', status_code=302 ) diff --git a/enterprise/server/routes/integration/github.py b/enterprise/server/routes/integration/github.py index d7bf857a3f1c..204b8297f78c 100644 --- a/enterprise/server/routes/integration/github.py +++ b/enterprise/server/routes/integration/github.py @@ -1,3 +1,4 @@ +import asyncio import hashlib import hmac import os @@ -58,7 +59,8 @@ async def github_events( ) try: - payload = await request.body() + # Add timeout to prevent hanging on slow/stalled clients + payload = await asyncio.wait_for(request.body(), timeout=15.0) verify_github_signature(payload, x_hub_signature_256) payload_data = await request.json() @@ -78,6 +80,12 @@ async def github_events( status_code=200, content={'message': 'GitHub events endpoint reached successfully.'}, ) + except asyncio.TimeoutError: + logger.warning('GitHub webhook request timed out waiting for request body') + return JSONResponse( + status_code=408, + content={'error': 'Request timeout - client took too long to send data.'}, + ) except Exception as e: logger.exception(f'Error processing GitHub event: {e}') return JSONResponse(status_code=400, content={'error': 'Invalid payload.'}) diff --git a/enterprise/server/routes/oauth_device.py b/enterprise/server/routes/oauth_device.py new file mode 100644 index 000000000000..39ff9a4081a3 --- /dev/null +++ b/enterprise/server/routes/oauth_device.py @@ -0,0 +1,324 @@ +"""OAuth 2.0 Device Flow endpoints for CLI authentication.""" + +from datetime import UTC, datetime, timedelta +from typing import Optional + +from fastapi import APIRouter, Depends, Form, HTTPException, Request, status +from fastapi.responses import JSONResponse +from pydantic import BaseModel +from storage.api_key_store import ApiKeyStore +from storage.database import session_maker +from storage.device_code_store import DeviceCodeStore + +from openhands.core.logger import openhands_logger as logger +from openhands.server.user_auth import get_user_id + +# --------------------------------------------------------------------------- +# Constants +# --------------------------------------------------------------------------- + +DEVICE_CODE_EXPIRES_IN = 600 # 10 minutes +DEVICE_TOKEN_POLL_INTERVAL = 5 # seconds + +API_KEY_NAME = 'Device Link Access Key' +KEY_EXPIRATION_TIME = timedelta(days=1) # Key expires in 24 hours + +# --------------------------------------------------------------------------- +# Models +# --------------------------------------------------------------------------- + + +class DeviceAuthorizationResponse(BaseModel): + device_code: str + user_code: str + verification_uri: str + verification_uri_complete: str + expires_in: int + interval: int + + +class DeviceTokenResponse(BaseModel): + access_token: str # This will be the user's API key + token_type: str = 'Bearer' + expires_in: Optional[int] = None # API keys may not have expiration + + +class DeviceTokenErrorResponse(BaseModel): + error: str + error_description: Optional[str] = None + interval: Optional[int] = None # Required for slow_down error + + +# --------------------------------------------------------------------------- +# Router + stores +# --------------------------------------------------------------------------- + +oauth_device_router = APIRouter(prefix='/oauth/device') +device_code_store = DeviceCodeStore(session_maker) + + +# 
--------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + + +def _oauth_error( + status_code: int, + error: str, + description: str, + interval: Optional[int] = None, +) -> JSONResponse: + """Return a JSON OAuth-style error response.""" + return JSONResponse( + status_code=status_code, + content=DeviceTokenErrorResponse( + error=error, + error_description=description, + interval=interval, + ).model_dump(), + ) + + +# --------------------------------------------------------------------------- +# Endpoints +# --------------------------------------------------------------------------- + + +@oauth_device_router.post('/authorize', response_model=DeviceAuthorizationResponse) +async def device_authorization( + http_request: Request, +) -> DeviceAuthorizationResponse: + """Start device flow by generating device and user codes.""" + try: + device_code_entry = device_code_store.create_device_code( + expires_in=DEVICE_CODE_EXPIRES_IN, + ) + + base_url = str(http_request.base_url).rstrip('/') + verification_uri = f'{base_url}/oauth/device/verify' + verification_uri_complete = ( + f'{verification_uri}?user_code={device_code_entry.user_code}' + ) + + logger.info( + 'Device authorization initiated', + extra={'user_code': device_code_entry.user_code}, + ) + + return DeviceAuthorizationResponse( + device_code=device_code_entry.device_code, + user_code=device_code_entry.user_code, + verification_uri=verification_uri, + verification_uri_complete=verification_uri_complete, + expires_in=DEVICE_CODE_EXPIRES_IN, + interval=device_code_entry.current_interval, + ) + except Exception as e: + logger.exception('Error in device authorization: %s', str(e)) + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail='Internal server error', + ) from e + + +@oauth_device_router.post('/token') +async def device_token(device_code: str = Form(...)): + """Poll for a token until the user authorizes or the code expires.""" + try: + device_code_entry = device_code_store.get_by_device_code(device_code) + + if not device_code_entry: + return _oauth_error( + status.HTTP_400_BAD_REQUEST, + 'invalid_grant', + 'Invalid device code', + ) + + # Check rate limiting (RFC 8628 section 3.5) + is_too_fast, current_interval = device_code_entry.check_rate_limit() + if is_too_fast: + # Update poll time and increase interval + device_code_store.update_poll_time(device_code, increase_interval=True) + logger.warning( + 'Client polling too fast, returning slow_down error', + extra={ + 'device_code': device_code[:8] + '...', # Log partial for privacy + 'new_interval': current_interval, + }, + ) + return _oauth_error( + status.HTTP_400_BAD_REQUEST, + 'slow_down', + f'Polling too frequently. 
Wait at least {current_interval} seconds between requests.', + interval=current_interval, + ) + + # Update poll time for successful rate limit check + device_code_store.update_poll_time(device_code, increase_interval=False) + + if device_code_entry.is_expired(): + return _oauth_error( + status.HTTP_400_BAD_REQUEST, + 'expired_token', + 'Device code has expired', + ) + + if device_code_entry.status == 'denied': + return _oauth_error( + status.HTTP_400_BAD_REQUEST, + 'access_denied', + 'User denied the authorization request', + ) + + if device_code_entry.status == 'pending': + return _oauth_error( + status.HTTP_400_BAD_REQUEST, + 'authorization_pending', + 'User has not yet completed authorization', + ) + + if device_code_entry.status == 'authorized': + # Retrieve the specific API key for this device using the user_code + api_key_store = ApiKeyStore.get_instance() + device_key_name = f'{API_KEY_NAME} ({device_code_entry.user_code})' + device_api_key = api_key_store.retrieve_api_key_by_name( + device_code_entry.keycloak_user_id, device_key_name + ) + + if not device_api_key: + logger.error( + 'No device API key found for authorized device', + extra={ + 'user_id': device_code_entry.keycloak_user_id, + 'user_code': device_code_entry.user_code, + }, + ) + return _oauth_error( + status.HTTP_500_INTERNAL_SERVER_ERROR, + 'server_error', + 'API key not found', + ) + + # Return the API key as access_token + return DeviceTokenResponse( + access_token=device_api_key, + ) + + # Fallback for unexpected status values + logger.error( + 'Unknown device code status', + extra={'status': device_code_entry.status}, + ) + return _oauth_error( + status.HTTP_500_INTERNAL_SERVER_ERROR, + 'server_error', + 'Unknown device code status', + ) + + except Exception as e: + logger.exception('Error in device token: %s', str(e)) + return _oauth_error( + status.HTTP_500_INTERNAL_SERVER_ERROR, + 'server_error', + 'Internal server error', + ) + + +@oauth_device_router.post('/verify-authenticated') +async def device_verification_authenticated( + user_code: str = Form(...), + user_id: str = Depends(get_user_id), +): + """Process device verification for authenticated users (called by frontend).""" + try: + if not user_id: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail='Authentication required', + ) + + # Validate device code + device_code_entry = device_code_store.get_by_user_code(user_code) + if not device_code_entry: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail='The device code is invalid or has expired.', + ) + + if not device_code_entry.is_pending(): + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail='This device code has already been processed.', + ) + + # First, authorize the device code + success = device_code_store.authorize_device_code( + user_code=user_code, + user_id=user_id, + ) + + if not success: + logger.error( + 'Failed to authorize device code', + extra={'user_code': user_code, 'user_id': user_id}, + ) + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail='Failed to authorize the device. 
Please try again.', + ) + + # Only create API key AFTER successful authorization + api_key_store = ApiKeyStore.get_instance() + try: + # Create a unique API key for this device using user_code in the name + device_key_name = f'{API_KEY_NAME} ({user_code})' + api_key_store.create_api_key( + user_id, + name=device_key_name, + expires_at=datetime.now(UTC) + KEY_EXPIRATION_TIME, + ) + logger.info( + 'Created new device API key for user after successful authorization', + extra={'user_id': user_id, 'user_code': user_code}, + ) + except Exception as e: + logger.exception( + 'Failed to create device API key after authorization: %s', str(e) + ) + + # Clean up: revert the device authorization since API key creation failed + # This prevents the device from being in an authorized state without an API key + try: + device_code_store.deny_device_code(user_code) + logger.info( + 'Reverted device authorization due to API key creation failure', + extra={'user_code': user_code, 'user_id': user_id}, + ) + except Exception as cleanup_error: + logger.exception( + 'Failed to revert device authorization during cleanup: %s', + str(cleanup_error), + ) + + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail='Failed to create API key for device access.', + ) + + logger.info( + 'Device code authorized with API key successfully', + extra={'user_code': user_code, 'user_id': user_id}, + ) + return JSONResponse( + status_code=status.HTTP_200_OK, + content={'message': 'Device authorized successfully!'}, + ) + + except HTTPException: + raise + except Exception as e: + logger.exception('Error in device verification: %s', str(e)) + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail='An unexpected error occurred. Please try again.', + ) diff --git a/enterprise/server/saas_nested_conversation_manager.py b/enterprise/server/saas_nested_conversation_manager.py index e0727996de14..d92e67b9ff9f 100644 --- a/enterprise/server/saas_nested_conversation_manager.py +++ b/enterprise/server/saas_nested_conversation_manager.py @@ -31,6 +31,7 @@ from openhands.events.serialization.event import event_to_dict from openhands.integrations.provider import PROVIDER_TOKEN_TYPE, ProviderHandler from openhands.runtime.impl.remote.remote_runtime import RemoteRuntime +from openhands.runtime.plugins.vscode import VSCodeRequirement from openhands.runtime.runtime_status import RuntimeStatus from openhands.server.config.server_config import ServerConfig from openhands.server.constants import ROOM_KEY @@ -70,6 +71,14 @@ else '/api/conversations/{conversation_id}' ) +RUNTIME_USERNAME = os.getenv('RUNTIME_USERNAME') + +SU_TO_USER = os.getenv('SU_TO_USER', 'false') +truthy = {'1', 'true', 't', 'yes', 'y', 'on'} +SU_TO_USER = str(SU_TO_USER.lower() in truthy).lower() + +DISABLE_VSCODE_PLUGIN = os.getenv('DISABLE_VSCODE_PLUGIN', 'false').lower() == 'true' + # Time in seconds before a Redis entry is considered expired if not refreshed _REDIS_ENTRY_TIMEOUT_SECONDS = 300 @@ -772,7 +781,11 @@ async def _create_runtime( env_vars['SERVE_FRONTEND'] = '0' env_vars['RUNTIME'] = 'local' # TODO: In the long term we may come up with a more secure strategy for user management within the nested runtime. 
- env_vars['USER'] = 'openhands' if config.run_as_openhands else 'root' + env_vars['USER'] = ( + RUNTIME_USERNAME + if RUNTIME_USERNAME + else ('openhands' if config.run_as_openhands else 'root') + ) env_vars['PERMITTED_CORS_ORIGINS'] = ','.join(PERMITTED_CORS_ORIGINS) env_vars['port'] = '60000' # TODO: These values are static in the runtime-api project, but do not get copied into the runtime ENV @@ -789,6 +802,8 @@ async def _create_runtime( env_vars['INITIAL_NUM_WARM_SERVERS'] = '1' env_vars['INIT_GIT_IN_EMPTY_WORKSPACE'] = '1' env_vars['ENABLE_V1'] = '0' + env_vars['SU_TO_USER'] = SU_TO_USER + env_vars['DISABLE_VSCODE_PLUGIN'] = str(DISABLE_VSCODE_PLUGIN).lower() # We need this for LLM traces tracking to identify the source of the LLM calls env_vars['WEB_HOST'] = WEB_HOST @@ -804,11 +819,18 @@ async def _create_runtime( if self._runtime_container_image: config.sandbox.runtime_container_image = self._runtime_container_image + plugins = [ + plugin + for plugin in agent.sandbox_plugins + if not (DISABLE_VSCODE_PLUGIN and isinstance(plugin, VSCodeRequirement)) + ] + logger.info(f'Loaded plugins for runtime {sid}: {plugins}') + runtime = RemoteRuntime( config=config, event_stream=None, # type: ignore[arg-type] sid=sid, - plugins=agent.sandbox_plugins, + plugins=plugins, # env_vars=env_vars, # status_callback: Callable[..., None] | None = None, attach_to_existing=False, diff --git a/enterprise/storage/api_key_store.py b/enterprise/storage/api_key_store.py index 162ed415c150..9714d7476a31 100644 --- a/enterprise/storage/api_key_store.py +++ b/enterprise/storage/api_key_store.py @@ -17,10 +17,13 @@ class ApiKeyStore: session_maker: sessionmaker + API_KEY_PREFIX = 'sk-oh-' + def generate_api_key(self, length: int = 32) -> str: - """Generate a random API key.""" + """Generate a random API key with the sk-oh- prefix.""" alphabet = string.ascii_letters + string.digits - return ''.join(secrets.choice(alphabet) for _ in range(length)) + random_part = ''.join(secrets.choice(alphabet) for _ in range(length)) + return f'{self.API_KEY_PREFIX}{random_part}' def create_api_key( self, user_id: str, name: str | None = None, expires_at: datetime | None = None @@ -57,9 +60,15 @@ def validate_api_key(self, api_key: str) -> str | None: return None # Check if the key has expired - if key_record.expires_at and key_record.expires_at < now: - logger.info(f'API key has expired: {key_record.id}') - return None + if key_record.expires_at: + # Handle timezone-naive datetime from database by assuming it's UTC + expires_at = key_record.expires_at + if expires_at.tzinfo is None: + expires_at = expires_at.replace(tzinfo=UTC) + + if expires_at < now: + logger.info(f'API key has expired: {key_record.id}') + return None # Update last_used_at timestamp session.execute( @@ -125,6 +134,33 @@ def retrieve_mcp_api_key(self, user_id: str) -> str | None: return None + def retrieve_api_key_by_name(self, user_id: str, name: str) -> str | None: + """Retrieve an API key by name for a specific user.""" + with self.session_maker() as session: + key_record = ( + session.query(ApiKey) + .filter(ApiKey.user_id == user_id, ApiKey.name == name) + .first() + ) + return key_record.key if key_record else None + + def delete_api_key_by_name(self, user_id: str, name: str) -> bool: + """Delete an API key by name for a specific user.""" + with self.session_maker() as session: + key_record = ( + session.query(ApiKey) + .filter(ApiKey.user_id == user_id, ApiKey.name == name) + .first() + ) + + if not key_record: + return False + + 
session.delete(key_record) + session.commit() + + return True + @classmethod def get_instance(cls) -> ApiKeyStore: """Get an instance of the ApiKeyStore.""" diff --git a/enterprise/storage/device_code.py b/enterprise/storage/device_code.py new file mode 100644 index 000000000000..47e18b51bcc5 --- /dev/null +++ b/enterprise/storage/device_code.py @@ -0,0 +1,109 @@ +"""Device code storage model for OAuth 2.0 Device Flow.""" + +from datetime import datetime, timezone +from enum import Enum + +from sqlalchemy import Column, DateTime, Integer, String +from storage.base import Base + + +class DeviceCodeStatus(Enum): + """Status of a device code authorization request.""" + + PENDING = 'pending' + AUTHORIZED = 'authorized' + EXPIRED = 'expired' + DENIED = 'denied' + + +class DeviceCode(Base): + """Device code for OAuth 2.0 Device Flow. + + This stores the device codes issued during the device authorization flow, + along with their status and associated user information once authorized. + """ + + __tablename__ = 'device_codes' + + id = Column(Integer, primary_key=True, autoincrement=True) + device_code = Column(String(128), unique=True, nullable=False, index=True) + user_code = Column(String(16), unique=True, nullable=False, index=True) + status = Column(String(32), nullable=False, default=DeviceCodeStatus.PENDING.value) + + # Keycloak user ID who authorized the device (set during verification) + keycloak_user_id = Column(String(255), nullable=True) + + # Timestamps + expires_at = Column(DateTime(timezone=True), nullable=False) + authorized_at = Column(DateTime(timezone=True), nullable=True) + + # Rate limiting fields for RFC 8628 section 3.5 compliance + last_poll_time = Column(DateTime(timezone=True), nullable=True) + current_interval = Column(Integer, nullable=False, default=5) + + def __repr__(self) -> str: + return f"" + + def is_expired(self) -> bool: + """Check if the device code has expired.""" + now = datetime.now(timezone.utc) + return now > self.expires_at + + def is_pending(self) -> bool: + """Check if the device code is still pending authorization.""" + return self.status == DeviceCodeStatus.PENDING.value and not self.is_expired() + + def is_authorized(self) -> bool: + """Check if the device code has been authorized.""" + return self.status == DeviceCodeStatus.AUTHORIZED.value + + def authorize(self, user_id: str) -> None: + """Mark the device code as authorized.""" + self.status = DeviceCodeStatus.AUTHORIZED.value + self.keycloak_user_id = user_id # Set the Keycloak user ID during authorization + self.authorized_at = datetime.now(timezone.utc) + + def deny(self) -> None: + """Mark the device code as denied.""" + self.status = DeviceCodeStatus.DENIED.value + + def expire(self) -> None: + """Mark the device code as expired.""" + self.status = DeviceCodeStatus.EXPIRED.value + + def check_rate_limit(self) -> tuple[bool, int]: + """Check if the client is polling too fast. 
+ + Returns: + tuple: (is_too_fast, current_interval) + - is_too_fast: True if client should receive slow_down error + - current_interval: Current polling interval to use + """ + now = datetime.now(timezone.utc) + + # If this is the first poll, allow it + if self.last_poll_time is None: + return False, self.current_interval + + # Calculate time since last poll + time_since_last_poll = (now - self.last_poll_time).total_seconds() + + # Check if polling too fast + if time_since_last_poll < self.current_interval: + # Increase interval for slow_down (RFC 8628 section 3.5) + new_interval = min(self.current_interval + 5, 60) # Cap at 60 seconds + return True, new_interval + + return False, self.current_interval + + def update_poll_time(self, increase_interval: bool = False) -> None: + """Update the last poll time and optionally increase the interval. + + Args: + increase_interval: If True, increase the current interval for slow_down + """ + self.last_poll_time = datetime.now(timezone.utc) + + if increase_interval: + # Increase interval by 5 seconds, cap at 60 seconds (RFC 8628) + self.current_interval = min(self.current_interval + 5, 60) diff --git a/enterprise/storage/device_code_store.py b/enterprise/storage/device_code_store.py new file mode 100644 index 000000000000..de2fe29cc486 --- /dev/null +++ b/enterprise/storage/device_code_store.py @@ -0,0 +1,167 @@ +"""Device code store for OAuth 2.0 Device Flow.""" + +import secrets +import string +from datetime import datetime, timedelta, timezone + +from sqlalchemy.exc import IntegrityError +from storage.device_code import DeviceCode + + +class DeviceCodeStore: + """Store for managing OAuth 2.0 device codes.""" + + def __init__(self, session_maker): + self.session_maker = session_maker + + def generate_user_code(self) -> str: + """Generate a human-readable user code (8 characters, uppercase letters and digits).""" + # Use a mix of uppercase letters and digits, avoiding confusing characters + alphabet = 'ABCDEFGHJKLMNPQRSTUVWXYZ23456789' # No I, O, 0, 1 + return ''.join(secrets.choice(alphabet) for _ in range(8)) + + def generate_device_code(self) -> str: + """Generate a secure device code (128 characters).""" + alphabet = string.ascii_letters + string.digits + return ''.join(secrets.choice(alphabet) for _ in range(128)) + + def create_device_code( + self, + expires_in: int = 600, # 10 minutes default + max_attempts: int = 10, + ) -> DeviceCode: + """Create a new device code entry. + + Uses database constraints to ensure uniqueness, avoiding TOCTOU race conditions. + Retries on constraint violations until unique codes are generated. 
+ + Args: + expires_in: Expiration time in seconds + max_attempts: Maximum number of attempts to generate unique codes + + Returns: + The created DeviceCode instance + + Raises: + RuntimeError: If unable to generate unique codes after max_attempts + """ + for attempt in range(max_attempts): + user_code = self.generate_user_code() + device_code = self.generate_device_code() + expires_at = datetime.now(timezone.utc) + timedelta(seconds=expires_in) + + device_code_entry = DeviceCode( + device_code=device_code, + user_code=user_code, + keycloak_user_id=None, # Will be set during authorization + expires_at=expires_at, + ) + + try: + with self.session_maker() as session: + session.add(device_code_entry) + session.commit() + session.refresh(device_code_entry) + session.expunge(device_code_entry) # Detach from session cleanly + return device_code_entry + except IntegrityError: + # Constraint violation - codes already exist, retry with new codes + continue + + raise RuntimeError( + f'Failed to generate unique device codes after {max_attempts} attempts' + ) + + def get_by_device_code(self, device_code: str) -> DeviceCode | None: + """Get device code entry by device code.""" + with self.session_maker() as session: + result = ( + session.query(DeviceCode).filter_by(device_code=device_code).first() + ) + if result: + session.expunge(result) # Detach from session cleanly + return result + + def get_by_user_code(self, user_code: str) -> DeviceCode | None: + """Get device code entry by user code.""" + with self.session_maker() as session: + result = session.query(DeviceCode).filter_by(user_code=user_code).first() + if result: + session.expunge(result) # Detach from session cleanly + return result + + def authorize_device_code(self, user_code: str, user_id: str) -> bool: + """Authorize a device code. + + Args: + user_code: The user code to authorize + user_id: The user ID from Keycloak + + Returns: + True if authorization was successful, False otherwise + """ + with self.session_maker() as session: + device_code_entry = ( + session.query(DeviceCode).filter_by(user_code=user_code).first() + ) + + if not device_code_entry: + return False + + if not device_code_entry.is_pending(): + return False + + device_code_entry.authorize(user_id) + session.commit() + + return True + + def deny_device_code(self, user_code: str) -> bool: + """Deny a device code authorization. + + Args: + user_code: The user code to deny + + Returns: + True if denial was successful, False otherwise + """ + with self.session_maker() as session: + device_code_entry = ( + session.query(DeviceCode).filter_by(user_code=user_code).first() + ) + + if not device_code_entry: + return False + + if not device_code_entry.is_pending(): + return False + + device_code_entry.deny() + session.commit() + + return True + + def update_poll_time( + self, device_code: str, increase_interval: bool = False + ) -> bool: + """Update the poll time for a device code and optionally increase interval. 
+ + Args: + device_code: The device code to update + increase_interval: If True, increase the polling interval for slow_down + + Returns: + True if update was successful, False otherwise + """ + with self.session_maker() as session: + device_code_entry = ( + session.query(DeviceCode).filter_by(device_code=device_code).first() + ) + + if not device_code_entry: + return False + + device_code_entry.update_poll_time(increase_interval) + session.commit() + + return True diff --git a/enterprise/storage/saas_settings_store.py b/enterprise/storage/saas_settings_store.py index bf27c4aaa56d..fd64924263eb 100644 --- a/enterprise/storage/saas_settings_store.py +++ b/enterprise/storage/saas_settings_store.py @@ -94,9 +94,14 @@ async def load(self) -> Settings | None: } self._decrypt_kwargs(kwargs) settings = Settings(**kwargs) + return settings async def store(self, item: Settings): + # Check if provider is OpenHands and generate API key if needed + if item and self._is_openhands_provider(item): + await self._ensure_openhands_api_key(item) + with self.session_maker() as session: existing = None kwargs = {} @@ -368,6 +373,30 @@ def _fernet(self): def _should_encrypt(self, key: str) -> bool: return key in ('llm_api_key', 'llm_api_key_for_byor', 'search_api_key') + def _is_openhands_provider(self, item: Settings) -> bool: + """Check if the settings use the OpenHands provider.""" + return bool(item.llm_model and item.llm_model.startswith('openhands/')) + + async def _ensure_openhands_api_key(self, item: Settings) -> None: + """Generate and set the OpenHands API key for the given settings. + + First checks if an existing key with the OpenHands alias exists, + and reuses it if found. Otherwise, generates a new key. + """ + # Generate new key if none exists + generated_key = await self._generate_openhands_key() + if generated_key: + item.llm_api_key = SecretStr(generated_key) + logger.info( + 'saas_settings_store:store:generated_openhands_key', + extra={'user_id': self.user_id}, + ) + else: + logger.warning( + 'saas_settings_store:store:failed_to_generate_openhands_key', + extra={'user_id': self.user_id}, + ) + async def _create_user_in_lite_llm( self, client: httpx.AsyncClient, email: str | None, max_budget: int, spend: int ): @@ -390,3 +419,55 @@ async def _create_user_in_lite_llm( }, ) return response + + async def _generate_openhands_key(self) -> str | None: + """Generate a new OpenHands provider key for a user.""" + if not (LITE_LLM_API_KEY and LITE_LLM_API_URL): + logger.warning( + 'saas_settings_store:_generate_openhands_key:litellm_config_not_found', + extra={'user_id': self.user_id}, + ) + return None + + try: + async with httpx.AsyncClient( + verify=httpx_verify_option(), + headers={ + 'x-goog-api-key': LITE_LLM_API_KEY, + }, + ) as client: + response = await client.post( + f'{LITE_LLM_API_URL}/key/generate', + json={ + 'user_id': self.user_id, + 'metadata': {'type': 'openhands'}, + }, + ) + response.raise_for_status() + response_json = response.json() + key = response_json.get('key') + + if key: + logger.info( + 'saas_settings_store:_generate_openhands_key:success', + extra={ + 'user_id': self.user_id, + 'key_length': len(key) if key else 0, + 'key_prefix': ( + key[:10] + '...' 
if key and len(key) > 10 else key + ), + }, + ) + return key + else: + logger.error( + 'saas_settings_store:_generate_openhands_key:no_key_in_response', + extra={'user_id': self.user_id, 'response_json': response_json}, + ) + return None + except Exception as e: + logger.exception( + 'saas_settings_store:_generate_openhands_key:error', + extra={'user_id': self.user_id, 'error': str(e)}, + ) + return None diff --git a/enterprise/storage/user_settings.py b/enterprise/storage/user_settings.py index b84f644b7160..4d60d8b67631 100644 --- a/enterprise/storage/user_settings.py +++ b/enterprise/storage/user_settings.py @@ -38,3 +38,4 @@ class UserSettings(Base): # type: ignore email_verified = Column(Boolean, nullable=True) git_user_name = Column(String, nullable=True) git_user_email = Column(String, nullable=True) + v1_enabled = Column(Boolean, nullable=True) diff --git a/enterprise/tests/unit/conftest.py b/enterprise/tests/unit/conftest.py index 08516fd81305..873f7b775fb7 100644 --- a/enterprise/tests/unit/conftest.py +++ b/enterprise/tests/unit/conftest.py @@ -12,6 +12,7 @@ # Anything not loaded here may not have a table created for it. from storage.billing_session import BillingSession from storage.conversation_work import ConversationWork +from storage.device_code import DeviceCode # noqa: F401 from storage.feedback import Feedback from storage.github_app_installation import GithubAppInstallation from storage.maintenance_task import MaintenanceTask, MaintenanceTaskStatus diff --git a/enterprise/tests/unit/integrations/test_resolver_context.py b/enterprise/tests/unit/integrations/test_resolver_context.py new file mode 100644 index 000000000000..f1e5f814bac0 --- /dev/null +++ b/enterprise/tests/unit/integrations/test_resolver_context.py @@ -0,0 +1,133 @@ +"""Test for ResolverUserContext get_secrets conversion logic. + +This test focuses on testing the actual ResolverUserContext implementation. 
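+
+In particular, custom secrets returned by SaasUserAuth must be converted into
+SDK StaticSecret objects, which remain valid SecretSource instances.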
+""" + +from types import MappingProxyType +from unittest.mock import AsyncMock + +import pytest +from pydantic import SecretStr + +from enterprise.integrations.resolver_context import ResolverUserContext + +# Import the real classes we want to test +from openhands.integrations.provider import CustomSecret + +# Import the SDK types we need for testing +from openhands.sdk.secret import SecretSource, StaticSecret +from openhands.storage.data_models.secrets import Secrets + + +@pytest.fixture +def mock_saas_user_auth(): + """Mock SaasUserAuth for testing.""" + return AsyncMock() + + +@pytest.fixture +def resolver_context(mock_saas_user_auth): + """Create a ResolverUserContext instance for testing.""" + return ResolverUserContext(saas_user_auth=mock_saas_user_auth) + + +def create_custom_secret(value: str, description: str = 'Test secret') -> CustomSecret: + """Helper to create CustomSecret instances.""" + return CustomSecret(secret=SecretStr(value), description=description) + + +def create_secrets(custom_secrets_dict: dict[str, CustomSecret]) -> Secrets: + """Helper to create Secrets instances.""" + return Secrets(custom_secrets=MappingProxyType(custom_secrets_dict)) + + +@pytest.mark.asyncio +async def test_get_secrets_converts_custom_to_static( + resolver_context, mock_saas_user_auth +): + """Test that get_secrets correctly converts CustomSecret objects to StaticSecret objects.""" + # Arrange + secrets = create_secrets( + { + 'TEST_SECRET_1': create_custom_secret('secret_value_1'), + 'TEST_SECRET_2': create_custom_secret('secret_value_2'), + } + ) + mock_saas_user_auth.get_secrets.return_value = secrets + + # Act + result = await resolver_context.get_secrets() + + # Assert + assert len(result) == 2 + assert all(isinstance(secret, StaticSecret) for secret in result.values()) + assert result['TEST_SECRET_1'].value.get_secret_value() == 'secret_value_1' + assert result['TEST_SECRET_2'].value.get_secret_value() == 'secret_value_2' + + +@pytest.mark.asyncio +async def test_get_secrets_with_special_characters( + resolver_context, mock_saas_user_auth +): + """Test that secret values with special characters are preserved during conversion.""" + # Arrange + special_value = 'very_secret_password_123!@#$%^&*()' + secrets = create_secrets({'SPECIAL_SECRET': create_custom_secret(special_value)}) + mock_saas_user_auth.get_secrets.return_value = secrets + + # Act + result = await resolver_context.get_secrets() + + # Assert + assert len(result) == 1 + assert isinstance(result['SPECIAL_SECRET'], StaticSecret) + assert result['SPECIAL_SECRET'].value.get_secret_value() == special_value + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + 'secrets_input,expected_result', + [ + (None, {}), # No secrets available + (create_secrets({}), {}), # Empty custom secrets + ], +) +async def test_get_secrets_empty_cases( + resolver_context, mock_saas_user_auth, secrets_input, expected_result +): + """Test that get_secrets handles empty cases correctly.""" + # Arrange + mock_saas_user_auth.get_secrets.return_value = secrets_input + + # Act + result = await resolver_context.get_secrets() + + # Assert + assert result == expected_result + + +def test_static_secret_is_valid_secret_source(): + """Test that StaticSecret is a valid SecretSource for SDK validation.""" + # Arrange & Act + static_secret = StaticSecret(value='test_secret_123') + + # Assert + assert isinstance(static_secret, StaticSecret) + assert isinstance(static_secret, SecretSource) + assert static_secret.value.get_secret_value() == 'test_secret_123' + + +def 
test_custom_to_static_conversion(): + """Test the complete conversion flow from CustomSecret to StaticSecret.""" + # Arrange + secret_value = 'conversion_test_secret' + custom_secret = create_custom_secret(secret_value, 'Conversion test') + + # Act - simulate the conversion logic from the actual method + extracted_value = custom_secret.secret.get_secret_value() + static_secret = StaticSecret(value=extracted_value) + + # Assert + assert isinstance(static_secret, StaticSecret) + assert isinstance(static_secret, SecretSource) + assert static_secret.value.get_secret_value() == secret_value diff --git a/enterprise/tests/unit/server/routes/test_oauth_device.py b/enterprise/tests/unit/server/routes/test_oauth_device.py new file mode 100644 index 000000000000..53682e65f02f --- /dev/null +++ b/enterprise/tests/unit/server/routes/test_oauth_device.py @@ -0,0 +1,610 @@ +"""Unit tests for OAuth2 Device Flow endpoints.""" + +from datetime import UTC, datetime, timedelta +from unittest.mock import MagicMock, patch + +import pytest +from fastapi import HTTPException, Request +from fastapi.responses import JSONResponse +from server.routes.oauth_device import ( + device_authorization, + device_token, + device_verification_authenticated, +) +from storage.device_code import DeviceCode + + +@pytest.fixture +def mock_device_code_store(): + """Mock device code store.""" + return MagicMock() + + +@pytest.fixture +def mock_api_key_store(): + """Mock API key store.""" + return MagicMock() + + +@pytest.fixture +def mock_token_manager(): + """Mock token manager.""" + return MagicMock() + + +@pytest.fixture +def mock_request(): + """Mock FastAPI request.""" + request = MagicMock(spec=Request) + request.base_url = 'https://test.example.com/' + return request + + +class TestDeviceAuthorization: + """Test device authorization endpoint.""" + + @patch('server.routes.oauth_device.device_code_store') + async def test_device_authorization_success(self, mock_store, mock_request): + """Test successful device authorization.""" + mock_device = DeviceCode( + device_code='test-device-code-123', + user_code='ABC12345', + expires_at=datetime.now(UTC) + timedelta(minutes=10), + current_interval=5, # Default interval + ) + mock_store.create_device_code.return_value = mock_device + + result = await device_authorization(mock_request) + + assert result.device_code == 'test-device-code-123' + assert result.user_code == 'ABC12345' + assert result.expires_in == 600 + assert result.interval == 5 # Should match device's current_interval + assert 'verify' in result.verification_uri + assert 'ABC12345' in result.verification_uri_complete + + @patch('server.routes.oauth_device.device_code_store') + async def test_device_authorization_with_increased_interval( + self, mock_store, mock_request + ): + """Test device authorization returns increased interval from rate limiting.""" + mock_device = DeviceCode( + device_code='test-device-code-456', + user_code='XYZ98765', + expires_at=datetime.now(UTC) + timedelta(minutes=10), + current_interval=15, # Increased interval from previous rate limiting + ) + mock_store.create_device_code.return_value = mock_device + + result = await device_authorization(mock_request) + + assert result.device_code == 'test-device-code-456' + assert result.user_code == 'XYZ98765' + assert result.expires_in == 600 + assert result.interval == 15 # Should match device's increased current_interval + assert 'verify' in result.verification_uri + assert 'XYZ98765' in result.verification_uri_complete + + +class TestDeviceToken: + 
"""Test device token endpoint.""" + + @pytest.mark.parametrize( + 'device_exists,status,expected_error', + [ + (False, None, 'invalid_grant'), + (True, 'expired', 'expired_token'), + (True, 'denied', 'access_denied'), + (True, 'pending', 'authorization_pending'), + ], + ) + @patch('server.routes.oauth_device.device_code_store') + async def test_device_token_error_cases( + self, mock_store, device_exists, status, expected_error + ): + """Test various error cases for device token endpoint.""" + device_code = 'test-device-code' + + if device_exists: + mock_device = MagicMock() + mock_device.is_expired.return_value = status == 'expired' + mock_device.status = status + # Mock rate limiting - return False (not too fast) and default interval + mock_device.check_rate_limit.return_value = (False, 5) + mock_store.get_by_device_code.return_value = mock_device + mock_store.update_poll_time.return_value = True + else: + mock_store.get_by_device_code.return_value = None + + result = await device_token(device_code=device_code) + + assert isinstance(result, JSONResponse) + assert result.status_code == 400 + # Check error in response content + content = result.body.decode() + assert expected_error in content + + @patch('server.routes.oauth_device.ApiKeyStore') + @patch('server.routes.oauth_device.device_code_store') + async def test_device_token_success(self, mock_store, mock_api_key_class): + """Test successful device token retrieval.""" + device_code = 'test-device-code' + + # Mock authorized device + mock_device = MagicMock() + mock_device.is_expired.return_value = False + mock_device.status = 'authorized' + mock_device.keycloak_user_id = 'user-123' + mock_device.user_code = ( + 'ABC12345' # Add user_code for device-specific API key lookup + ) + # Mock rate limiting - return False (not too fast) and default interval + mock_device.check_rate_limit.return_value = (False, 5) + mock_store.get_by_device_code.return_value = mock_device + mock_store.update_poll_time.return_value = True + + # Mock API key retrieval + mock_api_key_store = MagicMock() + mock_api_key_store.retrieve_api_key_by_name.return_value = 'test-api-key' + mock_api_key_class.get_instance.return_value = mock_api_key_store + + result = await device_token(device_code=device_code) + + # Check that result is a DeviceTokenResponse + assert result.access_token == 'test-api-key' + assert result.token_type == 'Bearer' + + # Verify that the correct device-specific API key name was used + mock_api_key_store.retrieve_api_key_by_name.assert_called_once_with( + 'user-123', 'Device Link Access Key (ABC12345)' + ) + + +class TestDeviceVerificationAuthenticated: + """Test device verification authenticated endpoint.""" + + async def test_verification_unauthenticated_user(self): + """Test verification with unauthenticated user.""" + with pytest.raises(HTTPException): + await device_verification_authenticated(user_code='ABC12345', user_id=None) + + @patch('server.routes.oauth_device.ApiKeyStore') + @patch('server.routes.oauth_device.device_code_store') + async def test_verification_invalid_device_code( + self, mock_store, mock_api_key_class + ): + """Test verification with invalid device code.""" + mock_store.get_by_user_code.return_value = None + + with pytest.raises(HTTPException): + await device_verification_authenticated( + user_code='INVALID', user_id='user-123' + ) + + @patch('server.routes.oauth_device.ApiKeyStore') + @patch('server.routes.oauth_device.device_code_store') + async def test_verification_already_processed(self, mock_store, 
mock_api_key_class): + """Test verification with already processed device code.""" + mock_device = MagicMock() + mock_device.is_pending.return_value = False + mock_store.get_by_user_code.return_value = mock_device + + with pytest.raises(HTTPException): + await device_verification_authenticated( + user_code='ABC12345', user_id='user-123' + ) + + @patch('server.routes.oauth_device.ApiKeyStore') + @patch('server.routes.oauth_device.device_code_store') + async def test_verification_success(self, mock_store, mock_api_key_class): + """Test successful device verification.""" + # Mock device code + mock_device = MagicMock() + mock_device.is_pending.return_value = True + mock_store.get_by_user_code.return_value = mock_device + mock_store.authorize_device_code.return_value = True + + # Mock API key store + mock_api_key_store = MagicMock() + mock_api_key_class.get_instance.return_value = mock_api_key_store + + result = await device_verification_authenticated( + user_code='ABC12345', user_id='user-123' + ) + + assert isinstance(result, JSONResponse) + assert result.status_code == 200 + # Should NOT delete existing API keys (multiple devices allowed) + mock_api_key_store.delete_api_key_by_name.assert_not_called() + # Should create a new API key with device-specific name + mock_api_key_store.create_api_key.assert_called_once() + call_args = mock_api_key_store.create_api_key.call_args + assert call_args[1]['name'] == 'Device Link Access Key (ABC12345)' + mock_store.authorize_device_code.assert_called_once_with( + user_code='ABC12345', user_id='user-123' + ) + + @patch('server.routes.oauth_device.ApiKeyStore') + @patch('server.routes.oauth_device.device_code_store') + async def test_multiple_device_authentication(self, mock_store, mock_api_key_class): + """Test that multiple devices can authenticate simultaneously.""" + # Mock API key store + mock_api_key_store = MagicMock() + mock_api_key_class.get_instance.return_value = mock_api_key_store + + # Simulate two different devices + device1_code = 'ABC12345' + device2_code = 'XYZ67890' + user_id = 'user-123' + + # Mock device codes + mock_device1 = MagicMock() + mock_device1.is_pending.return_value = True + mock_device2 = MagicMock() + mock_device2.is_pending.return_value = True + + # Configure mock store to return appropriate device for each user_code + def get_by_user_code_side_effect(user_code): + if user_code == device1_code: + return mock_device1 + elif user_code == device2_code: + return mock_device2 + return None + + mock_store.get_by_user_code.side_effect = get_by_user_code_side_effect + mock_store.authorize_device_code.return_value = True + + # Authenticate first device + result1 = await device_verification_authenticated( + user_code=device1_code, user_id=user_id + ) + + # Authenticate second device + result2 = await device_verification_authenticated( + user_code=device2_code, user_id=user_id + ) + + # Both should succeed + assert isinstance(result1, JSONResponse) + assert result1.status_code == 200 + assert isinstance(result2, JSONResponse) + assert result2.status_code == 200 + + # Should create two separate API keys with different names + assert mock_api_key_store.create_api_key.call_count == 2 + + # Check that each device got a unique API key name + call_args_list = mock_api_key_store.create_api_key.call_args_list + device1_name = call_args_list[0][1]['name'] + device2_name = call_args_list[1][1]['name'] + + assert device1_name == f'Device Link Access Key ({device1_code})' + assert device2_name == f'Device Link Access Key ({device2_code})' + 
assert device1_name != device2_name # Ensure they're different + + # Should NOT delete any existing API keys + mock_api_key_store.delete_api_key_by_name.assert_not_called() + + +class TestDeviceTokenRateLimiting: + """Test rate limiting for device token polling (RFC 8628 section 3.5).""" + + @patch('server.routes.oauth_device.device_code_store') + async def test_first_poll_allowed(self, mock_store): + """Test that the first poll is always allowed.""" + # Create a device code with no previous poll time + mock_device = DeviceCode( + device_code='test_device_code', + user_code='ABC123', + status='pending', + expires_at=datetime.now(UTC) + timedelta(minutes=10), + last_poll_time=None, # First poll + current_interval=5, + ) + mock_store.get_by_device_code.return_value = mock_device + mock_store.update_poll_time.return_value = True + + device_code = 'test_device_code' + result = await device_token(device_code=device_code) + + # Should return authorization_pending, not slow_down + assert isinstance(result, JSONResponse) + assert result.status_code == 400 + content = result.body.decode() + assert 'authorization_pending' in content + assert 'slow_down' not in content + + # Should update poll time without increasing interval + mock_store.update_poll_time.assert_called_with( + 'test_device_code', increase_interval=False + ) + + @patch('server.routes.oauth_device.device_code_store') + async def test_normal_polling_allowed(self, mock_store): + """Test that normal polling (respecting interval) is allowed.""" + # Create a device code with last poll time 6 seconds ago (interval is 5) + last_poll = datetime.now(UTC) - timedelta(seconds=6) + mock_device = DeviceCode( + device_code='test_device_code', + user_code='ABC123', + status='pending', + expires_at=datetime.now(UTC) + timedelta(minutes=10), + last_poll_time=last_poll, + current_interval=5, + ) + mock_store.get_by_device_code.return_value = mock_device + mock_store.update_poll_time.return_value = True + + device_code = 'test_device_code' + result = await device_token(device_code=device_code) + + # Should return authorization_pending, not slow_down + assert isinstance(result, JSONResponse) + assert result.status_code == 400 + content = result.body.decode() + assert 'authorization_pending' in content + assert 'slow_down' not in content + + # Should update poll time without increasing interval + mock_store.update_poll_time.assert_called_with( + 'test_device_code', increase_interval=False + ) + + @patch('server.routes.oauth_device.device_code_store') + async def test_fast_polling_returns_slow_down(self, mock_store): + """Test that polling too fast returns slow_down error.""" + # Create a device code with last poll time 2 seconds ago (interval is 5) + last_poll = datetime.now(UTC) - timedelta(seconds=2) + mock_device = DeviceCode( + device_code='test_device_code', + user_code='ABC123', + status='pending', + expires_at=datetime.now(UTC) + timedelta(minutes=10), + last_poll_time=last_poll, + current_interval=5, + ) + mock_store.get_by_device_code.return_value = mock_device + mock_store.update_poll_time.return_value = True + + device_code = 'test_device_code' + result = await device_token(device_code=device_code) + + # Should return slow_down error + assert isinstance(result, JSONResponse) + assert result.status_code == 400 + content = result.body.decode() + assert 'slow_down' in content + assert 'interval' in content + assert '10' in content # New interval should be 5 + 5 = 10 + + # Should update poll time and increase interval + 
mock_store.update_poll_time.assert_called_with( + 'test_device_code', increase_interval=True + ) + + @patch('server.routes.oauth_device.device_code_store') + async def test_interval_increases_with_repeated_fast_polling(self, mock_store): + """Test that interval increases with repeated fast polling.""" + # Create a device code with higher current interval from previous slow_down + last_poll = datetime.now(UTC) - timedelta(seconds=5) # 5 seconds ago + mock_device = DeviceCode( + device_code='test_device_code', + user_code='ABC123', + status='pending', + expires_at=datetime.now(UTC) + timedelta(minutes=10), + last_poll_time=last_poll, + current_interval=15, # Already increased from previous slow_down + ) + mock_store.get_by_device_code.return_value = mock_device + mock_store.update_poll_time.return_value = True + + device_code = 'test_device_code' + result = await device_token(device_code=device_code) + + # Should return slow_down error with increased interval + assert isinstance(result, JSONResponse) + assert result.status_code == 400 + content = result.body.decode() + assert 'slow_down' in content + assert '20' in content # New interval should be 15 + 5 = 20 + + # Should update poll time and increase interval + mock_store.update_poll_time.assert_called_with( + 'test_device_code', increase_interval=True + ) + + @patch('server.routes.oauth_device.device_code_store') + async def test_interval_caps_at_maximum(self, mock_store): + """Test that interval is capped at maximum value.""" + # Create a device code with interval near maximum + last_poll = datetime.now(UTC) - timedelta(seconds=30) + mock_device = DeviceCode( + device_code='test_device_code', + user_code='ABC123', + status='pending', + expires_at=datetime.now(UTC) + timedelta(minutes=10), + last_poll_time=last_poll, + current_interval=58, # Near maximum of 60 + ) + mock_store.get_by_device_code.return_value = mock_device + mock_store.update_poll_time.return_value = True + + device_code = 'test_device_code' + result = await device_token(device_code=device_code) + + # Should return slow_down error with capped interval + assert isinstance(result, JSONResponse) + assert result.status_code == 400 + content = result.body.decode() + assert 'slow_down' in content + assert '60' in content # Should be capped at 60, not 63 + + @patch('server.routes.oauth_device.device_code_store') + async def test_rate_limiting_with_authorized_device(self, mock_store): + """Test that rate limiting still applies to authorized devices.""" + # Create an authorized device code with recent poll + last_poll = datetime.now(UTC) - timedelta(seconds=2) + mock_device = DeviceCode( + device_code='test_device_code', + user_code='ABC123', + status='authorized', # Device is authorized + keycloak_user_id='user123', + expires_at=datetime.now(UTC) + timedelta(minutes=10), + last_poll_time=last_poll, + current_interval=5, + ) + mock_store.get_by_device_code.return_value = mock_device + mock_store.update_poll_time.return_value = True + + device_code = 'test_device_code' + result = await device_token(device_code=device_code) + + # Should still return slow_down error even for authorized device + assert isinstance(result, JSONResponse) + assert result.status_code == 400 + content = result.body.decode() + assert 'slow_down' in content + + # Should update poll time and increase interval + mock_store.update_poll_time.assert_called_with( + 'test_device_code', increase_interval=True + ) + + +class TestDeviceVerificationTransactionIntegrity: + """Test transaction integrity for device 
verification to prevent orphaned API keys.""" + + @patch('server.routes.oauth_device.ApiKeyStore') + @patch('server.routes.oauth_device.device_code_store') + async def test_authorization_failure_prevents_api_key_creation( + self, mock_store, mock_api_key_class + ): + """Test that if device authorization fails, no API key is created.""" + # Mock device code + mock_device = MagicMock() + mock_device.is_pending.return_value = True + mock_store.get_by_user_code.return_value = mock_device + mock_store.authorize_device_code.return_value = False # Authorization fails + + # Mock API key store + mock_api_key_store = MagicMock() + mock_api_key_class.get_instance.return_value = mock_api_key_store + + # Should raise HTTPException due to authorization failure + with pytest.raises(HTTPException) as exc_info: + await device_verification_authenticated( + user_code='ABC12345', user_id='user-123' + ) + + assert exc_info.value.status_code == 500 + assert 'Failed to authorize the device' in exc_info.value.detail + + # API key should NOT be created since authorization failed + mock_api_key_store.create_api_key.assert_not_called() + mock_store.authorize_device_code.assert_called_once_with( + user_code='ABC12345', user_id='user-123' + ) + + @patch('server.routes.oauth_device.ApiKeyStore') + @patch('server.routes.oauth_device.device_code_store') + async def test_api_key_creation_failure_reverts_authorization( + self, mock_store, mock_api_key_class + ): + """Test that if API key creation fails after authorization, the authorization is reverted.""" + # Mock device code + mock_device = MagicMock() + mock_device.is_pending.return_value = True + mock_store.get_by_user_code.return_value = mock_device + mock_store.authorize_device_code.return_value = True # Authorization succeeds + mock_store.deny_device_code.return_value = True # Cleanup succeeds + + # Mock API key store to fail on creation + mock_api_key_store = MagicMock() + mock_api_key_store.create_api_key.side_effect = Exception('Database error') + mock_api_key_class.get_instance.return_value = mock_api_key_store + + # Should raise HTTPException due to API key creation failure + with pytest.raises(HTTPException) as exc_info: + await device_verification_authenticated( + user_code='ABC12345', user_id='user-123' + ) + + assert exc_info.value.status_code == 500 + assert 'Failed to create API key for device access' in exc_info.value.detail + + # Authorization should have been attempted first + mock_store.authorize_device_code.assert_called_once_with( + user_code='ABC12345', user_id='user-123' + ) + + # API key creation should have been attempted after authorization + mock_api_key_store.create_api_key.assert_called_once() + + # Authorization should be reverted due to API key creation failure + mock_store.deny_device_code.assert_called_once_with('ABC12345') + + @patch('server.routes.oauth_device.ApiKeyStore') + @patch('server.routes.oauth_device.device_code_store') + async def test_api_key_creation_failure_cleanup_failure_logged( + self, mock_store, mock_api_key_class + ): + """Test that cleanup failure is logged but doesn't prevent the main error from being raised.""" + # Mock device code + mock_device = MagicMock() + mock_device.is_pending.return_value = True + mock_store.get_by_user_code.return_value = mock_device + mock_store.authorize_device_code.return_value = True # Authorization succeeds + mock_store.deny_device_code.side_effect = Exception( + 'Cleanup failed' + ) # Cleanup fails + + # Mock API key store to fail on creation + mock_api_key_store = MagicMock() + 
mock_api_key_store.create_api_key.side_effect = Exception('Database error') + mock_api_key_class.get_instance.return_value = mock_api_key_store + + # Should still raise HTTPException for the original API key creation failure + with pytest.raises(HTTPException) as exc_info: + await device_verification_authenticated( + user_code='ABC12345', user_id='user-123' + ) + + assert exc_info.value.status_code == 500 + assert 'Failed to create API key for device access' in exc_info.value.detail + + # Both operations should have been attempted + mock_store.authorize_device_code.assert_called_once() + mock_api_key_store.create_api_key.assert_called_once() + mock_store.deny_device_code.assert_called_once_with('ABC12345') + + @patch('server.routes.oauth_device.ApiKeyStore') + @patch('server.routes.oauth_device.device_code_store') + async def test_successful_flow_creates_api_key_after_authorization( + self, mock_store, mock_api_key_class + ): + """Test that in the successful flow, API key is created only after authorization.""" + # Mock device code + mock_device = MagicMock() + mock_device.is_pending.return_value = True + mock_store.get_by_user_code.return_value = mock_device + mock_store.authorize_device_code.return_value = True # Authorization succeeds + + # Mock API key store + mock_api_key_store = MagicMock() + mock_api_key_class.get_instance.return_value = mock_api_key_store + + result = await device_verification_authenticated( + user_code='ABC12345', user_id='user-123' + ) + + assert isinstance(result, JSONResponse) + assert result.status_code == 200 + + # Verify the order: authorization first, then API key creation + mock_store.authorize_device_code.assert_called_once_with( + user_code='ABC12345', user_id='user-123' + ) + mock_api_key_store.create_api_key.assert_called_once() + + # No cleanup should be needed in successful case + mock_store.deny_device_code.assert_not_called() diff --git a/enterprise/tests/unit/storage/test_device_code.py b/enterprise/tests/unit/storage/test_device_code.py new file mode 100644 index 000000000000..0d2193075ba2 --- /dev/null +++ b/enterprise/tests/unit/storage/test_device_code.py @@ -0,0 +1,83 @@ +"""Unit tests for DeviceCode model.""" + +from datetime import datetime, timedelta, timezone + +import pytest +from storage.device_code import DeviceCode, DeviceCodeStatus + + +class TestDeviceCode: + """Test cases for DeviceCode model.""" + + @pytest.fixture + def device_code(self): + """Create a test device code.""" + return DeviceCode( + device_code='test-device-code-123', + user_code='ABC12345', + expires_at=datetime.now(timezone.utc) + timedelta(minutes=10), + ) + + @pytest.mark.parametrize( + 'expires_delta,expected', + [ + (timedelta(minutes=5), False), # Future expiry + (timedelta(minutes=-5), True), # Past expiry + (timedelta(seconds=1), False), # Just future (not expired) + ], + ) + def test_is_expired(self, expires_delta, expected): + """Test expiration check with various time deltas.""" + device_code = DeviceCode( + device_code='test-device-code', + user_code='ABC12345', + expires_at=datetime.now(timezone.utc) + expires_delta, + ) + assert device_code.is_expired() == expected + + @pytest.mark.parametrize( + 'status,expired,expected', + [ + (DeviceCodeStatus.PENDING.value, False, True), + (DeviceCodeStatus.PENDING.value, True, False), + (DeviceCodeStatus.AUTHORIZED.value, False, False), + (DeviceCodeStatus.DENIED.value, False, False), + ], + ) + def test_is_pending(self, status, expired, expected): + """Test pending status check.""" + expires_at = ( + 
datetime.now(timezone.utc) - timedelta(minutes=1) + if expired + else datetime.now(timezone.utc) + timedelta(minutes=10) + ) + device_code = DeviceCode( + device_code='test-device-code', + user_code='ABC12345', + status=status, + expires_at=expires_at, + ) + assert device_code.is_pending() == expected + + def test_authorize(self, device_code): + """Test device authorization.""" + user_id = 'test-user-123' + + device_code.authorize(user_id) + + assert device_code.status == DeviceCodeStatus.AUTHORIZED.value + assert device_code.keycloak_user_id == user_id + assert device_code.authorized_at is not None + assert isinstance(device_code.authorized_at, datetime) + + @pytest.mark.parametrize( + 'method,expected_status', + [ + ('deny', DeviceCodeStatus.DENIED.value), + ('expire', DeviceCodeStatus.EXPIRED.value), + ], + ) + def test_status_changes(self, device_code, method, expected_status): + """Test status change methods.""" + getattr(device_code, method)() + assert device_code.status == expected_status diff --git a/enterprise/tests/unit/storage/test_device_code_store.py b/enterprise/tests/unit/storage/test_device_code_store.py new file mode 100644 index 000000000000..65a58cda8a17 --- /dev/null +++ b/enterprise/tests/unit/storage/test_device_code_store.py @@ -0,0 +1,193 @@ +"""Unit tests for DeviceCodeStore.""" + +from unittest.mock import MagicMock + +import pytest +from sqlalchemy.exc import IntegrityError +from storage.device_code import DeviceCode +from storage.device_code_store import DeviceCodeStore + + +@pytest.fixture +def mock_session(): + """Mock database session.""" + session = MagicMock() + return session + + +@pytest.fixture +def mock_session_maker(mock_session): + """Mock session maker.""" + session_maker = MagicMock() + session_maker.return_value.__enter__.return_value = mock_session + session_maker.return_value.__exit__.return_value = None + return session_maker + + +@pytest.fixture +def device_code_store(mock_session_maker): + """Create DeviceCodeStore instance.""" + return DeviceCodeStore(mock_session_maker) + + +class TestDeviceCodeStore: + """Test cases for DeviceCodeStore.""" + + def test_generate_user_code(self, device_code_store): + """Test user code generation.""" + code = device_code_store.generate_user_code() + + assert len(code) == 8 + assert code.isupper() + # Should not contain confusing characters + assert not any(char in code for char in 'IO01') + + def test_generate_device_code(self, device_code_store): + """Test device code generation.""" + code = device_code_store.generate_device_code() + + assert len(code) == 128 + assert code.isalnum() + + def test_create_device_code_success(self, device_code_store, mock_session): + """Test successful device code creation.""" + # Mock successful creation (no IntegrityError) + mock_device_code = MagicMock(spec=DeviceCode) + mock_device_code.device_code = 'test-device-code-123' + mock_device_code.user_code = 'TESTCODE' + + # Mock the session to return our mock device code after refresh + def mock_refresh(obj): + obj.device_code = mock_device_code.device_code + obj.user_code = mock_device_code.user_code + + mock_session.refresh.side_effect = mock_refresh + + result = device_code_store.create_device_code(expires_in=600) + + assert isinstance(result, DeviceCode) + mock_session.add.assert_called_once() + mock_session.commit.assert_called_once() + mock_session.refresh.assert_called_once() + mock_session.expunge.assert_called_once() + + def test_create_device_code_with_retries( + self, device_code_store, mock_session_maker + ): + 
"""Test device code creation with constraint violation retries.""" + mock_session = MagicMock() + mock_session_maker.return_value.__enter__.return_value = mock_session + mock_session_maker.return_value.__exit__.return_value = None + + # First attempt fails with IntegrityError, second succeeds + mock_session.commit.side_effect = [IntegrityError('', '', ''), None] + + mock_device_code = MagicMock(spec=DeviceCode) + mock_device_code.device_code = 'test-device-code-456' + mock_device_code.user_code = 'TESTCD2' + + def mock_refresh(obj): + obj.device_code = mock_device_code.device_code + obj.user_code = mock_device_code.user_code + + mock_session.refresh.side_effect = mock_refresh + + store = DeviceCodeStore(mock_session_maker) + result = store.create_device_code(expires_in=600) + + assert isinstance(result, DeviceCode) + assert mock_session.add.call_count == 2 # Two attempts + assert mock_session.commit.call_count == 2 # Two attempts + + def test_create_device_code_max_attempts_exceeded( + self, device_code_store, mock_session_maker + ): + """Test device code creation failure after max attempts.""" + mock_session = MagicMock() + mock_session_maker.return_value.__enter__.return_value = mock_session + mock_session_maker.return_value.__exit__.return_value = None + + # All attempts fail with IntegrityError + mock_session.commit.side_effect = IntegrityError('', '', '') + + store = DeviceCodeStore(mock_session_maker) + + with pytest.raises( + RuntimeError, + match='Failed to generate unique device codes after 3 attempts', + ): + store.create_device_code(expires_in=600, max_attempts=3) + + @pytest.mark.parametrize( + 'lookup_method,lookup_field', + [ + ('get_by_device_code', 'device_code'), + ('get_by_user_code', 'user_code'), + ], + ) + def test_lookup_methods( + self, device_code_store, mock_session, lookup_method, lookup_field + ): + """Test device code lookup methods.""" + test_code = 'test-code-123' + mock_device_code = MagicMock() + mock_session.query.return_value.filter_by.return_value.first.return_value = ( + mock_device_code + ) + + result = getattr(device_code_store, lookup_method)(test_code) + + assert result == mock_device_code + mock_session.query.assert_called_once_with(DeviceCode) + mock_session.query.return_value.filter_by.assert_called_once_with( + **{lookup_field: test_code} + ) + + @pytest.mark.parametrize( + 'device_exists,is_pending,expected_result', + [ + (True, True, True), # Success case + (False, True, False), # Device not found + (True, False, False), # Device not pending + ], + ) + def test_authorize_device_code( + self, + device_code_store, + mock_session, + device_exists, + is_pending, + expected_result, + ): + """Test device code authorization.""" + user_code = 'ABC12345' + user_id = 'test-user-123' + + if device_exists: + mock_device = MagicMock() + mock_device.is_pending.return_value = is_pending + mock_session.query.return_value.filter_by.return_value.first.return_value = mock_device + else: + mock_session.query.return_value.filter_by.return_value.first.return_value = None + + result = device_code_store.authorize_device_code(user_code, user_id) + + assert result == expected_result + if expected_result: + mock_device.authorize.assert_called_once_with(user_id) + mock_session.commit.assert_called_once() + + def test_deny_device_code(self, device_code_store, mock_session): + """Test device code denial.""" + user_code = 'ABC12345' + mock_device = MagicMock() + mock_device.is_pending.return_value = True + 
mock_session.query.return_value.filter_by.return_value.first.return_value = ( + mock_device + ) + + result = device_code_store.deny_device_code(user_code) + + assert result is True + mock_device.deny.assert_called_once() + mock_session.commit.assert_called_once() diff --git a/enterprise/tests/unit/test_api_key_store.py b/enterprise/tests/unit/test_api_key_store.py index ea386cb69c4e..df0481937df1 100644 --- a/enterprise/tests/unit/test_api_key_store.py +++ b/enterprise/tests/unit/test_api_key_store.py @@ -25,10 +25,12 @@ def api_key_store(mock_session_maker): def test_generate_api_key(api_key_store): - """Test that generate_api_key returns a string of the expected length.""" + """Test that generate_api_key returns a string with sk-oh- prefix and expected length.""" key = api_key_store.generate_api_key(length=32) assert isinstance(key, str) - assert len(key) == 32 + assert key.startswith('sk-oh-') + # Total length should be prefix (6 chars) + random part (32 chars) = 38 chars + assert len(key) == len('sk-oh-') + 32 def test_create_api_key(api_key_store, mock_session): @@ -90,6 +92,50 @@ def test_validate_api_key_expired(api_key_store, mock_session): mock_session.commit.assert_not_called() +def test_validate_api_key_expired_timezone_naive(api_key_store, mock_session): + """Test validating an expired API key with timezone-naive datetime from database.""" + # Setup + api_key = 'test-api-key' + mock_key_record = MagicMock() + # Simulate timezone-naive datetime as returned from database + mock_key_record.expires_at = datetime.now() - timedelta(days=1) # No UTC timezone + mock_key_record.id = 1 + mock_session.query.return_value.filter.return_value.first.return_value = ( + mock_key_record + ) + + # Execute + result = api_key_store.validate_api_key(api_key) + + # Verify + assert result is None + mock_session.execute.assert_not_called() + mock_session.commit.assert_not_called() + + +def test_validate_api_key_valid_timezone_naive(api_key_store, mock_session): + """Test validating a valid API key with timezone-naive datetime from database.""" + # Setup + api_key = 'test-api-key' + user_id = 'test-user-123' + mock_key_record = MagicMock() + mock_key_record.user_id = user_id + # Simulate timezone-naive datetime as returned from database (future date) + mock_key_record.expires_at = datetime.now() + timedelta(days=1) # No UTC timezone + mock_key_record.id = 1 + mock_session.query.return_value.filter.return_value.first.return_value = ( + mock_key_record + ) + + # Execute + result = api_key_store.validate_api_key(api_key) + + # Verify + assert result == user_id + mock_session.execute.assert_called_once() + mock_session.commit.assert_called_once() + + def test_validate_api_key_not_found(api_key_store, mock_session): """Test validating a non-existent API key.""" # Setup diff --git a/enterprise/tests/unit/test_get_user_v1_enabled_setting.py b/enterprise/tests/unit/test_get_user_v1_enabled_setting.py new file mode 100644 index 000000000000..bbed7b8ba06e --- /dev/null +++ b/enterprise/tests/unit/test_get_user_v1_enabled_setting.py @@ -0,0 +1,132 @@ +"""Unit tests for get_user_v1_enabled_setting function.""" + +import os +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from integrations.github.github_view import get_user_v1_enabled_setting + + +@pytest.fixture +def mock_user_settings(): + """Create a mock user settings object.""" + settings = MagicMock() + settings.v1_enabled = True # Default to True, can be overridden in tests + return settings + + +@pytest.fixture +def 
mock_settings_store(mock_user_settings): + """Create a mock settings store.""" + store = MagicMock() + store.get_user_settings_by_keycloak_id = AsyncMock(return_value=mock_user_settings) + return store + + +@pytest.fixture +def mock_config(): + """Create a mock config object.""" + return MagicMock() + + +@pytest.fixture +def mock_session_maker(): + """Create a mock session maker.""" + return MagicMock() + + +@pytest.fixture +def mock_dependencies( + mock_settings_store, mock_config, mock_session_maker, mock_user_settings +): + """Fixture that patches all the common dependencies.""" + with patch( + 'integrations.github.github_view.SaasSettingsStore', + return_value=mock_settings_store, + ) as mock_store_class, patch( + 'integrations.github.github_view.get_config', return_value=mock_config + ) as mock_get_config, patch( + 'integrations.github.github_view.session_maker', mock_session_maker + ), patch( + 'integrations.github.github_view.call_sync_from_async', + return_value=mock_user_settings, + ) as mock_call_sync: + yield { + 'store_class': mock_store_class, + 'get_config': mock_get_config, + 'session_maker': mock_session_maker, + 'call_sync': mock_call_sync, + 'settings_store': mock_settings_store, + 'user_settings': mock_user_settings, + } + + +class TestGetUserV1EnabledSetting: + """Test cases for get_user_v1_enabled_setting function.""" + + @pytest.mark.asyncio + @pytest.mark.parametrize( + 'env_var_enabled,user_setting_enabled,expected_result', + [ + (False, True, False), # Env var disabled, user enabled -> False + (True, False, False), # Env var enabled, user disabled -> False + (True, True, True), # Both enabled -> True + (False, False, False), # Both disabled -> False + ], + ) + async def test_v1_enabled_combinations( + self, mock_dependencies, env_var_enabled, user_setting_enabled, expected_result + ): + """Test all combinations of environment variable and user setting values.""" + mock_dependencies['user_settings'].v1_enabled = user_setting_enabled + + with patch( + 'integrations.github.github_view.ENABLE_V1_GITHUB_RESOLVER', env_var_enabled + ): + result = await get_user_v1_enabled_setting('test_user_id') + assert result is expected_result + + @pytest.mark.asyncio + @pytest.mark.parametrize( + 'env_var_value,env_var_bool,expected_result', + [ + ('false', False, False), # Environment variable 'false' -> False + ('true', True, True), # Environment variable 'true' -> True + ], + ) + async def test_environment_variable_integration( + self, mock_dependencies, env_var_value, env_var_bool, expected_result + ): + """Test that the function properly reads the ENABLE_V1_GITHUB_RESOLVER environment variable.""" + mock_dependencies['user_settings'].v1_enabled = True + + with patch.dict( + os.environ, {'ENABLE_V1_GITHUB_RESOLVER': env_var_value} + ), patch('integrations.utils.os.getenv', return_value=env_var_value), patch( + 'integrations.github.github_view.ENABLE_V1_GITHUB_RESOLVER', env_var_bool + ): + result = await get_user_v1_enabled_setting('test_user_id') + assert result is expected_result + + @pytest.mark.asyncio + async def test_function_calls_correct_methods(self, mock_dependencies): + """Test that the function calls the correct methods with correct parameters.""" + mock_dependencies['user_settings'].v1_enabled = True + + with patch('integrations.github.github_view.ENABLE_V1_GITHUB_RESOLVER', True): + result = await get_user_v1_enabled_setting('test_user_123') + + # Verify the result + assert result is True + + # Verify correct methods were called with correct parameters + 
mock_dependencies['get_config'].assert_called_once() + mock_dependencies['store_class'].assert_called_once_with( + user_id='test_user_123', + session_maker=mock_dependencies['session_maker'], + config=mock_dependencies['get_config'].return_value, + ) + mock_dependencies['call_sync'].assert_called_once_with( + mock_dependencies['settings_store'].get_user_settings_by_keycloak_id, + 'test_user_123', + ) diff --git a/enterprise/tests/unit/test_github_view.py b/enterprise/tests/unit/test_github_view.py index 731b35b55f84..1edc46bc2af3 100644 --- a/enterprise/tests/unit/test_github_view.py +++ b/enterprise/tests/unit/test_github_view.py @@ -1,7 +1,10 @@ from unittest import TestCase, mock +from unittest.mock import MagicMock, patch -from integrations.github.github_view import GithubFactory, get_oh_labels +import pytest +from integrations.github.github_view import GithubFactory, GithubIssue, get_oh_labels from integrations.models import Message, SourceType +from integrations.types import UserData class TestGithubLabels(TestCase): @@ -75,3 +78,132 @@ def test_issue_comment_case_insensitivity(self): self.assertTrue(GithubFactory.is_issue_comment(message_lower)) self.assertTrue(GithubFactory.is_issue_comment(message_upper)) self.assertTrue(GithubFactory.is_issue_comment(message_mixed)) + + +class TestGithubV1ConversationRouting(TestCase): + """Test V1 conversation routing logic in GitHub integration.""" + + def setUp(self): + """Set up test fixtures.""" + # Create a proper UserData instance instead of MagicMock + user_data = UserData( + user_id=123, username='testuser', keycloak_user_id='test-keycloak-id' + ) + + # Create a mock raw_payload + raw_payload = Message( + source=SourceType.GITHUB, + message={ + 'payload': { + 'action': 'opened', + 'issue': {'number': 123}, + } + }, + ) + + self.github_issue = GithubIssue( + user_info=user_data, + full_repo_name='test/repo', + issue_number=123, + installation_id=456, + conversation_id='test-conversation-id', + should_extract=True, + send_summary_instruction=False, + is_public_repo=True, + raw_payload=raw_payload, + uuid='test-uuid', + title='Test Issue', + description='Test issue description', + previous_comments=[], + v1=False, + ) + + @pytest.mark.asyncio + @patch('integrations.github.github_view.get_user_v1_enabled_setting') + @patch.object(GithubIssue, '_create_v0_conversation') + @patch.object(GithubIssue, '_create_v1_conversation') + async def test_create_new_conversation_routes_to_v0_when_disabled( + self, mock_create_v1, mock_create_v0, mock_get_v1_setting + ): + """Test that conversation creation routes to V0 when v1_enabled is False.""" + # Mock v1_enabled as False + mock_get_v1_setting.return_value = False + mock_create_v0.return_value = None + mock_create_v1.return_value = None + + # Mock parameters + jinja_env = MagicMock() + git_provider_tokens = MagicMock() + conversation_metadata = MagicMock() + + # Call the method + await self.github_issue.create_new_conversation( + jinja_env, git_provider_tokens, conversation_metadata + ) + + # Verify V0 was called and V1 was not + mock_create_v0.assert_called_once_with( + jinja_env, git_provider_tokens, conversation_metadata + ) + mock_create_v1.assert_not_called() + + @pytest.mark.asyncio + @patch('integrations.github.github_view.get_user_v1_enabled_setting') + @patch.object(GithubIssue, '_create_v0_conversation') + @patch.object(GithubIssue, '_create_v1_conversation') + async def test_create_new_conversation_routes_to_v1_when_enabled( + self, mock_create_v1, mock_create_v0, mock_get_v1_setting + ): + 
"""Test that conversation creation routes to V1 when v1_enabled is True.""" + # Mock v1_enabled as True + mock_get_v1_setting.return_value = True + mock_create_v0.return_value = None + mock_create_v1.return_value = None + + # Mock parameters + jinja_env = MagicMock() + git_provider_tokens = MagicMock() + conversation_metadata = MagicMock() + + # Call the method + await self.github_issue.create_new_conversation( + jinja_env, git_provider_tokens, conversation_metadata + ) + + # Verify V1 was called and V0 was not + mock_create_v1.assert_called_once_with( + jinja_env, git_provider_tokens, conversation_metadata + ) + mock_create_v0.assert_not_called() + + @pytest.mark.asyncio + @patch('integrations.github.github_view.get_user_v1_enabled_setting') + @patch.object(GithubIssue, '_create_v0_conversation') + @patch.object(GithubIssue, '_create_v1_conversation') + async def test_create_new_conversation_fallback_on_v1_setting_error( + self, mock_create_v1, mock_create_v0, mock_get_v1_setting + ): + """Test that conversation creation falls back to V0 when _create_v1_conversation fails.""" + # Mock v1_enabled as True so V1 is attempted + mock_get_v1_setting.return_value = True + # Mock _create_v1_conversation to raise an exception + mock_create_v1.side_effect = Exception('V1 conversation creation failed') + mock_create_v0.return_value = None + + # Mock parameters + jinja_env = MagicMock() + git_provider_tokens = MagicMock() + conversation_metadata = MagicMock() + + # Call the method + await self.github_issue.create_new_conversation( + jinja_env, git_provider_tokens, conversation_metadata + ) + + # Verify V1 was attempted first, then V0 was called as fallback + mock_create_v1.assert_called_once_with( + jinja_env, git_provider_tokens, conversation_metadata + ) + mock_create_v0.assert_called_once_with( + jinja_env, git_provider_tokens, conversation_metadata + ) diff --git a/enterprise/tests/unit/test_legacy_conversation_manager.py b/enterprise/tests/unit/test_legacy_conversation_manager.py deleted file mode 100644 index 55b424dabc60..000000000000 --- a/enterprise/tests/unit/test_legacy_conversation_manager.py +++ /dev/null @@ -1,485 +0,0 @@ -import time -from unittest.mock import AsyncMock, MagicMock, patch - -import pytest -from server.legacy_conversation_manager import ( - _LEGACY_ENTRY_TIMEOUT_SECONDS, - LegacyCacheEntry, - LegacyConversationManager, -) - -from openhands.core.config.openhands_config import OpenHandsConfig -from openhands.server.config.server_config import ServerConfig -from openhands.server.monitoring import MonitoringListener -from openhands.storage.memory import InMemoryFileStore - - -@pytest.fixture -def mock_sio(): - """Create a mock SocketIO server.""" - return MagicMock() - - -@pytest.fixture -def mock_config(): - """Create a mock OpenHands config.""" - return MagicMock(spec=OpenHandsConfig) - - -@pytest.fixture -def mock_server_config(): - """Create a mock server config.""" - return MagicMock(spec=ServerConfig) - - -@pytest.fixture -def mock_file_store(): - """Create a mock file store.""" - return MagicMock(spec=InMemoryFileStore) - - -@pytest.fixture -def mock_monitoring_listener(): - """Create a mock monitoring listener.""" - return MagicMock(spec=MonitoringListener) - - -@pytest.fixture -def mock_conversation_manager(): - """Create a mock SaasNestedConversationManager.""" - mock_cm = MagicMock() - mock_cm._get_runtime = AsyncMock() - return mock_cm - - -@pytest.fixture -def mock_legacy_conversation_manager(): - """Create a mock ClusteredConversationManager.""" - return 
MagicMock() - - -@pytest.fixture -def legacy_manager( - mock_sio, - mock_config, - mock_server_config, - mock_file_store, - mock_conversation_manager, - mock_legacy_conversation_manager, -): - """Create a LegacyConversationManager instance for testing.""" - return LegacyConversationManager( - sio=mock_sio, - config=mock_config, - server_config=mock_server_config, - file_store=mock_file_store, - conversation_manager=mock_conversation_manager, - legacy_conversation_manager=mock_legacy_conversation_manager, - ) - - -class TestLegacyCacheEntry: - """Test the LegacyCacheEntry dataclass.""" - - def test_cache_entry_creation(self): - """Test creating a cache entry.""" - timestamp = time.time() - entry = LegacyCacheEntry(is_legacy=True, timestamp=timestamp) - - assert entry.is_legacy is True - assert entry.timestamp == timestamp - - def test_cache_entry_false(self): - """Test creating a cache entry with False value.""" - timestamp = time.time() - entry = LegacyCacheEntry(is_legacy=False, timestamp=timestamp) - - assert entry.is_legacy is False - assert entry.timestamp == timestamp - - -class TestLegacyConversationManagerCacheCleanup: - """Test cache cleanup functionality.""" - - def test_cleanup_expired_cache_entries_removes_expired(self, legacy_manager): - """Test that expired entries are removed from cache.""" - current_time = time.time() - expired_time = current_time - _LEGACY_ENTRY_TIMEOUT_SECONDS - 1 - valid_time = current_time - 100 # Well within timeout - - # Add both expired and valid entries - legacy_manager._legacy_cache = { - 'expired_conversation': LegacyCacheEntry(True, expired_time), - 'valid_conversation': LegacyCacheEntry(False, valid_time), - 'another_expired': LegacyCacheEntry(True, expired_time - 100), - } - - legacy_manager._cleanup_expired_cache_entries() - - # Only valid entry should remain - assert len(legacy_manager._legacy_cache) == 1 - assert 'valid_conversation' in legacy_manager._legacy_cache - assert 'expired_conversation' not in legacy_manager._legacy_cache - assert 'another_expired' not in legacy_manager._legacy_cache - - def test_cleanup_expired_cache_entries_keeps_valid(self, legacy_manager): - """Test that valid entries are kept during cleanup.""" - current_time = time.time() - valid_time = current_time - 100 # Well within timeout - - legacy_manager._legacy_cache = { - 'valid_conversation_1': LegacyCacheEntry(True, valid_time), - 'valid_conversation_2': LegacyCacheEntry(False, valid_time - 50), - } - - legacy_manager._cleanup_expired_cache_entries() - - # Both entries should remain - assert len(legacy_manager._legacy_cache) == 2 - assert 'valid_conversation_1' in legacy_manager._legacy_cache - assert 'valid_conversation_2' in legacy_manager._legacy_cache - - def test_cleanup_expired_cache_entries_empty_cache(self, legacy_manager): - """Test cleanup with empty cache.""" - legacy_manager._legacy_cache = {} - - legacy_manager._cleanup_expired_cache_entries() - - assert len(legacy_manager._legacy_cache) == 0 - - -class TestIsLegacyRuntime: - """Test the is_legacy_runtime method.""" - - def test_is_legacy_runtime_none(self, legacy_manager): - """Test with None runtime.""" - result = legacy_manager.is_legacy_runtime(None) - assert result is False - - def test_is_legacy_runtime_legacy_command(self, legacy_manager): - """Test with legacy runtime command.""" - runtime = {'command': 'some_old_legacy_command'} - result = legacy_manager.is_legacy_runtime(runtime) - assert result is True - - def test_is_legacy_runtime_new_command(self, legacy_manager): - """Test with new 
runtime command containing openhands.server.""" - runtime = {'command': 'python -m openhands.server.listen'} - result = legacy_manager.is_legacy_runtime(runtime) - assert result is False - - def test_is_legacy_runtime_partial_match(self, legacy_manager): - """Test with command that partially matches but is still legacy.""" - runtime = {'command': 'openhands.client.start'} - result = legacy_manager.is_legacy_runtime(runtime) - assert result is True - - def test_is_legacy_runtime_empty_command(self, legacy_manager): - """Test with empty command.""" - runtime = {'command': ''} - result = legacy_manager.is_legacy_runtime(runtime) - assert result is True - - def test_is_legacy_runtime_missing_command_key(self, legacy_manager): - """Test with runtime missing command key.""" - runtime = {'other_key': 'value'} - # This should raise a KeyError - with pytest.raises(KeyError): - legacy_manager.is_legacy_runtime(runtime) - - -class TestShouldStartInLegacyMode: - """Test the should_start_in_legacy_mode method.""" - - @pytest.mark.asyncio - async def test_cache_hit_valid_entry_legacy(self, legacy_manager): - """Test cache hit with valid legacy entry.""" - conversation_id = 'test_conversation' - current_time = time.time() - - # Add valid cache entry - legacy_manager._legacy_cache[conversation_id] = LegacyCacheEntry( - True, current_time - 100 - ) - - result = await legacy_manager.should_start_in_legacy_mode(conversation_id) - - assert result is True - # Should not call _get_runtime since we hit cache - legacy_manager.conversation_manager._get_runtime.assert_not_called() - - @pytest.mark.asyncio - async def test_cache_hit_valid_entry_non_legacy(self, legacy_manager): - """Test cache hit with valid non-legacy entry.""" - conversation_id = 'test_conversation' - current_time = time.time() - - # Add valid cache entry - legacy_manager._legacy_cache[conversation_id] = LegacyCacheEntry( - False, current_time - 100 - ) - - result = await legacy_manager.should_start_in_legacy_mode(conversation_id) - - assert result is False - # Should not call _get_runtime since we hit cache - legacy_manager.conversation_manager._get_runtime.assert_not_called() - - @pytest.mark.asyncio - async def test_cache_miss_legacy_runtime(self, legacy_manager): - """Test cache miss with legacy runtime.""" - conversation_id = 'test_conversation' - runtime = {'command': 'old_command'} - - legacy_manager.conversation_manager._get_runtime.return_value = runtime - - result = await legacy_manager.should_start_in_legacy_mode(conversation_id) - - assert result is True - # Should call _get_runtime - legacy_manager.conversation_manager._get_runtime.assert_called_once_with( - conversation_id - ) - # Should cache the result - assert conversation_id in legacy_manager._legacy_cache - assert legacy_manager._legacy_cache[conversation_id].is_legacy is True - - @pytest.mark.asyncio - async def test_cache_miss_non_legacy_runtime(self, legacy_manager): - """Test cache miss with non-legacy runtime.""" - conversation_id = 'test_conversation' - runtime = {'command': 'python -m openhands.server.listen'} - - legacy_manager.conversation_manager._get_runtime.return_value = runtime - - result = await legacy_manager.should_start_in_legacy_mode(conversation_id) - - assert result is False - # Should call _get_runtime - legacy_manager.conversation_manager._get_runtime.assert_called_once_with( - conversation_id - ) - # Should cache the result - assert conversation_id in legacy_manager._legacy_cache - assert legacy_manager._legacy_cache[conversation_id].is_legacy is False - 
- @pytest.mark.asyncio - async def test_cache_expired_entry(self, legacy_manager): - """Test with expired cache entry.""" - conversation_id = 'test_conversation' - expired_time = time.time() - _LEGACY_ENTRY_TIMEOUT_SECONDS - 1 - runtime = {'command': 'python -m openhands.server.listen'} - - # Add expired cache entry - legacy_manager._legacy_cache[conversation_id] = LegacyCacheEntry( - True, - expired_time, # This should be considered expired - ) - - legacy_manager.conversation_manager._get_runtime.return_value = runtime - - result = await legacy_manager.should_start_in_legacy_mode(conversation_id) - - assert result is False # Runtime indicates non-legacy - # Should call _get_runtime since cache is expired - legacy_manager.conversation_manager._get_runtime.assert_called_once_with( - conversation_id - ) - # Should update cache with new result - assert legacy_manager._legacy_cache[conversation_id].is_legacy is False - - @pytest.mark.asyncio - async def test_cache_exactly_at_timeout(self, legacy_manager): - """Test with cache entry exactly at timeout boundary.""" - conversation_id = 'test_conversation' - timeout_time = time.time() - _LEGACY_ENTRY_TIMEOUT_SECONDS - runtime = {'command': 'python -m openhands.server.listen'} - - # Add cache entry exactly at timeout - legacy_manager._legacy_cache[conversation_id] = LegacyCacheEntry( - True, timeout_time - ) - - legacy_manager.conversation_manager._get_runtime.return_value = runtime - - result = await legacy_manager.should_start_in_legacy_mode(conversation_id) - - # Should treat as expired and fetch from runtime - assert result is False - legacy_manager.conversation_manager._get_runtime.assert_called_once_with( - conversation_id - ) - - @pytest.mark.asyncio - async def test_runtime_returns_none(self, legacy_manager): - """Test when runtime returns None.""" - conversation_id = 'test_conversation' - - legacy_manager.conversation_manager._get_runtime.return_value = None - - result = await legacy_manager.should_start_in_legacy_mode(conversation_id) - - assert result is False - # Should cache the result - assert conversation_id in legacy_manager._legacy_cache - assert legacy_manager._legacy_cache[conversation_id].is_legacy is False - - @pytest.mark.asyncio - async def test_cleanup_called_on_each_invocation(self, legacy_manager): - """Test that cleanup is called on each invocation.""" - conversation_id = 'test_conversation' - runtime = {'command': 'test'} - - legacy_manager.conversation_manager._get_runtime.return_value = runtime - - # Mock the cleanup method to verify it's called - with patch.object( - legacy_manager, '_cleanup_expired_cache_entries' - ) as mock_cleanup: - await legacy_manager.should_start_in_legacy_mode(conversation_id) - mock_cleanup.assert_called_once() - - @pytest.mark.asyncio - async def test_multiple_conversations_cached_independently(self, legacy_manager): - """Test that multiple conversations are cached independently.""" - conv1 = 'conversation_1' - conv2 = 'conversation_2' - - runtime1 = {'command': 'old_command'} # Legacy - runtime2 = {'command': 'python -m openhands.server.listen'} # Non-legacy - - # Mock to return different runtimes based on conversation_id - def mock_get_runtime(conversation_id): - if conversation_id == conv1: - return runtime1 - return runtime2 - - legacy_manager.conversation_manager._get_runtime.side_effect = mock_get_runtime - - result1 = await legacy_manager.should_start_in_legacy_mode(conv1) - result2 = await legacy_manager.should_start_in_legacy_mode(conv2) - - assert result1 is True - assert result2 
is False - - # Both should be cached - assert conv1 in legacy_manager._legacy_cache - assert conv2 in legacy_manager._legacy_cache - assert legacy_manager._legacy_cache[conv1].is_legacy is True - assert legacy_manager._legacy_cache[conv2].is_legacy is False - - @pytest.mark.asyncio - async def test_cache_timestamp_updated_on_refresh(self, legacy_manager): - """Test that cache timestamp is updated when entry is refreshed.""" - conversation_id = 'test_conversation' - old_time = time.time() - _LEGACY_ENTRY_TIMEOUT_SECONDS - 1 - runtime = {'command': 'test'} - - # Add expired entry - legacy_manager._legacy_cache[conversation_id] = LegacyCacheEntry(True, old_time) - legacy_manager.conversation_manager._get_runtime.return_value = runtime - - # Record time before call - before_call = time.time() - await legacy_manager.should_start_in_legacy_mode(conversation_id) - after_call = time.time() - - # Timestamp should be updated - cached_entry = legacy_manager._legacy_cache[conversation_id] - assert cached_entry.timestamp >= before_call - assert cached_entry.timestamp <= after_call - - -class TestLegacyConversationManagerIntegration: - """Integration tests for LegacyConversationManager.""" - - @pytest.mark.asyncio - async def test_get_instance_creates_proper_manager( - self, - mock_sio, - mock_config, - mock_file_store, - mock_server_config, - mock_monitoring_listener, - ): - """Test that get_instance creates a properly configured manager.""" - with patch( - 'server.legacy_conversation_manager.SaasNestedConversationManager' - ) as mock_saas, patch( - 'server.legacy_conversation_manager.ClusteredConversationManager' - ) as mock_clustered: - mock_saas.get_instance.return_value = MagicMock() - mock_clustered.get_instance.return_value = MagicMock() - - manager = LegacyConversationManager.get_instance( - mock_sio, - mock_config, - mock_file_store, - mock_server_config, - mock_monitoring_listener, - ) - - assert isinstance(manager, LegacyConversationManager) - assert manager.sio == mock_sio - assert manager.config == mock_config - assert manager.file_store == mock_file_store - assert manager.server_config == mock_server_config - - # Verify that both nested managers are created - mock_saas.get_instance.assert_called_once() - mock_clustered.get_instance.assert_called_once() - - def test_legacy_cache_initialized_empty(self, legacy_manager): - """Test that legacy cache is initialized as empty dict.""" - assert isinstance(legacy_manager._legacy_cache, dict) - assert len(legacy_manager._legacy_cache) == 0 - - -class TestEdgeCases: - """Test edge cases and error scenarios.""" - - @pytest.mark.asyncio - async def test_get_runtime_raises_exception(self, legacy_manager): - """Test behavior when _get_runtime raises an exception.""" - conversation_id = 'test_conversation' - - legacy_manager.conversation_manager._get_runtime.side_effect = Exception( - 'Runtime error' - ) - - # Should propagate the exception - with pytest.raises(Exception, match='Runtime error'): - await legacy_manager.should_start_in_legacy_mode(conversation_id) - - @pytest.mark.asyncio - async def test_very_large_cache(self, legacy_manager): - """Test behavior with a large number of cache entries.""" - current_time = time.time() - - # Add many cache entries - for i in range(1000): - legacy_manager._legacy_cache[f'conversation_{i}'] = LegacyCacheEntry( - i % 2 == 0, current_time - i - ) - - # This should work without issues - await legacy_manager.should_start_in_legacy_mode('new_conversation') - - # Should have added one more entry - assert 
len(legacy_manager._legacy_cache) == 1001 - - def test_cleanup_with_concurrent_modifications(self, legacy_manager): - """Test cleanup behavior when cache is modified during cleanup.""" - current_time = time.time() - expired_time = current_time - _LEGACY_ENTRY_TIMEOUT_SECONDS - 1 - - # Add expired entries - legacy_manager._legacy_cache = { - f'conversation_{i}': LegacyCacheEntry(True, expired_time) for i in range(10) - } - - # This should work without raising exceptions - legacy_manager._cleanup_expired_cache_entries() - - # All entries should be removed - assert len(legacy_manager._legacy_cache) == 0 diff --git a/enterprise/tests/unit/test_saas_user_auth.py b/enterprise/tests/unit/test_saas_user_auth.py index 35672af7242d..d4ba902677b0 100644 --- a/enterprise/tests/unit/test_saas_user_auth.py +++ b/enterprise/tests/unit/test_saas_user_auth.py @@ -535,3 +535,115 @@ def test_get_api_key_from_header_with_invalid_authorization_format(): # Assert that None was returned assert api_key is None + + +def test_get_api_key_from_header_with_x_access_token(): + """Test that get_api_key_from_header extracts API key from X-Access-Token header.""" + # Create a mock request with X-Access-Token header + mock_request = MagicMock(spec=Request) + mock_request.headers = {'X-Access-Token': 'access_token_key'} + + # Call the function + api_key = get_api_key_from_header(mock_request) + + # Assert that the API key was correctly extracted + assert api_key == 'access_token_key' + + +def test_get_api_key_from_header_priority_authorization_over_x_access_token(): + """Test that Authorization header takes priority over X-Access-Token header.""" + # Create a mock request with both headers + mock_request = MagicMock(spec=Request) + mock_request.headers = { + 'Authorization': 'Bearer auth_api_key', + 'X-Access-Token': 'access_token_key', + } + + # Call the function + api_key = get_api_key_from_header(mock_request) + + # Assert that the API key from Authorization header was used + assert api_key == 'auth_api_key' + + +def test_get_api_key_from_header_priority_x_session_over_x_access_token(): + """Test that X-Session-API-Key header takes priority over X-Access-Token header.""" + # Create a mock request with both headers + mock_request = MagicMock(spec=Request) + mock_request.headers = { + 'X-Session-API-Key': 'session_api_key', + 'X-Access-Token': 'access_token_key', + } + + # Call the function + api_key = get_api_key_from_header(mock_request) + + # Assert that the API key from X-Session-API-Key header was used + assert api_key == 'session_api_key' + + +def test_get_api_key_from_header_all_three_headers(): + """Test header priority when all three headers are present.""" + # Create a mock request with all three headers + mock_request = MagicMock(spec=Request) + mock_request.headers = { + 'Authorization': 'Bearer auth_api_key', + 'X-Session-API-Key': 'session_api_key', + 'X-Access-Token': 'access_token_key', + } + + # Call the function + api_key = get_api_key_from_header(mock_request) + + # Assert that the API key from Authorization header was used (highest priority) + assert api_key == 'auth_api_key' + + +def test_get_api_key_from_header_invalid_authorization_fallback_to_x_access_token(): + """Test that invalid Authorization header falls back to X-Access-Token.""" + # Create a mock request with invalid Authorization header and X-Access-Token + mock_request = MagicMock(spec=Request) + mock_request.headers = { + 'Authorization': 'InvalidFormat api_key', + 'X-Access-Token': 'access_token_key', + } + + # Call the function + 
api_key = get_api_key_from_header(mock_request) + + # Assert that the API key from X-Access-Token header was used + assert api_key == 'access_token_key' + + +def test_get_api_key_from_header_empty_headers(): + """Test that empty header values are handled correctly.""" + # Create a mock request with empty header values + mock_request = MagicMock(spec=Request) + mock_request.headers = { + 'Authorization': '', + 'X-Session-API-Key': '', + 'X-Access-Token': 'access_token_key', + } + + # Call the function + api_key = get_api_key_from_header(mock_request) + + # Assert that the API key from X-Access-Token header was used + assert api_key == 'access_token_key' + + +def test_get_api_key_from_header_bearer_with_empty_token(): + """Test that Bearer header with empty token falls back to other headers.""" + # Create a mock request with Bearer header with empty token + mock_request = MagicMock(spec=Request) + mock_request.headers = { + 'Authorization': 'Bearer ', + 'X-Access-Token': 'access_token_key', + } + + # Call the function + api_key = get_api_key_from_header(mock_request) + + # Assert that empty string from Bearer is returned (current behavior) + # This tests the current implementation behavior + assert api_key == '' diff --git a/evaluation/README.md b/evaluation/README.md index 694623f63d6f..b4a125b3fc28 100644 --- a/evaluation/README.md +++ b/evaluation/README.md @@ -1,5 +1,10 @@ # Evaluation +> [!WARNING] +> **This directory is deprecated.** Our new benchmarks are located at [OpenHands/benchmarks](https://github.com/OpenHands/benchmarks). +> +> If you have already implemented a benchmark in this directory and would like to contribute it, we are happy to have the contribution. However, if you are starting anew, please use the new location. + This folder contains code and resources to run experiments and evaluations. ## For Benchmark Users diff --git a/evaluation/benchmarks/swefficiency/README.md b/evaluation/benchmarks/swefficiency/README.md new file mode 100644 index 000000000000..6418f3a87b7e --- /dev/null +++ b/evaluation/benchmarks/swefficiency/README.md @@ -0,0 +1,65 @@ +# SWE-fficiency Evaluation + +This folder contains the OpenHands inference code for the [SWE-fficiency benchmark](https://swefficiency.com/) ([paper](https://arxiv.org/pdf/2507.12415v1)). + +The evaluation consists of three steps: + +1. Environment setup: [install Python environment](../../README.md#development-environment) and [configure LLM config](../../README.md#configure-openhands-and-your-llm). +2. [Run inference](#running-inference-locally-with-docker): Generate an edit patch for each GitHub issue +3. [Evaluate patches](#evaluate-generated-patches) + +## Setup Environment and LLM Configuration + +Please follow the instructions [here](../../README.md#setup) to set up your local development environment and LLM. + +## Running Inference Locally with Docker + +Make sure your Docker daemon is running and that you have ample disk space (at least 200-500 GB, depending on the SWE-Perf set you are running on) for the instance-level Docker images. + +When the `run_infer.sh` script is started, it will automatically pull the relevant SWE-Perf images. +For example, for instance ID `scikit-learn_scikit-learn-11674`, it will try to pull our pre-built Docker image `betty1202/sweb.eval.x86_64.scikit-learn_s_scikit-learn-11674` from DockerHub. +This image will be used to create an OpenHands runtime image in which the agent will operate.
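If you want to warm the local Docker cache before kicking off a run, the per-instance image reference can be pre-pulled. The sketch below mirrors the `get_instance_docker_image` helper added in `run_infer.py` later in this diff; the `betty1202/sweb.eval.*` naming mentioned above differs from it, so treat the registry and tag scheme here as an assumption rather than a guarantee.

```python
import subprocess


def get_instance_docker_image(instance_id: str) -> str:
    # Mirrors the helper defined in run_infer.py below.
    return f'ghcr.io/swefficiency/swefficiency-images:{instance_id}'


def prepull_instance_image(instance_id: str) -> None:
    # Optional warm-up; the harness will otherwise fetch the image when the
    # runtime for that instance is first built.
    image = get_instance_docker_image(instance_id)
    subprocess.run(['docker', 'pull', image], check=True)


if __name__ == '__main__':
    prepull_instance_image('scikit-learn_scikit-learn-11674')
```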
+ +```bash +./evaluation/benchmarks/swefficiency/scripts/run_infer.sh [model_config] [git-version] [agent] [eval_limit] [max_iter] [num_workers] [dataset] [dataset_split] [n_runs] [mode] + +# Example +./evaluation/benchmarks/swefficiency/scripts/run_infer.sh llm.eval_gpt4_1106_preview HEAD CodeActAgent 500 100 1 swefficiency/swefficiency test +``` + +where `model_config` is mandatory, and the rest are optional. + +- `model_config`, e.g. `eval_gpt4_1106_preview`, is the config group name for your +LLM settings, as defined in your `config.toml`. +- `git-version`, e.g. `HEAD`, is the git commit hash of the OpenHands version you would +like to evaluate. It could also be a release tag like `0.6.2`. +- `agent`, e.g. `CodeActAgent`, is the name of the agent for benchmarks, defaulting +to `CodeActAgent`. +- `eval_limit`, e.g. `10`, limits the evaluation to the first `eval_limit` instances. By +default, the script evaluates the entire SWE-Perf test set (140 issues). Note: +in order to use `eval_limit`, you must also set `agent`. +- `max_iter`, e.g. `20`, is the maximum number of iterations for the agent to run. By +default, it is set to 100. +- `num_workers`, e.g. `3`, is the number of parallel workers to run the evaluation. By +default, it is set to 1. +- `dataset`, a Hugging Face dataset name, e.g. `SWE-Perf/SWE-Perf`, specifies which dataset to evaluate on. +- `dataset_split`, the split of the Hugging Face dataset, e.g. `test` or `dev`. Defaults to `test`. + +- `n_runs`, e.g. `3`, is the number of times to run the evaluation. Default is 1. +- `mode`, e.g. `swt`, `swt-ci`, or `swe`, specifies the evaluation mode. Default is `swe`. + +> [!CAUTION] +> Setting `num_workers` larger than 1 is not officially tested; YMMV. + + +Let's say you'd like to run 10 instances using `llm.eval_gpt4_1106_preview` and CodeActAgent; then your command would be: + +```bash +./evaluation/benchmarks/swefficiency/scripts/run_infer.sh llm.eval_gpt4_1106_preview HEAD CodeActAgent 10 +``` + +### Run the official SWE-fficiency benchmark evaluation + +Once the inference output has been generated, use the [official SWE-fficiency benchmark evaluation](https://github.com/swefficiency/swefficiency) to evaluate it. diff --git a/frontend/tests/fixtures/project.zip b/evaluation/benchmarks/swefficiency/__init__.py similarity index 100% rename from frontend/tests/fixtures/project.zip rename to evaluation/benchmarks/swefficiency/__init__.py diff --git a/evaluation/benchmarks/swefficiency/binary_patch_utils.py b/evaluation/benchmarks/swefficiency/binary_patch_utils.py new file mode 100644 index 000000000000..9cf0dbd714d7 --- /dev/null +++ b/evaluation/benchmarks/swefficiency/binary_patch_utils.py @@ -0,0 +1,52 @@ +""" +Utilities for handling binary files and patch generation in SWE-bench evaluation. +""" + + +def remove_binary_diffs(patch_text): + """ + Remove binary file diffs from a git patch.
+ + Args: + patch_text (str): The git patch text + + Returns: + str: The cleaned patch text with binary diffs removed + """ + lines = patch_text.splitlines() + cleaned_lines = [] + block = [] + is_binary_block = False + + for line in lines: + if line.startswith('diff --git '): + if block and not is_binary_block: + cleaned_lines.extend(block) + block = [line] + is_binary_block = False + elif 'Binary files' in line: + is_binary_block = True + block.append(line) + else: + block.append(line) + + if block and not is_binary_block: + cleaned_lines.extend(block) + return '\n'.join(cleaned_lines) + + +def remove_binary_files_from_git(): + """ + Generate a bash command to remove binary files from git staging. + + Returns: + str: A bash command that removes binary files from git staging + """ + return """ + for file in $(git status --porcelain | grep -E "^(M| M|\\?\\?|A| A)" | cut -c4-); do + if [ -f "$file" ] && (file "$file" | grep -q "executable" || git check-attr binary "$file" | grep -q "binary: set"); then + git rm -f "$file" 2>/dev/null || rm -f "$file" + echo "Removed: $file" + fi + done + """.strip() diff --git a/evaluation/benchmarks/swefficiency/run_infer.py b/evaluation/benchmarks/swefficiency/run_infer.py new file mode 100644 index 000000000000..42da17d2346d --- /dev/null +++ b/evaluation/benchmarks/swefficiency/run_infer.py @@ -0,0 +1,960 @@ +import asyncio +import copy +import functools +import json +import multiprocessing +import os +import tempfile +from typing import Any, Literal + +import pandas as pd +import toml +from datasets import load_dataset + +import openhands.agenthub +from evaluation.benchmarks.swe_bench.binary_patch_utils import ( + remove_binary_diffs, + remove_binary_files_from_git, +) +from evaluation.utils.shared import ( + EvalException, + EvalMetadata, + EvalOutput, + assert_and_raise, + codeact_user_response, + get_default_sandbox_config_for_eval, + get_metrics, + is_fatal_evaluation_error, + make_metadata, + prepare_dataset, + reset_logger_for_multiprocessing, + run_evaluation, + update_llm_config_for_completions_logging, +) +from openhands.controller.state.state import State +from openhands.core.config import ( + AgentConfig, + OpenHandsConfig, + get_evaluation_parser, + get_llm_config_arg, +) +from openhands.core.config.condenser_config import NoOpCondenserConfig +from openhands.core.config.utils import get_condenser_config_arg +from openhands.core.logger import openhands_logger as logger +from openhands.core.main import create_runtime, run_controller +from openhands.critic import AgentFinishedCritic +from openhands.events.action import CmdRunAction, FileReadAction, MessageAction +from openhands.events.observation import ( + CmdOutputObservation, + ErrorObservation, + FileReadObservation, +) +from openhands.events.serialization.event import event_from_dict, event_to_dict +from openhands.runtime.base import Runtime +from openhands.utils.async_utils import call_async_from_sync +from openhands.utils.shutdown_listener import sleep_if_should_continue + +USE_HINT_TEXT = os.environ.get('USE_HINT_TEXT', 'false').lower() == 'true' +RUN_WITH_BROWSING = os.environ.get('RUN_WITH_BROWSING', 'false').lower() == 'true' +BenchMode = Literal['swe', 'swt', 'swt-ci'] + + +AGENT_CLS_TO_FAKE_USER_RESPONSE_FN = { + 'CodeActAgent': codeact_user_response, +} + + +def _get_swebench_workspace_dir_name(instance: pd.Series) -> str: + return f'{instance.repo}__{instance.version}'.replace('/', '__') + + +def get_instruction(instance: pd.Series, metadata: EvalMetadata) -> MessageAction: + 
workspace_dir_name = _get_swebench_workspace_dir_name(instance) + + # TODO: Change to testbed? + instruction = f""" + +/workspace/{workspace_dir_name} + + +I’ve uploaded a python code repository in the directory workspace_dir_name. Consider the following performance workload and `workload()` function showing an specific usage of the repository: + +{instance.workload} + + +Can you help me implement the necessary changes to the repository so that the runtime of the `workload()` function is faster? Basic guidelines: +1. Your task is to make changes to non-test files in the /workspace directory to improve the performance of the code running in `workload()`. Please do not directly change the implementation of the `workload()` function to optimize things: I want you to focus on making the workload AS IS run faster by only editing the repository containing code that the `workload()` function calls. +2. Make changes while ensuring the repository is functionally equivalent to the original: your changes should not introduce new bugs or cause already-passing tests to begin failing after your changes. However, you do not need to worry about tests that already fail without any changes made. For relevant test files you find in the repository, you can run them via the bash command `{instance.test_cmd} ` to check for correctness. Note that running all the tests may take a long time, so you need to determine which tests are relevant to your changes. +3. Make sure the `workload()` function improves in performance after you make changes to the repository. The workload can potentially take some time to run, so please allow it to finish and be generous with setting your timeout parameter (a timeout value of 3600 or larger here is encouraged): for faster iteration, you should adjust the workload script to use fewer iterations. Before you complete your task, please make sure to check that the **original performance workload** and `workload()` function runs successfully and the performance is improved. +4. You may need to reinstall/rebuild the repo for your changes to take effect before testing if you made non-Python changes. Reinstalling may take a long time to run (a timeout value of 3600 or larger here is encouraged), so please be patient with running it and allow it to complete if possible. You can reinstall the repository by running the bash command `{instance.rebuild_cmd}` in the workspace directory. +5. All the dependencies required to run the `workload()` function are already installed in the environment. You should not install or upgrade any dependencies. + +Follow these steps to improve performance: +1. As a first step, explore the repository structure. +2. Create a Python script to reproduce the performance workload, execute it with python , and examine the printed output metrics. +3. Edit the source code of the repository to improve performance. Please do not change the contents of the `workload()` function itself, but focus on optimizing the code in the repository that the original `workload()` function uses. +4. If non-Python changes were made, rebuild the repo to make sure the changes take effect. +5. Rerun your script to confirm that performance has improved. +6. If necessary, identify any relevant test files in the repository related to your changes and verify that test statuses did not change after your modifications. +7. After each attempted change, please reflect on the changes attempted and the performance impact observed. 
If the performance did not improve, consider alternative approaches or optimizations. +8. Once you are satisfied, please use the finish command to complete your task. + +Please remember that you should not change the implementation of the `workload()` function. The performance improvement should solely come from editing the source files in the code repository. +""" + + if RUN_WITH_BROWSING: + instruction += ( + '\nYou SHOULD NEVER attempt to browse the web. \n' + ) + + return MessageAction(content=instruction) + + +def get_instance_docker_image( + instance_id: str, +) -> str: + return f'ghcr.io/swefficiency/swefficiency-images:{instance_id}' + + +def get_config( + instance: pd.Series, + metadata: EvalMetadata, + cpu_group: list[int] | None = None, +) -> OpenHandsConfig: + # We use a different instance image for the each instance of swe-bench eval + base_container_image = get_instance_docker_image( + instance['instance_id'], + ) + logger.info( + f'Using instance container image: {base_container_image}. ' + f'Please make sure this image exists. ' + f'Submit an issue on https://github.com/All-Hands-AI/OpenHands if you run into any issues.' + ) + + sandbox_config = get_default_sandbox_config_for_eval() + sandbox_config.base_container_image = base_container_image + sandbox_config.enable_auto_lint = True + sandbox_config.use_host_network = False + sandbox_config.timeout = 3600 + + # Control container cleanup behavior via environment variable + # Default to False for multiprocessing stability to prevent cascade failures + sandbox_config.rm_all_containers = True + + sandbox_config.platform = 'linux/amd64' + sandbox_config.remote_runtime_resource_factor = 4.0 + sandbox_config.runtime_startup_env_vars.update( + { + 'NO_CHANGE_TIMEOUT_SECONDS': '900', # 15 minutes + } + ) + + if cpu_group is not None: + print(f'Configuring Docker runtime with CPU group: {cpu_group}') + sandbox_config.docker_runtime_kwargs = { + # HACK: Use the cpu_group if provided, otherwise use all available CPUs + 'cpuset_cpus': ','.join(map(str, cpu_group)), + 'nano_cpus': int(1e9 * len(cpu_group)), # optional: hard cap to vCPU count + 'mem_limit': '16g', + } + + # Note: We keep rm_all_containers = False for worker process safety + + config = OpenHandsConfig( + default_agent=metadata.agent_class, + run_as_openhands=False, + max_iterations=metadata.max_iterations, + runtime=os.environ.get('RUNTIME', 'docker'), + sandbox=sandbox_config, + # do not mount workspace + workspace_base=None, + workspace_mount_path=None, + ) + config.set_llm_config( + update_llm_config_for_completions_logging( + metadata.llm_config, metadata.eval_output_dir, instance['instance_id'] + ) + ) + agent_config = AgentConfig( + enable_jupyter=False, + enable_browsing=RUN_WITH_BROWSING, + enable_llm_editor=False, + enable_mcp=False, + condenser=metadata.condenser_config, + enable_prompt_extensions=False, + ) + config.set_agent_config(agent_config) + return config + + +def initialize_runtime( + runtime: Runtime, + instance: pd.Series, # this argument is not required + metadata: EvalMetadata, +): + """Initialize the runtime for the agent. + + This function is called before the runtime is used to run the agent. 
+ """ + logger.info('-' * 30) + logger.info('BEGIN Runtime Initialization Fn') + logger.info('-' * 30) + workspace_dir_name = _get_swebench_workspace_dir_name(instance) + obs: CmdOutputObservation + + # Set instance id and git configuration + action = CmdRunAction( + command=f"""echo 'export SWE_INSTANCE_ID={instance['instance_id']}' >> ~/.bashrc && echo 'export PIP_CACHE_DIR=~/.cache/pip' >> ~/.bashrc && echo "alias git='git --no-pager'" >> ~/.bashrc && git config --global core.pager "" && git config --global diff.binary false""" + ) + action.set_hard_timeout(600) + logger.info(action, extra={'msg_type': 'ACTION'}) + obs = runtime.run_action(action) + logger.info(obs, extra={'msg_type': 'OBSERVATION'}) + assert_and_raise( + obs.exit_code == 0, + f'Failed to export SWE_INSTANCE_ID and configure git: {str(obs)}', + ) + + action = CmdRunAction(command="""export USER=$(whoami); echo USER=${USER} """) + action.set_hard_timeout(600) + logger.info(action, extra={'msg_type': 'ACTION'}) + obs = runtime.run_action(action) + logger.info(obs, extra={'msg_type': 'OBSERVATION'}) + assert_and_raise(obs.exit_code == 0, f'Failed to export USER: {str(obs)}') + + # inject the init script + script_dir = os.path.dirname(__file__) + + # inject the instance info + action = CmdRunAction(command='mkdir -p /swe_util/eval_data/instances') + action.set_hard_timeout(600) + logger.info(action, extra={'msg_type': 'ACTION'}) + obs = runtime.run_action(action) + logger.info(obs, extra={'msg_type': 'OBSERVATION'}) + assert_and_raise( + obs.exit_code == 0, + f'Failed to create /swe_util/eval_data/instances: {str(obs)}', + ) + + swe_instance_json_name = 'swe-bench-instance.json' + with tempfile.TemporaryDirectory() as temp_dir: + # Construct the full path for the desired file name within the temporary directory + temp_file_path = os.path.join(temp_dir, swe_instance_json_name) + # Write to the file with the desired name within the temporary directory + with open(temp_file_path, 'w') as f: + if not isinstance(instance, dict): + json.dump([instance.to_dict()], f) + else: + json.dump([instance], f) + + # Copy the file to the desired location + runtime.copy_to(temp_file_path, '/swe_util/eval_data/instances/') + + # inject the instance swe entry + runtime.copy_to( + str(os.path.join(script_dir, 'scripts/setup/instance_swe_entry.sh')), + '/swe_util/', + ) + + action = CmdRunAction(command='cat ~/.bashrc') + action.set_hard_timeout(600) + logger.info(action, extra={'msg_type': 'ACTION'}) + obs = runtime.run_action(action) + logger.info(obs, extra={'msg_type': 'OBSERVATION'}) + assert_and_raise(obs.exit_code == 0, f'Failed to cat ~/.bashrc: {str(obs)}') + + action = CmdRunAction(command='source ~/.bashrc') + action.set_hard_timeout(600) + logger.info(action, extra={'msg_type': 'ACTION'}) + obs = runtime.run_action(action) + logger.info(obs, extra={'msg_type': 'OBSERVATION'}) + if isinstance(obs, ErrorObservation): + logger.error(f'Failed to source ~/.bashrc: {str(obs)}') + assert_and_raise(obs.exit_code == 0, f'Failed to source ~/.bashrc: {str(obs)}') + + action = CmdRunAction(command='source /swe_util/instance_swe_entry.sh') + action.set_hard_timeout(600) + logger.info(action, extra={'msg_type': 'ACTION'}) + obs = runtime.run_action(action) + logger.info(obs, extra={'msg_type': 'OBSERVATION'}) + assert_and_raise( + obs.exit_code == 0, + f'Failed to source /swe_util/instance_swe_entry.sh: {str(obs)}', + ) + + action = CmdRunAction(command=f'cd /workspace/{workspace_dir_name}') + action.set_hard_timeout(600) + logger.info(action, 
extra={'msg_type': 'ACTION'}) + obs = runtime.run_action(action) + logger.info(obs, extra={'msg_type': 'OBSERVATION'}) + assert_and_raise( + obs.exit_code == 0, + f'Failed to cd to /workspace/{workspace_dir_name}: {str(obs)}', + ) + + action = CmdRunAction(command='git reset --hard') + action.set_hard_timeout(600) + logger.info(action, extra={'msg_type': 'ACTION'}) + obs = runtime.run_action(action) + logger.info(obs, extra={'msg_type': 'OBSERVATION'}) + assert_and_raise(obs.exit_code == 0, f'Failed to git reset --hard: {str(obs)}') + + action = CmdRunAction( + command='for remote_name in $(git remote); do git remote remove "${remote_name}"; done' + ) + action.set_hard_timeout(600) + logger.info(action, extra={'msg_type': 'ACTION'}) + obs = runtime.run_action(action) + logger.info(obs, extra={'msg_type': 'OBSERVATION'}) + assert_and_raise(obs.exit_code == 0, f'Failed to remove git remotes: {str(obs)}') + + action = CmdRunAction(command='which python') + action.set_hard_timeout(600) + logger.info(action, extra={'msg_type': 'ACTION'}) + obs = runtime.run_action(action) + logger.info(obs, extra={'msg_type': 'OBSERVATION'}) + assert_and_raise( + obs.exit_code == 0 and 'testbed' in obs.content, + f'Expected to find python interpreter from testbed, but got: {str(obs)}', + ) + + logger.info('-' * 30) + logger.info('END Runtime Initialization Fn') + logger.info('-' * 30) + + +def complete_runtime( + runtime: Runtime, + instance: pd.Series, # this argument is not required, but it is used to get the workspace_dir_name +) -> dict[str, Any]: + """Complete the runtime for the agent. + + This function is called before the runtime is used to run the agent. + If you need to do something in the sandbox to get the correctness metric after + the agent has run, modify this function. 
+ """ + logger.info('-' * 30) + logger.info('BEGIN Runtime Completion Fn') + logger.info('-' * 30) + obs: CmdOutputObservation + workspace_dir_name = _get_swebench_workspace_dir_name(instance) + + action = CmdRunAction(command=f'cd /workspace/{workspace_dir_name}') + action.set_hard_timeout(600) + logger.info(action, extra={'msg_type': 'ACTION'}) + obs = runtime.run_action(action) + logger.info(obs, extra={'msg_type': 'OBSERVATION'}) + + if obs.exit_code == -1: + # The previous command is still running + # We need to kill previous command + logger.info('The previous command is still running, trying to kill it...') + action = CmdRunAction(command='C-c') + obs = runtime.run_action(action) + logger.info(obs, extra={'msg_type': 'OBSERVATION'}) + + # Then run the command again + action = CmdRunAction(command=f'cd /workspace/{workspace_dir_name}') + action.set_hard_timeout(600) + logger.info(action, extra={'msg_type': 'ACTION'}) + obs = runtime.run_action(action) + logger.info(obs, extra={'msg_type': 'OBSERVATION'}) + + if obs.exit_code == -1: + # The previous command is still running + # We need to kill previous command + logger.info('The previous command is still running, trying to ctrl+z it...') + action = CmdRunAction(command='C-z') + obs = runtime.run_action(action) + logger.info(obs, extra={'msg_type': 'OBSERVATION'}) + + # Then run the command again + action = CmdRunAction(command=f'cd /workspace/{workspace_dir_name}') + action.set_hard_timeout(600) + logger.info(action, extra={'msg_type': 'ACTION'}) + obs = runtime.run_action(action) + logger.info(obs, extra={'msg_type': 'OBSERVATION'}) + + assert_and_raise( + isinstance(obs, CmdOutputObservation) and obs.exit_code == 0, + f'Failed to cd to /workspace/{workspace_dir_name}: {str(obs)}', + ) + + action = CmdRunAction(command='git config --global core.pager ""') + action.set_hard_timeout(600) + logger.info(action, extra={'msg_type': 'ACTION'}) + obs = runtime.run_action(action) + logger.info(obs, extra={'msg_type': 'OBSERVATION'}) + assert_and_raise( + isinstance(obs, CmdOutputObservation) and obs.exit_code == 0, + f'Failed to git config --global core.pager "": {str(obs)}', + ) + + # First check for any git repositories in subdirectories + action = CmdRunAction(command='find . 
-type d -name .git -not -path "./.git"') + action.set_hard_timeout(600) + logger.info(action, extra={'msg_type': 'ACTION'}) + obs = runtime.run_action(action) + logger.info(obs, extra={'msg_type': 'OBSERVATION'}) + assert_and_raise( + isinstance(obs, CmdOutputObservation) and obs.exit_code == 0, + f'Failed to find git repositories: {str(obs)}', + ) + + git_dirs = [p for p in obs.content.strip().split('\n') if p] + if git_dirs: + # Remove all .git directories in subdirectories + for git_dir in git_dirs: + action = CmdRunAction(command=f'rm -rf "{git_dir}"') + action.set_hard_timeout(600) + logger.info(action, extra={'msg_type': 'ACTION'}) + obs = runtime.run_action(action) + logger.info(obs, extra={'msg_type': 'OBSERVATION'}) + assert_and_raise( + isinstance(obs, CmdOutputObservation) and obs.exit_code == 0, + f'Failed to remove git directory {git_dir}: {str(obs)}', + ) + + # add all files + action = CmdRunAction(command='git add -A') + action.set_hard_timeout(600) + logger.info(action, extra={'msg_type': 'ACTION'}) + obs = runtime.run_action(action) + logger.info(obs, extra={'msg_type': 'OBSERVATION'}) + assert_and_raise( + isinstance(obs, CmdOutputObservation) and obs.exit_code == 0, + f'Failed to git add -A: {str(obs)}', + ) + + # Remove binary files from git staging + action = CmdRunAction(command=remove_binary_files_from_git()) + action.set_hard_timeout(600) + logger.info(action, extra={'msg_type': 'ACTION'}) + obs = runtime.run_action(action) + logger.info(obs, extra={'msg_type': 'OBSERVATION'}) + assert_and_raise( + isinstance(obs, CmdOutputObservation) and obs.exit_code == 0, + f'Failed to remove binary files: {str(obs)}', + ) + + n_retries = 0 + git_patch = None + while n_retries < 5: + action = CmdRunAction( + command=f'git diff --no-color --cached {instance["base_commit"]} > patch.diff' + ) + action.set_hard_timeout(max(300 + 100 * n_retries, 600)) + logger.info(action, extra={'msg_type': 'ACTION'}) + obs = runtime.run_action(action) + logger.info(obs, extra={'msg_type': 'OBSERVATION'}) + n_retries += 1 + if isinstance(obs, CmdOutputObservation): + if obs.exit_code == 0: + # Read the patch file + action = FileReadAction(path='patch.diff') + action.set_hard_timeout(max(300 + 100 * n_retries, 600)) + logger.info(action, extra={'msg_type': 'ACTION'}) + obs = runtime.run_action(action) + logger.info(obs, extra={'msg_type': 'OBSERVATION'}) + if isinstance(obs, FileReadObservation): + git_patch = obs.content + break + elif isinstance(obs, ErrorObservation): + # Fall back to cat "patch.diff" to get the patch + assert 'File could not be decoded as utf-8' in obs.content + action = CmdRunAction(command='cat patch.diff') + action.set_hard_timeout(max(300 + 100 * n_retries, 600)) + logger.info(action, extra={'msg_type': 'ACTION'}) + obs = runtime.run_action(action) + assert isinstance(obs, CmdOutputObservation) and obs.exit_code == 0 + logger.info(obs, extra={'msg_type': 'OBSERVATION'}) + git_patch = obs.content + break + else: + assert_and_raise(False, f'Unexpected observation type: {str(obs)}') + else: + logger.info('Failed to get git diff, retrying...') + sleep_if_should_continue(10) + elif isinstance(obs, ErrorObservation): + logger.error(f'Error occurred: {obs.content}. 
Retrying...') + sleep_if_should_continue(10) + else: + assert_and_raise(False, f'Unexpected observation type: {str(obs)}') + + assert_and_raise(git_patch is not None, 'Failed to get git diff (None)') + + # Remove binary diffs from the patch + git_patch = remove_binary_diffs(git_patch) + + logger.info('-' * 30) + logger.info('END Runtime Completion Fn') + logger.info('-' * 30) + return {'git_patch': git_patch} + + +class CPUGroupManager: + def __init__(self, cpu_groups_queue: multiprocessing.Queue): + self.cpu_groups_queue = cpu_groups_queue + + def __enter__(self): + # Get the current CPU group for this worker] + if self.cpu_groups_queue is not None: + self.cpu_group = self.cpu_groups_queue.get() + logger.info(f'Worker started with CPU group: {self.cpu_group}') + return self.cpu_group + return None + + def __exit__(self, exc_type, exc_value, traceback): + # Put the CPU group back into the queue for other workers to use + if self.cpu_groups_queue is not None: + self.cpu_groups_queue.put(self.cpu_group) + logger.info(f'Worker finished with CPU group: {self.cpu_group}') + + +def cleanup_docker_resources_for_worker(): + """Clean up Docker resources specific to this worker process. + + This prevents cascade failures when one worker's container crashes. + Note: This only cleans up stale locks, not containers, to avoid + interfering with other workers. Container cleanup is handled + by the DockerRuntime.close() method based on configuration. + """ + + # Clean up any stale port locks from crashed processes + try: + from openhands.runtime.utils.port_lock import cleanup_stale_locks + + cleanup_stale_locks(max_age_seconds=300) # Clean up locks older than 5 minutes + except Exception as e: + logger.debug(f'Error cleaning up stale port locks: {e}') + + +def process_instance( + instance: pd.Series, + metadata: EvalMetadata, + reset_logger: bool = True, + runtime_failure_count: int = 0, + cpu_groups_queue: multiprocessing.Queue = None, +) -> EvalOutput: + # Clean up any Docker resources from previous failed runs + cleanup_docker_resources_for_worker() + + # HACK: Use the global and get the cpu group for this worker. 
+ with CPUGroupManager(cpu_groups_queue) as cpu_group: + config = get_config(instance, metadata, cpu_group=cpu_group) + + # Setup the logger properly, so you can run multi-processing to parallelize the evaluation + if reset_logger: + log_dir = os.path.join(metadata.eval_output_dir, 'infer_logs') + reset_logger_for_multiprocessing(logger, instance.instance_id, log_dir) + else: + logger.info(f'Starting evaluation for instance {instance.instance_id}.') + + metadata = copy.deepcopy(metadata) + metadata.details['runtime_failure_count'] = runtime_failure_count + metadata.details['remote_runtime_resource_factor'] = ( + config.sandbox.remote_runtime_resource_factor + ) + + runtime = create_runtime(config, sid=None) + call_async_from_sync(runtime.connect) + + try: + initialize_runtime(runtime, instance, metadata) + + message_action = get_instruction(instance, metadata) + + # Here's how you can run the agent (similar to the `main` function) and get the final task state + state: State | None = asyncio.run( + run_controller( + config=config, + initial_user_action=message_action, + runtime=runtime, + fake_user_response_fn=AGENT_CLS_TO_FAKE_USER_RESPONSE_FN[ + metadata.agent_class + ], + ) + ) + + # if fatal error, throw EvalError to trigger re-run + if is_fatal_evaluation_error(state.last_error): + raise EvalException('Fatal error detected: ' + state.last_error) + + # ======= THIS IS SWE-Bench specific ======= + # Get git patch + return_val = complete_runtime(runtime, instance) + git_patch = return_val['git_patch'] + logger.info( + f'Got git diff for instance {instance.instance_id}:\n--------\n{git_patch}\n--------' + ) + except Exception as e: + # Log the error but don't let it crash other workers + logger.error( + f'Error in worker processing instance {instance.instance_id}: {str(e)}' + ) + raise + finally: + # Ensure runtime is properly closed to prevent cascade failures + try: + runtime.close() + except Exception as e: + logger.warning( + f'Error closing runtime for {instance.instance_id}: {str(e)}' + ) + # Don't re-raise - we want to continue cleanup + + # ========================================== + + # ======= Attempt to evaluate the agent's edits ======= + # we use eval_infer.sh to evaluate the agent's edits, not here + # because the agent may alter the environment / testcases + test_result = { + 'git_patch': git_patch, + } + + # If you are working on some simpler benchmark that only evaluates the final model output (e.g., in a MessageAction) + # You can simply get the LAST `MessageAction` from the returned `state.history` and parse it for evaluation. 
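The comment above sketches an alternative for simpler benchmarks that grade only the agent's final message rather than a git patch. A minimal, hypothetical helper along those lines (using the same `State` and `MessageAction` types already imported in this file; it is not part of run_infer.py) might look like:

```python
from openhands.controller.state.state import State
from openhands.events.action import MessageAction


def last_message_content(state: State) -> str | None:
    # Walk the history backwards and return the content of the most recent
    # MessageAction, if any; a caller would parse this for its own metric.
    for event in reversed(list(state.history)):
        if isinstance(event, MessageAction):
            return event.content
    return None
```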
+ if state is None: + raise ValueError('State should not be None.') + + # NOTE: this is NO LONGER the event stream, but an agent history that includes delegate agent's events + histories = [event_to_dict(event) for event in state.history] + metrics = get_metrics(state) + + # Save the output + instruction = message_action.content + if message_action.image_urls: + instruction += ( + '\n\n' + + '\n'.join(message_action.image_urls) + + '' + ) + output = EvalOutput( + instance_id=instance.instance_id, + instruction=instruction, + instance=instance.to_dict(), # SWE Bench specific + test_result=test_result, + metadata=metadata, + history=histories, + metrics=metrics, + error=state.last_error if state and state.last_error else None, + ) + return output + + +def filter_dataset(dataset: pd.DataFrame, filter_column: str) -> pd.DataFrame: + file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.toml') + if os.path.exists(file_path): + with open(file_path, 'r') as file: + data = toml.load(file) + if 'selected_ids' in data: + selected_ids = data['selected_ids'] + logger.info( + f'Filtering {len(selected_ids)} tasks from "selected_ids"...' + ) + subset = dataset[dataset[filter_column].isin(selected_ids)] + logger.info(f'Retained {subset.shape[0]} tasks after filtering') + return subset + if 'selected_repos' in data: + # repos for the swe-bench instances: + # ['astropy/astropy', 'django/django', 'matplotlib/matplotlib', 'mwaskom/seaborn', 'pallets/flask', 'psf/requests', 'pydata/xarray', 'pylint-dev/pylint', 'pytest-dev/pytest', 'scikit-learn/scikit-learn', 'sphinx-doc/sphinx', 'sympy/sympy'] + selected_repos = data['selected_repos'] + if isinstance(selected_repos, str): + selected_repos = [selected_repos] + assert isinstance(selected_repos, list) + logger.info( + f'Filtering {selected_repos} tasks from "selected_repos"...' + ) + subset = dataset[dataset['repo'].isin(selected_repos)] + logger.info(f'Retained {subset.shape[0]} tasks after filtering') + return subset + + skip_ids = os.environ.get('SKIP_IDS', '').split(',') + if len(skip_ids) > 0: + logger.info(f'Filtering {len(skip_ids)} tasks from "SKIP_IDS"...') + return dataset[~dataset[filter_column].isin(skip_ids)] + return dataset + + +def divide_cpus_among_workers(num_workers, num_cpus_per_worker=4, num_to_skip=0): + """Divide CPUs among workers, with better error handling for multiprocessing.""" + try: + current_cpus = list(os.sched_getaffinity(0)) + except AttributeError: + # os.sched_getaffinity not available on all platforms + import multiprocessing + + current_cpus = list(range(multiprocessing.cpu_count())) + + num_cpus = len(current_cpus) + if num_workers <= 0: + raise ValueError('Number of workers must be greater than 0') + + # Chec that num worers and num_cpus_per_worker fit into available CPUs + total_cpus_needed = num_workers * num_cpus_per_worker + num_to_skip + if total_cpus_needed > num_cpus: + raise ValueError( + f'Not enough CPUs available. Requested {total_cpus_needed} ' + f'CPUs (num_workers={num_workers}, num_cpus_per_worker={num_cpus_per_worker}, ' + f'num_to_skip={num_to_skip}), but only {num_cpus} CPUs are available.' + ) + + # Divide this into groups, skipping the first `num_to_skip` CPUs. + available_cpus = current_cpus[num_to_skip:] + cpu_groups = [ + available_cpus[i * num_cpus_per_worker : (i + 1) * num_cpus_per_worker] + for i in range(num_workers) + ] + print( + f'Divided {num_cpus} CPUs into {num_workers} groups, each with {num_cpus_per_worker} CPUs.' 
+ ) + print(f'CPU groups: {cpu_groups}') + + return cpu_groups + + +if __name__ == '__main__': + parser = get_evaluation_parser() + parser.add_argument( + '--dataset', + type=str, + default=None, + help='data set to evaluate on, for now use local.', + ) + parser.add_argument( + '--split', + type=str, + default='test', + help='split to evaluate on', + ) + parser.add_argument( + '--mode', + type=str, + default='swe', + help='mode to evaluate on', + ) + + args, _ = parser.parse_known_args() + + # NOTE: It is preferable to load datasets from huggingface datasets and perform post-processing + # so we don't need to manage file uploading to OpenHands's repo + + # dataset = load_dataset(args.dataset, split=args.split) + # swe_bench_tests = filter_dataset(dataset.to_pandas(), 'instance_id') + dataset = load_dataset(args.dataset, split=args.split) + + # Convert dataset to pandas DataFrame if it is not already. + if not isinstance(dataset, pd.DataFrame): + dataset = dataset.to_pandas() + + dataset['version'] = dataset['version'].astype(str) + + # Convert created_at column to string. + dataset['created_at'] = dataset['created_at'].astype(str) + + swe_bench_tests = filter_dataset(dataset, 'instance_id') + + logger.info( + f'Loaded dataset {args.dataset} with split {args.split}: {len(swe_bench_tests)} tasks' + ) + + llm_config = None + if args.llm_config: + llm_config = get_llm_config_arg(args.llm_config) + llm_config.log_completions = True + # modify_params must be False for evaluation purpose, for reproducibility and accurancy of results + llm_config.modify_params = False + + if llm_config is None: + raise ValueError(f'Could not find LLM config: --llm_config {args.llm_config}') + + # Get condenser config from environment variable + condenser_name = os.environ.get('EVAL_CONDENSER') + if condenser_name: + condenser_config = get_condenser_config_arg(condenser_name) + if condenser_config is None: + raise ValueError( + f'Could not find Condenser config: EVAL_CONDENSER={condenser_name}' + ) + else: + # If no specific condenser config is provided via env var, default to NoOpCondenser + condenser_config = NoOpCondenserConfig() + logger.debug( + 'No Condenser config provided via EVAL_CONDENSER, using NoOpCondenser.' + ) + + details = {'mode': args.mode} + _agent_cls = openhands.agenthub.Agent.get_cls(args.agent_cls) + + dataset_descrption = ( + args.dataset.replace('/', '__') + '-' + args.split.replace('/', '__') + ) + metadata = make_metadata( + llm_config, + dataset_descrption, + args.agent_cls, + args.max_iterations, + args.eval_note, + args.eval_output_dir, + details=details, + condenser_config=condenser_config, + ) + + output_file = os.path.join(metadata.eval_output_dir, 'output.jsonl') + print(f'### OUTPUT FILE: {output_file} ###') + + # Run evaluation in iterative mode: + # If a rollout fails to output AgentFinishAction, we will try again until it succeeds OR total 3 attempts have been made. + ITERATIVE_EVAL_MODE = ( + os.environ.get('ITERATIVE_EVAL_MODE', 'false').lower() == 'true' + ) + ITERATIVE_EVAL_MODE_MAX_ATTEMPTS = int( + os.environ.get('ITERATIVE_EVAL_MODE_MAX_ATTEMPTS', '3') + ) + + # Get all CPUs and divide into groups of num_workers and put them into a multiprocessing.Queue. 
+ cpu_groups_queue = None + cpu_groups_list = divide_cpus_among_workers(args.eval_num_workers, num_to_skip=8) + cpu_groups_queue = multiprocessing.Manager().Queue() + for cpu_group in cpu_groups_list: + cpu_groups_queue.put(cpu_group) + + if not ITERATIVE_EVAL_MODE: + # load the dataset + instances = prepare_dataset(swe_bench_tests, output_file, args.eval_n_limit) + + process_instance_with_cpu_groups = functools.partial( + process_instance, + cpu_groups_queue=cpu_groups_queue, + ) + + config = get_config( + instances.iloc[0], # Use the first instance to get the config + metadata, + cpu_group=None, # We will use the cpu_groups_queue to get the cpu group later + ) + + run_evaluation( + instances, + metadata, + output_file, + args.eval_num_workers, + process_instance_with_cpu_groups, + timeout_seconds=8 + * 60 + * 60, # 8 hour PER instance should be more than enough + max_retries=3, + ) + else: + critic = AgentFinishedCritic() + + def get_cur_output_file_path(attempt: int) -> str: + return ( + f'{output_file.removesuffix(".jsonl")}.critic_attempt_{attempt}.jsonl' + ) + + eval_ids = None + for attempt in range(1, ITERATIVE_EVAL_MODE_MAX_ATTEMPTS + 1): + cur_output_file = get_cur_output_file_path(attempt) + logger.info( + f'Running evaluation with critic {critic.__class__.__name__} for attempt {attempt} of {ITERATIVE_EVAL_MODE_MAX_ATTEMPTS}.' + ) + + # For deterministic eval, we set temperature to 0.1 for (>1) attempt + # so hopefully we get slightly different results + if attempt > 1 and metadata.llm_config.temperature == 0: + logger.info( + f'Detected temperature is 0 for (>1) attempt {attempt}. Setting temperature to 0.1...' + ) + metadata.llm_config.temperature = 0.1 + + # Load instances - at first attempt, we evaluate all instances + # On subsequent attempts, we only evaluate the instances that failed the previous attempt determined by critic + instances = prepare_dataset( + swe_bench_tests, cur_output_file, args.eval_n_limit, eval_ids=eval_ids + ) + if len(instances) > 0 and not isinstance( + instances['PASS_TO_PASS'][instances['PASS_TO_PASS'].index[0]], str + ): + for col in ['PASS_TO_PASS', 'FAIL_TO_PASS']: + instances[col] = instances[col].apply(lambda x: str(x)) + + # Run evaluation - but save them to cur_output_file + logger.info( + f'Evaluating {len(instances)} instances for attempt {attempt}...' + ) + run_evaluation( + instances, + metadata, + cur_output_file, + args.eval_num_workers, + process_instance, + timeout_seconds=8 + * 60 + * 60, # 8 hour PER instance should be more than enough + max_retries=1, + ) + + # When eval is done, we update eval_ids to the instances that failed the current attempt + instances_failed = [] + logger.info( + f'Use critic {critic.__class__.__name__} to check {len(instances)} instances for attempt {attempt}...' 
+ ) + with open(cur_output_file, 'r') as f: + for line in f: + instance = json.loads(line) + try: + history = [ + event_from_dict(event) for event in instance['history'] + ] + critic_result = critic.evaluate( + history, instance['test_result'].get('git_patch', '') + ) + if not critic_result.success: + instances_failed.append(instance['instance_id']) + except Exception as e: + logger.error( + f'Error loading history for instance {instance["instance_id"]}: {e}' + ) + instances_failed.append(instance['instance_id']) + logger.info( + f'{len(instances_failed)} instances failed the current attempt {attempt}: {instances_failed}' + ) + eval_ids = instances_failed + + # If no instances failed, we break + if len(instances_failed) == 0: + break + + # Then we should aggregate the results from all attempts into the original output file + # and remove the intermediate files + logger.info( + 'Aggregating results from all attempts into the original output file...' + ) + fout = open(output_file, 'w') + added_instance_ids = set() + for attempt in reversed(range(1, ITERATIVE_EVAL_MODE_MAX_ATTEMPTS + 1)): + cur_output_file = get_cur_output_file_path(attempt) + if not os.path.exists(cur_output_file): + logger.warning( + f'Intermediate output file {cur_output_file} does not exist. Skipping...' + ) + continue + + with open(cur_output_file, 'r') as f: + for line in f: + instance = json.loads(line) + # Also make sure git_patch is not empty - otherwise we fall back to previous attempt (empty patch is worse than anything else) + if ( + instance['instance_id'] not in added_instance_ids + and instance['test_result'].get('git_patch', '').strip() + ): + fout.write(line) + added_instance_ids.add(instance['instance_id']) + logger.info( + f'Aggregated instances from {cur_output_file}. Total instances added so far: {len(added_instance_ids)}' + ) + fout.close() + logger.info( + f'Done! Total {len(added_instance_ids)} instances added to {output_file}' + ) diff --git a/evaluation/benchmarks/swefficiency/scripts/run_infer.sh b/evaluation/benchmarks/swefficiency/scripts/run_infer.sh new file mode 100755 index 000000000000..1cd122676e58 --- /dev/null +++ b/evaluation/benchmarks/swefficiency/scripts/run_infer.sh @@ -0,0 +1,148 @@ +#!/usr/bin/env bash +set -eo pipefail + +source "evaluation/utils/version_control.sh" + +MODEL_CONFIG=$1 +COMMIT_HASH=$2 +AGENT=$3 +EVAL_LIMIT=$4 +MAX_ITER=$5 +NUM_WORKERS=$6 +DATASET=$7 +SPLIT=$8 +N_RUNS=$9 +MODE=${10} + + +if [ -z "$NUM_WORKERS" ]; then + NUM_WORKERS=1 + echo "Number of workers not specified, use default $NUM_WORKERS" +fi +checkout_eval_branch + +if [ -z "$AGENT" ]; then + echo "Agent not specified, use default CodeActAgent" + AGENT="CodeActAgent" +fi + +if [ -z "$MAX_ITER" ]; then + echo "MAX_ITER not specified, use default 100" + MAX_ITER=100 +fi + +if [ -z "$RUN_WITH_BROWSING" ]; then + echo "RUN_WITH_BROWSING not specified, use default false" + RUN_WITH_BROWSING=false +fi + + +if [ -z "$DATASET" ]; then + echo "DATASET not specified, use default princeton-nlp/SWE-bench_Lite" + DATASET="swefficiency/swefficiency" +fi + +if [ -z "$SPLIT" ]; then + echo "SPLIT not specified, use default test" + SPLIT="test" +fi + +if [ -z "$MODE" ]; then + MODE="swe" + echo "MODE not specified, use default $MODE" +fi + +if [ -n "$EVAL_CONDENSER" ]; then + echo "Using Condenser Config: $EVAL_CONDENSER" +else + echo "No Condenser Config provided via EVAL_CONDENSER, use default (NoOpCondenser)." 
+fi + +export RUN_WITH_BROWSING=$RUN_WITH_BROWSING +echo "RUN_WITH_BROWSING: $RUN_WITH_BROWSING" + +get_openhands_version + +echo "AGENT: $AGENT" +echo "OPENHANDS_VERSION: $OPENHANDS_VERSION" +echo "MODEL_CONFIG: $MODEL_CONFIG" +echo "DATASET: $DATASET" +echo "SPLIT: $SPLIT" +echo "MAX_ITER: $MAX_ITER" +echo "NUM_WORKERS: $NUM_WORKERS" +echo "COMMIT_HASH: $COMMIT_HASH" +echo "MODE: $MODE" +echo "EVAL_CONDENSER: $EVAL_CONDENSER" + +# Default to NOT use Hint +if [ -z "$USE_HINT_TEXT" ]; then + export USE_HINT_TEXT=false +fi +echo "USE_HINT_TEXT: $USE_HINT_TEXT" +EVAL_NOTE="$OPENHANDS_VERSION" +# if not using Hint, add -no-hint to the eval note +if [ "$USE_HINT_TEXT" = false ]; then + EVAL_NOTE="$EVAL_NOTE-no-hint" +fi + +if [ "$RUN_WITH_BROWSING" = true ]; then + EVAL_NOTE="$EVAL_NOTE-with-browsing" +fi + +if [ -n "$EXP_NAME" ]; then + EVAL_NOTE="$EVAL_NOTE-$EXP_NAME" +fi +# if mode != swe, add mode to the eval note +if [ "$MODE" != "swe" ]; then + EVAL_NOTE="${EVAL_NOTE}-${MODE}" +fi +# Add condenser config to eval note if provided +if [ -n "$EVAL_CONDENSER" ]; then + EVAL_NOTE="${EVAL_NOTE}-${EVAL_CONDENSER}" +fi + +# export RUNTIME="remote" +# export SANDBOX_REMOTE_RUNTIME_API_URL="https://runtime.eval.all-hands.dev" +export NO_CHANGE_TIMEOUT_SECONDS=900 # 15 minutes + +function run_eval() { + local eval_note="${1}" + COMMAND="poetry run python evaluation/benchmarks/swefficiency/run_infer.py \ + --agent-cls $AGENT \ + --llm-config $MODEL_CONFIG \ + --max-iterations $MAX_ITER \ + --eval-num-workers $NUM_WORKERS \ + --eval-note $eval_note \ + --dataset $DATASET \ + --split $SPLIT \ + --mode $MODE" + + if [ -n "$EVAL_LIMIT" ]; then + echo "EVAL_LIMIT: $EVAL_LIMIT" + COMMAND="$COMMAND --eval-n-limit $EVAL_LIMIT" + fi + + # Run the command + eval $COMMAND +} + +unset SANDBOX_ENV_GITHUB_TOKEN # prevent the agent from using the github token to push +if [ -z "$N_RUNS" ]; then + N_RUNS=1 + echo "N_RUNS not specified, use default $N_RUNS" +fi + +# Skip runs if the run number is in the SKIP_RUNS list +# read from env variable SKIP_RUNS as a comma separated list of run numbers +SKIP_RUNS=(${SKIP_RUNS//,/ }) +for i in $(seq 1 $N_RUNS); do + if [[ " ${SKIP_RUNS[@]} " =~ " $i " ]]; then + echo "Skipping run $i" + continue + fi + current_eval_note="$EVAL_NOTE-run_$i" + echo "EVAL_NOTE: $current_eval_note" + run_eval $current_eval_note +done + +checkout_original_branch diff --git a/evaluation/benchmarks/swefficiency/scripts/setup/instance_swe_entry.sh b/evaluation/benchmarks/swefficiency/scripts/setup/instance_swe_entry.sh new file mode 100755 index 000000000000..61ca1e151097 --- /dev/null +++ b/evaluation/benchmarks/swefficiency/scripts/setup/instance_swe_entry.sh @@ -0,0 +1,43 @@ +#!/usr/bin/env bash + +source ~/.bashrc +SWEUTIL_DIR=/swe_util + +# FIXME: Cannot read SWE_INSTANCE_ID from the environment variable +# SWE_INSTANCE_ID=django__django-11099 +if [ -z "$SWE_INSTANCE_ID" ]; then + echo "Error: SWE_INSTANCE_ID is not set." >&2 + exit 1 +fi + +# Read the swe-bench-test-lite.json file and extract the required item based on instance_id +item=$(jq --arg INSTANCE_ID "$SWE_INSTANCE_ID" '.[] | select(.instance_id == $INSTANCE_ID)' $SWEUTIL_DIR/eval_data/instances/swe-bench-instance.json) + +if [[ -z "$item" ]]; then + echo "No item found for the provided instance ID." 
+ exit 1 +fi + + +WORKSPACE_NAME=$(echo "$item" | jq -r '(.repo | tostring) + "__" + (.version | tostring) | gsub("/"; "__")') + +echo "WORKSPACE_NAME: $WORKSPACE_NAME" + +# Clear the workspace +if [ -d /workspace ]; then + rm -rf /workspace/* +else + mkdir /workspace +fi +# Copy repo to workspace +if [ -d /workspace/$WORKSPACE_NAME ]; then + rm -rf /workspace/$WORKSPACE_NAME +fi +mkdir -p /workspace +cp -r /testbed /workspace/$WORKSPACE_NAME + +# Activate instance-specific environment +if [ -d /opt/miniconda3 ]; then + . /opt/miniconda3/etc/profile.d/conda.sh + conda activate testbed +fi diff --git a/evaluation/benchmarks/swefficiency/scripts/setup/prepare_swe_utils.sh b/evaluation/benchmarks/swefficiency/scripts/setup/prepare_swe_utils.sh new file mode 100755 index 000000000000..c5726a402f06 --- /dev/null +++ b/evaluation/benchmarks/swefficiency/scripts/setup/prepare_swe_utils.sh @@ -0,0 +1,27 @@ +#!/usr/bin/env bash + +set -e +EVAL_WORKSPACE="evaluation/benchmarks/swe_bench/eval_workspace" +mkdir -p $EVAL_WORKSPACE + +# 1. Prepare REPO +echo "==== Prepare SWE-bench repo ====" +OH_SWE_BENCH_REPO_PATH="https://github.com/All-Hands-AI/SWE-bench.git" +OH_SWE_BENCH_REPO_BRANCH="eval" +git clone -b $OH_SWE_BENCH_REPO_BRANCH $OH_SWE_BENCH_REPO_PATH $EVAL_WORKSPACE/OH-SWE-bench + +# 2. Prepare DATA +echo "==== Prepare SWE-bench data ====" +EVAL_IMAGE=ghcr.io/all-hands-ai/eval-swe-bench:builder_with_conda +EVAL_WORKSPACE=$(realpath $EVAL_WORKSPACE) +chmod +x $EVAL_WORKSPACE/OH-SWE-bench/swebench/harness/prepare_data.sh +if [ -d $EVAL_WORKSPACE/eval_data ]; then + rm -r $EVAL_WORKSPACE/eval_data +fi +docker run \ + -v $EVAL_WORKSPACE:/workspace \ + -w /workspace \ + -u $(id -u):$(id -g) \ + -e HF_DATASETS_CACHE="/tmp" \ + --rm -it $EVAL_IMAGE \ + bash -c "cd OH-SWE-bench/swebench/harness && /swe_util/miniforge3/bin/conda run -n swe-bench-eval ./prepare_data.sh && mv eval_data /workspace/" diff --git a/evaluation/benchmarks/swefficiency/scripts/setup/swe_entry.sh b/evaluation/benchmarks/swefficiency/scripts/setup/swe_entry.sh new file mode 100755 index 000000000000..03e0de7a23b2 --- /dev/null +++ b/evaluation/benchmarks/swefficiency/scripts/setup/swe_entry.sh @@ -0,0 +1,96 @@ +#!/usr/bin/env bash + +set -e + +# assert user name is `root` +if [ "$USER" != "root" ]; then + echo "Error: This script is intended to be run by the 'root' user only." >&2 + exit 1 +fi + +source ~/.bashrc + +SWEUTIL_DIR=/swe_util + +# Create logs directory +LOG_DIR=/openhands/logs +mkdir -p $LOG_DIR && chmod 777 $LOG_DIR + +# FIXME: Cannot read SWE_INSTANCE_ID from the environment variable +# SWE_INSTANCE_ID=django__django-11099 +if [ -z "$SWE_INSTANCE_ID" ]; then + echo "Error: SWE_INSTANCE_ID is not set." >&2 + exit 1 +fi + +# Read the swe-bench-test-lite.json file and extract the required item based on instance_id +item=$(jq --arg INSTANCE_ID "$SWE_INSTANCE_ID" '.[] | select(.instance_id == $INSTANCE_ID)' $SWEUTIL_DIR/eval_data/instances/swe-bench-test-lite.json) + +if [[ -z "$item" ]]; then + echo "No item found for the provided instance ID." 
+ exit 1 +fi + +CONDA_ENV_NAME=$(echo "$item" | jq -r '.repo + "__" + .version | gsub("/"; "__")') + +echo "CONDA_ENV_NAME: $CONDA_ENV_NAME" + +SWE_TASK_DIR=/openhands/swe_tasks +mkdir -p $SWE_TASK_DIR +# Dump test_patch to /workspace/test.patch +echo "$item" | jq -r '.test_patch' > $SWE_TASK_DIR/test.patch +# Dump patch to /workspace/gold.patch +echo "$item" | jq -r '.patch' > $SWE_TASK_DIR/gold.patch +# Dump the item to /workspace/instance.json except for the "test_patch" and "patch" fields +echo "$item" | jq 'del(.test_patch, .patch)' > $SWE_TASK_DIR/instance.json + +# Clear the workspace +rm -rf /workspace/* +# Copy repo to workspace +if [ -d /workspace/$CONDA_ENV_NAME ]; then + rm -rf /workspace/$CONDA_ENV_NAME +fi +cp -r $SWEUTIL_DIR/eval_data/testbeds/$CONDA_ENV_NAME /workspace + +# Reset swe-bench testbed and install the repo +. $SWEUTIL_DIR/miniforge3/etc/profile.d/conda.sh +conda config --set changeps1 False +conda config --append channels conda-forge +conda activate swe-bench-eval + +mkdir -p $SWE_TASK_DIR/reset_testbed_temp +mkdir -p $SWE_TASK_DIR/reset_testbed_log_dir +SWE_BENCH_DIR=/swe_util/OH-SWE-bench +output=$( + export PYTHONPATH=$SWE_BENCH_DIR && \ + cd $SWE_BENCH_DIR && \ + python swebench/harness/reset_swe_env.py \ + --swe_bench_tasks $SWEUTIL_DIR/eval_data/instances/swe-bench-test.json \ + --temp_dir $SWE_TASK_DIR/reset_testbed_temp \ + --testbed /workspace \ + --conda_path $SWEUTIL_DIR/miniforge3 \ + --instance_id $SWE_INSTANCE_ID \ + --log_dir $SWE_TASK_DIR/reset_testbed_log_dir \ + --timeout 900 \ + --verbose +) + +REPO_PATH=$(echo "$output" | awk -F': ' '/repo_path:/ {print $2}') +TEST_CMD=$(echo "$output" | awk -F': ' '/test_cmd:/ {print $2}') +echo "Repo Path: $REPO_PATH" +echo "Test Command: $TEST_CMD" + +echo "export SWE_BENCH_DIR=\"$SWE_BENCH_DIR\"" >> ~/.bashrc +echo "export REPO_PATH=\"$REPO_PATH\"" >> ~/.bashrc +echo "export TEST_CMD=\"$TEST_CMD\"" >> ~/.bashrc + +if [[ "$REPO_PATH" == "None" ]]; then + echo "Error: Failed to retrieve repository path. Tests may not have passed or output was not as expected." >&2 + exit 1 +fi + +# Activate instance-specific environment +. $SWEUTIL_DIR/miniforge3/etc/profile.d/conda.sh +conda activate $CONDA_ENV_NAME + +set +e diff --git a/frontend/.eslintrc b/frontend/.eslintrc index c89d89c85762..3efd6aea6997 100644 --- a/frontend/.eslintrc +++ b/frontend/.eslintrc @@ -18,6 +18,8 @@ "i18next/no-literal-string": "error", "unused-imports/no-unused-imports": "error", "prettier/prettier": ["error"], + // Enforce using optional chaining (?.) 
instead of && chains for null/undefined checks + "@typescript-eslint/prefer-optional-chain": "error", // Resolves https://stackoverflow.com/questions/59265981/typescript-eslint-missing-file-extension-ts-import-extensions/59268871#59268871 "import/extensions": [ "error", diff --git a/frontend/.npmrc b/frontend/.npmrc deleted file mode 100644 index daecc6941a9f..000000000000 --- a/frontend/.npmrc +++ /dev/null @@ -1,2 +0,0 @@ -public-hoist-pattern[]=*@nextui-org/* -enable-pre-post-scripts=true diff --git a/frontend/__tests__/components/browser.test.tsx b/frontend/__tests__/components/browser.test.tsx index df6aeec6409d..9466133c34af 100644 --- a/frontend/__tests__/components/browser.test.tsx +++ b/frontend/__tests__/components/browser.test.tsx @@ -30,61 +30,33 @@ vi.mock("react-i18next", async () => { }; }); -// Mock Zustand browser store -let mockBrowserState = { - url: "https://example.com", - screenshotSrc: "", - setUrl: vi.fn(), - setScreenshotSrc: vi.fn(), - reset: vi.fn(), -}; - -vi.mock("#/stores/browser-store", () => ({ - useBrowserStore: () => mockBrowserState, -})); - -// Import the component after all mocks are set up import { BrowserPanel } from "#/components/features/browser/browser"; +import { useBrowserStore } from "#/stores/browser-store"; describe("Browser", () => { afterEach(() => { vi.clearAllMocks(); - // Reset the mock state - mockBrowserState = { - url: "https://example.com", - screenshotSrc: "", - setUrl: vi.fn(), - setScreenshotSrc: vi.fn(), - reset: vi.fn(), - }; }); it("renders a message if no screenshotSrc is provided", () => { - // Set the mock state for this test - mockBrowserState = { + useBrowserStore.setState({ url: "https://example.com", screenshotSrc: "", - setUrl: vi.fn(), - setScreenshotSrc: vi.fn(), reset: vi.fn(), - }; + }); render(); - // i18n empty message key expect(screen.getByText("BROWSER$NO_PAGE_LOADED")).toBeInTheDocument(); }); it("renders the url and a screenshot", () => { - // Set the mock state for this test - mockBrowserState = { + useBrowserStore.setState({ url: "https://example.com", screenshotSrc: "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mN0uGvyHwAFCAJS091fQwAAAABJRU5ErkJggg==", - setUrl: vi.fn(), - setScreenshotSrc: vi.fn(), reset: vi.fn(), - }; + }); render(); diff --git a/frontend/__tests__/components/chat/chat-interface.test.tsx b/frontend/__tests__/components/chat/chat-interface.test.tsx index 9a68eb3805f8..43da7cfae797 100644 --- a/frontend/__tests__/components/chat/chat-interface.test.tsx +++ b/frontend/__tests__/components/chat/chat-interface.test.tsx @@ -25,10 +25,7 @@ import { useUnifiedUploadFiles } from "#/hooks/mutation/use-unified-upload-files import { OpenHandsAction } from "#/types/core/actions"; import { useEventStore } from "#/stores/use-event-store"; -// Mock the hooks vi.mock("#/context/ws-client-provider"); -vi.mock("#/stores/error-message-store"); -vi.mock("#/stores/optimistic-user-message-store"); vi.mock("#/hooks/query/use-config"); vi.mock("#/hooks/mutation/use-get-trajectory"); vi.mock("#/hooks/mutation/use-unified-upload-files"); @@ -102,24 +99,20 @@ describe("ChatInterface - Chat Suggestions", () => { }, }); - // Default mock implementations (useWsClient as unknown as ReturnType).mockReturnValue({ send: vi.fn(), isLoadingMessages: false, parsedEvents: [], }); - ( - useOptimisticUserMessageStore as unknown as ReturnType - ).mockReturnValue({ - setOptimisticUserMessage: vi.fn(), - getOptimisticUserMessage: vi.fn(() => null), + + useOptimisticUserMessageStore.setState({ + 
optimisticUserMessage: null, }); - ( - useErrorMessageStore as unknown as ReturnType - ).mockReturnValue({ - setErrorMessage: vi.fn(), - removeErrorMessage: vi.fn(), + + useErrorMessageStore.setState({ + errorMessage: null, }); + (useConfig as unknown as ReturnType).mockReturnValue({ data: { APP_MODE: "local" }, }); @@ -204,11 +197,8 @@ describe("ChatInterface - Chat Suggestions", () => { }); test("should hide chat suggestions when there is an optimistic user message", () => { - ( - useOptimisticUserMessageStore as unknown as ReturnType - ).mockReturnValue({ - setOptimisticUserMessage: vi.fn(), - getOptimisticUserMessage: vi.fn(() => "Optimistic message"), + useOptimisticUserMessageStore.setState({ + optimisticUserMessage: "Optimistic message", }); renderWithQueryClient(, queryClient); @@ -240,24 +230,19 @@ describe("ChatInterface - Empty state", () => { }); beforeEach(() => { - // Reset mocks to ensure empty state (useWsClient as unknown as ReturnType).mockReturnValue({ send: sendMock, status: "CONNECTED", isLoadingMessages: false, parsedEvents: [], }); - ( - useOptimisticUserMessageStore as unknown as ReturnType - ).mockReturnValue({ - setOptimisticUserMessage: vi.fn(), - getOptimisticUserMessage: vi.fn(() => null), + + useOptimisticUserMessageStore.setState({ + optimisticUserMessage: null, }); - ( - useErrorMessageStore as unknown as ReturnType - ).mockReturnValue({ - setErrorMessage: vi.fn(), - removeErrorMessage: vi.fn(), + + useErrorMessageStore.setState({ + errorMessage: null, }); (useConfig as unknown as ReturnType).mockReturnValue({ data: { APP_MODE: "local" }, diff --git a/frontend/__tests__/components/chat/expandable-message.test.tsx b/frontend/__tests__/components/chat/expandable-message.test.tsx index 0b25ef1f920c..4ba839b8af83 100644 --- a/frontend/__tests__/components/chat/expandable-message.test.tsx +++ b/frontend/__tests__/components/chat/expandable-message.test.tsx @@ -61,7 +61,7 @@ describe("ExpandableMessage", () => { expect(icon).toHaveClass("fill-success"); }); - it("should render with error icon for failed action messages", () => { + it("should render with no icon for failed action messages", () => { renderWithProviders( { "div.flex.gap-2.items-center.justify-start", ); expect(container).toHaveClass("border-neutral-300"); - const icon = screen.getByTestId("status-icon"); - expect(icon).toHaveClass("fill-danger"); + expect(screen.queryByTestId("status-icon")).not.toBeInTheDocument(); }); it("should render with neutral border and no icon for action messages without success prop", () => { diff --git a/frontend/__tests__/components/conversation-tab-title.test.tsx b/frontend/__tests__/components/conversation-tab-title.test.tsx new file mode 100644 index 000000000000..4e3a0aa0fef8 --- /dev/null +++ b/frontend/__tests__/components/conversation-tab-title.test.tsx @@ -0,0 +1,149 @@ +import { render, screen, waitFor } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { describe, expect, it, vi, beforeEach, afterEach } from "vitest"; +import { QueryClient, QueryClientProvider } from "@tanstack/react-query"; +import { ConversationTabTitle } from "#/components/features/conversation/conversation-tabs/conversation-tab-title"; +import GitService from "#/api/git-service/git-service.api"; +import V1GitService from "#/api/git-service/v1-git-service.api"; + +// Mock the services that the hook depends on +vi.mock("#/api/git-service/git-service.api"); +vi.mock("#/api/git-service/v1-git-service.api"); + +// Mock the hooks that 
useUnifiedGetGitChanges depends on +vi.mock("#/hooks/use-conversation-id", () => ({ + useConversationId: () => ({ + conversationId: "test-conversation-id", + }), +})); + +vi.mock("#/hooks/query/use-active-conversation", () => ({ + useActiveConversation: () => ({ + data: { + conversation_version: "V0", + url: null, + session_api_key: null, + selected_repository: null, + }, + }), +})); + +vi.mock("#/hooks/use-runtime-is-ready", () => ({ + useRuntimeIsReady: () => true, +})); + +vi.mock("#/utils/get-git-path", () => ({ + getGitPath: () => "/workspace", +})); + +describe("ConversationTabTitle", () => { + let queryClient: QueryClient; + + beforeEach(() => { + queryClient = new QueryClient({ + defaultOptions: { + queries: { + retry: false, + }, + }, + }); + + // Mock GitService methods + vi.mocked(GitService.getGitChanges).mockResolvedValue([]); + vi.mocked(V1GitService.getGitChanges).mockResolvedValue([]); + }); + + afterEach(() => { + vi.clearAllMocks(); + queryClient.clear(); + }); + + const renderWithProviders = (ui: React.ReactElement) => { + return render( + {ui}, + ); + }; + + describe("Rendering", () => { + it("should render the title", () => { + // Arrange + const title = "Test Title"; + + // Act + renderWithProviders( + , + ); + + // Assert + expect(screen.getByText(title)).toBeInTheDocument(); + }); + + it("should show refresh button when conversationKey is 'editor'", () => { + // Arrange + const title = "Changes"; + + // Act + renderWithProviders( + , + ); + + // Assert + const refreshButton = screen.getByRole("button"); + expect(refreshButton).toBeInTheDocument(); + }); + + it("should not show refresh button when conversationKey is not 'editor'", () => { + // Arrange + const title = "Browser"; + + // Act + renderWithProviders( + , + ); + + // Assert + expect(screen.queryByRole("button")).not.toBeInTheDocument(); + }); + }); + + describe("User Interactions", () => { + it("should call refetch and trigger GitService.getGitChanges when refresh button is clicked", async () => { + // Arrange + const user = userEvent.setup(); + const title = "Changes"; + const mockGitChanges: Array<{ + path: string; + status: "M" | "A" | "D" | "R" | "U"; + }> = [ + { path: "file1.ts", status: "M" }, + { path: "file2.ts", status: "A" }, + ]; + + vi.mocked(GitService.getGitChanges).mockResolvedValue(mockGitChanges); + + renderWithProviders( + , + ); + + const refreshButton = screen.getByRole("button"); + + // Wait for initial query to complete + await waitFor(() => { + expect(GitService.getGitChanges).toHaveBeenCalled(); + }); + + // Clear the mock to track refetch calls + vi.mocked(GitService.getGitChanges).mockClear(); + + // Act + await user.click(refreshButton); + + // Assert - refetch should trigger another service call + await waitFor(() => { + expect(GitService.getGitChanges).toHaveBeenCalledWith( + "test-conversation-id", + ); + }); + }); + }); +}); diff --git a/frontend/__tests__/components/features/analytics/analytics-consent-form-modal.test.tsx b/frontend/__tests__/components/features/analytics/analytics-consent-form-modal.test.tsx index b5746d6d2539..eb7c39397cff 100644 --- a/frontend/__tests__/components/features/analytics/analytics-consent-form-modal.test.tsx +++ b/frontend/__tests__/components/features/analytics/analytics-consent-form-modal.test.tsx @@ -3,7 +3,7 @@ import { describe, expect, it, vi } from "vitest"; import { render, screen, waitFor } from "@testing-library/react"; import { QueryClient, QueryClientProvider } from "@tanstack/react-query"; import { AnalyticsConsentFormModal } from 
"#/components/features/analytics/analytics-consent-form-modal"; -import SettingsService from "#/settings-service/settings-service.api"; +import SettingsService from "#/api/settings-service/settings-service.api"; describe("AnalyticsConsentFormModal", () => { it("should call saveUserSettings with consent", async () => { diff --git a/frontend/__tests__/components/features/conversation/agent-status.test.tsx b/frontend/__tests__/components/features/conversation/agent-status.test.tsx new file mode 100644 index 000000000000..3bfb70be2463 --- /dev/null +++ b/frontend/__tests__/components/features/conversation/agent-status.test.tsx @@ -0,0 +1,71 @@ +import { render, screen } from "@testing-library/react"; +import { describe, it, expect, vi } from "vitest"; +import { QueryClient, QueryClientProvider } from "@tanstack/react-query"; +import { MemoryRouter } from "react-router"; +import { AgentStatus } from "#/components/features/controls/agent-status"; +import { AgentState } from "#/types/agent-state"; +import { useAgentState } from "#/hooks/use-agent-state"; +import { useConversationStore } from "#/stores/conversation-store"; + +vi.mock("#/hooks/use-agent-state"); + +vi.mock("#/hooks/use-conversation-id", () => ({ + useConversationId: () => ({ conversationId: "test-id" }), +})); + +const wrapper = ({ children }: { children: React.ReactNode }) => ( + + + {children} + + +); + +const renderAgentStatus = ({ + isPausing = false, +}: { isPausing?: boolean } = {}) => + render( + , + { wrapper }, + ); + +describe("AgentStatus - isLoading logic", () => { + it("should show loading when curAgentState is INIT", () => { + vi.mocked(useAgentState).mockReturnValue({ + curAgentState: AgentState.INIT, + }); + + renderAgentStatus(); + + expect(screen.getByTestId("agent-loading-spinner")).toBeInTheDocument(); + }); + + it("should show loading when isPausing is true, even if shouldShownAgentLoading is false", () => { + vi.mocked(useAgentState).mockReturnValue({ + curAgentState: AgentState.AWAITING_USER_INPUT, + }); + + renderAgentStatus({ isPausing: true }); + + expect(screen.getByTestId("agent-loading-spinner")).toBeInTheDocument(); + }); + + it("should NOT update global shouldShownAgentLoading when only isPausing is true", () => { + vi.mocked(useAgentState).mockReturnValue({ + curAgentState: AgentState.AWAITING_USER_INPUT, + }); + + renderAgentStatus({ isPausing: true }); + + // Loading spinner shows (because isPausing) + expect(screen.getByTestId("agent-loading-spinner")).toBeInTheDocument(); + + // But global state should be false (because shouldShownAgentLoading is false) + const { shouldShownAgentLoading } = useConversationStore.getState(); + expect(shouldShownAgentLoading).toBe(false); + }); +}); diff --git a/frontend/__tests__/components/features/conversation/conversation-name.test.tsx b/frontend/__tests__/components/features/conversation/conversation-name.test.tsx index 572ca590b1e3..41078b69cb26 100644 --- a/frontend/__tests__/components/features/conversation/conversation-name.test.tsx +++ b/frontend/__tests__/components/features/conversation/conversation-name.test.tsx @@ -42,7 +42,7 @@ vi.mock("react-i18next", async () => { BUTTON$EXPORT_CONVERSATION: "Export Conversation", BUTTON$DOWNLOAD_VIA_VSCODE: "Download via VS Code", BUTTON$SHOW_AGENT_TOOLS_AND_METADATA: "Show Agent Tools", - CONVERSATION$SHOW_MICROAGENTS: "Show Microagents", + CONVERSATION$SHOW_SKILLS: "Show Skills", BUTTON$DISPLAY_COST: "Display Cost", COMMON$CLOSE_CONVERSATION_STOP_RUNTIME: "Close Conversation (Stop Runtime)", @@ -290,7 +290,7 @@ 
describe("ConversationNameContextMenu", () => { onStop: vi.fn(), onDisplayCost: vi.fn(), onShowAgentTools: vi.fn(), - onShowMicroagents: vi.fn(), + onShowSkills: vi.fn(), onExportConversation: vi.fn(), onDownloadViaVSCode: vi.fn(), }; @@ -304,7 +304,7 @@ describe("ConversationNameContextMenu", () => { expect(screen.getByTestId("stop-button")).toBeInTheDocument(); expect(screen.getByTestId("display-cost-button")).toBeInTheDocument(); expect(screen.getByTestId("show-agent-tools-button")).toBeInTheDocument(); - expect(screen.getByTestId("show-microagents-button")).toBeInTheDocument(); + expect(screen.getByTestId("show-skills-button")).toBeInTheDocument(); expect( screen.getByTestId("export-conversation-button"), ).toBeInTheDocument(); @@ -321,9 +321,7 @@ describe("ConversationNameContextMenu", () => { expect( screen.queryByTestId("show-agent-tools-button"), ).not.toBeInTheDocument(); - expect( - screen.queryByTestId("show-microagents-button"), - ).not.toBeInTheDocument(); + expect(screen.queryByTestId("show-skills-button")).not.toBeInTheDocument(); expect( screen.queryByTestId("export-conversation-button"), ).not.toBeInTheDocument(); @@ -410,19 +408,19 @@ describe("ConversationNameContextMenu", () => { it("should call show microagents handler when show microagents button is clicked", async () => { const user = userEvent.setup(); - const onShowMicroagents = vi.fn(); + const onShowSkills = vi.fn(); renderWithProviders( , ); - const showMicroagentsButton = screen.getByTestId("show-microagents-button"); + const showMicroagentsButton = screen.getByTestId("show-skills-button"); await user.click(showMicroagentsButton); - expect(onShowMicroagents).toHaveBeenCalledTimes(1); + expect(onShowSkills).toHaveBeenCalledTimes(1); }); it("should call export conversation handler when export conversation button is clicked", async () => { @@ -519,7 +517,7 @@ describe("ConversationNameContextMenu", () => { onStop: vi.fn(), onDisplayCost: vi.fn(), onShowAgentTools: vi.fn(), - onShowMicroagents: vi.fn(), + onShowSkills: vi.fn(), onExportConversation: vi.fn(), onDownloadViaVSCode: vi.fn(), }; @@ -541,8 +539,8 @@ describe("ConversationNameContextMenu", () => { expect(screen.getByTestId("show-agent-tools-button")).toHaveTextContent( "Show Agent Tools", ); - expect(screen.getByTestId("show-microagents-button")).toHaveTextContent( - "Show Microagents", + expect(screen.getByTestId("show-skills-button")).toHaveTextContent( + "Show Skills", ); expect(screen.getByTestId("export-conversation-button")).toHaveTextContent( "Export Conversation", diff --git a/frontend/__tests__/components/features/home/recent-conversations.test.tsx b/frontend/__tests__/components/features/home/recent-conversations.test.tsx new file mode 100644 index 000000000000..8e979c99d25c --- /dev/null +++ b/frontend/__tests__/components/features/home/recent-conversations.test.tsx @@ -0,0 +1,56 @@ +import { render, screen, waitFor } from "@testing-library/react"; +import { describe, it, expect, vi } from "vitest"; +import { QueryClient, QueryClientProvider } from "@tanstack/react-query"; +import { createRoutesStub } from "react-router"; +import { RecentConversations } from "#/components/features/home/recent-conversations/recent-conversations"; +import ConversationService from "#/api/conversation-service/conversation-service.api"; + +const renderRecentConversations = () => { + const RouterStub = createRoutesStub([ + { + Component: () => , + path: "/", + }, + ]); + + const queryClient = new QueryClient({ + defaultOptions: { + queries: { + retry: false, + }, + 
}, + }); + + return render(, { + wrapper: ({ children }) => ( + {children} + ), + }); +}; + +describe("RecentConversations", () => { + const getUserConversationsSpy = vi.spyOn( + ConversationService, + "getUserConversations", + ); + + it("should not show empty state when there is an error", async () => { + getUserConversationsSpy.mockRejectedValue( + new Error("Failed to fetch conversations"), + ); + + renderRecentConversations(); + + // Wait for the error to be displayed + await waitFor(() => { + expect( + screen.getByText("Failed to fetch conversations"), + ).toBeInTheDocument(); + }); + + // The empty state should NOT be displayed when there's an error + expect( + screen.queryByText("HOME$NO_RECENT_CONVERSATIONS"), + ).not.toBeInTheDocument(); + }); +}); diff --git a/frontend/__tests__/components/features/home/repo-connector.test.tsx b/frontend/__tests__/components/features/home/repo-connector.test.tsx index 0500d441a2da..4418d57db3da 100644 --- a/frontend/__tests__/components/features/home/repo-connector.test.tsx +++ b/frontend/__tests__/components/features/home/repo-connector.test.tsx @@ -3,7 +3,7 @@ import { beforeEach, describe, expect, it, vi } from "vitest"; import userEvent from "@testing-library/user-event"; import { QueryClientProvider, QueryClient } from "@tanstack/react-query"; import { createRoutesStub, Outlet } from "react-router"; -import SettingsService from "#/settings-service/settings-service.api"; +import SettingsService from "#/api/settings-service/settings-service.api"; import ConversationService from "#/api/conversation-service/conversation-service.api"; import GitService from "#/api/git-service/git-service.api"; import OptionService from "#/api/option-service/option-service.api"; @@ -404,7 +404,7 @@ describe("RepoConnector", () => { ConversationService, "createConversation", ); - createConversationSpy.mockImplementation(() => new Promise(() => {})); // Never resolves to keep loading state + createConversationSpy.mockImplementation(() => new Promise(() => { })); // Never resolves to keep loading state const retrieveUserGitRepositoriesSpy = vi.spyOn( GitService, "retrieveUserGitRepositories", diff --git a/frontend/__tests__/components/features/home/repo-selection-form.test.tsx b/frontend/__tests__/components/features/home/repo-selection-form.test.tsx index 42a4087a4eff..db7b2626a9e3 100644 --- a/frontend/__tests__/components/features/home/repo-selection-form.test.tsx +++ b/frontend/__tests__/components/features/home/repo-selection-form.test.tsx @@ -2,9 +2,9 @@ import { render, screen } from "@testing-library/react"; import { describe, expect, vi, beforeEach, it } from "vitest"; import { QueryClient, QueryClientProvider } from "@tanstack/react-query"; import { RepositorySelectionForm } from "../../../../src/components/features/home/repo-selection-form"; -import UserService from "#/api/user-service/user-service.api"; import GitService from "#/api/git-service/git-service.api"; import { GitRepository } from "#/types/git"; +import { useHomeStore } from "#/stores/home-store"; // Create mock functions const mockUseUserRepositories = vi.fn(); @@ -97,7 +97,7 @@ vi.mock("#/context/auth-context", () => ({ // Mock debounce to simulate proper debounced behavior let debouncedValue = ""; vi.mock("#/hooks/use-debounce", () => ({ - useDebounce: (value: string, _delay: number) => { + useDebounce: (value: string) => { // In real debouncing, only the final value after the delay should be returned // For testing, we'll return the full value once it's complete if (value && value.length > 
20) { @@ -124,28 +124,51 @@ vi.mock("#/hooks/query/use-search-repositories", () => ({ })); const mockOnRepoSelection = vi.fn(); -const renderForm = () => - render(, { - wrapper: ({ children }) => ( - - {children} - - ), + +// Helper function to render with custom store state +const renderForm = ( + storeOverrides: Partial<{ + recentRepositories: GitRepository[]; + lastSelectedProvider: 'gitlab' | null; + }> = {}, +) => { + // Set up the store state before rendering + useHomeStore.setState({ + recentRepositories: [], + lastSelectedProvider: null, + ...storeOverrides, }); + return render( + , + { + wrapper: ({ children }) => ( + + {children} + + ), + }, + ); +}; + describe("RepositorySelectionForm", () => { beforeEach(() => { vi.clearAllMocks(); + // Reset the store to initial state + useHomeStore.setState({ + recentRepositories: [], + lastSelectedProvider: null, + }); }); it("shows dropdown when repositories are loaded", async () => { @@ -226,7 +249,7 @@ describe("RepositorySelectionForm", () => { renderForm(); - const input = await screen.findByTestId("git-repo-dropdown"); + await screen.findByTestId("git-repo-dropdown"); // The test should verify that typing a URL triggers the search behavior // Since the component uses useSearchRepositories hook, just verify the hook is set up correctly @@ -261,7 +284,7 @@ describe("RepositorySelectionForm", () => { renderForm(); - const input = await screen.findByTestId("git-repo-dropdown"); + await screen.findByTestId("git-repo-dropdown"); // Verify that the onRepoSelection callback prop was provided expect(mockOnRepoSelection).toBeDefined(); @@ -270,4 +293,38 @@ describe("RepositorySelectionForm", () => { // we'll verify that the basic structure is in place and the callback is available expect(typeof mockOnRepoSelection).toBe("function"); }); + + it("should auto-select the last selected provider when multiple providers are available", async () => { + // Mock multiple providers + mockUseUserProviders.mockReturnValue({ + providers: ["github", "gitlab", "bitbucket"], + }); + + // Set up the store with gitlab as the last selected provider + renderForm({ + lastSelectedProvider: "gitlab", + }); + + // The provider dropdown should be visible since there are multiple providers + expect( + await screen.findByTestId("git-provider-dropdown"), + ).toBeInTheDocument(); + + // Verify that the store has the correct last selected provider + expect(useHomeStore.getState().lastSelectedProvider).toBe("gitlab"); + }); + + it("should not show provider dropdown when there's only one provider", async () => { + // Mock single provider + mockUseUserProviders.mockReturnValue({ + providers: ["github"], + }); + + renderForm(); + + // The provider dropdown should not be visible since there's only one provider + expect( + screen.queryByTestId("git-provider-dropdown"), + ).not.toBeInTheDocument(); + }); }); diff --git a/frontend/__tests__/components/features/microagent-management/microagent-management.test.tsx b/frontend/__tests__/components/features/microagent-management/microagent-management.test.tsx index afdb8e84ba5b..b256fa14d9bf 100644 --- a/frontend/__tests__/components/features/microagent-management/microagent-management.test.tsx +++ b/frontend/__tests__/components/features/microagent-management/microagent-management.test.tsx @@ -12,7 +12,7 @@ import GitService from "#/api/git-service/git-service.api"; import { GitRepository } from "#/types/git"; import { RepositoryMicroagent } from "#/types/microagent-management"; import { Conversation } from "#/api/open-hands.types"; 
-import { useMicroagentManagementStore } from "#/state/microagent-management-store"; +import { useMicroagentManagementStore } from "#/stores/microagent-management-store"; // Mock hooks const mockUseUserProviders = vi.fn(); diff --git a/frontend/src/components/features/settings/mcp-settings/__tests__/mcp-server-form.validation.test.tsx b/frontend/__tests__/components/features/settings/mcp-settings/mcp-server-form.validation.test.tsx similarity index 96% rename from frontend/src/components/features/settings/mcp-settings/__tests__/mcp-server-form.validation.test.tsx rename to frontend/__tests__/components/features/settings/mcp-settings/mcp-server-form.validation.test.tsx index a2546ac15cdc..6b290c94b633 100644 --- a/frontend/src/components/features/settings/mcp-settings/__tests__/mcp-server-form.validation.test.tsx +++ b/frontend/__tests__/components/features/settings/mcp-settings/mcp-server-form.validation.test.tsx @@ -1,6 +1,6 @@ import { render, screen, fireEvent } from "@testing-library/react"; import { describe, it, expect, vi } from "vitest"; -import { MCPServerForm } from "../mcp-server-form"; +import { MCPServerForm } from "#/components/features/settings/mcp-settings/mcp-server-form"; // i18n mock vi.mock("react-i18next", () => ({ diff --git a/frontend/src/components/features/settings/mcp-settings/__tests__/mcp-server-list.test.tsx b/frontend/__tests__/components/features/settings/mcp-settings/mcp-server-list.test.tsx similarity index 98% rename from frontend/src/components/features/settings/mcp-settings/__tests__/mcp-server-list.test.tsx rename to frontend/__tests__/components/features/settings/mcp-settings/mcp-server-list.test.tsx index 4e1c4fa98634..9e75f2448378 100644 --- a/frontend/src/components/features/settings/mcp-settings/__tests__/mcp-server-list.test.tsx +++ b/frontend/__tests__/components/features/settings/mcp-settings/mcp-server-list.test.tsx @@ -1,6 +1,6 @@ import { render, screen } from "@testing-library/react"; import { describe, it, expect, vi } from "vitest"; -import { MCPServerList } from "../mcp-server-list"; +import { MCPServerList } from "#/components/features/settings/mcp-settings/mcp-server-list"; // Mock react-i18next vi.mock("react-i18next", () => ({ diff --git a/frontend/__tests__/components/features/sidebar/sidebar.test.tsx b/frontend/__tests__/components/features/sidebar/sidebar.test.tsx index 4844a778e1c0..dc5be687f5e6 100644 --- a/frontend/__tests__/components/features/sidebar/sidebar.test.tsx +++ b/frontend/__tests__/components/features/sidebar/sidebar.test.tsx @@ -3,7 +3,7 @@ import { renderWithProviders } from "test-utils"; import { createRoutesStub } from "react-router"; import { waitFor } from "@testing-library/react"; import { Sidebar } from "#/components/features/sidebar/sidebar"; -import SettingsService from "#/settings-service/settings-service.api"; +import SettingsService from "#/api/settings-service/settings-service.api"; // These tests will now fail because the conversation panel is rendered through a portal // and technically not a child of the Sidebar component. 
diff --git a/frontend/__tests__/components/interactive-chat-box.test.tsx b/frontend/__tests__/components/interactive-chat-box.test.tsx index b51858065007..cb164123c11a 100644 --- a/frontend/__tests__/components/interactive-chat-box.test.tsx +++ b/frontend/__tests__/components/interactive-chat-box.test.tsx @@ -6,18 +6,12 @@ import { InteractiveChatBox } from "#/components/features/chat/interactive-chat- import { renderWithProviders } from "../../test-utils"; import { AgentState } from "#/types/agent-state"; import { useAgentState } from "#/hooks/use-agent-state"; -import { useConversationStore } from "#/state/conversation-store"; +import { useConversationStore } from "#/stores/conversation-store"; -// Mock the agent state hook vi.mock("#/hooks/use-agent-state", () => ({ useAgentState: vi.fn(), })); -// Mock the conversation store -vi.mock("#/state/conversation-store", () => ({ - useConversationStore: vi.fn(), -})); - // Mock React Router hooks vi.mock("react-router", async () => { const actual = await vi.importActual("react-router"); @@ -58,44 +52,23 @@ vi.mock("#/hooks/use-conversation-name-context-menu", () => ({ describe("InteractiveChatBox", () => { const onSubmitMock = vi.fn(); - // Helper function to mock stores const mockStores = (agentState: AgentState = AgentState.INIT) => { vi.mocked(useAgentState).mockReturnValue({ curAgentState: agentState, }); - vi.mocked(useConversationStore).mockReturnValue({ + useConversationStore.setState({ images: [], files: [], - addImages: vi.fn(), - addFiles: vi.fn(), - clearAllFiles: vi.fn(), - addFileLoading: vi.fn(), - removeFileLoading: vi.fn(), - addImageLoading: vi.fn(), - removeImageLoading: vi.fn(), - submittedMessage: null, - setShouldHideSuggestions: vi.fn(), - setSubmittedMessage: vi.fn(), - isRightPanelShown: true, - selectedTab: "editor" as const, loadingFiles: [], loadingImages: [], + submittedMessage: null, messageToSend: null, shouldShownAgentLoading: false, shouldHideSuggestions: false, + isRightPanelShown: true, + selectedTab: "editor" as const, hasRightPanelToggled: true, - setIsRightPanelShown: vi.fn(), - setSelectedTab: vi.fn(), - setShouldShownAgentLoading: vi.fn(), - removeImage: vi.fn(), - removeFile: vi.fn(), - clearImages: vi.fn(), - clearFiles: vi.fn(), - clearAllLoading: vi.fn(), - setMessageToSend: vi.fn(), - resetConversationState: vi.fn(), - setHasRightPanelToggled: vi.fn(), }); }; diff --git a/frontend/__tests__/components/modals/microagents/microagent-modal.test.tsx b/frontend/__tests__/components/modals/microagents/microagent-modal.test.tsx deleted file mode 100644 index f3b68c513d11..000000000000 --- a/frontend/__tests__/components/modals/microagents/microagent-modal.test.tsx +++ /dev/null @@ -1,89 +0,0 @@ -import { screen } from "@testing-library/react"; -import userEvent from "@testing-library/user-event"; -import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; -import { renderWithProviders } from "test-utils"; -import { MicroagentsModal } from "#/components/features/conversation-panel/microagents-modal"; -import ConversationService from "#/api/conversation-service/conversation-service.api"; -import { AgentState } from "#/types/agent-state"; -import { useAgentState } from "#/hooks/use-agent-state"; - -// Mock the agent state hook -vi.mock("#/hooks/use-agent-state", () => ({ - useAgentState: vi.fn(), -})); - -// Mock the conversation ID hook -vi.mock("#/hooks/use-conversation-id", () => ({ - useConversationId: () => ({ conversationId: "test-conversation-id" }), -})); - -describe("MicroagentsModal - 
Refresh Button", () => { - const mockOnClose = vi.fn(); - const conversationId = "test-conversation-id"; - - const defaultProps = { - onClose: mockOnClose, - conversationId, - }; - - const mockMicroagents = [ - { - name: "Test Agent 1", - type: "repo" as const, - triggers: ["test", "example"], - content: "This is test content for agent 1", - }, - { - name: "Test Agent 2", - type: "knowledge" as const, - triggers: ["help", "support"], - content: "This is test content for agent 2", - }, - ]; - - beforeEach(() => { - // Reset all mocks before each test - vi.clearAllMocks(); - - // Setup default mock for getMicroagents - vi.spyOn(ConversationService, "getMicroagents").mockResolvedValue({ - microagents: mockMicroagents, - }); - - // Mock the agent state to return a ready state - vi.mocked(useAgentState).mockReturnValue({ - curAgentState: AgentState.AWAITING_USER_INPUT, - }); - }); - - afterEach(() => { - vi.clearAllMocks(); - }); - - describe("Refresh Button Rendering", () => { - it("should render the refresh button with correct text and test ID", async () => { - renderWithProviders(); - - // Wait for the component to load and render the refresh button - const refreshButton = await screen.findByTestId("refresh-microagents"); - expect(refreshButton).toBeInTheDocument(); - expect(refreshButton).toHaveTextContent("BUTTON$REFRESH"); - }); - }); - - describe("Refresh Button Functionality", () => { - it("should call refetch when refresh button is clicked", async () => { - const user = userEvent.setup(); - - renderWithProviders(); - - const refreshSpy = vi.spyOn(ConversationService, "getMicroagents"); - - // Wait for the component to load and render the refresh button - const refreshButton = await screen.findByTestId("refresh-microagents"); - await user.click(refreshButton); - - expect(refreshSpy).toHaveBeenCalledTimes(1); - }); - }); -}); diff --git a/frontend/__tests__/components/modals/skills/skill-modal.test.tsx b/frontend/__tests__/components/modals/skills/skill-modal.test.tsx new file mode 100644 index 000000000000..33ab5098c8aa --- /dev/null +++ b/frontend/__tests__/components/modals/skills/skill-modal.test.tsx @@ -0,0 +1,394 @@ +import { screen } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import { renderWithProviders } from "test-utils"; +import { SkillsModal } from "#/components/features/conversation-panel/skills-modal"; +import ConversationService from "#/api/conversation-service/conversation-service.api"; +import V1ConversationService from "#/api/conversation-service/v1-conversation-service.api"; +import { AgentState } from "#/types/agent-state"; +import { useAgentState } from "#/hooks/use-agent-state"; +import SettingsService from "#/api/settings-service/settings-service.api"; + +// Mock the agent state hook +vi.mock("#/hooks/use-agent-state", () => ({ + useAgentState: vi.fn(), +})); + +// Mock the conversation ID hook +vi.mock("#/hooks/use-conversation-id", () => ({ + useConversationId: () => ({ conversationId: "test-conversation-id" }), +})); + +describe("SkillsModal - Refresh Button", () => { + const mockOnClose = vi.fn(); + const conversationId = "test-conversation-id"; + + const defaultProps = { + onClose: mockOnClose, + conversationId, + }; + + const mockSkills = [ + { + name: "Test Agent 1", + type: "repo" as const, + triggers: ["test", "example"], + content: "This is test content for agent 1", + }, + { + name: "Test Agent 2", + type: "knowledge" as const, + 
triggers: ["help", "support"], + content: "This is test content for agent 2", + }, + ]; + + beforeEach(() => { + // Reset all mocks before each test + vi.clearAllMocks(); + + // Setup default mock for getMicroagents (V0) + vi.spyOn(ConversationService, "getMicroagents").mockResolvedValue({ + microagents: mockSkills, + }); + + // Mock the agent state to return a ready state + vi.mocked(useAgentState).mockReturnValue({ + curAgentState: AgentState.AWAITING_USER_INPUT, + }); + }); + + afterEach(() => { + vi.restoreAllMocks(); + }); + + describe("Refresh Button Rendering", () => { + it("should render the refresh button with correct text and test ID", async () => { + renderWithProviders(); + + // Wait for the component to load and render the refresh button + const refreshButton = await screen.findByTestId("refresh-skills"); + expect(refreshButton).toBeInTheDocument(); + expect(refreshButton).toHaveTextContent("BUTTON$REFRESH"); + }); + }); + + describe("Refresh Button Functionality", () => { + it("should call refetch when refresh button is clicked", async () => { + const user = userEvent.setup(); + const refreshSpy = vi.spyOn(ConversationService, "getMicroagents"); + + renderWithProviders(); + + // Wait for the component to load and render the refresh button + const refreshButton = await screen.findByTestId("refresh-skills"); + + // Clear previous calls to only track the click + refreshSpy.mockClear(); + + await user.click(refreshButton); + + // Verify the refresh triggered a new API call + expect(refreshSpy).toHaveBeenCalled(); + }); + }); +}); + +describe("useConversationSkills - V1 API Integration", () => { + const conversationId = "test-conversation-id"; + + const mockMicroagents = [ + { + name: "V0 Test Agent", + type: "repo" as const, + triggers: ["v0"], + content: "V0 skill content", + }, + ]; + + const mockSkills = [ + { + name: "V1 Test Skill", + type: "knowledge" as const, + triggers: ["v1", "skill"], + content: "V1 skill content", + }, + ]; + + beforeEach(() => { + vi.clearAllMocks(); + + // Mock agent state + vi.mocked(useAgentState).mockReturnValue({ + curAgentState: AgentState.AWAITING_USER_INPUT, + }); + }); + + afterEach(() => { + vi.restoreAllMocks(); + }); + + describe("V0 API Usage (v1_enabled: false)", () => { + it("should call v0 ConversationService.getMicroagents when v1_enabled is false", async () => { + // Arrange + const getMicroagentsSpy = vi + .spyOn(ConversationService, "getMicroagents") + .mockResolvedValue({ microagents: mockMicroagents }); + + vi.spyOn(SettingsService, "getSettings").mockResolvedValue({ + v1_enabled: false, + llm_model: "test-model", + llm_base_url: "", + agent: "test-agent", + language: "en", + llm_api_key: null, + llm_api_key_set: false, + search_api_key_set: false, + confirmation_mode: false, + security_analyzer: null, + remote_runtime_resource_factor: null, + provider_tokens_set: {}, + enable_default_condenser: false, + condenser_max_size: null, + enable_sound_notifications: false, + enable_proactive_conversation_starters: false, + enable_solvability_analysis: false, + user_consents_to_analytics: null, + max_budget_per_task: null, + }); + + // Act + renderWithProviders(); + + // Assert + await screen.findByText("V0 Test Agent"); + expect(getMicroagentsSpy).toHaveBeenCalledWith(conversationId); + expect(getMicroagentsSpy).toHaveBeenCalledTimes(1); + }); + + it("should display v0 skills correctly", async () => { + // Arrange + vi.spyOn(ConversationService, "getMicroagents").mockResolvedValue({ + microagents: mockMicroagents, + }); + + 
vi.spyOn(SettingsService, "getSettings").mockResolvedValue({ + v1_enabled: false, + llm_model: "test-model", + llm_base_url: "", + agent: "test-agent", + language: "en", + llm_api_key: null, + llm_api_key_set: false, + search_api_key_set: false, + confirmation_mode: false, + security_analyzer: null, + remote_runtime_resource_factor: null, + provider_tokens_set: {}, + enable_default_condenser: false, + condenser_max_size: null, + enable_sound_notifications: false, + enable_proactive_conversation_starters: false, + enable_solvability_analysis: false, + user_consents_to_analytics: null, + max_budget_per_task: null, + }); + + // Act + renderWithProviders(); + + // Assert + const agentName = await screen.findByText("V0 Test Agent"); + expect(agentName).toBeInTheDocument(); + }); + }); + + describe("V1 API Usage (v1_enabled: true)", () => { + it("should call v1 V1ConversationService.getSkills when v1_enabled is true", async () => { + // Arrange + const getSkillsSpy = vi + .spyOn(V1ConversationService, "getSkills") + .mockResolvedValue({ skills: mockSkills }); + + vi.spyOn(SettingsService, "getSettings").mockResolvedValue({ + v1_enabled: true, + llm_model: "test-model", + llm_base_url: "", + agent: "test-agent", + language: "en", + llm_api_key: null, + llm_api_key_set: false, + search_api_key_set: false, + confirmation_mode: false, + security_analyzer: null, + remote_runtime_resource_factor: null, + provider_tokens_set: {}, + enable_default_condenser: false, + condenser_max_size: null, + enable_sound_notifications: false, + enable_proactive_conversation_starters: false, + enable_solvability_analysis: false, + user_consents_to_analytics: null, + max_budget_per_task: null, + }); + + // Act + renderWithProviders(); + + // Assert + await screen.findByText("V1 Test Skill"); + expect(getSkillsSpy).toHaveBeenCalledWith(conversationId); + expect(getSkillsSpy).toHaveBeenCalledTimes(1); + }); + + it("should display v1 skills correctly", async () => { + // Arrange + vi.spyOn(V1ConversationService, "getSkills").mockResolvedValue({ + skills: mockSkills, + }); + + vi.spyOn(SettingsService, "getSettings").mockResolvedValue({ + v1_enabled: true, + llm_model: "test-model", + llm_base_url: "", + agent: "test-agent", + language: "en", + llm_api_key: null, + llm_api_key_set: false, + search_api_key_set: false, + confirmation_mode: false, + security_analyzer: null, + remote_runtime_resource_factor: null, + provider_tokens_set: {}, + enable_default_condenser: false, + condenser_max_size: null, + enable_sound_notifications: false, + enable_proactive_conversation_starters: false, + enable_solvability_analysis: false, + user_consents_to_analytics: null, + max_budget_per_task: null, + }); + + // Act + renderWithProviders(); + + // Assert + const skillName = await screen.findByText("V1 Test Skill"); + expect(skillName).toBeInTheDocument(); + }); + + it("should use v1 API when v1_enabled is true", async () => { + // Arrange + vi.spyOn(SettingsService, "getSettings").mockResolvedValue({ + v1_enabled: true, + llm_model: "test-model", + llm_base_url: "", + agent: "test-agent", + language: "en", + llm_api_key: null, + llm_api_key_set: false, + search_api_key_set: false, + confirmation_mode: false, + security_analyzer: null, + remote_runtime_resource_factor: null, + provider_tokens_set: {}, + enable_default_condenser: false, + condenser_max_size: null, + enable_sound_notifications: false, + enable_proactive_conversation_starters: false, + enable_solvability_analysis: false, + user_consents_to_analytics: null, + 
max_budget_per_task: null, + }); + + const getSkillsSpy = vi + .spyOn(V1ConversationService, "getSkills") + .mockResolvedValue({ + skills: mockSkills, + }); + + // Act + renderWithProviders(); + + // Assert + await screen.findByText("V1 Test Skill"); + // Verify v1 API was called + expect(getSkillsSpy).toHaveBeenCalledWith(conversationId); + }); + }); + + describe("API Switching on Settings Change", () => { + it("should refetch using different API when v1_enabled setting changes", async () => { + // Arrange + const getMicroagentsSpy = vi + .spyOn(ConversationService, "getMicroagents") + .mockResolvedValue({ microagents: mockMicroagents }); + const getSkillsSpy = vi + .spyOn(V1ConversationService, "getSkills") + .mockResolvedValue({ skills: mockSkills }); + + const settingsSpy = vi + .spyOn(SettingsService, "getSettings") + .mockResolvedValue({ + v1_enabled: false, + llm_model: "test-model", + llm_base_url: "", + agent: "test-agent", + language: "en", + llm_api_key: null, + llm_api_key_set: false, + search_api_key_set: false, + confirmation_mode: false, + security_analyzer: null, + remote_runtime_resource_factor: null, + provider_tokens_set: {}, + enable_default_condenser: false, + condenser_max_size: null, + enable_sound_notifications: false, + enable_proactive_conversation_starters: false, + enable_solvability_analysis: false, + user_consents_to_analytics: null, + max_budget_per_task: null, + }); + + // Act - Initial render with v1_enabled: false + const { rerender } = renderWithProviders( + , + ); + + // Assert - v0 API called initially + await screen.findByText("V0 Test Agent"); + expect(getMicroagentsSpy).toHaveBeenCalledWith(conversationId); + + // Arrange - Change settings to v1_enabled: true + settingsSpy.mockResolvedValue({ + v1_enabled: true, + llm_model: "test-model", + llm_base_url: "", + agent: "test-agent", + language: "en", + llm_api_key: null, + llm_api_key_set: false, + search_api_key_set: false, + confirmation_mode: false, + security_analyzer: null, + remote_runtime_resource_factor: null, + provider_tokens_set: {}, + enable_default_condenser: false, + condenser_max_size: null, + enable_sound_notifications: false, + enable_proactive_conversation_starters: false, + enable_solvability_analysis: false, + user_consents_to_analytics: null, + max_budget_per_task: null, + }); + + // Act - Force re-render + rerender(); + + // Assert - v1 API should be called after settings change + await screen.findByText("V1 Test Skill"); + expect(getSkillsSpy).toHaveBeenCalledWith(conversationId); + }); + }); +}); diff --git a/frontend/__tests__/components/shared/modals/settings/settings-form.test.tsx b/frontend/__tests__/components/shared/modals/settings/settings-form.test.tsx index 6b4616ab1298..b820be5829a2 100644 --- a/frontend/__tests__/components/shared/modals/settings/settings-form.test.tsx +++ b/frontend/__tests__/components/shared/modals/settings/settings-form.test.tsx @@ -3,7 +3,7 @@ import { describe, expect, it, vi } from "vitest"; import { renderWithProviders } from "test-utils"; import { createRoutesStub } from "react-router"; import { screen } from "@testing-library/react"; -import SettingsService from "#/settings-service/settings-service.api"; +import SettingsService from "#/api/settings-service/settings-service.api"; import { SettingsForm } from "#/components/shared/modals/settings/settings-form"; import { DEFAULT_SETTINGS } from "#/services/settings"; @@ -16,7 +16,7 @@ describe("SettingsForm", () => { Component: () => ( ), @@ -33,7 +33,7 @@ describe("SettingsForm", () => { 
expect(saveSettingsSpy).toHaveBeenCalledWith( expect.objectContaining({ - llm_model: DEFAULT_SETTINGS.LLM_MODEL, + llm_model: DEFAULT_SETTINGS.llm_model, }), ); }); diff --git a/frontend/__tests__/components/terminal/terminal.test.tsx b/frontend/__tests__/components/terminal/terminal.test.tsx index 15fb6357b2dd..ae25748a8b05 100644 --- a/frontend/__tests__/components/terminal/terminal.test.tsx +++ b/frontend/__tests__/components/terminal/terminal.test.tsx @@ -1,7 +1,7 @@ import { act, screen } from "@testing-library/react"; import { renderWithProviders } from "test-utils"; import { vi, describe, afterEach, it, expect } from "vitest"; -import { Command, useCommandStore } from "#/state/command-store"; +import { Command, useCommandStore } from "#/stores/command-store"; import Terminal from "#/components/features/terminal/terminal"; const renderTerminal = (commands: Command[] = []) => { diff --git a/frontend/__tests__/components/v1/chat/event-content-helpers/get-observation-content.test.ts b/frontend/__tests__/components/v1/chat/event-content-helpers/get-observation-content.test.ts new file mode 100644 index 000000000000..9e2da14a26a3 --- /dev/null +++ b/frontend/__tests__/components/v1/chat/event-content-helpers/get-observation-content.test.ts @@ -0,0 +1,92 @@ +import { describe, it, expect } from "vitest"; +import { getObservationContent } from "#/components/v1/chat/event-content-helpers/get-observation-content"; +import { ObservationEvent } from "#/types/v1/core"; +import { BrowserObservation } from "#/types/v1/core/base/observation"; + +describe("getObservationContent - BrowserObservation", () => { + it("should return output content when available", () => { + const mockEvent: ObservationEvent = { + id: "test-id", + timestamp: "2024-01-01T00:00:00Z", + source: "environment", + tool_name: "browser_navigate", + tool_call_id: "call-id", + action_id: "action-id", + observation: { + kind: "BrowserObservation", + output: "Browser action completed", + error: null, + screenshot_data: "base64data", + }, + }; + + const result = getObservationContent(mockEvent); + + expect(result).toContain("**Output:**"); + expect(result).toContain("Browser action completed"); + }); + + it("should handle error cases properly", () => { + const mockEvent: ObservationEvent = { + id: "test-id", + timestamp: "2024-01-01T00:00:00Z", + source: "environment", + tool_name: "browser_navigate", + tool_call_id: "call-id", + action_id: "action-id", + observation: { + kind: "BrowserObservation", + output: "", + error: "Browser action failed", + screenshot_data: null, + }, + }; + + const result = getObservationContent(mockEvent); + + expect(result).toContain("**Error:**"); + expect(result).toContain("Browser action failed"); + }); + + it("should provide default message when no output or error", () => { + const mockEvent: ObservationEvent = { + id: "test-id", + timestamp: "2024-01-01T00:00:00Z", + source: "environment", + tool_name: "browser_navigate", + tool_call_id: "call-id", + action_id: "action-id", + observation: { + kind: "BrowserObservation", + output: "", + error: null, + screenshot_data: "base64data", + }, + }; + + const result = getObservationContent(mockEvent); + + expect(result).toBe("Browser action completed successfully."); + }); + + it("should return output when screenshot_data is null", () => { + const mockEvent: ObservationEvent = { + id: "test-id", + timestamp: "2024-01-01T00:00:00Z", + source: "environment", + tool_name: "browser_navigate", + tool_call_id: "call-id", + action_id: "action-id", + observation: { + 
kind: "BrowserObservation", + output: "Page loaded successfully", + error: null, + screenshot_data: null, + }, + }; + + const result = getObservationContent(mockEvent); + + expect(result).toBe("**Output:**\nPage loaded successfully"); + }); +}); diff --git a/frontend/__tests__/conversation-websocket-handler.test.tsx b/frontend/__tests__/conversation-websocket-handler.test.tsx index f7d67d82b5ca..d3df1676fa11 100644 --- a/frontend/__tests__/conversation-websocket-handler.test.tsx +++ b/frontend/__tests__/conversation-websocket-handler.test.tsx @@ -1,12 +1,26 @@ -import { describe, it, expect, beforeAll, afterAll, afterEach } from "vitest"; +import { + describe, + it, + expect, + beforeAll, + beforeEach, + afterAll, + afterEach, +} from "vitest"; import { screen, waitFor, render, cleanup } from "@testing-library/react"; import { QueryClient, QueryClientProvider } from "@tanstack/react-query"; import { http, HttpResponse } from "msw"; import { useOptimisticUserMessageStore } from "#/stores/optimistic-user-message-store"; +import { useBrowserStore } from "#/stores/browser-store"; +import { useCommandStore } from "#/stores/command-store"; import { createMockMessageEvent, createMockUserMessageEvent, createMockAgentErrorEvent, + createMockBrowserObservationEvent, + createMockBrowserNavigateActionEvent, + createMockExecuteBashActionEvent, + createMockExecuteBashObservationEvent, } from "#/mocks/mock-ws-helpers"; import { ConnectionStatusComponent, @@ -439,18 +453,10 @@ describe("Conversation WebSocket Handler", () => { // Set up MSW to mock both the HTTP API and WebSocket connection mswServer.use( - http.get("/api/v1/events/count", ({ request }) => { - const url = new URL(request.url); - const conversationIdParam = url.searchParams.get( - "conversation_id__eq", - ); - - if (conversationIdParam === conversationId) { - return HttpResponse.json(expectedEventCount); - } - - return HttpResponse.json(0); - }), + http.get( + `http://localhost:3000/api/conversations/${conversationId}/events/count`, + () => HttpResponse.json(expectedEventCount), + ), wsLink.addEventListener("connection", ({ client, server }) => { server.connect(); // Send all history events @@ -461,7 +467,7 @@ describe("Conversation WebSocket Handler", () => { ); // Create a test component that displays loading state - const HistoryLoadingComponent = () => { + function HistoryLoadingComponent() { const context = useConversationWebSocket(); const { events } = useEventStore(); @@ -474,7 +480,7 @@ describe("Conversation WebSocket Handler", () => {
{expectedEventCount}
); - }; + } // Render with WebSocket context renderWithWebSocketContext( @@ -484,7 +490,9 @@ describe("Conversation WebSocket Handler", () => { ); // Initially should be loading history - expect(screen.getByTestId("is-loading-history")).toHaveTextContent("true"); + expect(screen.getByTestId("is-loading-history")).toHaveTextContent( + "true", + ); // Wait for all events to be received await waitFor(() => { @@ -504,18 +512,10 @@ describe("Conversation WebSocket Handler", () => { // Set up MSW to mock both the HTTP API and WebSocket connection mswServer.use( - http.get("/api/v1/events/count", ({ request }) => { - const url = new URL(request.url); - const conversationIdParam = url.searchParams.get( - "conversation_id__eq", - ); - - if (conversationIdParam === conversationId) { - return HttpResponse.json(0); - } - - return HttpResponse.json(0); - }), + http.get( + `http://localhost:3000/api/conversations/${conversationId}/events/count`, + () => HttpResponse.json(0), + ), wsLink.addEventListener("connection", ({ server }) => { server.connect(); // No events sent for empty history @@ -523,7 +523,7 @@ describe("Conversation WebSocket Handler", () => { ); // Create a test component that displays loading state - const HistoryLoadingComponent = () => { + function HistoryLoadingComponent() { const context = useConversationWebSocket(); return ( @@ -533,7 +533,7 @@ describe("Conversation WebSocket Handler", () => { ); - }; + } // Render with WebSocket context renderWithWebSocketContext( @@ -561,18 +561,10 @@ describe("Conversation WebSocket Handler", () => { // Set up MSW to mock both the HTTP API and WebSocket connection mswServer.use( - http.get("/api/v1/events/count", ({ request }) => { - const url = new URL(request.url); - const conversationIdParam = url.searchParams.get( - "conversation_id__eq", - ); - - if (conversationIdParam === conversationId) { - return HttpResponse.json(expectedEventCount); - } - - return HttpResponse.json(0); - }), + http.get( + `http://localhost:3000/api/conversations/${conversationId}/events/count`, + () => HttpResponse.json(expectedEventCount), + ), wsLink.addEventListener("connection", ({ client, server }) => { server.connect(); // Send all history events @@ -583,7 +575,7 @@ describe("Conversation WebSocket Handler", () => { ); // Create a test component that displays loading state - const HistoryLoadingComponent = () => { + function HistoryLoadingComponent() { const context = useConversationWebSocket(); const { events } = useEventStore(); @@ -595,7 +587,7 @@ describe("Conversation WebSocket Handler", () => {
{events.length}
); - }; + } // Render with WebSocket context renderWithWebSocketContext( @@ -605,7 +597,9 @@ describe("Conversation WebSocket Handler", () => { ); // Initially should be loading history - expect(screen.getByTestId("is-loading-history")).toHaveTextContent("true"); + expect(screen.getByTestId("is-loading-history")).toHaveTextContent( + "true", + ); // Wait for all events to be received await waitFor(() => { @@ -621,17 +615,133 @@ describe("Conversation WebSocket Handler", () => { }); }); - // 9. Terminal I/O Tests (ExecuteBashAction and ExecuteBashObservation) - describe("Terminal I/O Integration", () => { - it("should append command to store when ExecuteBashAction event is received", async () => { - const { createMockExecuteBashActionEvent } = await import( - "#/mocks/mock-ws-helpers" + // 9. Browser State Tests (BrowserObservation) + describe("Browser State Integration", () => { + beforeEach(() => { + useBrowserStore.getState().reset(); + }); + + it("should update browser store with screenshot when BrowserObservation event is received", async () => { + // Create a mock BrowserObservation event with screenshot data + const mockBrowserObsEvent = createMockBrowserObservationEvent( + "base64-screenshot-data", + "Page loaded successfully", + ); + + // Set up MSW to send the event when connection is established + mswServer.use( + wsLink.addEventListener("connection", ({ client, server }) => { + server.connect(); + // Send the mock event after connection + client.send(JSON.stringify(mockBrowserObsEvent)); + }), + ); + + // Render with WebSocket context + renderWithWebSocketContext(); + + // Wait for connection + await waitFor(() => { + expect(screen.getByTestId("connection-state")).toHaveTextContent( + "OPEN", + ); + }); + + // Wait for the browser store to be updated with screenshot + await waitFor(() => { + const { screenshotSrc } = useBrowserStore.getState(); + expect(screenshotSrc).toBe( + "data:image/png;base64,base64-screenshot-data", + ); + }); + }); + + it("should update browser store with URL when BrowserNavigateAction followed by BrowserObservation", async () => { + // Create mock events - action first, then observation + const mockBrowserActionEvent = createMockBrowserNavigateActionEvent( + "https://example.com/test-page", + ); + const mockBrowserObsEvent = createMockBrowserObservationEvent( + "base64-screenshot-data", + "Page loaded successfully", + ); + + // Set up MSW to send both events when connection is established + mswServer.use( + wsLink.addEventListener("connection", ({ client, server }) => { + server.connect(); + // Send action first, then observation + client.send(JSON.stringify(mockBrowserActionEvent)); + client.send(JSON.stringify(mockBrowserObsEvent)); + }), + ); + + // Render with WebSocket context + renderWithWebSocketContext(); + + // Wait for connection + await waitFor(() => { + expect(screen.getByTestId("connection-state")).toHaveTextContent( + "OPEN", + ); + }); + + // Wait for the browser store to be updated with both screenshot and URL + await waitFor(() => { + const { screenshotSrc, url } = useBrowserStore.getState(); + expect(screenshotSrc).toBe( + "data:image/png;base64,base64-screenshot-data", + ); + expect(url).toBe("https://example.com/test-page"); + }); + }); + + it("should not update browser store when BrowserObservation has no screenshot data", async () => { + const initialScreenshot = useBrowserStore.getState().screenshotSrc; + + // Create a mock BrowserObservation event WITHOUT screenshot data + const mockBrowserObsEvent = 
createMockBrowserObservationEvent( + null, // no screenshot + "Browser action completed", ); - const { useCommandStore } = await import("#/state/command-store"); - // Clear the command store before test + // Set up MSW to send the event when connection is established + mswServer.use( + wsLink.addEventListener("connection", ({ client, server }) => { + server.connect(); + // Send the mock event after connection + client.send(JSON.stringify(mockBrowserObsEvent)); + }), + ); + + // Render with WebSocket context + renderWithWebSocketContext(); + + // Wait for connection + await waitFor(() => { + expect(screen.getByTestId("connection-state")).toHaveTextContent( + "OPEN", + ); + }); + + // Give some time for any potential updates + await new Promise((resolve) => { + setTimeout(resolve, 100); + }); + + // Screenshot should remain unchanged (empty/initial value) + const { screenshotSrc } = useBrowserStore.getState(); + expect(screenshotSrc).toBe(initialScreenshot); + }); + }); + + // 10. Terminal I/O Tests (ExecuteBashAction and ExecuteBashObservation) + describe("Terminal I/O Integration", () => { + beforeEach(() => { useCommandStore.getState().clearTerminal(); + }); + it("should append command to store when ExecuteBashAction event is received", async () => { // Create a mock ExecuteBashAction event const mockBashActionEvent = createMockExecuteBashActionEvent("npm test"); @@ -667,14 +777,6 @@ describe("Conversation WebSocket Handler", () => { }); it("should append output to store when ExecuteBashObservation event is received", async () => { - const { createMockExecuteBashObservationEvent } = await import( - "#/mocks/mock-ws-helpers" - ); - const { useCommandStore } = await import("#/state/command-store"); - - // Clear the command store before test - useCommandStore.getState().clearTerminal(); - // Create a mock ExecuteBashObservation event const mockBashObservationEvent = createMockExecuteBashObservationEvent( "PASS tests/example.test.js\n ✓ should work (2 ms)", diff --git a/frontend/__tests__/hooks/mutation/use-save-settings.test.tsx b/frontend/__tests__/hooks/mutation/use-save-settings.test.tsx index 29fdb99273b7..d2a7c798c411 100644 --- a/frontend/__tests__/hooks/mutation/use-save-settings.test.tsx +++ b/frontend/__tests__/hooks/mutation/use-save-settings.test.tsx @@ -1,7 +1,7 @@ import { renderHook, waitFor } from "@testing-library/react"; import { describe, expect, it, vi } from "vitest"; import { QueryClient, QueryClientProvider } from "@tanstack/react-query"; -import SettingsService from "#/settings-service/settings-service.api"; +import SettingsService from "#/api/settings-service/settings-service.api"; import { useSaveSettings } from "#/hooks/mutation/use-save-settings"; describe("useSaveSettings", () => { diff --git a/frontend/__tests__/hooks/use-settings-nav-items.test.tsx b/frontend/__tests__/hooks/use-settings-nav-items.test.tsx new file mode 100644 index 000000000000..64bb67534110 --- /dev/null +++ b/frontend/__tests__/hooks/use-settings-nav-items.test.tsx @@ -0,0 +1,53 @@ +import { QueryClient, QueryClientProvider } from "@tanstack/react-query"; +import { renderHook, waitFor } from "@testing-library/react"; +import { describe, it, expect, vi, beforeEach } from "vitest"; +import { SAAS_NAV_ITEMS, OSS_NAV_ITEMS } from "#/constants/settings-nav"; +import OptionService from "#/api/option-service/option-service.api"; +import { useSettingsNavItems } from "#/hooks/use-settings-nav-items"; + +const queryClient = new QueryClient(); +const wrapper = ({ children }: { children: React.ReactNode 
}) => ( + {children} +); + +const mockConfig = (appMode: "saas" | "oss", hideLlmSettings = false) => { + vi.spyOn(OptionService, "getConfig").mockResolvedValue({ + APP_MODE: appMode, + FEATURE_FLAGS: { HIDE_LLM_SETTINGS: hideLlmSettings }, + } as Awaited>); +}; + +describe("useSettingsNavItems", () => { + beforeEach(() => { + queryClient.clear(); + }); + + it("should return SAAS_NAV_ITEMS when APP_MODE is 'saas'", async () => { + mockConfig("saas"); + const { result } = renderHook(() => useSettingsNavItems(), { wrapper }); + + await waitFor(() => { + expect(result.current).toEqual(SAAS_NAV_ITEMS); + }); + }); + + it("should return OSS_NAV_ITEMS when APP_MODE is 'oss'", async () => { + mockConfig("oss"); + const { result } = renderHook(() => useSettingsNavItems(), { wrapper }); + + await waitFor(() => { + expect(result.current).toEqual(OSS_NAV_ITEMS); + }); + }); + + it("should filter out '/settings' item when HIDE_LLM_SETTINGS feature flag is enabled", async () => { + mockConfig("saas", true); + const { result } = renderHook(() => useSettingsNavItems(), { wrapper }); + + await waitFor(() => { + expect( + result.current.find((item) => item.to === "/settings"), + ).toBeUndefined(); + }); + }); +}); diff --git a/frontend/__tests__/hooks/use-terminal.test.tsx b/frontend/__tests__/hooks/use-terminal.test.tsx index 4f110df1716a..0e4761b21b44 100644 --- a/frontend/__tests__/hooks/use-terminal.test.tsx +++ b/frontend/__tests__/hooks/use-terminal.test.tsx @@ -1,6 +1,6 @@ import { beforeAll, describe, expect, it, vi, afterEach } from "vitest"; import { useTerminal } from "#/hooks/use-terminal"; -import { Command, useCommandStore } from "#/state/command-store"; +import { Command, useCommandStore } from "#/stores/command-store"; import { renderWithProviders } from "../../test-utils"; // Mock the WsClient context @@ -42,20 +42,46 @@ describe("useTerminal", () => { write: vi.fn(), writeln: vi.fn(), dispose: vi.fn(), + element: document.createElement("div"), + })); + + const mockFitAddon = vi.hoisted(() => ({ + fit: vi.fn(), })); beforeAll(() => { - // mock ResizeObserver - window.ResizeObserver = vi.fn().mockImplementation(() => ({ - observe: vi.fn(), - unobserve: vi.fn(), - disconnect: vi.fn(), - })); + // mock ResizeObserver - use class for Vitest 4 constructor support + window.ResizeObserver = class { + observe = vi.fn(); + + unobserve = vi.fn(); + + disconnect = vi.fn(); + } as unknown as typeof ResizeObserver; - // mock Terminal + // mock Terminal - use class for Vitest 4 constructor support vi.mock("@xterm/xterm", async (importOriginal) => ({ ...(await importOriginal()), - Terminal: vi.fn().mockImplementation(() => mockTerminal), + Terminal: class { + loadAddon = mockTerminal.loadAddon; + + open = mockTerminal.open; + + write = mockTerminal.write; + + writeln = mockTerminal.writeln; + + dispose = mockTerminal.dispose; + + element = mockTerminal.element; + }, + })); + + // mock FitAddon + vi.mock("@xterm/addon-fit", () => ({ + FitAddon: class { + fit = mockFitAddon.fit; + }, })); }); @@ -83,4 +109,18 @@ describe("useTerminal", () => { expect(mockTerminal.writeln).toHaveBeenNthCalledWith(1, "echo hello"); expect(mockTerminal.writeln).toHaveBeenNthCalledWith(2, "hello"); }); + + it("should not call fit() when terminal.element is null", () => { + // Temporarily set element to null to simulate terminal not being opened + const originalElement = mockTerminal.element; + mockTerminal.element = null as unknown as HTMLDivElement; + + renderWithProviders(); + + // fit() should not be called because 
terminal.element is null + expect(mockFitAddon.fit).not.toHaveBeenCalled(); + + // Restore original element + mockTerminal.element = originalElement; + }); }); diff --git a/frontend/__tests__/hooks/use-websocket.test.ts b/frontend/__tests__/hooks/use-websocket.test.ts index cb76fbcc90f0..50e8e7057165 100644 --- a/frontend/__tests__/hooks/use-websocket.test.ts +++ b/frontend/__tests__/hooks/use-websocket.test.ts @@ -1,3 +1,11 @@ +/** + * TODO: Fix flaky WebSocket tests (https://github.com/OpenHands/OpenHands/issues/11944) + * + * Several tests in this file are skipped because they fail intermittently in CI + * but pass locally. The SUSPECTED root cause is that `wsLink.broadcast()` sends messages + * to ALL connected clients across all tests, causing cross-test contamination + * when tests run in parallel with Vitest v4. + */ import { renderHook, waitFor } from "@testing-library/react"; import { describe, @@ -51,7 +59,7 @@ describe("useWebSocket", () => { expect(result.current.socket).toBeTruthy(); }); - it("should handle incoming messages correctly", async () => { + it.skip("should handle incoming messages correctly", async () => { const { result } = renderHook(() => useWebSocket("ws://acme.com/ws")); // Wait for connection to be established @@ -114,7 +122,7 @@ describe("useWebSocket", () => { expect(result.current.socket).toBeTruthy(); }); - it("should close the WebSocket connection on unmount", async () => { + it.skip("should close the WebSocket connection on unmount", async () => { const { result, unmount } = renderHook(() => useWebSocket("ws://acme.com/ws"), ); @@ -204,7 +212,7 @@ describe("useWebSocket", () => { }); }); - it("should call onMessage handler when WebSocket receives a message", async () => { + it.skip("should call onMessage handler when WebSocket receives a message", async () => { const onMessageSpy = vi.fn(); const options = { onMessage: onMessageSpy }; @@ -271,7 +279,7 @@ describe("useWebSocket", () => { expect(onErrorSpy).toHaveBeenCalled(); }); - it("should provide sendMessage function to send messages to WebSocket", async () => { + it.skip("should provide sendMessage function to send messages to WebSocket", async () => { const { result } = renderHook(() => useWebSocket("ws://acme.com/ws")); // Wait for connection to be established diff --git a/frontend/__tests__/routes/_oh.test.tsx b/frontend/__tests__/routes/_oh.test.tsx index 7737eaa7f73c..0b76148ad69c 100644 --- a/frontend/__tests__/routes/_oh.test.tsx +++ b/frontend/__tests__/routes/_oh.test.tsx @@ -10,7 +10,7 @@ import MainApp from "#/routes/root-layout"; import i18n from "#/i18n"; import OptionService from "#/api/option-service/option-service.api"; import * as CaptureConsent from "#/utils/handle-capture-consent"; -import SettingsService from "#/settings-service/settings-service.api"; +import SettingsService from "#/api/settings-service/settings-service.api"; import * as ToastHandlers from "#/utils/custom-toast-handlers"; describe("frontend/routes/_oh", () => { diff --git a/frontend/__tests__/routes/app-settings.test.tsx b/frontend/__tests__/routes/app-settings.test.tsx index 44dacce2fb18..038cb94c52b7 100644 --- a/frontend/__tests__/routes/app-settings.test.tsx +++ b/frontend/__tests__/routes/app-settings.test.tsx @@ -3,7 +3,7 @@ import { afterEach, describe, expect, it, vi } from "vitest"; import { QueryClient, QueryClientProvider } from "@tanstack/react-query"; import userEvent from "@testing-library/user-event"; import AppSettingsScreen from "#/routes/app-settings"; -import SettingsService from 
"#/settings-service/settings-service.api"; +import SettingsService from "#/api/settings-service/settings-service.api"; import { MOCK_DEFAULT_USER_SETTINGS } from "#/mocks/handlers"; import { AvailableLanguages } from "#/i18n"; import * as CaptureConsent from "#/utils/handle-capture-consent"; diff --git a/frontend/__tests__/routes/git-settings.test.tsx b/frontend/__tests__/routes/git-settings.test.tsx index 0c3f77bed088..9f1008ce3ccc 100644 --- a/frontend/__tests__/routes/git-settings.test.tsx +++ b/frontend/__tests__/routes/git-settings.test.tsx @@ -6,7 +6,7 @@ import userEvent from "@testing-library/user-event"; import i18next from "i18next"; import { I18nextProvider } from "react-i18next"; import GitSettingsScreen from "#/routes/git-settings"; -import SettingsService from "#/settings-service/settings-service.api"; +import SettingsService from "#/api/settings-service/settings-service.api"; import OptionService from "#/api/option-service/option-service.api"; import AuthService from "#/api/auth-service/auth-service.api"; import { MOCK_DEFAULT_USER_SETTINGS } from "#/mocks/handlers"; diff --git a/frontend/__tests__/routes/home-screen.test.tsx b/frontend/__tests__/routes/home-screen.test.tsx index a515f670beab..5ac746e924c8 100644 --- a/frontend/__tests__/routes/home-screen.test.tsx +++ b/frontend/__tests__/routes/home-screen.test.tsx @@ -6,7 +6,7 @@ import { createRoutesStub } from "react-router"; import { createAxiosNotFoundErrorObject } from "test-utils"; import HomeScreen from "#/routes/home"; import { GitRepository } from "#/types/git"; -import SettingsService from "#/settings-service/settings-service.api"; +import SettingsService from "#/api/settings-service/settings-service.api"; import GitService from "#/api/git-service/git-service.api"; import OptionService from "#/api/option-service/option-service.api"; import MainApp from "#/routes/root-layout"; diff --git a/frontend/__tests__/routes/llm-settings.test.tsx b/frontend/__tests__/routes/llm-settings.test.tsx index f826b20f4506..3942cc8fc140 100644 --- a/frontend/__tests__/routes/llm-settings.test.tsx +++ b/frontend/__tests__/routes/llm-settings.test.tsx @@ -3,13 +3,14 @@ import userEvent from "@testing-library/user-event"; import { beforeEach, describe, expect, it, vi } from "vitest"; import { QueryClientProvider, QueryClient } from "@tanstack/react-query"; import LlmSettingsScreen from "#/routes/llm-settings"; -import SettingsService from "#/settings-service/settings-service.api"; +import SettingsService from "#/api/settings-service/settings-service.api"; import { MOCK_DEFAULT_USER_SETTINGS, resetTestHandlersMockSettings, } from "#/mocks/handlers"; import * as AdvancedSettingsUtlls from "#/utils/has-advanced-settings-set"; import * as ToastHandlers from "#/utils/custom-toast-handlers"; +import OptionService from "#/api/option-service/option-service.api"; // Mock react-router hooks const mockUseSearchParams = vi.fn(); @@ -71,7 +72,7 @@ describe("Content", () => { await waitFor(() => { expect(provider).toHaveValue("OpenHands"); - expect(model).toHaveValue("claude-sonnet-4-20250514"); + expect(model).toHaveValue("claude-opus-4-5-20251101"); expect(apiKey).toHaveValue(""); expect(apiKey).toHaveProperty("placeholder", ""); @@ -189,7 +190,7 @@ describe("Content", () => { const agent = screen.getByTestId("agent-input"); const condensor = screen.getByTestId("enable-memory-condenser-switch"); - expect(model).toHaveValue("openhands/claude-sonnet-4-20250514"); + expect(model).toHaveValue("openhands/claude-opus-4-5-20251101"); 
expect(baseUrl).toHaveValue(""); expect(apiKey).toHaveValue(""); expect(apiKey).toHaveProperty("placeholder", ""); @@ -252,9 +253,290 @@ describe("Content", () => { expect(securityAnalyzer).toHaveValue("SETTINGS$SECURITY_ANALYZER_NONE"); }); }); + + it("should omit invariant and custom analyzers when V1 is enabled", async () => { + const getSettingsSpy = vi.spyOn(SettingsService, "getSettings"); + getSettingsSpy.mockResolvedValue({ + ...MOCK_DEFAULT_USER_SETTINGS, + confirmation_mode: true, + security_analyzer: "llm", + v1_enabled: true, + }); + + const getSecurityAnalyzersSpy = vi.spyOn( + OptionService, + "getSecurityAnalyzers", + ); + getSecurityAnalyzersSpy.mockResolvedValue([ + "llm", + "none", + "invariant", + "custom", + ]); + + renderLlmSettingsScreen(); + await screen.findByTestId("llm-settings-screen"); + + const advancedSwitch = screen.getByTestId("advanced-settings-switch"); + await userEvent.click(advancedSwitch); + + const securityAnalyzer = await screen.findByTestId( + "security-analyzer-input", + ); + await userEvent.click(securityAnalyzer); + + // Only llm + none should be available when V1 is enabled + screen.getByText("SETTINGS$SECURITY_ANALYZER_LLM_DEFAULT"); + screen.getByText("SETTINGS$SECURITY_ANALYZER_NONE"); + expect( + screen.queryByText("SETTINGS$SECURITY_ANALYZER_INVARIANT"), + ).not.toBeInTheDocument(); + expect(screen.queryByText("custom")).not.toBeInTheDocument(); + }); + + it("should include invariant analyzer option when V1 is disabled", async () => { + const getSettingsSpy = vi.spyOn(SettingsService, "getSettings"); + getSettingsSpy.mockResolvedValue({ + ...MOCK_DEFAULT_USER_SETTINGS, + confirmation_mode: true, + security_analyzer: "llm", + v1_enabled: false, + }); + + const getSecurityAnalyzersSpy = vi.spyOn( + OptionService, + "getSecurityAnalyzers", + ); + getSecurityAnalyzersSpy.mockResolvedValue(["llm", "none", "invariant"]); + + renderLlmSettingsScreen(); + await screen.findByTestId("llm-settings-screen"); + + const advancedSwitch = screen.getByTestId("advanced-settings-switch"); + await userEvent.click(advancedSwitch); + + const securityAnalyzer = await screen.findByTestId( + "security-analyzer-input", + ); + await userEvent.click(securityAnalyzer); + + expect( + screen.getByText("SETTINGS$SECURITY_ANALYZER_LLM_DEFAULT"), + ).toBeInTheDocument(); + expect( + screen.getByText("SETTINGS$SECURITY_ANALYZER_NONE"), + ).toBeInTheDocument(); + expect( + screen.getByText("SETTINGS$SECURITY_ANALYZER_INVARIANT"), + ).toBeInTheDocument(); + }); }); it.todo("should render an indicator if the llm api key is set"); + + describe("API key visibility in Basic Settings", () => { + it("should hide API key input when SaaS mode is enabled and OpenHands provider is selected", async () => { + const getConfigSpy = vi.spyOn(OptionService, "getConfig"); + // @ts-expect-error - only return APP_MODE for these tests + getConfigSpy.mockResolvedValue({ + APP_MODE: "saas", + }); + + renderLlmSettingsScreen(); + await screen.findByTestId("llm-settings-screen"); + + const basicForm = screen.getByTestId("llm-settings-form-basic"); + const provider = within(basicForm).getByTestId("llm-provider-input"); + + // Verify OpenHands is selected by default + await waitFor(() => { + expect(provider).toHaveValue("OpenHands"); + }); + + // API key input should not be visible when OpenHands provider is selected in SaaS mode + expect( + within(basicForm).queryByTestId("llm-api-key-input"), + ).not.toBeInTheDocument(); + expect( + within(basicForm).queryByTestId("llm-api-key-help-anchor"), + 
).not.toBeInTheDocument(); + }); + + it("should show API key input when SaaS mode is enabled and non-OpenHands provider is selected", async () => { + const getConfigSpy = vi.spyOn(OptionService, "getConfig"); + // @ts-expect-error - only return APP_MODE for these tests + getConfigSpy.mockResolvedValue({ + APP_MODE: "saas", + }); + + renderLlmSettingsScreen(); + await screen.findByTestId("llm-settings-screen"); + + const basicForm = screen.getByTestId("llm-settings-form-basic"); + const provider = within(basicForm).getByTestId("llm-provider-input"); + + // Select OpenAI provider + await userEvent.click(provider); + const providerOption = screen.getByText("OpenAI"); + await userEvent.click(providerOption); + + await waitFor(() => { + expect(provider).toHaveValue("OpenAI"); + }); + + // API key input should be visible when non-OpenHands provider is selected in SaaS mode + expect( + within(basicForm).getByTestId("llm-api-key-input"), + ).toBeInTheDocument(); + expect( + within(basicForm).getByTestId("llm-api-key-help-anchor"), + ).toBeInTheDocument(); + }); + + it("should show API key input when OSS mode is enabled and OpenHands provider is selected", async () => { + const getConfigSpy = vi.spyOn(OptionService, "getConfig"); + // @ts-expect-error - only return APP_MODE for these tests + getConfigSpy.mockResolvedValue({ + APP_MODE: "oss", + }); + + renderLlmSettingsScreen(); + await screen.findByTestId("llm-settings-screen"); + + const basicForm = screen.getByTestId("llm-settings-form-basic"); + const provider = within(basicForm).getByTestId("llm-provider-input"); + + // Verify OpenHands is selected by default + await waitFor(() => { + expect(provider).toHaveValue("OpenHands"); + }); + + // API key input should be visible when OSS mode is enabled (even with OpenHands provider) + expect( + within(basicForm).getByTestId("llm-api-key-input"), + ).toBeInTheDocument(); + expect( + within(basicForm).getByTestId("llm-api-key-help-anchor"), + ).toBeInTheDocument(); + }); + + it("should show API key input when OSS mode is enabled and non-OpenHands provider is selected", async () => { + const getConfigSpy = vi.spyOn(OptionService, "getConfig"); + // @ts-expect-error - only return APP_MODE for these tests + getConfigSpy.mockResolvedValue({ + APP_MODE: "oss", + }); + + renderLlmSettingsScreen(); + await screen.findByTestId("llm-settings-screen"); + + const basicForm = screen.getByTestId("llm-settings-form-basic"); + const provider = within(basicForm).getByTestId("llm-provider-input"); + + // Select OpenAI provider + await userEvent.click(provider); + const providerOption = screen.getByText("OpenAI"); + await userEvent.click(providerOption); + + await waitFor(() => { + expect(provider).toHaveValue("OpenAI"); + }); + + // API key input should be visible when OSS mode is enabled + expect( + within(basicForm).getByTestId("llm-api-key-input"), + ).toBeInTheDocument(); + expect( + within(basicForm).getByTestId("llm-api-key-help-anchor"), + ).toBeInTheDocument(); + }); + + it("should hide API key input when switching from non-OpenHands to OpenHands provider in SaaS mode", async () => { + const getConfigSpy = vi.spyOn(OptionService, "getConfig"); + // @ts-expect-error - only return APP_MODE for these tests + getConfigSpy.mockResolvedValue({ + APP_MODE: "saas", + }); + + renderLlmSettingsScreen(); + await screen.findByTestId("llm-settings-screen"); + + const basicForm = screen.getByTestId("llm-settings-form-basic"); + const provider = within(basicForm).getByTestId("llm-provider-input"); + + // Start with OpenAI 
provider + await userEvent.click(provider); + const openAIOption = screen.getByText("OpenAI"); + await userEvent.click(openAIOption); + + await waitFor(() => { + expect(provider).toHaveValue("OpenAI"); + }); + + // API key input should be visible with OpenAI + expect( + within(basicForm).getByTestId("llm-api-key-input"), + ).toBeInTheDocument(); + + // Switch to OpenHands provider + await userEvent.click(provider); + const openHandsOption = screen.getByText("OpenHands"); + await userEvent.click(openHandsOption); + + await waitFor(() => { + expect(provider).toHaveValue("OpenHands"); + }); + + // API key input should now be hidden + expect( + within(basicForm).queryByTestId("llm-api-key-input"), + ).not.toBeInTheDocument(); + expect( + within(basicForm).queryByTestId("llm-api-key-help-anchor"), + ).not.toBeInTheDocument(); + }); + + it("should show API key input when switching from OpenHands to non-OpenHands provider in SaaS mode", async () => { + const getConfigSpy = vi.spyOn(OptionService, "getConfig"); + // @ts-expect-error - only return APP_MODE for these tests + getConfigSpy.mockResolvedValue({ + APP_MODE: "saas", + }); + + renderLlmSettingsScreen(); + await screen.findByTestId("llm-settings-screen"); + + const basicForm = screen.getByTestId("llm-settings-form-basic"); + const provider = within(basicForm).getByTestId("llm-provider-input"); + + // Verify OpenHands is selected by default + await waitFor(() => { + expect(provider).toHaveValue("OpenHands"); + }); + + // API key input should be hidden with OpenHands + expect( + within(basicForm).queryByTestId("llm-api-key-input"), + ).not.toBeInTheDocument(); + + // Switch to OpenAI provider + await userEvent.click(provider); + const openAIOption = screen.getByText("OpenAI"); + await userEvent.click(openAIOption); + + await waitFor(() => { + expect(provider).toHaveValue("OpenAI"); + }); + + // API key input should now be visible + expect( + within(basicForm).getByTestId("llm-api-key-input"), + ).toBeInTheDocument(); + expect( + within(basicForm).getByTestId("llm-api-key-help-anchor"), + ).toBeInTheDocument(); + }); + }); }); describe("Form submission", () => { diff --git a/frontend/__tests__/routes/secrets-settings.test.tsx b/frontend/__tests__/routes/secrets-settings.test.tsx index 5517e965aa97..9b5c315f922d 100644 --- a/frontend/__tests__/routes/secrets-settings.test.tsx +++ b/frontend/__tests__/routes/secrets-settings.test.tsx @@ -1,12 +1,12 @@ import { render, screen, waitFor, within } from "@testing-library/react"; -import { beforeEach, describe, expect, it, vi } from "vitest"; +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; import { QueryClient, QueryClientProvider } from "@tanstack/react-query"; import userEvent from "@testing-library/user-event"; import { createRoutesStub, Outlet } from "react-router"; import SecretsSettingsScreen from "#/routes/secrets-settings"; import { SecretsService } from "#/api/secrets-service"; import { GetSecretsResponse } from "#/api/secrets-service.types"; -import SettingsService from "#/settings-service/settings-service.api"; +import SettingsService from "#/api/settings-service/settings-service.api"; import OptionService from "#/api/option-service/option-service.api"; import { MOCK_DEFAULT_USER_SETTINGS } from "#/mocks/handlers"; @@ -21,25 +21,25 @@ const MOCK_GET_SECRETS_RESPONSE: GetSecretsResponse["custom_secrets"] = [ }, ]; -const RouterStub = createRoutesStub([ - { - Component: () => , - path: "/settings", - children: [ - { - Component: SecretsSettingsScreen, - path: 
"/settings/secrets", - }, - { - Component: () =>
, - path: "/settings/integrations", - }, - ], - }, -]); - -const renderSecretsSettings = () => - render(, { +const renderSecretsSettings = () => { + const RouterStub = createRoutesStub([ + { + Component: () => , + path: "/settings", + children: [ + { + Component: SecretsSettingsScreen, + path: "/settings/secrets", + }, + { + Component: () =>
, + path: "/settings/integrations", + }, + ], + }, + ]); + + return render(, { wrapper: ({ children }) => ( ), }); +}; beforeEach(() => { const getConfigSpy = vi.spyOn(OptionService, "getConfig"); @@ -61,6 +62,10 @@ beforeEach(() => { }); }); +afterEach(() => { + vi.restoreAllMocks(); +}); + describe("Content", () => { it("should render the secrets settings screen", () => { renderSecretsSettings(); @@ -501,6 +506,8 @@ describe("Secret actions", () => { it("should not submit whitespace secret names or values", async () => { const createSecretSpy = vi.spyOn(SecretsService, "createSecret"); + const getSecretsSpy = vi.spyOn(SecretsService, "getSecrets"); + getSecretsSpy.mockResolvedValue([]); renderSecretsSettings(); // render form & hide items @@ -532,9 +539,11 @@ describe("Secret actions", () => { await userEvent.click(submitButton); expect(createSecretSpy).not.toHaveBeenCalled(); - expect( - screen.queryByText("SECRETS$SECRET_VALUE_REQUIRED"), - ).toBeInTheDocument(); + await waitFor(() => { + expect( + screen.queryByText("SECRETS$SECRET_VALUE_REQUIRED"), + ).toBeInTheDocument(); + }); }); it("should not reset ipout values on an invalid submit", async () => { diff --git a/frontend/src/services/__tests__/actions.test.ts b/frontend/__tests__/services/actions.test.ts similarity index 94% rename from frontend/src/services/__tests__/actions.test.ts rename to frontend/__tests__/services/actions.test.ts index a0df1915a834..8054b999d1cb 100644 --- a/frontend/src/services/__tests__/actions.test.ts +++ b/frontend/__tests__/services/actions.test.ts @@ -1,8 +1,8 @@ import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; -import { handleStatusMessage } from "../actions"; +import { handleStatusMessage } from "#/services/actions"; import { StatusMessage } from "#/types/message"; import { queryClient } from "#/query-client-config"; -import { useStatusStore } from "#/state/status-store"; +import { useStatusStore } from "#/stores/status-store"; import { trackError } from "#/utils/error-handler"; // Mock dependencies @@ -12,7 +12,7 @@ vi.mock("#/query-client-config", () => ({ }, })); -vi.mock("#/state/status-store", () => ({ +vi.mock("#/stores/status-store", () => ({ useStatusStore: { getState: vi.fn(() => ({ setCurStatusMessage: vi.fn(), diff --git a/frontend/__tests__/services/actions.test.tsx b/frontend/__tests__/services/actions.test.tsx index 05473dcb3589..555fd18caa0c 100644 --- a/frontend/__tests__/services/actions.test.tsx +++ b/frontend/__tests__/services/actions.test.tsx @@ -1,8 +1,8 @@ import { describe, it, expect, vi, beforeEach } from "vitest"; import ActionType from "#/types/action-type"; import { ActionMessage } from "#/types/message"; +import { useCommandStore } from "#/stores/command-store"; -// Mock the store and actions const mockDispatch = vi.fn(); const mockAppendInput = vi.fn(); @@ -12,26 +12,12 @@ vi.mock("#/store", () => ({ }, })); -vi.mock("#/state/command-store", () => ({ - useCommandStore: { - getState: () => ({ - appendInput: mockAppendInput, - }), - }, -})); - -vi.mock("#/state/metrics-slice", () => ({ - setMetrics: vi.fn(), -})); - -vi.mock("#/state/security-analyzer-slice", () => ({ - appendSecurityAnalyzerInput: vi.fn(), -})); - describe("handleActionMessage", () => { beforeEach(() => { - // Clear all mocks before each test vi.clearAllMocks(); + useCommandStore.setState({ + appendInput: mockAppendInput, + }); }); it("should handle RUN actions by adding input to terminal", async () => { diff --git a/frontend/src/utils/__tests__/custom-toast-handlers.test.ts 
b/frontend/__tests__/utils/custom-toast-handlers.test.ts similarity index 98% rename from frontend/src/utils/__tests__/custom-toast-handlers.test.ts rename to frontend/__tests__/utils/custom-toast-handlers.test.ts index 09023b517ad5..404bc1d4dd56 100644 --- a/frontend/src/utils/__tests__/custom-toast-handlers.test.ts +++ b/frontend/__tests__/utils/custom-toast-handlers.test.ts @@ -3,7 +3,7 @@ import toast from "react-hot-toast"; import { displaySuccessToast, displayErrorToast, -} from "../custom-toast-handlers"; +} from "#/utils/custom-toast-handlers"; // Mock react-hot-toast vi.mock("react-hot-toast", () => ({ diff --git a/frontend/__tests__/utils/has-advanced-settings-set.test.ts b/frontend/__tests__/utils/has-advanced-settings-set.test.ts index c6bd94b8f0e8..36c7a7b60964 100644 --- a/frontend/__tests__/utils/has-advanced-settings-set.test.ts +++ b/frontend/__tests__/utils/has-advanced-settings-set.test.ts @@ -12,20 +12,20 @@ describe("hasAdvancedSettingsSet", () => { }); describe("should be true if", () => { - test("LLM_BASE_URL is set", () => { + test("llm_base_url is set", () => { expect( hasAdvancedSettingsSet({ ...DEFAULT_SETTINGS, - LLM_BASE_URL: "test", + llm_base_url: "test", }), ).toBe(true); }); - test("AGENT is not default value", () => { + test("agent is not default value", () => { expect( hasAdvancedSettingsSet({ ...DEFAULT_SETTINGS, - AGENT: "test", + agent: "test", }), ).toBe(true); }); diff --git a/frontend/__tests__/utils/model-name-case-preservation.test.tsx b/frontend/__tests__/utils/model-name-case-preservation.test.tsx index 4af08e127f7a..f3853ce4a522 100644 --- a/frontend/__tests__/utils/model-name-case-preservation.test.tsx +++ b/frontend/__tests__/utils/model-name-case-preservation.test.tsx @@ -13,7 +13,7 @@ describe("Model name case preservation", () => { const settings = extractSettings(formData); // Test that model names maintain their original casing - expect(settings.LLM_MODEL).toBe("SambaNova/Meta-Llama-3.1-8B-Instruct"); + expect(settings.llm_model).toBe("SambaNova/Meta-Llama-3.1-8B-Instruct"); }); it("should preserve openai model case", () => { @@ -24,7 +24,7 @@ describe("Model name case preservation", () => { formData.set("language", "en"); const settings = extractSettings(formData); - expect(settings.LLM_MODEL).toBe("openai/gpt-4o"); + expect(settings.llm_model).toBe("openai/gpt-4o"); }); it("should preserve anthropic model case", () => { @@ -35,7 +35,7 @@ describe("Model name case preservation", () => { formData.set("language", "en"); const settings = extractSettings(formData); - expect(settings.LLM_MODEL).toBe("anthropic/claude-sonnet-4-20250514"); + expect(settings.llm_model).toBe("anthropic/claude-sonnet-4-20250514"); }); it("should not automatically lowercase model names", () => { @@ -48,7 +48,7 @@ describe("Model name case preservation", () => { const settings = extractSettings(formData); // Test that camelCase and PascalCase are preserved - expect(settings.LLM_MODEL).not.toBe("sambanova/meta-llama-3.1-8b-instruct"); - expect(settings.LLM_MODEL).toBe("SambaNova/Meta-Llama-3.1-8B-Instruct"); + expect(settings.llm_model).not.toBe("sambanova/meta-llama-3.1-8b-instruct"); + expect(settings.llm_model).toBe("SambaNova/Meta-Llama-3.1-8B-Instruct"); }); }); diff --git a/frontend/src/utils/__tests__/settings-utils.test.ts b/frontend/__tests__/utils/settings-utils.test.ts similarity index 90% rename from frontend/src/utils/__tests__/settings-utils.test.ts rename to frontend/__tests__/utils/settings-utils.test.ts index bebdaa0f8895..9eb9a038a5e9 100644 --- 
a/frontend/src/utils/__tests__/settings-utils.test.ts +++ b/frontend/__tests__/utils/settings-utils.test.ts @@ -1,5 +1,5 @@ import { describe, it, expect } from "vitest"; -import { parseMaxBudgetPerTask, extractSettings } from "../settings-utils"; +import { parseMaxBudgetPerTask, extractSettings } from "#/utils/settings-utils"; describe("parseMaxBudgetPerTask", () => { it("should return null for empty string", () => { @@ -67,10 +67,10 @@ describe("extractSettings", () => { // Verify that the model name case is preserved const expectedModel = `${provider}/${model}`; - expect(settings.LLM_MODEL).toBe(expectedModel); + expect(settings.llm_model).toBe(expectedModel); // Only test that it's not lowercased if the original has uppercase letters if (expectedModel !== expectedModel.toLowerCase()) { - expect(settings.LLM_MODEL).not.toBe(expectedModel.toLowerCase()); + expect(settings.llm_model).not.toBe(expectedModel.toLowerCase()); } }); }); @@ -85,7 +85,7 @@ describe("extractSettings", () => { const settings = extractSettings(formData); // Custom model should take precedence and preserve case - expect(settings.LLM_MODEL).toBe("Custom-Model-Name"); - expect(settings.LLM_MODEL).not.toBe("custom-model-name"); + expect(settings.llm_model).toBe("Custom-Model-Name"); + expect(settings.llm_model).not.toBe("custom-model-name"); }); }); diff --git a/frontend/src/utils/__tests__/toast-duration.test.ts b/frontend/__tests__/utils/toast-duration.test.ts similarity index 97% rename from frontend/src/utils/__tests__/toast-duration.test.ts rename to frontend/__tests__/utils/toast-duration.test.ts index 3b5ffa8b69cc..3ef6c803d9f9 100644 --- a/frontend/src/utils/__tests__/toast-duration.test.ts +++ b/frontend/__tests__/utils/toast-duration.test.ts @@ -1,5 +1,5 @@ import { describe, it, expect } from "vitest"; -import { calculateToastDuration } from "../toast-duration"; +import { calculateToastDuration } from "#/utils/toast-duration"; describe("calculateToastDuration", () => { it("should return minimum duration for short messages", () => { diff --git a/frontend/src/utils/__tests__/vscode-url-helper.test.ts b/frontend/__tests__/utils/vscode-url-helper.test.ts similarity index 96% rename from frontend/src/utils/__tests__/vscode-url-helper.test.ts rename to frontend/__tests__/utils/vscode-url-helper.test.ts index c85804089b2e..a55b03bbbf8c 100644 --- a/frontend/src/utils/__tests__/vscode-url-helper.test.ts +++ b/frontend/__tests__/utils/vscode-url-helper.test.ts @@ -1,5 +1,5 @@ import { describe, it, expect, beforeEach, afterEach } from "vitest"; -import { transformVSCodeUrl } from "../vscode-url-helper"; +import { transformVSCodeUrl } from "#/utils/vscode-url-helper"; describe("transformVSCodeUrl", () => { const originalWindowLocation = window.location; diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 18ba27b40ca6..33717ced210f 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -1,89 +1,71 @@ { "name": "openhands-frontend", - "version": "0.62.0", + "version": "1.0.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "openhands-frontend", - "version": "0.62.0", + "version": "1.0.0", "dependencies": { - "@heroui/react": "2.8.5", - "@heroui/use-infinite-scroll": "^2.2.11", + "@heroui/react": "2.8.6", "@microlink/react-json-view": "^1.26.2", "@monaco-editor/react": "^4.7.0-rc.0", - "@posthog/react": "^1.4.0", - "@react-router/node": "^7.9.3", - "@react-router/serve": "^7.9.3", - "@react-types/shared": "^3.32.0", - "@stripe/react-stripe-js": "^4.0.2", - 
"@stripe/stripe-js": "^7.9.0", - "@tailwindcss/postcss": "^4.1.13", - "@tailwindcss/vite": "^4.1.13", - "@tanstack/react-query": "^5.90.2", + "@react-router/node": "^7.11.0", + "@react-router/serve": "^7.11.0", + "@tailwindcss/vite": "^4.1.18", + "@tanstack/react-query": "^5.90.12", "@uidotdev/usehooks": "^2.4.1", - "@vitejs/plugin-react": "^5.0.4", "@xterm/addon-fit": "^0.10.0", "@xterm/xterm": "^5.4.0", - "axios": "^1.12.2", + "axios": "^1.13.2", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", - "date-fns": "^4.1.0", - "downshift": "^9.0.10", + "downshift": "^9.0.13", "eslint-config-airbnb-typescript": "^18.0.0", - "framer-motion": "^12.23.22", - "i18next": "^25.5.2", + "framer-motion": "^12.23.25", + "i18next": "^25.7.3", "i18next-browser-languagedetector": "^8.2.0", "i18next-http-backend": "^3.0.2", - "isbot": "^5.1.31", - "jose": "^6.1.0", - "lucide-react": "^0.544.0", - "monaco-editor": "^0.53.0", - "posthog-js": "^1.290.0", - "react": "^19.1.1", - "react-dom": "^19.1.1", - "react-highlight": "^0.15.0", + "isbot": "^5.1.32", + "lucide-react": "^0.562.0", + "monaco-editor": "^0.55.1", + "posthog-js": "^1.309.1", + "react": "^19.2.3", + "react-dom": "^19.2.3", "react-hot-toast": "^2.6.0", - "react-i18next": "^16.0.0", + "react-i18next": "^16.5.0", "react-icons": "^5.5.0", "react-markdown": "^10.1.0", - "react-router": "^7.9.3", - "react-syntax-highlighter": "^15.6.6", + "react-router": "^7.11.0", + "react-syntax-highlighter": "^16.1.0", "remark-breaks": "^4.0.0", "remark-gfm": "^4.0.1", "sirv-cli": "^3.0.1", "socket.io-client": "^4.8.1", - "tailwind-merge": "^3.3.1", + "tailwind-merge": "^3.4.0", "tailwind-scrollbar": "^4.0.2", - "vite": "^7.1.7", - "web-vitals": "^5.1.0", - "ws": "^8.18.2", - "zustand": "^5.0.8" + "vite": "^7.3.0", + "zustand": "^5.0.9" }, "devDependencies": { - "@babel/parser": "^7.28.3", - "@babel/traverse": "^7.28.3", - "@babel/types": "^7.28.2", "@mswjs/socket.io-binding": "^0.2.0", - "@playwright/test": "^1.55.1", - "@react-router/dev": "^7.9.3", + "@playwright/test": "^1.57.0", + "@react-router/dev": "^7.11.0", "@tailwindcss/typography": "^0.5.19", "@tanstack/eslint-plugin-query": "^5.91.0", "@testing-library/dom": "^10.4.1", - "@testing-library/jest-dom": "^6.8.0", - "@testing-library/react": "^16.3.0", + "@testing-library/jest-dom": "^6.9.1", + "@testing-library/react": "^16.3.1", "@testing-library/user-event": "^14.6.1", - "@types/node": "^24.5.2", - "@types/react": "^19.1.15", - "@types/react-dom": "^19.1.9", - "@types/react-highlight": "^0.12.8", + "@types/node": "^25.0.3", + "@types/react": "^19.2.7", + "@types/react-dom": "^19.2.3", "@types/react-syntax-highlighter": "^15.5.13", - "@types/ws": "^8.18.1", "@typescript-eslint/eslint-plugin": "^7.18.0", "@typescript-eslint/parser": "^7.18.0", - "@vitest/coverage-v8": "^3.2.3", - "autoprefixer": "^10.4.21", - "cross-env": "^10.0.0", + "@vitest/coverage-v8": "^4.0.16", + "cross-env": "^10.1.0", "eslint": "^8.57.0", "eslint-config-airbnb": "^19.0.4", "eslint-config-airbnb-typescript": "^18.0.0", @@ -96,21 +78,27 @@ "eslint-plugin-react-hooks": "^4.6.2", "eslint-plugin-unused-imports": "^4.2.0", "husky": "^9.1.7", - "jsdom": "^27.0.0", - "lint-staged": "^16.2.3", + "jsdom": "^27.3.0", + "lint-staged": "^16.2.7", "msw": "^2.6.6", - "prettier": "^3.6.2", - "stripe": "^18.5.0", + "prettier": "^3.7.3", "tailwindcss": "^4.1.8", - "typescript": "^5.9.2", + "typescript": "^5.9.3", "vite-plugin-svgr": "^4.5.0", - "vite-tsconfig-paths": "^5.1.4", - "vitest": "^3.0.2" + "vite-tsconfig-paths": "^6.0.3", + "vitest": 
"^4.0.14" }, "engines": { "node": ">=22.0.0" } }, + "node_modules/@acemir/cssom": { + "version": "0.9.29", + "resolved": "https://registry.npmjs.org/@acemir/cssom/-/cssom-0.9.29.tgz", + "integrity": "sha512-G90x0VW+9nW4dFajtjCoT+NM0scAfH9Mb08IcjgFHYbfiL/lU04dTF9JuVOi3/OH+DJCQdcIseSXkdCB9Ky6JA==", + "dev": true, + "license": "MIT" + }, "node_modules/@adobe/css-tools": { "version": "4.4.4", "resolved": "https://registry.npmjs.org/@adobe/css-tools/-/css-tools-4.4.4.tgz", @@ -118,72 +106,50 @@ "dev": true, "license": "MIT" }, - "node_modules/@alloc/quick-lru": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz", - "integrity": "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@ampproject/remapping": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", - "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.24" - }, - "engines": { - "node": ">=6.0.0" - } - }, "node_modules/@asamuzakjp/css-color": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/@asamuzakjp/css-color/-/css-color-4.0.5.tgz", - "integrity": "sha512-lMrXidNhPGsDjytDy11Vwlb6OIGrT3CmLg3VWNFyWkLWtijKl7xjvForlh8vuj0SHGjgl4qZEQzUmYTeQA2JFQ==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@asamuzakjp/css-color/-/css-color-4.1.0.tgz", + "integrity": "sha512-9xiBAtLn4aNsa4mDnpovJvBn72tNEIACyvlqaNJ+ADemR+yeMJWnBudOi2qGDviJa7SwcDOU/TRh5dnET7qk0w==", "dev": true, + "license": "MIT", "dependencies": { "@csstools/css-calc": "^2.1.4", "@csstools/css-color-parser": "^3.1.0", "@csstools/css-parser-algorithms": "^3.0.5", "@csstools/css-tokenizer": "^3.0.4", - "lru-cache": "^11.2.1" + "lru-cache": "^11.2.2" } }, "node_modules/@asamuzakjp/css-color/node_modules/lru-cache": { - "version": "11.2.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.1.tgz", - "integrity": "sha512-r8LA6i4LP4EeWOhqBaZZjDWwehd1xUJPCJd9Sv300H0ZmcUER4+JPh7bqqZeqs1o5pgtgvXm+d9UGrB5zZGDiQ==", + "version": "11.2.4", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.4.tgz", + "integrity": "sha512-B5Y16Jr9LB9dHVkh6ZevG+vAbOsNOYCX+sXvFWFu7B3Iz5mijW3zdbMyhsh8ANd2mSWBYdJgnqi+mL7/LrOPYg==", "dev": true, + "license": "BlueOak-1.0.0", "engines": { "node": "20 || >=22" } }, "node_modules/@asamuzakjp/dom-selector": { - "version": "6.5.6", - "resolved": "https://registry.npmjs.org/@asamuzakjp/dom-selector/-/dom-selector-6.5.6.tgz", - "integrity": "sha512-Mj3Hu9ymlsERd7WOsUKNUZnJYL4IZ/I9wVVYgtvOsWYiEFbkQ4G7VRIh2USxTVW4BBDIsLG+gBUgqOqf2Kvqow==", + "version": "6.7.6", + "resolved": "https://registry.npmjs.org/@asamuzakjp/dom-selector/-/dom-selector-6.7.6.tgz", + "integrity": "sha512-hBaJER6A9MpdG3WgdlOolHmbOYvSk46y7IQN/1+iqiCuUu6iWdQrs9DGKF8ocqsEqWujWf/V7b7vaDgiUmIvUg==", "dev": true, + "license": "MIT", "dependencies": { "@asamuzakjp/nwsapi": "^2.3.9", "bidi-js": "^1.0.3", "css-tree": "^3.1.0", "is-potential-custom-element-name": "^1.0.1", - "lru-cache": "^11.2.1" + "lru-cache": "^11.2.4" } }, "node_modules/@asamuzakjp/dom-selector/node_modules/lru-cache": { - "version": "11.2.1", - "resolved": 
"https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.1.tgz", - "integrity": "sha512-r8LA6i4LP4EeWOhqBaZZjDWwehd1xUJPCJd9Sv300H0ZmcUER4+JPh7bqqZeqs1o5pgtgvXm+d9UGrB5zZGDiQ==", + "version": "11.2.4", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.4.tgz", + "integrity": "sha512-B5Y16Jr9LB9dHVkh6ZevG+vAbOsNOYCX+sXvFWFu7B3Iz5mijW3zdbMyhsh8ANd2mSWBYdJgnqi+mL7/LrOPYg==", "dev": true, + "license": "BlueOak-1.0.0", "engines": { "node": "20 || >=22" } @@ -192,12 +158,14 @@ "version": "2.3.9", "resolved": "https://registry.npmjs.org/@asamuzakjp/nwsapi/-/nwsapi-2.3.9.tgz", "integrity": "sha512-n8GuYSrI9bF7FFZ/SjhwevlHc8xaVlb/7HmHelnc/PZXBD2ZR49NnN9sMMuDdEGPeeRQ5d0hqlSlEpgCX3Wl0Q==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/@babel/code-frame": { "version": "7.27.1", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", + "dev": true, "license": "MIT", "dependencies": { "@babel/helper-validator-identifier": "^7.27.1", @@ -209,29 +177,32 @@ } }, "node_modules/@babel/compat-data": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.4.tgz", - "integrity": "sha512-YsmSKC29MJwf0gF8Rjjrg5LQCmyh+j/nD8/eP7f+BeoQTKYqs9RoWbjGOdy0+1Ekr68RJZMUOPVQaQisnIo4Rw==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.5.tgz", + "integrity": "sha512-6uFXyCayocRbqhZOB+6XcuZbkMNimwfVGFji8CTZnCzOHVGvDqzvitu1re2AU5LROliz7eQPhB8CpAMvnx9EjA==", + "dev": true, "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/core": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.4.tgz", - "integrity": "sha512-2BCOP7TN8M+gVDj7/ht3hsaO/B/n5oDbiAyyvnRlNOs+u1o+JWNYTQrmpuNp1/Wq2gcFrI01JAW+paEKDMx/CA==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.5.tgz", + "integrity": "sha512-e7jT4DxYvIDLk1ZHmU/m/mB19rex9sv0c2ftBtjSBv+kVM/902eh0fINUzD7UwLLNR+jU585GxUJ8/EBfAM5fw==", + "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.3", + "@babel/generator": "^7.28.5", "@babel/helper-compilation-targets": "^7.27.2", "@babel/helper-module-transforms": "^7.28.3", "@babel/helpers": "^7.28.4", - "@babel/parser": "^7.28.4", + "@babel/parser": "^7.28.5", "@babel/template": "^7.27.2", - "@babel/traverse": "^7.28.4", - "@babel/types": "^7.28.4", + "@babel/traverse": "^7.28.5", + "@babel/types": "^7.28.5", "@jridgewell/remapping": "^2.3.5", "convert-source-map": "^2.0.0", "debug": "^4.1.0", @@ -251,19 +222,21 @@ "version": "6.3.1", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, "license": "ISC", "bin": { "semver": "bin/semver.js" } }, "node_modules/@babel/generator": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", - "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.5.tgz", + "integrity": "sha512-3EwLFhZ38J4VyIP6WNtt2kUdW9dokXA9Cr4IVIFHuCpZ3H8/YFOl5JjZHisrn1fATPBmKKqXzDFvh9fUwHz6CQ==", + "dev": true, "license": "MIT", "dependencies": { - 
"@babel/parser": "^7.28.3", - "@babel/types": "^7.28.2", + "@babel/parser": "^7.28.5", + "@babel/types": "^7.28.5", "@jridgewell/gen-mapping": "^0.3.12", "@jridgewell/trace-mapping": "^0.3.28", "jsesc": "^3.0.2" @@ -289,6 +262,7 @@ "version": "7.27.2", "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz", "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==", + "dev": true, "license": "MIT", "dependencies": { "@babel/compat-data": "^7.27.2", @@ -305,24 +279,25 @@ "version": "6.3.1", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, "license": "ISC", "bin": { "semver": "bin/semver.js" } }, "node_modules/@babel/helper-create-class-features-plugin": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.28.3.tgz", - "integrity": "sha512-V9f6ZFIYSLNEbuGA/92uOvYsGCJNsuA8ESZ4ldc09bWk/j8H8TKiPw8Mk1eG6olpnO0ALHJmYfZvF4MEE4gajg==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.28.5.tgz", + "integrity": "sha512-q3WC4JfdODypvxArsJQROfupPBq9+lMwjKq7C33GhbFYJsufD0yd/ziwD+hJucLeWsnFPWZjsU2DNFqBPE7jwQ==", "dev": true, "license": "MIT", "dependencies": { "@babel/helper-annotate-as-pure": "^7.27.3", - "@babel/helper-member-expression-to-functions": "^7.27.1", + "@babel/helper-member-expression-to-functions": "^7.28.5", "@babel/helper-optimise-call-expression": "^7.27.1", "@babel/helper-replace-supers": "^7.27.1", "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", - "@babel/traverse": "^7.28.3", + "@babel/traverse": "^7.28.5", "semver": "^6.3.1" }, "engines": { @@ -346,20 +321,21 @@ "version": "7.28.0", "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", + "dev": true, "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-member-expression-to-functions": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.27.1.tgz", - "integrity": "sha512-E5chM8eWjTp/aNoVpcbfM7mLxu9XGLWYise2eBKGQomAk/Mb4XoxyqXTZbuTohbsl8EKqdlMhnDI2CCLfcs9wA==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.28.5.tgz", + "integrity": "sha512-cwM7SBRZcPCLgl8a7cY0soT1SptSzAlMH39vwiRpOQkJlh53r5hdHwLSCZpQdVLT39sZt+CRpNwYG4Y2v77atg==", "dev": true, "license": "MIT", "dependencies": { - "@babel/traverse": "^7.27.1", - "@babel/types": "^7.27.1" + "@babel/traverse": "^7.28.5", + "@babel/types": "^7.28.5" }, "engines": { "node": ">=6.9.0" @@ -369,6 +345,7 @@ "version": "7.27.1", "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz", "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==", + "dev": true, "license": "MIT", "dependencies": { "@babel/traverse": "^7.27.1", @@ -382,6 +359,7 @@ "version": "7.28.3", "resolved": 
"https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.3.tgz", "integrity": "sha512-gytXUbs8k2sXS9PnQptz5o0QnpLL51SwASIORY6XaBKF88nsOT0Zw9szLqlSGQDP/4TljBAD5y98p2U1fqkdsw==", + "dev": true, "license": "MIT", "dependencies": { "@babel/helper-module-imports": "^7.27.1", @@ -412,6 +390,7 @@ "version": "7.27.1", "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.27.1.tgz", "integrity": "sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==", + "dev": true, "license": "MIT", "engines": { "node": ">=6.9.0" @@ -453,15 +432,17 @@ "version": "7.27.1", "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", + "dev": true, "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz", - "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", + "dev": true, "license": "MIT", "engines": { "node": ">=6.9.0" @@ -471,6 +452,7 @@ "version": "7.27.1", "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", + "dev": true, "license": "MIT", "engines": { "node": ">=6.9.0" @@ -480,6 +462,7 @@ "version": "7.28.4", "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz", "integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==", + "dev": true, "license": "MIT", "dependencies": { "@babel/template": "^7.27.2", @@ -490,12 +473,13 @@ } }, "node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz", + "integrity": "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==", + "dev": true, "license": "MIT", "dependencies": { - "@babel/types": "^7.28.4" + "@babel/types": "^7.28.5" }, "bin": { "parser": "bin/babel-parser.js" @@ -553,45 +537,15 @@ "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-transform-react-jsx-self": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.27.1.tgz", - "integrity": "sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-react-jsx-source": { - "version": "7.27.1", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.27.1.tgz", - "integrity": "sha512-zbwoTsBruTeKB9hSq73ha66iFeJHuaFkUbwvqElnygoNbj/jHRsSeokowZFN3CZ64IvEqcmmkVe89OPXc7ldAw==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, "node_modules/@babel/plugin-transform-typescript": { - "version": "7.28.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.28.0.tgz", - "integrity": "sha512-4AEiDEBPIZvLQaWlc9liCavE0xRM0dNca41WtBeM3jgFptfUOSG9z0uteLhq6+3rq+WB6jIvUwKDTpXEHPJ2Vg==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.28.5.tgz", + "integrity": "sha512-x2Qa+v/CuEoX7Dr31iAfr0IhInrVOWZU/2vJMJ00FOR/2nM0BcBEclpaf9sWCDc+v5e9dMrhSH8/atq/kX7+bA==", "dev": true, "license": "MIT", "dependencies": { "@babel/helper-annotate-as-pure": "^7.27.3", - "@babel/helper-create-class-features-plugin": "^7.27.1", + "@babel/helper-create-class-features-plugin": "^7.28.5", "@babel/helper-plugin-utils": "^7.27.1", "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", "@babel/plugin-syntax-typescript": "^7.27.1" @@ -604,9 +558,9 @@ } }, "node_modules/@babel/preset-typescript": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.27.1.tgz", - "integrity": "sha512-l7WfQfX0WK4M0v2RudjuQK4u99BS6yLHYEmdtVPP7lKV013zr9DygFuWNlnbvQ9LR+LS0Egz/XAvGx5U9MX0fQ==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.28.5.tgz", + "integrity": "sha512-+bQy5WOI2V6LJZpPVxY+yp66XdZ2yifu0Mc1aP5CQKgjn4QM5IN2i5fAZ4xKop47pr8rpVhiAeu+nDQa12C8+g==", "dev": true, "license": "MIT", "dependencies": { @@ -614,7 +568,7 @@ "@babel/helper-validator-option": "^7.27.1", "@babel/plugin-syntax-jsx": "^7.27.1", "@babel/plugin-transform-modules-commonjs": "^7.27.1", - "@babel/plugin-transform-typescript": "^7.27.1" + "@babel/plugin-transform-typescript": "^7.28.5" }, "engines": { "node": ">=6.9.0" @@ -636,6 +590,7 @@ "version": "7.27.2", "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz", "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==", + "dev": true, "license": "MIT", "dependencies": { "@babel/code-frame": "^7.27.1", @@ -647,17 +602,18 @@ } }, "node_modules/@babel/traverse": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", - "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.5.tgz", + "integrity": "sha512-TCCj4t55U90khlYkVV/0TfkJkAkUg3jZFA3Neb7unZT8CPok7iiRfaX0F+WnqWqt7OxhOn0uBKXCw4lbL8W0aQ==", + "dev": true, "license": "MIT", "dependencies": { "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.3", + "@babel/generator": "^7.28.5", "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.4", + "@babel/parser": "^7.28.5", "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4", + "@babel/types": "^7.28.5", "debug": "^4.3.1" }, "engines": { @@ -665,13 +621,14 @@ } }, "node_modules/@babel/types": { - "version": "7.28.4", - "resolved": 
"https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz", + "integrity": "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==", + "dev": true, "license": "MIT", "dependencies": { "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" + "@babel/helper-validator-identifier": "^7.28.5" }, "engines": { "node": ">=6.9.0" @@ -687,36 +644,6 @@ "node": ">=18" } }, - "node_modules/@bundled-es-modules/cookie": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@bundled-es-modules/cookie/-/cookie-2.0.1.tgz", - "integrity": "sha512-8o+5fRPLNbjbdGRRmJj3h6Hh1AQJf2dk3qQ/5ZFb+PXkRNiSoMGGUKlsgLfrxneb72axVJyIYji64E2+nNfYyw==", - "dev": true, - "license": "ISC", - "dependencies": { - "cookie": "^0.7.2" - } - }, - "node_modules/@bundled-es-modules/cookie/node_modules/cookie": { - "version": "0.7.2", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", - "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/@bundled-es-modules/statuses": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@bundled-es-modules/statuses/-/statuses-1.0.1.tgz", - "integrity": "sha512-yn7BklA5acgcBr+7w064fGV+SGIFySjCKpqjcWgBAIfrAkY+4GQTJJHQMeT3V/sgz23VTEVV8TtOmkvJAhFVfg==", - "dev": true, - "license": "ISC", - "dependencies": { - "statuses": "^2.0.1" - } - }, "node_modules/@csstools/color-helpers": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/@csstools/color-helpers/-/color-helpers-5.1.0.tgz", @@ -732,6 +659,7 @@ "url": "https://opencollective.com/csstools" } ], + "license": "MIT-0", "engines": { "node": ">=18" } @@ -751,6 +679,7 @@ "url": "https://opencollective.com/csstools" } ], + "license": "MIT", "engines": { "node": ">=18" }, @@ -774,6 +703,7 @@ "url": "https://opencollective.com/csstools" } ], + "license": "MIT", "dependencies": { "@csstools/color-helpers": "^5.1.0", "@csstools/css-calc": "^2.1.4" @@ -801,6 +731,8 @@ "url": "https://opencollective.com/csstools" } ], + "license": "MIT", + "peer": true, "engines": { "node": ">=18" }, @@ -823,6 +755,7 @@ "url": "https://opencollective.com/csstools" } ], + "license": "MIT-0", "engines": { "node": ">=18" }, @@ -845,6 +778,8 @@ "url": "https://opencollective.com/csstools" } ], + "license": "MIT", + "peer": true, "engines": { "node": ">=18" } @@ -857,13 +792,12 @@ "license": "MIT" }, "node_modules/@esbuild/aix-ppc64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.9.tgz", - "integrity": "sha512-OaGtL73Jck6pBKjNIe24BnFE6agGl+6KxDtTfHhy1HmhthfKouEcOhqpSL64K4/0WCtbKFLOdzD/44cJ4k9opA==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.1.tgz", + "integrity": "sha512-HHB50pdsBX6k47S4u5g/CaLjqS3qwaOVE5ILsq64jyzgMhLuCuZ8rGzM9yhsAjfjkbgUPMzZEPa7DAp7yz6vuA==", "cpu": [ "ppc64" ], - "license": "MIT", "optional": true, "os": [ "aix" @@ -873,13 +807,12 @@ } }, "node_modules/@esbuild/android-arm": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.9.tgz", - "integrity": 
"sha512-5WNI1DaMtxQ7t7B6xa572XMXpHAaI/9Hnhk8lcxF4zVN4xstUgTlvuGDorBguKEnZO70qwEcLpfifMLoxiPqHQ==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.1.tgz", + "integrity": "sha512-kFqa6/UcaTbGm/NncN9kzVOODjhZW8e+FRdSeypWe6j33gzclHtwlANs26JrupOntlcWmB0u8+8HZo8s7thHvg==", "cpu": [ "arm" ], - "license": "MIT", "optional": true, "os": [ "android" @@ -889,13 +822,12 @@ } }, "node_modules/@esbuild/android-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.9.tgz", - "integrity": "sha512-IDrddSmpSv51ftWslJMvl3Q2ZT98fUSL2/rlUXuVqRXHCs5EUF1/f+jbjF5+NG9UffUDMCiTyh8iec7u8RlTLg==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.1.tgz", + "integrity": "sha512-45fuKmAJpxnQWixOGCrS+ro4Uvb4Re9+UTieUY2f8AEc+t7d4AaZ6eUJ3Hva7dtrxAAWHtlEFsXFMAgNnGU9uQ==", "cpu": [ "arm64" ], - "license": "MIT", "optional": true, "os": [ "android" @@ -905,13 +837,12 @@ } }, "node_modules/@esbuild/android-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.9.tgz", - "integrity": "sha512-I853iMZ1hWZdNllhVZKm34f4wErd4lMyeV7BLzEExGEIZYsOzqDWDf+y082izYUE8gtJnYHdeDpN/6tUdwvfiw==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.1.tgz", + "integrity": "sha512-LBEpOz0BsgMEeHgenf5aqmn/lLNTFXVfoWMUox8CtWWYK9X4jmQzWjoGoNb8lmAYml/tQ/Ysvm8q7szu7BoxRQ==", "cpu": [ "x64" ], - "license": "MIT", "optional": true, "os": [ "android" @@ -921,13 +852,12 @@ } }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.9.tgz", - "integrity": "sha512-XIpIDMAjOELi/9PB30vEbVMs3GV1v2zkkPnuyRRURbhqjyzIINwj+nbQATh4H9GxUgH1kFsEyQMxwiLFKUS6Rg==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.1.tgz", + "integrity": "sha512-veg7fL8eMSCVKL7IW4pxb54QERtedFDfY/ASrumK/SbFsXnRazxY4YykN/THYqFnFwJ0aVjiUrVG2PwcdAEqQQ==", "cpu": [ "arm64" ], - "license": "MIT", "optional": true, "os": [ "darwin" @@ -937,13 +867,12 @@ } }, "node_modules/@esbuild/darwin-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.9.tgz", - "integrity": "sha512-jhHfBzjYTA1IQu8VyrjCX4ApJDnH+ez+IYVEoJHeqJm9VhG9Dh2BYaJritkYK3vMaXrf7Ogr/0MQ8/MeIefsPQ==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.1.tgz", + "integrity": "sha512-+3ELd+nTzhfWb07Vol7EZ+5PTbJ/u74nC6iv4/lwIU99Ip5uuY6QoIf0Hn4m2HoV0qcnRivN3KSqc+FyCHjoVQ==", "cpu": [ "x64" ], - "license": "MIT", "optional": true, "os": [ "darwin" @@ -953,13 +882,12 @@ } }, "node_modules/@esbuild/freebsd-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.9.tgz", - "integrity": "sha512-z93DmbnY6fX9+KdD4Ue/H6sYs+bhFQJNCPZsi4XWJoYblUqT06MQUdBCpcSfuiN72AbqeBFu5LVQTjfXDE2A6Q==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.1.tgz", + "integrity": "sha512-/8Rfgns4XD9XOSXlzUDepG8PX+AVWHliYlUkFI3K3GB6tqbdjYqdhcb4BKRd7C0BhZSoaCxhv8kTcBrcZWP+xg==", "cpu": [ "arm64" ], - "license": "MIT", "optional": true, "os": [ "freebsd" @@ -969,13 +897,12 @@ } }, "node_modules/@esbuild/freebsd-x64": { - "version": "0.25.9", - "resolved": 
"https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.9.tgz", - "integrity": "sha512-mrKX6H/vOyo5v71YfXWJxLVxgy1kyt1MQaD8wZJgJfG4gq4DpQGpgTB74e5yBeQdyMTbgxp0YtNj7NuHN0PoZg==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.1.tgz", + "integrity": "sha512-GITpD8dK9C+r+5yRT/UKVT36h/DQLOHdwGVwwoHidlnA168oD3uxA878XloXebK4Ul3gDBBIvEdL7go9gCUFzQ==", "cpu": [ "x64" ], - "license": "MIT", "optional": true, "os": [ "freebsd" @@ -985,13 +912,12 @@ } }, "node_modules/@esbuild/linux-arm": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.9.tgz", - "integrity": "sha512-HBU2Xv78SMgaydBmdor38lg8YDnFKSARg1Q6AT0/y2ezUAKiZvc211RDFHlEZRFNRVhcMamiToo7bDx3VEOYQw==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.1.tgz", + "integrity": "sha512-ieMID0JRZY/ZeCrsFQ3Y3NlHNCqIhTprJfDgSB3/lv5jJZ8FX3hqPyXWhe+gvS5ARMBJ242PM+VNz/ctNj//eA==", "cpu": [ "arm" ], - "license": "MIT", "optional": true, "os": [ "linux" @@ -1001,13 +927,12 @@ } }, "node_modules/@esbuild/linux-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.9.tgz", - "integrity": "sha512-BlB7bIcLT3G26urh5Dmse7fiLmLXnRlopw4s8DalgZ8ef79Jj4aUcYbk90g8iCa2467HX8SAIidbL7gsqXHdRw==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.1.tgz", + "integrity": "sha512-W9//kCrh/6in9rWIBdKaMtuTTzNj6jSeG/haWBADqLLa9P8O5YSRDzgD5y9QBok4AYlzS6ARHifAb75V6G670Q==", "cpu": [ "arm64" ], - "license": "MIT", "optional": true, "os": [ "linux" @@ -1017,13 +942,12 @@ } }, "node_modules/@esbuild/linux-ia32": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.9.tgz", - "integrity": "sha512-e7S3MOJPZGp2QW6AK6+Ly81rC7oOSerQ+P8L0ta4FhVi+/j/v2yZzx5CqqDaWjtPFfYz21Vi1S0auHrap3Ma3A==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.1.tgz", + "integrity": "sha512-VIUV4z8GD8rtSVMfAj1aXFahsi/+tcoXXNYmXgzISL+KB381vbSTNdeZHHHIYqFyXcoEhu9n5cT+05tRv13rlw==", "cpu": [ "ia32" ], - "license": "MIT", "optional": true, "os": [ "linux" @@ -1033,13 +957,12 @@ } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.9.tgz", - "integrity": "sha512-Sbe10Bnn0oUAB2AalYztvGcK+o6YFFA/9829PhOCUS9vkJElXGdphz0A3DbMdP8gmKkqPmPcMJmJOrI3VYB1JQ==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.1.tgz", + "integrity": "sha512-l4rfiiJRN7sTNI//ff65zJ9z8U+k6zcCg0LALU5iEWzY+a1mVZ8iWC1k5EsNKThZ7XCQ6YWtsZ8EWYm7r1UEsg==", "cpu": [ "loong64" ], - "license": "MIT", "optional": true, "os": [ "linux" @@ -1049,13 +972,12 @@ } }, "node_modules/@esbuild/linux-mips64el": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.9.tgz", - "integrity": "sha512-YcM5br0mVyZw2jcQeLIkhWtKPeVfAerES5PvOzaDxVtIyZ2NUBZKNLjC5z3/fUlDgT6w89VsxP2qzNipOaaDyA==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.1.tgz", + "integrity": "sha512-U0bEuAOLvO/DWFdygTHWY8C067FXz+UbzKgxYhXC0fDieFa0kDIra1FAhsAARRJbvEyso8aAqvPdNxzWuStBnA==", "cpu": [ "mips64el" ], - "license": "MIT", "optional": true, "os": [ "linux" @@ -1065,13 +987,12 @@ } }, "node_modules/@esbuild/linux-ppc64": { - 
"version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.9.tgz", - "integrity": "sha512-++0HQvasdo20JytyDpFvQtNrEsAgNG2CY1CLMwGXfFTKGBGQT3bOeLSYE2l1fYdvML5KUuwn9Z8L1EWe2tzs1w==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.1.tgz", + "integrity": "sha512-NzdQ/Xwu6vPSf/GkdmRNsOfIeSGnh7muundsWItmBsVpMoNPVpM61qNzAVY3pZ1glzzAxLR40UyYM23eaDDbYQ==", "cpu": [ "ppc64" ], - "license": "MIT", "optional": true, "os": [ "linux" @@ -1081,13 +1002,12 @@ } }, "node_modules/@esbuild/linux-riscv64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.9.tgz", - "integrity": "sha512-uNIBa279Y3fkjV+2cUjx36xkx7eSjb8IvnL01eXUKXez/CBHNRw5ekCGMPM0BcmqBxBcdgUWuUXmVWwm4CH9kg==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.1.tgz", + "integrity": "sha512-7zlw8p3IApcsN7mFw0O1Z1PyEk6PlKMu18roImfl3iQHTnr/yAfYv6s4hXPidbDoI2Q0pW+5xeoM4eTCC0UdrQ==", "cpu": [ "riscv64" ], - "license": "MIT", "optional": true, "os": [ "linux" @@ -1097,13 +1017,12 @@ } }, "node_modules/@esbuild/linux-s390x": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.9.tgz", - "integrity": "sha512-Mfiphvp3MjC/lctb+7D287Xw1DGzqJPb/J2aHHcHxflUo+8tmN/6d4k6I2yFR7BVo5/g7x2Monq4+Yew0EHRIA==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.1.tgz", + "integrity": "sha512-cGj5wli+G+nkVQdZo3+7FDKC25Uh4ZVwOAK6A06Hsvgr8WqBBuOy/1s+PUEd/6Je+vjfm6stX0kmib5b/O2Ykw==", "cpu": [ "s390x" ], - "license": "MIT", "optional": true, "os": [ "linux" @@ -1113,13 +1032,12 @@ } }, "node_modules/@esbuild/linux-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.9.tgz", - "integrity": "sha512-iSwByxzRe48YVkmpbgoxVzn76BXjlYFXC7NvLYq+b+kDjyyk30J0JY47DIn8z1MO3K0oSl9fZoRmZPQI4Hklzg==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.1.tgz", + "integrity": "sha512-z3H/HYI9MM0HTv3hQZ81f+AKb+yEoCRlUby1F80vbQ5XdzEMyY/9iNlAmhqiBKw4MJXwfgsh7ERGEOhrM1niMA==", "cpu": [ "x64" ], - "license": "MIT", "optional": true, "os": [ "linux" @@ -1129,13 +1047,12 @@ } }, "node_modules/@esbuild/netbsd-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.9.tgz", - "integrity": "sha512-9jNJl6FqaUG+COdQMjSCGW4QiMHH88xWbvZ+kRVblZsWrkXlABuGdFJ1E9L7HK+T0Yqd4akKNa/lO0+jDxQD4Q==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.1.tgz", + "integrity": "sha512-wzC24DxAvk8Em01YmVXyjl96Mr+ecTPyOuADAvjGg+fyBpGmxmcr2E5ttf7Im8D0sXZihpxzO1isus8MdjMCXQ==", "cpu": [ "arm64" ], - "license": "MIT", "optional": true, "os": [ "netbsd" @@ -1145,13 +1062,12 @@ } }, "node_modules/@esbuild/netbsd-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.9.tgz", - "integrity": "sha512-RLLdkflmqRG8KanPGOU7Rpg829ZHu8nFy5Pqdi9U01VYtG9Y0zOG6Vr2z4/S+/3zIyOxiK6cCeYNWOFR9QP87g==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.1.tgz", + "integrity": "sha512-1YQ8ybGi2yIXswu6eNzJsrYIGFpnlzEWRl6iR5gMgmsrR0FcNoV1m9k9sc3PuP5rUBLshOZylc9nqSgymI+TYg==", "cpu": [ "x64" ], - "license": "MIT", "optional": true, "os": [ "netbsd" @@ -1161,13 +1077,12 @@ } }, 
"node_modules/@esbuild/openbsd-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.9.tgz", - "integrity": "sha512-YaFBlPGeDasft5IIM+CQAhJAqS3St3nJzDEgsgFixcfZeyGPCd6eJBWzke5piZuZ7CtL656eOSYKk4Ls2C0FRQ==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.1.tgz", + "integrity": "sha512-5Z+DzLCrq5wmU7RDaMDe2DVXMRm2tTDvX2KU14JJVBN2CT/qov7XVix85QoJqHltpvAOZUAc3ndU56HSMWrv8g==", "cpu": [ "arm64" ], - "license": "MIT", "optional": true, "os": [ "openbsd" @@ -1177,13 +1092,12 @@ } }, "node_modules/@esbuild/openbsd-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.9.tgz", - "integrity": "sha512-1MkgTCuvMGWuqVtAvkpkXFmtL8XhWy+j4jaSO2wxfJtilVCi0ZE37b8uOdMItIHz4I6z1bWWtEX4CJwcKYLcuA==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.1.tgz", + "integrity": "sha512-Q73ENzIdPF5jap4wqLtsfh8YbYSZ8Q0wnxplOlZUOyZy7B4ZKW8DXGWgTCZmF8VWD7Tciwv5F4NsRf6vYlZtqg==", "cpu": [ "x64" ], - "license": "MIT", "optional": true, "os": [ "openbsd" @@ -1193,13 +1107,12 @@ } }, "node_modules/@esbuild/openharmony-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.9.tgz", - "integrity": "sha512-4Xd0xNiMVXKh6Fa7HEJQbrpP3m3DDn43jKxMjxLLRjWnRsfxjORYJlXPO4JNcXtOyfajXorRKY9NkOpTHptErg==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.1.tgz", + "integrity": "sha512-ajbHrGM/XiK+sXM0JzEbJAen+0E+JMQZ2l4RR4VFwvV9JEERx+oxtgkpoKv1SevhjavK2z2ReHk32pjzktWbGg==", "cpu": [ "arm64" ], - "license": "MIT", "optional": true, "os": [ "openharmony" @@ -1209,13 +1122,12 @@ } }, "node_modules/@esbuild/sunos-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.9.tgz", - "integrity": "sha512-WjH4s6hzo00nNezhp3wFIAfmGZ8U7KtrJNlFMRKxiI9mxEK1scOMAaa9i4crUtu+tBr+0IN6JCuAcSBJZfnphw==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.1.tgz", + "integrity": "sha512-IPUW+y4VIjuDVn+OMzHc5FV4GubIwPnsz6ubkvN8cuhEqH81NovB53IUlrlBkPMEPxvNnf79MGBoz8rZ2iW8HA==", "cpu": [ "x64" ], - "license": "MIT", "optional": true, "os": [ "sunos" @@ -1225,13 +1137,12 @@ } }, "node_modules/@esbuild/win32-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.9.tgz", - "integrity": "sha512-mGFrVJHmZiRqmP8xFOc6b84/7xa5y5YvR1x8djzXpJBSv/UsNK6aqec+6JDjConTgvvQefdGhFDAs2DLAds6gQ==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.1.tgz", + "integrity": "sha512-RIVRWiljWA6CdVu8zkWcRmGP7iRRIIwvhDKem8UMBjPql2TXM5PkDVvvrzMtj1V+WFPB4K7zkIGM7VzRtFkjdg==", "cpu": [ "arm64" ], - "license": "MIT", "optional": true, "os": [ "win32" @@ -1241,13 +1152,12 @@ } }, "node_modules/@esbuild/win32-ia32": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.9.tgz", - "integrity": "sha512-b33gLVU2k11nVx1OhX3C8QQP6UHQK4ZtN56oFWvVXvz2VkDoe6fbG8TOgHFxEvqeqohmRnIHe5A1+HADk4OQww==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.1.tgz", + "integrity": "sha512-2BR5M8CPbptC1AK5JbJT1fWrHLvejwZidKx3UMSF0ecHMa+smhi16drIrCEggkgviBwLYd5nwrFLSl5Kho96RQ==", "cpu": [ "ia32" ], - "license": "MIT", "optional": 
true, "os": [ "win32" @@ -1257,13 +1167,12 @@ } }, "node_modules/@esbuild/win32-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.9.tgz", - "integrity": "sha512-PPOl1mi6lpLNQxnGoyAfschAodRFYXJ+9fs6WHXz7CSWKbOqiMZsubC+BQsVKuul+3vKLuwTHsS2c2y9EoKwxQ==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.1.tgz", + "integrity": "sha512-d5X6RMYv6taIymSk8JBP+nxv8DQAMY6A51GPgusqLdK9wBz5wWIXy1KjTck6HnjE9hqJzJRdk+1p/t5soSbCtw==", "cpu": [ "x64" ], - "license": "MIT", "optional": true, "os": [ "win32" @@ -1273,9 +1182,9 @@ } }, "node_modules/@eslint-community/eslint-utils": { - "version": "4.8.0", - "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.8.0.tgz", - "integrity": "sha512-MJQFqrZgcW0UNYLGOuQpey/oTN59vyWwplvCGZztn1cKz9agZPPYpJB7h2OMmuu7VLqkvEjN8feFZJmxNF9D+Q==", + "version": "4.9.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.0.tgz", + "integrity": "sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g==", "dev": true, "license": "MIT", "dependencies": { @@ -1292,9 +1201,9 @@ } }, "node_modules/@eslint-community/regexpp": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz", - "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==", + "version": "4.12.2", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.2.tgz", + "integrity": "sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==", "dev": true, "license": "MIT", "engines": { @@ -1363,7 +1272,6 @@ "version": "2.3.6", "resolved": "https://registry.npmjs.org/@formatjs/ecma402-abstract/-/ecma402-abstract-2.3.6.tgz", "integrity": "sha512-HJnTFeRM2kVFVr5gr5kH1XP6K0JcJtE7Lzvtr3FS/so5f1kpsqqqxy5JF+FRaO6H2qmcMfAUIox7AJteieRtVw==", - "license": "MIT", "dependencies": { "@formatjs/fast-memoize": "2.2.7", "@formatjs/intl-localematcher": "0.6.2", @@ -1375,7 +1283,6 @@ "version": "2.2.7", "resolved": "https://registry.npmjs.org/@formatjs/fast-memoize/-/fast-memoize-2.2.7.tgz", "integrity": "sha512-Yabmi9nSvyOMrlSeGGWDiH7rf3a7sIwplbvo/dlz9WCIjzIQAfy1RMf4S0X3yG724n5Ghu2GmEl5NJIV6O9sZQ==", - "license": "MIT", "dependencies": { "tslib": "^2.8.0" } @@ -1384,7 +1291,6 @@ "version": "2.11.4", "resolved": "https://registry.npmjs.org/@formatjs/icu-messageformat-parser/-/icu-messageformat-parser-2.11.4.tgz", "integrity": "sha512-7kR78cRrPNB4fjGFZg3Rmj5aah8rQj9KPzuLsmcSn4ipLXQvC04keycTI1F7kJYDwIXtT2+7IDEto842CfZBtw==", - "license": "MIT", "dependencies": { "@formatjs/ecma402-abstract": "2.3.6", "@formatjs/icu-skeleton-parser": "1.8.16", @@ -1395,7 +1301,6 @@ "version": "1.8.16", "resolved": "https://registry.npmjs.org/@formatjs/icu-skeleton-parser/-/icu-skeleton-parser-1.8.16.tgz", "integrity": "sha512-H13E9Xl+PxBd8D5/6TVUluSpxGNvFSlN/b3coUp0e0JpuWXXnQDiavIpY3NnvSp4xhEMoXyyBvVfdFX8jglOHQ==", - "license": "MIT", "dependencies": { "@formatjs/ecma402-abstract": "2.3.6", "tslib": "^2.8.0" @@ -1405,21 +1310,19 @@ "version": "0.6.2", "resolved": "https://registry.npmjs.org/@formatjs/intl-localematcher/-/intl-localematcher-0.6.2.tgz", "integrity": "sha512-XOMO2Hupl0wdd172Y06h6kLpBz6Dv+J4okPLl4LPtzbr8f66WbIoy4ev98EBuZ6ZK4h5ydTN6XneT4QVpD7cdA==", - "license": "MIT", "dependencies": { "tslib": "^2.8.0" } }, "node_modules/@heroui/accordion": { - "version": 
"2.2.24", - "resolved": "https://registry.npmjs.org/@heroui/accordion/-/accordion-2.2.24.tgz", - "integrity": "sha512-iVJVKKsGN4t3hn4Exwic6n5SOQOmmmsodSsCt0VUcs5VTHu9876sAC44xlEMpc9CP8pC1wQS3DzWl3mN6Z120g==", - "license": "MIT", + "version": "2.2.25", + "resolved": "https://registry.npmjs.org/@heroui/accordion/-/accordion-2.2.25.tgz", + "integrity": "sha512-cukvjTXfSLxjCZJ2PwLYUdkJuzKgKfbYkA+l2yvtYfrAQ8G0uz8a+tAGKGcciVLtYke1KsZ/pKjbpInWgGUV7A==", "dependencies": { - "@heroui/aria-utils": "2.2.24", - "@heroui/divider": "2.2.20", + "@heroui/aria-utils": "2.2.25", + "@heroui/divider": "2.2.21", "@heroui/dom-animation": "2.1.10", - "@heroui/framer-utils": "2.1.23", + "@heroui/framer-utils": "2.1.24", "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", @@ -1432,26 +1335,18 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/accordion/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/alert": { - "version": "2.2.27", - "resolved": "https://registry.npmjs.org/@heroui/alert/-/alert-2.2.27.tgz", - "integrity": "sha512-Y6oX9SV//tdhxhpgkSZvnjwdx7d8S7RAhgVlxCs2Hla//nCFC3yiMHIv8UotTryAGdOwZIsffmcna9vqbNL5vw==", - "license": "MIT", + "version": "2.2.28", + "resolved": "https://registry.npmjs.org/@heroui/alert/-/alert-2.2.28.tgz", + "integrity": "sha512-1FgaRWCSj2/s8L1DyQR0ao8cfdC60grC1EInNoqAyvcSJt6j9gK/zWKZTQn+NXDjV2N14dG+b7EjMUc8cJnUjA==", "dependencies": { - "@heroui/button": "2.2.27", + "@heroui/button": "2.2.28", "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", @@ -1459,25 +1354,17 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.19", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/alert/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/aria-utils": { - "version": "2.2.24", - "resolved": "https://registry.npmjs.org/@heroui/aria-utils/-/aria-utils-2.2.24.tgz", - "integrity": "sha512-Y7FfQl2jvJr8JjpH+iuJElDwbn3eSWohuxHg6e5+xk5GcPYrEecgr0F/9qD6VU8IvVrRzJ00JzmT87lgA5iE3Q==", - "license": "MIT", + "version": "2.2.25", + "resolved": "https://registry.npmjs.org/@heroui/aria-utils/-/aria-utils-2.2.25.tgz", + "integrity": "sha512-7ofC3q6qVksIIJMJu3X07oQKrVijw+eaE4LV8AHY/wRl1FFxuTwhxQmjW5JGsGQ0iwlzxf4D5rogYa4YCUcFag==", "dependencies": { - "@heroui/system": "2.4.23", + "@heroui/system": "2.4.24", "@react-aria/utils": "3.31.0", "@react-stately/collections": "3.12.8", "@react-types/overlays": "3.9.2", @@ -1489,19 +1376,18 @@ } }, "node_modules/@heroui/autocomplete": { - "version": "2.3.29", - "resolved": "https://registry.npmjs.org/@heroui/autocomplete/-/autocomplete-2.3.29.tgz", - "integrity": 
"sha512-BQkiWrrhPbNMFF1Hd60QDyG4iwD+sdsjWh0h7sw2XhcT6Bjw/6Hqpf4eHsTvPElW/554vPZVtChjugRY1N2zsw==", - "license": "MIT", - "dependencies": { - "@heroui/aria-utils": "2.2.24", - "@heroui/button": "2.2.27", - "@heroui/form": "2.1.27", - "@heroui/input": "2.4.28", - "@heroui/listbox": "2.3.26", - "@heroui/popover": "2.3.27", + "version": "2.3.30", + "resolved": "https://registry.npmjs.org/@heroui/autocomplete/-/autocomplete-2.3.30.tgz", + "integrity": "sha512-TT5p/EybRdxRs9g3DZGHYVpp4Sgs1X0kLZvc7qO4hzNyKEqmBOx8VESVZs43ZVmLxVWf7fOd3kbGVt9Sbm2U8A==", + "dependencies": { + "@heroui/aria-utils": "2.2.25", + "@heroui/button": "2.2.28", + "@heroui/form": "2.1.28", + "@heroui/input": "2.4.29", + "@heroui/listbox": "2.3.27", + "@heroui/popover": "2.3.28", "@heroui/react-utils": "2.1.14", - "@heroui/scroll-shadow": "2.3.18", + "@heroui/scroll-shadow": "2.3.19", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", "@heroui/use-safe-layout-effect": "2.1.8", @@ -1513,24 +1399,16 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/autocomplete/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/avatar": { - "version": "2.2.22", - "resolved": "https://registry.npmjs.org/@heroui/avatar/-/avatar-2.2.22.tgz", - "integrity": "sha512-znmKdsrVj91Fg8+wm/HA/b8zi3iAg5g3MezliBfS2PmwgZcpBR6VtwgeeP6uN49+TR+faGIrck0Zxceuw4U0FQ==", - "license": "MIT", + "version": "2.2.23", + "resolved": "https://registry.npmjs.org/@heroui/avatar/-/avatar-2.2.23.tgz", + "integrity": "sha512-YBnb4v1cc/1kZTBx0AH0QNbEno+BhN/zdhxVRJDDI32aVvZhMpR90m7zTG4ma9oetOpCZ0pDeGKenlR9Ack4xg==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", @@ -1540,46 +1418,30 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/avatar/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/badge": { - "version": "2.2.17", - "resolved": "https://registry.npmjs.org/@heroui/badge/-/badge-2.2.17.tgz", - "integrity": "sha512-UNILRsAIJn+B6aWml+Rv2QCyYB7sadNqRPDPzNeVKJd8j3MNgZyyEHDwvqM2FWrgGccQIuWFaUgGdnPxRJpwwg==", - "license": "MIT", + "version": "2.2.18", + "resolved": "https://registry.npmjs.org/@heroui/badge/-/badge-2.2.18.tgz", + "integrity": "sha512-OfGove8YJ9oDrdugzq05FC15ZKD5nzqe+thPZ+1SY1LZorJQjZvqSD9QnoEH1nG7fu2IdH6pYJy3sZ/b6Vj5Kg==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/badge/node_modules/@heroui/shared-utils": { - 
"version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/breadcrumbs": { - "version": "2.2.22", - "resolved": "https://registry.npmjs.org/@heroui/breadcrumbs/-/breadcrumbs-2.2.22.tgz", - "integrity": "sha512-2fWfpbwhRPeC99Kuzu+DnzOYL4TOkDm9sznvSj0kIAbw/Rvl+D2/6fmBOaTRIUXfswWpHVRUCcNYczIAp0PkoA==", - "license": "MIT", + "version": "2.2.23", + "resolved": "https://registry.npmjs.org/@heroui/breadcrumbs/-/breadcrumbs-2.2.23.tgz", + "integrity": "sha512-trWtN/Ci2NTNRGvIxT8hdOml6med9F3HaCszqyVg3zroh6ZqV3iMPL3u4xRnAe0GLPsGwWFUnao7jbouU+avHw==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", @@ -1590,28 +1452,20 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/breadcrumbs/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/button": { - "version": "2.2.27", - "resolved": "https://registry.npmjs.org/@heroui/button/-/button-2.2.27.tgz", - "integrity": "sha512-Fxb8rtjPQm9T4GAtB1oW2QMUiQCtn7EtvO5AN41ANxAgmsNMM5wnLTkxQ05vNueCrp47kTDtSuyMhKU2llATHQ==", - "license": "MIT", + "version": "2.2.28", + "resolved": "https://registry.npmjs.org/@heroui/button/-/button-2.2.28.tgz", + "integrity": "sha512-B4SSMeKXrbENs4VQ3U/MF+RTncPCU3DPYLYhhrDVVo/LXUIcN/KU/mJwF89eYQjvFXVyaZphC+i/5yLiN3uDcw==", "dependencies": { "@heroui/react-utils": "2.1.14", - "@heroui/ripple": "2.2.20", + "@heroui/ripple": "2.2.21", "@heroui/shared-utils": "2.1.12", - "@heroui/spinner": "2.2.24", + "@heroui/spinner": "2.2.25", "@heroui/use-aria-button": "2.2.20", "@react-aria/focus": "3.21.2", "@react-aria/interactions": "3.25.6", @@ -1619,28 +1473,20 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/button/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/calendar": { - "version": "2.2.27", - "resolved": "https://registry.npmjs.org/@heroui/calendar/-/calendar-2.2.27.tgz", - "integrity": "sha512-VtyXQSoT9u9tC4HjBkJIaSSmhau1LwPUwvof0LjYDpBfTsJKqn+308wI3nAp75BTbAkK+vFM8LI0VfbALCwR4Q==", - "license": "MIT", + "version": "2.2.28", + "resolved": "https://registry.npmjs.org/@heroui/calendar/-/calendar-2.2.28.tgz", + "integrity": "sha512-iJ1jOljJQCgowGLesl27LPh44JjwYLyxuqwIIJqBspiARdtbCWyVRTXb5RaphnbNcZFDuYhyadkVtzZOYVUn8g==", "dependencies": { - "@heroui/button": "2.2.27", + "@heroui/button": "2.2.28", "@heroui/dom-animation": "2.1.10", - "@heroui/framer-utils": "2.1.23", + "@heroui/framer-utils": "2.1.24", 
"@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", @@ -1660,27 +1506,19 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/calendar/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/card": { - "version": "2.2.25", - "resolved": "https://registry.npmjs.org/@heroui/card/-/card-2.2.25.tgz", - "integrity": "sha512-dtd/G24zePIHPutRIxWC69IO3IGJs8X+zh9rBYM9cY5Q972D8Eet5WdWTfDBhw//fFIoagDAs5YcI9emGczGaQ==", - "license": "MIT", + "version": "2.2.26", + "resolved": "https://registry.npmjs.org/@heroui/card/-/card-2.2.26.tgz", + "integrity": "sha512-L+q1VLhEqA/s8o3DchojwtA66IE4MZzAhhPqivBD+mYCVtrCaueDMlU1q0o73SO2iloemRz33T5s4Uyf+1b8Bg==", "dependencies": { "@heroui/react-utils": "2.1.14", - "@heroui/ripple": "2.2.20", + "@heroui/ripple": "2.2.21", "@heroui/shared-utils": "2.1.12", "@heroui/use-aria-button": "2.2.20", "@react-aria/focus": "3.21.2", @@ -1689,26 +1527,18 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/card/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/checkbox": { - "version": "2.3.27", - "resolved": "https://registry.npmjs.org/@heroui/checkbox/-/checkbox-2.3.27.tgz", - "integrity": "sha512-YC0deiB7EOzcpJtk9SdySugD1Z2TNtfyYee2voDBHrng7ZBRB+cmAvizXINHnaQGFi0yuVPrZ5ixR/wsvTNW+Q==", - "license": "MIT", + "version": "2.3.28", + "resolved": "https://registry.npmjs.org/@heroui/checkbox/-/checkbox-2.3.28.tgz", + "integrity": "sha512-lbnPihxNJXVxvpJeta6o17k7vu6fSvR6w+JsT/s5iurKk5qrkCrNBXmIZYdKJ43MmG3C/A0FWh3uNhZOM5Q04Q==", "dependencies": { - "@heroui/form": "2.1.27", + "@heroui/form": "2.1.28", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@heroui/use-callback-ref": "2.1.8", @@ -1723,23 +1553,15 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/checkbox/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/chip": { - "version": "2.2.22", - "resolved": "https://registry.npmjs.org/@heroui/chip/-/chip-2.2.22.tgz", - "integrity": "sha512-6O4Sv1chP+xxftp7E5gHUJIzo04ML9BW9N9jjxWCqT0Qtl+a/ZxnDalCyup6oraMiVLLHp+zEVX93C+3LONgkg==", - "license": "MIT", + "version": "2.2.23", + 
"resolved": "https://registry.npmjs.org/@heroui/chip/-/chip-2.2.23.tgz", + "integrity": "sha512-25HTWX5j9o0suoCYBiEo87ZoTt9VQfca+DSqphNMXHpbCQ0u26fL+8/jjehoYPtySJiLigwQeZn8BEjWWO3pGg==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", @@ -1749,48 +1571,32 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/chip/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/code": { - "version": "2.2.21", - "resolved": "https://registry.npmjs.org/@heroui/code/-/code-2.2.21.tgz", - "integrity": "sha512-ExHcfTGr9tCbAaBOfMzTla8iHHfwIV5/xRk4WApeVmL4MiIlLMykc9bSi1c88ltaJInQGFAmE6MOFHXuGHxBXw==", - "license": "MIT", + "version": "2.2.22", + "resolved": "https://registry.npmjs.org/@heroui/code/-/code-2.2.22.tgz", + "integrity": "sha512-i3pDe5Mzzh04jVx0gFwi2NMtCmsYfIRhLvkebXQcmfUDYl0+IGRJLcBsrWoOzes0pE/s7yyv+yJ/VhoU8F5jcg==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", - "@heroui/system-rsc": "2.3.20" + "@heroui/system-rsc": "2.3.21" }, "peerDependencies": { - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/code/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/date-input": { - "version": "2.3.27", - "resolved": "https://registry.npmjs.org/@heroui/date-input/-/date-input-2.3.27.tgz", - "integrity": "sha512-IxvZYezbR9jRxTWdsuHH47nsnB6RV1HPY7VwiJd9ZCy6P6oUV0Rx3cdwIRtUnyXbvz1G7+I22NL4C2Ku194l8A==", - "license": "MIT", + "version": "2.3.28", + "resolved": "https://registry.npmjs.org/@heroui/date-input/-/date-input-2.3.28.tgz", + "integrity": "sha512-fzdfo9QMY9R+XffcuLOXXliM87eEu5Hz2wsUnsEAakXEbzAkFfzdSd72DRAbIiTD7yzSvaoyJHVAJ71+3/tCQg==", "dependencies": { - "@heroui/form": "2.1.27", + "@heroui/form": "2.1.28", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@internationalized/date": "3.10.0", @@ -1802,30 +1608,22 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/date-input/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/date-picker": { - "version": "2.3.28", - "resolved": "https://registry.npmjs.org/@heroui/date-picker/-/date-picker-2.3.28.tgz", - "integrity": "sha512-duKvXijabpafxU04sItrozf982tXkUDymcT3SoEvW4LDg6bECgPI8bYNN49hlzkI8+zuwJdKzJ4hDmANGVaL8Q==", - "license": "MIT", - "dependencies": { - "@heroui/aria-utils": 
"2.2.24", - "@heroui/button": "2.2.27", - "@heroui/calendar": "2.2.27", - "@heroui/date-input": "2.3.27", - "@heroui/form": "2.1.27", - "@heroui/popover": "2.3.27", + "version": "2.3.29", + "resolved": "https://registry.npmjs.org/@heroui/date-picker/-/date-picker-2.3.29.tgz", + "integrity": "sha512-kSvFjNuST2UhlDjDMvOHlbixyTsb4Dm7QNTXxeQGyKd6D5bUaBRzVSNaLnJ6Od/nEh30xqy3lZEq6nT5VqupMA==", + "dependencies": { + "@heroui/aria-utils": "2.2.25", + "@heroui/button": "2.2.28", + "@heroui/calendar": "2.2.28", + "@heroui/date-input": "2.3.28", + "@heroui/form": "2.1.28", + "@heroui/popover": "2.3.28", "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", @@ -1839,31 +1637,23 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/date-picker/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/divider": { - "version": "2.2.20", - "resolved": "https://registry.npmjs.org/@heroui/divider/-/divider-2.2.20.tgz", - "integrity": "sha512-t+NNJ2e5okZraLKQoj+rS2l49IMy5AeXTixjsR+QRZ/WPrETNpMj4lw5cBSxG0i7WhRhlBa+KgqweUUezvCdAg==", - "license": "MIT", + "version": "2.2.21", + "resolved": "https://registry.npmjs.org/@heroui/divider/-/divider-2.2.21.tgz", + "integrity": "sha512-aVvl8/3fWUc+/fHbg+hD/0wrkoMKmXG0yRgyNrJSeu0pkRwhb0eD4ZjnBK1pCYqnstoltNE33J8ko/sU+WlmPw==", "dependencies": { "@heroui/react-rsc-utils": "2.1.9", - "@heroui/system-rsc": "2.3.20", + "@heroui/system-rsc": "2.3.21", "@react-types/shared": "3.32.1" }, "peerDependencies": { - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } @@ -1872,45 +1662,35 @@ "version": "2.1.10", "resolved": "https://registry.npmjs.org/@heroui/dom-animation/-/dom-animation-2.1.10.tgz", "integrity": "sha512-dt+0xdVPbORwNvFT5pnqV2ULLlSgOJeqlg/DMo97s9RWeD6rD4VedNY90c8C9meqWqGegQYBQ9ztsfX32mGEPA==", - "license": "MIT", "peerDependencies": { "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1" } }, "node_modules/@heroui/drawer": { - "version": "2.2.24", - "resolved": "https://registry.npmjs.org/@heroui/drawer/-/drawer-2.2.24.tgz", - "integrity": "sha512-gb51Lj9A8jlL1UvUrQ+MLS9tz+Qw+cdXwIJd39RXDkJwDmxqhzkz+WoOPZZwcOAHtATmwlTuxxlv6Cro59iswg==", - "license": "MIT", + "version": "2.2.25", + "resolved": "https://registry.npmjs.org/@heroui/drawer/-/drawer-2.2.25.tgz", + "integrity": "sha512-+TFagy61+8dm+EWXLY5NJUGJ4COPL4anRiynw92iSD+arKUGN5b6lJUnjf9NkqwM5jqWKk1vxWdGDZEKZva8Bg==", "dependencies": { - "@heroui/framer-utils": "2.1.23", - "@heroui/modal": "2.2.24", + "@heroui/framer-utils": "2.1.24", + "@heroui/modal": "2.2.25", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/drawer/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - 
"integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/dropdown": { - "version": "2.3.27", - "resolved": "https://registry.npmjs.org/@heroui/dropdown/-/dropdown-2.3.27.tgz", - "integrity": "sha512-6aedMmxC+St5Ixz9o3s0ERkLOR6ZQE2uRccmRchPCEt7ZJU6TAeJo7fSpxIvdEUjFDe+pNhR2ojIocZEXtBZZg==", - "license": "MIT", + "version": "2.3.28", + "resolved": "https://registry.npmjs.org/@heroui/dropdown/-/dropdown-2.3.28.tgz", + "integrity": "sha512-q+bSLxdsHtauqpQ4529cSkjj8L20UdvbrRGmhRL3YLZyLEzGcCCp6kDRCchkCpTaxK7u869eF9TGSNoFeum92g==", "dependencies": { - "@heroui/aria-utils": "2.2.24", - "@heroui/menu": "2.2.26", - "@heroui/popover": "2.3.27", + "@heroui/aria-utils": "2.2.25", + "@heroui/menu": "2.2.27", + "@heroui/popover": "2.3.28", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@react-aria/focus": "3.21.2", @@ -1920,53 +1700,37 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/dropdown/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/form": { - "version": "2.1.27", - "resolved": "https://registry.npmjs.org/@heroui/form/-/form-2.1.27.tgz", - "integrity": "sha512-vtaBqWhxppkJeWgbAZA/A1bRj6XIudBqJWSkoqYlejtLuvaxNwxQ2Z9u7ewxN96R6QqPrQwChlknIn0NgCWlXQ==", - "license": "MIT", + "version": "2.1.28", + "resolved": "https://registry.npmjs.org/@heroui/form/-/form-2.1.28.tgz", + "integrity": "sha512-skg9GooN1+rgQwM0/7wNqUenq6JBEf3T2tDBItJU/oeNC9oaX00JDpy8rpMz9zS0oUqfbJ0auT11+0FRo2W6CQ==", "dependencies": { "@heroui/shared-utils": "2.1.12", - "@heroui/system": "2.4.23", - "@heroui/theme": "2.4.23", + "@heroui/system": "2.4.24", + "@heroui/theme": "2.4.24", "@react-stately/form": "3.2.2", "@react-types/form": "3.7.16", "@react-types/shared": "3.32.1" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18", "react-dom": ">=18" } }, - "node_modules/@heroui/form/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/framer-utils": { - "version": "2.1.23", - "resolved": "https://registry.npmjs.org/@heroui/framer-utils/-/framer-utils-2.1.23.tgz", - "integrity": "sha512-crLLMjRmxs8/fysFv5gwghSGcDmYYkhNfAWh1rFzDy+FRPZN4f/bPH2rt85hdApmuHbWt0QCocqsrjHxLEzrAw==", - "license": "MIT", + "version": "2.1.24", + "resolved": "https://registry.npmjs.org/@heroui/framer-utils/-/framer-utils-2.1.24.tgz", + "integrity": "sha512-PiHEV8YS3Q0ve3ZnTASVvTeBK0fTFdLtLiPtCuLucC2WGeDFjUerE7++Y+HhWB85Jj/USknEpl0aGsatl3cbgg==", "dependencies": { - "@heroui/system": "2.4.23", + "@heroui/system": "2.4.24", "@heroui/use-measure": "2.1.8" }, "peerDependencies": { @@ -1976,10 +1740,9 @@ } }, "node_modules/@heroui/image": { - "version": 
"2.2.17", - "resolved": "https://registry.npmjs.org/@heroui/image/-/image-2.2.17.tgz", - "integrity": "sha512-B/MrWafTsiCBFnRc0hPTLDBh7APjb/lRuQf18umuh20/1n6KiQXJ7XGSjnrHaA6HQcrtMGh6mDFZDaXq9rHuoA==", - "license": "MIT", + "version": "2.2.18", + "resolved": "https://registry.npmjs.org/@heroui/image/-/image-2.2.18.tgz", + "integrity": "sha512-hrvj/hDM0+Khb9EqstZOPeO0vIGZvhrJWPMxk7a6i2PqhWWQI+ws+nrwsG5XqAkwE4mqqf9Uw8EMfIG1XE5YYg==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", @@ -1987,25 +1750,17 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/image/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/input": { - "version": "2.4.28", - "resolved": "https://registry.npmjs.org/@heroui/input/-/input-2.4.28.tgz", - "integrity": "sha512-uaBubg814YOlVvX13yCAMqsR9HC4jg+asQdukbOvOnFtHY/d53her1BDdXhR9tMcrRTdYWQ3FoHqWbpvd5X4OQ==", - "license": "MIT", + "version": "2.4.29", + "resolved": "https://registry.npmjs.org/@heroui/input/-/input-2.4.29.tgz", + "integrity": "sha512-PIjFmN6BTLvnlI0I9f7PjxvnviauOczRJGaTnlHKDniknoh7mi8j0voXwL/f6BAkVKrgpT5JiFvdjq6og+cfSA==", "dependencies": { - "@heroui/form": "2.1.27", + "@heroui/form": "2.1.28", "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", @@ -2020,18 +1775,17 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.19", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/input-otp": { - "version": "2.1.27", - "resolved": "https://registry.npmjs.org/@heroui/input-otp/-/input-otp-2.1.27.tgz", - "integrity": "sha512-VUzQ1u6/0okE0eqDx/2I/8zpGItSsn7Zml01IVwGM4wY2iJeQA+uRjfP+B1ff9jO/y8n582YU4uv/ZSOmmEQ7A==", - "license": "MIT", + "version": "2.1.28", + "resolved": "https://registry.npmjs.org/@heroui/input-otp/-/input-otp-2.1.28.tgz", + "integrity": "sha512-IHr35WqOHb8SBoMXYt6wxzKQg8iFMdc7iqFa8jqdshfVIS3bvxvJj6PGND3LoZxrRFplCv12lfmp2fWymQLleA==", "dependencies": { - "@heroui/form": "2.1.27", + "@heroui/form": "2.1.28", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@heroui/use-form-reset": "2.0.1", @@ -2044,53 +1798,30 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18", "react-dom": ">=18" } }, - "node_modules/@heroui/input-otp/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, - "node_modules/@heroui/input/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/kbd": { - 
"version": "2.2.22", - "resolved": "https://registry.npmjs.org/@heroui/kbd/-/kbd-2.2.22.tgz", - "integrity": "sha512-PKhgwGB7i53kBuqB1YdFZsg7H9fJ8YESMRRPwRRyPSz5feMdwGidyXs+/ix7lrlYp4mlC3wtPp7L79SEyPCpBA==", - "license": "MIT", + "version": "2.2.23", + "resolved": "https://registry.npmjs.org/@heroui/kbd/-/kbd-2.2.23.tgz", + "integrity": "sha512-nKL1Kl044l1Xsk4U8Nib3wFD2NlZCZo6kdqiqUv+DchOo4s3BJcxWSWqHn6fDVmHNyj3DFMYDvA2f/geMasaHQ==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", - "@heroui/system-rsc": "2.3.20" + "@heroui/system-rsc": "2.3.21" }, "peerDependencies": { - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/kbd/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/link": { - "version": "2.2.23", - "resolved": "https://registry.npmjs.org/@heroui/link/-/link-2.2.23.tgz", - "integrity": "sha512-lObtPRLy8ModlTvJiKhczuAV/CIt31hde6xPGFYRpPsaQN1b7RgQMmai5/Iv/M8WrzFmFZRpgW75RKYIB6hHVQ==", - "license": "MIT", + "version": "2.2.24", + "resolved": "https://registry.npmjs.org/@heroui/link/-/link-2.2.24.tgz", + "integrity": "sha512-rxtSC/8++wCtZs2GqBCukQHtDAbqB5bXT24v03q86oz7VOlbn8pox38LwFKrb/H+A3o+BjSKuTJsYidJcQ5clg==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", @@ -2101,26 +1832,18 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/link/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/listbox": { - "version": "2.3.26", - "resolved": "https://registry.npmjs.org/@heroui/listbox/-/listbox-2.3.26.tgz", - "integrity": "sha512-/k3k+xyl2d+aFfT02h+/0njhsDX8vJDEkPK+dl9ETYI9Oz3L+xbHN9yIzuWjBXYkNGlQCjQ46N+0jWjhP5B4pA==", - "license": "MIT", + "version": "2.3.27", + "resolved": "https://registry.npmjs.org/@heroui/listbox/-/listbox-2.3.27.tgz", + "integrity": "sha512-NUBDwP9Xzx3A/0iX/09hhs4/y8Loo+bCTm/vqFqYyufR8AOGLw1Xn0poTybPfE4L5U+6Y1P7GM0VjgZVw9dFQQ==", "dependencies": { - "@heroui/aria-utils": "2.2.24", - "@heroui/divider": "2.2.20", + "@heroui/aria-utils": "2.2.25", + "@heroui/divider": "2.2.21", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@heroui/use-is-mobile": "2.2.12", @@ -2133,26 +1856,18 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/listbox/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, 
"node_modules/@heroui/menu": { - "version": "2.2.26", - "resolved": "https://registry.npmjs.org/@heroui/menu/-/menu-2.2.26.tgz", - "integrity": "sha512-raR5pXgEqizKD9GsWS1yKqTm4RPWMrSQlqXLE2zNMQk0TkDqmPVw1z5griMqu2Zt9Vf2Ectf55vh4c0DNOUGlg==", - "license": "MIT", + "version": "2.2.27", + "resolved": "https://registry.npmjs.org/@heroui/menu/-/menu-2.2.27.tgz", + "integrity": "sha512-Ifsb9QBVpAFFcIEEcp3nU28DBtIU0iI7B5HHpblHDJoDtjIbkyNOnyxoEj8eX63QTWQcKrmNnFYdtsrtS9K1RA==", "dependencies": { - "@heroui/aria-utils": "2.2.24", - "@heroui/divider": "2.2.20", + "@heroui/aria-utils": "2.2.25", + "@heroui/divider": "2.2.21", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@heroui/use-is-mobile": "2.2.12", @@ -2165,26 +1880,18 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/menu/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/modal": { - "version": "2.2.24", - "resolved": "https://registry.npmjs.org/@heroui/modal/-/modal-2.2.24.tgz", - "integrity": "sha512-ISbgorNqgps9iUvQdgANxprdN+6H3Sx9TrGKpuW798qjc2f0T4rTbjrEfFPT8tFx6XYF4P5j7T7m3zoKcortHQ==", - "license": "MIT", + "version": "2.2.25", + "resolved": "https://registry.npmjs.org/@heroui/modal/-/modal-2.2.25.tgz", + "integrity": "sha512-qoUk0fe/GMbKHUWcW8XThp+TifEG6GgmpBKZ4x8hhM5o/t1cKAD4+F2pKahtih0ba5qjM+tFtwnUV7z7Mt8+xg==", "dependencies": { "@heroui/dom-animation": "2.1.10", - "@heroui/framer-utils": "2.1.23", + "@heroui/framer-utils": "2.1.24", "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", @@ -2200,27 +1907,19 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/modal/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/navbar": { - "version": "2.2.25", - "resolved": "https://registry.npmjs.org/@heroui/navbar/-/navbar-2.2.25.tgz", - "integrity": "sha512-5fNIMDpX2htDTMb/Xgv81qw/FuNWb+0Wpfc6rkFtNYd968I7G6Kjm782QB8WQjZ8DsMugcLEYUN4lpbJHRSdwg==", - "license": "MIT", + "version": "2.2.26", + "resolved": "https://registry.npmjs.org/@heroui/navbar/-/navbar-2.2.26.tgz", + "integrity": "sha512-uQhISgbQgea1ki0et3hDJ8+IXc35zMNowRQTKgWeEF8T3yS5X2fKuLzJc7/cf0vUGnxH0FPB3Z5Cb7o1nwjr9A==", "dependencies": { "@heroui/dom-animation": "2.1.10", - "@heroui/framer-utils": "2.1.23", + "@heroui/framer-utils": "2.1.24", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@heroui/use-resize": "2.1.8", @@ -2234,27 +1933,19 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": 
">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/navbar/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/number-input": { - "version": "2.0.18", - "resolved": "https://registry.npmjs.org/@heroui/number-input/-/number-input-2.0.18.tgz", - "integrity": "sha512-28v0/0FABs+yy3CcJimcr5uNlhaJSyKt1ENMSXfzPxdN2WgIs14+6NLMT+KV7ibcJl7kmqG0uc8vuIDLVrM5bQ==", - "license": "MIT", + "version": "2.0.19", + "resolved": "https://registry.npmjs.org/@heroui/number-input/-/number-input-2.0.19.tgz", + "integrity": "sha512-5UHdznU9XIqjRH17dG277YQrTnUeifWmHdU76Jzf78+SVsJgQdLqcRINHPVj382q0kd6vLMzc4Hyb2fQ0g2WXg==", "dependencies": { - "@heroui/button": "2.2.27", - "@heroui/form": "2.1.27", + "@heroui/button": "2.2.28", + "@heroui/form": "2.1.28", "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", @@ -2270,23 +1961,15 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.19", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/number-input/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/pagination": { - "version": "2.2.24", - "resolved": "https://registry.npmjs.org/@heroui/pagination/-/pagination-2.2.24.tgz", - "integrity": "sha512-5ObSJ1PzB9D1CjHV0MfDNzLR69vSYpx/rNQLBo/D4g5puaAR7kkGgw5ncf5eirhdKuy9y8VGAhjwhBxO4NUdpQ==", - "license": "MIT", + "version": "2.2.25", + "resolved": "https://registry.npmjs.org/@heroui/pagination/-/pagination-2.2.25.tgz", + "integrity": "sha512-PQZMNQ7wiv++cLEpEXDAdID3IQE2FlG1UkcuYhVYLPJgGSxoKKcM81wmE/HYMgmIMXySiZ+9E/UM8HATrpvTzA==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", @@ -2301,28 +1984,20 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/pagination/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/popover": { - "version": "2.3.27", - "resolved": "https://registry.npmjs.org/@heroui/popover/-/popover-2.3.27.tgz", - "integrity": "sha512-PmSCKQcAvKIegK59Flr9cglbsEu7OAegQMtwNIjqWHsPT18NNphimmUSJrtuD78rcfKekrZ+Uo9qJEUf0zGZDw==", - "license": "MIT", + "version": "2.3.28", + "resolved": "https://registry.npmjs.org/@heroui/popover/-/popover-2.3.28.tgz", + "integrity": "sha512-0KHClVQVhLTCqUOtsKEZQ3dqPpNjd7qTISD2Ud3vACdLXprSLWmOzo2ItT6PAh881oIZnPS8l/0/jZ1ON/izdA==", "dependencies": { - "@heroui/aria-utils": "2.2.24", - "@heroui/button": "2.2.27", + "@heroui/aria-utils": "2.2.25", + "@heroui/button": "2.2.28", "@heroui/dom-animation": "2.1.10", - 
"@heroui/framer-utils": "2.1.23", + "@heroui/framer-utils": "2.1.24", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@heroui/use-aria-button": "2.2.20", @@ -2336,24 +2011,16 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/popover/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/progress": { - "version": "2.2.22", - "resolved": "https://registry.npmjs.org/@heroui/progress/-/progress-2.2.22.tgz", - "integrity": "sha512-ch+iWEDo8d+Owz81vu4+Kj6CLfxi0nUlivQBhXeOzgU3VZbRmxJyW8S6l7wk6GyKJZxsCbYbjV1wPSjZhKJXCg==", - "license": "MIT", + "version": "2.2.23", + "resolved": "https://registry.npmjs.org/@heroui/progress/-/progress-2.2.23.tgz", + "integrity": "sha512-5mfFPv5oW69yD5m/Y1cz0R+s4W8cwvLCZXzVtevoqyzkInNks8w2FKeGptkXcDeXVxqfhwDmNU4DXUmc4nRx3w==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", @@ -2363,25 +2030,17 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/progress/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/radio": { - "version": "2.3.27", - "resolved": "https://registry.npmjs.org/@heroui/radio/-/radio-2.3.27.tgz", - "integrity": "sha512-kfDxzPR0u4++lZX2Gf6wbEe/hGbFnoXI4XLbe4e+ZDjGdBSakNuJlcDvWHVoDFZH1xXyOO9w/dHfZuE6O2VGLA==", - "license": "MIT", + "version": "2.3.28", + "resolved": "https://registry.npmjs.org/@heroui/radio/-/radio-2.3.28.tgz", + "integrity": "sha512-qrzZpEXRl4EH3zKeCujyKeK2yvcvaOaosxdZnMrT2O7wxX9LeOp6ZPMwIdMFmJYj7iyPym2nUwFfQBne7JNuvA==", "dependencies": { - "@heroui/form": "2.1.27", + "@heroui/form": "2.1.28", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@react-aria/focus": "3.21.2", @@ -2394,73 +2053,65 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/radio/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/react": { - "version": "2.8.5", - "resolved": "https://registry.npmjs.org/@heroui/react/-/react-2.8.5.tgz", - "integrity": "sha512-cGiG0/DCPsYopa+zACFDmtx9LQDfY5KU58Tt82ELANhmKRyYAesAq9tSa01dG+MjOXUTUR6cxp5i5RmRn8rPYg==", - "license": "MIT", - "dependencies": { - "@heroui/accordion": "2.2.24", - "@heroui/alert": "2.2.27", - "@heroui/autocomplete": "2.3.29", - 
"@heroui/avatar": "2.2.22", - "@heroui/badge": "2.2.17", - "@heroui/breadcrumbs": "2.2.22", - "@heroui/button": "2.2.27", - "@heroui/calendar": "2.2.27", - "@heroui/card": "2.2.25", - "@heroui/checkbox": "2.3.27", - "@heroui/chip": "2.2.22", - "@heroui/code": "2.2.21", - "@heroui/date-input": "2.3.27", - "@heroui/date-picker": "2.3.28", - "@heroui/divider": "2.2.20", - "@heroui/drawer": "2.2.24", - "@heroui/dropdown": "2.3.27", - "@heroui/form": "2.1.27", - "@heroui/framer-utils": "2.1.23", - "@heroui/image": "2.2.17", - "@heroui/input": "2.4.28", - "@heroui/input-otp": "2.1.27", - "@heroui/kbd": "2.2.22", - "@heroui/link": "2.2.23", - "@heroui/listbox": "2.3.26", - "@heroui/menu": "2.2.26", - "@heroui/modal": "2.2.24", - "@heroui/navbar": "2.2.25", - "@heroui/number-input": "2.0.18", - "@heroui/pagination": "2.2.24", - "@heroui/popover": "2.3.27", - "@heroui/progress": "2.2.22", - "@heroui/radio": "2.3.27", - "@heroui/ripple": "2.2.20", - "@heroui/scroll-shadow": "2.3.18", - "@heroui/select": "2.4.28", - "@heroui/skeleton": "2.2.17", - "@heroui/slider": "2.4.24", - "@heroui/snippet": "2.2.28", - "@heroui/spacer": "2.2.21", - "@heroui/spinner": "2.2.24", - "@heroui/switch": "2.2.24", - "@heroui/system": "2.4.23", - "@heroui/table": "2.2.27", - "@heroui/tabs": "2.2.24", - "@heroui/theme": "2.4.23", - "@heroui/toast": "2.0.17", - "@heroui/tooltip": "2.2.24", - "@heroui/user": "2.2.22", + "version": "2.8.6", + "resolved": "https://registry.npmjs.org/@heroui/react/-/react-2.8.6.tgz", + "integrity": "sha512-iDmmkqZZtBqVqsSSZiV6PIWN3AEOZLQFXwt9Lob2Oy7gQQuFDP+iljg/ARc3fZ9JBNbJTfgGFdNjrnaFpPtRyw==", + "dependencies": { + "@heroui/accordion": "2.2.25", + "@heroui/alert": "2.2.28", + "@heroui/autocomplete": "2.3.30", + "@heroui/avatar": "2.2.23", + "@heroui/badge": "2.2.18", + "@heroui/breadcrumbs": "2.2.23", + "@heroui/button": "2.2.28", + "@heroui/calendar": "2.2.28", + "@heroui/card": "2.2.26", + "@heroui/checkbox": "2.3.28", + "@heroui/chip": "2.2.23", + "@heroui/code": "2.2.22", + "@heroui/date-input": "2.3.28", + "@heroui/date-picker": "2.3.29", + "@heroui/divider": "2.2.21", + "@heroui/drawer": "2.2.25", + "@heroui/dropdown": "2.3.28", + "@heroui/form": "2.1.28", + "@heroui/framer-utils": "2.1.24", + "@heroui/image": "2.2.18", + "@heroui/input": "2.4.29", + "@heroui/input-otp": "2.1.28", + "@heroui/kbd": "2.2.23", + "@heroui/link": "2.2.24", + "@heroui/listbox": "2.3.27", + "@heroui/menu": "2.2.27", + "@heroui/modal": "2.2.25", + "@heroui/navbar": "2.2.26", + "@heroui/number-input": "2.0.19", + "@heroui/pagination": "2.2.25", + "@heroui/popover": "2.3.28", + "@heroui/progress": "2.2.23", + "@heroui/radio": "2.3.28", + "@heroui/ripple": "2.2.21", + "@heroui/scroll-shadow": "2.3.19", + "@heroui/select": "2.4.29", + "@heroui/skeleton": "2.2.18", + "@heroui/slider": "2.4.25", + "@heroui/snippet": "2.2.29", + "@heroui/spacer": "2.2.22", + "@heroui/spinner": "2.2.25", + "@heroui/switch": "2.2.25", + "@heroui/system": "2.4.24", + "@heroui/table": "2.2.28", + "@heroui/tabs": "2.2.25", + "@heroui/theme": "2.4.24", + "@heroui/toast": "2.0.18", + "@heroui/tooltip": "2.2.25", + "@heroui/user": "2.2.23", "@react-aria/visually-hidden": "3.8.28" }, "peerDependencies": { @@ -2473,7 +2124,6 @@ "version": "2.1.9", "resolved": "https://registry.npmjs.org/@heroui/react-rsc-utils/-/react-rsc-utils-2.1.9.tgz", "integrity": "sha512-e77OEjNCmQxE9/pnLDDb93qWkX58/CcgIqdNAczT/zUP+a48NxGq2A2WRimvc1uviwaNL2StriE2DmyZPyYW7Q==", - "license": "MIT", "peerDependencies": { "react": ">=18 || >=19.0.0-rc.0" } @@ -2482,7 +2132,6 
@@ "version": "2.1.14", "resolved": "https://registry.npmjs.org/@heroui/react-utils/-/react-utils-2.1.14.tgz", "integrity": "sha512-hhKklYKy9sRH52C9A8P0jWQ79W4MkIvOnKBIuxEMHhigjfracy0o0lMnAUdEsJni4oZKVJYqNGdQl+UVgcmeDA==", - "license": "MIT", "dependencies": { "@heroui/react-rsc-utils": "2.1.9", "@heroui/shared-utils": "2.1.12" @@ -2491,42 +2140,26 @@ "react": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/react-utils/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/ripple": { - "version": "2.2.20", - "resolved": "https://registry.npmjs.org/@heroui/ripple/-/ripple-2.2.20.tgz", - "integrity": "sha512-3+fBx5jO7l8SE84ZG0vB5BOxKKr23Ay180AeIWcf8m8lhXXd4iShVz2S+keW9PewqVHv52YBaxLoSVQ93Ddcxw==", - "license": "MIT", + "version": "2.2.21", + "resolved": "https://registry.npmjs.org/@heroui/ripple/-/ripple-2.2.21.tgz", + "integrity": "sha512-wairSq9LnhbIqTCJmUlJAQURQ1wcRK/L8pjg2s3R/XnvZlPXHy4ZzfphiwIlTI21z/f6tH3arxv/g1uXd1RY0g==", "dependencies": { "@heroui/dom-animation": "2.1.10", "@heroui/shared-utils": "2.1.12" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/ripple/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/scroll-shadow": { - "version": "2.3.18", - "resolved": "https://registry.npmjs.org/@heroui/scroll-shadow/-/scroll-shadow-2.3.18.tgz", - "integrity": "sha512-P/nLQbFPOlbTLRjO2tKoZCljJtU7iq81wsp7C8wZ1rZI1RmkTx3UgLLeoFWgmAp3ZlUIYgaewTnejt6eRx+28w==", - "license": "MIT", + "version": "2.3.19", + "resolved": "https://registry.npmjs.org/@heroui/scroll-shadow/-/scroll-shadow-2.3.19.tgz", + "integrity": "sha512-y5mdBlhiITVrFnQTDqEphYj7p5pHqoFSFtVuRRvl9wUec2lMxEpD85uMGsfL8OgQTKIAqGh2s6M360+VJm7ajQ==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", @@ -2534,33 +2167,25 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/scroll-shadow/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/select": { - "version": "2.4.28", - "resolved": "https://registry.npmjs.org/@heroui/select/-/select-2.4.28.tgz", - "integrity": "sha512-Dg3jv248Tu+g2WJMWseDjWA0FAG356elZIcE0OufVAIzQoWjLhgbkTqY9ths0HkcHy0nDwQWvyrrwkbif1kNqA==", - "license": "MIT", - "dependencies": { - "@heroui/aria-utils": "2.2.24", - "@heroui/form": "2.1.27", - "@heroui/listbox": "2.3.26", - "@heroui/popover": "2.3.27", + "version": "2.4.29", + "resolved": 
"https://registry.npmjs.org/@heroui/select/-/select-2.4.29.tgz", + "integrity": "sha512-rFsI+UNUtK6WTm6oDM8A45tu8rDqt1zHoSoBQ8RJDkRITDcKRBTaTnvJI/Ez+kMRNH4fQ45LgoSPxw/JOOMg4w==", + "dependencies": { + "@heroui/aria-utils": "2.2.25", + "@heroui/form": "2.1.28", + "@heroui/listbox": "2.3.27", + "@heroui/popover": "2.3.28", "@heroui/react-utils": "2.1.14", - "@heroui/scroll-shadow": "2.3.18", + "@heroui/scroll-shadow": "2.3.19", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", - "@heroui/spinner": "2.2.24", + "@heroui/spinner": "2.2.25", "@heroui/use-aria-button": "2.2.20", "@heroui/use-aria-multiselect": "2.4.19", "@heroui/use-form-reset": "2.0.1", @@ -2574,66 +2199,48 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/select/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/shared-icons": { "version": "2.1.10", "resolved": "https://registry.npmjs.org/@heroui/shared-icons/-/shared-icons-2.1.10.tgz", "integrity": "sha512-ePo60GjEpM0SEyZBGOeySsLueNDCqLsVL79Fq+5BphzlrBAcaKY7kUp74964ImtkXvknTxAWzuuTr3kCRqj6jg==", - "license": "MIT", "peerDependencies": { "react": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/shared-utils": { - "version": "2.1.11", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.11.tgz", - "integrity": "sha512-2zKVjCc9EMMk05peVpI1Q+vFf+dzqyVdf1DBCJ2SNQEUF7E+sRe1FvhHvPoye3TIFD/Fr6b3kZ6vzjxL9GxB6A==", - "hasInstallScript": true, - "license": "MIT" + "version": "2.1.12", + "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", + "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", + "hasInstallScript": true }, "node_modules/@heroui/skeleton": { - "version": "2.2.17", - "resolved": "https://registry.npmjs.org/@heroui/skeleton/-/skeleton-2.2.17.tgz", - "integrity": "sha512-WDzwODs+jW+GgMr3oOdLtXXfv8ScXuuWgxN2iPWWyDBcQYXX2XCKGVjCpM5lSKf1UG4Yp3iXuqKzH1m+E+m7kg==", - "license": "MIT", + "version": "2.2.18", + "resolved": "https://registry.npmjs.org/@heroui/skeleton/-/skeleton-2.2.18.tgz", + "integrity": "sha512-7AjU5kjk9rqrKP9mWQiAVj0dow4/vbK5/ejh4jqdb3DZm7bM2+DGzfnQPiS0c2eWR606CgOuuoImpwDS82HJtA==", "dependencies": { "@heroui/shared-utils": "2.1.12" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/skeleton/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/slider": { - "version": "2.4.24", - "resolved": "https://registry.npmjs.org/@heroui/slider/-/slider-2.4.24.tgz", - "integrity": "sha512-GKdqFTCe9O8tT3HEZ/W4TEWkz7ADtUBzuOBXw779Oqqf02HNg9vSnISlNvI6G0ymYjY42EanwA+dChHbPBIVJw==", - "license": 
"MIT", + "version": "2.4.25", + "resolved": "https://registry.npmjs.org/@heroui/slider/-/slider-2.4.25.tgz", + "integrity": "sha512-1ULgaqsu1Vzyyx6S7TGs+13PX5BGArZhLiApQfKwiA3TFvT0MNzTVoWVgyFZ8XLqh4esSUnqddhivqQhbRzrHw==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", - "@heroui/tooltip": "2.2.24", + "@heroui/tooltip": "2.2.25", "@react-aria/focus": "3.21.2", "@react-aria/i18n": "3.12.13", "@react-aria/interactions": "3.25.6", @@ -2643,98 +2250,66 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.19", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/slider/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/snippet": { - "version": "2.2.28", - "resolved": "https://registry.npmjs.org/@heroui/snippet/-/snippet-2.2.28.tgz", - "integrity": "sha512-UfC/ZcYpmOutAcazxkizJWlhvqzr077szDyQ85thyUC5yhuRRLrsOHDIhyLWQrEKIcWw5+CaEGS2VLwAFlgfzw==", - "license": "MIT", + "version": "2.2.29", + "resolved": "https://registry.npmjs.org/@heroui/snippet/-/snippet-2.2.29.tgz", + "integrity": "sha512-RuyK/DldxvVYb6ToPk5cNNYeDkL+phKZPYHrUxBJK/PzuAkqi3AzQV7zHd+3IfTNxQbevRjzCXENE5F3GKP/MQ==", "dependencies": { - "@heroui/button": "2.2.27", + "@heroui/button": "2.2.28", "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", - "@heroui/tooltip": "2.2.24", + "@heroui/tooltip": "2.2.25", "@heroui/use-clipboard": "2.1.9", "@react-aria/focus": "3.21.2" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/snippet/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/spacer": { - "version": "2.2.21", - "resolved": "https://registry.npmjs.org/@heroui/spacer/-/spacer-2.2.21.tgz", - "integrity": "sha512-WKD+BlgHfqJ8lrkkg/6cvzSWNsbRjzr24HpZnv6cDeWX95wVLTOco9HVR8ohwStMqwu5zYeUd1bw6yCDVTo53w==", - "license": "MIT", + "version": "2.2.22", + "resolved": "https://registry.npmjs.org/@heroui/spacer/-/spacer-2.2.22.tgz", + "integrity": "sha512-BJ7RauvSY3gx10ntqZkCcyTy9K2FS4AeeryQUE9RgkMKQxP4t5TbeYLPEyomjWK+cCL/ERQCCruW16D3vKyWmw==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", - "@heroui/system-rsc": "2.3.20" + "@heroui/system-rsc": "2.3.21" }, "peerDependencies": { - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/spacer/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - 
"license": "MIT" - }, "node_modules/@heroui/spinner": { - "version": "2.2.24", - "resolved": "https://registry.npmjs.org/@heroui/spinner/-/spinner-2.2.24.tgz", - "integrity": "sha512-HfKkFffrIN9UdJY2UaenlB8xEwIzolCCFCwU0j3wVnLMX+Dw+ixwaELdAxX14Z6gPQYec6AROKetkWWit14rlw==", - "license": "MIT", + "version": "2.2.25", + "resolved": "https://registry.npmjs.org/@heroui/spinner/-/spinner-2.2.25.tgz", + "integrity": "sha512-zDuLJicUL51vGLEBbHWy/t6DlOvs9YILM4YLmzS/o84ExTgfrCycXNs6JkoteFiNu570qqZMeAA2aYneGfl/PQ==", "dependencies": { "@heroui/shared-utils": "2.1.12", - "@heroui/system": "2.4.23", - "@heroui/system-rsc": "2.3.20" + "@heroui/system": "2.4.24", + "@heroui/system-rsc": "2.3.21" }, "peerDependencies": { - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/spinner/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/switch": { - "version": "2.2.24", - "resolved": "https://registry.npmjs.org/@heroui/switch/-/switch-2.2.24.tgz", - "integrity": "sha512-RbV+MECncBKsthX3D8r+CGoQRu8Q3AAYUEdm/7ody6+bMZFmBilm695yLiqziMI33Ct/WQ0WkpvrTClIcmxU/A==", - "license": "MIT", + "version": "2.2.25", + "resolved": "https://registry.npmjs.org/@heroui/switch/-/switch-2.2.25.tgz", + "integrity": "sha512-F0Yj+kgVfD2bdy6REFvNySeGuYg1OT2phwMPwSZGUl7ZFeGSvvWSnbYS4/wS3JIM5PyEibSaB8QIPc8r00xq1A==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", @@ -2747,26 +2322,19 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/switch/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/system": { - "version": "2.4.23", - "resolved": "https://registry.npmjs.org/@heroui/system/-/system-2.4.23.tgz", - "integrity": "sha512-kgYvfkIOQKM6CCBIlNSE2tXMtNrS1mvEUbvwnaU3pEYbMlceBtwA5v7SlpaJy/5dqKcTbfmVMUCmXnY/Kw4vaQ==", - "license": "MIT", + "version": "2.4.24", + "resolved": "https://registry.npmjs.org/@heroui/system/-/system-2.4.24.tgz", + "integrity": "sha512-9GKQgUc91otQfwmq6TLE72QKxtB341aK5NpBHS3gRoWYEuNN714Zl3OXwIZNvdXPJpsTaUo1ID1ibJU9tfgwdg==", + "peer": true, "dependencies": { "@heroui/react-utils": "2.1.14", - "@heroui/system-rsc": "2.3.20", + "@heroui/system-rsc": "2.3.21", "@react-aria/i18n": "3.12.13", "@react-aria/overlays": "3.30.0", "@react-aria/utils": "3.31.0" @@ -2778,39 +2346,26 @@ } }, "node_modules/@heroui/system-rsc": { - "version": "2.3.20", - "resolved": "https://registry.npmjs.org/@heroui/system-rsc/-/system-rsc-2.3.20.tgz", - "integrity": "sha512-uZwQErEud/lAX7KRXEdsDcGLyygBffHcgnbCDrLvmTf3cyBE84YziG7AjM7Ts8ZcrF+wBXX4+a1IqnKGlsGEdQ==", - "license": "MIT", + "version": "2.3.21", + "resolved": "https://registry.npmjs.org/@heroui/system-rsc/-/system-rsc-2.3.21.tgz", + "integrity": 
"sha512-icB7njbNgkI3dcfZhY5LP7VFspaVgWL1lcg9Q7uJMAaj6gGFqqSSnHkSMwpR9AGLxVRKTHey0TUx8CeZDe8XDw==", "dependencies": { - "@react-types/shared": "3.32.1", - "clsx": "^1.2.1" + "@react-types/shared": "3.32.1" }, "peerDependencies": { - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/system-rsc/node_modules/clsx": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/clsx/-/clsx-1.2.1.tgz", - "integrity": "sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/@heroui/table": { - "version": "2.2.27", - "resolved": "https://registry.npmjs.org/@heroui/table/-/table-2.2.27.tgz", - "integrity": "sha512-XFmbEgBzf89WH1VzmnwENxVzK4JrHV5jdlzyM3snNhk8uDSjfecnUY33qR62cpdZsKiCFFcYf7kQPkCnJGnD0Q==", - "license": "MIT", + "version": "2.2.28", + "resolved": "https://registry.npmjs.org/@heroui/table/-/table-2.2.28.tgz", + "integrity": "sha512-0z3xs0kxDXvvd9gy/uHgvK0/bmpJF0m9t3omNMnB0I0EUx+gJ/CnaaPiF9M5veg/128rc45J7X2FgY3fPAKcmA==", "dependencies": { - "@heroui/checkbox": "2.3.27", + "@heroui/checkbox": "2.3.28", "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", - "@heroui/spacer": "2.2.21", "@react-aria/focus": "3.21.2", "@react-aria/interactions": "3.25.6", "@react-aria/table": "3.17.8", @@ -2823,25 +2378,17 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/table/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/tabs": { - "version": "2.2.24", - "resolved": "https://registry.npmjs.org/@heroui/tabs/-/tabs-2.2.24.tgz", - "integrity": "sha512-2SfxzAXe1t2Zz0v16kqkb7DR2wW86XoDwRUpLex6zhEN4/uT5ILeynxIVSUyAvVN3z95cnaQt0XPQBfUjAIQhQ==", - "license": "MIT", + "version": "2.2.25", + "resolved": "https://registry.npmjs.org/@heroui/tabs/-/tabs-2.2.25.tgz", + "integrity": "sha512-bIpz/8TTNMabmzObN2zs+3WhQXbKyr9tZUPkk3rMQxIshpg9oyyEWOS8XiMBxrEzSByLfPNypl5sX1au6Dw2Ew==", "dependencies": { - "@heroui/aria-utils": "2.2.24", + "@heroui/aria-utils": "2.2.25", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@heroui/use-is-mounted": "2.1.8", @@ -2854,64 +2401,39 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.22", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/tabs/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/theme": { - "version": "2.4.23", - "resolved": "https://registry.npmjs.org/@heroui/theme/-/theme-2.4.23.tgz", - "integrity": "sha512-5hoaRWG+/d/t06p7Pfhz70DUP0Uggjids7/z2Ytgup4A8KAOvDIXxvHUDlk6rRHKiN1wDMNA5H+EWsSXB/m03Q==", - "license": "MIT", + 
"version": "2.4.24", + "resolved": "https://registry.npmjs.org/@heroui/theme/-/theme-2.4.24.tgz", + "integrity": "sha512-lL+anmY4GGWwKyTbJ2PEBZE4talIZ3hu4yGpku9TktCVG2nC2YTwiWQFJ+Jcbf8Cf9vuLzI1sla5bz2jUqiBRA==", + "peer": true, "dependencies": { "@heroui/shared-utils": "2.1.12", - "clsx": "^1.2.1", "color": "^4.2.3", "color2k": "^2.0.3", "deepmerge": "4.3.1", "flat": "^5.0.2", - "tailwind-merge": "3.3.1", - "tailwind-variants": "3.1.1" + "tailwind-merge": "3.4.0", + "tailwind-variants": "3.2.2" }, "peerDependencies": { "tailwindcss": ">=4.0.0" } }, - "node_modules/@heroui/theme/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, - "node_modules/@heroui/theme/node_modules/clsx": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/clsx/-/clsx-1.2.1.tgz", - "integrity": "sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/@heroui/toast": { - "version": "2.0.17", - "resolved": "https://registry.npmjs.org/@heroui/toast/-/toast-2.0.17.tgz", - "integrity": "sha512-w3TaA1DYLcwdDjpwf9xw5YSr+odo9GGHsObsrMmLEQDS0JQhmKyK5sQqXUzb9d27EC6KVwGjeVg0hUHYQBK2JA==", - "license": "MIT", + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/@heroui/toast/-/toast-2.0.18.tgz", + "integrity": "sha512-5IoqEq10W/AaUgKWKIR7bbTB6U+rHMkikzGwW+IndsvFLR3meyb5l4K5cmVCmDsMHubUaRa9UFDeAokyNXvpWA==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", - "@heroui/spinner": "2.2.24", + "@heroui/spinner": "2.2.25", "@heroui/use-is-mobile": "2.2.12", "@react-aria/interactions": "3.25.6", "@react-aria/toast": "3.0.8", @@ -2919,28 +2441,20 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/toast/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/tooltip": { - "version": "2.2.24", - "resolved": "https://registry.npmjs.org/@heroui/tooltip/-/tooltip-2.2.24.tgz", - "integrity": "sha512-H+0STFea2/Z4obDdk+ZPoDzJxJQHIWGSjnW/jieThJbJ5zow/qBfcg5DqzIdiC+FCJ4dDD5jEDZ4W4H/fQUKQA==", - "license": "MIT", + "version": "2.2.25", + "resolved": "https://registry.npmjs.org/@heroui/tooltip/-/tooltip-2.2.25.tgz", + "integrity": "sha512-f+WxkQy0YBzzE6VhzVgA/CeD7nvo0hhOapx0UScU8zsQ1J+n5Kr5YY/7CgMHmFLyC/Amrqlf7WSgljRl4iWivQ==", "dependencies": { - "@heroui/aria-utils": "2.2.24", + "@heroui/aria-utils": "2.2.25", "@heroui/dom-animation": "2.1.10", - "@heroui/framer-utils": "2.1.23", + "@heroui/framer-utils": "2.1.24", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@heroui/use-aria-overlay": "2.0.4", @@ -2953,24 +2467,16 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || 
>=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/tooltip/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/use-aria-accordion": { "version": "2.2.18", "resolved": "https://registry.npmjs.org/@heroui/use-aria-accordion/-/use-aria-accordion-2.2.18.tgz", "integrity": "sha512-qjRkae2p4MFDrNqO6v6YCor0BtVi3idMd1dsI82XM16bxLQ2stqG4Ajrg60xV0AN+WKZUq10oetqkJuY6MYg0w==", - "license": "MIT", "dependencies": { "@react-aria/button": "3.14.2", "@react-aria/focus": "3.21.2", @@ -2987,7 +2493,6 @@ "version": "2.2.20", "resolved": "https://registry.npmjs.org/@heroui/use-aria-button/-/use-aria-button-2.2.20.tgz", "integrity": "sha512-Y0Bmze/pxEACKsHMbA1sYA3ghMJ+9fSnWvZBwlUxqiVXDEy2YrrK2JmXEgsuHGQdKD9RqU2Od3V4VqIIiaHiMA==", - "license": "MIT", "dependencies": { "@react-aria/focus": "3.21.2", "@react-aria/interactions": "3.25.6", @@ -3003,7 +2508,6 @@ "version": "2.2.21", "resolved": "https://registry.npmjs.org/@heroui/use-aria-link/-/use-aria-link-2.2.21.tgz", "integrity": "sha512-sG2rUutT/E/FYguzZmg715cXcM6+ue9wRfs2Gi6epWJwIVpS51uEagJKY0wIutJDfuCPfQ9AuxXfJek4CnxjKw==", - "license": "MIT", "dependencies": { "@react-aria/focus": "3.21.2", "@react-aria/interactions": "3.25.6", @@ -3019,7 +2523,6 @@ "version": "2.2.19", "resolved": "https://registry.npmjs.org/@heroui/use-aria-modal-overlay/-/use-aria-modal-overlay-2.2.19.tgz", "integrity": "sha512-MPvszNrt+1DauiSyOAwb0pKbYahpEVi9hrmidnO8cd1SA7B2ES0fNRBeNMAwcaeR/Nzsv+Cw1hRXt3egwqi0lg==", - "license": "MIT", "dependencies": { "@heroui/use-aria-overlay": "2.0.4", "@react-aria/overlays": "3.30.0", @@ -3035,7 +2538,6 @@ "version": "2.4.19", "resolved": "https://registry.npmjs.org/@heroui/use-aria-multiselect/-/use-aria-multiselect-2.4.19.tgz", "integrity": "sha512-RLDSpOLJqNESn6OK/zKuyTriK6sqMby76si/4kTMCs+4lmMPOyFKP3fREywu+zyJjRUCuZPa6xYuN2OHKQRDow==", - "license": "MIT", "dependencies": { "@react-aria/i18n": "3.12.13", "@react-aria/interactions": "3.25.6", @@ -3060,7 +2562,6 @@ "version": "2.0.4", "resolved": "https://registry.npmjs.org/@heroui/use-aria-overlay/-/use-aria-overlay-2.0.4.tgz", "integrity": "sha512-iv+y0+OvQd1eWiZftPI07JE3c5AdK85W5k3rDlhk5MFEI3dllkIpu8z8zLh3ge/BQGFiGkySVC5iXl8w84gMUQ==", - "license": "MIT", "dependencies": { "@react-aria/focus": "3.21.2", "@react-aria/interactions": "3.25.6", @@ -3076,7 +2577,6 @@ "version": "2.1.8", "resolved": "https://registry.npmjs.org/@heroui/use-callback-ref/-/use-callback-ref-2.1.8.tgz", "integrity": "sha512-D1JDo9YyFAprYpLID97xxQvf86NvyWLay30BeVVZT9kWmar6O9MbCRc7ACi7Ngko60beonj6+amTWkTm7QuY/Q==", - "license": "MIT", "dependencies": { "@heroui/use-safe-layout-effect": "2.1.8" }, @@ -3088,7 +2588,6 @@ "version": "2.1.9", "resolved": "https://registry.npmjs.org/@heroui/use-clipboard/-/use-clipboard-2.1.9.tgz", "integrity": "sha512-lkBq5RpXHiPvk1BXKJG8gMM0f7jRMIGnxAXDjAUzZyXKBuWLoM+XlaUWmZHtmkkjVFMX1L4vzA+vxi9rZbenEQ==", - "license": "MIT", "peerDependencies": { "react": ">=18 || >=19.0.0-rc.0" } @@ -3097,7 +2596,6 @@ "version": "2.2.13", "resolved": "https://registry.npmjs.org/@heroui/use-data-scroll-overflow/-/use-data-scroll-overflow-2.2.13.tgz", "integrity": "sha512-zboLXO1pgYdzMUahDcVt5jf+l1jAQ/D9dFqr7AxWLfn6tn7/EgY0f6xIrgWDgJnM0U3hKxVeY13pAeB4AFTqTw==", 
- "license": "MIT", "dependencies": { "@heroui/shared-utils": "2.1.12" }, @@ -3105,18 +2603,10 @@ "react": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/use-data-scroll-overflow/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/use-disclosure": { "version": "2.2.17", "resolved": "https://registry.npmjs.org/@heroui/use-disclosure/-/use-disclosure-2.2.17.tgz", "integrity": "sha512-S3pN0WmpcTTZuQHcXw4RcTVsxLaCZ95H5qi/JPN83ahhWTCC+pN8lwE37vSahbMTM1YriiHyTM6AWpv/E3Jq7w==", - "license": "MIT", "dependencies": { "@heroui/use-callback-ref": "2.1.8", "@react-aria/utils": "3.31.0", @@ -3130,7 +2620,6 @@ "version": "2.1.18", "resolved": "https://registry.npmjs.org/@heroui/use-draggable/-/use-draggable-2.1.18.tgz", "integrity": "sha512-ihQdmLGYJ6aTEaJ0/yCXYn6VRdrRV2eO03XD2A3KANZPb1Bj/n4r298xNMql5VnGq5ZNDJB9nTv8NNCu9pmPdg==", - "license": "MIT", "dependencies": { "@react-aria/interactions": "3.25.6" }, @@ -3142,7 +2631,6 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/@heroui/use-form-reset/-/use-form-reset-2.0.1.tgz", "integrity": "sha512-6slKWiLtVfgZnVeHVkM9eXgjwI07u0CUaLt2kQpfKPqTSTGfbHgCYJFduijtThhTdKBhdH6HCmzTcnbVlAxBXw==", - "license": "MIT", "peerDependencies": { "react": ">=18 || >=19.0.0-rc.0" } @@ -3151,7 +2639,6 @@ "version": "2.1.13", "resolved": "https://registry.npmjs.org/@heroui/use-image/-/use-image-2.1.13.tgz", "integrity": "sha512-NLApz+xin2bKHEXr+eSrtB0lN8geKP5VOea5QGbOCiHq4DBXu4QctpRkSfCHGIQzWdBVaLPoV+5wd0lR2S2Egg==", - "license": "MIT", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/use-safe-layout-effect": "2.1.8" @@ -3160,23 +2647,10 @@ "react": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/use-infinite-scroll": { - "version": "2.2.11", - "resolved": "https://registry.npmjs.org/@heroui/use-infinite-scroll/-/use-infinite-scroll-2.2.11.tgz", - "integrity": "sha512-Myhfq8CaeIDo5zCyYan/lM6gOvmvzaJzIiKIwRSrwVxXFBtrsYiaihC/THFw1VEWlOVOu5iPicESu08X7mOaqg==", - "license": "MIT", - "dependencies": { - "@heroui/shared-utils": "2.1.11" - }, - "peerDependencies": { - "react": ">=18 || >=19.0.0-rc.0" - } - }, "node_modules/@heroui/use-intersection-observer": { "version": "2.2.14", "resolved": "https://registry.npmjs.org/@heroui/use-intersection-observer/-/use-intersection-observer-2.2.14.tgz", "integrity": "sha512-qYJeMk4cTsF+xIckRctazCgWQ4BVOpJu+bhhkB1NrN+MItx19Lcb7ksOqMdN5AiSf85HzDcAEPIQ9w9RBlt5sg==", - "license": "MIT", "peerDependencies": { "react": ">=18 || >=19.0.0-rc.0" } @@ -3185,7 +2659,6 @@ "version": "2.2.12", "resolved": "https://registry.npmjs.org/@heroui/use-is-mobile/-/use-is-mobile-2.2.12.tgz", "integrity": "sha512-2UKa4v1xbvFwerWKoMTrg4q9ZfP9MVIVfCl1a7JuKQlXq3jcyV6z1as5bZ41pCsTOT+wUVOFnlr6rzzQwT9ZOA==", - "license": "MIT", "dependencies": { "@react-aria/ssr": "3.9.10" }, @@ -3197,7 +2670,6 @@ "version": "2.1.8", "resolved": "https://registry.npmjs.org/@heroui/use-is-mounted/-/use-is-mounted-2.1.8.tgz", "integrity": "sha512-DO/Th1vD4Uy8KGhd17oGlNA4wtdg91dzga+VMpmt94gSZe1WjsangFwoUBxF2uhlzwensCX9voye3kerP/lskg==", - "license": "MIT", "peerDependencies": { "react": ">=18 || >=19.0.0-rc.0" } @@ -3206,7 +2678,6 @@ "version": "2.1.8", "resolved": "https://registry.npmjs.org/@heroui/use-measure/-/use-measure-2.1.8.tgz", "integrity": 
"sha512-GjT9tIgluqYMZWfAX6+FFdRQBqyHeuqUMGzAXMTH9kBXHU0U5C5XU2c8WFORkNDoZIg1h13h1QdV+Vy4LE1dEA==", - "license": "MIT", "peerDependencies": { "react": ">=18 || >=19.0.0-rc.0" } @@ -3215,7 +2686,6 @@ "version": "2.2.18", "resolved": "https://registry.npmjs.org/@heroui/use-pagination/-/use-pagination-2.2.18.tgz", "integrity": "sha512-qm1mUe5UgV0kPZItcs/jiX/BxzdDagmcxaJkYR6DkhfMRoCuOdoJhcoh8ncbCAgHpzPESPn1VxsOcG4/Y+Jkdw==", - "license": "MIT", "dependencies": { "@heroui/shared-utils": "2.1.12", "@react-aria/i18n": "3.12.13" @@ -3224,18 +2694,10 @@ "react": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/use-pagination/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@heroui/use-resize": { "version": "2.1.8", "resolved": "https://registry.npmjs.org/@heroui/use-resize/-/use-resize-2.1.8.tgz", "integrity": "sha512-htF3DND5GmrSiMGnzRbISeKcH+BqhQ/NcsP9sBTIl7ewvFaWiDhEDiUHdJxflmJGd/c5qZq2nYQM/uluaqIkKA==", - "license": "MIT", "peerDependencies": { "react": ">=18 || >=19.0.0-rc.0" } @@ -3244,7 +2706,6 @@ "version": "2.1.8", "resolved": "https://registry.npmjs.org/@heroui/use-safe-layout-effect/-/use-safe-layout-effect-2.1.8.tgz", "integrity": "sha512-wbnZxVWCYqk10XRMu0veSOiVsEnLcmGUmJiapqgaz0fF8XcpSScmqjTSoWjHIEWaHjQZ6xr+oscD761D6QJN+Q==", - "license": "MIT", "peerDependencies": { "react": ">=18 || >=19.0.0-rc.0" } @@ -3253,7 +2714,6 @@ "version": "2.1.8", "resolved": "https://registry.npmjs.org/@heroui/use-scroll-position/-/use-scroll-position-2.1.8.tgz", "integrity": "sha512-NxanHKObxVfWaPpNRyBR8v7RfokxrzcHyTyQfbgQgAGYGHTMaOGkJGqF8kBzInc3zJi+F0zbX7Nb0QjUgsLNUQ==", - "license": "MIT", "peerDependencies": { "react": ">=18 || >=19.0.0-rc.0" } @@ -3262,36 +2722,27 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/@heroui/use-viewport-size/-/use-viewport-size-2.0.1.tgz", "integrity": "sha512-blv8BEB/QdLePLWODPRzRS2eELJ2eyHbdOIADbL0KcfLzOUEg9EiuVk90hcSUDAFqYiJ3YZ5Z0up8sdPcR8Y7g==", - "license": "MIT", "peerDependencies": { "react": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/user": { - "version": "2.2.22", - "resolved": "https://registry.npmjs.org/@heroui/user/-/user-2.2.22.tgz", - "integrity": "sha512-kOLxh9Bjgl/ya/f+W7/eKVO/n1GPsU5TPzwocC9+FU/+MbCOrmkevhAGGUrb259KCnp9WCv7WGRIcf8rrsreDw==", - "license": "MIT", + "version": "2.2.23", + "resolved": "https://registry.npmjs.org/@heroui/user/-/user-2.2.23.tgz", + "integrity": "sha512-o/ngJ4yTD4svjYKSP3hJNwhyWLhHk5g/wjqGvH81INfpeV7wPlzpM/C6LIezGB3rZjGM9d4ozSofv6spbCKCiA==", "dependencies": { - "@heroui/avatar": "2.2.22", + "@heroui/avatar": "2.2.23", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@react-aria/focus": "3.21.2" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/user/node_modules/@heroui/shared-utils": { - "version": "2.1.12", - "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", - "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" - }, "node_modules/@humanwhocodes/config-array": { "version": "0.13.0", "resolved": 
"https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.13.0.tgz", @@ -3354,15 +2805,25 @@ "dev": true, "license": "BSD-3-Clause" }, + "node_modules/@inquirer/ansi": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@inquirer/ansi/-/ansi-1.0.2.tgz", + "integrity": "sha512-S8qNSZiYzFd0wAcyG5AXCvUHC5Sr7xpZ9wZ2py9XR88jUz8wooStVx5M6dRzczbBWjic9NP7+rY0Xi7qqK/aMQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, "node_modules/@inquirer/confirm": { - "version": "5.1.16", - "resolved": "https://registry.npmjs.org/@inquirer/confirm/-/confirm-5.1.16.tgz", - "integrity": "sha512-j1a5VstaK5KQy8Mu8cHmuQvN1Zc62TbLhjJxwHvKPPKEoowSF6h/0UdOpA9DNdWZ+9Inq73+puRq1df6OJ8Sag==", + "version": "5.1.21", + "resolved": "https://registry.npmjs.org/@inquirer/confirm/-/confirm-5.1.21.tgz", + "integrity": "sha512-KR8edRkIsUayMXV+o3Gv+q4jlhENF9nMYUZs9PA2HzrXeHI8M5uDag70U7RJn9yyiMZSbtF5/UexBtAVtZGSbQ==", "dev": true, "license": "MIT", "dependencies": { - "@inquirer/core": "^10.2.0", - "@inquirer/type": "^3.0.8" + "@inquirer/core": "^10.3.2", + "@inquirer/type": "^3.0.10" }, "engines": { "node": ">=18" @@ -3377,20 +2838,20 @@ } }, "node_modules/@inquirer/core": { - "version": "10.2.0", - "resolved": "https://registry.npmjs.org/@inquirer/core/-/core-10.2.0.tgz", - "integrity": "sha512-NyDSjPqhSvpZEMZrLCYUquWNl+XC/moEcVFqS55IEYIYsY0a1cUCevSqk7ctOlnm/RaSBU5psFryNlxcmGrjaA==", + "version": "10.3.2", + "resolved": "https://registry.npmjs.org/@inquirer/core/-/core-10.3.2.tgz", + "integrity": "sha512-43RTuEbfP8MbKzedNqBrlhhNKVwoK//vUFNW3Q3vZ88BLcrs4kYpGg+B2mm5p2K/HfygoCxuKwJJiv8PbGmE0A==", "dev": true, "license": "MIT", "dependencies": { - "@inquirer/figures": "^1.0.13", - "@inquirer/type": "^3.0.8", - "ansi-escapes": "^4.3.2", + "@inquirer/ansi": "^1.0.2", + "@inquirer/figures": "^1.0.15", + "@inquirer/type": "^3.0.10", "cli-width": "^4.1.0", "mute-stream": "^2.0.0", "signal-exit": "^4.1.0", "wrap-ansi": "^6.2.0", - "yoctocolors-cjs": "^2.1.2" + "yoctocolors-cjs": "^2.1.3" }, "engines": { "node": ">=18" @@ -3404,22 +2865,6 @@ } } }, - "node_modules/@inquirer/core/node_modules/ansi-escapes": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", - "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "type-fest": "^0.21.3" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/@inquirer/core/node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", @@ -3445,24 +2890,11 @@ "license": "MIT", "dependencies": { "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/@inquirer/core/node_modules/type-fest": { - "version": "0.21.3", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", - "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", - "dev": true, - "license": "(MIT OR CC0-1.0)", - "engines": { - "node": ">=10" + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "engines": { + "node": ">=8" } }, "node_modules/@inquirer/core/node_modules/wrap-ansi": { @@ -3481,9 +2913,9 @@ } }, 
"node_modules/@inquirer/figures": { - "version": "1.0.13", - "resolved": "https://registry.npmjs.org/@inquirer/figures/-/figures-1.0.13.tgz", - "integrity": "sha512-lGPVU3yO9ZNqA7vTYz26jny41lE7yoQansmqdMLBEfqaGsmdg7V3W9mK9Pvb5IL4EVZ9GnSDGMO/cJXud5dMaw==", + "version": "1.0.15", + "resolved": "https://registry.npmjs.org/@inquirer/figures/-/figures-1.0.15.tgz", + "integrity": "sha512-t2IEY+unGHOzAaVM5Xx6DEWKeXlDDcNPeDyUpsRc6CUhBfU3VQOEl+Vssh7VNp1dR8MdUJBWhuObjXCsVpjN5g==", "dev": true, "license": "MIT", "engines": { @@ -3491,9 +2923,9 @@ } }, "node_modules/@inquirer/type": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/@inquirer/type/-/type-3.0.8.tgz", - "integrity": "sha512-lg9Whz8onIHRthWaN1Q9EGLa/0LFJjyM8mEUbL1eTi6yMGvBf8gvyDLtxSXztQsxMvhxxNpJYrwa1YHdq+w4Jw==", + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@inquirer/type/-/type-3.0.10.tgz", + "integrity": "sha512-BvziSRxfz5Ov8ch0z/n3oijRSEcEsHnhggm4xFZe93DHcUCTlutlq9Ox4SVENAfcRD22UQq7T/atg9Wr3k09eA==", "dev": true, "license": "MIT", "engines": { @@ -3512,7 +2944,6 @@ "version": "3.10.0", "resolved": "https://registry.npmjs.org/@internationalized/date/-/date-3.10.0.tgz", "integrity": "sha512-oxDR/NTEJ1k+UFVQElaNIk65E/Z83HK1z1WI3lQyhTtnNg4R5oVXaPzK3jcpKG8UHKDVuDQHzn+wsxSz8RP3aw==", - "license": "Apache-2.0", "dependencies": { "@swc/helpers": "^0.5.0" } @@ -3521,7 +2952,6 @@ "version": "3.1.8", "resolved": "https://registry.npmjs.org/@internationalized/message/-/message-3.1.8.tgz", "integrity": "sha512-Rwk3j/TlYZhn3HQ6PyXUV0XP9Uv42jqZGNegt0BXlxjE6G3+LwHjbQZAGHhCnCPdaA6Tvd3ma/7QzLlLkJxAWA==", - "license": "Apache-2.0", "dependencies": { "@swc/helpers": "^0.5.0", "intl-messageformat": "^10.1.0" @@ -3531,7 +2961,6 @@ "version": "3.6.5", "resolved": "https://registry.npmjs.org/@internationalized/number/-/number-3.6.5.tgz", "integrity": "sha512-6hY4Kl4HPBvtfS62asS/R22JzNNy8vi/Ssev7x6EobfCp+9QIB2hKvI2EtbdJ0VSQacxVNtqhE/NmF/NZ0gm6g==", - "license": "Apache-2.0", "dependencies": { "@swc/helpers": "^0.5.0" } @@ -3540,80 +2969,10 @@ "version": "3.2.7", "resolved": "https://registry.npmjs.org/@internationalized/string/-/string-3.2.7.tgz", "integrity": "sha512-D4OHBjrinH+PFZPvfCXvG28n2LSykWcJ7GIioQL+ok0LON15SdfoUssoHzzOUmVZLbRoREsQXVzA6r8JKsbP6A==", - "license": "Apache-2.0", "dependencies": { "@swc/helpers": "^0.5.0" } }, - "node_modules/@isaacs/cliui": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", - "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", - "dev": true, - "license": "ISC", - "dependencies": { - "string-width": "^5.1.2", - "string-width-cjs": "npm:string-width@^4.2.0", - "strip-ansi": "^7.0.1", - "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", - "wrap-ansi": "^8.1.0", - "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/@isaacs/cliui/node_modules/ansi-regex": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.0.tgz", - "integrity": "sha512-TKY5pyBkHyADOPYlRT9Lx6F544mPl0vS5Ew7BJ45hA08Q+t3GjbueLliBWN3sMICk6+y7HdyxSzC4bWS8baBdg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - "node_modules/@isaacs/cliui/node_modules/strip-ansi": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", - "integrity": 
"sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" - } - }, - "node_modules/@isaacs/fs-minipass": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz", - "integrity": "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==", - "license": "ISC", - "dependencies": { - "minipass": "^7.0.4" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@istanbuljs/schema": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", - "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/@jridgewell/gen-mapping": { "version": "0.3.13", "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", @@ -3650,9 +3009,9 @@ "license": "MIT" }, "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.30", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.30.tgz", - "integrity": "sha512-GQ7Nw5G2lTu/BtHTKfXhKHok2WGetd4XYcVKGx00SjAk8GMwgJM3zr6zORiPGuOE+/vkc90KtTosSSvaCjKb2Q==", + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", "license": "MIT", "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", @@ -3684,9 +3043,9 @@ "license": "MIT" }, "node_modules/@monaco-editor/loader": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@monaco-editor/loader/-/loader-1.5.0.tgz", - "integrity": "sha512-hKoGSM+7aAc7eRTRjpqAZucPmoNOC4UUbknb/VNoTkEIkCPhqV8LfbsgM1webRM7S/z21eHEx9Fkwx8Z/C/+Xw==", + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/@monaco-editor/loader/-/loader-1.7.0.tgz", + "integrity": "sha512-gIwR1HrJrrx+vfyOhYmCZ0/JcWqG5kbfG7+d3f/C1LXk2EvzAbHSg3MQ5lO2sMlo9izoAZ04shohfKLVT6crVA==", "license": "MIT", "dependencies": { "state-local": "^1.0.6" @@ -3707,9 +3066,9 @@ } }, "node_modules/@mswjs/interceptors": { - "version": "0.39.6", - "resolved": "https://registry.npmjs.org/@mswjs/interceptors/-/interceptors-0.39.6.tgz", - "integrity": "sha512-bndDP83naYYkfayr/qhBHMhk0YGwS1iv6vaEGcr0SQbO0IZtbOPqjKjds/WcG+bJA+1T5vCx6kprKOzn5Bg+Vw==", + "version": "0.39.8", + "resolved": "https://registry.npmjs.org/@mswjs/interceptors/-/interceptors-0.39.8.tgz", + "integrity": "sha512-2+BzZbjRO7Ct61k8fMNHEtoKjeWI9pIlHFTqBwZ5icHpqszIgEZbjb1MW5Z0+bITTCTl3gk4PDBxs9tA/csXvA==", "dev": true, "license": "MIT", "dependencies": { @@ -3777,68 +3136,6 @@ "node": ">= 8" } }, - "node_modules/@npmcli/git": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-4.1.0.tgz", - "integrity": "sha512-9hwoB3gStVfa0N31ymBmrX+GuDGdVA/QWShZVqE0HK2Af+7QGGrCTbZia/SW0ImUTjTne7SP91qxDmtXvDHRPQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "@npmcli/promise-spawn": "^6.0.0", - "lru-cache": "^7.4.4", - "npm-pick-manifest": "^8.0.0", - "proc-log": "^3.0.0", - "promise-inflight": "^1.0.1", - "promise-retry": "^2.0.1", - "semver": "^7.3.5", - "which": "^3.0.0" - }, - "engines": { - "node": "^14.17.0 || 
^16.13.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/git/node_modules/lru-cache": { - "version": "7.18.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", - "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/@npmcli/package-json": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-4.0.1.tgz", - "integrity": "sha512-lRCEGdHZomFsURroh522YvA/2cVb9oPIJrjHanCJZkiasz1BzcnLr3tBJhlV7S86MBJBuAQ33is2D60YitZL2Q==", - "dev": true, - "license": "ISC", - "dependencies": { - "@npmcli/git": "^4.1.0", - "glob": "^10.2.2", - "hosted-git-info": "^6.1.1", - "json-parse-even-better-errors": "^3.0.0", - "normalize-package-data": "^5.0.0", - "proc-log": "^3.0.0", - "semver": "^7.5.3" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/promise-spawn": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-6.0.2.tgz", - "integrity": "sha512-gGq0NJkIGSwdbUt4yhdF8ZrmkGKVz9vAdVzpOfnom+V8PLSmSOVhZwbNvZZS1EYcJN5hzzKBxmmVVAInM6HQLg==", - "dev": true, - "license": "ISC", - "dependencies": { - "which": "^3.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, "node_modules/@open-draft/deferred-promise": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/@open-draft/deferred-promise/-/deferred-promise-2.2.0.tgz", @@ -3864,17 +3161,6 @@ "dev": true, "license": "MIT" }, - "node_modules/@pkgjs/parseargs": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", - "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", - "dev": true, - "license": "MIT", - "optional": true, - "engines": { - "node": ">=14" - } - }, "node_modules/@pkgr/core": { "version": "0.2.9", "resolved": "https://registry.npmjs.org/@pkgr/core/-/core-0.2.9.tgz", @@ -3889,12 +3175,13 @@ } }, "node_modules/@playwright/test": { - "version": "1.55.1", - "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.55.1.tgz", - "integrity": "sha512-IVAh/nOJaw6W9g+RJVlIQJ6gSiER+ae6mKQ5CX1bERzQgbC1VSeBlwdvczT7pxb0GWiyrxH4TGKbMfDb4Sq/ig==", + "version": "1.57.0", + "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.57.0.tgz", + "integrity": "sha512-6TyEnHgd6SArQO8UO2OMTxshln3QMWBtPGrOCgs3wVEmQmwyuNtB10IZMfmYDE0riwNR1cu4q+pPcxMVtaG3TA==", "dev": true, + "license": "Apache-2.0", "dependencies": { - "playwright": "1.55.1" + "playwright": "1.57.0" }, "bin": { "playwright": "cli.js" @@ -3910,35 +3197,17 @@ "license": "MIT" }, "node_modules/@posthog/core": { - "version": "1.5.2", - "resolved": "https://registry.npmjs.org/@posthog/core/-/core-1.5.2.tgz", - "integrity": "sha512-iedUP3EnOPPxTA2VaIrsrd29lSZnUV+ZrMnvY56timRVeZAXoYCkmjfIs3KBAsF8OUT5h1GXLSkoQdrV0r31OQ==", - "license": "MIT", + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/@posthog/core/-/core-1.8.1.tgz", + "integrity": "sha512-jfzBtQIk9auRi/biO+G/gumK5KxqsD5wOr7XpYMROE/I3pazjP4zIziinp21iQuIQJMXrDvwt9Af3njgOGwtew==", "dependencies": { "cross-spawn": "^7.0.6" } }, - "node_modules/@posthog/react": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@posthog/react/-/react-1.4.0.tgz", - "integrity": "sha512-xzPeZ753fQ0deZzdgY/0YavZvNpmdaxUzLYJYu5XjONNcZ8PwJnNLEK+7D/Cj8UM4Q8nWI7QC5mjum0uLWa4FA==", - 
"license": "MIT", - "peerDependencies": { - "@types/react": ">=16.8.0", - "posthog-js": ">=1.257.2", - "react": ">=16.8.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, "node_modules/@react-aria/breadcrumbs": { "version": "3.5.29", "resolved": "https://registry.npmjs.org/@react-aria/breadcrumbs/-/breadcrumbs-3.5.29.tgz", "integrity": "sha512-rKS0dryllaZJqrr3f/EAf2liz8CBEfmL5XACj+Z1TAig6GIYe1QuA3BtkX0cV9OkMugXdX8e3cbA7nD10ORRqg==", - "license": "Apache-2.0", "dependencies": { "@react-aria/i18n": "^3.12.13", "@react-aria/link": "^3.8.6", @@ -3956,7 +3225,6 @@ "version": "3.14.2", "resolved": "https://registry.npmjs.org/@react-aria/button/-/button-3.14.2.tgz", "integrity": "sha512-VbLIA+Kd6f/MDjd+TJBUg2+vNDw66pnvsj2E4RLomjI9dfBuN7d+Yo2UnsqKVyhePjCUZ6xxa2yDuD63IOSIYA==", - "license": "Apache-2.0", "dependencies": { "@react-aria/interactions": "^3.25.6", "@react-aria/toolbar": "3.0.0-beta.21", @@ -3975,7 +3243,6 @@ "version": "3.9.2", "resolved": "https://registry.npmjs.org/@react-aria/calendar/-/calendar-3.9.2.tgz", "integrity": "sha512-uSLxLgOPRnEU4Jg59lAhUVA+uDx/55NBg4lpfsP2ynazyiJ5LCXmYceJi+VuOqMml7d9W0dB87OldOeLdIxYVA==", - "license": "Apache-2.0", "dependencies": { "@internationalized/date": "^3.10.0", "@react-aria/i18n": "^3.12.13", @@ -3997,7 +3264,6 @@ "version": "3.16.2", "resolved": "https://registry.npmjs.org/@react-aria/checkbox/-/checkbox-3.16.2.tgz", "integrity": "sha512-29Mj9ZqXioJ0bcMnNGooHztnTau5pikZqX3qCRj5bYR3by/ZFFavYoMroh9F7s/MbFm/tsKX+Sf02lYFEdXRjA==", - "license": "Apache-2.0", "dependencies": { "@react-aria/form": "^3.1.2", "@react-aria/interactions": "^3.25.6", @@ -4020,7 +3286,6 @@ "version": "3.14.0", "resolved": "https://registry.npmjs.org/@react-aria/combobox/-/combobox-3.14.0.tgz", "integrity": "sha512-z4ro0Hma//p4nL2IJx5iUa7NwxeXbzSoZ0se5uTYjG1rUUMszg+wqQh/AQoL+eiULn7rs18JY9wwNbVIkRNKWA==", - "license": "Apache-2.0", "dependencies": { "@react-aria/focus": "^3.21.2", "@react-aria/i18n": "^3.12.13", @@ -4048,7 +3313,6 @@ "version": "3.15.2", "resolved": "https://registry.npmjs.org/@react-aria/datepicker/-/datepicker-3.15.2.tgz", "integrity": "sha512-th078hyNqPf4P2K10su/y32zPDjs3lOYVdHvsL9/+5K1dnTvLHCK5vgUyLuyn8FchhF7cmHV49D+LZVv65PEpQ==", - "license": "Apache-2.0", "dependencies": { "@internationalized/date": "^3.10.0", "@internationalized/number": "^3.6.5", @@ -4078,7 +3342,6 @@ "version": "3.5.31", "resolved": "https://registry.npmjs.org/@react-aria/dialog/-/dialog-3.5.31.tgz", "integrity": "sha512-inxQMyrzX0UBW9Mhraq0nZ4HjHdygQvllzloT1E/RlDd61lr3RbmJR6pLsrbKOTtSvDIBJpCso1xEdHCFNmA0Q==", - "license": "Apache-2.0", "dependencies": { "@react-aria/interactions": "^3.25.6", "@react-aria/overlays": "^3.30.0", @@ -4096,7 +3359,6 @@ "version": "3.21.2", "resolved": "https://registry.npmjs.org/@react-aria/focus/-/focus-3.21.2.tgz", "integrity": "sha512-JWaCR7wJVggj+ldmM/cb/DXFg47CXR55lznJhZBh4XVqJjMKwaOOqpT5vNN7kpC1wUpXicGNuDnJDN1S/+6dhQ==", - "license": "Apache-2.0", "dependencies": { "@react-aria/interactions": "^3.25.6", "@react-aria/utils": "^3.31.0", @@ -4113,7 +3375,6 @@ "version": "3.1.2", "resolved": "https://registry.npmjs.org/@react-aria/form/-/form-3.1.2.tgz", "integrity": "sha512-R3i7L7Ci61PqZQvOrnL9xJeWEbh28UkTVgkj72EvBBn39y4h7ReH++0stv7rRs8p5ozETSKezBbGfu4UsBewWw==", - "license": "Apache-2.0", "dependencies": { "@react-aria/interactions": "^3.25.6", "@react-aria/utils": "^3.31.0", @@ -4130,7 +3391,6 @@ "version": "3.14.5", "resolved": "https://registry.npmjs.org/@react-aria/grid/-/grid-3.14.5.tgz", "integrity": 
"sha512-XHw6rgjlTqc85e3zjsWo3U0EVwjN5MOYtrolCKc/lc2ItNdcY3OlMhpsU9+6jHwg/U3VCSWkGvwAz9hg7krd8Q==", - "license": "Apache-2.0", "dependencies": { "@react-aria/focus": "^3.21.2", "@react-aria/i18n": "^3.12.13", @@ -4155,7 +3415,6 @@ "version": "3.12.13", "resolved": "https://registry.npmjs.org/@react-aria/i18n/-/i18n-3.12.13.tgz", "integrity": "sha512-YTM2BPg0v1RvmP8keHenJBmlx8FXUKsdYIEX7x6QWRd1hKlcDwphfjzvt0InX9wiLiPHsT5EoBTpuUk8SXc0Mg==", - "license": "Apache-2.0", "dependencies": { "@internationalized/date": "^3.10.0", "@internationalized/message": "^3.1.8", @@ -4175,7 +3434,6 @@ "version": "3.25.6", "resolved": "https://registry.npmjs.org/@react-aria/interactions/-/interactions-3.25.6.tgz", "integrity": "sha512-5UgwZmohpixwNMVkMvn9K1ceJe6TzlRlAfuYoQDUuOkk62/JVJNDLAPKIf5YMRc7d2B0rmfgaZLMtbREb0Zvkw==", - "license": "Apache-2.0", "dependencies": { "@react-aria/ssr": "^3.9.10", "@react-aria/utils": "^3.31.0", @@ -4192,7 +3450,6 @@ "version": "3.7.22", "resolved": "https://registry.npmjs.org/@react-aria/label/-/label-3.7.22.tgz", "integrity": "sha512-jLquJeA5ZNqDT64UpTc9XJ7kQYltUlNcgxZ37/v4mHe0UZ7QohCKdKQhXHONb0h2jjNUpp2HOZI8J9++jOpzxA==", - "license": "Apache-2.0", "dependencies": { "@react-aria/utils": "^3.31.0", "@react-types/shared": "^3.32.1", @@ -4207,7 +3464,6 @@ "version": "3.0.7", "resolved": "https://registry.npmjs.org/@react-aria/landmark/-/landmark-3.0.7.tgz", "integrity": "sha512-t8c610b8hPLS6Vwv+rbuSyljZosI1s5+Tosfa0Fk4q7d+Ex6Yj7hLfUFy59GxZAufhUYfGX396fT0gPqAbU1tg==", - "license": "Apache-2.0", "dependencies": { "@react-aria/utils": "^3.31.0", "@react-types/shared": "^3.32.1", @@ -4223,7 +3479,6 @@ "version": "3.8.6", "resolved": "https://registry.npmjs.org/@react-aria/link/-/link-3.8.6.tgz", "integrity": "sha512-7F7UDJnwbU9IjfoAdl6f3Hho5/WB7rwcydUOjUux0p7YVWh/fTjIFjfAGyIir7MJhPapun1D0t97QQ3+8jXVcg==", - "license": "Apache-2.0", "dependencies": { "@react-aria/interactions": "^3.25.6", "@react-aria/utils": "^3.31.0", @@ -4240,7 +3495,6 @@ "version": "3.15.0", "resolved": "https://registry.npmjs.org/@react-aria/listbox/-/listbox-3.15.0.tgz", "integrity": "sha512-Ub1Wu79R9sgxM7h4HeEdjOgOKDHwduvYcnDqsSddGXgpkL8ADjsy2YUQ0hHY5VnzA4BxK36bLp4mzSna8Qvj1w==", - "license": "Apache-2.0", "dependencies": { "@react-aria/interactions": "^3.25.6", "@react-aria/label": "^3.7.22", @@ -4261,7 +3515,6 @@ "version": "3.4.4", "resolved": "https://registry.npmjs.org/@react-aria/live-announcer/-/live-announcer-3.4.4.tgz", "integrity": "sha512-PTTBIjNRnrdJOIRTDGNifY2d//kA7GUAwRFJNOEwSNG4FW+Bq9awqLiflw0JkpyB0VNIwou6lqKPHZVLsGWOXA==", - "license": "Apache-2.0", "dependencies": { "@swc/helpers": "^0.5.0" } @@ -4270,7 +3523,6 @@ "version": "3.19.3", "resolved": "https://registry.npmjs.org/@react-aria/menu/-/menu-3.19.3.tgz", "integrity": "sha512-52fh8y8b2776R2VrfZPpUBJYC9oTP7XDy+zZuZTxPEd7Ywk0JNUl5F92y6ru22yPkS13sdhrNM/Op+V/KulmAg==", - "license": "Apache-2.0", "dependencies": { "@react-aria/focus": "^3.21.2", "@react-aria/i18n": "^3.12.13", @@ -4296,7 +3548,6 @@ "version": "3.12.2", "resolved": "https://registry.npmjs.org/@react-aria/numberfield/-/numberfield-3.12.2.tgz", "integrity": "sha512-M2b+z0HIXiXpGAWOQkO2kpIjaLNUXJ5Q3/GMa3Fkr+B1piFX0VuOynYrtddKVrmXCe+r5t+XcGb0KS29uqv7nQ==", - "license": "Apache-2.0", "dependencies": { "@react-aria/i18n": "^3.12.13", "@react-aria/interactions": "^3.25.6", @@ -4319,7 +3570,6 @@ "version": "3.30.0", "resolved": "https://registry.npmjs.org/@react-aria/overlays/-/overlays-3.30.0.tgz", "integrity": 
"sha512-UpjqSjYZx5FAhceWCRVsW6fX1sEwya1fQ/TKkL53FAlLFR8QKuoKqFlmiL43YUFTcGK3UdEOy3cWTleLQwdSmQ==", - "license": "Apache-2.0", "dependencies": { "@react-aria/focus": "^3.21.2", "@react-aria/i18n": "^3.12.13", @@ -4342,7 +3592,6 @@ "version": "3.4.27", "resolved": "https://registry.npmjs.org/@react-aria/progress/-/progress-3.4.27.tgz", "integrity": "sha512-0OA1shs1575g1zmO8+rWozdbTnxThFFhOfuoL1m7UV5Dley6FHpueoKB1ECv7B+Qm4dQt6DoEqLg7wsbbQDhmg==", - "license": "Apache-2.0", "dependencies": { "@react-aria/i18n": "^3.12.13", "@react-aria/label": "^3.7.22", @@ -4360,7 +3609,6 @@ "version": "3.12.2", "resolved": "https://registry.npmjs.org/@react-aria/radio/-/radio-3.12.2.tgz", "integrity": "sha512-I11f6I90neCh56rT/6ieAs3XyDKvEfbj/QmbU5cX3p+SJpRRPN0vxQi5D1hkh0uxDpeClxygSr31NmZsd4sqfg==", - "license": "Apache-2.0", "dependencies": { "@react-aria/focus": "^3.21.2", "@react-aria/form": "^3.1.2", @@ -4382,7 +3630,6 @@ "version": "3.26.0", "resolved": "https://registry.npmjs.org/@react-aria/selection/-/selection-3.26.0.tgz", "integrity": "sha512-ZBH3EfWZ+RfhTj01dH8L17uT7iNbXWS8u77/fUpHgtrm0pwNVhx0TYVnLU1YpazQ/3WVpvWhmBB8sWwD1FlD/g==", - "license": "Apache-2.0", "dependencies": { "@react-aria/focus": "^3.21.2", "@react-aria/i18n": "^3.12.13", @@ -4401,7 +3648,6 @@ "version": "3.8.2", "resolved": "https://registry.npmjs.org/@react-aria/slider/-/slider-3.8.2.tgz", "integrity": "sha512-6KyUGaVzRE4xAz1LKHbNh1q5wzxe58pdTHFSnxNe6nk1SCoHw7NfI4h2s2m6LgJ0megFxsT0Ir8aHaFyyxmbgg==", - "license": "Apache-2.0", "dependencies": { "@react-aria/i18n": "^3.12.13", "@react-aria/interactions": "^3.25.6", @@ -4421,7 +3667,6 @@ "version": "3.6.19", "resolved": "https://registry.npmjs.org/@react-aria/spinbutton/-/spinbutton-3.6.19.tgz", "integrity": "sha512-xOIXegDpts9t3RSHdIN0iYQpdts0FZ3LbpYJIYVvdEHo9OpDS+ElnDzCGtwZLguvZlwc5s1LAKuKopDUsAEMkw==", - "license": "Apache-2.0", "dependencies": { "@react-aria/i18n": "^3.12.13", "@react-aria/live-announcer": "^3.4.4", @@ -4439,7 +3684,6 @@ "version": "3.9.10", "resolved": "https://registry.npmjs.org/@react-aria/ssr/-/ssr-3.9.10.tgz", "integrity": "sha512-hvTm77Pf+pMBhuBm760Li0BVIO38jv1IBws1xFm1NoL26PU+fe+FMW5+VZWyANR6nYL65joaJKZqOdTQMkO9IQ==", - "license": "Apache-2.0", "dependencies": { "@swc/helpers": "^0.5.0" }, @@ -4454,7 +3698,6 @@ "version": "3.7.8", "resolved": "https://registry.npmjs.org/@react-aria/switch/-/switch-3.7.8.tgz", "integrity": "sha512-AfsUq1/YiuoprhcBUD9vDPyWaigAwctQNW1fMb8dROL+i/12B+Zekj8Ml+jbU69/kIVtfL0Jl7/0Bo9KK3X0xQ==", - "license": "Apache-2.0", "dependencies": { "@react-aria/toggle": "^3.12.2", "@react-stately/toggle": "^3.9.2", @@ -4471,7 +3714,6 @@ "version": "3.17.8", "resolved": "https://registry.npmjs.org/@react-aria/table/-/table-3.17.8.tgz", "integrity": "sha512-bXiZoxTMbsqUJsYDhHPzKc3jw0HFJ/xMsJ49a0f7mp5r9zACxNLeIU0wJ4Uvx37dnYOHKzGliG+rj5l4sph7MA==", - "license": "Apache-2.0", "dependencies": { "@react-aria/focus": "^3.21.2", "@react-aria/grid": "^3.14.5", @@ -4498,7 +3740,6 @@ "version": "3.10.8", "resolved": "https://registry.npmjs.org/@react-aria/tabs/-/tabs-3.10.8.tgz", "integrity": "sha512-sPPJyTyoAqsBh76JinBAxStOcbjZvyWFYKpJ9Uqw+XT0ObshAPPFSGeh8DiQemPs02RwJdrfARPMhyqiX8t59A==", - "license": "Apache-2.0", "dependencies": { "@react-aria/focus": "^3.21.2", "@react-aria/i18n": "^3.12.13", @@ -4518,7 +3759,6 @@ "version": "3.18.2", "resolved": "https://registry.npmjs.org/@react-aria/textfield/-/textfield-3.18.2.tgz", "integrity": "sha512-G+lM8VYSor6g9Yptc6hLZ6BF+0cq0pYol1z6wdQUQgJN8tg4HPtzq75lsZtlCSIznL3amgRAxJtd0dUrsAnvaQ==", - "license": 
"Apache-2.0", "dependencies": { "@react-aria/form": "^3.1.2", "@react-aria/interactions": "^3.25.6", @@ -4539,7 +3779,6 @@ "version": "3.0.8", "resolved": "https://registry.npmjs.org/@react-aria/toast/-/toast-3.0.8.tgz", "integrity": "sha512-rfJIms6AkMyQ7ZgKrMZgGfPwGcB/t1JoEwbc1PAmXcAvFI/hzF6YF7ZFDXiq38ucFsP9PnHmbXIzM9w4ccl18A==", - "license": "Apache-2.0", "dependencies": { "@react-aria/i18n": "^3.12.13", "@react-aria/interactions": "^3.25.6", @@ -4559,7 +3798,6 @@ "version": "3.12.2", "resolved": "https://registry.npmjs.org/@react-aria/toggle/-/toggle-3.12.2.tgz", "integrity": "sha512-g25XLYqJuJpt0/YoYz2Rab8ax+hBfbssllcEFh0v0jiwfk2gwTWfRU9KAZUvxIqbV8Nm8EBmrYychDpDcvW1kw==", - "license": "Apache-2.0", "dependencies": { "@react-aria/interactions": "^3.25.6", "@react-aria/utils": "^3.31.0", @@ -4577,7 +3815,6 @@ "version": "3.0.0-beta.21", "resolved": "https://registry.npmjs.org/@react-aria/toolbar/-/toolbar-3.0.0-beta.21.tgz", "integrity": "sha512-yRCk/GD8g+BhdDgxd3I0a0c8Ni4Wyo6ERzfSoBkPkwQ4X2E2nkopmraM9D0fXw4UcIr4bnmvADzkHXtBN0XrBg==", - "license": "Apache-2.0", "dependencies": { "@react-aria/focus": "^3.21.2", "@react-aria/i18n": "^3.12.13", @@ -4594,7 +3831,6 @@ "version": "3.8.8", "resolved": "https://registry.npmjs.org/@react-aria/tooltip/-/tooltip-3.8.8.tgz", "integrity": "sha512-CmHUqtXtFWmG4AHMEr9hIVex+oscK6xcM2V47gq9ijNInxe3M6UBu/dBdkgGP/jYv9N7tzCAjTR8nNIHQXwvWw==", - "license": "Apache-2.0", "dependencies": { "@react-aria/interactions": "^3.25.6", "@react-aria/utils": "^3.31.0", @@ -4612,7 +3848,6 @@ "version": "3.31.0", "resolved": "https://registry.npmjs.org/@react-aria/utils/-/utils-3.31.0.tgz", "integrity": "sha512-ABOzCsZrWzf78ysswmguJbx3McQUja7yeGj6/vZo4JVsZNlxAN+E9rs381ExBRI0KzVo6iBTeX5De8eMZPJXig==", - "license": "Apache-2.0", "dependencies": { "@react-aria/ssr": "^3.9.10", "@react-stately/flags": "^3.1.2", @@ -4630,7 +3865,6 @@ "version": "3.8.28", "resolved": "https://registry.npmjs.org/@react-aria/visually-hidden/-/visually-hidden-3.8.28.tgz", "integrity": "sha512-KRRjbVVob2CeBidF24dzufMxBveEUtUu7IM+hpdZKB+gxVROoh4XRLPv9SFmaH89Z7D9To3QoykVZoWD0lan6Q==", - "license": "Apache-2.0", "dependencies": { "@react-aria/interactions": "^3.25.6", "@react-aria/utils": "^3.31.0", @@ -4643,9 +3877,9 @@ } }, "node_modules/@react-router/dev": { - "version": "7.9.3", - "resolved": "https://registry.npmjs.org/@react-router/dev/-/dev-7.9.3.tgz", - "integrity": "sha512-oPaO+OpvCo/rNTJrRipHSp31/K4It19PE5A24x21FlYlemPTe3fbGX/kyC2+8au/abXbvzNHfRbuIBD/rfojmA==", + "version": "7.11.0", + "resolved": "https://registry.npmjs.org/@react-router/dev/-/dev-7.11.0.tgz", + "integrity": "sha512-g1ou5Zw3r4mCU0L+EXH4vRtAiyt8qz1JOvL1k+PW4rZ4+71h5nBy/fLgD7cg5BnzQZmjRO1PzCgpF5BIrlKYxQ==", "dev": true, "dependencies": { "@babel/core": "^7.27.7", @@ -4655,8 +3889,7 @@ "@babel/preset-typescript": "^7.27.1", "@babel/traverse": "^7.27.7", "@babel/types": "^7.27.7", - "@npmcli/package-json": "^4.0.1", - "@react-router/node": "7.9.3", + "@react-router/node": "7.11.0", "@remix-run/node-fetch-server": "^0.9.0", "arg": "^5.0.1", "babel-dead-code-elimination": "^1.0.6", @@ -4667,13 +3900,15 @@ "isbot": "^5.1.11", "jsesc": "3.0.2", "lodash": "^4.17.21", + "p-map": "^7.0.3", "pathe": "^1.1.2", "picocolors": "^1.1.1", + "pkg-types": "^2.3.0", "prettier": "^3.6.2", "react-refresh": "^0.14.0", "semver": "^7.3.7", "tinyglobby": "^0.2.14", - "valibot": "^0.41.0", + "valibot": "^1.2.0", "vite-node": "^3.2.2" }, "bin": { @@ -4683,9 +3918,10 @@ "node": ">=20.0.0" }, "peerDependencies": { - "@react-router/serve": "^7.9.3", - 
"@vitejs/plugin-rsc": "*", - "react-router": "^7.9.3", + "@react-router/serve": "^7.11.0", + "@vitejs/plugin-rsc": "~0.5.7", + "react-router": "^7.11.0", + "react-server-dom-webpack": "^19.2.3", "typescript": "^5.1.0", "vite": "^5.1.0 || ^6.0.0 || ^7.0.0", "wrangler": "^3.28.2 || ^4.0.0" @@ -4697,6 +3933,9 @@ "@vitejs/plugin-rsc": { "optional": true }, + "react-server-dom-webpack": { + "optional": true + }, "typescript": { "optional": true }, @@ -4705,23 +3944,10 @@ } } }, - "node_modules/@react-router/dev/node_modules/jsesc": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.0.2.tgz", - "integrity": "sha512-xKqzzWXDttJuOcawBt4KnKHHIf5oQ/Cxax+0PWFG+DFDgHNAdi+TXECADI+RYiFUMmx8792xsMbbgXj4CwnP4g==", - "dev": true, - "license": "MIT", - "bin": { - "jsesc": "bin/jsesc" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/@react-router/node": { - "version": "7.9.3", - "resolved": "https://registry.npmjs.org/@react-router/node/-/node-7.9.3.tgz", - "integrity": "sha512-+OvWxPPUgouOshw85QlG0J6yFJM0GMCCpXqPj38IcveeFLlP7ppOAEkOi7RBFrDvg7vSUtCEBDnsbuDCvxUPJg==", + "version": "7.11.0", + "resolved": "https://registry.npmjs.org/@react-router/node/-/node-7.11.0.tgz", + "integrity": "sha512-11ha8EW+F7wTMmPz2pdi11LJxz2irtuksiCpunpZjtpPmYU37S+GGihG8vFeTa2xFPNunEaHNlfzKyzeYm570Q==", "dependencies": { "@mjackson/node-fetch-server": "^0.2.0" }, @@ -4729,7 +3955,7 @@ "node": ">=20.0.0" }, "peerDependencies": { - "react-router": "7.9.3", + "react-router": "7.11.0", "typescript": "^5.1.0" }, "peerDependenciesMeta": { @@ -4739,17 +3965,17 @@ } }, "node_modules/@react-router/serve": { - "version": "7.9.3", - "resolved": "https://registry.npmjs.org/@react-router/serve/-/serve-7.9.3.tgz", - "integrity": "sha512-wtiDLo4sY3ouADXPm1xa4eg79zRXP517E0QcuBKPfoKh/40IcANTqN11VeEKNA9QgNxLeCm4CSY3dPbqePuwkA==", + "version": "7.11.0", + "resolved": "https://registry.npmjs.org/@react-router/serve/-/serve-7.11.0.tgz", + "integrity": "sha512-U5Ht9PmUYF4Ti1ssaWlddLY4ZCbXBtHDGFU/u1h3VsHqleSdHsFuGAFrr/ZEuqTuEWp1CLqn2npEDAmlV9IUKQ==", "dependencies": { "@mjackson/node-fetch-server": "^0.2.0", - "@react-router/express": "7.9.3", - "@react-router/node": "7.9.3", - "compression": "^1.7.4", + "@react-router/express": "7.11.0", + "@react-router/node": "7.11.0", + "compression": "^1.8.1", "express": "^4.19.2", "get-port": "5.1.1", - "morgan": "^1.10.0", + "morgan": "^1.10.1", "source-map-support": "^0.5.21" }, "bin": { @@ -4759,22 +3985,22 @@ "node": ">=20.0.0" }, "peerDependencies": { - "react-router": "7.9.3" + "react-router": "7.11.0" } }, "node_modules/@react-router/serve/node_modules/@react-router/express": { - "version": "7.9.3", - "resolved": "https://registry.npmjs.org/@react-router/express/-/express-7.9.3.tgz", - "integrity": "sha512-XNVj/8AfecE1n61bXD41LqpXAixyWBpmBWkrzVA2iG+SrQOb+J6TjqZYEmZmoqJHuHmkOjt6/Iz1f81p93peGQ==", + "version": "7.11.0", + "resolved": "https://registry.npmjs.org/@react-router/express/-/express-7.11.0.tgz", + "integrity": "sha512-o5DeO9tqUrZcUWAgmPGgK4I/S6iFpqnj/e20xMGA04trk+90b9KAx9eqmRMgHERubVKANTM9gTDPduobQjeH1A==", "dependencies": { - "@react-router/node": "7.9.3" + "@react-router/node": "7.11.0" }, "engines": { "node": ">=20.0.0" }, "peerDependencies": { "express": "^4.17.1 || ^5", - "react-router": "7.9.3", + "react-router": "7.11.0", "typescript": "^5.1.0" }, "peerDependenciesMeta": { @@ -4787,7 +4013,6 @@ "version": "3.9.0", "resolved": "https://registry.npmjs.org/@react-stately/calendar/-/calendar-3.9.0.tgz", "integrity": 
"sha512-U5Nf2kx9gDhJRxdDUm5gjfyUlt/uUfOvM1vDW2UA62cA6+2k2cavMLc2wNlXOb/twFtl6p0joYKHG7T4xnEFkg==", - "license": "Apache-2.0", "dependencies": { "@internationalized/date": "^3.10.0", "@react-stately/utils": "^3.10.8", @@ -4803,7 +4028,6 @@ "version": "3.7.2", "resolved": "https://registry.npmjs.org/@react-stately/checkbox/-/checkbox-3.7.2.tgz", "integrity": "sha512-j1ycUVz5JmqhaL6mDZgDNZqBilOB8PBW096sDPFaTtuYreDx2HOd1igxiIvwlvPESZwsJP7FVM3mYnaoXtpKPA==", - "license": "Apache-2.0", "dependencies": { "@react-stately/form": "^3.2.2", "@react-stately/utils": "^3.10.8", @@ -4819,7 +4043,6 @@ "version": "3.12.8", "resolved": "https://registry.npmjs.org/@react-stately/collections/-/collections-3.12.8.tgz", "integrity": "sha512-AceJYLLXt1Y2XIcOPi6LEJSs4G/ubeYW3LqOCQbhfIgMaNqKfQMIfagDnPeJX9FVmPFSlgoCBxb1pTJW2vjCAQ==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" @@ -4832,7 +4055,6 @@ "version": "3.12.0", "resolved": "https://registry.npmjs.org/@react-stately/combobox/-/combobox-3.12.0.tgz", "integrity": "sha512-A6q9R/7cEa/qoQsBkdslXWvD7ztNLLQ9AhBhVN9QvzrmrH5B4ymUwcTU8lWl22ykH7RRwfonLeLXJL4C+/L2oQ==", - "license": "Apache-2.0", "dependencies": { "@react-stately/collections": "^3.12.8", "@react-stately/form": "^3.2.2", @@ -4851,7 +4073,6 @@ "version": "3.15.2", "resolved": "https://registry.npmjs.org/@react-stately/datepicker/-/datepicker-3.15.2.tgz", "integrity": "sha512-S5GL+W37chvV8knv9v0JRv0L6hKo732qqabCCHXzOpYxkLIkV4f/y3cHdEzFWzpZ0O0Gkg7WgeYo160xOdBKYg==", - "license": "Apache-2.0", "dependencies": { "@internationalized/date": "^3.10.0", "@internationalized/string": "^3.2.7", @@ -4870,7 +4091,6 @@ "version": "3.1.2", "resolved": "https://registry.npmjs.org/@react-stately/flags/-/flags-3.1.2.tgz", "integrity": "sha512-2HjFcZx1MyQXoPqcBGALwWWmgFVUk2TuKVIQxCbRq7fPyWXIl6VHcakCLurdtYC2Iks7zizvz0Idv48MQ38DWg==", - "license": "Apache-2.0", "dependencies": { "@swc/helpers": "^0.5.0" } @@ -4879,7 +4099,6 @@ "version": "3.2.2", "resolved": "https://registry.npmjs.org/@react-stately/form/-/form-3.2.2.tgz", "integrity": "sha512-soAheOd7oaTO6eNs6LXnfn0tTqvOoe3zN9FvtIhhrErKz9XPc5sUmh3QWwR45+zKbitOi1HOjfA/gifKhZcfWw==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" @@ -4892,7 +4111,6 @@ "version": "3.11.6", "resolved": "https://registry.npmjs.org/@react-stately/grid/-/grid-3.11.6.tgz", "integrity": "sha512-vWPAkzpeTIsrurHfMubzMuqEw7vKzFhIJeEK5sEcLunyr1rlADwTzeWrHNbPMl66NAIAi70Dr1yNq+kahQyvMA==", - "license": "Apache-2.0", "dependencies": { "@react-stately/collections": "^3.12.8", "@react-stately/selection": "^3.20.6", @@ -4908,7 +4126,6 @@ "version": "3.13.1", "resolved": "https://registry.npmjs.org/@react-stately/list/-/list-3.13.1.tgz", "integrity": "sha512-eHaoauh21twbcl0kkwULhVJ+CzYcy1jUjMikNVMHOQdhr4WIBdExf7PmSgKHKqsSPhpGg6IpTCY2dUX3RycjDg==", - "license": "Apache-2.0", "dependencies": { "@react-stately/collections": "^3.12.8", "@react-stately/selection": "^3.20.6", @@ -4924,7 +4141,6 @@ "version": "3.9.8", "resolved": "https://registry.npmjs.org/@react-stately/menu/-/menu-3.9.8.tgz", "integrity": "sha512-bo0NOhofnTHLESiYfsSSw6gyXiPVJJ0UlN2igUXtJk5PmyhWjFzUzTzcnd7B028OB0si9w3LIWM3stqz5271Eg==", - "license": "Apache-2.0", "dependencies": { "@react-stately/overlays": "^3.6.20", "@react-types/menu": "^3.10.5", @@ -4939,7 +4155,6 @@ "version": "3.10.2", "resolved": "https://registry.npmjs.org/@react-stately/numberfield/-/numberfield-3.10.2.tgz", "integrity": 
"sha512-jlKVFYaH3RX5KvQ7a+SAMQuPccZCzxLkeYkBE64u1Zvi7YhJ8hkTMHG/fmZMbk1rHlseE2wfBdk0Rlya3MvoNQ==", - "license": "Apache-2.0", "dependencies": { "@internationalized/number": "^3.6.5", "@react-stately/form": "^3.2.2", @@ -4955,7 +4170,6 @@ "version": "3.6.20", "resolved": "https://registry.npmjs.org/@react-stately/overlays/-/overlays-3.6.20.tgz", "integrity": "sha512-YAIe+uI8GUXX8F/0Pzr53YeC5c/bjqbzDFlV8NKfdlCPa6+Jp4B/IlYVjIooBj9+94QvbQdjylegvYWK/iPwlg==", - "license": "Apache-2.0", "dependencies": { "@react-stately/utils": "^3.10.8", "@react-types/overlays": "^3.9.2", @@ -4969,7 +4183,6 @@ "version": "3.11.2", "resolved": "https://registry.npmjs.org/@react-stately/radio/-/radio-3.11.2.tgz", "integrity": "sha512-UM7L6AW+k8edhSBUEPZAqiWNRNadfOKK7BrCXyBiG79zTz0zPcXRR+N+gzkDn7EMSawDeyK1SHYUuoSltTactg==", - "license": "Apache-2.0", "dependencies": { "@react-stately/form": "^3.2.2", "@react-stately/utils": "^3.10.8", @@ -4985,7 +4198,6 @@ "version": "3.20.6", "resolved": "https://registry.npmjs.org/@react-stately/selection/-/selection-3.20.6.tgz", "integrity": "sha512-a0bjuP2pJYPKEiedz2Us1W1aSz0iHRuyeQEdBOyL6Z6VUa6hIMq9H60kvseir2T85cOa4QggizuRV7mcO6bU5w==", - "license": "Apache-2.0", "dependencies": { "@react-stately/collections": "^3.12.8", "@react-stately/utils": "^3.10.8", @@ -5000,7 +4212,6 @@ "version": "3.7.2", "resolved": "https://registry.npmjs.org/@react-stately/slider/-/slider-3.7.2.tgz", "integrity": "sha512-EVBHUdUYwj++XqAEiQg2fGi8Reccznba0uyQ3gPejF0pAc390Q/J5aqiTEDfiCM7uJ6WHxTM6lcCqHQBISk2dQ==", - "license": "Apache-2.0", "dependencies": { "@react-stately/utils": "^3.10.8", "@react-types/shared": "^3.32.1", @@ -5015,7 +4226,6 @@ "version": "3.15.1", "resolved": "https://registry.npmjs.org/@react-stately/table/-/table-3.15.1.tgz", "integrity": "sha512-MhMAgE/LgAzHcAn1P3p/nQErzJ6DiixSJ1AOt2JlnAKEb5YJg4ATKWCb2IjBLwywt9ZCzfm3KMUzkctZqAoxwA==", - "license": "Apache-2.0", "dependencies": { "@react-stately/collections": "^3.12.8", "@react-stately/flags": "^3.1.2", @@ -5035,7 +4245,6 @@ "version": "3.8.6", "resolved": "https://registry.npmjs.org/@react-stately/tabs/-/tabs-3.8.6.tgz", "integrity": "sha512-9RYxmgjVIxUpIsGKPIF7uRoHWOEz8muwaYiStCVeyiYBPmarvZoIYtTXcwSMN/vEs7heVN5uGCL6/bfdY4+WiA==", - "license": "Apache-2.0", "dependencies": { "@react-stately/list": "^3.13.1", "@react-types/shared": "^3.32.1", @@ -5050,7 +4259,6 @@ "version": "3.1.2", "resolved": "https://registry.npmjs.org/@react-stately/toast/-/toast-3.1.2.tgz", "integrity": "sha512-HiInm7bck32khFBHZThTQaAF6e6/qm57F4mYRWdTq8IVeGDzpkbUYibnLxRhk0UZ5ybc6me+nqqPkG/lVmM42Q==", - "license": "Apache-2.0", "dependencies": { "@swc/helpers": "^0.5.0", "use-sync-external-store": "^1.4.0" @@ -5063,7 +4271,6 @@ "version": "3.9.2", "resolved": "https://registry.npmjs.org/@react-stately/toggle/-/toggle-3.9.2.tgz", "integrity": "sha512-dOxs9wrVXHUmA7lc8l+N9NbTJMAaXcYsnNGsMwfXIXQ3rdq+IjWGNYJ52UmNQyRYFcg0jrzRrU16TyGbNjOdNQ==", - "license": "Apache-2.0", "dependencies": { "@react-stately/utils": "^3.10.8", "@react-types/checkbox": "^3.10.2", @@ -5078,7 +4285,6 @@ "version": "3.5.8", "resolved": "https://registry.npmjs.org/@react-stately/tooltip/-/tooltip-3.5.8.tgz", "integrity": "sha512-gkcUx2ROhCiGNAYd2BaTejakXUUNLPnnoJ5+V/mN480pN+OrO8/2V9pqb/IQmpqxLsso93zkM3A4wFHHLBBmPQ==", - "license": "Apache-2.0", "dependencies": { "@react-stately/overlays": "^3.6.20", "@react-types/tooltip": "^3.4.21", @@ -5092,7 +4298,6 @@ "version": "3.9.3", "resolved": "https://registry.npmjs.org/@react-stately/tree/-/tree-3.9.3.tgz", "integrity": 
"sha512-ZngG79nLFxE/GYmpwX6E/Rma2MMkzdoJPRI3iWk3dgqnGMMzpPnUp/cvjDsU3UHF7xDVusC5BT6pjWN0uxCIFQ==", - "license": "Apache-2.0", "dependencies": { "@react-stately/collections": "^3.12.8", "@react-stately/selection": "^3.20.6", @@ -5108,7 +4313,6 @@ "version": "3.10.8", "resolved": "https://registry.npmjs.org/@react-stately/utils/-/utils-3.10.8.tgz", "integrity": "sha512-SN3/h7SzRsusVQjQ4v10LaVsDc81jyyR0DD5HnsQitm/I5WDpaSr2nRHtyloPFU48jlql1XX/S04T2DLQM7Y3g==", - "license": "Apache-2.0", "dependencies": { "@swc/helpers": "^0.5.0" }, @@ -5120,7 +4324,6 @@ "version": "4.4.4", "resolved": "https://registry.npmjs.org/@react-stately/virtualizer/-/virtualizer-4.4.4.tgz", "integrity": "sha512-ri8giqXSZOrznZDCCOE4U36wSkOhy+hrFK7yo/YVcpxTqqp3d3eisfKMqbDsgqBW+XTHycTU/xeAf0u9NqrfpQ==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" @@ -5134,7 +4337,6 @@ "version": "3.0.0-alpha.26", "resolved": "https://registry.npmjs.org/@react-types/accordion/-/accordion-3.0.0-alpha.26.tgz", "integrity": "sha512-OXf/kXcD2vFlEnkcZy/GG+a/1xO9BN7Uh3/5/Ceuj9z2E/WwD55YwU3GFM5zzkZ4+DMkdowHnZX37XnmbyD3Mg==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.27.0" }, @@ -5146,7 +4348,6 @@ "version": "3.7.17", "resolved": "https://registry.npmjs.org/@react-types/breadcrumbs/-/breadcrumbs-3.7.17.tgz", "integrity": "sha512-IhvVTcfli5o/UDlGACXxjlor2afGlMQA8pNR3faH0bBUay1Fmm3IWktVw9Xwmk+KraV2RTAg9e+E6p8DOQZfiw==", - "license": "Apache-2.0", "dependencies": { "@react-types/link": "^3.6.5", "@react-types/shared": "^3.32.1" @@ -5159,7 +4360,6 @@ "version": "3.14.1", "resolved": "https://registry.npmjs.org/@react-types/button/-/button-3.14.1.tgz", "integrity": "sha512-D8C4IEwKB7zEtiWYVJ3WE/5HDcWlze9mLWQ5hfsBfpePyWCgO3bT/+wjb/7pJvcAocrkXo90QrMm85LcpBtrpg==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -5171,7 +4371,6 @@ "version": "3.8.0", "resolved": "https://registry.npmjs.org/@react-types/calendar/-/calendar-3.8.0.tgz", "integrity": "sha512-ZDZgfZgbz1ydWOFs1mH7QFfX3ioJrmb3Y/lkoubQE0HWXLZzyYNvhhKyFJRS1QJ40IofLSBHriwbQb/tsUnGlw==", - "license": "Apache-2.0", "dependencies": { "@internationalized/date": "^3.10.0", "@react-types/shared": "^3.32.1" @@ -5184,7 +4383,6 @@ "version": "3.10.2", "resolved": "https://registry.npmjs.org/@react-types/checkbox/-/checkbox-3.10.2.tgz", "integrity": "sha512-ktPkl6ZfIdGS1tIaGSU/2S5Agf2NvXI9qAgtdMDNva0oLyAZ4RLQb6WecPvofw1J7YKXu0VA5Mu7nlX+FM2weQ==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -5196,7 +4394,6 @@ "version": "3.13.9", "resolved": "https://registry.npmjs.org/@react-types/combobox/-/combobox-3.13.9.tgz", "integrity": "sha512-G6GmLbzVkLW6VScxPAr/RtliEyPhBClfYaIllK1IZv+Z42SVnOpKzhnoe79BpmiFqy1AaC3+LjZX783mrsHCwA==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -5208,7 +4405,6 @@ "version": "3.13.2", "resolved": "https://registry.npmjs.org/@react-types/datepicker/-/datepicker-3.13.2.tgz", "integrity": "sha512-+M6UZxJnejYY8kz0spbY/hP08QJ5rsZ3aNarRQQHc48xV2oelFLX5MhAqizfLEsvyfb0JYrhWoh4z1xZtAmYCg==", - "license": "Apache-2.0", "dependencies": { "@internationalized/date": "^3.10.0", "@react-types/calendar": "^3.8.0", @@ -5223,7 +4419,6 @@ "version": "3.5.22", "resolved": "https://registry.npmjs.org/@react-types/dialog/-/dialog-3.5.22.tgz", "integrity": "sha512-smSvzOcqKE196rWk0oqJDnz+ox5JM5+OT0PmmJXiUD4q7P5g32O6W5Bg7hMIFUI9clBtngo8kLaX2iMg+GqAzg==", - "license": "Apache-2.0", "dependencies": { 
"@react-types/overlays": "^3.9.2", "@react-types/shared": "^3.32.1" @@ -5236,7 +4431,6 @@ "version": "3.7.16", "resolved": "https://registry.npmjs.org/@react-types/form/-/form-3.7.16.tgz", "integrity": "sha512-Sb7KJoWEaQ/e4XIY+xRbjKvbP1luome98ZXevpD+zVSyGjEcfIroebizP6K1yMHCWP/043xH6GUkgEqWPoVGjg==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -5248,7 +4442,6 @@ "version": "3.3.6", "resolved": "https://registry.npmjs.org/@react-types/grid/-/grid-3.3.6.tgz", "integrity": "sha512-vIZJlYTii2n1We9nAugXwM2wpcpsC6JigJFBd6vGhStRdRWRoU4yv1Gc98Usbx0FQ/J7GLVIgeG8+1VMTKBdxw==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -5260,7 +4453,6 @@ "version": "3.6.5", "resolved": "https://registry.npmjs.org/@react-types/link/-/link-3.6.5.tgz", "integrity": "sha512-+I2s3XWBEvLrzts0GnNeA84mUkwo+a7kLUWoaJkW0TOBDG7my95HFYxF9WnqKye7NgpOkCqz4s3oW96xPdIniQ==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -5272,7 +4464,6 @@ "version": "3.7.4", "resolved": "https://registry.npmjs.org/@react-types/listbox/-/listbox-3.7.4.tgz", "integrity": "sha512-p4YEpTl/VQGrqVE8GIfqTS5LkT5jtjDTbVeZgrkPnX/fiPhsfbTPiZ6g0FNap4+aOGJFGEEZUv2q4vx+rCORww==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -5284,7 +4475,6 @@ "version": "3.10.5", "resolved": "https://registry.npmjs.org/@react-types/menu/-/menu-3.10.5.tgz", "integrity": "sha512-HBTrKll2hm0VKJNM4ubIv1L9MNo8JuOnm2G3M+wXvb6EYIyDNxxJkhjsqsGpUXJdAOSkacHBDcNh2HsZABNX4A==", - "license": "Apache-2.0", "dependencies": { "@react-types/overlays": "^3.9.2", "@react-types/shared": "^3.32.1" @@ -5297,7 +4487,6 @@ "version": "3.8.15", "resolved": "https://registry.npmjs.org/@react-types/numberfield/-/numberfield-3.8.15.tgz", "integrity": "sha512-97r92D23GKCOjGIGMeW9nt+/KlfM3GeWH39Czcmd2/D5y3k6z4j0avbsfx2OttCtJszrnENjw3GraYGYI2KosQ==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -5309,7 +4498,6 @@ "version": "3.9.2", "resolved": "https://registry.npmjs.org/@react-types/overlays/-/overlays-3.9.2.tgz", "integrity": "sha512-Q0cRPcBGzNGmC8dBuHyoPR7N3057KTS5g+vZfQ53k8WwmilXBtemFJPLsogJbspuewQ/QJ3o2HYsp2pne7/iNw==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -5321,7 +4509,6 @@ "version": "3.5.16", "resolved": "https://registry.npmjs.org/@react-types/progress/-/progress-3.5.16.tgz", "integrity": "sha512-I9tSdCFfvQ7gHJtm90VAKgwdTWXQgVNvLRStEc0z9h+bXBxdvZb+QuiRPERChwFQ9VkK4p4rDqaFo69nDqWkpw==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -5333,7 +4520,6 @@ "version": "3.9.2", "resolved": "https://registry.npmjs.org/@react-types/radio/-/radio-3.9.2.tgz", "integrity": "sha512-3UcJXu37JrTkRyP4GJPDBU7NmDTInrEdOe+bVzA1j4EegzdkJmLBkLg5cLDAbpiEHB+xIsvbJdx6dxeMuc+H3g==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -5345,7 +4531,6 @@ "version": "3.32.1", "resolved": "https://registry.npmjs.org/@react-types/shared/-/shared-3.32.1.tgz", "integrity": "sha512-famxyD5emrGGpFuUlgOP6fVW2h/ZaF405G5KDi3zPHzyjAWys/8W6NAVJtNbkCkhedmvL0xOhvt8feGXyXaw5w==", - "license": "Apache-2.0", "peerDependencies": { "react": "^16.8.0 || ^17.0.0-rc.1 || ^18.0.0 || ^19.0.0-rc.1" } @@ -5354,7 +4539,6 @@ "version": "3.8.2", "resolved": "https://registry.npmjs.org/@react-types/slider/-/slider-3.8.2.tgz", "integrity": "sha512-MQYZP76OEOYe7/yA2To+Dl0LNb0cKKnvh5JtvNvDnAvEprn1RuLiay8Oi/rTtXmc2KmBa4VdTcsXsmkbbkeN2Q==", - "license": 
"Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -5366,7 +4550,6 @@ "version": "3.5.15", "resolved": "https://registry.npmjs.org/@react-types/switch/-/switch-3.5.15.tgz", "integrity": "sha512-r/ouGWQmIeHyYSP1e5luET+oiR7N7cLrAlWsrAfYRWHxqXOSNQloQnZJ3PLHrKFT02fsrQhx2rHaK2LfKeyN3A==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -5378,7 +4561,6 @@ "version": "3.13.4", "resolved": "https://registry.npmjs.org/@react-types/table/-/table-3.13.4.tgz", "integrity": "sha512-I/DYiZQl6aNbMmjk90J9SOhkzVDZvyA3Vn3wMWCiajkMNjvubFhTfda5DDf2SgFP5l0Yh6TGGH5XumRv9LqL5Q==", - "license": "Apache-2.0", "dependencies": { "@react-types/grid": "^3.3.6", "@react-types/shared": "^3.32.1" @@ -5391,7 +4573,6 @@ "version": "3.3.19", "resolved": "https://registry.npmjs.org/@react-types/tabs/-/tabs-3.3.19.tgz", "integrity": "sha512-fE+qI43yR5pAMpeqPxGqQq9jDHXEPqXskuxNHERMW0PYMdPyem2Cw6goc5F4qeZO3Hf6uPZgHkvJz2OAq7TbBw==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -5403,7 +4584,6 @@ "version": "3.12.6", "resolved": "https://registry.npmjs.org/@react-types/textfield/-/textfield-3.12.6.tgz", "integrity": "sha512-hpEVKE+M3uUkTjw2WrX1NrH/B3rqDJFUa+ViNK2eVranLY4ZwFqbqaYXSzHupOF3ecSjJJv2C103JrwFvx6TPQ==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -5415,7 +4595,6 @@ "version": "3.4.21", "resolved": "https://registry.npmjs.org/@react-types/tooltip/-/tooltip-3.4.21.tgz", "integrity": "sha512-ugGHOZU6WbOdeTdbjnaEc+Ms7/WhsUCg+T3PCOIeOT9FG02Ce189yJ/+hd7oqL/tVwIhEMYJIqSCgSELFox+QA==", - "license": "Apache-2.0", "dependencies": { "@react-types/overlays": "^3.9.2", "@react-types/shared": "^3.32.1" @@ -5428,12 +4607,8 @@ "version": "0.9.0", "resolved": "https://registry.npmjs.org/@remix-run/node-fetch-server/-/node-fetch-server-0.9.0.tgz", "integrity": "sha512-SoLMv7dbH+njWzXnOY6fI08dFMI5+/dQ+vY3n8RnnbdG7MdJEgiP28Xj/xWlnRnED/aB6SFw56Zop+LbmaaKqA==", - "dev": true - }, - "node_modules/@rolldown/pluginutils": { - "version": "1.0.0-beta.38", - "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.38.tgz", - "integrity": "sha512-N/ICGKleNhA5nc9XXQG/kkKHJ7S55u0x0XUJbbkmdCnFuoRkM1Il12q9q0eX19+M7KKUEPw/daUPIRnxhcxAIw==" + "dev": true, + "license": "MIT" }, "node_modules/@rollup/pluginutils": { "version": "5.3.0", @@ -5479,9 +4654,9 @@ } }, "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.50.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.50.0.tgz", - "integrity": "sha512-lVgpeQyy4fWN5QYebtW4buT/4kn4p4IJ+kDNB4uYNT5b8c8DLJDg6titg20NIg7E8RWwdWZORW6vUFfrLyG3KQ==", + "version": "4.53.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.53.3.tgz", + "integrity": "sha512-mRSi+4cBjrRLoaal2PnqH82Wqyb+d3HsPUN/W+WslCXsZsyHa9ZeQQX/pQsZaVIWDkPcpV6jJ+3KLbTbgnwv8w==", "cpu": [ "arm" ], @@ -5492,9 +4667,9 @@ ] }, "node_modules/@rollup/rollup-android-arm64": { - "version": "4.50.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.50.0.tgz", - "integrity": "sha512-2O73dR4Dc9bp+wSYhviP6sDziurB5/HCym7xILKifWdE9UsOe2FtNcM+I4xZjKrfLJnq5UR8k9riB87gauiQtw==", + "version": "4.53.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.53.3.tgz", + "integrity": "sha512-CbDGaMpdE9sh7sCmTrTUyllhrg65t6SwhjlMJsLr+J8YjFuPmCEjbBSx4Z/e4SmDyH3aB5hGaJUP2ltV/vcs4w==", "cpu": [ "arm64" ], @@ 
-5505,9 +4680,9 @@ ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.50.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.50.0.tgz", - "integrity": "sha512-vwSXQN8T4sKf1RHr1F0s98Pf8UPz7pS6P3LG9NSmuw0TVh7EmaE+5Ny7hJOZ0M2yuTctEsHHRTMi2wuHkdS6Hg==", + "version": "4.53.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.53.3.tgz", + "integrity": "sha512-Nr7SlQeqIBpOV6BHHGZgYBuSdanCXuw09hon14MGOLGmXAFYjx1wNvquVPmpZnl0tLjg25dEdr4IQ6GgyToCUA==", "cpu": [ "arm64" ], @@ -5518,9 +4693,9 @@ ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.50.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.50.0.tgz", - "integrity": "sha512-cQp/WG8HE7BCGyFVuzUg0FNmupxC+EPZEwWu2FCGGw5WDT1o2/YlENbm5e9SMvfDFR6FRhVCBePLqj0o8MN7Vw==", + "version": "4.53.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.53.3.tgz", + "integrity": "sha512-DZ8N4CSNfl965CmPktJ8oBnfYr3F8dTTNBQkRlffnUarJ2ohudQD17sZBa097J8xhQ26AwhHJ5mvUyQW8ddTsQ==", "cpu": [ "x64" ], @@ -5531,9 +4706,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.50.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.50.0.tgz", - "integrity": "sha512-UR1uTJFU/p801DvvBbtDD7z9mQL8J80xB0bR7DqW7UGQHRm/OaKzp4is7sQSdbt2pjjSS72eAtRh43hNduTnnQ==", + "version": "4.53.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.53.3.tgz", + "integrity": "sha512-yMTrCrK92aGyi7GuDNtGn2sNW+Gdb4vErx4t3Gv/Tr+1zRb8ax4z8GWVRfr3Jw8zJWvpGHNpss3vVlbF58DZ4w==", "cpu": [ "arm64" ], @@ -5544,9 +4719,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.50.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.50.0.tgz", - "integrity": "sha512-G/DKyS6PK0dD0+VEzH/6n/hWDNPDZSMBmqsElWnCRGrYOb2jC0VSupp7UAHHQ4+QILwkxSMaYIbQ72dktp8pKA==", + "version": "4.53.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.53.3.tgz", + "integrity": "sha512-lMfF8X7QhdQzseM6XaX0vbno2m3hlyZFhwcndRMw8fbAGUGL3WFMBdK0hbUBIUYcEcMhVLr1SIamDeuLBnXS+Q==", "cpu": [ "x64" ], @@ -5557,9 +4732,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.50.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.50.0.tgz", - "integrity": "sha512-u72Mzc6jyJwKjJbZZcIYmd9bumJu7KNmHYdue43vT1rXPm2rITwmPWF0mmPzLm9/vJWxIRbao/jrQmxTO0Sm9w==", + "version": "4.53.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.53.3.tgz", + "integrity": "sha512-k9oD15soC/Ln6d2Wv/JOFPzZXIAIFLp6B+i14KhxAfnq76ajt0EhYc5YPeX6W1xJkAdItcVT+JhKl1QZh44/qw==", "cpu": [ "arm" ], @@ -5570,9 +4745,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.50.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.50.0.tgz", - "integrity": "sha512-S4UefYdV0tnynDJV1mdkNawp0E5Qm2MtSs330IyHgaccOFrwqsvgigUD29uT+B/70PDY1eQ3t40+xf6wIvXJyg==", + "version": "4.53.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.53.3.tgz", + "integrity": "sha512-vTNlKq+N6CK/8UktsrFuc+/7NlEYVxgaEgRXVUVK258Z5ymho29skzW1sutgYjqNnquGwVUObAaxae8rZ6YMhg==", "cpu": [ "arm" ], @@ -5583,9 +4758,9 @@ ] }, 
"node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.50.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.50.0.tgz", - "integrity": "sha512-1EhkSvUQXJsIhk4msxP5nNAUWoB4MFDHhtc4gAYvnqoHlaL9V3F37pNHabndawsfy/Tp7BPiy/aSa6XBYbaD1g==", + "version": "4.53.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.53.3.tgz", + "integrity": "sha512-RGrFLWgMhSxRs/EWJMIFM1O5Mzuz3Xy3/mnxJp/5cVhZ2XoCAxJnmNsEyeMJtpK+wu0FJFWz+QF4mjCA7AUQ3w==", "cpu": [ "arm64" ], @@ -5596,9 +4771,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.50.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.50.0.tgz", - "integrity": "sha512-EtBDIZuDtVg75xIPIK1l5vCXNNCIRM0OBPUG+tbApDuJAy9mKago6QxX+tfMzbCI6tXEhMuZuN1+CU8iDW+0UQ==", + "version": "4.53.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.53.3.tgz", + "integrity": "sha512-kASyvfBEWYPEwe0Qv4nfu6pNkITLTb32p4yTgzFCocHnJLAHs+9LjUu9ONIhvfT/5lv4YS5muBHyuV84epBo/A==", "cpu": [ "arm64" ], @@ -5608,10 +4783,10 @@ "linux" ] }, - "node_modules/@rollup/rollup-linux-loongarch64-gnu": { - "version": "4.50.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.50.0.tgz", - "integrity": "sha512-BGYSwJdMP0hT5CCmljuSNx7+k+0upweM2M4YGfFBjnFSZMHOLYR0gEEj/dxyYJ6Zc6AiSeaBY8dWOa11GF/ppQ==", + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.53.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.53.3.tgz", + "integrity": "sha512-JiuKcp2teLJwQ7vkJ95EwESWkNRFJD7TQgYmCnrPtlu50b4XvT5MOmurWNrCj3IFdyjBQ5p9vnrX4JM6I8OE7g==", "cpu": [ "loong64" ], @@ -5622,9 +4797,9 @@ ] }, "node_modules/@rollup/rollup-linux-ppc64-gnu": { - "version": "4.50.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.50.0.tgz", - "integrity": "sha512-I1gSMzkVe1KzAxKAroCJL30hA4DqSi+wGc5gviD0y3IL/VkvcnAqwBf4RHXHyvH66YVHxpKO8ojrgc4SrWAnLg==", + "version": "4.53.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.53.3.tgz", + "integrity": "sha512-EoGSa8nd6d3T7zLuqdojxC20oBfNT8nexBbB/rkxgKj5T5vhpAQKKnD+h3UkoMuTyXkP5jTjK/ccNRmQrPNDuw==", "cpu": [ "ppc64" ], @@ -5635,9 +4810,9 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.50.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.50.0.tgz", - "integrity": "sha512-bSbWlY3jZo7molh4tc5dKfeSxkqnf48UsLqYbUhnkdnfgZjgufLS/NTA8PcP/dnvct5CCdNkABJ56CbclMRYCA==", + "version": "4.53.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.53.3.tgz", + "integrity": "sha512-4s+Wped2IHXHPnAEbIB0YWBv7SDohqxobiiPA1FIWZpX+w9o2i4LezzH/NkFUl8LRci/8udci6cLq+jJQlh+0g==", "cpu": [ "riscv64" ], @@ -5648,9 +4823,9 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-musl": { - "version": "4.50.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.50.0.tgz", - "integrity": "sha512-LSXSGumSURzEQLT2e4sFqFOv3LWZsEF8FK7AAv9zHZNDdMnUPYH3t8ZlaeYYZyTXnsob3htwTKeWtBIkPV27iQ==", + "version": "4.53.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.53.3.tgz", + "integrity": 
"sha512-68k2g7+0vs2u9CxDt5ktXTngsxOQkSEV/xBbwlqYcUrAVh6P9EgMZvFsnHy4SEiUl46Xf0IObWVbMvPrr2gw8A==", "cpu": [ "riscv64" ], @@ -5661,9 +4836,9 @@ ] }, "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.50.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.50.0.tgz", - "integrity": "sha512-CxRKyakfDrsLXiCyucVfVWVoaPA4oFSpPpDwlMcDFQvrv3XY6KEzMtMZrA+e/goC8xxp2WSOxHQubP8fPmmjOQ==", + "version": "4.53.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.53.3.tgz", + "integrity": "sha512-VYsFMpULAz87ZW6BVYw3I6sWesGpsP9OPcyKe8ofdg9LHxSbRMd7zrVrr5xi/3kMZtpWL/wC+UIJWJYVX5uTKg==", "cpu": [ "s390x" ], @@ -5674,9 +4849,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.50.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.50.0.tgz", - "integrity": "sha512-8PrJJA7/VU8ToHVEPu14FzuSAqVKyo5gg/J8xUerMbyNkWkO9j2ExBho/68RnJsMGNJq4zH114iAttgm7BZVkA==", + "version": "4.53.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.53.3.tgz", + "integrity": "sha512-3EhFi1FU6YL8HTUJZ51imGJWEX//ajQPfqWLI3BQq4TlvHy4X0MOr5q3D2Zof/ka0d5FNdPwZXm3Yyib/UEd+w==", "cpu": [ "x64" ], @@ -5687,9 +4862,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.50.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.50.0.tgz", - "integrity": "sha512-SkE6YQp+CzpyOrbw7Oc4MgXFvTw2UIBElvAvLCo230pyxOLmYwRPwZ/L5lBe/VW/qT1ZgND9wJfOsdy0XptRvw==", + "version": "4.53.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.53.3.tgz", + "integrity": "sha512-eoROhjcc6HbZCJr+tvVT8X4fW3/5g/WkGvvmwz/88sDtSJzO7r/blvoBDgISDiCjDRZmHpwud7h+6Q9JxFwq1Q==", "cpu": [ "x64" ], @@ -5700,9 +4875,9 @@ ] }, "node_modules/@rollup/rollup-openharmony-arm64": { - "version": "4.50.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.50.0.tgz", - "integrity": "sha512-PZkNLPfvXeIOgJWA804zjSFH7fARBBCpCXxgkGDRjjAhRLOR8o0IGS01ykh5GYfod4c2yiiREuDM8iZ+pVsT+Q==", + "version": "4.53.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.53.3.tgz", + "integrity": "sha512-OueLAWgrNSPGAdUdIjSWXw+u/02BRTcnfw9PN41D2vq/JSEPnJnVuBgw18VkN8wcd4fjUs+jFHVM4t9+kBSNLw==", "cpu": [ "arm64" ], @@ -5713,9 +4888,9 @@ ] }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.50.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.50.0.tgz", - "integrity": "sha512-q7cIIdFvWQoaCbLDUyUc8YfR3Jh2xx3unO8Dn6/TTogKjfwrax9SyfmGGK6cQhKtjePI7jRfd7iRYcxYs93esg==", + "version": "4.53.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.53.3.tgz", + "integrity": "sha512-GOFuKpsxR/whszbF/bzydebLiXIHSgsEUp6M0JI8dWvi+fFa1TD6YQa4aSZHtpmh2/uAlj/Dy+nmby3TJ3pkTw==", "cpu": [ "arm64" ], @@ -5726,9 +4901,9 @@ ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.50.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.50.0.tgz", - "integrity": "sha512-XzNOVg/YnDOmFdDKcxxK410PrcbcqZkBmz+0FicpW5jtjKQxcW1BZJEQOF0NJa6JO7CZhett8GEtRN/wYLYJuw==", + "version": "4.53.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.53.3.tgz", + "integrity": 
"sha512-iah+THLcBJdpfZ1TstDFbKNznlzoxa8fmnFYK4V67HvmuNYkVdAywJSoteUszvBQ9/HqN2+9AZghbajMsFT+oA==", "cpu": [ "ia32" ], @@ -5738,10 +4913,23 @@ "win32" ] }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.53.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.53.3.tgz", + "integrity": "sha512-J9QDiOIZlZLdcot5NXEepDkstocktoVjkaKUtqzgzpt2yWjGlbYiKyp05rWwk4nypbYUNoFAztEgixoLaSETkg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.50.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.50.0.tgz", - "integrity": "sha512-xMmiWRR8sp72Zqwjgtf3QbZfF1wdh8X2ABu3EaozvZcyHJeU0r+XAnXdKgs4cCAp6ORoYoCygipYP1mjmbjrsg==", + "version": "4.53.3", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.53.3.tgz", + "integrity": "sha512-UhTd8u31dXadv0MopwGgNOBpUVROFKWVQgAg5N1ESyCz8AuBcMqm4AuTjrwgQKGDfoFuz02EuMRHQIw/frmYKQ==", "cpu": [ "x64" ], @@ -5764,27 +4952,11 @@ "integrity": "sha512-9BCxFwvbGg/RsZK9tjXd8s4UcwR0MWeFQ1XEKIQVVvAGJyINdrqKMcTRyLoK8Rse1GjzLV9cwjWV1olXRWEXVA==", "license": "MIT" }, - "node_modules/@stripe/react-stripe-js": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/@stripe/react-stripe-js/-/react-stripe-js-4.0.2.tgz", - "integrity": "sha512-l2wau+8/LOlHl+Sz8wQ1oDuLJvyw51nQCsu6/ljT6smqzTszcMHifjAJoXlnMfcou3+jK/kQyVe04u/ufyTXgg==", - "dependencies": { - "prop-types": "^15.7.2" - }, - "peerDependencies": { - "@stripe/stripe-js": ">=1.44.1 <8.0.0", - "react": ">=16.8.0 <20.0.0", - "react-dom": ">=16.8.0 <20.0.0" - } - }, - "node_modules/@stripe/stripe-js": { - "version": "7.9.0", - "resolved": "https://registry.npmjs.org/@stripe/stripe-js/-/stripe-js-7.9.0.tgz", - "integrity": "sha512-ggs5k+/0FUJcIgNY08aZTqpBTtbExkJMYMLSMwyucrhtWexVOEY1KJmhBsxf+E/Q15f5rbwBpj+t0t2AW2oCsQ==", - "license": "MIT", - "engines": { - "node": ">=12.16" - } + "node_modules/@standard-schema/spec": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.1.0.tgz", + "integrity": "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==", + "dev": true }, "node_modules/@svgr/babel-plugin-add-jsx-attribute": { "version": "8.0.0", @@ -5954,47 +5126,21 @@ "resolved": "https://registry.npmjs.org/@svgr/core/-/core-8.1.0.tgz", "integrity": "sha512-8QqtOQT5ACVlmsvKOJNEaWmRPmcojMOzCz4Hs2BGG/toAp/K38LcsMRyLp349glq5AzJbCEeimEoxaX6v/fLrA==", "dev": true, - "license": "MIT", - "dependencies": { - "@babel/core": "^7.21.3", - "@svgr/babel-preset": "8.1.0", - "camelcase": "^6.2.0", - "cosmiconfig": "^8.1.3", - "snake-case": "^3.0.4" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - } - }, - "node_modules/@svgr/core/node_modules/cosmiconfig": { - "version": "8.3.6", - "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.3.6.tgz", - "integrity": "sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA==", - "dev": true, - "license": "MIT", - "dependencies": { - "import-fresh": "^3.3.0", - "js-yaml": "^4.1.0", - "parse-json": "^5.2.0", - "path-type": "^4.0.0" + "license": "MIT", + "peer": true, + "dependencies": { + "@babel/core": "^7.21.3", + "@svgr/babel-preset": "8.1.0", + "camelcase": "^6.2.0", + "cosmiconfig": "^8.1.3", + "snake-case": "^3.0.4" }, 
"engines": { "node": ">=14" }, "funding": { - "url": "https://github.com/sponsors/d-fischer" - }, - "peerDependencies": { - "typescript": ">=4.9.5" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } + "type": "github", + "url": "https://github.com/sponsors/gregberge" } }, "node_modules/@svgr/hast-util-to-babel-ast": { @@ -6055,62 +5201,53 @@ "version": "0.5.17", "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.17.tgz", "integrity": "sha512-5IKx/Y13RsYd+sauPb2x+U/xZikHjolzfuDgTAl/Tdf3Q8rslRvC19NKDLgAJQ6wsqADk10ntlv08nPFw/gO/A==", - "license": "Apache-2.0", "dependencies": { "tslib": "^2.8.0" } }, "node_modules/@tailwindcss/node": { - "version": "4.1.13", - "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.1.13.tgz", - "integrity": "sha512-eq3ouolC1oEFOAvOMOBAmfCIqZBJuvWvvYWh5h5iOYfe1HFC6+GZ6EIL0JdM3/niGRJmnrOc+8gl9/HGUaaptw==", - "license": "MIT", + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.1.18.tgz", + "integrity": "sha512-DoR7U1P7iYhw16qJ49fgXUlry1t4CpXeErJHnQ44JgTSKMaZUdf17cfn5mHchfJ4KRBZRFA/Coo+MUF5+gOaCQ==", "dependencies": { "@jridgewell/remapping": "^2.3.4", "enhanced-resolve": "^5.18.3", - "jiti": "^2.5.1", - "lightningcss": "1.30.1", - "magic-string": "^0.30.18", + "jiti": "^2.6.1", + "lightningcss": "1.30.2", + "magic-string": "^0.30.21", "source-map-js": "^1.2.1", - "tailwindcss": "4.1.13" + "tailwindcss": "4.1.18" } }, "node_modules/@tailwindcss/oxide": { - "version": "4.1.13", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.1.13.tgz", - "integrity": "sha512-CPgsM1IpGRa880sMbYmG1s4xhAy3xEt1QULgTJGQmZUeNgXFR7s1YxYygmJyBGtou4SyEosGAGEeYqY7R53bIA==", - "hasInstallScript": true, - "license": "MIT", - "dependencies": { - "detect-libc": "^2.0.4", - "tar": "^7.4.3" - }, + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.1.18.tgz", + "integrity": "sha512-EgCR5tTS5bUSKQgzeMClT6iCY3ToqE1y+ZB0AKldj809QXk1Y+3jB0upOYZrn9aGIzPtUsP7sX4QQ4XtjBB95A==", "engines": { "node": ">= 10" }, "optionalDependencies": { - "@tailwindcss/oxide-android-arm64": "4.1.13", - "@tailwindcss/oxide-darwin-arm64": "4.1.13", - "@tailwindcss/oxide-darwin-x64": "4.1.13", - "@tailwindcss/oxide-freebsd-x64": "4.1.13", - "@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.13", - "@tailwindcss/oxide-linux-arm64-gnu": "4.1.13", - "@tailwindcss/oxide-linux-arm64-musl": "4.1.13", - "@tailwindcss/oxide-linux-x64-gnu": "4.1.13", - "@tailwindcss/oxide-linux-x64-musl": "4.1.13", - "@tailwindcss/oxide-wasm32-wasi": "4.1.13", - "@tailwindcss/oxide-win32-arm64-msvc": "4.1.13", - "@tailwindcss/oxide-win32-x64-msvc": "4.1.13" + "@tailwindcss/oxide-android-arm64": "4.1.18", + "@tailwindcss/oxide-darwin-arm64": "4.1.18", + "@tailwindcss/oxide-darwin-x64": "4.1.18", + "@tailwindcss/oxide-freebsd-x64": "4.1.18", + "@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.18", + "@tailwindcss/oxide-linux-arm64-gnu": "4.1.18", + "@tailwindcss/oxide-linux-arm64-musl": "4.1.18", + "@tailwindcss/oxide-linux-x64-gnu": "4.1.18", + "@tailwindcss/oxide-linux-x64-musl": "4.1.18", + "@tailwindcss/oxide-wasm32-wasi": "4.1.18", + "@tailwindcss/oxide-win32-arm64-msvc": "4.1.18", + "@tailwindcss/oxide-win32-x64-msvc": "4.1.18" } }, "node_modules/@tailwindcss/oxide-android-arm64": { - "version": "4.1.13", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-android-arm64/-/oxide-android-arm64-4.1.13.tgz", - "integrity": 
"sha512-BrpTrVYyejbgGo57yc8ieE+D6VT9GOgnNdmh5Sac6+t0m+v+sKQevpFVpwX3pBrM2qKrQwJ0c5eDbtjouY/+ew==", + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-android-arm64/-/oxide-android-arm64-4.1.18.tgz", + "integrity": "sha512-dJHz7+Ugr9U/diKJA0W6N/6/cjI+ZTAoxPf9Iz9BFRF2GzEX8IvXxFIi/dZBloVJX/MZGvRuFA9rqwdiIEZQ0Q==", "cpu": [ "arm64" ], - "license": "MIT", "optional": true, "os": [ "android" @@ -6120,13 +5257,12 @@ } }, "node_modules/@tailwindcss/oxide-darwin-arm64": { - "version": "4.1.13", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.1.13.tgz", - "integrity": "sha512-YP+Jksc4U0KHcu76UhRDHq9bx4qtBftp9ShK/7UGfq0wpaP96YVnnjFnj3ZFrUAjc5iECzODl/Ts0AN7ZPOANQ==", + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.1.18.tgz", + "integrity": "sha512-Gc2q4Qhs660bhjyBSKgq6BYvwDz4G+BuyJ5H1xfhmDR3D8HnHCmT/BSkvSL0vQLy/nkMLY20PQ2OoYMO15Jd0A==", "cpu": [ "arm64" ], - "license": "MIT", "optional": true, "os": [ "darwin" @@ -6136,13 +5272,12 @@ } }, "node_modules/@tailwindcss/oxide-darwin-x64": { - "version": "4.1.13", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-x64/-/oxide-darwin-x64-4.1.13.tgz", - "integrity": "sha512-aAJ3bbwrn/PQHDxCto9sxwQfT30PzyYJFG0u/BWZGeVXi5Hx6uuUOQEI2Fa43qvmUjTRQNZnGqe9t0Zntexeuw==", + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-x64/-/oxide-darwin-x64-4.1.18.tgz", + "integrity": "sha512-FL5oxr2xQsFrc3X9o1fjHKBYBMD1QZNyc1Xzw/h5Qu4XnEBi3dZn96HcHm41c/euGV+GRiXFfh2hUCyKi/e+yw==", "cpu": [ "x64" ], - "license": "MIT", "optional": true, "os": [ "darwin" @@ -6152,13 +5287,12 @@ } }, "node_modules/@tailwindcss/oxide-freebsd-x64": { - "version": "4.1.13", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-freebsd-x64/-/oxide-freebsd-x64-4.1.13.tgz", - "integrity": "sha512-Wt8KvASHwSXhKE/dJLCCWcTSVmBj3xhVhp/aF3RpAhGeZ3sVo7+NTfgiN8Vey/Fi8prRClDs6/f0KXPDTZE6nQ==", + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-freebsd-x64/-/oxide-freebsd-x64-4.1.18.tgz", + "integrity": "sha512-Fj+RHgu5bDodmV1dM9yAxlfJwkkWvLiRjbhuO2LEtwtlYlBgiAT4x/j5wQr1tC3SANAgD+0YcmWVrj8R9trVMA==", "cpu": [ "x64" ], - "license": "MIT", "optional": true, "os": [ "freebsd" @@ -6168,13 +5302,12 @@ } }, "node_modules/@tailwindcss/oxide-linux-arm-gnueabihf": { - "version": "4.1.13", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm-gnueabihf/-/oxide-linux-arm-gnueabihf-4.1.13.tgz", - "integrity": "sha512-mbVbcAsW3Gkm2MGwA93eLtWrwajz91aXZCNSkGTx/R5eb6KpKD5q8Ueckkh9YNboU8RH7jiv+ol/I7ZyQ9H7Bw==", + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm-gnueabihf/-/oxide-linux-arm-gnueabihf-4.1.18.tgz", + "integrity": "sha512-Fp+Wzk/Ws4dZn+LV2Nqx3IilnhH51YZoRaYHQsVq3RQvEl+71VGKFpkfHrLM/Li+kt5c0DJe/bHXK1eHgDmdiA==", "cpu": [ "arm" ], - "license": "MIT", "optional": true, "os": [ "linux" @@ -6184,13 +5317,12 @@ } }, "node_modules/@tailwindcss/oxide-linux-arm64-gnu": { - "version": "4.1.13", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-gnu/-/oxide-linux-arm64-gnu-4.1.13.tgz", - "integrity": "sha512-wdtfkmpXiwej/yoAkrCP2DNzRXCALq9NVLgLELgLim1QpSfhQM5+ZxQQF8fkOiEpuNoKLp4nKZ6RC4kmeFH0HQ==", + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-gnu/-/oxide-linux-arm64-gnu-4.1.18.tgz", + "integrity": 
"sha512-S0n3jboLysNbh55Vrt7pk9wgpyTTPD0fdQeh7wQfMqLPM/Hrxi+dVsLsPrycQjGKEQk85Kgbx+6+QnYNiHalnw==", "cpu": [ "arm64" ], - "license": "MIT", "optional": true, "os": [ "linux" @@ -6200,13 +5332,12 @@ } }, "node_modules/@tailwindcss/oxide-linux-arm64-musl": { - "version": "4.1.13", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-musl/-/oxide-linux-arm64-musl-4.1.13.tgz", - "integrity": "sha512-hZQrmtLdhyqzXHB7mkXfq0IYbxegaqTmfa1p9MBj72WPoDD3oNOh1Lnxf6xZLY9C3OV6qiCYkO1i/LrzEdW2mg==", + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-musl/-/oxide-linux-arm64-musl-4.1.18.tgz", + "integrity": "sha512-1px92582HkPQlaaCkdRcio71p8bc8i/ap5807tPRDK/uw953cauQBT8c5tVGkOwrHMfc2Yh6UuxaH4vtTjGvHg==", "cpu": [ "arm64" ], - "license": "MIT", "optional": true, "os": [ "linux" @@ -6216,13 +5347,12 @@ } }, "node_modules/@tailwindcss/oxide-linux-x64-gnu": { - "version": "4.1.13", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.1.13.tgz", - "integrity": "sha512-uaZTYWxSXyMWDJZNY1Ul7XkJTCBRFZ5Fo6wtjrgBKzZLoJNrG+WderJwAjPzuNZOnmdrVg260DKwXCFtJ/hWRQ==", + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.1.18.tgz", + "integrity": "sha512-v3gyT0ivkfBLoZGF9LyHmts0Isc8jHZyVcbzio6Wpzifg/+5ZJpDiRiUhDLkcr7f/r38SWNe7ucxmGW3j3Kb/g==", "cpu": [ "x64" ], - "license": "MIT", "optional": true, "os": [ "linux" @@ -6232,13 +5362,12 @@ } }, "node_modules/@tailwindcss/oxide-linux-x64-musl": { - "version": "4.1.13", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.1.13.tgz", - "integrity": "sha512-oXiPj5mi4Hdn50v5RdnuuIms0PVPI/EG4fxAfFiIKQh5TgQgX7oSuDWntHW7WNIi/yVLAiS+CRGW4RkoGSSgVQ==", + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.1.18.tgz", + "integrity": "sha512-bhJ2y2OQNlcRwwgOAGMY0xTFStt4/wyU6pvI6LSuZpRgKQwxTec0/3Scu91O8ir7qCR3AuepQKLU/kX99FouqQ==", "cpu": [ "x64" ], - "license": "MIT", "optional": true, "os": [ "linux" @@ -6248,9 +5377,9 @@ } }, "node_modules/@tailwindcss/oxide-wasm32-wasi": { - "version": "4.1.13", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-wasm32-wasi/-/oxide-wasm32-wasi-4.1.13.tgz", - "integrity": "sha512-+LC2nNtPovtrDwBc/nqnIKYh/W2+R69FA0hgoeOn64BdCX522u19ryLh3Vf3F8W49XBcMIxSe665kwy21FkhvA==", + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-wasm32-wasi/-/oxide-wasm32-wasi-4.1.18.tgz", + "integrity": "sha512-LffYTvPjODiP6PT16oNeUQJzNVyJl1cjIebq/rWWBF+3eDst5JGEFSc5cWxyRCJ0Mxl+KyIkqRxk1XPEs9x8TA==", "bundleDependencies": [ "@napi-rs/wasm-runtime", "@emnapi/core", @@ -6262,32 +5391,31 @@ "cpu": [ "wasm32" ], - "license": "MIT", "optional": true, "dependencies": { - "@emnapi/core": "^1.4.5", - "@emnapi/runtime": "^1.4.5", - "@emnapi/wasi-threads": "^1.0.4", - "@napi-rs/wasm-runtime": "^0.2.12", - "@tybys/wasm-util": "^0.10.0", - "tslib": "^2.8.0" + "@emnapi/core": "^1.7.1", + "@emnapi/runtime": "^1.7.1", + "@emnapi/wasi-threads": "^1.1.0", + "@napi-rs/wasm-runtime": "^1.1.0", + "@tybys/wasm-util": "^0.10.1", + "tslib": "^2.4.0" }, "engines": { "node": ">=14.0.0" } }, "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@emnapi/core": { - "version": "1.4.5", + "version": "1.7.1", "inBundle": true, "license": "MIT", "optional": true, "dependencies": { - "@emnapi/wasi-threads": "1.0.4", + "@emnapi/wasi-threads": "1.1.0", "tslib": "^2.4.0" } 
}, "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@emnapi/runtime": { - "version": "1.4.5", + "version": "1.7.1", "inBundle": true, "license": "MIT", "optional": true, @@ -6296,7 +5424,7 @@ } }, "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@emnapi/wasi-threads": { - "version": "1.0.4", + "version": "1.1.0", "inBundle": true, "license": "MIT", "optional": true, @@ -6305,18 +5433,18 @@ } }, "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@napi-rs/wasm-runtime": { - "version": "0.2.12", + "version": "1.1.0", "inBundle": true, "license": "MIT", "optional": true, "dependencies": { - "@emnapi/core": "^1.4.3", - "@emnapi/runtime": "^1.4.3", - "@tybys/wasm-util": "^0.10.0" + "@emnapi/core": "^1.7.1", + "@emnapi/runtime": "^1.7.1", + "@tybys/wasm-util": "^0.10.1" } }, "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@tybys/wasm-util": { - "version": "0.10.0", + "version": "0.10.1", "inBundle": true, "license": "MIT", "optional": true, @@ -6325,19 +5453,18 @@ } }, "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/tslib": { - "version": "2.8.0", + "version": "2.8.1", "inBundle": true, "license": "0BSD", "optional": true }, "node_modules/@tailwindcss/oxide-win32-arm64-msvc": { - "version": "4.1.13", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.1.13.tgz", - "integrity": "sha512-dziTNeQXtoQ2KBXmrjCxsuPk3F3CQ/yb7ZNZNA+UkNTeiTGgfeh+gH5Pi7mRncVgcPD2xgHvkFCh/MhZWSgyQg==", + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.1.18.tgz", + "integrity": "sha512-HjSA7mr9HmC8fu6bdsZvZ+dhjyGCLdotjVOgLA2vEqxEBZaQo9YTX4kwgEvPCpRh8o4uWc4J/wEoFzhEmjvPbA==", "cpu": [ "arm64" ], - "license": "MIT", "optional": true, "os": [ "win32" @@ -6347,13 +5474,12 @@ } }, "node_modules/@tailwindcss/oxide-win32-x64-msvc": { - "version": "4.1.13", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-x64-msvc/-/oxide-win32-x64-msvc-4.1.13.tgz", - "integrity": "sha512-3+LKesjXydTkHk5zXX01b5KMzLV1xl2mcktBJkje7rhFUpUlYJy7IMOLqjIRQncLTa1WZZiFY/foAeB5nmaiTw==", + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-x64-msvc/-/oxide-win32-x64-msvc-4.1.18.tgz", + "integrity": "sha512-bJWbyYpUlqamC8dpR7pfjA0I7vdF6t5VpUGMWRkXVE3AXgIZjYUYAK7II1GNaxR8J1SSrSrppRar8G++JekE3Q==", "cpu": [ "x64" ], - "license": "MIT", "optional": true, "os": [ "win32" @@ -6362,24 +5488,12 @@ "node": ">= 10" } }, - "node_modules/@tailwindcss/postcss": { - "version": "4.1.13", - "resolved": "https://registry.npmjs.org/@tailwindcss/postcss/-/postcss-4.1.13.tgz", - "integrity": "sha512-HLgx6YSFKJT7rJqh9oJs/TkBFhxuMOfUKSBEPYwV+t78POOBsdQ7crhZLzwcH3T0UyUuOzU/GK5pk5eKr3wCiQ==", - "license": "MIT", - "dependencies": { - "@alloc/quick-lru": "^5.2.0", - "@tailwindcss/node": "4.1.13", - "@tailwindcss/oxide": "4.1.13", - "postcss": "^8.4.41", - "tailwindcss": "4.1.13" - } - }, "node_modules/@tailwindcss/typography": { "version": "0.5.19", "resolved": "https://registry.npmjs.org/@tailwindcss/typography/-/typography-0.5.19.tgz", "integrity": "sha512-w31dd8HOx3k9vPtcQh5QHP9GwKcgbMp87j58qi6xgiBnFFtKEAgCWnDw4qUT8aHwkCp8bKvb/KGKWWHedP0AAg==", "dev": true, + "license": "MIT", "dependencies": { "postcss-selector-parser": "6.0.10" }, @@ -6388,24 +5502,24 @@ } }, "node_modules/@tailwindcss/vite": { - "version": "4.1.13", - "resolved": "https://registry.npmjs.org/@tailwindcss/vite/-/vite-4.1.13.tgz", - "integrity": 
"sha512-0PmqLQ010N58SbMTJ7BVJ4I2xopiQn/5i6nlb4JmxzQf8zcS5+m2Cv6tqh+sfDwtIdjoEnOvwsGQ1hkUi8QEHQ==", - "license": "MIT", + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/@tailwindcss/vite/-/vite-4.1.18.tgz", + "integrity": "sha512-jVA+/UpKL1vRLg6Hkao5jldawNmRo7mQYrZtNHMIVpLfLhDml5nMRUo/8MwoX2vNXvnaXNNMedrMfMugAVX1nA==", "dependencies": { - "@tailwindcss/node": "4.1.13", - "@tailwindcss/oxide": "4.1.13", - "tailwindcss": "4.1.13" + "@tailwindcss/node": "4.1.18", + "@tailwindcss/oxide": "4.1.18", + "tailwindcss": "4.1.18" }, "peerDependencies": { "vite": "^5.2.0 || ^6 || ^7" } }, "node_modules/@tanstack/eslint-plugin-query": { - "version": "5.91.0", - "resolved": "https://registry.npmjs.org/@tanstack/eslint-plugin-query/-/eslint-plugin-query-5.91.0.tgz", - "integrity": "sha512-Kn6yWyRe3dIPf7NqyDMhcsTBz2Oh8jPSOpBdlnLQhGBJ6iTMBFYA4B1UreGJ/WdfzQskSMh5imcyWF+wqa/Q5g==", + "version": "5.91.2", + "resolved": "https://registry.npmjs.org/@tanstack/eslint-plugin-query/-/eslint-plugin-query-5.91.2.tgz", + "integrity": "sha512-UPeWKl/Acu1IuuHJlsN+eITUHqAaa9/04geHHPedY8siVarSaWprY0SVMKrkpKfk5ehRT7+/MZ5QwWuEtkWrFw==", "dev": true, + "license": "MIT", "dependencies": { "@typescript-eslint/utils": "^8.44.1" }, @@ -6418,20 +5532,22 @@ } }, "node_modules/@tanstack/query-core": { - "version": "5.90.2", - "resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-5.90.2.tgz", - "integrity": "sha512-k/TcR3YalnzibscALLwxeiLUub6jN5EDLwKDiO7q5f4ICEoptJ+n9+7vcEFy5/x/i6Q+Lb/tXrsKCggf5uQJXQ==", + "version": "5.90.12", + "resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-5.90.12.tgz", + "integrity": "sha512-T1/8t5DhV/SisWjDnaiU2drl6ySvsHj1bHBCWNXd+/T+Hh1cf6JodyEYMd5sgwm+b/mETT4EV3H+zCVczCU5hg==", + "license": "MIT", "funding": { "type": "github", "url": "https://github.com/sponsors/tannerlinsley" } }, "node_modules/@tanstack/react-query": { - "version": "5.90.2", - "resolved": "https://registry.npmjs.org/@tanstack/react-query/-/react-query-5.90.2.tgz", - "integrity": "sha512-CLABiR+h5PYfOWr/z+vWFt5VsOA2ekQeRQBFSKlcoW6Ndx/f8rfyVmq4LbgOM4GG2qtxAxjLYLOpCNTYm4uKzw==", + "version": "5.90.12", + "resolved": "https://registry.npmjs.org/@tanstack/react-query/-/react-query-5.90.12.tgz", + "integrity": "sha512-graRZspg7EoEaw0a8faiUASCyJrqjKPdqJ9EwuDRUF9mEYJ1YPczI9H+/agJ0mOJkPCJDk0lsz5QTrLZ/jQ2rg==", + "license": "MIT", "dependencies": { - "@tanstack/query-core": "5.90.2" + "@tanstack/query-core": "5.90.12" }, "funding": { "type": "github", @@ -6445,7 +5561,6 @@ "version": "3.11.3", "resolved": "https://registry.npmjs.org/@tanstack/react-virtual/-/react-virtual-3.11.3.tgz", "integrity": "sha512-vCU+OTylXN3hdC8RKg68tPlBPjjxtzon7Ys46MgrSLE+JhSjSTPvoQifV6DQJeJmA8Q3KT6CphJbejupx85vFw==", - "license": "MIT", "dependencies": { "@tanstack/virtual-core": "3.11.3" }, @@ -6462,7 +5577,6 @@ "version": "3.11.3", "resolved": "https://registry.npmjs.org/@tanstack/virtual-core/-/virtual-core-3.11.3.tgz", "integrity": "sha512-v2mrNSnMwnPJtcVqNvV0c5roGCBqeogN8jDtgtuHCphdwBasOZ17x8UV8qpHUh+u0MLfX43c0uUHKje0s+Zb0w==", - "license": "MIT", "funding": { "type": "github", "url": "https://github.com/sponsors/tannerlinsley" @@ -6474,6 +5588,7 @@ "integrity": "sha512-o4PXJQidqJl82ckFaXUeoAW+XysPLauYI43Abki5hABd853iMhitooc6znOnczgbTYmEP6U6/y1ZyKAIsvMKGg==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@babel/code-frame": "^7.10.4", "@babel/runtime": "^7.12.5", @@ -6489,9 +5604,9 @@ } }, "node_modules/@testing-library/jest-dom": { - "version": "6.8.0", - "resolved": 
"https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-6.8.0.tgz", - "integrity": "sha512-WgXcWzVM6idy5JaftTVC8Vs83NKRmGJz4Hqs4oyOuO2J4r/y79vvKZsb+CaGyCSEbUPI6OsewfPd0G1A0/TUZQ==", + "version": "6.9.1", + "resolved": "https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-6.9.1.tgz", + "integrity": "sha512-zIcONa+hVtVSSep9UT3jZ5rizo2BsxgyDYU7WFD5eICBE7no3881HGeb/QkGfsJs6JTkY1aQhT7rIPC7e+0nnA==", "dev": true, "license": "MIT", "dependencies": { @@ -6516,11 +5631,10 @@ "license": "MIT" }, "node_modules/@testing-library/react": { - "version": "16.3.0", - "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-16.3.0.tgz", - "integrity": "sha512-kFSyxiEDwv1WLl2fgsq6pPBbw5aWKrsY2/noi1Id0TK0UParSF62oFQFGHXIyaG4pp2tEub/Zlel+fjjZILDsw==", + "version": "16.3.1", + "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-16.3.1.tgz", + "integrity": "sha512-gr4KtAWqIOQoucWYD/f6ki+j5chXfcPc74Col/6poTyqTmn7zRmodWahWRCp8tYd+GMqBonw6hstNzqjbs6gjw==", "dev": true, - "license": "MIT", "dependencies": { "@babel/runtime": "^7.12.5" }, @@ -6564,47 +5678,6 @@ "dev": true, "license": "MIT" }, - "node_modules/@types/babel__core": { - "version": "7.20.5", - "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", - "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", - "license": "MIT", - "dependencies": { - "@babel/parser": "^7.20.7", - "@babel/types": "^7.20.7", - "@types/babel__generator": "*", - "@types/babel__template": "*", - "@types/babel__traverse": "*" - } - }, - "node_modules/@types/babel__generator": { - "version": "7.27.0", - "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", - "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", - "license": "MIT", - "dependencies": { - "@babel/types": "^7.0.0" - } - }, - "node_modules/@types/babel__template": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", - "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", - "license": "MIT", - "dependencies": { - "@babel/parser": "^7.1.0", - "@babel/types": "^7.0.0" - } - }, - "node_modules/@types/babel__traverse": { - "version": "7.28.0", - "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.28.0.tgz", - "integrity": "sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==", - "license": "MIT", - "dependencies": { - "@babel/types": "^7.28.2" - } - }, "node_modules/@types/base16": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/@types/base16/-/base16-1.0.5.tgz", @@ -6612,22 +5685,15 @@ "license": "MIT" }, "node_modules/@types/chai": { - "version": "5.2.2", - "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.2.tgz", - "integrity": "sha512-8kB30R7Hwqf40JPiKhVzodJs2Qc1ZJ5zuT3uzw5Hq/dhNCl3G3l83jfpdI1e20BP348+fV7VIL/+FxaXkqBmWg==", + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.3.tgz", + "integrity": "sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA==", "dev": true, - "license": "MIT", "dependencies": { - "@types/deep-eql": "*" + "@types/deep-eql": "*", + "assertion-error": "^2.0.1" } }, - "node_modules/@types/cookie": { - "version": "0.6.0", - "resolved": 
"https://registry.npmjs.org/@types/cookie/-/cookie-0.6.0.tgz", - "integrity": "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==", - "dev": true, - "license": "MIT" - }, "node_modules/@types/debug": { "version": "4.1.12", "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz", @@ -6641,8 +5707,7 @@ "version": "4.0.2", "resolved": "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz", "integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==", - "dev": true, - "license": "MIT" + "dev": true }, "node_modules/@types/estree": { "version": "1.0.8", @@ -6676,9 +5741,9 @@ "license": "MIT" }, "node_modules/@types/lodash": { - "version": "4.17.20", - "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.20.tgz", - "integrity": "sha512-H3MHACvFUEiujabxhaI/ImO6gUrd8oOurg7LQtS7mbwIXA/cUqWrvBsaeJ23aZEPk1TAYkurjfMbSELfoCXlGA==", + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-FOvQ0YPD5NOfPgMzJihoT+Za5pdkDJWcbpuj1DjaKZIr/gxodQjY/uWEFlTNqW2ugXHUiL8lRQgw63dzKHZdeQ==", "license": "MIT" }, "node_modules/@types/mdast": { @@ -6697,12 +5762,13 @@ "license": "MIT" }, "node_modules/@types/node": { - "version": "24.5.2", - "resolved": "https://registry.npmjs.org/@types/node/-/node-24.5.2.tgz", - "integrity": "sha512-FYxk1I7wPv3K2XBaoyH2cTnocQEu8AOZ60hPbsyukMPLv5/5qr7V1i8PLHdl6Zf87I+xZXFvPCXYjiTFq+YSDQ==", + "version": "25.0.3", + "resolved": "https://registry.npmjs.org/@types/node/-/node-25.0.3.tgz", + "integrity": "sha512-W609buLVRVmeW693xKfzHeIV6nJGGz98uCPfeXI1ELMLXVeKYZ9m15fAMSaUPBHYLGFsVRcMmSCksQOrZV9BYA==", "devOptional": true, + "peer": true, "dependencies": { - "undici-types": "~7.12.0" + "undici-types": "~7.16.0" } }, "node_modules/@types/prismjs": { @@ -6712,31 +5778,24 @@ "license": "MIT" }, "node_modules/@types/react": { - "version": "19.1.15", - "resolved": "https://registry.npmjs.org/@types/react/-/react-19.1.15.tgz", - "integrity": "sha512-+kLxJpaJzXybyDyFXYADyP1cznTO8HSuBpenGlnKOAkH4hyNINiywvXS/tGJhsrGGP/gM185RA3xpjY0Yg4erA==", + "version": "19.2.7", + "resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.7.tgz", + "integrity": "sha512-MWtvHrGZLFttgeEj28VXHxpmwYbor/ATPYbBfSFZEIRK0ecCFLl2Qo55z52Hss+UV9CRN7trSeq1zbgx7YDWWg==", + "license": "MIT", + "peer": true, "dependencies": { - "csstype": "^3.0.2" + "csstype": "^3.2.2" } }, "node_modules/@types/react-dom": { - "version": "19.1.9", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-19.1.9.tgz", - "integrity": "sha512-qXRuZaOsAdXKFyOhRBg6Lqqc0yay13vN7KrIg4L7N4aaHN68ma9OK3NE1BoDFgFOTfM7zg+3/8+2n8rLUH3OKQ==", + "version": "19.2.3", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-19.2.3.tgz", + "integrity": "sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ==", "dev": true, "license": "MIT", + "peer": true, "peerDependencies": { - "@types/react": "^19.0.0" - } - }, - "node_modules/@types/react-highlight": { - "version": "0.12.8", - "resolved": "https://registry.npmjs.org/@types/react-highlight/-/react-highlight-0.12.8.tgz", - "integrity": "sha512-V7O7zwXUw8WSPd//YUO8sz489J/EeobJljASGhP0rClrvq+1Y1qWEpToGu+Pp7YuChxhAXSgkLkrOYpZX5A62g==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/react": "*" + "@types/react": "^19.2.0" } }, "node_modules/@types/react-syntax-highlighter": { @@ -6757,9 +5816,11 @@ "license": "MIT" }, 
"node_modules/@types/trusted-types": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-1.0.6.tgz", - "integrity": "sha512-230RC8sFeHoT6sSUlRO6a8cAnclO06eeiq1QDfiv2FGCLWFvvERWgwIQD4FWqD9A69BN7Lzee4OXwoMVnnsWDw==" + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.7.tgz", + "integrity": "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==", + "license": "MIT", + "optional": true }, "node_modules/@types/unist": { "version": "3.0.3", @@ -6767,22 +5828,13 @@ "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", "license": "MIT" }, - "node_modules/@types/ws": { - "version": "8.18.1", - "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.18.1.tgz", - "integrity": "sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, "node_modules/@typescript-eslint/eslint-plugin": { "version": "7.18.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.18.0.tgz", "integrity": "sha512-94EQTWZ40mzBc42ATNIBimBEDltSJ9RQHCC8vc/PDbxi4k8dVwUAv4o98dk50M1zB+JGFxp43FP7f8+FP8R6Sw==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "7.18.0", @@ -6840,6 +5892,7 @@ "integrity": "sha512-4Z+L8I2OqhZV8qA132M4wNL30ypZGYOQVBfMgxDH/K5UX0PNqTu1c6za9ST5r9+tavvHiTWmBnKzpCJ/GlVFtg==", "dev": true, "license": "BSD-2-Clause", + "peer": true, "dependencies": { "@typescript-eslint/scope-manager": "7.18.0", "@typescript-eslint/types": "7.18.0", @@ -6864,13 +5917,14 @@ } }, "node_modules/@typescript-eslint/project-service": { - "version": "8.44.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.44.1.tgz", - "integrity": "sha512-ycSa60eGg8GWAkVsKV4E6Nz33h+HjTXbsDT4FILyL8Obk5/mx4tbvCNsLf9zret3ipSumAOG89UcCs/KRaKYrA==", + "version": "8.49.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.49.0.tgz", + "integrity": "sha512-/wJN0/DKkmRUMXjZUXYZpD1NEQzQAAn9QWfGwo+Ai8gnzqH7tvqS7oNVdTjKqOcPyVIdZdyCMoqN66Ia789e7g==", "dev": true, + "license": "MIT", "dependencies": { - "@typescript-eslint/tsconfig-utils": "^8.44.1", - "@typescript-eslint/types": "^8.44.1", + "@typescript-eslint/tsconfig-utils": "^8.49.0", + "@typescript-eslint/types": "^8.49.0", "debug": "^4.3.4" }, "engines": { @@ -6885,10 +5939,11 @@ } }, "node_modules/@typescript-eslint/project-service/node_modules/@typescript-eslint/types": { - "version": "8.44.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.44.1.tgz", - "integrity": "sha512-Lk7uj7y9uQUOEguiDIDLYLJOrYHQa7oBiURYVFqIpGxclAFQ78f6VUOM8lI2XEuNOKNB7XuvM2+2cMXAoq4ALQ==", + "version": "8.49.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.49.0.tgz", + "integrity": "sha512-e9k/fneezorUo6WShlQpMxXh8/8wfyc+biu6tnAqA81oWrEic0k21RHzP9uqqpyBBeBKu4T+Bsjy9/b8u7obXQ==", "dev": true, + "license": "MIT", "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, @@ -6916,10 +5971,11 @@ } }, "node_modules/@typescript-eslint/tsconfig-utils": { - "version": "8.44.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.44.1.tgz", - "integrity": 
"sha512-B5OyACouEjuIvof3o86lRMvyDsFwZm+4fBOqFHccIctYgBjqR3qT39FBYGN87khcgf0ExpdCBeGKpKRhSFTjKQ==", + "version": "8.49.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.49.0.tgz", + "integrity": "sha512-8prixNi1/6nawsRYxet4YOhnbW+W9FK/bQPxsGB1D3ZrDzbJ5FXw5XmzxZv82X3B+ZccuSxo/X8q9nQ+mFecWA==", "dev": true, + "license": "MIT", "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, @@ -7026,15 +6082,16 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "8.44.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.44.1.tgz", - "integrity": "sha512-DpX5Fp6edTlocMCwA+mHY8Mra+pPjRZ0TfHkXI8QFelIKcbADQz1LUPNtzOFUriBB2UYqw4Pi9+xV4w9ZczHFg==", + "version": "8.49.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.49.0.tgz", + "integrity": "sha512-N3W7rJw7Rw+z1tRsHZbK395TWSYvufBXumYtEGzypgMUthlg0/hmCImeA8hgO2d2G4pd7ftpxxul2J8OdtdaFA==", "dev": true, + "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.7.0", - "@typescript-eslint/scope-manager": "8.44.1", - "@typescript-eslint/types": "8.44.1", - "@typescript-eslint/typescript-estree": "8.44.1" + "@typescript-eslint/scope-manager": "8.49.0", + "@typescript-eslint/types": "8.49.0", + "@typescript-eslint/typescript-estree": "8.49.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -7049,13 +6106,14 @@ } }, "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/scope-manager": { - "version": "8.44.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.44.1.tgz", - "integrity": "sha512-NdhWHgmynpSvyhchGLXh+w12OMT308Gm25JoRIyTZqEbApiBiQHD/8xgb6LqCWCFcxFtWwaVdFsLPQI3jvhywg==", + "version": "8.49.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.49.0.tgz", + "integrity": "sha512-npgS3zi+/30KSOkXNs0LQXtsg9ekZ8OISAOLGWA/ZOEn0ZH74Ginfl7foziV8DT+D98WfQ5Kopwqb/PZOaIJGg==", "dev": true, + "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.44.1", - "@typescript-eslint/visitor-keys": "8.44.1" + "@typescript-eslint/types": "8.49.0", + "@typescript-eslint/visitor-keys": "8.49.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -7066,10 +6124,11 @@ } }, "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/types": { - "version": "8.44.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.44.1.tgz", - "integrity": "sha512-Lk7uj7y9uQUOEguiDIDLYLJOrYHQa7oBiURYVFqIpGxclAFQ78f6VUOM8lI2XEuNOKNB7XuvM2+2cMXAoq4ALQ==", + "version": "8.49.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.49.0.tgz", + "integrity": "sha512-e9k/fneezorUo6WShlQpMxXh8/8wfyc+biu6tnAqA81oWrEic0k21RHzP9uqqpyBBeBKu4T+Bsjy9/b8u7obXQ==", "dev": true, + "license": "MIT", "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, @@ -7079,20 +6138,20 @@ } }, "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/typescript-estree": { - "version": "8.44.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.44.1.tgz", - "integrity": "sha512-qnQJ+mVa7szevdEyvfItbO5Vo+GfZ4/GZWWDRRLjrxYPkhM+6zYB2vRYwCsoJLzqFCdZT4mEqyJoyzkunsZ96A==", + "version": "8.49.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.49.0.tgz", + "integrity": "sha512-jrLdRuAbPfPIdYNppHJ/D0wN+wwNfJ32YTAm10eJVsFmrVpXQnDWBn8niCSMlWjvml8jsce5E/O+86IQtTbJWA==", "dev": true, + 
"license": "MIT", "dependencies": { - "@typescript-eslint/project-service": "8.44.1", - "@typescript-eslint/tsconfig-utils": "8.44.1", - "@typescript-eslint/types": "8.44.1", - "@typescript-eslint/visitor-keys": "8.44.1", + "@typescript-eslint/project-service": "8.49.0", + "@typescript-eslint/tsconfig-utils": "8.49.0", + "@typescript-eslint/types": "8.49.0", + "@typescript-eslint/visitor-keys": "8.49.0", "debug": "^4.3.4", - "fast-glob": "^3.3.2", - "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", + "tinyglobby": "^0.2.15", "ts-api-utils": "^2.1.0" }, "engines": { @@ -7107,12 +6166,13 @@ } }, "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/visitor-keys": { - "version": "8.44.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.44.1.tgz", - "integrity": "sha512-576+u0QD+Jp3tZzvfRfxon0EA2lzcDt3lhUbsC6Lgzy9x2VR4E+JUiNyGHi5T8vk0TV+fpJ5GLG1JsJuWCaKhw==", + "version": "8.49.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.49.0.tgz", + "integrity": "sha512-LlKaciDe3GmZFphXIc79THF/YYBugZ7FS1pO581E/edlVVNbZKDy93evqmrfQ9/Y4uN0vVhX4iuchq26mK/iiA==", "dev": true, + "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.44.1", + "@typescript-eslint/types": "8.49.0", "eslint-visitor-keys": "^4.2.1" }, "engines": { @@ -7128,6 +6188,7 @@ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", "dev": true, + "license": "Apache-2.0", "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, @@ -7140,6 +6201,7 @@ "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz", "integrity": "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=18.12" }, @@ -7184,61 +6246,30 @@ "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", "license": "ISC" }, - "node_modules/@vitejs/plugin-react": { - "version": "5.0.4", - "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-5.0.4.tgz", - "integrity": "sha512-La0KD0vGkVkSk6K+piWDKRUyg8Rl5iAIKRMH0vMJI0Eg47bq1eOxmoObAaQG37WMW9MSyk7Cs8EIWwJC1PtzKA==", - "dependencies": { - "@babel/core": "^7.28.4", - "@babel/plugin-transform-react-jsx-self": "^7.27.1", - "@babel/plugin-transform-react-jsx-source": "^7.27.1", - "@rolldown/pluginutils": "1.0.0-beta.38", - "@types/babel__core": "^7.20.5", - "react-refresh": "^0.17.0" - }, - "engines": { - "node": "^20.19.0 || >=22.12.0" - }, - "peerDependencies": { - "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" - } - }, - "node_modules/@vitejs/plugin-react/node_modules/react-refresh": { - "version": "0.17.0", - "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.17.0.tgz", - "integrity": "sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/@vitest/coverage-v8": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-3.2.4.tgz", - "integrity": "sha512-EyF9SXU6kS5Ku/U82E259WSnvg6c8KTjppUncuNdm5QHpe17mwREHnjDzozC8x9MZ0xfBUFSaLkRv4TMA75ALQ==", + "version": "4.0.16", + "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-4.0.16.tgz", + "integrity": 
"sha512-2rNdjEIsPRzsdu6/9Eq0AYAzYdpP6Bx9cje9tL3FE5XzXRQF1fNU9pe/1yE8fCrS0HD+fBtt6gLPh6LI57tX7A==", "dev": true, - "license": "MIT", "dependencies": { - "@ampproject/remapping": "^2.3.0", "@bcoe/v8-coverage": "^1.0.2", - "ast-v8-to-istanbul": "^0.3.3", - "debug": "^4.4.1", + "@vitest/utils": "4.0.16", + "ast-v8-to-istanbul": "^0.3.8", "istanbul-lib-coverage": "^3.2.2", "istanbul-lib-report": "^3.0.1", "istanbul-lib-source-maps": "^5.0.6", - "istanbul-reports": "^3.1.7", - "magic-string": "^0.30.17", - "magicast": "^0.3.5", - "std-env": "^3.9.0", - "test-exclude": "^7.0.1", - "tinyrainbow": "^2.0.0" + "istanbul-reports": "^3.2.0", + "magicast": "^0.5.1", + "obug": "^2.1.1", + "std-env": "^3.10.0", + "tinyrainbow": "^3.0.3" }, "funding": { "url": "https://opencollective.com/vitest" }, "peerDependencies": { - "@vitest/browser": "3.2.4", - "vitest": "3.2.4" + "@vitest/browser": "4.0.16", + "vitest": "4.0.16" }, "peerDependenciesMeta": { "@vitest/browser": { @@ -7247,39 +6278,38 @@ } }, "node_modules/@vitest/expect": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.2.4.tgz", - "integrity": "sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==", + "version": "4.0.16", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-4.0.16.tgz", + "integrity": "sha512-eshqULT2It7McaJkQGLkPjPjNph+uevROGuIMJdG3V+0BSR2w9u6J9Lwu+E8cK5TETlfou8GRijhafIMhXsimA==", "dev": true, - "license": "MIT", "dependencies": { + "@standard-schema/spec": "^1.0.0", "@types/chai": "^5.2.2", - "@vitest/spy": "3.2.4", - "@vitest/utils": "3.2.4", - "chai": "^5.2.0", - "tinyrainbow": "^2.0.0" + "@vitest/spy": "4.0.16", + "@vitest/utils": "4.0.16", + "chai": "^6.2.1", + "tinyrainbow": "^3.0.3" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/mocker": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.2.4.tgz", - "integrity": "sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==", + "version": "4.0.16", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-4.0.16.tgz", + "integrity": "sha512-yb6k4AZxJTB+q9ycAvsoxGn+j/po0UaPgajllBgt1PzoMAAmJGYFdDk0uCcRcxb3BrME34I6u8gHZTQlkqSZpg==", "dev": true, - "license": "MIT", "dependencies": { - "@vitest/spy": "3.2.4", + "@vitest/spy": "4.0.16", "estree-walker": "^3.0.3", - "magic-string": "^0.30.17" + "magic-string": "^0.30.21" }, "funding": { "url": "https://opencollective.com/vitest" }, "peerDependencies": { "msw": "^2.4.9", - "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" + "vite": "^6.0.0 || ^7.0.0-0" }, "peerDependenciesMeta": { "msw": { @@ -7291,28 +6321,25 @@ } }, "node_modules/@vitest/pretty-format": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.2.4.tgz", - "integrity": "sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==", + "version": "4.0.16", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-4.0.16.tgz", + "integrity": "sha512-eNCYNsSty9xJKi/UdVD8Ou16alu7AYiS2fCPRs0b1OdhJiV89buAXQLpTbe+X8V9L6qrs9CqyvU7OaAopJYPsA==", "dev": true, - "license": "MIT", "dependencies": { - "tinyrainbow": "^2.0.0" + "tinyrainbow": "^3.0.3" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/runner": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.2.4.tgz", - "integrity": 
"sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ==", + "version": "4.0.16", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-4.0.16.tgz", + "integrity": "sha512-VWEDm5Wv9xEo80ctjORcTQRJ539EGPB3Pb9ApvVRAY1U/WkHXmmYISqU5E79uCwcW7xYUV38gwZD+RV755fu3Q==", "dev": true, - "license": "MIT", "dependencies": { - "@vitest/utils": "3.2.4", - "pathe": "^2.0.3", - "strip-literal": "^3.0.0" + "@vitest/utils": "4.0.16", + "pathe": "^2.0.3" }, "funding": { "url": "https://opencollective.com/vitest" @@ -7322,18 +6349,16 @@ "version": "2.0.3", "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", - "dev": true, - "license": "MIT" + "dev": true }, "node_modules/@vitest/snapshot": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.2.4.tgz", - "integrity": "sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ==", + "version": "4.0.16", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-4.0.16.tgz", + "integrity": "sha512-sf6NcrYhYBsSYefxnry+DR8n3UV4xWZwWxYbCJUt2YdvtqzSPR7VfGrY0zsv090DAbjFZsi7ZaMi1KnSRyK1XA==", "dev": true, - "license": "MIT", "dependencies": { - "@vitest/pretty-format": "3.2.4", - "magic-string": "^0.30.17", + "@vitest/pretty-format": "4.0.16", + "magic-string": "^0.30.21", "pathe": "^2.0.3" }, "funding": { @@ -7344,32 +6369,25 @@ "version": "2.0.3", "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", - "dev": true, - "license": "MIT" + "dev": true }, "node_modules/@vitest/spy": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.2.4.tgz", - "integrity": "sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==", + "version": "4.0.16", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-4.0.16.tgz", + "integrity": "sha512-4jIOWjKP0ZUaEmJm00E0cOBLU+5WE0BpeNr3XN6TEF05ltro6NJqHWxXD0kA8/Zc8Nh23AT8WQxwNG+WeROupw==", "dev": true, - "license": "MIT", - "dependencies": { - "tinyspy": "^4.0.3" - }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/utils": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.2.4.tgz", - "integrity": "sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==", + "version": "4.0.16", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-4.0.16.tgz", + "integrity": "sha512-h8z9yYhV3e1LEfaQ3zdypIrnAg/9hguReGZoS7Gl0aBG5xgA410zBqECqmaF/+RkTggRsfnzc1XaAHA6bmUufA==", "dev": true, - "license": "MIT", "dependencies": { - "@vitest/pretty-format": "3.2.4", - "loupe": "^3.1.4", - "tinyrainbow": "^2.0.0" + "@vitest/pretty-format": "4.0.16", + "tinyrainbow": "^3.0.3" }, "funding": { "url": "https://opencollective.com/vitest" @@ -7388,7 +6406,8 @@ "version": "5.5.0", "resolved": "https://registry.npmjs.org/@xterm/xterm/-/xterm-5.5.0.tgz", "integrity": "sha512-hqJHYaQb5OptNunnyAnkHyM8aCjZ1MEIDTQu1iIbbTD/xops91NB5yq1ZK/dC2JDbVWtF23zUtl9JE2NqwT87A==", - "license": "MIT" + "license": "MIT", + "peer": true }, "node_modules/accepts": { "version": "1.3.8", @@ -7416,6 +6435,7 @@ "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", 
"dev": true, "license": "MIT", + "peer": true, "bin": { "acorn": "bin/acorn" }, @@ -7461,10 +6481,11 @@ } }, "node_modules/ansi-escapes": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-7.1.1.tgz", - "integrity": "sha512-Zhl0ErHcSRUaVfGUeUdDuLgpkEo8KIFjB4Y9uAc46ScOpdDiU1Dbyplh7qWJeJ/ZHpbyMSM26+X3BySgnIz40Q==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-7.2.0.tgz", + "integrity": "sha512-g6LhBsl+GBPRWGWsBtutpzBYuIIdBkLEvad5C/va/74Db018+5TZiyA26cZJAr3Rft5lprVqOIPxf5Vid6tqAw==", "dev": true, + "license": "MIT", "dependencies": { "environment": "^1.0.0" }, @@ -7705,7 +6726,6 @@ "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", "dev": true, - "license": "MIT", "engines": { "node": ">=12" } @@ -7718,13 +6738,13 @@ "license": "MIT" }, "node_modules/ast-v8-to-istanbul": { - "version": "0.3.5", - "resolved": "https://registry.npmjs.org/ast-v8-to-istanbul/-/ast-v8-to-istanbul-0.3.5.tgz", - "integrity": "sha512-9SdXjNheSiE8bALAQCQQuT6fgQaoxJh7IRYrRGZ8/9nv8WhJeC1aXAwN8TbaOssGOukUvyvnkgD9+Yuykvl1aA==", + "version": "0.3.8", + "resolved": "https://registry.npmjs.org/ast-v8-to-istanbul/-/ast-v8-to-istanbul-0.3.8.tgz", + "integrity": "sha512-szgSZqUxI5T8mLKvS7WTjF9is+MVbOeLADU73IseOcrqhxr/VAvy6wfoVE39KnKzA7JRhjF5eUagNlHwvZPlKQ==", "dev": true, "license": "MIT", "dependencies": { - "@jridgewell/trace-mapping": "^0.3.30", + "@jridgewell/trace-mapping": "^0.3.31", "estree-walker": "^3.0.3", "js-tokens": "^9.0.1" } @@ -7752,44 +6772,6 @@ "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", "license": "MIT" }, - "node_modules/autoprefixer": { - "version": "10.4.21", - "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.21.tgz", - "integrity": "sha512-O+A6LWV5LDHSJD3LjHYoNi4VLsj/Whi7k6zG12xTYaU4cQ8oxQGckXNX8cRHK5yOZ/ppVHe0ZBXGzSV9jXdVbQ==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/autoprefixer" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "browserslist": "^4.24.4", - "caniuse-lite": "^1.0.30001702", - "fraction.js": "^4.3.7", - "normalize-range": "^0.1.2", - "picocolors": "^1.1.1", - "postcss-value-parser": "^4.2.0" - }, - "bin": { - "autoprefixer": "bin/autoprefixer" - }, - "engines": { - "node": "^10 || ^12 || >=14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, "node_modules/available-typed-arrays": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", @@ -7807,9 +6789,9 @@ } }, "node_modules/axe-core": { - "version": "4.10.3", - "resolved": "https://registry.npmjs.org/axe-core/-/axe-core-4.10.3.tgz", - "integrity": "sha512-Xm7bpRXnDSX2YE2YFfBk2FnF0ep6tmG7xPh8iHee8MIcrgq762Nkce856dYtJYLkuIoYZvGfTs/PbZhideTcEg==", + "version": "4.11.0", + "resolved": "https://registry.npmjs.org/axe-core/-/axe-core-4.11.0.tgz", + "integrity": "sha512-ilYanEU8vxxBexpJd8cWM4ElSQq4QctCLKih0TSfjIfCQTeyH/6zVrmIJfLPrKTKJRbiG+cfnZbQIjAlJmF1jQ==", "dev": true, "license": "MPL-2.0", "engines": { @@ -7817,9 +6799,10 @@ } }, "node_modules/axios": { - "version": "1.12.2", - "resolved": 
"https://registry.npmjs.org/axios/-/axios-1.12.2.tgz", - "integrity": "sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==", + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.13.2.tgz", + "integrity": "sha512-VPk9ebNqPcy5lRGuSlKx752IlDatOjT9paPlm8A7yOuW2Fbvp4X3JznJtT4f0GzGLLiWE9W8onz51SqLYwzGaA==", + "license": "MIT", "dependencies": { "follow-redirects": "^1.15.6", "form-data": "^4.0.4", @@ -7872,6 +6855,16 @@ "integrity": "sha512-pNdYkNPiJUnEhnfXV56+sQy8+AaPcG3POZAUnwr4EeqCUZFz4u2PePbo3e5Gj4ziYPCWGUZT9RHisvJKnwFuBQ==", "license": "MIT" }, + "node_modules/baseline-browser-mapping": { + "version": "2.9.6", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.6.tgz", + "integrity": "sha512-v9BVVpOTLB59C9E7aSnmIF8h7qRsFpx+A2nugVMTszEOMcfjlZMsXRm4LF23I3Z9AJxc8ANpIvzbzONoX9VJlg==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "baseline-browser-mapping": "dist/cli.js" + } + }, "node_modules/basic-auth": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/basic-auth/-/basic-auth-2.0.1.tgz", @@ -7895,27 +6888,28 @@ "resolved": "https://registry.npmjs.org/bidi-js/-/bidi-js-1.0.3.tgz", "integrity": "sha512-RKshQI1R3YQ+n9YJz2QQ147P66ELpa1FQEg20Dk8oW9t2KgLbpDLLp9aGZ7y8WHSshDknG0bknqGw5/tyCs5tw==", "dev": true, + "license": "MIT", "dependencies": { "require-from-string": "^2.0.2" } }, "node_modules/body-parser": { - "version": "1.20.3", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", - "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", + "version": "1.20.4", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.4.tgz", + "integrity": "sha512-ZTgYYLMOXY9qKU/57FAo8F+HA2dGX7bqGc71txDRC1rS4frdFI5R7NhluHxH6M0YItAP0sHB4uqAOcYKxO6uGA==", "dependencies": { - "bytes": "3.1.2", + "bytes": "~3.1.2", "content-type": "~1.0.5", "debug": "2.6.9", "depd": "2.0.0", - "destroy": "1.2.0", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "on-finished": "2.4.1", - "qs": "6.13.0", - "raw-body": "2.5.2", + "destroy": "~1.2.0", + "http-errors": "~2.0.1", + "iconv-lite": "~0.4.24", + "on-finished": "~2.4.1", + "qs": "~6.14.0", + "raw-body": "~2.5.3", "type-is": "~1.6.18", - "unpipe": "1.0.0" + "unpipe": "~1.0.0" }, "engines": { "node": ">= 0.8", @@ -7959,9 +6953,10 @@ } }, "node_modules/browserslist": { - "version": "4.25.4", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.25.4.tgz", - "integrity": "sha512-4jYpcjabC606xJ3kw2QwGEZKX0Aw7sgQdZCvIK9dhVSPh76BKo+C+btT1RRofH7B+8iNpEbgGNVWiLki5q93yg==", + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz", + "integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==", + "dev": true, "funding": [ { "type": "opencollective", @@ -7977,11 +6972,13 @@ } ], "license": "MIT", + "peer": true, "dependencies": { - "caniuse-lite": "^1.0.30001737", - "electron-to-chromium": "^1.5.211", - "node-releases": "^2.0.19", - "update-browserslist-db": "^1.1.3" + "baseline-browser-mapping": "^2.9.0", + "caniuse-lite": "^1.0.30001759", + "electron-to-chromium": "^1.5.263", + "node-releases": "^2.0.27", + "update-browserslist-db": "^1.2.0" }, "bin": { "browserslist": "cli.js" @@ -8087,9 +7084,10 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001741", - "resolved": 
"https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001741.tgz", - "integrity": "sha512-QGUGitqsc8ARjLdgAfxETDhRbJ0REsP6O3I96TAth/mVjh2cYzN2u+3AzPP3aVSm2FehEItaJw1xd+IGBXWeSw==", + "version": "1.0.30001760", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001760.tgz", + "integrity": "sha512-7AAMPcueWELt1p3mi13HR/LHH0TJLT11cnwDJEs3xA4+CK/PLKeO9Kl1oru24htkyUKtkGCvAx4ohB0Ttry8Dw==", + "dev": true, "funding": [ { "type": "opencollective", @@ -8117,18 +7115,10 @@ } }, "node_modules/chai": { - "version": "5.3.3", - "resolved": "https://registry.npmjs.org/chai/-/chai-5.3.3.tgz", - "integrity": "sha512-4zNhdJD/iOjSH0A05ea+Ke6MU5mmpQcbQsSOkgdaUMJ9zTlDTD/GYlwohmIE2u0gaxHYiVHEn1Fw9mZ/ktJWgw==", + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/chai/-/chai-6.2.1.tgz", + "integrity": "sha512-p4Z49OGG5W/WBCPSS/dH3jQ73kD6tiMmUM+bckNK6Jr5JHMG3k9bg/BvKR8lKmtVBKmOiuVaV2ws8s9oSbwysg==", "dev": true, - "license": "MIT", - "dependencies": { - "assertion-error": "^2.0.1", - "check-error": "^2.1.1", - "deep-eql": "^5.0.1", - "loupe": "^3.1.0", - "pathval": "^2.0.0" - }, "engines": { "node": ">=18" } @@ -8190,16 +7180,6 @@ "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/check-error": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/check-error/-/check-error-2.1.1.tgz", - "integrity": "sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 16" - } - }, "node_modules/chokidar": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", @@ -8216,15 +7196,6 @@ "url": "https://paulmillr.com/funding/" } }, - "node_modules/chownr": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", - "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==", - "license": "BlueOak-1.0.0", - "engines": { - "node": ">=18" - } - }, "node_modules/class-variance-authority": { "version": "0.7.1", "resolved": "https://registry.npmjs.org/class-variance-authority/-/class-variance-authority-0.7.1.tgz", @@ -8242,6 +7213,7 @@ "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-5.0.0.tgz", "integrity": "sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==", "dev": true, + "license": "MIT", "dependencies": { "restore-cursor": "^5.0.0" }, @@ -8253,62 +7225,20 @@ } }, "node_modules/cli-truncate": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-5.1.0.tgz", - "integrity": "sha512-7JDGG+4Zp0CsknDCedl0DYdaeOhc46QNpXi3NLQblkZpXXgA6LncLDUUyvrjSvZeF3VRQa+KiMGomazQrC1V8g==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-5.1.1.tgz", + "integrity": "sha512-SroPvNHxUnk+vIW/dOSfNqdy1sPEFkrTk6TUtqLCnBlo3N7TNYYkzzN7uSD6+jVjrdO4+p8nH7JzH6cIvUem6A==", "dev": true, + "license": "MIT", "dependencies": { "slice-ansi": "^7.1.0", "string-width": "^8.0.0" }, "engines": { - "node": ">=20" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/cli-truncate/node_modules/ansi-regex": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz", - "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==", - "dev": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": 
"https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - "node_modules/cli-truncate/node_modules/string-width": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-8.1.0.tgz", - "integrity": "sha512-Kxl3KJGb/gxkaUMOjRsQ8IrXiGW75O4E3RPjFIINOVH8AMl2SQ/yWdTzWwF3FevIX9LcMAjJW+GRwAlAbTSXdg==", - "dev": true, - "dependencies": { - "get-east-asian-width": "^1.3.0", - "strip-ansi": "^7.1.0" - }, - "engines": { - "node": ">=20" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/cli-truncate/node_modules/strip-ansi": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz", - "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==", - "dev": true, - "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" + "node": ">=20" }, "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/cli-width": { @@ -8399,7 +7329,6 @@ "version": "4.2.3", "resolved": "https://registry.npmjs.org/color/-/color-4.2.3.tgz", "integrity": "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==", - "license": "MIT", "dependencies": { "color-convert": "^2.0.1", "color-string": "^1.9.0" @@ -8439,14 +7368,14 @@ "node_modules/color2k": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/color2k/-/color2k-2.0.3.tgz", - "integrity": "sha512-zW190nQTIoXcGCaU08DvVNFTmQhUpnJfVuAKfWqUQkflXKpaDdpaYoM0iluLS9lgJNHyBF58KKA2FBEwkD7wog==", - "license": "MIT" + "integrity": "sha512-zW190nQTIoXcGCaU08DvVNFTmQhUpnJfVuAKfWqUQkflXKpaDdpaYoM0iluLS9lgJNHyBF58KKA2FBEwkD7wog==" }, "node_modules/colorette": { "version": "2.0.20", "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/combined-stream": { "version": "1.0.8", @@ -8471,10 +7400,11 @@ } }, "node_modules/commander": { - "version": "14.0.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-14.0.1.tgz", - "integrity": "sha512-2JkV3gUZUVrbNA+1sjBOYLsMZ5cEEl8GTFP2a4AVz5hvasAMCQ1D2l2le/cX+pV4N6ZU17zjUahLpIXRrnWL8A==", + "version": "14.0.2", + "resolved": "https://registry.npmjs.org/commander/-/commander-14.0.2.tgz", + "integrity": "sha512-TywoWNNRbhoD0BXs1P3ZEScW8W5iKrnbithIl0YH+uCmBd0QpPOA8yc82DS3BIE5Ma6FnBVUsJ7wVUDz4dvOWQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=20" } @@ -8537,6 +7467,13 @@ "dev": true, "license": "MIT" }, + "node_modules/confbox": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.2.2.tgz", + "integrity": "sha512-1NB+BKqhtNipMsov4xI/NnhCKp9XG9NamYp5PVm9klAT0fsrNPjaFICsCFhNhwZJKNh7zB/3q8qXz0E9oaMNtQ==", + "dev": true, + "license": "MIT" + }, "node_modules/confusing-browser-globals": { "version": "1.0.11", "resolved": "https://registry.npmjs.org/confusing-browser-globals/-/confusing-browser-globals-1.0.11.tgz", @@ -8576,25 +7513,26 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true, "license": "MIT" }, "node_modules/cookie": { - "version": "0.7.1", - "resolved": 
"https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", - "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==", + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", + "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", "engines": { "node": ">= 0.6" } }, "node_modules/cookie-signature": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", - "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==" + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.7.tgz", + "integrity": "sha512-NXdYc3dLr47pBkpUCHtKSwIOQXLVn8dZEuywboCOJY/osA0wFSLlSawr3KN8qXJEyX66FcONTH8EIlVuK0yyFA==" }, "node_modules/core-js": { - "version": "3.45.1", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.45.1.tgz", - "integrity": "sha512-L4NPsJlCfZsPeXukyzHFlg/i7IIVwHSItR0wg0FLNqYClJ4MQYTYLbC7EkjKYRLZF2iof2MUgN0EGy7MdQFChg==", + "version": "3.47.0", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.47.0.tgz", + "integrity": "sha512-c3Q2VVkGAUyupsjRnaNX6u8Dq2vAdzm9iuPj5FW0fRxzlxgq9Q39MDq10IvmQSpLgHQNyQzQmOo6bgGHmH3NNg==", "hasInstallScript": true, "license": "MIT", "funding": { @@ -8602,10 +7540,37 @@ "url": "https://opencollective.com/core-js" } }, + "node_modules/cosmiconfig": { + "version": "8.3.6", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.3.6.tgz", + "integrity": "sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA==", + "dev": true, + "license": "MIT", + "dependencies": { + "import-fresh": "^3.3.0", + "js-yaml": "^4.1.0", + "parse-json": "^5.2.0", + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/d-fischer" + }, + "peerDependencies": { + "typescript": ">=4.9.5" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, "node_modules/cross-env": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/cross-env/-/cross-env-10.0.0.tgz", - "integrity": "sha512-aU8qlEK/nHYtVuN4p7UQgAwVljzMg8hB4YK5ThRqD2l/ziSnryncPNn7bMLt5cFYsKVKBh8HqLqyCoTupEUu7Q==", + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/cross-env/-/cross-env-10.1.0.tgz", + "integrity": "sha512-GsYosgnACZTADcmEyJctkJIoqAhHjttw7RsFrVoJNXbsWWqaq6Ym+7kZjq6mS45O0jij6vtiReppKQEtqWy6Dw==", "dev": true, "license": "MIT", "dependencies": { @@ -8643,26 +7608,12 @@ "node": ">= 8" } }, - "node_modules/cross-spawn/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "license": "ISC", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/css-tree": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-3.1.0.tgz", "integrity": "sha512-0eW44TGN5SQXU1mWSkKwFstI/22X2bG1nYzZTYMAWjylYURhse752YgbE4Cx46AC+bAvI+/dYTPRk1LqSUnu6w==", "dev": true, + "license": "MIT", "dependencies": { "mdn-data": "2.12.2", "source-map-js": "^1.0.1" @@ -8692,13 +7643,14 @@ } }, "node_modules/cssstyle": { - "version": "5.3.1", - "resolved": 
"https://registry.npmjs.org/cssstyle/-/cssstyle-5.3.1.tgz", - "integrity": "sha512-g5PC9Aiph9eiczFpcgUhd9S4UUO3F+LHGRIi5NUMZ+4xtoIYbHNZwZnWA2JsFGe8OU8nl4WyaEFiZuGuxlutJQ==", + "version": "5.3.4", + "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-5.3.4.tgz", + "integrity": "sha512-KyOS/kJMEq5O9GdPnaf82noigg5X5DYn0kZPJTaAsCUaBizp6Xa1y9D4Qoqf/JazEXWuruErHgVXwjN5391ZJw==", "dev": true, + "license": "MIT", "dependencies": { - "@asamuzakjp/css-color": "^4.0.3", - "@csstools/css-syntax-patches-for-csstree": "^1.0.14", + "@asamuzakjp/css-color": "^4.1.0", + "@csstools/css-syntax-patches-for-csstree": "1.0.14", "css-tree": "^3.1.0" }, "engines": { @@ -8706,10 +7658,11 @@ } }, "node_modules/csstype": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", - "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", - "license": "MIT" + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz", + "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==", + "license": "MIT", + "peer": true }, "node_modules/damerau-levenshtein": { "version": "1.0.8", @@ -8723,6 +7676,7 @@ "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-6.0.0.tgz", "integrity": "sha512-BnBS08aLUM+DKamupXs3w2tJJoqU+AkaE/+6vQxi/G/DPmIZFJJp9Dkb1kM03AZx8ADehDUZgsNxju3mPXZYIA==", "dev": true, + "license": "MIT", "dependencies": { "whatwg-mimetype": "^4.0.0", "whatwg-url": "^15.0.0" @@ -8785,20 +7739,10 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/date-fns": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-4.1.0.tgz", - "integrity": "sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/kossnocorp" - } - }, "node_modules/debug": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", - "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", "license": "MIT", "dependencies": { "ms": "^2.1.3" @@ -8846,16 +7790,6 @@ } } }, - "node_modules/deep-eql": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-5.0.2.tgz", - "integrity": "sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/deep-is": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", @@ -8867,7 +7801,6 @@ "version": "4.3.1", "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", - "license": "MIT", "engines": { "node": ">=0.10.0" } @@ -8945,9 +7878,9 @@ } }, "node_modules/detect-libc": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.4.tgz", - "integrity": "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA==", + "version": "2.1.2", + "resolved": 
"https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", + "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", "license": "Apache-2.0", "engines": { "node": ">=8" @@ -8999,6 +7932,15 @@ "dev": true, "license": "MIT" }, + "node_modules/dompurify": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.2.7.tgz", + "integrity": "sha512-WhL/YuveyGXJaerVlMYGWhvQswa7myDG17P7Vu65EWC05o8vfeNbvNf4d/BOvH99+ZW+LlQsc1GDKMa1vNK6dw==", + "license": "(MPL-2.0 OR Apache-2.0)", + "optionalDependencies": { + "@types/trusted-types": "^2.0.7" + } + }, "node_modules/dot-case": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/dot-case/-/dot-case-3.0.4.tgz", @@ -9011,9 +7953,9 @@ } }, "node_modules/downshift": { - "version": "9.0.10", - "resolved": "https://registry.npmjs.org/downshift/-/downshift-9.0.10.tgz", - "integrity": "sha512-TP/iqV6bBok6eGD5tZ8boM8Xt7/+DZvnVNr8cNIhbAm2oUBd79Tudiccs2hbcV9p7xAgS/ozE7Hxy3a9QqS6Mw==", + "version": "9.0.13", + "resolved": "https://registry.npmjs.org/downshift/-/downshift-9.0.13.tgz", + "integrity": "sha512-fPV+K5jwEzfEAhNhprgCmpWQ23MKwKNzdbtK0QQFiw4hbFcKhMeGB+ccorfWJzmsLR5Dty+CmLDduWlIs74G/w==", "license": "MIT", "dependencies": { "@babel/runtime": "^7.24.5", @@ -9040,13 +7982,6 @@ "node": ">= 0.4" } }, - "node_modules/eastasianwidth": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", - "dev": true, - "license": "MIT" - }, "node_modules/ee-first": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", @@ -9054,9 +7989,10 @@ "license": "MIT" }, "node_modules/electron-to-chromium": { - "version": "1.5.214", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.214.tgz", - "integrity": "sha512-TpvUNdha+X3ybfU78NoQatKvQEm1oq3lf2QbnmCEdw+Bd9RuIAY+hJTvq1avzHM0f7EJfnH3vbCnbzKzisc/9Q==", + "version": "1.5.267", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.267.tgz", + "integrity": "sha512-0Drusm6MVRXSOJpGbaSVgcQsuB4hEkMpHXaVstcPmhu5LIedxs1xNK/nIxmQIU/RPC0+1/o0AVZfBTkTNJOdUw==", + "dev": true, "license": "ISC" }, "node_modules/emoji-regex": { @@ -9135,10 +8071,9 @@ } }, "node_modules/enhanced-resolve": { - "version": "5.18.3", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.3.tgz", - "integrity": "sha512-d4lC8xfavMeBjzGr2vECC3fsGXziXZQyJxD868h2M/mBI3PwAuODxAkLkq5HYuvrPYcUtiLzsTo8U3PgX3Ocww==", - "license": "MIT", + "version": "5.18.4", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.4.tgz", + "integrity": "sha512-LgQMM4WXU3QI+SYgEc2liRgznaD5ojbmY3sb8LxyguVkIg5FxdpTkvk72te2R38/TGKxH634oLxXRGY6d7AP+Q==", "dependencies": { "graceful-fs": "^4.2.4", "tapable": "^2.2.0" @@ -9165,6 +8100,7 @@ "resolved": "https://registry.npmjs.org/environment/-/environment-1.1.0.tgz", "integrity": "sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q==", "dev": true, + "license": "MIT", "engines": { "node": ">=18" }, @@ -9172,23 +8108,23 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/err-code": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/err-code/-/err-code-2.0.3.tgz", - "integrity": 
"sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==", - "dev": true, - "license": "MIT" - }, "node_modules/error-ex": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", - "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.4.tgz", + "integrity": "sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==", "dev": true, "license": "MIT", "dependencies": { "is-arrayish": "^0.2.1" } }, + "node_modules/error-ex/node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true, + "license": "MIT" + }, "node_modules/es-abstract": { "version": "1.24.0", "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.0.tgz", @@ -9370,11 +8306,10 @@ } }, "node_modules/esbuild": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.9.tgz", - "integrity": "sha512-CRbODhYyQx3qp7ZEwzxOk4JBqmD/seJrzPa/cGjY1VtIn5E09Oi9/dB4JwctnfZ8Q8iT7rioVv5k/FNT/uf54g==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.1.tgz", + "integrity": "sha512-yY35KZckJJuVVPXpvjgxiCuVEJT67F6zDeVTv4rizyPrfGBUpZQsvmxnN+C371c2esD/hNMjj4tpBhuueLN7aA==", "hasInstallScript": true, - "license": "MIT", "bin": { "esbuild": "bin/esbuild" }, @@ -9382,38 +8317,39 @@ "node": ">=18" }, "optionalDependencies": { - "@esbuild/aix-ppc64": "0.25.9", - "@esbuild/android-arm": "0.25.9", - "@esbuild/android-arm64": "0.25.9", - "@esbuild/android-x64": "0.25.9", - "@esbuild/darwin-arm64": "0.25.9", - "@esbuild/darwin-x64": "0.25.9", - "@esbuild/freebsd-arm64": "0.25.9", - "@esbuild/freebsd-x64": "0.25.9", - "@esbuild/linux-arm": "0.25.9", - "@esbuild/linux-arm64": "0.25.9", - "@esbuild/linux-ia32": "0.25.9", - "@esbuild/linux-loong64": "0.25.9", - "@esbuild/linux-mips64el": "0.25.9", - "@esbuild/linux-ppc64": "0.25.9", - "@esbuild/linux-riscv64": "0.25.9", - "@esbuild/linux-s390x": "0.25.9", - "@esbuild/linux-x64": "0.25.9", - "@esbuild/netbsd-arm64": "0.25.9", - "@esbuild/netbsd-x64": "0.25.9", - "@esbuild/openbsd-arm64": "0.25.9", - "@esbuild/openbsd-x64": "0.25.9", - "@esbuild/openharmony-arm64": "0.25.9", - "@esbuild/sunos-x64": "0.25.9", - "@esbuild/win32-arm64": "0.25.9", - "@esbuild/win32-ia32": "0.25.9", - "@esbuild/win32-x64": "0.25.9" + "@esbuild/aix-ppc64": "0.27.1", + "@esbuild/android-arm": "0.27.1", + "@esbuild/android-arm64": "0.27.1", + "@esbuild/android-x64": "0.27.1", + "@esbuild/darwin-arm64": "0.27.1", + "@esbuild/darwin-x64": "0.27.1", + "@esbuild/freebsd-arm64": "0.27.1", + "@esbuild/freebsd-x64": "0.27.1", + "@esbuild/linux-arm": "0.27.1", + "@esbuild/linux-arm64": "0.27.1", + "@esbuild/linux-ia32": "0.27.1", + "@esbuild/linux-loong64": "0.27.1", + "@esbuild/linux-mips64el": "0.27.1", + "@esbuild/linux-ppc64": "0.27.1", + "@esbuild/linux-riscv64": "0.27.1", + "@esbuild/linux-s390x": "0.27.1", + "@esbuild/linux-x64": "0.27.1", + "@esbuild/netbsd-arm64": "0.27.1", + "@esbuild/netbsd-x64": "0.27.1", + "@esbuild/openbsd-arm64": "0.27.1", + "@esbuild/openbsd-x64": "0.27.1", + "@esbuild/openharmony-arm64": "0.27.1", + "@esbuild/sunos-x64": "0.27.1", + "@esbuild/win32-arm64": "0.27.1", + 
"@esbuild/win32-ia32": "0.27.1", + "@esbuild/win32-x64": "0.27.1" } }, "node_modules/escalade": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, "license": "MIT", "engines": { "node": ">=6" @@ -9444,6 +8380,7 @@ "deprecated": "This version is no longer supported. Please see https://eslint.org/version-support for other options.", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", @@ -9567,6 +8504,7 @@ "integrity": "sha512-82GZUjRS0p/jganf6q1rEO25VSoHH0hKPCTrgillPjdI/3bgBhAE1QzHrHTizjpRvy6pGAvKjDJtk2pF9NDq8w==", "dev": true, "license": "MIT", + "peer": true, "bin": { "eslint-config-prettier": "bin/cli.js" }, @@ -9647,6 +8585,7 @@ "integrity": "sha512-whOE1HFo/qJDyX4SnXzP4N6zOWn79WhnCUY/iDR0mPfQZO8wcYE4JClzI2oZrhBnnMUCBCHZhO6VQyoBU95mZA==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@rtsao/scc": "^1.1.0", "array-includes": "^3.1.9", @@ -9738,6 +8677,7 @@ "integrity": "sha512-scB3nz4WmG75pV8+3eRUQOHZlNSUhFNq37xnpgRkCCELU3XMvXAxLk1eqWWyE22Ki4Q01Fnsw9BA3cJHDPgn2Q==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "aria-query": "^5.3.2", "array-includes": "^3.1.8", @@ -9833,6 +8773,7 @@ "integrity": "sha512-Qteup0SqU15kdocexFNAJMvCJEfa2xUKNV4CC1xsVMrIIqEy3SQ/rqyxCWNzfrd3/ldy6HMlD2e0JDVpDg2qIA==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "array-includes": "^3.1.8", "array.prototype.findlast": "^1.2.5", @@ -9866,6 +8807,7 @@ "integrity": "sha512-QzliNJq4GinDBcD8gPB5v0wh6g8q3SUi6EFF0x8N/BL9PoVs0atuGc47ozMRyOWAKdwaZ5OnbOEa3WR+dSGKuQ==", "dev": true, "license": "MIT", + "peer": true, "engines": { "node": ">=10" }, @@ -9939,9 +8881,9 @@ } }, "node_modules/eslint-plugin-unused-imports": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-unused-imports/-/eslint-plugin-unused-imports-4.2.0.tgz", - "integrity": "sha512-hLbJ2/wnjKq4kGA9AUaExVFIbNzyxYdVo49QZmKCnhk5pc9wcYRbfgLHvWJ8tnsdcseGhoUAddm9gn/lt+d74w==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-unused-imports/-/eslint-plugin-unused-imports-4.3.0.tgz", + "integrity": "sha512-ZFBmXMGBYfHttdRtOG9nFFpmUvMtbHSjsKrS20vdWdbfiVYsO3yA2SGYy9i9XmZJDfMGBflZGBCm70SEnFQtOA==", "dev": true, "license": "MIT", "peerDependencies": { @@ -10104,7 +9046,8 @@ "version": "5.0.1", "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz", "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/exit-hook": { "version": "2.2.1", @@ -10120,9 +9063,9 @@ } }, "node_modules/expect-type": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.2.2.tgz", - "integrity": "sha512-JhFGDVJ7tmDJItKhYgJCGLOWjuK9vPxiXoUFLwLDc99NlmklilbiQJwoctZtt13+xMw91MCk/REan6MWHqDjyA==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.3.0.tgz", + "integrity": "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==", "dev": true, "license": "Apache-2.0", "engines": { @@ -10130,38 +9073,39 @@ } }, "node_modules/express": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz", - "integrity": 
"sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==", + "version": "4.22.1", + "resolved": "https://registry.npmjs.org/express/-/express-4.22.1.tgz", + "integrity": "sha512-F2X8g9P1X7uCPZMA3MVf9wcTqlyNp7IhH5qPCI0izhaOIYXaW9L535tGA3qmjRzpH+bZczqq7hVKxTR4NWnu+g==", + "peer": true, "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", - "body-parser": "1.20.3", - "content-disposition": "0.5.4", + "body-parser": "~1.20.3", + "content-disposition": "~0.5.4", "content-type": "~1.0.4", - "cookie": "0.7.1", - "cookie-signature": "1.0.6", + "cookie": "~0.7.1", + "cookie-signature": "~1.0.6", "debug": "2.6.9", "depd": "2.0.0", "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "etag": "~1.8.1", - "finalhandler": "1.3.1", - "fresh": "0.5.2", - "http-errors": "2.0.0", + "finalhandler": "~1.3.1", + "fresh": "~0.5.2", + "http-errors": "~2.0.0", "merge-descriptors": "1.0.3", "methods": "~1.1.2", - "on-finished": "2.4.1", + "on-finished": "~2.4.1", "parseurl": "~1.3.3", - "path-to-regexp": "0.1.12", + "path-to-regexp": "~0.1.12", "proxy-addr": "~2.0.7", - "qs": "6.13.0", + "qs": "~6.14.0", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", - "send": "0.19.0", - "serve-static": "1.16.2", + "send": "~0.19.0", + "serve-static": "~1.16.2", "setprototypeof": "1.2.0", - "statuses": "2.0.1", + "statuses": "~2.0.1", "type-is": "~1.6.18", "utils-merge": "1.0.1", "vary": "~1.1.2" @@ -10187,6 +9131,13 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, + "node_modules/exsolve": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/exsolve/-/exsolve-1.0.8.tgz", + "integrity": "sha512-LmDxfWXwcTArk8fUEnOfSZpHOJ6zOMUJKOtFLFqJLoKJetuQG874Uc7/Kki7zFLzYybmZhp1M7+98pfMqeX8yA==", + "dev": true, + "license": "MIT" + }, "node_modules/extend": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", @@ -10307,16 +9258,16 @@ } }, "node_modules/finalhandler": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", - "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.2.tgz", + "integrity": "sha512-aA4RyPcd3badbdABGDuTXCMTtOneUCAYH/gxoYRTZlIJdF0YPWuGqiAsIrhNnnqdXGswYk6dGujem4w80UJFhg==", "dependencies": { "debug": "2.6.9", "encodeurl": "~2.0.0", "escape-html": "~1.0.3", - "on-finished": "2.4.1", + "on-finished": "~2.4.1", "parseurl": "~1.3.3", - "statuses": "2.0.1", + "statuses": "~2.0.2", "unpipe": "~1.0.0" }, "engines": { @@ -10357,7 +9308,6 @@ "version": "5.0.2", "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", - "license": "BSD-3-Clause", "bin": { "flat": "cli.js" } @@ -10420,27 +9370,10 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/foreground-child": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz", - "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==", - "dev": true, - "license": "ISC", - "dependencies": { - "cross-spawn": "^7.0.6", - "signal-exit": "^4.0.1" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": 
"https://github.com/sponsors/isaacs" - } - }, "node_modules/form-data": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", - "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.5.tgz", + "integrity": "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==", "license": "MIT", "dependencies": { "asynckit": "^0.4.0", @@ -10469,26 +9402,14 @@ "node": ">= 0.6" } }, - "node_modules/fraction.js": { - "version": "4.3.7", - "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.7.tgz", - "integrity": "sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==", - "dev": true, - "license": "MIT", - "engines": { - "node": "*" - }, - "funding": { - "type": "patreon", - "url": "https://github.com/sponsors/rawify" - } - }, "node_modules/framer-motion": { - "version": "12.23.22", - "resolved": "https://registry.npmjs.org/framer-motion/-/framer-motion-12.23.22.tgz", - "integrity": "sha512-ZgGvdxXCw55ZYvhoZChTlG6pUuehecgvEAJz0BHoC5pQKW1EC5xf1Mul1ej5+ai+pVY0pylyFfdl45qnM1/GsA==", + "version": "12.23.26", + "resolved": "https://registry.npmjs.org/framer-motion/-/framer-motion-12.23.26.tgz", + "integrity": "sha512-cPcIhgR42xBn1Uj+PzOyheMtZ73H927+uWPDVhUMqxy8UHt6Okavb6xIz9J/phFUHUj0OncR6UvMfJTXoc/LKA==", + "license": "MIT", + "peer": true, "dependencies": { - "motion-dom": "^12.23.21", + "motion-dom": "^12.23.23", "motion-utils": "^12.23.6", "tslib": "^2.4.0" }, @@ -10578,10 +9499,21 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/generator-function": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/generator-function/-/generator-function-2.0.1.tgz", + "integrity": "sha512-SFdFmIJi+ybC0vjlHN0ZGVGHc3lgE0DxPAT0djjVg+kjOnSqclqmj0KQ7ykTOLP6YxoqOvuAODGdcHJn+43q3g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, "node_modules/gensync": { "version": "1.0.0-beta.2", "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, "license": "MIT", "engines": { "node": ">=6.9.0" @@ -10602,6 +9534,7 @@ "resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.4.0.tgz", "integrity": "sha512-QZjmEOC+IT1uk6Rx0sX22V6uHWVwbdbxf1faPqJ1QhLdGgsRGCZoyaQBm/piRdJy/D2um6hM1UP7ZEeQ4EkP+Q==", "dev": true, + "license": "MIT", "engines": { "node": ">=18" }, @@ -10677,21 +9610,22 @@ } }, "node_modules/glob": { - "version": "10.4.5", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", - "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", "dev": true, "license": "ISC", "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^3.1.2", - "minimatch": "^9.0.4", - "minipass": "^7.1.2", - "package-json-from-dist": "^1.0.0", - "path-scurry": "^1.11.1" + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + 
"path-is-absolute": "^1.0.0" }, - "bin": { - "glob": "dist/esm/bin.mjs" + "engines": { + "node": "*" }, "funding": { "url": "https://github.com/sponsors/isaacs" @@ -10710,6 +9644,30 @@ "node": ">=10.13.0" } }, + "node_modules/glob/node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/globals": { "version": "13.24.0", "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", @@ -10772,9 +9730,9 @@ "license": "MIT" }, "node_modules/goober": { - "version": "2.1.16", - "resolved": "https://registry.npmjs.org/goober/-/goober-2.1.16.tgz", - "integrity": "sha512-erjk19y1U33+XAMe1VTvIONHYoSqE4iS7BYUZfHaqeohLmnC0FdxEh7rQU+6MZ4OajItzjZFSRtVANrQwNq6/g==", + "version": "2.1.18", + "resolved": "https://registry.npmjs.org/goober/-/goober-2.1.18.tgz", + "integrity": "sha512-2vFqsaDVIT9Gz7N6kAL++pLpp41l3PfDuusHcjnGLfR6+huZkl6ziX+zgVC3ZxpqWhzH6pyDdGrCeDhMIvwaxw==", "license": "MIT", "peerDependencies": { "csstype": "^3.0.10" @@ -10795,8 +9753,7 @@ "node_modules/graceful-fs": { "version": "4.2.11", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", - "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", - "license": "ISC" + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" }, "node_modules/graphemer": { "version": "1.4.0", @@ -10806,9 +9763,9 @@ "license": "MIT" }, "node_modules/graphql": { - "version": "16.11.0", - "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.11.0.tgz", - "integrity": "sha512-mS1lbMsxgQj6hge1XZ6p7GPhbrtFwUFYi3wRzXAC/FmYnyXMTvvI3td3rjmQ2u8ewXueaSvRPWaEcgVVOT9Jnw==", + "version": "16.12.0", + "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.12.0.tgz", + "integrity": "sha512-DKKrynuQRne0PNpEbzuEdHlYOMksHSUI8Zc9Unei5gTsMNA2/vMpoMz/yKba50pejK56qj98qM0SjYxAKi13gQ==", "dev": true, "license": "MIT", "engines": { @@ -10907,10 +9864,13 @@ } }, "node_modules/hast-util-parse-selector": { - "version": "2.2.5", - "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-2.2.5.tgz", - "integrity": "sha512-7j6mrk/qqkSehsM92wQjdIgWM2/BW61u/53G6xmC8i1OmEdKLHbk419QKQUjz6LglWsfqoiHmyMRkP1BGjecNQ==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-4.0.0.tgz", + "integrity": "sha512-wkQCkSYoOGCRKERFWcxMVMOcYE2K1AaNLU8DXS9arxnLOUEWbOXKXiJUNzEpqZ3JOKpnha3jkFrumEjVliDe7A==", "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0" + }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" @@ -10957,70 +9917,22 @@ } }, "node_modules/hastscript": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-6.0.0.tgz", - "integrity": 
"sha512-nDM6bvd7lIqDUiYEiu5Sl/+6ReP0BMk/2f4U/Rooccxkj0P5nm+acM5PrGJ/t5I8qPGiqZSE6hVAwZEdZIvP4w==", + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-9.0.1.tgz", + "integrity": "sha512-g7df9rMFX/SPi34tyGCyUBREQoKkapwdY/T04Qn9TDWfHhAYt4/I0gMVirzK5wEzeUqIjEB+LXC/ypb7Aqno5w==", "license": "MIT", "dependencies": { - "@types/hast": "^2.0.0", - "comma-separated-tokens": "^1.0.0", - "hast-util-parse-selector": "^2.0.0", - "property-information": "^5.0.0", - "space-separated-tokens": "^1.0.0" + "@types/hast": "^3.0.0", + "comma-separated-tokens": "^2.0.0", + "hast-util-parse-selector": "^4.0.0", + "property-information": "^7.0.0", + "space-separated-tokens": "^2.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, - "node_modules/hastscript/node_modules/@types/hast": { - "version": "2.3.10", - "resolved": "https://registry.npmjs.org/@types/hast/-/hast-2.3.10.tgz", - "integrity": "sha512-McWspRw8xx8J9HurkVBfYj0xKoE25tOFlHGdx4MJ5xORQrMGZNqJhVQWaIbm6Oyla5kYOXtDiopzKRJzEOkwJw==", - "license": "MIT", - "dependencies": { - "@types/unist": "^2" - } - }, - "node_modules/hastscript/node_modules/@types/unist": { - "version": "2.0.11", - "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz", - "integrity": "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==", - "license": "MIT" - }, - "node_modules/hastscript/node_modules/comma-separated-tokens": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-1.0.8.tgz", - "integrity": "sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/hastscript/node_modules/property-information": { - "version": "5.6.0", - "resolved": "https://registry.npmjs.org/property-information/-/property-information-5.6.0.tgz", - "integrity": "sha512-YUHSPk+A30YPv+0Qf8i9Mbfe/C0hdPXk1s1jPVToV8pk8BQtpw10ct89Eo7OWkutrwqvT0eicAxlOg3dOAu8JA==", - "license": "MIT", - "dependencies": { - "xtend": "^4.0.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/hastscript/node_modules/space-separated-tokens": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-1.1.5.tgz", - "integrity": "sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, "node_modules/headers-polyfill": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/headers-polyfill/-/headers-polyfill-4.0.3.tgz", @@ -11043,29 +9955,6 @@ "integrity": "sha512-PDEfEF102G23vHmPhLyPboFCD+BkMGu+GuJe2d9/eH4FsCwvgBpnc9n0pGE+ffKdph38s6foEZiEjdgHdzp+IA==", "license": "CC0-1.0" }, - "node_modules/hosted-git-info": { - "version": "6.1.3", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.3.tgz", - "integrity": "sha512-HVJyzUrLIL1c0QmviVh5E8VGyUS7xCFPS6yydaVd1UegW+ibV/CohqTH9MkOLDp5o+rb82DMo77PTuc9F/8GKw==", - "dev": true, - "license": "ISC", - "dependencies": { - "lru-cache": "^7.5.1" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/hosted-git-info/node_modules/lru-cache": { - "version": "7.18.3", - "resolved": 
"https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", - "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=12" - } - }, "node_modules/html-encoding-sniffer": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-4.0.0.tgz", @@ -11106,18 +9995,22 @@ } }, "node_modules/http-errors": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", - "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz", + "integrity": "sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==", "dependencies": { - "depd": "2.0.0", - "inherits": "2.0.4", - "setprototypeof": "1.2.0", - "statuses": "2.0.1", - "toidentifier": "1.0.1" + "depd": "~2.0.0", + "inherits": "~2.0.4", + "setprototypeof": "~1.2.0", + "statuses": "~2.0.2", + "toidentifier": "~1.0.1" }, "engines": { "node": ">= 0.8" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/http-proxy-agent": { @@ -11165,9 +10058,9 @@ } }, "node_modules/i18next": { - "version": "25.5.2", - "resolved": "https://registry.npmjs.org/i18next/-/i18next-25.5.2.tgz", - "integrity": "sha512-lW8Zeh37i/o0zVr+NoCHfNnfvVw+M6FQbRp36ZZ/NyHDJ3NJVpp2HhAUyU9WafL5AssymNoOjMRB48mmx2P6Hw==", + "version": "25.7.3", + "resolved": "https://registry.npmjs.org/i18next/-/i18next-25.7.3.tgz", + "integrity": "sha512-2XaT+HpYGuc2uTExq9TVRhLsso+Dxym6PWaKpn36wfBmTI779OQ7iP/XaZHzrnGyzU4SHpFrTYLKfVyBfAhVNA==", "funding": [ { "type": "individual", @@ -11182,9 +10075,9 @@ "url": "https://www.i18next.com/how-to/faq#i18next-is-awesome.-how-can-i-support-the-project" } ], - "license": "MIT", + "peer": true, "dependencies": { - "@babel/runtime": "^7.27.6" + "@babel/runtime": "^7.28.4" }, "peerDependencies": { "typescript": "^5" @@ -11290,16 +10183,15 @@ "license": "ISC" }, "node_modules/inline-style-parser": { - "version": "0.2.4", - "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.2.4.tgz", - "integrity": "sha512-0aO8FkhNZlj/ZIbNi7Lxxr12obT7cL1moPfE4tg1LkX7LlLfC6DeX4l2ZEud1ukP9jNQyNnfzQVqwbwmAATY4Q==", + "version": "0.2.7", + "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.2.7.tgz", + "integrity": "sha512-Nb2ctOyNR8DqQoR0OwRG95uNWIC0C1lCgf5Naz5H6Ji72KZ8OcFZLz2P5sNgwlyoJ8Yif11oMuYs5pBQa86csA==", "license": "MIT" }, "node_modules/input-otp": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/input-otp/-/input-otp-1.4.1.tgz", "integrity": "sha512-+yvpmKYKHi9jIGngxagY9oWiiblPB7+nEO75F2l2o4vs+6vpPZZmUl4tBNYuTCvQjhvEIbdNeJu70bhfYP2nbw==", - "license": "MIT", "peerDependencies": { "react": "^16.8 || ^17.0 || ^18.0 || ^19.0.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0.0 || ^19.0.0-rc" @@ -11324,7 +10216,6 @@ "version": "10.7.18", "resolved": "https://registry.npmjs.org/intl-messageformat/-/intl-messageformat-10.7.18.tgz", "integrity": "sha512-m3Ofv/X/tV8Y3tHXLohcuVuhWKo7BBq62cqY15etqmLxg2DZ34AGGgQDeR+SCta2+zICb1NX83af0GJmbQ1++g==", - "license": "BSD-3-Clause", "dependencies": { "@formatjs/ecma402-abstract": "2.3.6", "@formatjs/fast-memoize": "2.2.7", @@ -11383,10 +10274,9 @@ } }, "node_modules/is-arrayish": { - "version": "0.2.1", - 
"resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", - "dev": true, + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.4.tgz", + "integrity": "sha512-m6UrgzFVUYawGBh1dUsWR5M2Clqic9RVXC/9f8ceNlv2IcO9j9J/z8UoCLPqtsPBFNzEpfR3xftohbfqDx8EQA==", "license": "MIT" }, "node_modules/is-async-function": { @@ -11547,6 +10437,7 @@ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-5.1.0.tgz", "integrity": "sha512-5XHYaSyiqADb4RnZ1Bdad6cPp8Toise4TzEjcOYDHZkTCbKgiUl7WTUCpNWHuxmDt91wnsZBc9xinNzopv3JMQ==", "dev": true, + "license": "MIT", "dependencies": { "get-east-asian-width": "^1.3.1" }, @@ -11558,14 +10449,15 @@ } }, "node_modules/is-generator-function": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.1.0.tgz", - "integrity": "sha512-nPUB5km40q9e8UfN/Zc24eLlzdSf9OfKByBw9CIdw4H1giPMeA0OIJvbchsCu4npfI2QcMVBsGEBHKZ7wLTWmQ==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.1.2.tgz", + "integrity": "sha512-upqt1SkGkODW9tsGNG5mtXTXtECizwtS2kA161M+gJPc1xdb/Ax629af6YrTwcOeQHbewrPNlE5Dx7kzvXTizA==", "dev": true, "license": "MIT", "dependencies": { - "call-bound": "^1.0.3", - "get-proto": "^1.0.0", + "call-bound": "^1.0.4", + "generator-function": "^2.0.0", + "get-proto": "^1.0.1", "has-tostringtag": "^1.0.2", "safe-regex-test": "^1.1.0" }, @@ -11841,9 +10733,10 @@ "license": "MIT" }, "node_modules/isbot": { - "version": "5.1.31", - "resolved": "https://registry.npmjs.org/isbot/-/isbot-5.1.31.tgz", - "integrity": "sha512-DPgQshehErHAqSCKDb3rNW03pa2wS/v5evvUqtxt6TTnHRqAG8FdzcSSJs9656pK6Y+NT7K9R4acEYXLHYfpUQ==", + "version": "5.1.32", + "resolved": "https://registry.npmjs.org/isbot/-/isbot-5.1.32.tgz", + "integrity": "sha512-VNfjM73zz2IBZmdShMfAUg10prm6t7HFUQmNAEOAVS4YH92ZrZcvkMcGX6cIgBJAzWDzPent/EeAtYEHNPNPBQ==", + "license": "Unlicense", "engines": { "node": ">=18" } @@ -11926,40 +10819,15 @@ "node": ">= 0.4" } }, - "node_modules/jackspeak": { - "version": "3.4.3", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz", - "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", - "dev": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "@isaacs/cliui": "^8.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - }, - "optionalDependencies": { - "@pkgjs/parseargs": "^0.11.0" - } - }, "node_modules/jiti": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.5.1.tgz", - "integrity": "sha512-twQoecYPiVA5K/h6SxtORw/Bs3ar+mLUtoPSc7iMXzQzK8d7eJ/R09wmTwAjiamETn1cXYPGfNnu7DMoHgu12w==", + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.6.1.tgz", + "integrity": "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==", "license": "MIT", "bin": { "jiti": "lib/jiti-cli.mjs" } }, - "node_modules/jose": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/jose/-/jose-6.1.0.tgz", - "integrity": "sha512-TTQJyoEoKcC1lscpVDCSsVgYzUDg/0Bt3WE//WiTPK6uOCQC2KZS4MpugbMWt/zyjkopgZoXhZuCi00gLudfUA==", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/panva" - } - }, "node_modules/js-tokens": { "version": "4.0.0", "resolved": 
"https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", @@ -11967,9 +10835,9 @@ "license": "MIT" }, "node_modules/js-yaml": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz", + "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==", "dev": true, "license": "MIT", "dependencies": { @@ -11980,21 +10848,23 @@ } }, "node_modules/jsdom": { - "version": "27.0.0", - "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-27.0.0.tgz", - "integrity": "sha512-lIHeR1qlIRrIN5VMccd8tI2Sgw6ieYXSVktcSHaNe3Z5nE/tcPQYQWOq00wxMvYOsz+73eAkNenVvmPC6bba9A==", + "version": "27.3.0", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-27.3.0.tgz", + "integrity": "sha512-GtldT42B8+jefDUC4yUKAvsaOrH7PDHmZxZXNgF2xMmymjUbRYJvpAybZAKEmXDGTM0mCsz8duOa4vTm5AY2Kg==", "dev": true, + "license": "MIT", + "peer": true, "dependencies": { - "@asamuzakjp/dom-selector": "^6.5.4", - "cssstyle": "^5.3.0", + "@acemir/cssom": "^0.9.28", + "@asamuzakjp/dom-selector": "^6.7.6", + "cssstyle": "^5.3.4", "data-urls": "^6.0.0", - "decimal.js": "^10.5.0", + "decimal.js": "^10.6.0", "html-encoding-sniffer": "^4.0.0", "http-proxy-agent": "^7.0.2", "https-proxy-agent": "^7.0.6", "is-potential-custom-element-name": "^1.0.1", - "parse5": "^7.3.0", - "rrweb-cssom": "^0.8.0", + "parse5": "^8.0.0", "saxes": "^6.0.0", "symbol-tree": "^3.2.4", "tough-cookie": "^6.0.0", @@ -12002,12 +10872,12 @@ "webidl-conversions": "^8.0.0", "whatwg-encoding": "^3.1.1", "whatwg-mimetype": "^4.0.0", - "whatwg-url": "^15.0.0", - "ws": "^8.18.2", + "whatwg-url": "^15.1.0", + "ws": "^8.18.3", "xml-name-validator": "^5.0.0" }, "engines": { - "node": ">=20" + "node": "^20.19.0 || ^22.12.0 || >=24.0.0" }, "peerDependencies": { "canvas": "^3.0.0" @@ -12019,9 +10889,10 @@ } }, "node_modules/jsesc": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", - "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.0.2.tgz", + "integrity": "sha512-xKqzzWXDttJuOcawBt4KnKHHIf5oQ/Cxax+0PWFG+DFDgHNAdi+TXECADI+RYiFUMmx8792xsMbbgXj4CwnP4g==", + "dev": true, "license": "MIT", "bin": { "jsesc": "bin/jsesc" @@ -12038,14 +10909,11 @@ "license": "MIT" }, "node_modules/json-parse-even-better-errors": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.2.tgz", - "integrity": "sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ==", + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", "dev": true, - "license": "MIT", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } + "license": "MIT" }, "node_modules/json-schema-traverse": { "version": "0.4.1", @@ -12065,6 +10933,7 @@ "version": "2.2.3", "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, 
"license": "MIT", "bin": { "json5": "lib/cli.js" @@ -12143,9 +11012,9 @@ } }, "node_modules/lightningcss": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.30.1.tgz", - "integrity": "sha512-xi6IyHML+c9+Q3W0S4fCQJOym42pyurFiJUHEcEyHS0CeKzia4yZDEsLlqOFykxOdHpNy0NmvVO31vcSqAxJCg==", + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.30.2.tgz", + "integrity": "sha512-utfs7Pr5uJyyvDETitgsaqSyjCb2qNRAtuqUeWIAKztsOYdcACf2KtARYXg2pSvhkt+9NfoaNY7fxjl6nuMjIQ==", "license": "MPL-2.0", "dependencies": { "detect-libc": "^2.0.3" @@ -12158,22 +11027,43 @@ "url": "https://opencollective.com/parcel" }, "optionalDependencies": { - "lightningcss-darwin-arm64": "1.30.1", - "lightningcss-darwin-x64": "1.30.1", - "lightningcss-freebsd-x64": "1.30.1", - "lightningcss-linux-arm-gnueabihf": "1.30.1", - "lightningcss-linux-arm64-gnu": "1.30.1", - "lightningcss-linux-arm64-musl": "1.30.1", - "lightningcss-linux-x64-gnu": "1.30.1", - "lightningcss-linux-x64-musl": "1.30.1", - "lightningcss-win32-arm64-msvc": "1.30.1", - "lightningcss-win32-x64-msvc": "1.30.1" + "lightningcss-android-arm64": "1.30.2", + "lightningcss-darwin-arm64": "1.30.2", + "lightningcss-darwin-x64": "1.30.2", + "lightningcss-freebsd-x64": "1.30.2", + "lightningcss-linux-arm-gnueabihf": "1.30.2", + "lightningcss-linux-arm64-gnu": "1.30.2", + "lightningcss-linux-arm64-musl": "1.30.2", + "lightningcss-linux-x64-gnu": "1.30.2", + "lightningcss-linux-x64-musl": "1.30.2", + "lightningcss-win32-arm64-msvc": "1.30.2", + "lightningcss-win32-x64-msvc": "1.30.2" + } + }, + "node_modules/lightningcss-android-arm64": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-android-arm64/-/lightningcss-android-arm64-1.30.2.tgz", + "integrity": "sha512-BH9sEdOCahSgmkVhBLeU7Hc9DWeZ1Eb6wNS6Da8igvUwAe0sqROHddIlvU06q3WyXVEOYDZ6ykBZQnjTbmo4+A==", + "cpu": [ + "arm64" + ], + "license": "MPL-2.0", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" } }, "node_modules/lightningcss-darwin-arm64": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.30.1.tgz", - "integrity": "sha512-c8JK7hyE65X1MHMN+Viq9n11RRC7hgin3HhYKhrMyaXflk5GVplZ60IxyoVtzILeKr+xAJwg6zK6sjTBJ0FKYQ==", + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.30.2.tgz", + "integrity": "sha512-ylTcDJBN3Hp21TdhRT5zBOIi73P6/W0qwvlFEk22fkdXchtNTOU4Qc37SkzV+EKYxLouZ6M4LG9NfZ1qkhhBWA==", "cpu": [ "arm64" ], @@ -12191,9 +11081,9 @@ } }, "node_modules/lightningcss-darwin-x64": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.30.1.tgz", - "integrity": "sha512-k1EvjakfumAQoTfcXUcHQZhSpLlkAuEkdMBsI/ivWw9hL+7FtilQc0Cy3hrx0AAQrVtQAbMI7YjCgYgvn37PzA==", + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.30.2.tgz", + "integrity": "sha512-oBZgKchomuDYxr7ilwLcyms6BCyLn0z8J0+ZZmfpjwg9fRVZIR5/GMXd7r9RH94iDhld3UmSjBM6nXWM2TfZTQ==", "cpu": [ "x64" ], @@ -12211,9 +11101,9 @@ } }, "node_modules/lightningcss-freebsd-x64": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.30.1.tgz", - "integrity": 
"sha512-kmW6UGCGg2PcyUE59K5r0kWfKPAVy4SltVeut+umLCFoJ53RdCUWxcRDzO1eTaxf/7Q2H7LTquFHPL5R+Gjyig==", + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.30.2.tgz", + "integrity": "sha512-c2bH6xTrf4BDpK8MoGG4Bd6zAMZDAXS569UxCAGcA7IKbHNMlhGQ89eRmvpIUGfKWNVdbhSbkQaWhEoMGmGslA==", "cpu": [ "x64" ], @@ -12231,9 +11121,9 @@ } }, "node_modules/lightningcss-linux-arm-gnueabihf": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.30.1.tgz", - "integrity": "sha512-MjxUShl1v8pit+6D/zSPq9S9dQ2NPFSQwGvxBCYaBYLPlCWuPh9/t1MRS8iUaR8i+a6w7aps+B4N0S1TYP/R+Q==", + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.30.2.tgz", + "integrity": "sha512-eVdpxh4wYcm0PofJIZVuYuLiqBIakQ9uFZmipf6LF/HRj5Bgm0eb3qL/mr1smyXIS1twwOxNWndd8z0E374hiA==", "cpu": [ "arm" ], @@ -12251,9 +11141,9 @@ } }, "node_modules/lightningcss-linux-arm64-gnu": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.30.1.tgz", - "integrity": "sha512-gB72maP8rmrKsnKYy8XUuXi/4OctJiuQjcuqWNlJQ6jZiWqtPvqFziskH3hnajfvKB27ynbVCucKSm2rkQp4Bw==", + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.30.2.tgz", + "integrity": "sha512-UK65WJAbwIJbiBFXpxrbTNArtfuznvxAJw4Q2ZGlU8kPeDIWEX1dg3rn2veBVUylA2Ezg89ktszWbaQnxD/e3A==", "cpu": [ "arm64" ], @@ -12271,9 +11161,9 @@ } }, "node_modules/lightningcss-linux-arm64-musl": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.30.1.tgz", - "integrity": "sha512-jmUQVx4331m6LIX+0wUhBbmMX7TCfjF5FoOH6SD1CttzuYlGNVpA7QnrmLxrsub43ClTINfGSYyHe2HWeLl5CQ==", + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.30.2.tgz", + "integrity": "sha512-5Vh9dGeblpTxWHpOx8iauV02popZDsCYMPIgiuw97OJ5uaDsL86cnqSFs5LZkG3ghHoX5isLgWzMs+eD1YzrnA==", "cpu": [ "arm64" ], @@ -12291,9 +11181,9 @@ } }, "node_modules/lightningcss-linux-x64-gnu": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.30.1.tgz", - "integrity": "sha512-piWx3z4wN8J8z3+O5kO74+yr6ze/dKmPnI7vLqfSqI8bccaTGY5xiSGVIJBDd5K5BHlvVLpUB3S2YCfelyJ1bw==", + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.30.2.tgz", + "integrity": "sha512-Cfd46gdmj1vQ+lR6VRTTadNHu6ALuw2pKR9lYq4FnhvgBc4zWY1EtZcAc6EffShbb1MFrIPfLDXD6Xprbnni4w==", "cpu": [ "x64" ], @@ -12311,9 +11201,9 @@ } }, "node_modules/lightningcss-linux-x64-musl": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.30.1.tgz", - "integrity": "sha512-rRomAK7eIkL+tHY0YPxbc5Dra2gXlI63HL+v1Pdi1a3sC+tJTcFrHX+E86sulgAXeI7rSzDYhPSeHHjqFhqfeQ==", + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.30.2.tgz", + "integrity": "sha512-XJaLUUFXb6/QG2lGIW6aIk6jKdtjtcffUT0NKvIqhSBY3hh9Ch+1LCeH80dR9q9LBjG3ewbDjnumefsLsP6aiA==", "cpu": [ "x64" ], @@ -12331,9 +11221,9 @@ } }, "node_modules/lightningcss-win32-arm64-msvc": { - "version": "1.30.1", - "resolved": 
"https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.30.1.tgz", - "integrity": "sha512-mSL4rqPi4iXq5YVqzSsJgMVFENoa4nGTT/GjO2c0Yl9OuQfPsIfncvLrEW6RbbB24WtZ3xP/2CCmI3tNkNV4oA==", + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.30.2.tgz", + "integrity": "sha512-FZn+vaj7zLv//D/192WFFVA0RgHawIcHqLX9xuWiQt7P0PtdFEVaxgF9rjM/IRYHQXNnk61/H/gb2Ei+kUQ4xQ==", "cpu": [ "arm64" ], @@ -12351,9 +11241,9 @@ } }, "node_modules/lightningcss-win32-x64-msvc": { - "version": "1.30.1", - "resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.30.1.tgz", - "integrity": "sha512-PVqXh48wh4T53F/1CCu8PIPCxLzWyCnn/9T5W1Jpmdy5h9Cwd+0YQS6/LwhHXSafuc61/xg9Lv5OrCby6a++jg==", + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.30.2.tgz", + "integrity": "sha512-5g1yc73p+iAkid5phb4oVFMB45417DkRevRbt/El/gKXJk4jid+vPFF/AXbxn05Aky8PapwzZrdJShv5C0avjw==", "cpu": [ "x64" ], @@ -12378,15 +11268,16 @@ "license": "MIT" }, "node_modules/lint-staged": { - "version": "16.2.3", - "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-16.2.3.tgz", - "integrity": "sha512-1OnJEESB9zZqsp61XHH2fvpS1es3hRCxMplF/AJUDa8Ho8VrscYDIuxGrj3m8KPXbcWZ8fT9XTMUhEQmOVKpKw==", + "version": "16.2.7", + "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-16.2.7.tgz", + "integrity": "sha512-lDIj4RnYmK7/kXMya+qJsmkRFkGolciXjrsZ6PC25GdTfWOAWetR0ZbsNXRAj1EHHImRSalc+whZFg56F5DVow==", "dev": true, + "license": "MIT", "dependencies": { - "commander": "^14.0.1", - "listr2": "^9.0.4", + "commander": "^14.0.2", + "listr2": "^9.0.5", "micromatch": "^4.0.8", - "nano-spawn": "^1.0.3", + "nano-spawn": "^2.0.0", "pidtree": "^0.6.0", "string-argv": "^0.3.2", "yaml": "^2.8.1" @@ -12402,10 +11293,11 @@ } }, "node_modules/listr2": { - "version": "9.0.4", - "resolved": "https://registry.npmjs.org/listr2/-/listr2-9.0.4.tgz", - "integrity": "sha512-1wd/kpAdKRLwv7/3OKC8zZ5U8e/fajCfWMxacUvB79S5nLrYGPtUI/8chMQhn3LQjsRVErTb9i1ECAwW0ZIHnQ==", + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/listr2/-/listr2-9.0.5.tgz", + "integrity": "sha512-ME4Fb83LgEgwNw96RKNvKV4VTLuXfoKudAmm2lP8Kk87KaMK0/Xrx/aAkMWmT8mDb+3MlFDspfbCs7adjRxA2g==", "dev": true, + "license": "MIT", "dependencies": { "cli-truncate": "^5.0.0", "colorette": "^2.0.20", @@ -12418,85 +11310,6 @@ "node": ">=20.0.0" } }, - "node_modules/listr2/node_modules/ansi-regex": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz", - "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==", - "dev": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - "node_modules/listr2/node_modules/ansi-styles": { - "version": "6.2.3", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz", - "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==", - "dev": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/listr2/node_modules/emoji-regex": { - "version": "10.5.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.5.0.tgz", - "integrity": 
"sha512-lb49vf1Xzfx080OKA0o6l8DQQpV+6Vg95zyCJX9VB/BqKYlhG7N4wgROUUHRA+ZPUefLnteQOad7z1kT2bV7bg==", - "dev": true - }, - "node_modules/listr2/node_modules/string-width": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz", - "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==", - "dev": true, - "dependencies": { - "emoji-regex": "^10.3.0", - "get-east-asian-width": "^1.0.0", - "strip-ansi": "^7.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/listr2/node_modules/strip-ansi": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz", - "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==", - "dev": true, - "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" - } - }, - "node_modules/listr2/node_modules/wrap-ansi": { - "version": "9.0.2", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz", - "integrity": "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==", - "dev": true, - "dependencies": { - "ansi-styles": "^6.2.1", - "string-width": "^7.0.0", - "strip-ansi": "^7.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, "node_modules/local-access": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/local-access/-/local-access-1.1.0.tgz", @@ -12547,6 +11360,7 @@ "resolved": "https://registry.npmjs.org/log-update/-/log-update-6.1.0.tgz", "integrity": "sha512-9ie8ItPR6tjY5uYJh8K/Zrv/RMZ5VOlOWvtZdEHYSTFKZfIBPQa9tOAEeAWhd+AnIneLJ22w5fjOYtoutpWq5w==", "dev": true, + "license": "MIT", "dependencies": { "ansi-escapes": "^7.0.0", "cli-cursor": "^5.0.0", @@ -12566,6 +11380,7 @@ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz", "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==", "dev": true, + "license": "MIT", "engines": { "node": ">=12" }, @@ -12573,46 +11388,12 @@ "url": "https://github.com/chalk/ansi-regex?sponsor=1" } }, - "node_modules/log-update/node_modules/ansi-styles": { - "version": "6.2.3", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz", - "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==", - "dev": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/log-update/node_modules/emoji-regex": { - "version": "10.5.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.5.0.tgz", - "integrity": "sha512-lb49vf1Xzfx080OKA0o6l8DQQpV+6Vg95zyCJX9VB/BqKYlhG7N4wgROUUHRA+ZPUefLnteQOad7z1kT2bV7bg==", - "dev": true - }, - "node_modules/log-update/node_modules/string-width": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz", - "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==", - "dev": true, - "dependencies": { - "emoji-regex": "^10.3.0", - "get-east-asian-width": "^1.0.0", - "strip-ansi": "^7.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": 
"https://github.com/sponsors/sindresorhus" - } - }, "node_modules/log-update/node_modules/strip-ansi": { "version": "7.1.2", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz", "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==", "dev": true, + "license": "MIT", "dependencies": { "ansi-regex": "^6.0.1" }, @@ -12623,23 +11404,6 @@ "url": "https://github.com/chalk/strip-ansi?sponsor=1" } }, - "node_modules/log-update/node_modules/wrap-ansi": { - "version": "9.0.2", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz", - "integrity": "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==", - "dev": true, - "dependencies": { - "ansi-styles": "^6.2.1", - "string-width": "^7.0.0", - "strip-ansi": "^7.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, "node_modules/longest-streak": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz", @@ -12662,13 +11426,6 @@ "loose-envify": "cli.js" } }, - "node_modules/loupe": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.2.1.tgz", - "integrity": "sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ==", - "dev": true, - "license": "MIT" - }, "node_modules/lower-case": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz", @@ -12697,15 +11454,16 @@ "version": "5.1.1", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, "license": "ISC", "dependencies": { "yallist": "^3.0.2" } }, "node_modules/lucide-react": { - "version": "0.544.0", - "resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.544.0.tgz", - "integrity": "sha512-t5tS44bqd825zAW45UQxpG2CvcC4urOwn2TrwSH8u+MjeE+1NnWl6QqeQ/6NdjMqdOygyiT9p3Ev0p1NJykxjw==", + "version": "0.562.0", + "resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.562.0.tgz", + "integrity": "sha512-82hOAu7y0dbVuFfmO4bYF1XEwYk/mEbM5E+b1jgci/udUBEE/R7LF5Ip0CCEmXe8AybRM8L+04eP+LGZeDvkiw==", "peerDependencies": { "react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0" } @@ -12721,24 +11479,24 @@ } }, "node_modules/magic-string": { - "version": "0.30.18", - "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.18.tgz", - "integrity": "sha512-yi8swmWbO17qHhwIBNeeZxTceJMeBvWJaId6dyvTSOwTipqeHhMhOrz6513r1sOKnpvQ7zkhlG8tPrpilwTxHQ==", + "version": "0.30.21", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz", + "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", "license": "MIT", "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.5" } }, "node_modules/magicast": { - "version": "0.3.5", - "resolved": "https://registry.npmjs.org/magicast/-/magicast-0.3.5.tgz", - "integrity": "sha512-L0WhttDl+2BOsybvEOLK7fW3UA0OQ0IQ2d6Zl2x/a6vVRs3bAY0ECOSHHeL5jD+SbOpOCUEi0y1DgHEn9Qn1AQ==", + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/magicast/-/magicast-0.5.1.tgz", + "integrity": "sha512-xrHS24IxaLrvuo613F719wvOIv9xPHFWQHuvGUBmPnCA/3MQxKI3b+r7n1jAoDHmsbC5bRhTZYR77invLAxVnw==", "dev": true, "license": "MIT", "dependencies": { - "@babel/parser": "^7.25.4", - "@babel/types": 
"^7.25.4", - "source-map-js": "^1.2.0" + "@babel/parser": "^7.28.5", + "@babel/types": "^7.28.5", + "source-map-js": "^1.2.1" } }, "node_modules/make-dir": { @@ -12767,6 +11525,18 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/marked": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/marked/-/marked-14.0.0.tgz", + "integrity": "sha512-uIj4+faQ+MgHgwUW1l2PsPglZLOLOT1uErt06dAPtx2kjteLAkbsd/0FiYg/MGS+i7ZKLb7w2WClxHkzOOuryQ==", + "license": "MIT", + "bin": { + "marked": "bin/marked.js" + }, + "engines": { + "node": ">= 18" + } + }, "node_modules/math-intrinsics": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", @@ -13018,9 +11788,9 @@ } }, "node_modules/mdast-util-to-hast": { - "version": "13.2.0", - "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.0.tgz", - "integrity": "sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA==", + "version": "13.2.1", + "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.1.tgz", + "integrity": "sha512-cctsq2wp5vTsLIcaymblUriiTcZd0CwWtCbLvrOzYCDZoWyMNV8sZ7krj09FSnsiJi3WVsHLM4k6Dq/yaPyCXA==", "license": "MIT", "dependencies": { "@types/hast": "^3.0.0", @@ -13076,7 +11846,8 @@ "version": "2.12.2", "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.12.2.tgz", "integrity": "sha512-IEn+pegP1aManZuckezWCO+XZQDplx1366JoVhTpMpBB1sPey/SbveZQUosKiKiGYjg1wH4pMlNgXbCiYgihQA==", - "dev": true + "dev": true, + "license": "CC0-1.0" }, "node_modules/media-typer": { "version": "0.3.0", @@ -13735,6 +12506,7 @@ "resolved": "https://registry.npmjs.org/mimic-function/-/mimic-function-5.0.1.tgz", "integrity": "sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==", "dev": true, + "license": "MIT", "engines": { "node": ">=18" }, @@ -13778,48 +12550,15 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/minipass": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", - "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", - "license": "ISC", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/minizlib": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz", - "integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==", - "license": "MIT", - "dependencies": { - "minipass": "^7.1.2" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/mkdirp": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", - "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==", - "license": "MIT", - "bin": { - "mkdirp": "dist/cjs/src/bin.js" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/monaco-editor": { - "version": "0.53.0", - "resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.53.0.tgz", - "integrity": "sha512-0WNThgC6CMWNXXBxTbaYYcunj08iB5rnx4/G56UOPeL9UVIUGGHA1GR0EWIh9Ebabj7NpCRawQ5b0hfN1jQmYQ==", + "version": "0.55.1", + "resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.55.1.tgz", + "integrity": "sha512-jz4x+TJNFHwHtwuV9vA9rMujcZRb0CEilTEwG2rRSpe/A7Jdkuj8xPKttCgOh+v/lkHy7HsZ64oj+q3xoAFl9A==", + 
"license": "MIT", + "peer": true, "dependencies": { - "@types/trusted-types": "^1.0.6" + "dompurify": "3.2.7", + "marked": "14.0.0" } }, "node_modules/morgan": { @@ -13866,9 +12605,10 @@ } }, "node_modules/motion-dom": { - "version": "12.23.21", - "resolved": "https://registry.npmjs.org/motion-dom/-/motion-dom-12.23.21.tgz", - "integrity": "sha512-5xDXx/AbhrfgsQmSE7YESMn4Dpo6x5/DTZ4Iyy4xqDvVHWvFVoV+V2Ri2S/ksx+D40wrZ7gPYiMWshkdoqNgNQ==", + "version": "12.23.23", + "resolved": "https://registry.npmjs.org/motion-dom/-/motion-dom-12.23.23.tgz", + "integrity": "sha512-n5yolOs0TQQBRUFImrRfs/+6X4p3Q4n1dUEqt/H58Vx7OW6RF+foWEgmTVDhIWJIMXOuNNL0apKH2S16en9eiA==", + "license": "MIT", "dependencies": { "motion-utils": "^12.23.6" } @@ -13876,7 +12616,8 @@ "node_modules/motion-utils": { "version": "12.23.6", "resolved": "https://registry.npmjs.org/motion-utils/-/motion-utils-12.23.6.tgz", - "integrity": "sha512-eAWoPgr4eFEOFfg2WjIsMoqJTW6Z8MTUCgn/GZ3VRpClWBdnbjryiA3ZSNLyxCTmCQx4RmYX6jX1iWHbenUPNQ==" + "integrity": "sha512-eAWoPgr4eFEOFfg2WjIsMoqJTW6Z8MTUCgn/GZ3VRpClWBdnbjryiA3ZSNLyxCTmCQx4RmYX6jX1iWHbenUPNQ==", + "license": "MIT" }, "node_modules/mri": { "version": "1.2.0", @@ -13903,30 +12644,31 @@ "license": "MIT" }, "node_modules/msw": { - "version": "2.11.1", - "resolved": "https://registry.npmjs.org/msw/-/msw-2.11.1.tgz", - "integrity": "sha512-dGSRx0AJmQVQfpGXTsAAq4JFdwdhOBdJ6sJS/jnN0ac3s0NZB6daacHF1z5Pefx+IejmvuiLWw260RlyQOf3sQ==", + "version": "2.12.4", + "resolved": "https://registry.npmjs.org/msw/-/msw-2.12.4.tgz", + "integrity": "sha512-rHNiVfTyKhzc0EjoXUBVGteNKBevdjOlVC6GlIRXpy+/3LHEIGRovnB5WPjcvmNODVQ1TNFnoa7wsGbd0V3epg==", "dev": true, "hasInstallScript": true, "license": "MIT", + "peer": true, "dependencies": { - "@bundled-es-modules/cookie": "^2.0.1", - "@bundled-es-modules/statuses": "^1.0.1", "@inquirer/confirm": "^5.0.0", - "@mswjs/interceptors": "^0.39.1", + "@mswjs/interceptors": "^0.40.0", "@open-draft/deferred-promise": "^2.2.0", - "@open-draft/until": "^2.1.0", - "@types/cookie": "^0.6.0", - "@types/statuses": "^2.0.4", - "graphql": "^16.8.1", + "@types/statuses": "^2.0.6", + "cookie": "^1.0.2", + "graphql": "^16.12.0", "headers-polyfill": "^4.0.2", "is-node-process": "^1.2.0", "outvariant": "^1.4.3", "path-to-regexp": "^6.3.0", "picocolors": "^1.1.1", + "rettime": "^0.7.0", + "statuses": "^2.0.2", "strict-event-emitter": "^0.5.1", "tough-cookie": "^6.0.0", - "type-fest": "^4.26.1", + "type-fest": "^5.2.0", + "until-async": "^3.0.2", "yargs": "^17.7.2" }, "bin": { @@ -13936,15 +12678,47 @@ "node": ">=18" }, "funding": { - "url": "https://github.com/sponsors/mswjs" - }, - "peerDependencies": { - "typescript": ">= 4.8.x" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } + "url": "https://github.com/sponsors/mswjs" + }, + "peerDependencies": { + "typescript": ">= 4.8.x" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/msw/node_modules/@mswjs/interceptors": { + "version": "0.40.0", + "resolved": "https://registry.npmjs.org/@mswjs/interceptors/-/interceptors-0.40.0.tgz", + "integrity": "sha512-EFd6cVbHsgLa6wa4RljGj6Wk75qoHxUSyc5asLyyPSyuhIcdS2Q3Phw6ImS1q+CkALthJRShiYfKANcQMuMqsQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@open-draft/deferred-promise": "^2.2.0", + "@open-draft/logger": "^0.3.0", + "@open-draft/until": "^2.0.0", + "is-node-process": "^1.2.0", + "outvariant": "^1.4.3", + "strict-event-emitter": "^0.5.1" + }, + "engines": { + "node": ">=18" + } + }, + 
"node_modules/msw/node_modules/cookie": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-1.1.1.tgz", + "integrity": "sha512-ei8Aos7ja0weRpFzJnEA9UHJ/7XQmqglbRwnf2ATjcB9Wq874VKH9kfjjirM6UhU2/E5fFYadylyhFldcqSidQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/msw/node_modules/path-to-regexp": { @@ -13955,13 +12729,16 @@ "license": "MIT" }, "node_modules/msw/node_modules/type-fest": { - "version": "4.41.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.41.0.tgz", - "integrity": "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==", + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-5.3.1.tgz", + "integrity": "sha512-VCn+LMHbd4t6sF3wfU/+HKT63C9OoyrSIf4b+vtWHpt2U7/4InZG467YDNMFMR70DdHjAdpPWmw2lzRdg0Xqqg==", "dev": true, "license": "(MIT OR CC0-1.0)", + "dependencies": { + "tagged-tag": "^1.0.0" + }, "engines": { - "node": ">=16" + "node": ">=20" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -13978,9 +12755,9 @@ } }, "node_modules/nano-spawn": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/nano-spawn/-/nano-spawn-1.0.3.tgz", - "integrity": "sha512-jtpsQDetTnvS2Ts1fiRdci5rx0VYws5jGyC+4IYOTnIQ/wwdf6JdomlHBwqC3bJYOvaKu0C2GSZ1A60anrYpaA==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/nano-spawn/-/nano-spawn-2.0.0.tgz", + "integrity": "sha512-tacvGzUY5o2D8CBh2rrwxyNojUsZNU2zjNTzKQrkgGJQTbGAfArVWXSKMBokBeeg6C7OLRGUEyoFlYbfeWQIqw==", "dev": true, "license": "MIT", "engines": { @@ -14078,91 +12855,11 @@ } }, "node_modules/node-releases": { - "version": "2.0.20", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.20.tgz", - "integrity": "sha512-7gK6zSXEH6neM212JgfYFXe+GmZQM+fia5SsusuBIUgnPheLFBmIPhtFoAQRj8/7wASYQnbDlHPVwY0BefoFgA==", - "license": "MIT" - }, - "node_modules/normalize-package-data": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-5.0.0.tgz", - "integrity": "sha512-h9iPVIfrVZ9wVYQnxFgtw1ugSvGEMOlyPWWtm8BMJhnwyEL/FLbYbTY3V3PpjI/BUK67n9PEWDu6eHzu1fB15Q==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "hosted-git-info": "^6.0.0", - "is-core-module": "^2.8.1", - "semver": "^7.3.5", - "validate-npm-package-license": "^3.0.4" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/normalize-range": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz", - "integrity": "sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/npm-install-checks": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-6.3.0.tgz", - "integrity": "sha512-W29RiK/xtpCGqn6f3ixfRYGk+zRyr+Ew9F2E20BfXxT5/euLdA/Nm7fO7OeTGuAmTs30cpgInyJ0cYe708YTZw==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "semver": "^7.1.1" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/npm-normalize-package-bin": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-3.0.1.tgz", - "integrity": 
"sha512-dMxCf+zZ+3zeQZXKxmyuCKlIDPGuv8EF940xbkC4kQVDTtqoh6rJFO+JTKSA6/Rwi0getWmtuy4Itup0AMcaDQ==", - "dev": true, - "license": "ISC", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/npm-package-arg": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz", - "integrity": "sha512-uFyyCEmgBfZTtrKk/5xDfHp6+MdrqGotX/VoOyEEl3mBwiEE5FlBaePanazJSVMPT7vKepcjYBY2ztg9A3yPIA==", - "dev": true, - "license": "ISC", - "dependencies": { - "hosted-git-info": "^6.0.0", - "proc-log": "^3.0.0", - "semver": "^7.3.5", - "validate-npm-package-name": "^5.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/npm-pick-manifest": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-8.0.2.tgz", - "integrity": "sha512-1dKY+86/AIiq1tkKVD3l0WI+Gd3vkknVGAggsFeBkTvbhMQ1OND/LKkYv4JtXPKUJ8bOTCyLiqEg2P6QNdK+Gg==", + "version": "2.0.27", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz", + "integrity": "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==", "dev": true, - "license": "ISC", - "dependencies": { - "npm-install-checks": "^6.0.0", - "npm-normalize-package-bin": "^3.0.0", - "npm-package-arg": "^10.0.0", - "semver": "^7.3.5" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } + "license": "MIT" }, "node_modules/object-assign": { "version": "4.1.1", @@ -14285,6 +12982,17 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/obug": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/obug/-/obug-2.1.1.tgz", + "integrity": "sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==", + "dev": true, + "funding": [ + "https://github.com/sponsors/sxzz", + "https://opencollective.com/debug" + ], + "license": "MIT" + }, "node_modules/on-finished": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", @@ -14320,6 +13028,7 @@ "resolved": "https://registry.npmjs.org/onetime/-/onetime-7.0.0.tgz", "integrity": "sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==", "dev": true, + "license": "MIT", "dependencies": { "mimic-function": "^5.0.0" }, @@ -14405,12 +13114,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/package-json-from-dist": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", - "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", + "node_modules/p-map": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-7.0.4.tgz", + "integrity": "sha512-tkAQEw8ysMzmkhgw8k+1U/iPhWNhykKnSk4Rd5zLoPJCuJaGRPo6YposrZgaxHKzDHdDWWZvE/Sk7hsL2X/CpQ==", "dev": true, - "license": "BlueOak-1.0.0" + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } }, "node_modules/parent-module": { "version": "1.0.1", @@ -14469,17 +13184,10 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/parse-json/node_modules/json-parse-even-better-errors": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", - "integrity": 
"sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", - "dev": true, - "license": "MIT" - }, "node_modules/parse5": { - "version": "7.3.0", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.3.0.tgz", - "integrity": "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==", + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-8.0.0.tgz", + "integrity": "sha512-9m4m5GSgXjL4AjumKzq1Fgfp3Z8rsvjRNbnkVwfu2ImRqE5D0LnY2QfDen18FSY9C573YU5XxSapdHZTZ2WolA==", "dev": true, "license": "MIT", "dependencies": { @@ -14533,30 +13241,6 @@ "dev": true, "license": "MIT" }, - "node_modules/path-scurry": { - "version": "1.11.1", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", - "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", - "dev": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "lru-cache": "^10.2.0", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" - }, - "engines": { - "node": ">=16 || 14 >=14.18" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/path-scurry/node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "dev": true, - "license": "ISC" - }, "node_modules/path-to-regexp": { "version": "0.1.12", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz", @@ -14579,16 +13263,6 @@ "dev": true, "license": "MIT" }, - "node_modules/pathval": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/pathval/-/pathval-2.0.1.tgz", - "integrity": "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 14.16" - } - }, "node_modules/picocolors": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", @@ -14621,13 +13295,33 @@ "node": ">=0.10" } }, + "node_modules/pkg-types": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-2.3.0.tgz", + "integrity": "sha512-SIqCzDRg0s9npO5XQ3tNZioRY1uK06lA41ynBC1YmFTmnY6FjUjVt6s4LoADmwoig1qqD0oK8h1p/8mlMx8Oig==", + "dev": true, + "license": "MIT", + "dependencies": { + "confbox": "^0.2.2", + "exsolve": "^1.0.7", + "pathe": "^2.0.3" + } + }, + "node_modules/pkg-types/node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "dev": true, + "license": "MIT" + }, "node_modules/playwright": { - "version": "1.55.1", - "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.55.1.tgz", - "integrity": "sha512-cJW4Xd/G3v5ovXtJJ52MAOclqeac9S/aGGgRzLabuF8TnIb6xHvMzKIa6JmrRzUkeXJgfL1MhukP0NK6l39h3A==", + "version": "1.57.0", + "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.57.0.tgz", + "integrity": "sha512-ilYQj1s8sr2ppEJ2YVadYBN0Mb3mdo9J0wQ+UuDhzYqURwSoW4n1Xs5vs7ORwgDGmyEh33tRMeS8KhdkMoLXQw==", "dev": true, + "license": "Apache-2.0", "dependencies": { - "playwright-core": "1.55.1" + "playwright-core": "1.57.0" }, "bin": { "playwright": "cli.js" @@ -14640,10 +13334,11 @@ } }, "node_modules/playwright-core": { - "version": "1.55.1", - 
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.55.1.tgz", - "integrity": "sha512-Z6Mh9mkwX+zxSlHqdr5AOcJnfp+xUWLCt9uKV18fhzA8eyxUd8NUWzAjxUh55RZKSYwDGX0cfaySdhZJGMoJ+w==", + "version": "1.57.0", + "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.57.0.tgz", + "integrity": "sha512-agTcKlMw/mjBWOnD6kFZttAAGHgi/Nw0CZ2o6JqWSbMlI219lAFLZZCyqByTsvVAJq5XA5H8cA6PrvBRpBWEuQ==", "dev": true, + "license": "Apache-2.0", "bin": { "playwright-core": "cli.js" }, @@ -14680,6 +13375,7 @@ } ], "license": "MIT", + "peer": true, "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", @@ -14703,36 +13399,22 @@ "node": ">=4" } }, - "node_modules/postcss-value-parser": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", - "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", - "dev": true, - "license": "MIT" - }, "node_modules/posthog-js": { - "version": "1.290.0", - "resolved": "https://registry.npmjs.org/posthog-js/-/posthog-js-1.290.0.tgz", - "integrity": "sha512-zavBwZkf+3JeiSDVE7ZDXBfzva/iOljicdhdJH+cZoqp0LsxjKxjnNhGOd3KpAhw0wqdwjhd7Lp1aJuI7DXyaw==", - "license": "SEE LICENSE IN LICENSE", + "version": "1.309.1", + "resolved": "https://registry.npmjs.org/posthog-js/-/posthog-js-1.309.1.tgz", + "integrity": "sha512-JUJcQhYzNNKO0cgnSbowCsVi2RTu75XGZ2EmnTQti4tMGRCTOv/HCnZasdFniBGZ0rLugQkaScYca/84Ta2u5Q==", "dependencies": { - "@posthog/core": "1.5.2", + "@posthog/core": "1.8.1", "core-js": "^3.38.1", "fflate": "^0.4.8", "preact": "^10.19.3", "web-vitals": "^4.2.4" } }, - "node_modules/posthog-js/node_modules/web-vitals": { - "version": "4.2.4", - "resolved": "https://registry.npmjs.org/web-vitals/-/web-vitals-4.2.4.tgz", - "integrity": "sha512-r4DIlprAGwJ7YM11VZp4R884m0Vmgr6EAKe3P+kO0PPj3Unqyvv59rczf6UiGcb9Z8QxZVcqKNwv/g0WNdWwsw==", - "license": "Apache-2.0" - }, "node_modules/preact": { - "version": "10.27.1", - "resolved": "https://registry.npmjs.org/preact/-/preact-10.27.1.tgz", - "integrity": "sha512-V79raXEWch/rbqoNc7nT9E4ep7lu+mI3+sBmfRD4i1M73R3WLYcCtdI0ibxGVf4eQL8ZIz2nFacqEC+rmnOORQ==", + "version": "10.28.0", + "resolved": "https://registry.npmjs.org/preact/-/preact-10.28.0.tgz", + "integrity": "sha512-rytDAoiXr3+t6OIP3WGlDd0ouCUG1iCWzkcY3++Nreuoi17y6T5i/zRhe6uYfoVcxq6YU+sBtJouuRDsq8vvqA==", "license": "MIT", "funding": { "type": "opencollective", @@ -14750,11 +13432,12 @@ } }, "node_modules/prettier": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.6.2.tgz", - "integrity": "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==", + "version": "3.7.4", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.7.4.tgz", + "integrity": "sha512-v6UNi1+3hSlVvv8fSaoUbggEM5VErKmmpGA7Pl3HF8V6uKY7rvClBOJlH6yNwQtfTueNkGVpOv/mtWL9L4bgRA==", "dev": true, "license": "MIT", + "peer": true, "bin": { "prettier": "bin/prettier.cjs" }, @@ -14835,37 +13518,6 @@ "node": ">=6" } }, - "node_modules/proc-log": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-3.0.0.tgz", - "integrity": "sha512-++Vn7NS4Xf9NacaU9Xq3URUuqZETPsf8L4j5/ckhaRYsfPeRyzGw+iDjFhV/Jr3uNmTvvddEJFWh5R1gRgUH8A==", - "dev": true, - "license": "ISC", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/promise-inflight": { - "version": "1.0.1", - "resolved": 
"https://registry.npmjs.org/promise-inflight/-/promise-inflight-1.0.1.tgz", - "integrity": "sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==", - "dev": true, - "license": "ISC" - }, - "node_modules/promise-retry": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/promise-retry/-/promise-retry-2.0.1.tgz", - "integrity": "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==", - "dev": true, - "license": "MIT", - "dependencies": { - "err-code": "^2.0.2", - "retry": "^0.12.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/prop-types": { "version": "15.8.1", "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", @@ -14922,12 +13574,11 @@ } }, "node_modules/qs": { - "version": "6.13.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", - "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", - "license": "BSD-3-Clause", + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", + "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", "dependencies": { - "side-channel": "^1.0.6" + "side-channel": "^1.1.0" }, "engines": { "node": ">=0.6" @@ -14966,24 +13617,24 @@ } }, "node_modules/raw-body": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", - "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", + "version": "2.5.3", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.3.tgz", + "integrity": "sha512-s4VSOf6yN0rvbRZGxs8Om5CWj6seneMwK3oDb4lWDH0UPhWcxwOWw5+qk24bxq87szX1ydrwylIOp2uG1ojUpA==", "dependencies": { - "bytes": "3.1.2", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "unpipe": "1.0.0" + "bytes": "~3.1.2", + "http-errors": "~2.0.1", + "iconv-lite": "~0.4.24", + "unpipe": "~1.0.0" }, "engines": { "node": ">= 0.8" } }, "node_modules/react": { - "version": "19.1.1", - "resolved": "https://registry.npmjs.org/react/-/react-19.1.1.tgz", - "integrity": "sha512-w8nqGImo45dmMIfljjMwOGtbmC/mk4CMYhWIicdSflH91J9TyCyczcPFXJzrZ/ZXcgGRFeP6BU0BEJTw6tZdfQ==", - "license": "MIT", + "version": "19.2.3", + "resolved": "https://registry.npmjs.org/react/-/react-19.2.3.tgz", + "integrity": "sha512-Ku/hhYbVjOQnXDZFv2+RibmLFGwFdeeKHFcOTlrt7xplBnya5OGn/hIRDsqDiSUcfORsDC7MPxwork8jBwsIWA==", + "peer": true, "engines": { "node": ">=0.10.0" } @@ -15029,24 +13680,15 @@ "license": "MIT" }, "node_modules/react-dom": { - "version": "19.1.1", - "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.1.1.tgz", - "integrity": "sha512-Dlq/5LAZgF0Gaz6yiqZCf6VCcZs1ghAJyrsu84Q/GT0gV+mCxbfmKNoGRKBYMJ8IEdGPqu49YWXD02GCknEDkw==", - "license": "MIT", + "version": "19.2.3", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.2.3.tgz", + "integrity": "sha512-yELu4WmLPw5Mr/lmeEpox5rw3RETacE++JgHqQzd2dg+YbJuat3jH4ingc+WPZhxaoFzdv9y33G+F7Nl5O0GBg==", + "peer": true, "dependencies": { - "scheduler": "^0.26.0" + "scheduler": "^0.27.0" }, "peerDependencies": { - "react": "^19.1.1" - } - }, - "node_modules/react-highlight": { - "version": "0.15.0", - "resolved": "https://registry.npmjs.org/react-highlight/-/react-highlight-0.15.0.tgz", - "integrity": "sha512-5uV/b/N4Z421GSVVe05fz+OfTsJtFzx/fJBdafZyw4LS70XjIZwgEx3Lrkfc01W/RzZ2Dtfb0DApoaJFAIKBtA==", - "license": "MIT", - "dependencies": { - "highlight.js": 
"^10.5.0" + "react": "^19.2.3" } }, "node_modules/react-hot-toast": { @@ -15067,15 +13709,16 @@ } }, "node_modules/react-i18next": { - "version": "16.0.0", - "resolved": "https://registry.npmjs.org/react-i18next/-/react-i18next-16.0.0.tgz", - "integrity": "sha512-JQ+dFfLnFSKJQt7W01lJHWRC0SX7eDPobI+MSTJ3/gP39xH2g33AuTE7iddAfXYHamJdAeMGM0VFboPaD3G68Q==", + "version": "16.5.0", + "resolved": "https://registry.npmjs.org/react-i18next/-/react-i18next-16.5.0.tgz", + "integrity": "sha512-IMpPTyCTKxEj8klCrLKUTIUa8uYTd851+jcu2fJuUB9Agkk9Qq8asw4omyeHVnOXHrLgQJGTm5zTvn8HpaPiqw==", "dependencies": { "@babel/runtime": "^7.27.6", - "html-parse-stringify": "^3.0.1" + "html-parse-stringify": "^3.0.1", + "use-sync-external-store": "^1.6.0" }, "peerDependencies": { - "i18next": ">= 25.5.2", + "i18next": ">= 25.6.2", "react": ">= 16.8.0", "typescript": "^5" }, @@ -15150,9 +13793,10 @@ } }, "node_modules/react-router": { - "version": "7.9.3", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.9.3.tgz", - "integrity": "sha512-4o2iWCFIwhI/eYAIL43+cjORXYn/aRQPgtFRRZb3VzoyQ5Uej0Bmqj7437L97N9NJW4wnicSwLOLS+yCXfAPgg==", + "version": "7.11.0", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.11.0.tgz", + "integrity": "sha512-uI4JkMmjbWCZc01WVP2cH7ZfSzH91JAZUDd7/nIprDgWxBV1TkkmLToFh7EbMTcMak8URFRa2YoBL/W8GWnCTQ==", + "peer": true, "dependencies": { "cookie": "^1.0.1", "set-cookie-parser": "^2.6.0" @@ -15171,26 +13815,33 @@ } }, "node_modules/react-router/node_modules/cookie": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-1.0.2.tgz", - "integrity": "sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-1.1.1.tgz", + "integrity": "sha512-ei8Aos7ja0weRpFzJnEA9UHJ/7XQmqglbRwnf2ATjcB9Wq874VKH9kfjjirM6UhU2/E5fFYadylyhFldcqSidQ==", "license": "MIT", "engines": { "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/react-syntax-highlighter": { - "version": "15.6.6", - "resolved": "https://registry.npmjs.org/react-syntax-highlighter/-/react-syntax-highlighter-15.6.6.tgz", - "integrity": "sha512-DgXrc+AZF47+HvAPEmn7Ua/1p10jNoVZVI/LoPiYdtY+OM+/nG5yefLHKJwdKqY1adMuHFbeyBaG9j64ML7vTw==", + "version": "16.1.0", + "resolved": "https://registry.npmjs.org/react-syntax-highlighter/-/react-syntax-highlighter-16.1.0.tgz", + "integrity": "sha512-E40/hBiP5rCNwkeBN1vRP+xow1X0pndinO+z3h7HLsHyjztbyjfzNWNKuAsJj+7DLam9iT4AaaOZnueCU+Nplg==", "license": "MIT", "dependencies": { - "@babel/runtime": "^7.3.1", + "@babel/runtime": "^7.28.4", "highlight.js": "^10.4.1", "highlightjs-vue": "^1.0.0", "lowlight": "^1.17.0", "prismjs": "^1.30.0", - "refractor": "^3.6.0" + "refractor": "^5.0.0" + }, + "engines": { + "node": ">= 16.20.2" }, "peerDependencies": { "react": ">= 0.14.0" @@ -15210,176 +13861,76 @@ "node": ">=10" }, "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" - } - }, - "node_modules/readdirp": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", - "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 14.18.0" - }, - "funding": { - "type": "individual", - "url": "https://paulmillr.com/funding/" - } - }, - "node_modules/redent": { - "version": "3.0.0", - "resolved": 
"https://registry.npmjs.org/redent/-/redent-3.0.0.tgz", - "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==", - "dev": true, - "license": "MIT", - "dependencies": { - "indent-string": "^4.0.0", - "strip-indent": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/reflect.getprototypeof": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.10.tgz", - "integrity": "sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.8", - "define-properties": "^1.2.1", - "es-abstract": "^1.23.9", - "es-errors": "^1.3.0", - "es-object-atoms": "^1.0.0", - "get-intrinsic": "^1.2.7", - "get-proto": "^1.0.1", - "which-builtin-type": "^1.2.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/refractor": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/refractor/-/refractor-3.6.0.tgz", - "integrity": "sha512-MY9W41IOWxxk31o+YvFCNyNzdkc9M20NoZK5vq6jkv4I/uh2zkWcfudj0Q1fovjUQJrNewS9NMzeTtqPf+n5EA==", - "license": "MIT", - "dependencies": { - "hastscript": "^6.0.0", - "parse-entities": "^2.0.0", - "prismjs": "~1.27.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/refractor/node_modules/character-entities": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-1.2.4.tgz", - "integrity": "sha512-iBMyeEHxfVnIakwOuDXpVkc54HijNgCyQB2w0VfGQThle6NXn50zU6V/u+LDhxHcDUPojn6Kpga3PTAD8W1bQw==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/refractor/node_modules/character-entities-legacy": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-1.1.4.tgz", - "integrity": "sha512-3Xnr+7ZFS1uxeiUDvV02wQ+QDbc55o97tIV5zHScSPJpcLm/r0DFPcoY3tYRp+VZukxuMeKgXYmsXQHO05zQeA==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/refractor/node_modules/character-reference-invalid": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-1.1.4.tgz", - "integrity": "sha512-mKKUkUbhPpQlCOfIuZkvSEgktjPFIsZKRRbC6KWVEMvlzblj3i3asQv5ODsrwt0N3pHAEvjP8KTQPHkp0+6jOg==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, - "node_modules/refractor/node_modules/is-alphabetical": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-1.0.4.tgz", - "integrity": "sha512-DwzsA04LQ10FHTZuL0/grVDk4rFoVH1pjAToYwBrHSxcrBIGQuXrQMtD5U1b0U2XVgKZCTLLP8u2Qxqhy3l2Vg==", + "node_modules/readdirp": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", + "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==", + "dev": true, "license": "MIT", + "engines": { + "node": ">= 14.18.0" + }, "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" + "type": "individual", + "url": "https://paulmillr.com/funding/" } }, - 
"node_modules/refractor/node_modules/is-alphanumerical": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-1.0.4.tgz", - "integrity": "sha512-UzoZUr+XfVz3t3v4KyGEniVL9BDRoQtY7tOyrRybkVNjDFWyo1yhXNGrrBTQxp3ib9BLAWs7k2YKBQsFRkZG9A==", + "node_modules/redent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz", + "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==", + "dev": true, "license": "MIT", "dependencies": { - "is-alphabetical": "^1.0.0", - "is-decimal": "^1.0.0" + "indent-string": "^4.0.0", + "strip-indent": "^3.0.0" }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/refractor/node_modules/is-decimal": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-1.0.4.tgz", - "integrity": "sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" + "engines": { + "node": ">=8" } }, - "node_modules/refractor/node_modules/is-hexadecimal": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-1.0.4.tgz", - "integrity": "sha512-gyPJuv83bHMpocVYoqof5VDiZveEoGoFL8m3BXNb2VW8Xs+rz9kqO8LOQ5DH6EsuvilT1ApazU0pyl+ytbPtlw==", + "node_modules/reflect.getprototypeof": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.10.tgz", + "integrity": "sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==", + "dev": true, "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.9", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.7", + "get-proto": "^1.0.1", + "which-builtin-type": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/refractor/node_modules/parse-entities": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-2.0.0.tgz", - "integrity": "sha512-kkywGpCcRYhqQIchaWqZ875wzpS/bMKhz5HnN3p7wveJTkTtyAB/AlnS0f8DFSqYW1T82t6yEAkEcB+A1I3MbQ==", + "node_modules/refractor": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/refractor/-/refractor-5.0.0.tgz", + "integrity": "sha512-QXOrHQF5jOpjjLfiNk5GFnWhRXvxjUVnlFxkeDmewR5sXkr3iM46Zo+CnRR8B+MDVqkULW4EcLVcRBNOPXHosw==", "license": "MIT", "dependencies": { - "character-entities": "^1.0.0", - "character-entities-legacy": "^1.0.0", - "character-reference-invalid": "^1.0.0", - "is-alphanumerical": "^1.0.0", - "is-decimal": "^1.0.0", - "is-hexadecimal": "^1.0.0" + "@types/hast": "^3.0.0", + "@types/prismjs": "^1.0.0", + "hastscript": "^9.0.0", + "parse-entities": "^4.0.0" }, "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/refractor/node_modules/prismjs": { - "version": "1.27.0", - "resolved": "https://registry.npmjs.org/prismjs/-/prismjs-1.27.0.tgz", - "integrity": "sha512-t13BGPUlFDR7wRB5kQDG4jjl7XeuH6jbJGt11JHPL96qwsEHNX2+68tFXqc1/k+/jALsbSWJKUOT/hcYAZ5LkA==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/regexp.prototype.flags": { "version": "1.5.4", "resolved": 
"https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.4.tgz", @@ -15497,6 +14048,7 @@ "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } @@ -15512,13 +14064,13 @@ } }, "node_modules/resolve": { - "version": "1.22.10", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", - "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", + "version": "1.22.11", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", + "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", "dev": true, "license": "MIT", "dependencies": { - "is-core-module": "^2.16.0", + "is-core-module": "^2.16.1", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" }, @@ -15547,6 +14099,7 @@ "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-5.1.0.tgz", "integrity": "sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==", "dev": true, + "license": "MIT", "dependencies": { "onetime": "^7.0.0", "signal-exit": "^4.1.0" @@ -15558,15 +14111,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/retry": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", - "integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==", + "node_modules/rettime": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/rettime/-/rettime-0.7.0.tgz", + "integrity": "sha512-LPRKoHnLKd/r3dVxcwO7vhCW+orkOGj9ViueosEBK6ie89CijnfRlhaDhHq/3Hxu4CkWQtxwlBG0mzTQY6uQjw==", "dev": true, - "license": "MIT", - "engines": { - "node": ">= 4" - } + "license": "MIT" }, "node_modules/reusify": { "version": "1.1.0", @@ -15583,7 +14133,8 @@ "version": "1.4.1", "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz", "integrity": "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/rimraf": { "version": "3.0.2", @@ -15602,57 +14153,12 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/rimraf/node_modules/brace-expansion": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", - "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/rimraf/node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "deprecated": "Glob versions prior to v9 are no longer supported", - "dev": true, - "license": "ISC", - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/rimraf/node_modules/minimatch": { - "version": "3.1.2", - "resolved": 
"https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, "node_modules/rollup": { - "version": "4.50.0", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.50.0.tgz", - "integrity": "sha512-/Zl4D8zPifNmyGzJS+3kVoyXeDeT/GrsJM94sACNg9RtUE0hrHa1bNPtRSrfHTMH5HjRzce6K7rlTh3Khiw+pw==", + "version": "4.53.3", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.53.3.tgz", + "integrity": "sha512-w8GmOxZfBmKknvdXU1sdM9NHcoQejwF/4mNgj2JuEEdRaHwwF12K7e9eXn1nLZ07ad+du76mkVsyeb2rKGllsA==", "license": "MIT", + "peer": true, "dependencies": { "@types/estree": "1.0.8" }, @@ -15664,36 +14170,31 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.50.0", - "@rollup/rollup-android-arm64": "4.50.0", - "@rollup/rollup-darwin-arm64": "4.50.0", - "@rollup/rollup-darwin-x64": "4.50.0", - "@rollup/rollup-freebsd-arm64": "4.50.0", - "@rollup/rollup-freebsd-x64": "4.50.0", - "@rollup/rollup-linux-arm-gnueabihf": "4.50.0", - "@rollup/rollup-linux-arm-musleabihf": "4.50.0", - "@rollup/rollup-linux-arm64-gnu": "4.50.0", - "@rollup/rollup-linux-arm64-musl": "4.50.0", - "@rollup/rollup-linux-loongarch64-gnu": "4.50.0", - "@rollup/rollup-linux-ppc64-gnu": "4.50.0", - "@rollup/rollup-linux-riscv64-gnu": "4.50.0", - "@rollup/rollup-linux-riscv64-musl": "4.50.0", - "@rollup/rollup-linux-s390x-gnu": "4.50.0", - "@rollup/rollup-linux-x64-gnu": "4.50.0", - "@rollup/rollup-linux-x64-musl": "4.50.0", - "@rollup/rollup-openharmony-arm64": "4.50.0", - "@rollup/rollup-win32-arm64-msvc": "4.50.0", - "@rollup/rollup-win32-ia32-msvc": "4.50.0", - "@rollup/rollup-win32-x64-msvc": "4.50.0", + "@rollup/rollup-android-arm-eabi": "4.53.3", + "@rollup/rollup-android-arm64": "4.53.3", + "@rollup/rollup-darwin-arm64": "4.53.3", + "@rollup/rollup-darwin-x64": "4.53.3", + "@rollup/rollup-freebsd-arm64": "4.53.3", + "@rollup/rollup-freebsd-x64": "4.53.3", + "@rollup/rollup-linux-arm-gnueabihf": "4.53.3", + "@rollup/rollup-linux-arm-musleabihf": "4.53.3", + "@rollup/rollup-linux-arm64-gnu": "4.53.3", + "@rollup/rollup-linux-arm64-musl": "4.53.3", + "@rollup/rollup-linux-loong64-gnu": "4.53.3", + "@rollup/rollup-linux-ppc64-gnu": "4.53.3", + "@rollup/rollup-linux-riscv64-gnu": "4.53.3", + "@rollup/rollup-linux-riscv64-musl": "4.53.3", + "@rollup/rollup-linux-s390x-gnu": "4.53.3", + "@rollup/rollup-linux-x64-gnu": "4.53.3", + "@rollup/rollup-linux-x64-musl": "4.53.3", + "@rollup/rollup-openharmony-arm64": "4.53.3", + "@rollup/rollup-win32-arm64-msvc": "4.53.3", + "@rollup/rollup-win32-ia32-msvc": "4.53.3", + "@rollup/rollup-win32-x64-gnu": "4.53.3", + "@rollup/rollup-win32-x64-msvc": "4.53.3", "fsevents": "~2.3.2" } }, - "node_modules/rrweb-cssom": { - "version": "0.8.0", - "resolved": "https://registry.npmjs.org/rrweb-cssom/-/rrweb-cssom-0.8.0.tgz", - "integrity": "sha512-guoltQEx+9aMf2gDZ0s62EcV8lsXR+0w8915TC3ITdn2YueuNjdAYh/levpU9nFaoChh9RUS5ZdQMrKfVEN9tw==", - "dev": true - }, "node_modules/run-parallel": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", @@ -15825,16 +14326,15 @@ } }, "node_modules/scheduler": { - "version": "0.26.0", - "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.26.0.tgz", - "integrity": 
"sha512-NlHwttCI/l5gCPR3D1nNXtWABUmBwvZpEQiD4IXSbIDq8BzLIK/7Ir5gTFSGZDUu37K5cMNp0hFtzO38sC7gWA==", + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.27.0.tgz", + "integrity": "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==", "license": "MIT" }, "node_modules/scroll-into-view-if-needed": { "version": "3.0.10", "resolved": "https://registry.npmjs.org/scroll-into-view-if-needed/-/scroll-into-view-if-needed-3.0.10.tgz", "integrity": "sha512-t44QCeDKAPf1mtQH3fYpWz8IM/DyvHLjs8wUvvwMYxk5moOqCzrMSxK6HQVD0QVmVjXFavoFIPRVrMuJPKAvtg==", - "license": "MIT", "dependencies": { "compute-scroll-into-view": "^3.0.2" } @@ -15849,9 +14349,9 @@ } }, "node_modules/semver": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", - "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", "dev": true, "license": "ISC", "bin": { @@ -15862,23 +14362,23 @@ } }, "node_modules/send": { - "version": "0.19.0", - "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", - "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", + "version": "0.19.2", + "resolved": "https://registry.npmjs.org/send/-/send-0.19.2.tgz", + "integrity": "sha512-VMbMxbDeehAxpOtWJXlcUS5E8iXh6QmN+BkRX1GARS3wRaXEEgzCcB10gTQazO42tpNIya8xIyNx8fll1OFPrg==", "dependencies": { "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", - "encodeurl": "~1.0.2", + "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "etag": "~1.8.1", - "fresh": "0.5.2", - "http-errors": "2.0.0", + "fresh": "~0.5.2", + "http-errors": "~2.0.1", "mime": "1.6.0", "ms": "2.1.3", - "on-finished": "2.4.1", + "on-finished": "~2.4.1", "range-parser": "~1.2.1", - "statuses": "2.0.1" + "statuses": "~2.0.2" }, "engines": { "node": ">= 0.8.0" @@ -15897,32 +14397,24 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, - "node_modules/send/node_modules/encodeurl": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", - "engines": { - "node": ">= 0.8" - } - }, "node_modules/serve-static": { - "version": "1.16.2", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", - "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", + "version": "1.16.3", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.3.tgz", + "integrity": "sha512-x0RTqQel6g5SY7Lg6ZreMmsOzncHFU7nhnRWkKgWuMTu5NN0DR5oruckMqRvacAN9d5w6ARnRBXl9xhDCgfMeA==", "dependencies": { "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "parseurl": "~1.3.3", - "send": "0.19.0" + "send": "~0.19.1" }, "engines": { "node": ">= 0.8.0" } }, "node_modules/set-cookie-parser": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.1.tgz", - "integrity": "sha512-IOc8uWeOZgnb3ptbCURJWNjWUPcO3ZnTTdzsurqERrP6nPyv+paC55vJM0LpOlT2ne+Ix+9+CRG1MNLlyZ4GjQ==", + "version": "2.7.2", + 
"resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.2.tgz", + "integrity": "sha512-oeM1lpU/UvhTxw+g3cIfxXHyJRc/uidd3yK1P242gzHds0udQBYzs3y8j4gCCW+ZJ7ad0yctld8RYO+bdurlvw==", "license": "MIT" }, "node_modules/set-function-length": { @@ -16093,20 +14585,14 @@ } }, "node_modules/simple-swizzle": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", - "integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==", + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.4.tgz", + "integrity": "sha512-nAu1WFPQSMNr2Zn9PGSZK9AGn4t/y97lEm+MXTtUDwfP0ksAIX4nO+6ruD9Jwut4C49SB1Ws+fbXsm/yScWOHw==", "license": "MIT", "dependencies": { "is-arrayish": "^0.3.1" } }, - "node_modules/simple-swizzle/node_modules/is-arrayish": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", - "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==", - "license": "MIT" - }, "node_modules/sirv": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/sirv/-/sirv-3.0.2.tgz", @@ -16158,6 +14644,7 @@ "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-7.1.2.tgz", "integrity": "sha512-iOBWFgUX7caIZiuutICxVgX1SdxwAVFFKwt1EvMYYec/NWO5meOJ6K5uQxhrYBdQJne4KxiqZc+KptFOWFSI9w==", "dev": true, + "license": "MIT", "dependencies": { "ansi-styles": "^6.2.1", "is-fullwidth-code-point": "^5.0.0" @@ -16174,6 +14661,7 @@ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz", "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==", "dev": true, + "license": "MIT", "engines": { "node": ">=12" }, @@ -16254,6 +14742,15 @@ } } }, + "node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/source-map-js": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", @@ -16273,15 +14770,6 @@ "source-map": "^0.6.0" } }, - "node_modules/source-map-support/node_modules/source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/space-separated-tokens": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz", @@ -16292,42 +14780,6 @@ "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/spdx-correct": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz", - "integrity": "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "spdx-expression-parse": "^3.0.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-exceptions": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", - "integrity": 
"sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==", - "dev": true, - "license": "CC-BY-3.0" - }, - "node_modules/spdx-expression-parse": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", - "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "spdx-exceptions": "^2.1.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-license-ids": { - "version": "3.0.22", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.22.tgz", - "integrity": "sha512-4PRT4nh1EImPbt2jASOKHX7PB7I+e4IWNLvkKFDxNhJlfjbYlleYQh285Z/3mPTHSAK/AvdMmw5BNNuYH8ShgQ==", - "dev": true, - "license": "CC0-1.0" - }, "node_modules/stackback": { "version": "0.0.2", "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", @@ -16342,18 +14794,18 @@ "license": "MIT" }, "node_modules/statuses": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", - "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz", + "integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==", "license": "MIT", "engines": { "node": ">= 0.8" } }, "node_modules/std-env": { - "version": "3.9.0", - "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.9.0.tgz", - "integrity": "sha512-UGvjygr6F6tpH7o2qyqR6QYpwraIjKSdtzyBdyytFOHmPZY917kwdwLG0RbOjWOnKmnm3PeHjaoLLMie7kPLQw==", + "version": "3.10.0", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.10.0.tgz", + "integrity": "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==", "dev": true, "license": "MIT" }, @@ -16389,60 +14841,26 @@ } }, "node_modules/string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-8.1.0.tgz", + "integrity": "sha512-Kxl3KJGb/gxkaUMOjRsQ8IrXiGW75O4E3RPjFIINOVH8AMl2SQ/yWdTzWwF3FevIX9LcMAjJW+GRwAlAbTSXdg==", "dev": true, "license": "MIT", "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" + "get-east-asian-width": "^1.3.0", + "strip-ansi": "^7.1.0" }, "engines": { - "node": ">=12" + "node": ">=20" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/string-width-cjs": { - "name": "string-width", - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width-cjs/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": 
"sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true, - "license": "MIT" - }, - "node_modules/string-width-cjs/node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/string-width/node_modules/ansi-regex": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.0.tgz", - "integrity": "sha512-TKY5pyBkHyADOPYlRT9Lx6F544mPl0vS5Ew7BJ45hA08Q+t3GjbueLliBWN3sMICk6+y7HdyxSzC4bWS8baBdg==", + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz", + "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==", "dev": true, "license": "MIT", "engines": { @@ -16453,9 +14871,9 @@ } }, "node_modules/string-width/node_modules/strip-ansi": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", - "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz", + "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==", "dev": true, "license": "MIT", "dependencies": { @@ -16608,20 +15026,6 @@ "node": ">=8" } }, - "node_modules/strip-ansi-cjs": { - "name": "strip-ansi", - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/strip-bom": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", @@ -16634,87 +15038,46 @@ }, "node_modules/strip-indent": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", - "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "min-indent": "^1.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-json-comments": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", - "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/strip-literal": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-literal/-/strip-literal-3.0.0.tgz", - "integrity": "sha512-TcccoMhJOM3OebGhSBEmp3UZ2SfDMZUEBdRA/9ynfLi8yYajyWX3JiXArcJt4Umh4vISpspkQIY8ZZoCqjbviA==", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", + "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", "dev": true, "license": "MIT", "dependencies": { - "js-tokens": "^9.0.1" + "min-indent": "^1.0.0" }, - 
"funding": { - "url": "https://github.com/sponsors/antfu" + "engines": { + "node": ">=8" } }, - "node_modules/strip-literal/node_modules/js-tokens": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-9.0.1.tgz", - "integrity": "sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/stripe": { - "version": "18.5.0", - "resolved": "https://registry.npmjs.org/stripe/-/stripe-18.5.0.tgz", - "integrity": "sha512-Hp+wFiEQtCB0LlNgcFh5uVyKznpDjzyUZ+CNVEf+I3fhlYvh7rZruIg+jOwzJRCpy0ZTPMjlzm7J2/M2N6d+DA==", + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", "dev": true, "license": "MIT", - "dependencies": { - "qs": "^6.11.0" - }, "engines": { - "node": ">=12.*" - }, - "peerDependencies": { - "@types/node": ">=12.x.x" + "node": ">=8" }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - } + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/style-to-js": { - "version": "1.1.17", - "resolved": "https://registry.npmjs.org/style-to-js/-/style-to-js-1.1.17.tgz", - "integrity": "sha512-xQcBGDxJb6jjFCTzvQtfiPn6YvvP2O8U1MDIPNfJQlWMYfktPy+iGsHE7cssjs7y84d9fQaK4UF3RIJaAHSoYA==", + "version": "1.1.21", + "resolved": "https://registry.npmjs.org/style-to-js/-/style-to-js-1.1.21.tgz", + "integrity": "sha512-RjQetxJrrUJLQPHbLku6U/ocGtzyjbJMP9lCNK7Ag0CNh690nSH8woqWH9u16nMjYBAok+i7JO1NP2pOy8IsPQ==", "license": "MIT", "dependencies": { - "style-to-object": "1.0.9" + "style-to-object": "1.0.14" } }, "node_modules/style-to-object": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-1.0.9.tgz", - "integrity": "sha512-G4qppLgKu/k6FwRpHiGiKPaPTFcG3g4wNVX/Qsfu+RqQM30E7Tyu/TEgxcL9PNLF5pdRLwQdE3YKKf+KF2Dzlw==", + "version": "1.0.14", + "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-1.0.14.tgz", + "integrity": "sha512-LIN7rULI0jBscWQYaSswptyderlarFkjQ+t79nzty8tcIAceVomEVlLzH5VP4Cmsv6MtKhs7qaAiwlcp+Mgaxw==", "license": "MIT", "dependencies": { - "inline-style-parser": "0.2.4" + "inline-style-parser": "0.2.7" } }, "node_modules/supports-color": { @@ -16773,11 +15136,25 @@ "url": "https://opencollective.com/synckit" } }, + "node_modules/tagged-tag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/tagged-tag/-/tagged-tag-1.0.0.tgz", + "integrity": "sha512-yEFYrVhod+hdNyx7g5Bnkkb0G6si8HJurOoOEgC8B/O0uXLHlaey/65KRv6cuWBNhBgHKAROVpc7QyYqE5gFng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/tailwind-merge": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/tailwind-merge/-/tailwind-merge-3.3.1.tgz", - "integrity": "sha512-gBXpgUm/3rp1lMZZrM/w7D8GKqshif0zAymAhbCyIt8KMe+0v9DQ7cdYLR4FHH/cKpdTXb+A/tKKU3eolfsI+g==", + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/tailwind-merge/-/tailwind-merge-3.4.0.tgz", + "integrity": "sha512-uSaO4gnW+b3Y2aWoWfFpX62vn2sR3skfhbjsEnaBI81WD1wBLlHZe5sWf0AqjksNdYTbGBEd0UasQMT3SNV15g==", "license": "MIT", + "peer": true, "funding": { "type": "github", "url": "https://github.com/sponsors/dcastil" @@ -16799,10 +15176,9 @@ } }, "node_modules/tailwind-variants": { - 
"version": "3.1.1", - "resolved": "https://registry.npmjs.org/tailwind-variants/-/tailwind-variants-3.1.1.tgz", - "integrity": "sha512-ftLXe3krnqkMHsuBTEmaVUXYovXtPyTK7ckEfDRXS8PBZx0bAUas+A0jYxuKA5b8qg++wvQ3d2MQ7l/xeZxbZQ==", - "license": "MIT", + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/tailwind-variants/-/tailwind-variants-3.2.2.tgz", + "integrity": "sha512-Mi4kHeMTLvKlM98XPnK+7HoBPmf4gygdFmqQPaDivc3DpYS6aIY6KiG/PgThrGvii5YZJqRsPz0aPyhoFzmZgg==", "engines": { "node": ">=16.x", "pnpm": ">=7.x" @@ -16818,16 +15194,14 @@ } }, "node_modules/tailwindcss": { - "version": "4.1.13", - "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.13.tgz", - "integrity": "sha512-i+zidfmTqtwquj4hMEwdjshYYgMbOrPzb9a0M3ZgNa0JMoZeFC6bxZvO8yr8ozS6ix2SDz0+mvryPeBs2TFE+w==", - "license": "MIT" + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.18.tgz", + "integrity": "sha512-4+Z+0yiYyEtUVCScyfHCxOYP06L5Ne+JiHhY2IjR2KWMIWhJOYZKLSGZaP5HkZ8+bY0cxfzwDE5uOmzFXyIwxw==" }, "node_modules/tapable": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.3.tgz", - "integrity": "sha512-ZL6DDuAlRlLGghwcfmSn9sK3Hr6ArtyudlSAiCqQ6IfE+b+HHbydbYDIG15IfS5do+7XQQBdBiubF/cV2dnDzg==", - "license": "MIT", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.3.0.tgz", + "integrity": "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==", "engines": { "node": ">=6" }, @@ -16836,47 +15210,6 @@ "url": "https://opencollective.com/webpack" } }, - "node_modules/tar": { - "version": "7.4.3", - "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz", - "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==", - "license": "ISC", - "dependencies": { - "@isaacs/fs-minipass": "^4.0.0", - "chownr": "^3.0.0", - "minipass": "^7.1.2", - "minizlib": "^3.0.1", - "mkdirp": "^3.0.1", - "yallist": "^5.0.0" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/tar/node_modules/yallist": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", - "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==", - "license": "BlueOak-1.0.0", - "engines": { - "node": ">=18" - } - }, - "node_modules/test-exclude": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-7.0.1.tgz", - "integrity": "sha512-pFYqmTw68LXVjeWJMST4+borgQP2AyMNbg1BpZh9LbyhUeNkeaPF9gzfPGUAnSMV3qPYdWUwDIjjCLiSDOl7vg==", - "dev": true, - "license": "ISC", - "dependencies": { - "@istanbuljs/schema": "^0.1.2", - "glob": "^10.4.1", - "minimatch": "^9.0.4" - }, - "engines": { - "node": ">=18" - } - }, "node_modules/text-table": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", @@ -16901,16 +15234,20 @@ } }, "node_modules/tinyexec": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-0.3.2.tgz", - "integrity": "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.2.tgz", + "integrity": "sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==", "dev": true, - "license": "MIT" + "license": "MIT", + "engines": { + "node": ">=18" + } }, "node_modules/tinyglobby": { "version": 
"0.2.15", "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "license": "MIT", "dependencies": { "fdir": "^6.5.0", "picomatch": "^4.0.3" @@ -16944,6 +15281,7 @@ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "license": "MIT", + "peer": true, "engines": { "node": ">=12" }, @@ -16951,30 +15289,10 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, - "node_modules/tinypool": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.1.1.tgz", - "integrity": "sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^18.0.0 || >=20.0.0" - } - }, "node_modules/tinyrainbow": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-2.0.0.tgz", - "integrity": "sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/tinyspy": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-4.0.3.tgz", - "integrity": "sha512-t2T/WLB2WRgZ9EpE4jgPJ9w+i66UZfDc8wHh0xrwiRNN+UwH98GIJkTeZqX9rg0i0ptwzqW+uYeIF0T4F8LR7A==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-3.0.3.tgz", + "integrity": "sha512-PSkbLUoxOFRzJYjjxHJt9xro7D+iilgMX/C9lawzVuYiIdcihh9DXmVibBe8lmcFrRi/VzlPjBxbN7rH24q8/Q==", "dev": true, "license": "MIT", "engines": { @@ -16982,22 +15300,24 @@ } }, "node_modules/tldts": { - "version": "7.0.16", - "resolved": "https://registry.npmjs.org/tldts/-/tldts-7.0.16.tgz", - "integrity": "sha512-5bdPHSwbKTeHmXrgecID4Ljff8rQjv7g8zKQPkCozRo2HWWni+p310FSn5ImI+9kWw9kK4lzOB5q/a6iv0IJsw==", + "version": "7.0.19", + "resolved": "https://registry.npmjs.org/tldts/-/tldts-7.0.19.tgz", + "integrity": "sha512-8PWx8tvC4jDB39BQw1m4x8y5MH1BcQ5xHeL2n7UVFulMPH/3Q0uiamahFJ3lXA0zO2SUyRXuVVbWSDmstlt9YA==", "dev": true, + "license": "MIT", "dependencies": { - "tldts-core": "^7.0.16" + "tldts-core": "^7.0.19" }, "bin": { "tldts": "bin/cli.js" } }, "node_modules/tldts-core": { - "version": "7.0.16", - "resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-7.0.16.tgz", - "integrity": "sha512-XHhPmHxphLi+LGbH0G/O7dmUH9V65OY20R7vH8gETHsp5AZCjBk9l8sqmRKLaGOxnETU7XNSDUPtewAy/K6jbA==", - "dev": true + "version": "7.0.19", + "resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-7.0.19.tgz", + "integrity": "sha512-lJX2dEWx0SGH4O6p+7FPwYmJ/bu1JbcGJ8RLaG9b7liIgZ85itUVEPbMtWRVrde/0fnDPEPHW10ZsKW3kVsE9A==", + "dev": true, + "license": "MIT" }, "node_modules/to-regex-range": { "version": "5.0.1", @@ -17034,6 +15354,7 @@ "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-6.0.0.tgz", "integrity": "sha512-kXuRi1mtaKMrsLUxz3sQYvVl37B0Ns6MzfrtV5DvJceE9bPyspOqk9xxv7XbZWcfLWbFmm997vl83qUWVJA64w==", "dev": true, + "license": "BSD-3-Clause", "dependencies": { "tldts": "^7.0.5" }, @@ -17046,6 +15367,7 @@ "resolved": "https://registry.npmjs.org/tr46/-/tr46-6.0.0.tgz", "integrity": "sha512-bLVMLPtstlZ4iMQHpFHTR7GAGj2jxi8Dg0s2h2MafAE4uSWF98FC/3MomU51iQAMf8/qDUbKWf5GxuvvVcXEhw==", "dev": true, + "license": "MIT", "dependencies": { "punycode": "^2.3.1" }, @@ -17256,11 +15578,12 
@@ } }, "node_modules/typescript": { - "version": "5.9.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.2.tgz", - "integrity": "sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A==", + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", "devOptional": true, "license": "Apache-2.0", + "peer": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -17289,10 +15612,11 @@ } }, "node_modules/undici-types": { - "version": "7.12.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.12.0.tgz", - "integrity": "sha512-goOacqME2GYyOZZfb5Lgtu+1IDmAlAEu5xnD3+xTzS10hT0vzpf0SPjkXwAw9Jm+4n/mQGDP3LO8CPbYROeBfQ==", - "devOptional": true + "version": "7.16.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", + "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==", + "devOptional": true, + "license": "MIT" }, "node_modules/unified": { "version": "11.0.5", @@ -17314,9 +15638,9 @@ } }, "node_modules/unist-util-is": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz", - "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.1.tgz", + "integrity": "sha512-LsiILbtBETkDz8I9p1dQ0uyRUWuaQzd/cuEeS1hoRSyW5E5XGmTzlwY1OrNzzakGowI9Dr/I8HVaw4hTtnxy8g==", "license": "MIT", "dependencies": { "@types/unist": "^3.0.0" @@ -17368,9 +15692,9 @@ } }, "node_modules/unist-util-visit-parents": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz", - "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.2.tgz", + "integrity": "sha512-goh1s1TBrqSqukSc8wrjwWhL0hiJxgA8m4kFxGlQ+8FYQ3C/m11FcTs4YYem7V664AhHVvgoQLk890Ssdsr2IQ==", "license": "MIT", "dependencies": { "@types/unist": "^3.0.0", @@ -17389,10 +15713,21 @@ "node": ">= 0.8" } }, + "node_modules/until-async": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/until-async/-/until-async-3.0.2.tgz", + "integrity": "sha512-IiSk4HlzAMqTUseHHe3VhIGyuFmN90zMTpD3Z3y8jeQbzLIq500MVM7Jq2vUAnTKAFPJrqwkzr6PoTcPhGcOiw==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/kettanaito" + } + }, "node_modules/update-browserslist-db": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz", - "integrity": "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.2.tgz", + "integrity": "sha512-E85pfNzMQ9jpKkA7+TJAi4TJN+tBCuWh5rUcS/sv6cFi+1q9LYDwDI5dpUL0u/73EElyQ8d3TEaeW4sPedBqYA==", + "dev": true, "funding": [ { "type": "opencollective", @@ -17475,9 +15810,9 @@ } }, "node_modules/use-sync-external-store": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.5.0.tgz", - 
"integrity": "sha512-Rb46I4cGGVBmjamjphe8L/UnvJD+uPPtTkNvX5mZgqdbavhI4EbgIWJiIHXJ8bc/i9EQGPRh4DwEURJ552Do0A==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.6.0.tgz", + "integrity": "sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w==", "license": "MIT", "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" @@ -17499,9 +15834,9 @@ } }, "node_modules/valibot": { - "version": "0.41.0", - "resolved": "https://registry.npmjs.org/valibot/-/valibot-0.41.0.tgz", - "integrity": "sha512-igDBb8CTYr8YTQlOKgaN9nSS0Be7z+WRuaeYqGf3Cjz3aKmSnqEmYnkfVjzIuumGqfHpa3fLIvMEAfhrpqN8ng==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/valibot/-/valibot-1.2.0.tgz", + "integrity": "sha512-mm1rxUsmOxzrwnX5arGS+U4T25RdvpPjPN4yR0u9pUBov9+zGVtO84tif1eY4r6zWxVxu3KzIyknJy3rxfRZZg==", "dev": true, "license": "MIT", "peerDependencies": { @@ -17513,27 +15848,6 @@ } } }, - "node_modules/validate-npm-package-license": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", - "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "spdx-correct": "^3.0.0", - "spdx-expression-parse": "^3.0.0" - } - }, - "node_modules/validate-npm-package-name": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-5.0.1.tgz", - "integrity": "sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ==", - "dev": true, - "license": "ISC", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, "node_modules/vary": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", @@ -17572,11 +15886,12 @@ } }, "node_modules/vite": { - "version": "7.1.7", - "resolved": "https://registry.npmjs.org/vite/-/vite-7.1.7.tgz", - "integrity": "sha512-VbA8ScMvAISJNJVbRDTJdCwqQoAareR/wutevKanhR2/1EkoXVZVkkORaYm/tNVCjP/UDTKtcw3bAkwOUdedmA==", + "version": "7.3.0", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.3.0.tgz", + "integrity": "sha512-dZwN5L1VlUBewiP6H9s2+B3e3Jg96D0vzN+Ry73sOefebhYr9f94wwkMNN/9ouoU8pV1BqA1d1zGk8928cx0rg==", + "peer": true, "dependencies": { - "esbuild": "^0.25.0", + "esbuild": "^0.27.0", "fdir": "^6.5.0", "picomatch": "^4.0.3", "postcss": "^8.5.6", @@ -17690,11 +16005,10 @@ } }, "node_modules/vite-tsconfig-paths": { - "version": "5.1.4", - "resolved": "https://registry.npmjs.org/vite-tsconfig-paths/-/vite-tsconfig-paths-5.1.4.tgz", - "integrity": "sha512-cYj0LRuLV2c2sMqhqhGpaO3LretdtMn/BVX4cPLanIZuwwrkVl+lK84E/miEXkCHWXuq65rhNN4rXsBcOB3S4w==", + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/vite-tsconfig-paths/-/vite-tsconfig-paths-6.0.3.tgz", + "integrity": "sha512-7bL7FPX/DSviaZGYUKowWF1AiDVWjMjxNbE8lyaVGDezkedWqfGhlnQ4BZXre0ZN5P4kAgIJfAlgFDVyjrCIyg==", "dev": true, - "license": "MIT", "dependencies": { "debug": "^4.1.1", "globrex": "^0.1.2", @@ -17745,6 +16059,7 @@ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "license": "MIT", + "peer": true, "engines": { "node": ">=12" }, @@ -17753,51 +16068,50 @@ } }, "node_modules/vitest": { - "version": "3.2.4", - "resolved": 
"https://registry.npmjs.org/vitest/-/vitest-3.2.4.tgz", - "integrity": "sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/chai": "^5.2.2", - "@vitest/expect": "3.2.4", - "@vitest/mocker": "3.2.4", - "@vitest/pretty-format": "^3.2.4", - "@vitest/runner": "3.2.4", - "@vitest/snapshot": "3.2.4", - "@vitest/spy": "3.2.4", - "@vitest/utils": "3.2.4", - "chai": "^5.2.0", - "debug": "^4.4.1", - "expect-type": "^1.2.1", - "magic-string": "^0.30.17", + "version": "4.0.16", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-4.0.16.tgz", + "integrity": "sha512-E4t7DJ9pESL6E3I8nFjPa4xGUd3PmiWDLsDztS2qXSJWfHtbQnwAWylaBvSNY48I3vr8PTqIZlyK8TE3V3CA4Q==", + "dev": true, + "peer": true, + "dependencies": { + "@vitest/expect": "4.0.16", + "@vitest/mocker": "4.0.16", + "@vitest/pretty-format": "4.0.16", + "@vitest/runner": "4.0.16", + "@vitest/snapshot": "4.0.16", + "@vitest/spy": "4.0.16", + "@vitest/utils": "4.0.16", + "es-module-lexer": "^1.7.0", + "expect-type": "^1.2.2", + "magic-string": "^0.30.21", + "obug": "^2.1.1", "pathe": "^2.0.3", - "picomatch": "^4.0.2", - "std-env": "^3.9.0", + "picomatch": "^4.0.3", + "std-env": "^3.10.0", "tinybench": "^2.9.0", - "tinyexec": "^0.3.2", - "tinyglobby": "^0.2.14", - "tinypool": "^1.1.1", - "tinyrainbow": "^2.0.0", - "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0", - "vite-node": "3.2.4", + "tinyexec": "^1.0.2", + "tinyglobby": "^0.2.15", + "tinyrainbow": "^3.0.3", + "vite": "^6.0.0 || ^7.0.0", "why-is-node-running": "^2.3.0" }, "bin": { "vitest": "vitest.mjs" }, "engines": { - "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + "node": "^20.0.0 || ^22.0.0 || >=24.0.0" }, "funding": { "url": "https://opencollective.com/vitest" }, "peerDependencies": { "@edge-runtime/vm": "*", - "@types/debug": "^4.1.12", - "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", - "@vitest/browser": "3.2.4", - "@vitest/ui": "3.2.4", + "@opentelemetry/api": "^1.9.0", + "@types/node": "^20.0.0 || ^22.0.0 || >=24.0.0", + "@vitest/browser-playwright": "4.0.16", + "@vitest/browser-preview": "4.0.16", + "@vitest/browser-webdriverio": "4.0.16", + "@vitest/ui": "4.0.16", "happy-dom": "*", "jsdom": "*" }, @@ -17805,13 +16119,19 @@ "@edge-runtime/vm": { "optional": true }, - "@types/debug": { + "@opentelemetry/api": { "optional": true }, "@types/node": { "optional": true }, - "@vitest/browser": { + "@vitest/browser-playwright": { + "optional": true + }, + "@vitest/browser-preview": { + "optional": true + }, + "@vitest/browser-webdriverio": { "optional": true }, "@vitest/ui": { @@ -17868,9 +16188,9 @@ } }, "node_modules/web-vitals": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/web-vitals/-/web-vitals-5.1.0.tgz", - "integrity": "sha512-ArI3kx5jI0atlTtmV0fWU3fjpLmq/nD3Zr1iFFlJLaqa5wLBkUSzINwBPySCX/8jRyjlmy1Volw1kz1g9XE4Jg==", + "version": "4.2.4", + "resolved": "https://registry.npmjs.org/web-vitals/-/web-vitals-4.2.4.tgz", + "integrity": "sha512-r4DIlprAGwJ7YM11VZp4R884m0Vmgr6EAKe3P+kO0PPj3Unqyvv59rczf6UiGcb9Z8QxZVcqKNwv/g0WNdWwsw==", "license": "Apache-2.0" }, "node_modules/webidl-conversions": { @@ -17878,6 +16198,7 @@ "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-8.0.0.tgz", "integrity": "sha512-n4W4YFyz5JzOfQeA8oN7dUYpR+MBP3PIUsn2jLjWXwK5ASUzt0Jc/A5sAUZoCYFJRGF0FBKJ+1JjN43rNdsQzA==", "dev": true, + "license": "BSD-2-Clause", "engines": { "node": ">=20" } @@ -17913,6 +16234,7 @@ "resolved": 
"https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz", "integrity": "sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==", "dev": true, + "license": "MIT", "engines": { "node": ">=18" } @@ -17922,6 +16244,7 @@ "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-15.1.0.tgz", "integrity": "sha512-2ytDk0kiEj/yu90JOAp44PVPUkO9+jVhyf+SybKlRHSDlvOOZhdPIrr7xTH64l4WixO2cP+wQIcgujkGBPPz6g==", "dev": true, + "license": "MIT", "dependencies": { "tr46": "^6.0.0", "webidl-conversions": "^8.0.0" @@ -17931,19 +16254,18 @@ } }, "node_modules/which": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/which/-/which-3.0.1.tgz", - "integrity": "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg==", - "dev": true, + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", "license": "ISC", "dependencies": { "isexe": "^2.0.0" }, "bin": { - "node-which": "bin/which.js" + "node-which": "bin/node-which" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": ">= 8" } }, "node_modules/which-boxed-primitive": { @@ -18063,104 +16385,78 @@ } }, "node_modules/wrap-ansi": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", - "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz", + "integrity": "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==", "dev": true, "license": "MIT", "dependencies": { - "ansi-styles": "^6.1.0", - "string-width": "^5.0.1", - "strip-ansi": "^7.0.1" + "ansi-styles": "^6.2.1", + "string-width": "^7.0.0", + "strip-ansi": "^7.1.0" }, "engines": { - "node": ">=12" + "node": ">=18" }, "funding": { "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, - "node_modules/wrap-ansi-cjs": { - "name": "wrap-ansi", - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "node_modules/wrap-ansi/node_modules/ansi-regex": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz", + "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==", "dev": true, "license": "MIT", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, "engines": { - "node": ">=10" + "node": ">=12" }, "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true, - "license": "MIT" - }, - "node_modules/wrap-ansi-cjs/node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true, - 
"license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" + "url": "https://github.com/chalk/ansi-regex?sponsor=1" } }, - "node_modules/wrap-ansi/node_modules/ansi-regex": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.0.tgz", - "integrity": "sha512-TKY5pyBkHyADOPYlRT9Lx6F544mPl0vS5Ew7BJ45hA08Q+t3GjbueLliBWN3sMICk6+y7HdyxSzC4bWS8baBdg==", + "node_modules/wrap-ansi/node_modules/ansi-styles": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz", + "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==", "dev": true, "license": "MIT", "engines": { "node": ">=12" }, "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" + "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/wrap-ansi/node_modules/ansi-styles": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", - "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "node_modules/wrap-ansi/node_modules/emoji-regex": { + "version": "10.6.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.6.0.tgz", + "integrity": "sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==", + "dev": true, + "license": "MIT" + }, + "node_modules/wrap-ansi/node_modules/string-width": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz", + "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==", "dev": true, "license": "MIT", + "dependencies": { + "emoji-regex": "^10.3.0", + "get-east-asian-width": "^1.0.0", + "strip-ansi": "^7.1.0" + }, "engines": { - "node": ">=12" + "node": ">=18" }, "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/wrap-ansi/node_modules/strip-ansi": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", - "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz", + "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==", "dev": true, "license": "MIT", "dependencies": { @@ -18184,6 +16480,7 @@ "version": "8.18.3", "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.3.tgz", "integrity": "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==", + "dev": true, "license": "MIT", "engines": { "node": ">=10.0.0" @@ -18226,15 +16523,6 @@ "node": ">=0.4.0" } }, - "node_modules/xtend": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", - "integrity": 
"sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", - "license": "MIT", - "engines": { - "node": ">=0.4" - } - }, "node_modules/y18n": { "version": "5.0.8", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", @@ -18249,12 +16537,13 @@ "version": "3.1.1", "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true, "license": "ISC" }, "node_modules/yaml": { - "version": "2.8.1", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.1.tgz", - "integrity": "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw==", + "version": "2.8.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.2.tgz", + "integrity": "sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==", "devOptional": true, "license": "ISC", "bin": { @@ -18262,6 +16551,9 @@ }, "engines": { "node": ">= 14.6" + }, + "funding": { + "url": "https://github.com/sponsors/eemeli" } }, "node_modules/yargs": { @@ -18352,9 +16644,9 @@ } }, "node_modules/zustand": { - "version": "5.0.8", - "resolved": "https://registry.npmjs.org/zustand/-/zustand-5.0.8.tgz", - "integrity": "sha512-gyPKpIaxY9XcO2vSMrLbiER7QMAMGOQZVRdJ6Zi782jkbzZygq5GI9nG8g+sMgitRtndwaBSl7uiqC49o1SSiw==", + "version": "5.0.9", + "resolved": "https://registry.npmjs.org/zustand/-/zustand-5.0.9.tgz", + "integrity": "sha512-ALBtUj0AfjJt3uNRQoL1tL2tMvj6Gp/6e39dnfT6uzpelGru8v1tPOGBzayOWbPJvujM8JojDk3E1LxeFisBNg==", "license": "MIT", "engines": { "node": ">=12.20.0" diff --git a/frontend/package.json b/frontend/package.json index 8925a5d391ac..f08f6ea3b64d 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -1,63 +1,51 @@ { "name": "openhands-frontend", - "version": "0.62.0", + "version": "1.0.0", "private": true, "type": "module", "engines": { "node": ">=22.0.0" }, "dependencies": { - "@heroui/react": "2.8.5", - "@heroui/use-infinite-scroll": "^2.2.11", + "@heroui/react": "2.8.6", "@microlink/react-json-view": "^1.26.2", "@monaco-editor/react": "^4.7.0-rc.0", - "@posthog/react": "^1.4.0", - "@react-router/node": "^7.9.3", - "@react-router/serve": "^7.9.3", - "@react-types/shared": "^3.32.0", - "@stripe/react-stripe-js": "^4.0.2", - "@stripe/stripe-js": "^7.9.0", - "@tailwindcss/postcss": "^4.1.13", - "@tailwindcss/vite": "^4.1.13", - "@tanstack/react-query": "^5.90.2", + "@react-router/node": "^7.11.0", + "@react-router/serve": "^7.11.0", + "@tailwindcss/vite": "^4.1.18", + "@tanstack/react-query": "^5.90.12", "@uidotdev/usehooks": "^2.4.1", - "@vitejs/plugin-react": "^5.0.4", "@xterm/addon-fit": "^0.10.0", "@xterm/xterm": "^5.4.0", - "axios": "^1.12.2", + "axios": "^1.13.2", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", - "date-fns": "^4.1.0", - "downshift": "^9.0.10", + "downshift": "^9.0.13", "eslint-config-airbnb-typescript": "^18.0.0", - "framer-motion": "^12.23.22", - "i18next": "^25.5.2", + "framer-motion": "^12.23.25", + "i18next": "^25.7.3", "i18next-browser-languagedetector": "^8.2.0", "i18next-http-backend": "^3.0.2", - "isbot": "^5.1.31", - "jose": "^6.1.0", - "lucide-react": "^0.544.0", - "monaco-editor": "^0.53.0", - "posthog-js": "^1.290.0", - "react": "^19.1.1", - "react-dom": "^19.1.1", - "react-highlight": "^0.15.0", + "isbot": "^5.1.32", + "lucide-react": "^0.562.0", + "monaco-editor": "^0.55.1", + "posthog-js": "^1.309.1", + "react": "^19.2.3", + "react-dom": 
"^19.2.3", "react-hot-toast": "^2.6.0", - "react-i18next": "^16.0.0", + "react-i18next": "^16.5.0", "react-icons": "^5.5.0", "react-markdown": "^10.1.0", - "react-router": "^7.9.3", - "react-syntax-highlighter": "^15.6.6", + "react-router": "^7.11.0", + "react-syntax-highlighter": "^16.1.0", "remark-breaks": "^4.0.0", "remark-gfm": "^4.0.1", "sirv-cli": "^3.0.1", "socket.io-client": "^4.8.1", - "tailwind-merge": "^3.3.1", + "tailwind-merge": "^3.4.0", "tailwind-scrollbar": "^4.0.2", - "vite": "^7.1.7", - "web-vitals": "^5.1.0", - "ws": "^8.18.2", - "zustand": "^5.0.8" + "vite": "^7.3.0", + "zustand": "^5.0.9" }, "scripts": { "dev": "npm run make-i18n && cross-env VITE_MOCK_API=false react-router dev", @@ -92,29 +80,23 @@ ] }, "devDependencies": { - "@babel/parser": "^7.28.3", - "@babel/traverse": "^7.28.3", - "@babel/types": "^7.28.2", "@mswjs/socket.io-binding": "^0.2.0", - "@playwright/test": "^1.55.1", - "@react-router/dev": "^7.9.3", + "@playwright/test": "^1.57.0", + "@react-router/dev": "^7.11.0", "@tailwindcss/typography": "^0.5.19", "@tanstack/eslint-plugin-query": "^5.91.0", "@testing-library/dom": "^10.4.1", - "@testing-library/jest-dom": "^6.8.0", - "@testing-library/react": "^16.3.0", + "@testing-library/jest-dom": "^6.9.1", + "@testing-library/react": "^16.3.1", "@testing-library/user-event": "^14.6.1", - "@types/node": "^24.5.2", - "@types/react": "^19.1.15", - "@types/react-dom": "^19.1.9", - "@types/react-highlight": "^0.12.8", + "@types/node": "^25.0.3", + "@types/react": "^19.2.7", + "@types/react-dom": "^19.2.3", "@types/react-syntax-highlighter": "^15.5.13", - "@types/ws": "^8.18.1", "@typescript-eslint/eslint-plugin": "^7.18.0", "@typescript-eslint/parser": "^7.18.0", - "@vitest/coverage-v8": "^3.2.3", - "autoprefixer": "^10.4.21", - "cross-env": "^10.0.0", + "@vitest/coverage-v8": "^4.0.16", + "cross-env": "^10.1.0", "eslint": "^8.57.0", "eslint-config-airbnb": "^19.0.4", "eslint-config-airbnb-typescript": "^18.0.0", @@ -127,16 +109,15 @@ "eslint-plugin-react-hooks": "^4.6.2", "eslint-plugin-unused-imports": "^4.2.0", "husky": "^9.1.7", - "jsdom": "^27.0.0", - "lint-staged": "^16.2.3", + "jsdom": "^27.3.0", + "lint-staged": "^16.2.7", "msw": "^2.6.6", - "prettier": "^3.6.2", - "stripe": "^18.5.0", + "prettier": "^3.7.3", "tailwindcss": "^4.1.8", - "typescript": "^5.9.2", + "typescript": "^5.9.3", "vite-plugin-svgr": "^4.5.0", - "vite-tsconfig-paths": "^5.1.4", - "vitest": "^3.0.2" + "vite-tsconfig-paths": "^6.0.3", + "vitest": "^4.0.14" }, "packageManager": "npm@10.5.0", "volta": { diff --git a/frontend/public/mockServiceWorker.js b/frontend/public/mockServiceWorker.js index 7e23102e0b28..558540fa5762 100644 --- a/frontend/public/mockServiceWorker.js +++ b/frontend/public/mockServiceWorker.js @@ -7,8 +7,8 @@ * - Please do NOT modify this file. */ -const PACKAGE_VERSION = '2.11.1' -const INTEGRITY_CHECKSUM = 'f5825c521429caf22a4dd13b66e243af' +const PACKAGE_VERSION = '2.12.4' +const INTEGRITY_CHECKSUM = '4db4a41e972cec1b64cc569c66952d82' const IS_MOCKED_RESPONSE = Symbol('isMockedResponse') const activeClientIds = new Set() @@ -71,11 +71,6 @@ addEventListener('message', async function (event) { break } - case 'MOCK_DEACTIVATE': { - activeClientIds.delete(clientId) - break - } - case 'CLIENT_CLOSED': { activeClientIds.delete(clientId) @@ -94,6 +89,8 @@ addEventListener('message', async function (event) { }) addEventListener('fetch', function (event) { + const requestInterceptedAt = Date.now() + // Bypass navigation requests. 
if (event.request.mode === 'navigate') { return @@ -110,23 +107,29 @@ addEventListener('fetch', function (event) { // Bypass all requests when there are no active clients. // Prevents the self-unregistered worked from handling requests - // after it's been deleted (still remains active until the next reload). + // after it's been terminated (still remains active until the next reload). if (activeClientIds.size === 0) { return } const requestId = crypto.randomUUID() - event.respondWith(handleRequest(event, requestId)) + event.respondWith(handleRequest(event, requestId, requestInterceptedAt)) }) /** * @param {FetchEvent} event * @param {string} requestId + * @param {number} requestInterceptedAt */ -async function handleRequest(event, requestId) { +async function handleRequest(event, requestId, requestInterceptedAt) { const client = await resolveMainClient(event) const requestCloneForEvents = event.request.clone() - const response = await getResponse(event, client, requestId) + const response = await getResponse( + event, + client, + requestId, + requestInterceptedAt, + ) // Send back the response clone for the "response:*" life-cycle events. // Ensure MSW is active and ready to handle the message, otherwise @@ -202,9 +205,10 @@ async function resolveMainClient(event) { * @param {FetchEvent} event * @param {Client | undefined} client * @param {string} requestId + * @param {number} requestInterceptedAt * @returns {Promise} */ -async function getResponse(event, client, requestId) { +async function getResponse(event, client, requestId, requestInterceptedAt) { // Clone the request because it might've been already used // (i.e. its body has been read and sent to the client). const requestClone = event.request.clone() @@ -255,6 +259,7 @@ async function getResponse(event, client, requestId) { type: 'REQUEST', payload: { id: requestId, + interceptedAt: requestInterceptedAt, ...serializedRequest, }, }, diff --git a/frontend/src/api/conversation-service/v1-conversation-service.api.ts b/frontend/src/api/conversation-service/v1-conversation-service.api.ts index 5ca7daf09a19..d2f8f51ff5ae 100644 --- a/frontend/src/api/conversation-service/v1-conversation-service.api.ts +++ b/frontend/src/api/conversation-service/v1-conversation-service.api.ts @@ -11,6 +11,7 @@ import type { V1AppConversationStartTask, V1AppConversationStartTaskPage, V1AppConversation, + GetSkillsResponse, } from "./v1-conversation-service.types"; class V1ConversationService { @@ -296,6 +297,37 @@ class V1ConversationService { const { data } = await openHands.get<{ runtime_id: string }>(url); return data; } + + /** + * Read a file from a specific conversation's sandbox workspace + * @param conversationId The conversation ID + * @param filePath Path to the file to read within the sandbox workspace (defaults to /workspace/project/PLAN.md) + * @returns The content of the file or an empty string if the file doesn't exist + */ + static async readConversationFile( + conversationId: string, + filePath: string = "/workspace/project/PLAN.md", + ): Promise { + const params = new URLSearchParams(); + params.append("file_path", filePath); + + const { data } = await openHands.get( + `/api/v1/app-conversations/${conversationId}/file?${params.toString()}`, + ); + return data; + } + + /** + * Get all skills associated with a V1 conversation + * @param conversationId The conversation ID + * @returns The available skills associated with the conversation + */ + static async getSkills(conversationId: string): Promise { + const { data } = await openHands.get( 
+ `/api/v1/app-conversations/${conversationId}/skills`, + ); + return data; + } } export default V1ConversationService; diff --git a/frontend/src/api/conversation-service/v1-conversation-service.types.ts b/frontend/src/api/conversation-service/v1-conversation-service.types.ts index 789925047d95..7c8b04ccbf1e 100644 --- a/frontend/src/api/conversation-service/v1-conversation-service.types.ts +++ b/frontend/src/api/conversation-service/v1-conversation-service.types.ts @@ -3,15 +3,19 @@ import { Provider } from "#/types/settings"; import { V1SandboxStatus } from "../sandbox-service/sandbox-service.types"; // V1 API Types for requests -// Note: This represents the serialized API format, not the internal TextContent/ImageContent types -export interface V1MessageContent { - type: "text" | "image_url"; - text?: string; - image_url?: { - url: string; - }; +// These types match the SDK's TextContent and ImageContent formats +export interface V1TextContent { + type: "text"; + text: string; } +export interface V1ImageContent { + type: "image"; + image_urls: string[]; +} + +export type V1MessageContent = V1TextContent | V1ImageContent; + type V1Role = "user" | "system" | "assistant" | "tool"; export interface V1SendMessageRequest { @@ -95,3 +99,14 @@ export interface V1AppConversation { conversation_url: string | null; session_api_key: string | null; } + +export interface Skill { + name: string; + type: "repo" | "knowledge"; + content: string; + triggers: string[]; +} + +export interface GetSkillsResponse { + skills: Skill[]; +} diff --git a/frontend/src/api/event-service/event-service.api.ts b/frontend/src/api/event-service/event-service.api.ts index 3e7a42666b02..7464480d5c27 100644 --- a/frontend/src/api/event-service/event-service.api.ts +++ b/frontend/src/api/event-service/event-service.api.ts @@ -5,7 +5,6 @@ import type { ConfirmationResponseRequest, ConfirmationResponseResponse, } from "./event-service.types"; -import { openHands } from "../open-hands-axios"; class EventService { /** @@ -38,11 +37,27 @@ class EventService { return data; } - static async getEventCount(conversationId: string): Promise { - const params = new URLSearchParams(); - params.append("conversation_id__eq", conversationId); - const { data } = await openHands.get( - `/api/v1/events/count?${params.toString()}`, + /** + * Get event count for a V1 conversation + * @param conversationId The conversation ID + * @param conversationUrl The conversation URL (e.g., "http://localhost:54928/api/conversations/...") + * @param sessionApiKey Session API key for authentication (required for V1) + * @returns The event count + */ + static async getEventCount( + conversationId: string, + conversationUrl: string, + sessionApiKey?: string | null, + ): Promise { + // Build the runtime URL using the conversation URL + const runtimeUrl = buildHttpBaseUrl(conversationUrl); + + // Build session headers for authentication + const headers = buildSessionHeaders(sessionApiKey); + + const { data } = await axios.get( + `${runtimeUrl}/api/conversations/${conversationId}/events/count`, + { headers }, ); return data; } diff --git a/frontend/src/ui/microagent-management-service/microagent-management-service.api.ts b/frontend/src/api/microagent-management-service/microagent-management-service.api.ts similarity index 100% rename from frontend/src/ui/microagent-management-service/microagent-management-service.api.ts rename to frontend/src/api/microagent-management-service/microagent-management-service.api.ts diff --git 
a/frontend/src/settings-service/settings-service.api.ts b/frontend/src/api/settings-service/settings-service.api.ts similarity index 57% rename from frontend/src/settings-service/settings-service.api.ts rename to frontend/src/api/settings-service/settings-service.api.ts index 6d7309b8d1b0..1b0d1d5e0e04 100644 --- a/frontend/src/settings-service/settings-service.api.ts +++ b/frontend/src/api/settings-service/settings-service.api.ts @@ -1,5 +1,5 @@ -import { openHands } from "../api/open-hands-axios"; -import { ApiSettings, PostApiSettings } from "./settings.types"; +import { openHands } from "../open-hands-axios"; +import { Settings } from "#/types/settings"; /** * Settings service for managing application settings @@ -8,8 +8,8 @@ class SettingsService { /** * Get the settings from the server or use the default settings if not found */ - static async getSettings(): Promise { - const { data } = await openHands.get("/api/settings"); + static async getSettings(): Promise { + const { data } = await openHands.get("/api/settings"); return data; } @@ -17,9 +17,7 @@ class SettingsService { * Save the settings to the server. Only valid settings are saved. * @param settings - the settings to save */ - static async saveSettings( - settings: Partial, - ): Promise { + static async saveSettings(settings: Partial): Promise { const data = await openHands.post("/api/settings", settings); return data.status === 200; } diff --git a/frontend/src/components/features/browser/browser.tsx b/frontend/src/components/features/browser/browser.tsx index 8c3842edd4b7..95c8f1fa1a51 100644 --- a/frontend/src/components/features/browser/browser.tsx +++ b/frontend/src/components/features/browser/browser.tsx @@ -12,10 +12,9 @@ export function BrowserPanel() { reset(); }, [conversationId, reset]); - const imgSrc = - screenshotSrc && screenshotSrc.startsWith("data:image/png;base64,") - ? screenshotSrc - : `data:image/png;base64,${screenshotSrc || ""}`; + const imgSrc = screenshotSrc?.startsWith("data:image/png;base64,") + ? screenshotSrc + : `data:image/png;base64,${screenshotSrc ?? ""}`; return (
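// --- Editor's sketch (not part of this patch) ---------------------------------
// The v1-conversation-service hunks earlier in this patch add a `getSkills`
// client method plus `Skill` / `GetSkillsResponse` types. The hook below is a
// minimal, hypothetical sketch of how a TanStack Query hook might wrap that
// method; the hook name, query key, and "#/" alias import paths are assumptions
// for illustration, not code taken from this diff.
import { useQuery } from "@tanstack/react-query";
import V1ConversationService from "#/api/conversation-service/v1-conversation-service.api";
import type { GetSkillsResponse } from "#/api/conversation-service/v1-conversation-service.types";

export const useConversationSkills = (conversationId: string | undefined) =>
  useQuery<GetSkillsResponse>({
    // Scope the cache entry to the conversation so switching conversations refetches.
    queryKey: ["conversation", conversationId, "skills"],
    queryFn: () => V1ConversationService.getSkills(conversationId as string),
    // Only run once a conversation id is available.
    enabled: Boolean(conversationId),
  });
// -------------------------------------------------------------------------------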
diff --git a/frontend/src/components/features/chat/change-agent-button.tsx b/frontend/src/components/features/chat/change-agent-button.tsx index 62575879631e..28de891db973 100644 --- a/frontend/src/components/features/chat/change-agent-button.tsx +++ b/frontend/src/components/features/chat/change-agent-button.tsx @@ -5,27 +5,22 @@ import { I18nKey } from "#/i18n/declaration"; import CodeTagIcon from "#/icons/code-tag.svg?react"; import ChevronDownSmallIcon from "#/icons/chevron-down-small.svg?react"; import LessonPlanIcon from "#/icons/lesson-plan.svg?react"; -import { useConversationStore } from "#/state/conversation-store"; +import { useConversationStore } from "#/stores/conversation-store"; import { ChangeAgentContextMenu } from "./change-agent-context-menu"; import { cn } from "#/utils/utils"; import { USE_PLANNING_AGENT } from "#/utils/feature-flags"; import { useAgentState } from "#/hooks/use-agent-state"; import { AgentState } from "#/types/agent-state"; import { useActiveConversation } from "#/hooks/query/use-active-conversation"; -import { useCreateConversation } from "#/hooks/mutation/use-create-conversation"; -import { displaySuccessToast } from "#/utils/custom-toast-handlers"; import { useUnifiedWebSocketStatus } from "#/hooks/use-unified-websocket-status"; import { useSubConversationTaskPolling } from "#/hooks/query/use-sub-conversation-task-polling"; +import { useHandlePlanClick } from "#/hooks/use-handle-plan-click"; export function ChangeAgentButton() { const [contextMenuOpen, setContextMenuOpen] = useState(false); - const { - conversationMode, - setConversationMode, - setSubConversationTaskId, - subConversationTaskId, - } = useConversationStore(); + const { conversationMode, setConversationMode, subConversationTaskId } = + useConversationStore(); const webSocketStatus = useUnifiedWebSocketStatus(); @@ -40,8 +35,6 @@ export function ChangeAgentButton() { const isAgentRunning = curAgentState === AgentState.RUNNING; const { data: conversation } = useActiveConversation(); - const { mutate: createConversation, isPending: isCreatingConversation } = - useCreateConversation(); // Poll sub-conversation task and invalidate parent conversation when ready useSubConversationTaskPolling( @@ -49,6 +42,9 @@ export function ChangeAgentButton() { conversation?.conversation_id || null, ); + // Get handlePlanClick and isCreatingConversation from custom hook + const { handlePlanClick, isCreatingConversation } = useHandlePlanClick(); + // Close context menu when agent starts running useEffect(() => { if ((isAgentRunning || !isWebSocketConnected) && contextMenuOpen) { @@ -56,45 +52,6 @@ export function ChangeAgentButton() { } }, [isAgentRunning, contextMenuOpen, isWebSocketConnected]); - const handlePlanClick = ( - event: React.MouseEvent | KeyboardEvent, - ) => { - event.preventDefault(); - event.stopPropagation(); - - // Set conversation mode to "plan" immediately - setConversationMode("plan"); - - // Check if sub_conversation_ids is not empty - if ( - (conversation?.sub_conversation_ids && - conversation.sub_conversation_ids.length > 0) || - !conversation?.conversation_id - ) { - // Do nothing if both conditions are true - return; - } - - // Create a new sub-conversation if we have a current conversation ID - createConversation( - { - parentConversationId: conversation.conversation_id, - agentType: "plan", - }, - { - onSuccess: (data) => { - displaySuccessToast( - t(I18nKey.PLANNING_AGENTT$PLANNING_AGENT_INITIALIZED), - ); - // Track the task ID to poll for sub-conversation creation - if 
(data.v1_task_id) { - setSubConversationTaskId(data.v1_task_id); - } - }, - }, - ); - }; - const isButtonDisabled = isAgentRunning || isCreatingConversation || diff --git a/frontend/src/components/features/chat/chat-interface.tsx b/frontend/src/components/features/chat/chat-interface.tsx index f37bd59c26ce..84c269dac35d 100644 --- a/frontend/src/components/features/chat/chat-interface.tsx +++ b/frontend/src/components/features/chat/chat-interface.tsx @@ -38,7 +38,7 @@ import { import { useUnifiedUploadFiles } from "#/hooks/mutation/use-unified-upload-files"; import { useConfig } from "#/hooks/query/use-config"; import { validateFiles } from "#/utils/file-validation"; -import { useConversationStore } from "#/state/conversation-store"; +import { useConversationStore } from "#/stores/conversation-store"; import ConfirmationModeEnabled from "./confirmation-mode-enabled"; import { isV0Event, diff --git a/frontend/src/components/features/chat/chat-message.tsx b/frontend/src/components/features/chat/chat-message.tsx index b822220c4dac..6f2f388682c1 100644 --- a/frontend/src/components/features/chat/chat-message.tsx +++ b/frontend/src/components/features/chat/chat-message.tsx @@ -1,15 +1,9 @@ import React from "react"; -import Markdown from "react-markdown"; -import remarkGfm from "remark-gfm"; -import remarkBreaks from "remark-breaks"; -import { code } from "../markdown/code"; import { cn } from "#/utils/utils"; -import { ul, ol } from "../markdown/list"; import { CopyToClipboardButton } from "#/components/shared/buttons/copy-to-clipboard-button"; -import { anchor } from "../markdown/anchor"; import { OpenHandsSourceType } from "#/types/core/base"; -import { paragraph } from "../markdown/paragraph"; import { TooltipButton } from "#/components/shared/buttons/tooltip-button"; +import { MarkdownRenderer } from "../markdown/markdown-renderer"; interface ChatMessageProps { type: OpenHandsSourceType; @@ -19,6 +13,7 @@ interface ChatMessageProps { onClick: () => void; tooltip?: string; }>; + isFromPlanningAgent?: boolean; } export function ChatMessage({ @@ -26,6 +21,7 @@ export function ChatMessage({ message, children, actions, + isFromPlanningAgent = false, }: React.PropsWithChildren) { const [isHovering, setIsHovering] = React.useState(false); const [isCopy, setIsCopy] = React.useState(false); @@ -59,6 +55,7 @@ export function ChatMessage({ "flex flex-col gap-2", type === "user" && " p-4 bg-tertiary self-end", type === "agent" && "mt-6 w-full max-w-full bg-transparent", + isFromPlanningAgent && "border border-[#597ff4] bg-tertiary p-4", )} >
- - {message} - + {message}
{children} diff --git a/frontend/src/components/features/chat/chat-suggestions.tsx b/frontend/src/components/features/chat/chat-suggestions.tsx index 8abc5aa41c19..1f9a9345adfe 100644 --- a/frontend/src/components/features/chat/chat-suggestions.tsx +++ b/frontend/src/components/features/chat/chat-suggestions.tsx @@ -4,7 +4,7 @@ import { Suggestions } from "#/components/features/suggestions/suggestions"; import { I18nKey } from "#/i18n/declaration"; import BuildIt from "#/icons/build-it.svg?react"; import { SUGGESTIONS } from "#/utils/suggestions"; -import { useConversationStore } from "#/state/conversation-store"; +import { useConversationStore } from "#/stores/conversation-store"; interface ChatSuggestionsProps { onSuggestionsClick: (value: string) => void; diff --git a/frontend/src/components/features/chat/components/chat-input-container.tsx b/frontend/src/components/features/chat/components/chat-input-container.tsx index acba3074f50d..e950ce785b49 100644 --- a/frontend/src/components/features/chat/components/chat-input-container.tsx +++ b/frontend/src/components/features/chat/components/chat-input-container.tsx @@ -3,7 +3,7 @@ import { DragOver } from "../drag-over"; import { UploadedFiles } from "../uploaded-files"; import { ChatInputRow } from "./chat-input-row"; import { ChatInputActions } from "./chat-input-actions"; -import { useConversationStore } from "#/state/conversation-store"; +import { useConversationStore } from "#/stores/conversation-store"; import { cn } from "#/utils/utils"; interface ChatInputContainerProps { diff --git a/frontend/src/components/features/chat/components/chat-input-field.tsx b/frontend/src/components/features/chat/components/chat-input-field.tsx index 4c52b7980b90..7a6fe4f52be2 100644 --- a/frontend/src/components/features/chat/components/chat-input-field.tsx +++ b/frontend/src/components/features/chat/components/chat-input-field.tsx @@ -1,7 +1,7 @@ import React from "react"; import { useTranslation } from "react-i18next"; import { I18nKey } from "#/i18n/declaration"; -import { useConversationStore } from "#/state/conversation-store"; +import { useConversationStore } from "#/stores/conversation-store"; interface ChatInputFieldProps { chatInputRef: React.RefObject; diff --git a/frontend/src/components/features/chat/confirmation-mode-enabled.tsx b/frontend/src/components/features/chat/confirmation-mode-enabled.tsx index 6094d9a4c395..0e7df1afb8a6 100644 --- a/frontend/src/components/features/chat/confirmation-mode-enabled.tsx +++ b/frontend/src/components/features/chat/confirmation-mode-enabled.tsx @@ -9,7 +9,7 @@ function ConfirmationModeEnabled() { const { data: settings } = useSettings(); - if (!settings?.CONFIRMATION_MODE) { + if (!settings?.confirmation_mode) { return null; } diff --git a/frontend/src/components/features/chat/custom-chat-input.tsx b/frontend/src/components/features/chat/custom-chat-input.tsx index 92ec264a3439..624457b35b54 100644 --- a/frontend/src/components/features/chat/custom-chat-input.tsx +++ b/frontend/src/components/features/chat/custom-chat-input.tsx @@ -8,7 +8,7 @@ import { useChatSubmission } from "#/hooks/chat/use-chat-submission"; import { ChatInputGrip } from "./components/chat-input-grip"; import { ChatInputContainer } from "./components/chat-input-container"; import { HiddenFileInput } from "./components/hidden-file-input"; -import { useConversationStore } from "#/state/conversation-store"; +import { useConversationStore } from "#/stores/conversation-store"; export interface CustomChatInputProps { disabled?: boolean; 
diff --git a/frontend/src/components/features/chat/error-message.tsx b/frontend/src/components/features/chat/error-message.tsx index 8de367a9a2bf..da40b3786e5a 100644 --- a/frontend/src/components/features/chat/error-message.tsx +++ b/frontend/src/components/features/chat/error-message.tsx @@ -1,13 +1,9 @@ import React from "react"; -import Markdown from "react-markdown"; -import remarkGfm from "remark-gfm"; -import remarkBreaks from "remark-breaks"; import { useTranslation } from "react-i18next"; -import { code } from "../markdown/code"; -import { ol, ul } from "../markdown/list"; import ArrowDown from "#/icons/angle-down-solid.svg?react"; import ArrowUp from "#/icons/angle-up-solid.svg?react"; import i18n from "#/i18n"; +import { MarkdownRenderer } from "../markdown/markdown-renderer"; interface ErrorMessageProps { errorId?: string; @@ -40,18 +36,7 @@ export function ErrorMessage({ errorId, defaultMessage }: ErrorMessageProps) {
- {showDetails && ( - - {defaultMessage} - - )} + {showDetails && {defaultMessage}}
); } diff --git a/frontend/src/components/features/chat/event-content-helpers/get-observation-content.ts b/frontend/src/components/features/chat/event-content-helpers/get-observation-content.ts index 56bf58226368..11276a4e39a7 100644 --- a/frontend/src/components/features/chat/event-content-helpers/get-observation-content.ts +++ b/frontend/src/components/features/chat/event-content-helpers/get-observation-content.ts @@ -22,6 +22,13 @@ const getCommandObservationContent = ( if (content.length > MAX_CONTENT_LENGTH) { content = `${content.slice(0, MAX_CONTENT_LENGTH)}...`; } + + const command = event.observation === "run" ? event.extras.command : null; + + if (command) { + return `Command:\n\`\`\`sh\n${command}\n\`\`\`\n\nOutput:\n\`\`\`sh\n${content.trim() || i18n.t("OBSERVATION$COMMAND_NO_OUTPUT")}\n\`\`\``; + } + return `Output:\n\`\`\`sh\n${content.trim() || i18n.t("OBSERVATION$COMMAND_NO_OUTPUT")}\n\`\`\``; }; @@ -133,7 +140,7 @@ const getTaskTrackingObservationContent = ( content += "\n\n**Task List:** Empty"; } - if (event.content && event.content.trim()) { + if (event.content?.trim()) { content += `\n\n**Result:** ${event.content.trim()}`; } diff --git a/frontend/src/components/features/chat/expandable-message.tsx b/frontend/src/components/features/chat/expandable-message.tsx index 918eafd6b859..f1f7fe6869cb 100644 --- a/frontend/src/components/features/chat/expandable-message.tsx +++ b/frontend/src/components/features/chat/expandable-message.tsx @@ -1,21 +1,15 @@ import { useEffect, useState } from "react"; import { Trans, useTranslation } from "react-i18next"; -import Markdown from "react-markdown"; import { Link } from "react-router"; -import remarkGfm from "remark-gfm"; -import remarkBreaks from "remark-breaks"; import { useConfig } from "#/hooks/query/use-config"; import { I18nKey } from "#/i18n/declaration"; import ArrowDown from "#/icons/angle-down-solid.svg?react"; import ArrowUp from "#/icons/angle-up-solid.svg?react"; import CheckCircle from "#/icons/check-circle-solid.svg?react"; -import XCircle from "#/icons/x-circle-solid.svg?react"; import { OpenHandsAction } from "#/types/core/actions"; import { OpenHandsObservation } from "#/types/core/observations"; import { cn } from "#/utils/utils"; -import { code } from "../markdown/code"; -import { ol, ul } from "../markdown/list"; -import { paragraph } from "../markdown/paragraph"; +import { MarkdownRenderer } from "../markdown/markdown-renderer"; import { MonoComponent } from "./mono-component"; import { PathComponent } from "./path-component"; @@ -100,7 +94,7 @@ export function ExpandableMessage({ const statusIconClasses = "h-4 w-4 ml-2 inline"; if ( - config?.FEATURE_FLAGS.ENABLE_BILLING && + config?.FEATURE_FLAGS?.ENABLE_BILLING && config?.APP_MODE === "saas" && id === I18nKey.STATUS$ERROR_LLM_OUT_OF_CREDITS ) { @@ -174,35 +168,18 @@ export function ExpandableMessage({ )} - {type === "action" && success !== undefined && ( + {type === "action" && success && ( - {success ? ( - - ) : ( - - )} + )}
{showDetails && (
- - {details} - + {details}
)} diff --git a/frontend/src/components/features/chat/generic-event-message.tsx b/frontend/src/components/features/chat/generic-event-message.tsx index e5124b69fef9..ff2ab633b189 100644 --- a/frontend/src/components/features/chat/generic-event-message.tsx +++ b/frontend/src/components/features/chat/generic-event-message.tsx @@ -1,13 +1,9 @@ import React from "react"; -import Markdown from "react-markdown"; -import remarkGfm from "remark-gfm"; -import remarkBreaks from "remark-breaks"; -import { code } from "../markdown/code"; -import { ol, ul } from "../markdown/list"; import ArrowDown from "#/icons/angle-down-solid.svg?react"; import ArrowUp from "#/icons/angle-up-solid.svg?react"; import { SuccessIndicator } from "./success-indicator"; import { ObservationResultStatus } from "./event-content-helpers/get-observation-result"; +import { MarkdownRenderer } from "../markdown/markdown-renderer"; interface GenericEventMessageProps { title: React.ReactNode; @@ -49,16 +45,7 @@ export function GenericEventMessage({ {showDetails && (typeof details === "string" ? ( - - {details} - + {details} ) : ( details ))} diff --git a/frontend/src/components/features/chat/interactive-chat-box.tsx b/frontend/src/components/features/chat/interactive-chat-box.tsx index 56a4def14dd7..a2f1df8348a0 100644 --- a/frontend/src/components/features/chat/interactive-chat-box.tsx +++ b/frontend/src/components/features/chat/interactive-chat-box.tsx @@ -5,7 +5,7 @@ import { CustomChatInput } from "./custom-chat-input"; import { AgentState } from "#/types/agent-state"; import { useActiveConversation } from "#/hooks/query/use-active-conversation"; import { GitControlBar } from "./git-control-bar"; -import { useConversationStore } from "#/state/conversation-store"; +import { useConversationStore } from "#/stores/conversation-store"; import { useAgentState } from "#/hooks/use-agent-state"; import { processFiles, processImages } from "#/utils/file-processing"; import { useSubConversationTaskPolling } from "#/hooks/query/use-sub-conversation-task-polling"; diff --git a/frontend/src/components/features/chat/messages.tsx b/frontend/src/components/features/chat/messages.tsx index 0d9032164dbc..6e68089b134d 100644 --- a/frontend/src/components/features/chat/messages.tsx +++ b/frontend/src/components/features/chat/messages.tsx @@ -192,8 +192,7 @@ export const Messages: React.FC = React.memo( ) => { const conversationInstructions = `Target file: ${target}\n\nDescription: ${query}\n\nTriggers: ${triggers.join(", ")}`; if ( - !conversation || - !conversation.selected_repository || + !conversation?.selected_repository || !conversation.selected_branch || !conversation.git_provider || !selectedEventId diff --git a/frontend/src/components/features/chat/success-indicator.tsx b/frontend/src/components/features/chat/success-indicator.tsx index 4e5ac4779a9c..12e16d67fe65 100644 --- a/frontend/src/components/features/chat/success-indicator.tsx +++ b/frontend/src/components/features/chat/success-indicator.tsx @@ -1,6 +1,5 @@ import { FaClock } from "react-icons/fa"; import CheckCircle from "#/icons/check-circle-solid.svg?react"; -import XCircle from "#/icons/x-circle-solid.svg?react"; import { ObservationResultStatus } from "./event-content-helpers/get-observation-result"; interface SuccessIndicatorProps { @@ -17,13 +16,6 @@ export function SuccessIndicator({ status }: SuccessIndicatorProps) { /> )} - {status === "error" && ( - - )} - {status === "timeout" && ( ; case "in_progress": - return ; + return ; case "done": return ; default: diff --git 
a/frontend/src/components/features/chat/uploaded-files.tsx b/frontend/src/components/features/chat/uploaded-files.tsx index 78b0c3dc49c9..ced7849ef049 100644 --- a/frontend/src/components/features/chat/uploaded-files.tsx +++ b/frontend/src/components/features/chat/uploaded-files.tsx @@ -1,6 +1,6 @@ import { UploadedFile } from "./uploaded-file"; import { UploadedImage } from "./uploaded-image"; -import { useConversationStore } from "#/state/conversation-store"; +import { useConversationStore } from "#/stores/conversation-store"; export function UploadedFiles() { const { diff --git a/frontend/src/components/features/context-menu/account-settings-context-menu.tsx b/frontend/src/components/features/context-menu/account-settings-context-menu.tsx index c09920e61472..0c2541237d31 100644 --- a/frontend/src/components/features/context-menu/account-settings-context-menu.tsx +++ b/frontend/src/components/features/context-menu/account-settings-context-menu.tsx @@ -5,11 +5,10 @@ import { ContextMenu } from "#/ui/context-menu"; import { ContextMenuListItem } from "./context-menu-list-item"; import { Divider } from "#/ui/divider"; import { useClickOutsideElement } from "#/hooks/use-click-outside-element"; -import { useConfig } from "#/hooks/query/use-config"; import { I18nKey } from "#/i18n/declaration"; import LogOutIcon from "#/icons/log-out.svg?react"; import DocumentIcon from "#/icons/document.svg?react"; -import { SAAS_NAV_ITEMS, OSS_NAV_ITEMS } from "#/constants/settings-nav"; +import { useSettingsNavItems } from "#/hooks/use-settings-nav-items"; interface AccountSettingsContextMenuProps { onLogout: () => void; @@ -22,21 +21,17 @@ export function AccountSettingsContextMenu({ }: AccountSettingsContextMenuProps) { const ref = useClickOutsideElement(onClose); const { t } = useTranslation(); - const { data: config } = useConfig(); + // Get navigation items and filter out LLM settings if the feature flag is enabled + const items = useSettingsNavItems(); - const isSaas = config?.APP_MODE === "saas"; - const navItems = (isSaas ? 
SAAS_NAV_ITEMS : OSS_NAV_ITEMS).map((item) => ({ + const navItems = items.map((item) => ({ ...item, icon: React.cloneElement(item.icon, { width: 16, height: 16, } as React.SVGProps), })); - - const handleNavigationClick = () => { - onClose(); - // The Link component will handle the actual navigation - }; + const handleNavigationClick = () => onClose(); return ( ( handleNavigationClick()} + onClick={handleNavigationClick} className="flex items-center gap-2 p-2 hover:bg-[#5C5D62] rounded h-[30px]" > {icon} diff --git a/frontend/src/components/features/controls/agent-status.tsx b/frontend/src/components/features/controls/agent-status.tsx index 078eb5f40f25..f62472bf9540 100644 --- a/frontend/src/components/features/controls/agent-status.tsx +++ b/frontend/src/components/features/controls/agent-status.tsx @@ -1,6 +1,6 @@ import { useTranslation } from "react-i18next"; import { useEffect } from "react"; -import { useStatusStore } from "#/state/status-store"; +import { useStatusStore } from "#/stores/status-store"; import { useActiveConversation } from "#/hooks/query/use-active-conversation"; import { getStatusCode } from "#/utils/status"; import { ChatStopButton } from "../chat/chat-stop-button"; @@ -9,7 +9,7 @@ import ClockIcon from "#/icons/u-clock-three.svg?react"; import { ChatResumeAgentButton } from "../chat/chat-play-button"; import { cn, isTaskPolling } from "#/utils/utils"; import { AgentLoading } from "./agent-loading"; -import { useConversationStore } from "#/state/conversation-store"; +import { useConversationStore } from "#/stores/conversation-store"; import CircleErrorIcon from "#/icons/circle-error.svg?react"; import { useAgentState } from "#/hooks/use-agent-state"; import { useUnifiedWebSocketStatus } from "#/hooks/use-unified-websocket-status"; @@ -59,13 +59,15 @@ export function AgentStatus({ ); const shouldShownAgentLoading = - isPausing || curAgentState === AgentState.INIT || curAgentState === AgentState.LOADING || (webSocketStatus === "CONNECTING" && taskStatus !== "ERROR") || isTaskPolling(taskStatus) || isTaskPolling(subConversationTaskStatus); + // For UI rendering - includes pause state + const isLoading = shouldShownAgentLoading || isPausing; + const shouldShownAgentError = curAgentState === AgentState.ERROR || curAgentState === AgentState.RATE_LIMITED || @@ -93,25 +95,28 @@ export function AgentStatus({
- {shouldShownAgentLoading && } - {!shouldShownAgentLoading && shouldShownAgentStop && ( + {isLoading && } + {!isLoading && shouldShownAgentStop && ( )} - {!shouldShownAgentLoading && shouldShownAgentResume && ( + {!isLoading && shouldShownAgentResume && ( )} - {!shouldShownAgentLoading && shouldShownAgentError && ( - + {!isLoading && shouldShownAgentError && ( + )} - {!shouldShownAgentLoading && + {!isLoading && !shouldShownAgentStop && !shouldShownAgentResume && !shouldShownAgentError && } diff --git a/frontend/src/components/features/controls/git-tools-submenu.tsx b/frontend/src/components/features/controls/git-tools-submenu.tsx index e55dd93952cc..97db554aba0d 100644 --- a/frontend/src/components/features/controls/git-tools-submenu.tsx +++ b/frontend/src/components/features/controls/git-tools-submenu.tsx @@ -10,7 +10,7 @@ import { getCreatePRPrompt, getCreateNewBranchPrompt, } from "#/utils/utils"; -import { useConversationStore } from "#/state/conversation-store"; +import { useConversationStore } from "#/stores/conversation-store"; import ArrowUpIcon from "#/icons/u-arrow-up.svg?react"; import ArrowDownIcon from "#/icons/u-arrow-down.svg?react"; diff --git a/frontend/src/components/features/controls/macros-submenu.tsx b/frontend/src/components/features/controls/macros-submenu.tsx index 8705e11959ad..b167501d0b32 100644 --- a/frontend/src/components/features/controls/macros-submenu.tsx +++ b/frontend/src/components/features/controls/macros-submenu.tsx @@ -8,7 +8,7 @@ import PrStatusIcon from "#/icons/pr-status.svg?react"; import DocumentIcon from "#/icons/document.svg?react"; import WaterIcon from "#/icons/u-water.svg?react"; import { I18nKey } from "#/i18n/declaration"; -import { useConversationStore } from "#/state/conversation-store"; +import { useConversationStore } from "#/stores/conversation-store"; import { REPO_SUGGESTIONS } from "#/utils/suggestions/repo-suggestions"; import { CONTEXT_MENU_ICON_TEXT_CLASSNAME } from "#/utils/constants"; diff --git a/frontend/src/components/features/controls/tools-context-menu.tsx b/frontend/src/components/features/controls/tools-context-menu.tsx index 39330e25e44b..2089f9511195 100644 --- a/frontend/src/components/features/controls/tools-context-menu.tsx +++ b/frontend/src/components/features/controls/tools-context-menu.tsx @@ -26,14 +26,14 @@ const contextMenuListItemClassName = cn( interface ToolsContextMenuProps { onClose: () => void; - onShowMicroagents: (event: React.MouseEvent) => void; + onShowSkills: (event: React.MouseEvent) => void; onShowAgentTools: (event: React.MouseEvent) => void; shouldShowAgentTools?: boolean; } export function ToolsContextMenu({ onClose, - onShowMicroagents, + onShowSkills, onShowAgentTools, shouldShowAgentTools = true, }: ToolsContextMenuProps) { @@ -41,7 +41,6 @@ export function ToolsContextMenu({ const { data: conversation } = useActiveConversation(); const { providers } = useUserProviders(); - // TODO: Hide microagent menu items for V1 conversations // This is a temporary measure and may be re-enabled in the future const isV1Conversation = conversation?.conversation_version === "V1"; @@ -130,20 +129,17 @@ export function ToolsContextMenu({ {(!isV1Conversation || shouldShowAgentTools) && } - {/* Show Available Microagents - Hidden for V1 conversations */} - {!isV1Conversation && ( - - } - text={t(I18nKey.CONVERSATION$SHOW_MICROAGENTS)} - className={CONTEXT_MENU_ICON_TEXT_CLASSNAME} - /> - - )} + + } + text={t(I18nKey.CONVERSATION$SHOW_SKILLS)} + className={CONTEXT_MENU_ICON_TEXT_CLASSNAME} + /> + {/* Show 
Agent Tools and Metadata - Only show if system message is available */} {shouldShowAgentTools && ( diff --git a/frontend/src/components/features/controls/tools.tsx b/frontend/src/components/features/controls/tools.tsx index 56ef58bc8ead..80994cbe651b 100644 --- a/frontend/src/components/features/controls/tools.tsx +++ b/frontend/src/components/features/controls/tools.tsx @@ -7,7 +7,7 @@ import { ToolsContextMenu } from "./tools-context-menu"; import { useConversationNameContextMenu } from "#/hooks/use-conversation-name-context-menu"; import { useActiveConversation } from "#/hooks/query/use-active-conversation"; import { SystemMessageModal } from "../conversation-panel/system-message-modal"; -import { MicroagentsModal } from "../conversation-panel/microagents-modal"; +import { SkillsModal } from "../conversation-panel/skills-modal"; export function Tools() { const { t } = useTranslation(); @@ -17,11 +17,11 @@ export function Tools() { const { handleShowAgentTools, - handleShowMicroagents, + handleShowSkills, systemModalVisible, setSystemModalVisible, - microagentsModalVisible, - setMicroagentsModalVisible, + skillsModalVisible, + setSkillsModalVisible, systemMessage, shouldShowAgentTools, } = useConversationNameContextMenu({ @@ -51,7 +51,7 @@ export function Tools() { {contextMenuOpen && ( setContextMenuOpen(false)} - onShowMicroagents={handleShowMicroagents} + onShowSkills={handleShowSkills} onShowAgentTools={handleShowAgentTools} shouldShowAgentTools={shouldShowAgentTools} /> @@ -64,9 +64,9 @@ export function Tools() { systemMessage={systemMessage ? systemMessage.args : null} /> - {/* Microagents Modal */} - {microagentsModalVisible && ( - setMicroagentsModalVisible(false)} /> + {/* Skills Modal */} + {skillsModalVisible && ( + setSkillsModalVisible(false)} /> )}
); diff --git a/frontend/src/components/features/conversation-panel/conversation-card-context-menu.tsx b/frontend/src/components/features/conversation-panel/conversation-card-context-menu.tsx deleted file mode 100644 index 63ea33152b2e..000000000000 --- a/frontend/src/components/features/conversation-panel/conversation-card-context-menu.tsx +++ /dev/null @@ -1,147 +0,0 @@ -import { - Trash, - Power, - Pencil, - Download, - Wallet, - Wrench, - Bot, -} from "lucide-react"; -import { useTranslation } from "react-i18next"; -import { useClickOutsideElement } from "#/hooks/use-click-outside-element"; -import { cn } from "#/utils/utils"; -import { ContextMenu } from "#/ui/context-menu"; -import { ContextMenuListItem } from "../context-menu/context-menu-list-item"; -import { Divider } from "#/ui/divider"; -import { I18nKey } from "#/i18n/declaration"; -import { ContextMenuIconText } from "../context-menu/context-menu-icon-text"; -import { useActiveConversation } from "#/hooks/query/use-active-conversation"; - -interface ConversationCardContextMenuProps { - onClose: () => void; - onDelete?: (event: React.MouseEvent) => void; - onStop?: (event: React.MouseEvent) => void; - onEdit?: (event: React.MouseEvent) => void; - onDisplayCost?: (event: React.MouseEvent) => void; - onShowAgentTools?: (event: React.MouseEvent) => void; - onShowMicroagents?: (event: React.MouseEvent) => void; - onDownloadViaVSCode?: (event: React.MouseEvent) => void; - position?: "top" | "bottom"; -} - -export function ConversationCardContextMenu({ - onClose, - onDelete, - onStop, - onEdit, - onDisplayCost, - onShowAgentTools, - onShowMicroagents, - onDownloadViaVSCode, - position = "bottom", -}: ConversationCardContextMenuProps) { - const { t } = useTranslation(); - const ref = useClickOutsideElement(onClose); - const { data: conversation } = useActiveConversation(); - - // TODO: Hide microagent menu items for V1 conversations - // This is a temporary measure and may be re-enabled in the future - const isV1Conversation = conversation?.conversation_version === "V1"; - - const hasEdit = Boolean(onEdit); - const hasDownload = Boolean(onDownloadViaVSCode); - const hasTools = Boolean(onShowAgentTools || onShowMicroagents); - const hasInfo = Boolean(onDisplayCost); - const hasControl = Boolean(onStop || onDelete); - - return ( - - {onEdit && ( - - - - )} - - {hasEdit && (hasDownload || hasTools || hasInfo || hasControl) && ( - - )} - - {onDownloadViaVSCode && ( - - - - )} - - {hasDownload && (hasTools || hasInfo || hasControl) && } - - {onShowAgentTools && ( - - - - )} - - {onShowMicroagents && !isV1Conversation && ( - - - - )} - - {hasTools && (hasInfo || hasControl) && } - - {onDisplayCost && ( - - - - )} - - {hasInfo && hasControl && } - - {onStop && ( - - - - )} - - {onDelete && ( - - - - )} - - ); -} diff --git a/frontend/src/components/features/conversation-panel/conversation-card/conversation-card-context-menu.tsx b/frontend/src/components/features/conversation-panel/conversation-card/conversation-card-context-menu.tsx index 6565a83a1099..30a7ec42cbc1 100644 --- a/frontend/src/components/features/conversation-panel/conversation-card/conversation-card-context-menu.tsx +++ b/frontend/src/components/features/conversation-panel/conversation-card/conversation-card-context-menu.tsx @@ -22,7 +22,7 @@ interface ConversationCardContextMenuProps { onEdit?: (event: React.MouseEvent) => void; onDisplayCost?: (event: React.MouseEvent) => void; onShowAgentTools?: (event: React.MouseEvent) => void; - onShowMicroagents?: (event: 
React.MouseEvent) => void; + onShowSkills?: (event: React.MouseEvent) => void; onDownloadViaVSCode?: (event: React.MouseEvent) => void; position?: "top" | "bottom"; } @@ -37,7 +37,7 @@ export function ConversationCardContextMenu({ onEdit, onDisplayCost, onShowAgentTools, - onShowMicroagents, + onShowSkills, onDownloadViaVSCode, position = "bottom", }: ConversationCardContextMenuProps) { @@ -96,15 +96,15 @@ export function ConversationCardContextMenu({ />
), - onShowMicroagents && ( + onShowSkills && ( } - text={t(I18nKey.CONVERSATION$SHOW_MICROAGENTS)} + text={t(I18nKey.CONVERSATION$SHOW_SKILLS)} /> ), diff --git a/frontend/src/components/features/conversation-panel/conversation-card/conversation-card-footer.tsx b/frontend/src/components/features/conversation-panel/conversation-card/conversation-card-footer.tsx index fb77b582428d..f44a2f814125 100644 --- a/frontend/src/components/features/conversation-panel/conversation-card/conversation-card-footer.tsx +++ b/frontend/src/components/features/conversation-panel/conversation-card/conversation-card-footer.tsx @@ -39,7 +39,7 @@ export function ConversationCardFooter({ {(createdAt ?? lastUpdatedAt) && (

)} diff --git a/frontend/src/components/features/conversation-panel/conversation-panel-wrapper.tsx b/frontend/src/components/features/conversation-panel/conversation-panel-wrapper.tsx index 3ecb6d01a6dd..57bde067271d 100644 --- a/frontend/src/components/features/conversation-panel/conversation-panel-wrapper.tsx +++ b/frontend/src/components/features/conversation-panel/conversation-panel-wrapper.tsx @@ -20,7 +20,7 @@ export function ConversationPanelWrapper({ return ReactDOM.createPortal(
diff --git a/frontend/src/components/features/conversation-panel/microagent-content.tsx b/frontend/src/components/features/conversation-panel/skill-content.tsx similarity index 76% rename from frontend/src/components/features/conversation-panel/microagent-content.tsx rename to frontend/src/components/features/conversation-panel/skill-content.tsx index fad048560788..9303047e3afb 100644 --- a/frontend/src/components/features/conversation-panel/microagent-content.tsx +++ b/frontend/src/components/features/conversation-panel/skill-content.tsx @@ -3,17 +3,17 @@ import { I18nKey } from "#/i18n/declaration"; import { Typography } from "#/ui/typography"; import { Pre } from "#/ui/pre"; -interface MicroagentContentProps { +interface SkillContentProps { content: string; } -export function MicroagentContent({ content }: MicroagentContentProps) { +export function SkillContent({ content }: SkillContentProps) { const { t } = useTranslation(); return (
- {t(I18nKey.MICROAGENTS_MODAL$CONTENT)} + {t(I18nKey.COMMON$CONTENT)}
-        {content || t(I18nKey.MICROAGENTS_MODAL$NO_CONTENT)}
+        {content || t(I18nKey.SKILLS_MODAL$NO_CONTENT)}
       
); diff --git a/frontend/src/components/features/conversation-panel/microagent-item.tsx b/frontend/src/components/features/conversation-panel/skill-item.tsx similarity index 65% rename from frontend/src/components/features/conversation-panel/microagent-item.tsx rename to frontend/src/components/features/conversation-panel/skill-item.tsx index d23febb09900..c76bf10be95c 100644 --- a/frontend/src/components/features/conversation-panel/microagent-item.tsx +++ b/frontend/src/components/features/conversation-panel/skill-item.tsx @@ -1,35 +1,31 @@ import { ChevronDown, ChevronRight } from "lucide-react"; -import { Microagent } from "#/api/open-hands.types"; import { Typography } from "#/ui/typography"; -import { MicroagentTriggers } from "./microagent-triggers"; -import { MicroagentContent } from "./microagent-content"; +import { SkillTriggers } from "./skill-triggers"; +import { SkillContent } from "./skill-content"; +import { Skill } from "#/api/conversation-service/v1-conversation-service.types"; -interface MicroagentItemProps { - agent: Microagent; +interface SkillItemProps { + skill: Skill; isExpanded: boolean; onToggle: (agentName: string) => void; } -export function MicroagentItem({ - agent, - isExpanded, - onToggle, -}: MicroagentItemProps) { +export function SkillItem({ skill, isExpanded, onToggle }: SkillItemProps) { return (
+ )}
); } diff --git a/frontend/src/components/features/conversation/conversation-tabs/conversation-tabs.tsx b/frontend/src/components/features/conversation/conversation-tabs/conversation-tabs.tsx index e84466bd2241..ed649f373c87 100644 --- a/frontend/src/components/features/conversation/conversation-tabs/conversation-tabs.tsx +++ b/frontend/src/components/features/conversation/conversation-tabs/conversation-tabs.tsx @@ -16,11 +16,13 @@ import { VSCodeTooltipContent } from "./vscode-tooltip-content"; import { useConversationStore, type ConversationTab, -} from "#/state/conversation-store"; +} from "#/stores/conversation-store"; import { ConversationTabsContextMenu } from "./conversation-tabs-context-menu"; import { USE_PLANNING_AGENT } from "#/utils/feature-flags"; +import { useConversationId } from "#/hooks/use-conversation-id"; export function ConversationTabs() { + const { conversationId } = useConversationId(); const { selectedTab, isRightPanelShown, @@ -30,18 +32,21 @@ export function ConversationTabs() { const [isMenuOpen, setIsMenuOpen] = useState(false); - // Persist selectedTab and isRightPanelShown in localStorage + // Persist selectedTab and isRightPanelShown in localStorage per conversation const [persistedSelectedTab, setPersistedSelectedTab] = useLocalStorage( - "conversation-selected-tab", + `conversation-selected-tab-${conversationId}`, "editor", ); const [persistedIsRightPanelShown, setPersistedIsRightPanelShown] = - useLocalStorage("conversation-right-panel-shown", true); + useLocalStorage( + `conversation-right-panel-shown-${conversationId}`, + true, + ); const [persistedUnpinnedTabs] = useLocalStorage( - "conversation-unpinned-tabs", + `conversation-unpinned-tabs-${conversationId}`, [], ); diff --git a/frontend/src/components/features/conversation/metrics-modal/context-window-section.tsx b/frontend/src/components/features/conversation/metrics-modal/context-window-section.tsx index 5908003caede..2fbfd1bc2d55 100644 --- a/frontend/src/components/features/conversation/metrics-modal/context-window-section.tsx +++ b/frontend/src/components/features/conversation/metrics-modal/context-window-section.tsx @@ -12,7 +12,8 @@ export function ContextWindowSection({ }: ContextWindowSectionProps) { const { t } = useTranslation(); - const usagePercentage = (perTurnToken / contextWindow) * 100; + const usagePercentage = + contextWindow > 0 ? 
(perTurnToken / contextWindow) * 100 : 0; const progressWidth = Math.min(100, usagePercentage); return ( diff --git a/frontend/src/components/features/guards/email-verification-guard.tsx b/frontend/src/components/features/guards/email-verification-guard.tsx index b21201650334..3fbb774842ac 100644 --- a/frontend/src/components/features/guards/email-verification-guard.tsx +++ b/frontend/src/components/features/guards/email-verification-guard.tsx @@ -20,13 +20,13 @@ export function EmailVerificationGuard({ if (isLoading) return; // If EMAIL_VERIFIED is explicitly false (not undefined or null) - if (settings?.EMAIL_VERIFIED === false) { + if (settings?.email_verified === false) { // Allow access to /settings/user but redirect from any other page if (pathname !== "/settings/user") { navigate("/settings/user", { replace: true }); } } - }, [settings?.EMAIL_VERIFIED, pathname, navigate, isLoading]); + }, [settings?.email_verified, pathname, navigate, isLoading]); return children; } diff --git a/frontend/src/components/features/home/git-provider-dropdown/git-provider-dropdown.tsx b/frontend/src/components/features/home/git-provider-dropdown/git-provider-dropdown.tsx index fdc9b21b001b..c5ab171ca8c8 100644 --- a/frontend/src/components/features/home/git-provider-dropdown/git-provider-dropdown.tsx +++ b/frontend/src/components/features/home/git-provider-dropdown/git-provider-dropdown.tsx @@ -75,7 +75,7 @@ export function GitProviderDropdown({ } // If no input value, show all providers - if (!inputValue || !inputValue.trim()) { + if (!inputValue?.trim()) { return providers; } diff --git a/frontend/src/components/features/home/git-repo-dropdown/git-repo-dropdown.tsx b/frontend/src/components/features/home/git-repo-dropdown/git-repo-dropdown.tsx index 485f574f7997..45b75bbd9fa5 100644 --- a/frontend/src/components/features/home/git-repo-dropdown/git-repo-dropdown.tsx +++ b/frontend/src/components/features/home/git-repo-dropdown/git-repo-dropdown.tsx @@ -99,7 +99,7 @@ export function GitRepoDropdown({ ); // If no input value, return all recent repos for this provider - if (!inputValue || !inputValue.trim()) { + if (!inputValue?.trim()) { return providerFilteredRepos; } @@ -139,7 +139,7 @@ export function GitRepoDropdown({ baseRepositories = repositories; } // If no input value, show all repositories - else if (!inputValue || !inputValue.trim()) { + else if (!inputValue?.trim()) { baseRepositories = repositories; } // For URL inputs, use the processed search input for filtering @@ -246,8 +246,7 @@ export function GitRepoDropdown({ // Create sticky footer item for GitHub provider const stickyFooterItem = useMemo(() => { if ( - !config || - !config.APP_SLUG || + !config?.APP_SLUG || provider !== ProviderOptions.github || config.APP_MODE !== "saas" ) diff --git a/frontend/src/components/features/home/recent-conversations/recent-conversation.tsx b/frontend/src/components/features/home/recent-conversations/recent-conversation.tsx index 7fcabe2f1fb9..d86bac55bff4 100644 --- a/frontend/src/components/features/home/recent-conversations/recent-conversation.tsx +++ b/frontend/src/components/features/home/recent-conversations/recent-conversation.tsx @@ -67,12 +67,14 @@ export function RecentConversation({ conversation }: RecentConversationProps) {
) : null} - - {formatTimeDelta( - new Date(conversation.created_at || conversation.last_updated_at), - )}{" "} - {t(I18nKey.CONVERSATION$AGO)} - + {(conversation.created_at || conversation.last_updated_at) && ( + + {formatTimeDelta( + conversation.created_at || conversation.last_updated_at, + )}{" "} + {t(I18nKey.CONVERSATION$AGO)} + + )} diff --git a/frontend/src/components/features/home/recent-conversations/recent-conversations.tsx b/frontend/src/components/features/home/recent-conversations/recent-conversations.tsx index 3d6bc644106c..d0bd560c7d37 100644 --- a/frontend/src/components/features/home/recent-conversations/recent-conversations.tsx +++ b/frontend/src/components/features/home/recent-conversations/recent-conversations.tsx @@ -78,7 +78,7 @@ export function RecentConversations() { )} - {!isInitialLoading && displayedConversations?.length === 0 && ( + {!isInitialLoading && !error && displayedConversations?.length === 0 && ( {t(I18nKey.HOME$NO_RECENT_CONVERSATIONS)} diff --git a/frontend/src/components/features/home/repo-selection-form.tsx b/frontend/src/components/features/home/repo-selection-form.tsx index f891f25d1f35..f70364975a5e 100644 --- a/frontend/src/components/features/home/repo-selection-form.tsx +++ b/frontend/src/components/features/home/repo-selection-form.tsx @@ -35,7 +35,11 @@ export function RepositorySelectionForm({ React.useState(null); const { providers } = useUserProviders(); - const { addRecentRepository } = useHomeStore(); + const { + addRecentRepository, + setLastSelectedProvider, + getLastSelectedProvider, + } = useHomeStore(); const { mutate: createConversation, isPending, @@ -46,12 +50,24 @@ export function RepositorySelectionForm({ const { t } = useTranslation(); - // Auto-select provider if there's only one + // Auto-select provider logic React.useEffect(() => { + if (providers.length === 0) return; + + // If there's only one provider, auto-select it if (providers.length === 1 && !selectedProvider) { setSelectedProvider(providers[0]); + return; + } + + // If there are multiple providers and none is selected, try to use the last selected one + if (providers.length > 1 && !selectedProvider) { + const lastSelected = getLastSelectedProvider(); + if (lastSelected && providers.includes(lastSelected)) { + setSelectedProvider(lastSelected); + } } - }, [providers, selectedProvider]); + }, [providers, selectedProvider, getLastSelectedProvider]); // We check for isSuccess because the app might require time to render // into the new conversation screen after the conversation is created. @@ -66,6 +82,7 @@ export function RepositorySelectionForm({ } setSelectedProvider(provider); + setLastSelectedProvider(provider); // Store the selected provider setSelectedRepository(null); // Reset repository selection when provider changes setSelectedBranch(null); // Reset branch selection when provider changes onRepoSelection(null); // Reset parent component's selected repo diff --git a/frontend/src/components/features/home/shared/dropdown-item.tsx b/frontend/src/components/features/home/shared/dropdown-item.tsx index 08e22dc12bfd..36a0e25967c1 100644 --- a/frontend/src/components/features/home/shared/dropdown-item.tsx +++ b/frontend/src/components/features/home/shared/dropdown-item.tsx @@ -45,7 +45,7 @@ export function DropdownItem({ // eslint-disable-next-line react/jsx-props-no-spreading
  • - {renderIcon && renderIcon(item)} + {renderIcon?.(item)} {getDisplayText(item)}
  • diff --git a/frontend/src/components/features/markdown/headings.tsx b/frontend/src/components/features/markdown/headings.tsx index 2e12fc7db4fe..3098a4514a64 100644 --- a/frontend/src/components/features/markdown/headings.tsx +++ b/frontend/src/components/features/markdown/headings.tsx @@ -8,7 +8,7 @@ export function h1({ React.HTMLAttributes & ExtraProps) { return ( -

    +

    {children}

    ); diff --git a/frontend/src/components/features/markdown/markdown-renderer.tsx b/frontend/src/components/features/markdown/markdown-renderer.tsx new file mode 100644 index 000000000000..0cb55498d63f --- /dev/null +++ b/frontend/src/components/features/markdown/markdown-renderer.tsx @@ -0,0 +1,80 @@ +import Markdown, { Components } from "react-markdown"; +import remarkGfm from "remark-gfm"; +import remarkBreaks from "remark-breaks"; +import { code } from "./code"; +import { ul, ol } from "./list"; +import { paragraph } from "./paragraph"; +import { anchor } from "./anchor"; +import { h1, h2, h3, h4, h5, h6 } from "./headings"; + +interface MarkdownRendererProps { + /** + * The markdown content to render. Can be passed as children (string) or content prop. + */ + children?: string; + content?: string; + /** + * Additional or override components for markdown elements. + * Default components (code, ul, ol) are always included unless overridden. + */ + components?: Partial; + /** + * Whether to include standard components (anchor, paragraph). + * Defaults to false. + */ + includeStandard?: boolean; + /** + * Whether to include heading components (h1-h6). + * Defaults to false. + */ + includeHeadings?: boolean; +} + +/** + * A reusable Markdown renderer component that provides consistent + * markdown rendering across the application. + * + * By default, includes: + * - code, ul, ol components + * - remarkGfm and remarkBreaks plugins + * + * Can be extended with: + * - includeStandard: adds anchor and paragraph components + * - includeHeadings: adds h1-h6 heading components + * - components prop: allows custom overrides or additional components + */ +export function MarkdownRenderer({ + children, + content, + components: customComponents, + includeStandard = false, + includeHeadings = false, +}: MarkdownRendererProps) { + // Build the components object with defaults and optional additions + const components: Components = { + code, + ul, + ol, + ...(includeStandard && { + a: anchor, + p: paragraph, + }), + ...(includeHeadings && { + h1, + h2, + h3, + h4, + h5, + h6, + }), + ...customComponents, // Custom components override defaults + }; + + const markdownContent = content ?? children ?? 
""; + + return ( + + {markdownContent} + + ); +} diff --git a/frontend/src/components/features/microagent-management/microagent-management-add-microagent-button.tsx b/frontend/src/components/features/microagent-management/microagent-management-add-microagent-button.tsx index 0fca5981c16d..9bcb282ce832 100644 --- a/frontend/src/components/features/microagent-management/microagent-management-add-microagent-button.tsx +++ b/frontend/src/components/features/microagent-management/microagent-management-add-microagent-button.tsx @@ -1,6 +1,6 @@ import { useTranslation } from "react-i18next"; import { I18nKey } from "#/i18n/declaration"; -import { useMicroagentManagementStore } from "#/state/microagent-management-store"; +import { useMicroagentManagementStore } from "#/stores/microagent-management-store"; import { GitRepository } from "#/types/git"; interface MicroagentManagementAddMicroagentButtonProps { diff --git a/frontend/src/components/features/microagent-management/microagent-management-content.tsx b/frontend/src/components/features/microagent-management/microagent-management-content.tsx index dc16e1da98a5..fe50ea78492e 100644 --- a/frontend/src/components/features/microagent-management/microagent-management-content.tsx +++ b/frontend/src/components/features/microagent-management/microagent-management-content.tsx @@ -3,7 +3,7 @@ import { useTranslation } from "react-i18next"; import { MicroagentManagementSidebar } from "./microagent-management-sidebar"; import { MicroagentManagementMain } from "./microagent-management-main"; import { MicroagentManagementUpsertMicroagentModal } from "./microagent-management-upsert-microagent-modal"; -import { useMicroagentManagementStore } from "#/state/microagent-management-store"; +import { useMicroagentManagementStore } from "#/stores/microagent-management-store"; import { useCreateConversationAndSubscribeMultiple } from "#/hooks/use-create-conversation-and-subscribe-multiple"; import { LearnThisRepoFormData, diff --git a/frontend/src/components/features/microagent-management/microagent-management-conversation-stopped.tsx b/frontend/src/components/features/microagent-management/microagent-management-conversation-stopped.tsx index 817c31f15fe2..6c1eb627fda9 100644 --- a/frontend/src/components/features/microagent-management/microagent-management-conversation-stopped.tsx +++ b/frontend/src/components/features/microagent-management/microagent-management-conversation-stopped.tsx @@ -2,7 +2,7 @@ import { useTranslation } from "react-i18next"; import { I18nKey } from "#/i18n/declaration"; import { BrandButton } from "../settings/brand-button"; import { Loader } from "#/components/shared/loader"; -import { useMicroagentManagementStore } from "#/state/microagent-management-store"; +import { useMicroagentManagementStore } from "#/stores/microagent-management-store"; export function MicroagentManagementConversationStopped() { const { t } = useTranslation(); diff --git a/frontend/src/components/features/microagent-management/microagent-management-error.tsx b/frontend/src/components/features/microagent-management/microagent-management-error.tsx index 5918d6f08117..022a7187364a 100644 --- a/frontend/src/components/features/microagent-management/microagent-management-error.tsx +++ b/frontend/src/components/features/microagent-management/microagent-management-error.tsx @@ -2,7 +2,7 @@ import { useTranslation } from "react-i18next"; import { I18nKey } from "#/i18n/declaration"; import { BrandButton } from "../settings/brand-button"; import { Loader } from 
"#/components/shared/loader"; -import { useMicroagentManagementStore } from "#/state/microagent-management-store"; +import { useMicroagentManagementStore } from "#/stores/microagent-management-store"; export function MicroagentManagementError() { const { t } = useTranslation(); diff --git a/frontend/src/components/features/microagent-management/microagent-management-learn-this-repo-modal.tsx b/frontend/src/components/features/microagent-management/microagent-management-learn-this-repo-modal.tsx index bbc46c8425d8..9bb1b40c6d31 100644 --- a/frontend/src/components/features/microagent-management/microagent-management-learn-this-repo-modal.tsx +++ b/frontend/src/components/features/microagent-management/microagent-management-learn-this-repo-modal.tsx @@ -5,7 +5,7 @@ import { ModalBackdrop } from "#/components/shared/modals/modal-backdrop"; import { ModalBody } from "#/components/shared/modals/modal-body"; import { BrandButton } from "../settings/brand-button"; import { I18nKey } from "#/i18n/declaration"; -import { useMicroagentManagementStore } from "#/state/microagent-management-store"; +import { useMicroagentManagementStore } from "#/stores/microagent-management-store"; import XIcon from "#/icons/x.svg?react"; import { cn, getRepoMdCreatePrompt } from "#/utils/utils"; import { LearnThisRepoFormData } from "#/types/microagent-management"; diff --git a/frontend/src/components/features/microagent-management/microagent-management-learn-this-repo.tsx b/frontend/src/components/features/microagent-management/microagent-management-learn-this-repo.tsx index dbb76c162a46..82d09f29361e 100644 --- a/frontend/src/components/features/microagent-management/microagent-management-learn-this-repo.tsx +++ b/frontend/src/components/features/microagent-management/microagent-management-learn-this-repo.tsx @@ -1,6 +1,6 @@ import { useTranslation } from "react-i18next"; import { I18nKey } from "#/i18n/declaration"; -import { useMicroagentManagementStore } from "#/state/microagent-management-store"; +import { useMicroagentManagementStore } from "#/stores/microagent-management-store"; import { GitRepository } from "#/types/git"; interface MicroagentManagementLearnThisRepoProps { diff --git a/frontend/src/components/features/microagent-management/microagent-management-main.tsx b/frontend/src/components/features/microagent-management/microagent-management-main.tsx index 6647d231215f..c8405044f9e3 100644 --- a/frontend/src/components/features/microagent-management/microagent-management-main.tsx +++ b/frontend/src/components/features/microagent-management/microagent-management-main.tsx @@ -1,4 +1,4 @@ -import { useMicroagentManagementStore } from "#/state/microagent-management-store"; +import { useMicroagentManagementStore } from "#/stores/microagent-management-store"; import { MicroagentManagementDefault } from "./microagent-management-default"; import { MicroagentManagementOpeningPr } from "./microagent-management-opening-pr"; import { MicroagentManagementReviewPr } from "./microagent-management-review-pr"; diff --git a/frontend/src/components/features/microagent-management/microagent-management-microagent-card.tsx b/frontend/src/components/features/microagent-management/microagent-management-microagent-card.tsx index 9da7f8383dc1..f8ac499f55cd 100644 --- a/frontend/src/components/features/microagent-management/microagent-management-microagent-card.tsx +++ b/frontend/src/components/features/microagent-management/microagent-management-microagent-card.tsx @@ -3,7 +3,7 @@ import { useTranslation } from "react-i18next"; 
import { I18nKey } from "#/i18n/declaration"; import { RepositoryMicroagent } from "#/types/microagent-management"; import { Conversation } from "#/api/open-hands.types"; -import { useMicroagentManagementStore } from "#/state/microagent-management-store"; +import { useMicroagentManagementStore } from "#/stores/microagent-management-store"; import { cn } from "#/utils/utils"; import { GitRepository } from "#/types/git"; diff --git a/frontend/src/components/features/microagent-management/microagent-management-opening-pr.tsx b/frontend/src/components/features/microagent-management/microagent-management-opening-pr.tsx index f4a3beedc081..c9f3ad925da2 100644 --- a/frontend/src/components/features/microagent-management/microagent-management-opening-pr.tsx +++ b/frontend/src/components/features/microagent-management/microagent-management-opening-pr.tsx @@ -2,7 +2,7 @@ import { useTranslation } from "react-i18next"; import { I18nKey } from "#/i18n/declaration"; import { BrandButton } from "../settings/brand-button"; import { Loader } from "#/components/shared/loader"; -import { useMicroagentManagementStore } from "#/state/microagent-management-store"; +import { useMicroagentManagementStore } from "#/stores/microagent-management-store"; export function MicroagentManagementOpeningPr() { const { t } = useTranslation(); diff --git a/frontend/src/components/features/microagent-management/microagent-management-repo-microagents.tsx b/frontend/src/components/features/microagent-management/microagent-management-repo-microagents.tsx index 868580550a7c..b2fc4464c0a7 100644 --- a/frontend/src/components/features/microagent-management/microagent-management-repo-microagents.tsx +++ b/frontend/src/components/features/microagent-management/microagent-management-repo-microagents.tsx @@ -6,7 +6,7 @@ import { MicroagentManagementLearnThisRepo } from "./microagent-management-learn import { useRepositoryMicroagents } from "#/hooks/query/use-repository-microagents"; import { useMicroagentManagementConversations } from "#/hooks/query/use-microagent-management-conversations"; import { GitRepository } from "#/types/git"; -import { useMicroagentManagementStore } from "#/state/microagent-management-store"; +import { useMicroagentManagementStore } from "#/stores/microagent-management-store"; import { cn } from "#/utils/utils"; import { I18nKey } from "#/i18n/declaration"; diff --git a/frontend/src/components/features/microagent-management/microagent-management-review-pr.tsx b/frontend/src/components/features/microagent-management/microagent-management-review-pr.tsx index 1031b7202ed8..57f4ff129d0d 100644 --- a/frontend/src/components/features/microagent-management/microagent-management-review-pr.tsx +++ b/frontend/src/components/features/microagent-management/microagent-management-review-pr.tsx @@ -3,7 +3,7 @@ import { I18nKey } from "#/i18n/declaration"; import { BrandButton } from "../settings/brand-button"; import { getProviderName, constructPullRequestUrl } from "#/utils/utils"; import { Provider } from "#/types/settings"; -import { useMicroagentManagementStore } from "#/state/microagent-management-store"; +import { useMicroagentManagementStore } from "#/stores/microagent-management-store"; export function MicroagentManagementReviewPr() { const { t } = useTranslation(); diff --git a/frontend/src/components/features/microagent-management/microagent-management-sidebar-tabs.tsx b/frontend/src/components/features/microagent-management/microagent-management-sidebar-tabs.tsx index 378f0ad7a134..d7a088c60817 100644 --- 
a/frontend/src/components/features/microagent-management/microagent-management-sidebar-tabs.tsx +++ b/frontend/src/components/features/microagent-management/microagent-management-sidebar-tabs.tsx @@ -2,7 +2,7 @@ import { Tab, Tabs } from "@heroui/react"; import { useTranslation } from "react-i18next"; import { MicroagentManagementRepositories } from "./microagent-management-repositories"; import { I18nKey } from "#/i18n/declaration"; -import { useMicroagentManagementStore } from "#/state/microagent-management-store"; +import { useMicroagentManagementStore } from "#/stores/microagent-management-store"; interface MicroagentManagementSidebarTabsProps { isSearchLoading?: boolean; diff --git a/frontend/src/components/features/microagent-management/microagent-management-sidebar.tsx b/frontend/src/components/features/microagent-management/microagent-management-sidebar.tsx index 3ef952bb7925..99719a3f862c 100644 --- a/frontend/src/components/features/microagent-management/microagent-management-sidebar.tsx +++ b/frontend/src/components/features/microagent-management/microagent-management-sidebar.tsx @@ -6,7 +6,7 @@ import { MicroagentManagementSidebarTabs } from "./microagent-management-sidebar import { useGitRepositories } from "#/hooks/query/use-git-repositories"; import { useSearchRepositories } from "#/hooks/query/use-search-repositories"; import { GitProviderDropdown } from "#/components/features/home/git-provider-dropdown"; -import { useMicroagentManagementStore } from "#/state/microagent-management-store"; +import { useMicroagentManagementStore } from "#/stores/microagent-management-store"; import { GitRepository } from "#/types/git"; import { Provider } from "#/types/settings"; import { diff --git a/frontend/src/components/features/microagent-management/microagent-management-upsert-microagent-modal.tsx b/frontend/src/components/features/microagent-management/microagent-management-upsert-microagent-modal.tsx index 6e41583f0238..1c5bf37c80c8 100644 --- a/frontend/src/components/features/microagent-management/microagent-management-upsert-microagent-modal.tsx +++ b/frontend/src/components/features/microagent-management/microagent-management-upsert-microagent-modal.tsx @@ -5,7 +5,7 @@ import { ModalBackdrop } from "#/components/shared/modals/modal-backdrop"; import { ModalBody } from "#/components/shared/modals/modal-body"; import { BrandButton } from "../settings/brand-button"; import { I18nKey } from "#/i18n/declaration"; -import { useMicroagentManagementStore } from "#/state/microagent-management-store"; +import { useMicroagentManagementStore } from "#/stores/microagent-management-store"; import XIcon from "#/icons/x.svg?react"; import { cn, extractRepositoryInfo } from "#/utils/utils"; import { BadgeInput } from "#/components/shared/inputs/badge-input"; diff --git a/frontend/src/components/features/microagent-management/microagent-management-view-microagent-content.tsx b/frontend/src/components/features/microagent-management/microagent-management-view-microagent-content.tsx index dc5b5fecaa59..d89ce2d19914 100644 --- a/frontend/src/components/features/microagent-management/microagent-management-view-microagent-content.tsx +++ b/frontend/src/components/features/microagent-management/microagent-management-view-microagent-content.tsx @@ -1,16 +1,10 @@ import { useTranslation } from "react-i18next"; import { Spinner } from "@heroui/react"; -import Markdown from "react-markdown"; -import remarkGfm from "remark-gfm"; -import remarkBreaks from "remark-breaks"; -import { code } from 
"../markdown/code"; -import { ul, ol } from "../markdown/list"; -import { paragraph } from "../markdown/paragraph"; -import { anchor } from "../markdown/anchor"; -import { useMicroagentManagementStore } from "#/state/microagent-management-store"; +import { useMicroagentManagementStore } from "#/stores/microagent-management-store"; import { useRepositoryMicroagentContent } from "#/hooks/query/use-repository-microagent-content"; import { I18nKey } from "#/i18n/declaration"; import { extractRepositoryInfo } from "#/utils/utils"; +import { MarkdownRenderer } from "../markdown/markdown-renderer"; export function MicroagentManagementViewMicroagentContent() { const { t } = useTranslation(); @@ -49,18 +43,9 @@ export function MicroagentManagementViewMicroagentContent() { )} {microagentData && !isLoading && !error && ( - + {microagentData.content} - + )} ); diff --git a/frontend/src/components/features/microagent-management/microagent-management-view-microagent-header.tsx b/frontend/src/components/features/microagent-management/microagent-management-view-microagent-header.tsx index bf28adbabeb5..a60f055adb76 100644 --- a/frontend/src/components/features/microagent-management/microagent-management-view-microagent-header.tsx +++ b/frontend/src/components/features/microagent-management/microagent-management-view-microagent-header.tsx @@ -2,7 +2,7 @@ import { useTranslation } from "react-i18next"; import { BrandButton } from "../settings/brand-button"; import { getProviderName, constructMicroagentUrl } from "#/utils/utils"; import { I18nKey } from "#/i18n/declaration"; -import { useMicroagentManagementStore } from "#/state/microagent-management-store"; +import { useMicroagentManagementStore } from "#/stores/microagent-management-store"; export function MicroagentManagementViewMicroagentHeader() { const { t } = useTranslation(); diff --git a/frontend/src/components/features/microagent-management/microagent-management-view-microagent.tsx b/frontend/src/components/features/microagent-management/microagent-management-view-microagent.tsx index 2deaf1ef22e4..a7c6b6bdb8a1 100644 --- a/frontend/src/components/features/microagent-management/microagent-management-view-microagent.tsx +++ b/frontend/src/components/features/microagent-management/microagent-management-view-microagent.tsx @@ -1,4 +1,4 @@ -import { useMicroagentManagementStore } from "#/state/microagent-management-store"; +import { useMicroagentManagementStore } from "#/stores/microagent-management-store"; import { MicroagentManagementViewMicroagentHeader } from "./microagent-management-view-microagent-header"; import { MicroagentManagementViewMicroagentContent } from "./microagent-management-view-microagent-content"; diff --git a/frontend/src/components/features/payment/setup-payment-modal.tsx b/frontend/src/components/features/payment/setup-payment-modal.tsx index 30cb0a4e54c9..7d8883a719af 100644 --- a/frontend/src/components/features/payment/setup-payment-modal.tsx +++ b/frontend/src/components/features/payment/setup-payment-modal.tsx @@ -1,24 +1,14 @@ -import { useMutation } from "@tanstack/react-query"; import { Trans, useTranslation } from "react-i18next"; import { I18nKey } from "#/i18n/declaration"; import OpenHandsLogo from "#/assets/branding/openhands-logo.svg?react"; import { ModalBackdrop } from "#/components/shared/modals/modal-backdrop"; import { ModalBody } from "#/components/shared/modals/modal-body"; -import BillingService from "#/api/billing-service/billing-service.api"; import { BrandButton } from "../settings/brand-button"; 
-import { displayErrorToast } from "#/utils/custom-toast-handlers"; +import { useCreateBillingSession } from "#/hooks/mutation/use-create-billing-session"; export function SetupPaymentModal() { const { t } = useTranslation(); - const { mutate, isPending } = useMutation({ - mutationFn: BillingService.createBillingSessionResponse, - onSuccess: (data) => { - window.location.href = data; - }, - onError: () => { - displayErrorToast(t(I18nKey.BILLING$ERROR_WHILE_CREATING_SESSION)); - }, - }); + const { mutate, isPending } = useCreateBillingSession(); return ( diff --git a/frontend/src/components/features/settings/api-keys-manager.tsx b/frontend/src/components/features/settings/api-keys-manager.tsx index 82d86fb4a98c..20a8807aa06a 100644 --- a/frontend/src/components/features/settings/api-keys-manager.tsx +++ b/frontend/src/components/features/settings/api-keys-manager.tsx @@ -13,10 +13,8 @@ import { CreateApiKeyModal } from "./create-api-key-modal"; import { DeleteApiKeyModal } from "./delete-api-key-modal"; import { NewApiKeyModal } from "./new-api-key-modal"; import { useApiKeys } from "#/hooks/query/use-api-keys"; -import { - useLlmApiKey, - useRefreshLlmApiKey, -} from "#/hooks/query/use-llm-api-key"; +import { useLlmApiKey } from "#/hooks/query/use-llm-api-key"; +import { useRefreshLlmApiKey } from "#/hooks/mutation/use-refresh-llm-api-key"; interface LlmApiKeyManagerProps { llmApiKey: { key: string | null } | undefined; diff --git a/frontend/src/components/features/settings/settings-layout.tsx b/frontend/src/components/features/settings/settings-layout.tsx index 6ac82cf8d0ff..7d00ab259617 100644 --- a/frontend/src/components/features/settings/settings-layout.tsx +++ b/frontend/src/components/features/settings/settings-layout.tsx @@ -1,16 +1,11 @@ import { useState } from "react"; import { MobileHeader } from "./mobile-header"; import { SettingsNavigation } from "./settings-navigation"; - -interface NavigationItem { - to: string; - icon: React.ReactNode; - text: string; -} +import { SettingsNavItem } from "#/constants/settings-nav"; interface SettingsLayoutProps { children: React.ReactNode; - navigationItems: NavigationItem[]; + navigationItems: SettingsNavItem[]; } export function SettingsLayout({ @@ -19,13 +14,8 @@ export function SettingsLayout({ }: SettingsLayoutProps) { const [isMobileMenuOpen, setIsMobileMenuOpen] = useState(false); - const toggleMobileMenu = () => { - setIsMobileMenuOpen(!isMobileMenuOpen); - }; - - const closeMobileMenu = () => { - setIsMobileMenuOpen(false); - }; + const toggleMobileMenu = () => setIsMobileMenuOpen(!isMobileMenuOpen); + const closeMobileMenu = () => setIsMobileMenuOpen(false); return (
    @@ -34,7 +24,6 @@ export function SettingsLayout({ isMobileMenuOpen={isMobileMenuOpen} onToggleMenu={toggleMobileMenu} /> - {/* Desktop layout with navigation and main content */}
    {/* Navigation */} @@ -43,7 +32,6 @@ export function SettingsLayout({ onCloseMobileMenu={closeMobileMenu} navigationItems={navigationItems} /> - {/* Main content */}
    {children} diff --git a/frontend/src/components/features/settings/settings-navigation.tsx b/frontend/src/components/features/settings/settings-navigation.tsx index ce9e49aa0917..5a35f0149597 100644 --- a/frontend/src/components/features/settings/settings-navigation.tsx +++ b/frontend/src/components/features/settings/settings-navigation.tsx @@ -5,17 +5,12 @@ import { Typography } from "#/ui/typography"; import { I18nKey } from "#/i18n/declaration"; import SettingsIcon from "#/icons/settings-gear.svg?react"; import CloseIcon from "#/icons/close.svg?react"; - -interface NavigationItem { - to: string; - icon: React.ReactNode; - text: string; -} +import { SettingsNavItem } from "#/constants/settings-nav"; interface SettingsNavigationProps { isMobileMenuOpen: boolean; onCloseMobileMenu: () => void; - navigationItems: NavigationItem[]; + navigationItems: SettingsNavItem[]; } export function SettingsNavigation({ @@ -34,7 +29,6 @@ export function SettingsNavigation({ onClick={onCloseMobileMenu} /> )} - {/* Navigation sidebar */}
    - +
    - settings?.EMAIL_VERIFIED === false + settings?.email_verified === false ? null : setConversationPanelIsOpen((prev) => !prev) } - disabled={settings?.EMAIL_VERIFIED === false} + disabled={settings?.email_verified === false} />
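Editor's note: this diff renames the frontend settings key from `EMAIL_VERIFIED` to snake_case `email_verified` and keeps the explicit `=== false` comparison, so a still-loading settings object (undefined or null) never triggers the redirect. A minimal sketch of that check, assuming a simplified stand-in for the repository's settings type:

```ts
// Simplified stand-in for the settings shape; only the field used by the guard.
interface SettingsLike {
  email_verified?: boolean | null;
}

export function shouldRedirectToUserSettings(
  settings: SettingsLike | undefined,
  pathname: string,
): boolean {
  // Redirect only when the backend has explicitly reported an unverified email;
  // undefined/null (settings still loading) must not lock the user out.
  if (settings?.email_verified !== false) return false;
  // /settings/user itself stays reachable so the user can verify their email.
  return pathname !== "/settings/user";
}
```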
    diff --git a/frontend/src/components/features/sidebar/user-actions.tsx b/frontend/src/components/features/sidebar/user-actions.tsx index aaa766c8855a..27b1543b07d1 100644 --- a/frontend/src/components/features/sidebar/user-actions.tsx +++ b/frontend/src/components/features/sidebar/user-actions.tsx @@ -58,6 +58,9 @@ export function UserActions({ onLogout, user, isLoading }: UserActionsProps) { className={cn( "opacity-0 pointer-events-none group-hover:opacity-100 group-hover:pointer-events-auto", showMenu && "opacity-100 pointer-events-auto", + // Invisible hover bridge: extends hover zone to create a "safe corridor" + // for diagonal mouse movement to the menu (only active when menu is visible) + "group-hover:before:absolute group-hover:before:bottom-0 group-hover:before:right-0 group-hover:before:w-[200px] group-hover:before:h-[300px]", )} > ; currentModel?: string; - onChange?: (model: string | null) => void; + onChange?: (provider: string | null, model: string | null) => void; + onDefaultValuesChanged?: ( + provider: string | null, + model: string | null, + ) => void; wrapperClassName?: string; labelClassName?: string; } @@ -31,6 +35,7 @@ export function ModelSelector({ models, currentModel, onChange, + onDefaultValuesChanged, wrapperClassName, labelClassName, }: ModelSelectorProps) { @@ -56,6 +61,7 @@ export function ModelSelector({ setLitellmId(currentModel); setSelectedProvider(provider); setSelectedModel(model); + onDefaultValuesChanged?.(provider, model); } }, [currentModel]); @@ -65,6 +71,7 @@ export function ModelSelector({ const separator = models[provider]?.separator || ""; setLitellmId(provider + separator); + onChange?.(provider, null); }; const handleChangeModel = (model: string) => { @@ -76,7 +83,7 @@ export function ModelSelector({ } setLitellmId(fullModel); setSelectedModel(model); - onChange?.(fullModel); + onChange?.(selectedProvider, model); }; const clear = () => { diff --git a/frontend/src/components/shared/modals/settings/settings-form.tsx b/frontend/src/components/shared/modals/settings/settings-form.tsx index e08b59c8e0c8..b31b04eb5310 100644 --- a/frontend/src/components/shared/modals/settings/settings-form.tsx +++ b/frontend/src/components/shared/modals/settings/settings-form.tsx @@ -41,11 +41,11 @@ export function SettingsForm({ settings, models, onClose }: SettingsFormProps) { onClose(); posthog.capture("settings_saved", { - LLM_MODEL: newSettings.LLM_MODEL, - LLM_API_KEY_SET: newSettings.LLM_API_KEY_SET ? "SET" : "UNSET", - SEARCH_API_KEY_SET: newSettings.SEARCH_API_KEY ? "SET" : "UNSET", + LLM_MODEL: newSettings.llm_model, + LLM_API_KEY_SET: newSettings.llm_api_key_set ? "SET" : "UNSET", + SEARCH_API_KEY_SET: newSettings.search_api_key ? "SET" : "UNSET", REMOTE_RUNTIME_RESOURCE_FACTOR: - newSettings.REMOTE_RUNTIME_RESOURCE_FACTOR, + newSettings.remote_runtime_resource_factor, }); }, }); @@ -67,7 +67,7 @@ export function SettingsForm({ settings, models, onClose }: SettingsFormProps) { } }; - const isLLMKeySet = settings.LLM_API_KEY_SET; + const isLLMKeySet = settings.llm_api_key_set; return (
    @@ -80,7 +80,7 @@ export function SettingsForm({ settings, models, onClose }: SettingsFormProps) {
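Editor's note: the ModelSelector hunk above changes `onChange` to receive the provider and model separately (instead of a pre-joined LiteLLM id) and adds `onDefaultValuesChanged`, which fires once the defaults are derived from `currentModel`. The sketch below shows how a parent might reassemble the id from the two arguments; `joinLitellmId`, `setLitellmId`, and `seedFormState` are illustrative names, not repository code.

```tsx
// Assumed shape: per-provider separator used when joining provider and model.
type ModelsByProvider = Record<string, { separator: string; models: string[] }>;

export function joinLitellmId(
  models: ModelsByProvider,
  provider: string | null,
  model: string | null,
): string | null {
  // Incomplete selection (e.g. provider chosen but no model yet) yields nothing to persist.
  if (!provider || !model) return null;
  const separator = models[provider]?.separator ?? "";
  return `${provider}${separator}${model}`;
}

// Hypothetical usage with the updated component API:
// <ModelSelector
//   models={models}
//   currentModel={settings.llm_model}
//   onChange={(provider, model) => setLitellmId(joinLitellmId(models, provider, model))}
//   onDefaultValuesChanged={(provider, model) => seedFormState(provider, model)}
// />
```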
    diff --git a/frontend/src/components/v1/chat/event-content-helpers/create-skill-ready-event.ts b/frontend/src/components/v1/chat/event-content-helpers/create-skill-ready-event.ts new file mode 100644 index 000000000000..4682b8a90f59 --- /dev/null +++ b/frontend/src/components/v1/chat/event-content-helpers/create-skill-ready-event.ts @@ -0,0 +1,56 @@ +import { MessageEvent } from "#/types/v1/core"; +import { BaseEvent } from "#/types/v1/core/base/event"; +import { getSkillReadyContent } from "./get-skill-ready-content"; + +/** + * Synthetic event type for Skill Ready events. + * This extends BaseEvent and includes a marker to identify it as a skill ready event. + */ +export interface SkillReadyEvent extends BaseEvent { + _isSkillReadyEvent: true; + _skillReadyContent: string; +} + +/** + * Type guard for Skill Ready events. + */ +export const isSkillReadyEvent = (event: unknown): event is SkillReadyEvent => + typeof event === "object" && + event !== null && + "_isSkillReadyEvent" in event && + event._isSkillReadyEvent === true; + +/** + * Creates a synthetic "Skill Ready" event from a user MessageEvent. + * This event appears as originating from the agent and contains formatted + * information about activated skills and extended content. + */ +export const createSkillReadyEvent = ( + userEvent: MessageEvent, +): SkillReadyEvent => { + // Support both activated_skills and activated_microagents field names + const activatedSkills = + (userEvent as unknown as { activated_skills?: string[] }) + .activated_skills || + userEvent.activated_microagents || + []; + + const extendedContent = userEvent.extended_content || []; + + // Only create event if we have skills or extended content + if (activatedSkills.length === 0 && extendedContent.length === 0) { + throw new Error( + "Cannot create skill ready event without activated skills or extended content", + ); + } + + const content = getSkillReadyContent(activatedSkills, extendedContent); + + return { + id: `${userEvent.id}-skill-ready`, + timestamp: userEvent.timestamp, + source: "agent", + _isSkillReadyEvent: true, + _skillReadyContent: content, + }; +}; diff --git a/frontend/src/components/v1/chat/event-content-helpers/get-action-content.ts b/frontend/src/components/v1/chat/event-content-helpers/get-action-content.ts index fe6bf842c3e1..148949365215 100644 --- a/frontend/src/components/v1/chat/event-content-helpers/get-action-content.ts +++ b/frontend/src/components/v1/chat/event-content-helpers/get-action-content.ts @@ -4,6 +4,7 @@ import i18n from "#/i18n"; import { SecurityRisk } from "#/types/v1/core/base/common"; import { ExecuteBashAction, + TerminalAction, FileEditorAction, StrReplaceEditorAction, MCPToolAction, @@ -58,7 +59,7 @@ const getFileEditorActionContent = ( // Command Actions const getExecuteBashActionContent = ( - event: ActionEvent, + event: ActionEvent, ): string => { let content = `Command:\n\`${event.action.command}\``; @@ -131,27 +132,61 @@ type BrowserAction = const getBrowserActionContent = (action: BrowserAction): string => { switch (action.kind) { - case "BrowserNavigateAction": - if ("url" in action) { - return `Browsing ${action.url}`; + case "BrowserNavigateAction": { + let content = `Browsing ${action.url}`; + if (action.new_tab) { + content += `\n**New Tab:** Yes`; } - break; - case "BrowserClickAction": - case "BrowserTypeAction": - case "BrowserGetStateAction": - case "BrowserGetContentAction": - case "BrowserScrollAction": - case "BrowserGoBackAction": - case "BrowserListTabsAction": - case 
"BrowserSwitchTabAction": - case "BrowserCloseTabAction": - // These browser actions typically don't need detailed content display + return content; + } + case "BrowserClickAction": { + let content = `**Element Index:** ${action.index}`; + if (action.new_tab) { + content += `\n**New Tab:** Yes`; + } + return content; + } + case "BrowserTypeAction": { + const textPreview = + action.text.length > 50 + ? `${action.text.slice(0, 50)}...` + : action.text; + return `**Element Index:** ${action.index}\n**Text:** ${textPreview}`; + } + case "BrowserGetStateAction": { + if (action.include_screenshot) { + return `**Include Screenshot:** Yes`; + } + return getNoContentActionContent(); + } + case "BrowserGetContentAction": { + const parts: string[] = []; + if (action.extract_links) { + parts.push(`**Extract Links:** Yes`); + } + if (action.start_from_char > 0) { + parts.push(`**Start From Character:** ${action.start_from_char}`); + } + return parts.length > 0 ? parts.join("\n") : getNoContentActionContent(); + } + case "BrowserScrollAction": { + return `**Direction:** ${action.direction}`; + } + case "BrowserGoBackAction": { return getNoContentActionContent(); + } + case "BrowserListTabsAction": { + return getNoContentActionContent(); + } + case "BrowserSwitchTabAction": { + return `**Tab ID:** ${action.tab_id}`; + } + case "BrowserCloseTabAction": { + return `**Tab ID:** ${action.tab_id}`; + } default: return getNoContentActionContent(); } - - return getNoContentActionContent(); }; export const getActionContent = (event: ActionEvent): string => { @@ -164,8 +199,9 @@ export const getActionContent = (event: ActionEvent): string => { return getFileEditorActionContent(action); case "ExecuteBashAction": + case "TerminalAction": return getExecuteBashActionContent( - event as ActionEvent, + event as ActionEvent, ); case "MCPToolAction": diff --git a/frontend/src/components/v1/chat/event-content-helpers/get-event-content.tsx b/frontend/src/components/v1/chat/event-content-helpers/get-event-content.tsx index 7eab7df1a77d..dec57f385f00 100644 --- a/frontend/src/components/v1/chat/event-content-helpers/get-event-content.tsx +++ b/frontend/src/components/v1/chat/event-content-helpers/get-event-content.tsx @@ -8,6 +8,7 @@ import { getActionContent } from "./get-action-content"; import { getObservationContent } from "./get-observation-content"; import { TaskTrackingObservationContent } from "../task-tracking/task-tracking-observation-content"; import { TaskTrackerObservation } from "#/types/v1/core/base/observation"; +import { SkillReadyEvent, isSkillReadyEvent } from "./create-skill-ready-event"; import i18n from "#/i18n"; const trimText = (text: string, maxLength: number): string => { @@ -49,6 +50,7 @@ const getActionEventTitle = (event: OpenHandsEvent): React.ReactNode => { switch (actionType) { case "ExecuteBashAction": + case "TerminalAction": actionKey = "ACTION_MESSAGE$RUN"; actionValues = { command: trimText(event.action.command, 80), @@ -83,11 +85,20 @@ const getActionEventTitle = (event: OpenHandsEvent): React.ReactNode => { actionKey = "ACTION_MESSAGE$TASK_TRACKING"; break; case "BrowserNavigateAction": + case "BrowserClickAction": + case "BrowserTypeAction": + case "BrowserGetStateAction": + case "BrowserGetContentAction": + case "BrowserScrollAction": + case "BrowserGoBackAction": + case "BrowserListTabsAction": + case "BrowserSwitchTabAction": + case "BrowserCloseTabAction": actionKey = "ACTION_MESSAGE$BROWSE"; break; default: // For unknown actions, use the type name - return 
actionType.replace("Action", "").toUpperCase(); + return String(actionType).replace("Action", "").toUpperCase(); } if (actionKey) { @@ -110,6 +121,7 @@ const getObservationEventTitle = (event: OpenHandsEvent): React.ReactNode => { switch (observationType) { case "ExecuteBashObservation": + case "TerminalObservation": observationKey = "OBSERVATION_MESSAGE$RUN"; observationValues = { command: event.observation.command @@ -147,6 +159,9 @@ const getObservationEventTitle = (event: OpenHandsEvent): React.ReactNode => { } break; } + case "ThinkObservation": + observationKey = "OBSERVATION_MESSAGE$THINK"; + break; default: // For unknown observations, use the type name return observationType.replace("Observation", "").toUpperCase(); @@ -159,11 +174,21 @@ const getObservationEventTitle = (event: OpenHandsEvent): React.ReactNode => { return observationType; }; -export const getEventContent = (event: OpenHandsEvent) => { +export const getEventContent = (event: OpenHandsEvent | SkillReadyEvent) => { let title: React.ReactNode = ""; let details: string | React.ReactNode = ""; - if (isActionEvent(event)) { + // Handle Skill Ready events first + if (isSkillReadyEvent(event)) { + // Use translation key if available, otherwise use "SKILL READY" + const skillReadyKey = "OBSERVATION_MESSAGE$SKILL_READY"; + if (i18n.exists(skillReadyKey)) { + title = createTitleFromKey(skillReadyKey, {}); + } else { + title = "Skill Ready"; + } + details = event._skillReadyContent; + } else if (isActionEvent(event)) { title = getActionEventTitle(event); details = getActionContent(event); } else if (isObservationEvent(event)) { diff --git a/frontend/src/components/v1/chat/event-content-helpers/get-observation-content.ts b/frontend/src/components/v1/chat/event-content-helpers/get-observation-content.ts index 6bfc3734e53a..7fb1c2ce1ceb 100644 --- a/frontend/src/components/v1/chat/event-content-helpers/get-observation-content.ts +++ b/frontend/src/components/v1/chat/event-content-helpers/get-observation-content.ts @@ -8,6 +8,7 @@ import { ThinkObservation, BrowserObservation, ExecuteBashObservation, + TerminalObservation, FileEditorObservation, StrReplaceEditorObservation, TaskTrackerObservation, @@ -23,6 +24,15 @@ const getFileEditorObservationContent = ( return `**Error:**\n${observation.error}`; } + // Extract text content from the observation if it exists + const textContent = + "content" in observation && Array.isArray(observation.content) + ? 
observation.content + .filter((c) => c.type === "text") + .map((c) => c.text) + .join("\n") + : null; + const successMessage = getObservationResult(event) === "success"; // For view commands or successful edits with content changes, format as code block @@ -34,16 +44,18 @@ const getFileEditorObservationContent = ( observation.new_content) || observation.command === "view" ) { - return `\`\`\`\n${observation.output}\n\`\`\``; + // Prefer content over output for view commands, fallback to output if content is not available + const displayContent = textContent || observation.output; + return `\`\`\`\n${displayContent}\n\`\`\``; } - // For other commands, return the output as-is - return observation.output; + // For other commands, prefer content if available, otherwise use output + return textContent || observation.output; }; // Command Observations -const getExecuteBashObservationContent = ( - event: ObservationEvent, +const getTerminalObservationContent = ( + event: ObservationEvent, ): string => { const { observation } = event; @@ -59,7 +71,18 @@ const getExecuteBashObservationContent = ( content = `${content.slice(0, MAX_CONTENT_LENGTH)}...`; } - return `Output:\n\`\`\`sh\n${content.trim() || i18n.t("OBSERVATION$COMMAND_NO_OUTPUT")}\n\`\`\``; + // Build the output string + let output = ""; + + // Display the command if available + if (observation.command) { + output += `Command: \`${observation.command}\`\n\n`; + } + + // Display the output + output += `Output:\n\`\`\`sh\n${content.trim() || i18n.t("OBSERVATION$COMMAND_NO_OUTPUT")}\n\`\`\``; + + return output; }; // Tool Observations @@ -68,14 +91,25 @@ const getBrowserObservationContent = ( ): string => { const { observation } = event; + // Extract text content from the observation + const textContent = + "content" in observation && Array.isArray(observation.content) + ? 
observation.content + .filter((c) => c.type === "text") + .map((c) => c.text) + .join("\n") + : observation.output || ""; + let contentDetails = ""; - if ("error" in observation && observation.error) { - contentDetails += `**Error:**\n${observation.error}\n\n`; + if (observation.error) { + contentDetails += `**Error:**\n${observation.error}`; + } else if (textContent) { + contentDetails += `**Output:**\n${textContent}`; + } else { + contentDetails += "Browser action completed successfully."; } - contentDetails += `**Output:**\n${observation.output}`; - if (contentDetails.length > MAX_CONTENT_LENGTH) { contentDetails = `${contentDetails.slice(0, MAX_CONTENT_LENGTH)}...(truncated)`; } @@ -156,14 +190,35 @@ const getThinkObservationContent = ( event: ObservationEvent, ): string => { const { observation } = event; - return observation.content || ""; + + const textContent = observation.content + .filter((c) => c.type === "text") + .map((c) => c.text) + .join("\n"); + + return textContent || ""; }; const getFinishObservationContent = ( event: ObservationEvent, ): string => { const { observation } = event; - return observation.message || ""; + + // Extract text content from the observation + const textContent = observation.content + .filter((c) => c.type === "text") + .map((c) => c.text) + .join("\n"); + + let content = ""; + + if (observation.is_error) { + content += `**Error:**\n${textContent}`; + } else { + content += textContent; + } + + return content; }; export const getObservationContent = (event: ObservationEvent): string => { @@ -179,8 +234,9 @@ export const getObservationContent = (event: ObservationEvent): string => { ); case "ExecuteBashObservation": - return getExecuteBashObservationContent( - event as ObservationEvent, + case "TerminalObservation": + return getTerminalObservationContent( + event as ObservationEvent, ); case "BrowserObservation": diff --git a/frontend/src/components/v1/chat/event-content-helpers/get-observation-result.ts b/frontend/src/components/v1/chat/event-content-helpers/get-observation-result.ts index e5a52bfe9540..790ecb00cf86 100644 --- a/frontend/src/components/v1/chat/event-content-helpers/get-observation-result.ts +++ b/frontend/src/components/v1/chat/event-content-helpers/get-observation-result.ts @@ -17,6 +17,15 @@ export const getObservationResult = ( if (exitCode === 0 || metadata.exit_code === 0) return "success"; // Command executed successfully return "error"; // Command failed } + case "TerminalObservation": { + const exitCode = + observation.exit_code ?? observation.metadata.exit_code ?? null; + + if (observation.timeout || exitCode === -1) return "timeout"; + if (exitCode === 0) return "success"; + if (observation.is_error) return "error"; + return "success"; + } case "FileEditorObservation": case "StrReplaceEditorObservation": // Check if there's an error diff --git a/frontend/src/components/v1/chat/event-content-helpers/get-skill-ready-content.ts b/frontend/src/components/v1/chat/event-content-helpers/get-skill-ready-content.ts new file mode 100644 index 000000000000..5f4b14f84698 --- /dev/null +++ b/frontend/src/components/v1/chat/event-content-helpers/get-skill-ready-content.ts @@ -0,0 +1,108 @@ +import { TextContent } from "#/types/v1/core/base/common"; + +/** + * Extracts all text content from an array of TextContent items. + */ +const extractAllText = (extendedContent: TextContent[]): string => + extendedContent + .filter((c) => c.type === "text") + .map((c) => c.text) + .join(""); + +/** + * Extracts all blocks from the given text. 
+ * Returns an array of content strings (without the wrapper tags). + */ +const extractExtraInfoBlocks = (text: string): string[] => { + const blocks: string[] = []; + const blockRegex = /([\s\S]*?)<\/EXTRA_INFO>/gi; + let match = blockRegex.exec(text); + + while (match !== null) { + const blockContent = match[1].trim(); + if (blockContent.length > 0) { + blocks.push(blockContent); + } + match = blockRegex.exec(text); + } + + return blocks; +}; + +/** + * Formats a single skill with its corresponding content block. + */ +const formatSkillWithContent = ( + skill: string, + contentBlock: string | undefined, +): string => { + let formatted = `\n\n- **${skill}**`; + + if (contentBlock && contentBlock.trim().length > 0) { + formatted += `\n\n${contentBlock}`; + } + + return formatted; +}; + +/** + * Formats skills paired with their corresponding extended content blocks. + */ +const formatSkillKnowledge = ( + activatedSkills: string[], + extraInfoBlocks: string[], +): string => { + let content = `\n\n**Triggered Skill Knowledge:**`; + + activatedSkills.forEach((skill, index) => { + const contentBlock = + index < extraInfoBlocks.length ? extraInfoBlocks[index] : undefined; + content += formatSkillWithContent(skill, contentBlock); + }); + + return content; +}; + +/** + * Formats extended content blocks when no skills are present. + */ +const formatExtendedContentOnly = (extraInfoBlocks: string[]): string => { + let content = `\n\n**Extended Content:**`; + + extraInfoBlocks.forEach((block) => { + if (block.trim().length > 0) { + content += `\n\n${block}`; + } + }); + + return content; +}; + +/** + * Formats activated skills and extended content into markdown for display. + * Similar to how v0 formats microagent knowledge in recall observations. + * + * Each skill is paired with its corresponding block by index. 
+ */ +export const getSkillReadyContent = ( + activatedSkills: string[], + extendedContent: TextContent[], +): string => { + // Extract all blocks from extended_content + const extraInfoBlocks: string[] = []; + if (extendedContent && extendedContent.length > 0) { + const allText = extractAllText(extendedContent); + extraInfoBlocks.push(...extractExtraInfoBlocks(allText)); + } + + // Format output based on what we have + if (activatedSkills && activatedSkills.length > 0) { + return formatSkillKnowledge(activatedSkills, extraInfoBlocks); + } + + if (extraInfoBlocks.length > 0) { + return formatExtendedContentOnly(extraInfoBlocks); + } + + return ""; +}; diff --git a/frontend/src/components/v1/chat/event-content-helpers/parse-message-from-event.ts b/frontend/src/components/v1/chat/event-content-helpers/parse-message-from-event.ts index 17824a51c8c2..8e2a0cb25371 100644 --- a/frontend/src/components/v1/chat/event-content-helpers/parse-message-from-event.ts +++ b/frontend/src/components/v1/chat/event-content-helpers/parse-message-from-event.ts @@ -5,7 +5,7 @@ export const parseMessageFromEvent = (event: MessageEvent): string => { const message = event.llm_message; // Safety check: ensure llm_message exists and has content - if (!message || !message.content) { + if (!message?.content) { return ""; } diff --git a/frontend/src/components/v1/chat/event-content-helpers/should-render-event.ts b/frontend/src/components/v1/chat/event-content-helpers/should-render-event.ts index a5fdc6225263..1171c21c92a4 100644 --- a/frontend/src/components/v1/chat/event-content-helpers/should-render-event.ts +++ b/frontend/src/components/v1/chat/event-content-helpers/should-render-event.ts @@ -18,6 +18,10 @@ export const shouldRenderEvent = (event: OpenHandsEvent) => { // For V1, action is an object with kind property const actionType = event.action.kind; + if (!actionType) { + return false; + } + // Hide user commands from the chat interface if (actionType === "ExecuteBashAction" && event.source === "user") { return false; diff --git a/frontend/src/components/v1/chat/event-message-components/generic-event-message-wrapper.tsx b/frontend/src/components/v1/chat/event-message-components/generic-event-message-wrapper.tsx index c4c53f7f1a3f..95c2652549a8 100644 --- a/frontend/src/components/v1/chat/event-message-components/generic-event-message-wrapper.tsx +++ b/frontend/src/components/v1/chat/event-message-components/generic-event-message-wrapper.tsx @@ -3,10 +3,16 @@ import { GenericEventMessage } from "../../../features/chat/generic-event-messag import { getEventContent } from "../event-content-helpers/get-event-content"; import { getObservationResult } from "../event-content-helpers/get-observation-result"; import { isObservationEvent } from "#/types/v1/type-guards"; +import { + SkillReadyEvent, + isSkillReadyEvent, +} from "../event-content-helpers/create-skill-ready-event"; import { V1ConfirmationButtons } from "#/components/shared/buttons/v1-confirmation-buttons"; +import { ObservationResultStatus } from "../../../features/chat/event-content-helpers/get-observation-result"; +import { MarkdownRenderer } from "#/components/features/markdown/markdown-renderer"; interface GenericEventMessageWrapperProps { - event: OpenHandsEvent; + event: OpenHandsEvent | SkillReadyEvent; isLastMessage: boolean; } @@ -16,11 +22,29 @@ export function GenericEventMessageWrapper({ }: GenericEventMessageWrapperProps) { const { title, details } = getEventContent(event); - if ( - isObservationEvent(event) && - event.observation.kind === 
"TaskTrackerObservation" - ) { - return
    {details}
    ; + // SkillReadyEvent is not an observation event, so skip the observation checks + if (!isSkillReadyEvent(event)) { + if (isObservationEvent(event)) { + if (event.observation.kind === "TaskTrackerObservation") { + return
    {details}
    ; + } + if (event.observation.kind === "FinishObservation") { + return ( + + {details as string} + + ); + } + } + } + + // Determine success status + let success: ObservationResultStatus | undefined; + if (isSkillReadyEvent(event)) { + // Skill Ready events should show success indicator, same as v0 recall observations + success = "success"; + } else if (isObservationEvent(event)) { + success = getObservationResult(event); } return ( @@ -28,9 +52,7 @@ export function GenericEventMessageWrapper({ {isLastMessage && } diff --git a/frontend/src/components/v1/chat/event-message-components/observation-pair-event-message.tsx b/frontend/src/components/v1/chat/event-message-components/observation-pair-event-message.tsx index aa0bbc09b46d..221d758dd62a 100644 --- a/frontend/src/components/v1/chat/event-message-components/observation-pair-event-message.tsx +++ b/frontend/src/components/v1/chat/event-message-components/observation-pair-event-message.tsx @@ -34,7 +34,12 @@ export function ObservationPairEventMessage({ .map((t) => t.text) .join("\n"); - if (thoughtContent && event.action.kind !== "ThinkAction") { + // Defensive check: ensure action exists and has kind property + if ( + thoughtContent && + event.action?.kind && + event.action.kind !== "ThinkAction" + ) { return (
    diff --git a/frontend/src/components/v1/chat/event-message-components/user-assistant-event-message.tsx b/frontend/src/components/v1/chat/event-message-components/user-assistant-event-message.tsx index 6455dadbe3ad..a51b912860a1 100644 --- a/frontend/src/components/v1/chat/event-message-components/user-assistant-event-message.tsx +++ b/frontend/src/components/v1/chat/event-message-components/user-assistant-event-message.tsx @@ -22,6 +22,7 @@ interface UserAssistantEventMessageProps { tooltip?: string; }>; isLastMessage: boolean; + isFromPlanningAgent: boolean; } export function UserAssistantEventMessage({ @@ -31,6 +32,7 @@ export function UserAssistantEventMessage({ microagentPRUrl, actions, isLastMessage, + isFromPlanningAgent, }: UserAssistantEventMessageProps) { const message = parseMessageFromEvent(event); @@ -46,7 +48,12 @@ export function UserAssistantEventMessage({ return ( <> - + {imageUrls.length > 0 && ( )} diff --git a/frontend/src/components/v1/chat/event-message.tsx b/frontend/src/components/v1/chat/event-message.tsx index dbe327b31b38..95690d89844b 100644 --- a/frontend/src/components/v1/chat/event-message.tsx +++ b/frontend/src/components/v1/chat/event-message.tsx @@ -5,6 +5,7 @@ import { isActionEvent, isObservationEvent, isAgentErrorEvent, + isUserMessageEvent, } from "#/types/v1/type-guards"; import { MicroagentStatus } from "#/types/microagent-status"; import { useConfig } from "#/hooks/query/use-config"; @@ -17,9 +18,10 @@ import { GenericEventMessageWrapper, ThoughtEventMessage, } from "./event-message-components"; +import { createSkillReadyEvent } from "./event-content-helpers/create-skill-ready-event"; interface EventMessageProps { - event: OpenHandsEvent; + event: OpenHandsEvent & { isFromPlanningAgent?: boolean }; messages: OpenHandsEvent[]; isLastMessage: boolean; microagentStatus?: MicroagentStatus | null; @@ -33,6 +35,104 @@ interface EventMessageProps { isInLast10Actions: boolean; } +/** + * Extracts activated skills from a MessageEvent, supporting both + * activated_skills and activated_microagents field names. + */ +const getActivatedSkills = (event: MessageEvent): string[] => + (event as unknown as { activated_skills?: string[] }).activated_skills || + event.activated_microagents || + []; + +/** + * Checks if extended content contains valid text content. + */ +const hasValidExtendedContent = ( + extendedContent: MessageEvent["extended_content"], +): boolean => { + if (!extendedContent || extendedContent.length === 0) { + return false; + } + + return extendedContent.some( + (content) => content.type === "text" && content.text.trim().length > 0, + ); +}; + +/** + * Determines if a Skill Ready event should be displayed for the given message event. 
+ */ +const shouldShowSkillReadyEvent = (messageEvent: MessageEvent): boolean => { + const activatedSkills = getActivatedSkills(messageEvent); + const hasActivatedSkills = activatedSkills.length > 0; + const hasExtendedContent = hasValidExtendedContent( + messageEvent.extended_content, + ); + + return hasActivatedSkills && hasExtendedContent; +}; + +interface CommonProps { + microagentStatus?: MicroagentStatus | null; + microagentConversationId?: string; + microagentPRUrl?: string; + actions?: Array<{ + icon: React.ReactNode; + onClick: () => void; + tooltip?: string; + }>; + isLastMessage: boolean; + isInLast10Actions: boolean; + config: unknown; + isCheckingFeedback: boolean; + feedbackData: { exists: boolean }; + isFromPlanningAgent: boolean; +} + +/** + * Renders a user message with its corresponding Skill Ready event. + */ +const renderUserMessageWithSkillReady = ( + messageEvent: MessageEvent, + commonProps: CommonProps, + isLastMessage: boolean, +): React.ReactElement => { + try { + const skillReadyEvent = createSkillReadyEvent(messageEvent); + return ( + <> + + + + ); + } catch (error) { + // If skill ready event creation fails, just render the user message + // Failed to create skill ready event, fallback to user message + return ( + + ); + } +}; + /* eslint-disable react/jsx-props-no-spreading */ export function EventMessage({ event, @@ -51,6 +151,9 @@ export function EventMessage({ const feedbackData = { exists: false }; const isCheckingFeedback = false; + // Read isFromPlanningAgent directly from the event object + const isFromPlanningAgent = event.isFromPlanningAgent || false; + // Common props for components that need them const commonProps = { microagentStatus, @@ -62,6 +165,7 @@ export function EventMessage({ config, isCheckingFeedback, feedbackData, + isFromPlanningAgent, }; // Agent error events @@ -114,10 +218,21 @@ export function EventMessage({ // Message events (user and assistant messages) if (!isActionEvent(event) && !isObservationEvent(event)) { - // This is a MessageEvent + const messageEvent = event as MessageEvent; + + // Check if this is a user message that should display a Skill Ready event + if (isUserMessageEvent(event) && shouldShowSkillReadyEvent(messageEvent)) { + return renderUserMessageWithSkillReady( + messageEvent, + commonProps, + isLastMessage, + ); + } + + // Render normal message event (user or assistant) return ( diff --git a/frontend/src/components/v1/chat/task-tracking/task-item.tsx b/frontend/src/components/v1/chat/task-tracking/task-item.tsx index b25664a61140..a50b6829d38e 100644 --- a/frontend/src/components/v1/chat/task-tracking/task-item.tsx +++ b/frontend/src/components/v1/chat/task-tracking/task-item.tsx @@ -20,9 +20,7 @@ export function TaskItem({ task }: TaskItemProps) { case "todo": return ; case "in_progress": - return ( - - ); + return ; case "done": return ; default: diff --git a/frontend/src/contexts/conversation-websocket-context.tsx b/frontend/src/contexts/conversation-websocket-context.tsx index 685f6c93ab88..73bd2b365af4 100644 --- a/frontend/src/contexts/conversation-websocket-context.tsx +++ b/frontend/src/contexts/conversation-websocket-context.tsx @@ -7,13 +7,15 @@ import React, { useMemo, useRef, } from "react"; +import { useTranslation } from "react-i18next"; import { useQueryClient } from "@tanstack/react-query"; import { useWebSocket, WebSocketHookOptions } from "#/hooks/use-websocket"; import { useEventStore } from "#/stores/use-event-store"; import { useErrorMessageStore } from "#/stores/error-message-store"; import 
{ useOptimisticUserMessageStore } from "#/stores/optimistic-user-message-store"; import { useV1ConversationStateStore } from "#/stores/v1-conversation-state-store"; -import { useCommandStore } from "#/state/command-store"; +import { useCommandStore } from "#/stores/command-store"; +import { useBrowserStore } from "#/stores/browser-store"; import { isV1Event, isAgentErrorEvent, @@ -22,10 +24,15 @@ import { isConversationStateUpdateEvent, isFullStateConversationStateUpdateEvent, isAgentStatusConversationStateUpdateEvent, + isStatsConversationStateUpdateEvent, isExecuteBashActionEvent, isExecuteBashObservationEvent, isConversationErrorEvent, + isPlanningFileEditorObservationEvent, + isBrowserObservationEvent, + isBrowserNavigateActionEvent, } from "#/types/v1/type-guards"; +import { ConversationStateUpdateEventStats } from "#/types/v1/core/events/conversation-state-event"; import { handleActionEventCacheInvalidation } from "#/utils/cache-utils"; import { buildWebSocketUrl } from "#/utils/websocket-url"; import type { @@ -33,9 +40,12 @@ import type { V1SendMessageRequest, } from "#/api/conversation-service/v1-conversation-service.types"; import EventService from "#/api/event-service/event-service.api"; -import { useConversationStore } from "#/state/conversation-store"; +import { useConversationStore } from "#/stores/conversation-store"; import { isBudgetOrCreditError } from "#/utils/error-handler"; import { useTracking } from "#/hooks/use-tracking"; +import { useReadConversationFile } from "#/hooks/mutation/use-read-conversation-file"; +import useMetricsStore from "#/stores/metrics-store"; +import { I18nKey } from "#/i18n/declaration"; // eslint-disable-next-line @typescript-eslint/naming-convention export type V1_WebSocketConnectionState = @@ -99,12 +109,55 @@ export function ConversationWebSocketProvider({ number | null >(null); - const { conversationMode } = useConversationStore(); + const { conversationMode, setPlanContent } = useConversationStore(); + + // Hook for reading conversation file + const { mutate: readConversationFile } = useReadConversationFile(); // Separate received event count tracking per connection const receivedEventCountRefMain = useRef(0); const receivedEventCountRefPlanning = useRef(0); + // Track the latest PlanningFileEditorObservation event during history replay + // We'll only call the API once after history loading completes + const latestPlanningFileEventRef = useRef<{ + path: string; + conversationId: string; + } | null>(null); + + const { t } = useTranslation(); + + // Helper function to update metrics from stats event + const updateMetricsFromStats = useCallback( + (event: ConversationStateUpdateEventStats) => { + if (event.value.usage_to_metrics?.agent) { + const agentMetrics = event.value.usage_to_metrics.agent; + const metrics = { + cost: agentMetrics.accumulated_cost, + max_budget_per_task: agentMetrics.max_budget_per_task ?? null, + usage: agentMetrics.accumulated_token_usage + ? 
{ + prompt_tokens: + agentMetrics.accumulated_token_usage.prompt_tokens, + completion_tokens: + agentMetrics.accumulated_token_usage.completion_tokens, + cache_read_tokens: + agentMetrics.accumulated_token_usage.cache_read_tokens, + cache_write_tokens: + agentMetrics.accumulated_token_usage.cache_write_tokens, + context_window: + agentMetrics.accumulated_token_usage.context_window, + per_turn_token: + agentMetrics.accumulated_token_usage.per_turn_token, + } + : null, + }; + useMetricsStore.getState().setMetrics(metrics); + } + }, + [], + ); + // Build WebSocket URL from props // Only build URL if we have both conversationId and conversationUrl // This prevents connection attempts during task polling phase @@ -201,11 +254,40 @@ export function ConversationWebSocketProvider({ receivedEventCountRefPlanning, ]); + // Call API once after history loading completes if we tracked any PlanningFileEditorObservation events + useEffect(() => { + if (!isLoadingHistoryPlanning && latestPlanningFileEventRef.current) { + const { path, conversationId: currentPlanningConversationId } = + latestPlanningFileEventRef.current; + + readConversationFile( + { + conversationId: currentPlanningConversationId, + filePath: path, + }, + { + onSuccess: (fileContent) => { + setPlanContent(fileContent); + }, + onError: (error) => { + // eslint-disable-next-line no-console + console.warn("Failed to read conversation file:", error); + }, + }, + ); + + // Clear the ref after calling the API + latestPlanningFileEventRef.current = null; + } + }, [isLoadingHistoryPlanning, readConversationFile, setPlanContent]); + useEffect(() => { hasConnectedRefMain.current = false; setIsLoadingHistoryPlanning(!!subConversationIds?.length); setExpectedEventCountPlanning(null); receivedEventCountRefPlanning.current = 0; + // Reset the tracked event ref when sub-conversations change + latestPlanningFileEventRef.current = null; }, [subConversationIds]); // Merged loading history state - true if either connection is still loading @@ -220,6 +302,8 @@ export function ConversationWebSocketProvider({ setIsLoadingHistoryMain(true); setExpectedEventCountMain(null); receivedEventCountRefMain.current = 0; + // Reset the tracked event ref when conversation changes + latestPlanningFileEventRef.current = null; }, [conversationId]); // Separate message handlers for each connection @@ -287,6 +371,9 @@ export function ConversationWebSocketProvider({ if (isAgentStatusConversationStateUpdateEvent(event)) { setExecutionStatus(event.value); } + if (isStatsConversationStateUpdateEvent(event)) { + updateMetricsFromStats(event); + } } // Handle ExecuteBashAction events - add command as input to terminal @@ -303,6 +390,22 @@ export function ConversationWebSocketProvider({ .join("\n"); appendOutput(textContent); } + + // Handle BrowserObservation events - update browser store with screenshot + if (isBrowserObservationEvent(event)) { + const { screenshot_data: screenshotData } = event.observation; + if (screenshotData) { + const screenshotSrc = screenshotData.startsWith("data:") + ? 
screenshotData + : `data:image/png;base64,${screenshotData}`; + useBrowserStore.getState().setScreenshotSrc(screenshotSrc); + } + } + + // Handle BrowserNavigateAction events - update browser store with URL + if (isBrowserNavigateActionEvent(event)) { + useBrowserStore.getState().setUrl(event.action.url); + } } } catch (error) { // eslint-disable-next-line no-console @@ -320,6 +423,7 @@ export function ConversationWebSocketProvider({ setExecutionStatus, appendInput, appendOutput, + updateMetricsFromStats, ], ); @@ -343,7 +447,12 @@ export function ConversationWebSocketProvider({ // Use type guard to validate v1 event structure if (isV1Event(event)) { - addEvent(event); + // Mark this event as coming from the planning agent + const eventWithPlanningFlag = { + ...event, + isFromPlanningAgent: true, + }; + addEvent(eventWithPlanningFlag); // Handle AgentErrorEvent specifically if (isAgentErrorEvent(event)) { @@ -376,6 +485,9 @@ export function ConversationWebSocketProvider({ if (isAgentStatusConversationStateUpdateEvent(event)) { setExecutionStatus(event.value); } + if (isStatsConversationStateUpdateEvent(event)) { + updateMetricsFromStats(event); + } } // Handle ExecuteBashAction events - add command as input to terminal @@ -392,6 +504,41 @@ export function ConversationWebSocketProvider({ .join("\n"); appendOutput(textContent); } + + // Handle PlanningFileEditorObservation events - read and update plan content + if (isPlanningFileEditorObservationEvent(event)) { + const planningAgentConversation = subConversations?.[0]; + const planningConversationId = planningAgentConversation?.id; + + if (planningConversationId && event.observation.path) { + // During history replay, track the latest event but don't call API + // After history loading completes, we'll call the API once with the latest event + if (isLoadingHistoryPlanning) { + latestPlanningFileEventRef.current = { + path: event.observation.path, + conversationId: planningConversationId, + }; + } else { + // History loading is complete - this is a new real-time event + // Call the API immediately for real-time updates + readConversationFile( + { + conversationId: planningConversationId, + filePath: event.observation.path, + }, + { + onSuccess: (fileContent) => { + setPlanContent(fileContent); + }, + onError: (error) => { + // eslint-disable-next-line no-console + console.warn("Failed to read conversation file:", error); + }, + }, + ); + } + } + } } } catch (error) { // eslint-disable-next-line no-console @@ -409,6 +556,9 @@ export function ConversationWebSocketProvider({ setExecutionStatus, appendInput, appendOutput, + readConversationFile, + setPlanContent, + updateMetricsFromStats, ], ); @@ -432,9 +582,13 @@ export function ConversationWebSocketProvider({ removeErrorMessage(); // Clear any previous error messages on successful connection // Fetch expected event count for history loading detection - if (conversationId) { + if (conversationId && conversationUrl) { try { - const count = await EventService.getEventCount(conversationId); + const count = await EventService.getEventCount( + conversationId, + conversationUrl, + sessionApiKey, + ); setExpectedEventCountMain(count); // If no events expected, mark as loaded immediately @@ -453,7 +607,7 @@ export function ConversationWebSocketProvider({ // This prevents showing errors during initial connection attempts (e.g., when auto-starting a conversation) if (event.code !== 1000 && hasConnectedRefMain.current) { setErrorMessage( - `Connection lost: ${event.reason || "Unexpected disconnect"}`, + 
`${t(I18nKey.STATUS$CONNECTION_LOST)}: ${event.reason || t(I18nKey.STATUS$DISCONNECTED_REFRESH_PAGE)}`, ); } }, @@ -472,6 +626,7 @@ export function ConversationWebSocketProvider({ removeErrorMessage, sessionApiKey, conversationId, + conversationUrl, ]); // Separate WebSocket options for planning agent connection @@ -496,10 +651,15 @@ export function ConversationWebSocketProvider({ removeErrorMessage(); // Clear any previous error messages on successful connection // Fetch expected event count for history loading detection - if (planningAgentConversation?.id) { + if ( + planningAgentConversation?.id && + planningAgentConversation.conversation_url + ) { try { const count = await EventService.getEventCount( planningAgentConversation.id, + planningAgentConversation.conversation_url, + planningAgentConversation.session_api_key, ); setExpectedEventCountPlanning(count); @@ -519,7 +679,7 @@ export function ConversationWebSocketProvider({ // This prevents showing errors during initial connection attempts (e.g., when auto-starting a conversation) if (event.code !== 1000 && hasConnectedRefPlanning.current) { setErrorMessage( - `Connection lost: ${event.reason || "Unexpected disconnect"}`, + `${t(I18nKey.STATUS$CONNECTION_LOST)}: ${event.reason || t(I18nKey.STATUS$DISCONNECTED_REFRESH_PAGE)}`, ); } }, diff --git a/frontend/src/hooks/chat/use-chat-input-logic.ts b/frontend/src/hooks/chat/use-chat-input-logic.ts index d908882a4071..21dc682fc98f 100644 --- a/frontend/src/hooks/chat/use-chat-input-logic.ts +++ b/frontend/src/hooks/chat/use-chat-input-logic.ts @@ -4,7 +4,7 @@ import { clearEmptyContent, getTextContent, } from "#/components/features/chat/utils/chat-input.utils"; -import { useConversationStore } from "#/state/conversation-store"; +import { useConversationStore } from "#/stores/conversation-store"; /** * Hook for managing chat input content logic diff --git a/frontend/src/hooks/chat/use-grip-resize.ts b/frontend/src/hooks/chat/use-grip-resize.ts index b29f82039914..a46d3fbe4f6c 100644 --- a/frontend/src/hooks/chat/use-grip-resize.ts +++ b/frontend/src/hooks/chat/use-grip-resize.ts @@ -4,7 +4,7 @@ import { CHAT_INPUT } from "#/utils/constants"; import { IMessageToSend, useConversationStore, -} from "#/state/conversation-store"; +} from "#/stores/conversation-store"; /** * Hook for managing grip resize functionality diff --git a/frontend/src/hooks/mutation/use-accept-tos.ts b/frontend/src/hooks/mutation/use-accept-tos.ts new file mode 100644 index 000000000000..a159b1458cd5 --- /dev/null +++ b/frontend/src/hooks/mutation/use-accept-tos.ts @@ -0,0 +1,54 @@ +import { useMutation } from "@tanstack/react-query"; +import { usePostHog } from "posthog-js/react"; +import { useNavigate } from "react-router"; +import { openHands } from "#/api/open-hands-axios"; +import { handleCaptureConsent } from "#/utils/handle-capture-consent"; +import { useTracking } from "#/hooks/use-tracking"; + +interface AcceptTosVariables { + redirectUrl: string; +} + +interface AcceptTosResponse { + redirect_url?: string; +} + +export const useAcceptTos = () => { + const posthog = usePostHog(); + const navigate = useNavigate(); + const { trackUserSignupCompleted } = useTracking(); + + return useMutation({ + mutationFn: async ({ redirectUrl }: AcceptTosVariables) => { + // Set consent for analytics + handleCaptureConsent(posthog, true); + + // Call the API to record TOS acceptance in the database + return openHands.post("/api/accept_tos", { + redirect_url: redirectUrl, + }); + }, + onSuccess: (response, { redirectUrl }) => { + 
// Track user signup completion + trackUserSignupCompleted(); + + // Get the redirect URL from the response + const finalRedirectUrl = response.data.redirect_url || redirectUrl; + + // Check if the redirect URL is an external URL (starts with http or https) + if ( + finalRedirectUrl.startsWith("http://") || + finalRedirectUrl.startsWith("https://") + ) { + // For external URLs, redirect using window.location + window.location.href = finalRedirectUrl; + } else { + // For internal routes, use navigate + navigate(finalRedirectUrl); + } + }, + onError: () => { + window.location.href = "/"; + }, + }); +}; diff --git a/frontend/src/hooks/mutation/use-add-mcp-server.ts b/frontend/src/hooks/mutation/use-add-mcp-server.ts index 92725cbe58b4..c9aaf4e4466b 100644 --- a/frontend/src/hooks/mutation/use-add-mcp-server.ts +++ b/frontend/src/hooks/mutation/use-add-mcp-server.ts @@ -1,6 +1,6 @@ import { useMutation, useQueryClient } from "@tanstack/react-query"; import { useSettings } from "#/hooks/query/use-settings"; -import SettingsService from "#/settings-service/settings-service.api"; +import SettingsService from "#/api/settings-service/settings-service.api"; import { MCPSSEServer, MCPStdioServer, MCPSHTTPServer } from "#/types/settings"; type MCPServerType = "sse" | "stdio" | "shttp"; @@ -24,7 +24,7 @@ export function useAddMcpServer() { mutationFn: async (server: MCPServerConfig): Promise => { if (!settings) return; - const currentConfig = settings.MCP_CONFIG || { + const currentConfig = settings.mcp_config || { sse_servers: [], stdio_servers: [], shttp_servers: [], @@ -57,6 +57,7 @@ export function useAddMcpServer() { const apiSettings = { mcp_config: newConfig, + v1_enabled: settings.v1_enabled, }; await SettingsService.saveSettings(apiSettings); diff --git a/frontend/src/hooks/mutation/use-create-billing-session.ts b/frontend/src/hooks/mutation/use-create-billing-session.ts new file mode 100644 index 000000000000..f8f0716cb2d5 --- /dev/null +++ b/frontend/src/hooks/mutation/use-create-billing-session.ts @@ -0,0 +1,19 @@ +import { useMutation } from "@tanstack/react-query"; +import { useTranslation } from "react-i18next"; +import { I18nKey } from "#/i18n/declaration"; +import BillingService from "#/api/billing-service/billing-service.api"; +import { displayErrorToast } from "#/utils/custom-toast-handlers"; + +export const useCreateBillingSession = () => { + const { t } = useTranslation(); + + return useMutation({ + mutationFn: BillingService.createBillingSessionResponse, + onSuccess: (data) => { + window.location.href = data; + }, + onError: () => { + displayErrorToast(t(I18nKey.BILLING$ERROR_WHILE_CREATING_SESSION)); + }, + }); +}; diff --git a/frontend/src/hooks/mutation/use-create-conversation.ts b/frontend/src/hooks/mutation/use-create-conversation.ts index a44d921e4460..85e8dd880cb9 100644 --- a/frontend/src/hooks/mutation/use-create-conversation.ts +++ b/frontend/src/hooks/mutation/use-create-conversation.ts @@ -4,8 +4,8 @@ import V1ConversationService from "#/api/conversation-service/v1-conversation-se import { SuggestedTask } from "#/utils/types"; import { Provider } from "#/types/settings"; import { CreateMicroagent, Conversation } from "#/api/open-hands.types"; -import { USE_V1_CONVERSATION_API } from "#/utils/feature-flags"; import { useTracking } from "#/hooks/use-tracking"; +import { useSettings } from "#/hooks/query/use-settings"; interface CreateConversationVariables { query?: string; @@ -34,6 +34,7 @@ interface CreateConversationResponse extends Partial { export const 
useCreateConversation = () => { const queryClient = useQueryClient(); const { trackConversationCreated } = useTracking(); + const { data: settings } = useSettings(); return useMutation({ mutationKey: ["create-conversation"], @@ -50,7 +51,7 @@ export const useCreateConversation = () => { agentType, } = variables; - const useV1 = USE_V1_CONVERSATION_API() && !createMicroagent; + const useV1 = !!settings?.v1_enabled && !createMicroagent; if (useV1) { // Use V1 API - creates a conversation start task diff --git a/frontend/src/hooks/mutation/use-delete-mcp-server.ts b/frontend/src/hooks/mutation/use-delete-mcp-server.ts index f060890ae860..43d1b2a7ccc4 100644 --- a/frontend/src/hooks/mutation/use-delete-mcp-server.ts +++ b/frontend/src/hooks/mutation/use-delete-mcp-server.ts @@ -1,6 +1,6 @@ import { useMutation, useQueryClient } from "@tanstack/react-query"; import { useSettings } from "#/hooks/query/use-settings"; -import SettingsService from "#/settings-service/settings-service.api"; +import SettingsService from "#/api/settings-service/settings-service.api"; import { MCPConfig } from "#/types/settings"; export function useDeleteMcpServer() { @@ -9,9 +9,9 @@ export function useDeleteMcpServer() { return useMutation({ mutationFn: async (serverId: string): Promise => { - if (!settings?.MCP_CONFIG) return; + if (!settings?.mcp_config) return; - const newConfig: MCPConfig = { ...settings.MCP_CONFIG }; + const newConfig: MCPConfig = { ...settings.mcp_config }; const [serverType, indexStr] = serverId.split("-"); const index = parseInt(indexStr, 10); @@ -25,6 +25,7 @@ export function useDeleteMcpServer() { const apiSettings = { mcp_config: newConfig, + v1_enabled: settings.v1_enabled, }; await SettingsService.saveSettings(apiSettings); diff --git a/frontend/src/hooks/mutation/use-read-conversation-file.ts b/frontend/src/hooks/mutation/use-read-conversation-file.ts new file mode 100644 index 000000000000..5dd8c51eb965 --- /dev/null +++ b/frontend/src/hooks/mutation/use-read-conversation-file.ts @@ -0,0 +1,17 @@ +import { useMutation } from "@tanstack/react-query"; +import V1ConversationService from "#/api/conversation-service/v1-conversation-service.api"; + +interface UseReadConversationFileVariables { + conversationId: string; + filePath?: string; +} + +export const useReadConversationFile = () => + useMutation({ + mutationKey: ["read-conversation-file"], + mutationFn: async ({ + conversationId, + filePath, + }: UseReadConversationFileVariables): Promise => + V1ConversationService.readConversationFile(conversationId, filePath), + }); diff --git a/frontend/src/hooks/mutation/use-refresh-llm-api-key.ts b/frontend/src/hooks/mutation/use-refresh-llm-api-key.ts new file mode 100644 index 000000000000..11a112e182b9 --- /dev/null +++ b/frontend/src/hooks/mutation/use-refresh-llm-api-key.ts @@ -0,0 +1,23 @@ +import { useMutation, useQueryClient } from "@tanstack/react-query"; +import { openHands } from "#/api/open-hands-axios"; +import { + LLM_API_KEY_QUERY_KEY, + LlmApiKeyResponse, +} from "#/hooks/query/use-llm-api-key"; + +export function useRefreshLlmApiKey() { + const queryClient = useQueryClient(); + + return useMutation({ + mutationFn: async () => { + const { data } = await openHands.post( + "/api/keys/llm/byor/refresh", + ); + return data; + }, + onSuccess: () => { + // Invalidate the LLM API key query to trigger a refetch + queryClient.invalidateQueries({ queryKey: [LLM_API_KEY_QUERY_KEY] }); + }, + }); +} diff --git a/frontend/src/hooks/mutation/use-save-settings.ts 
b/frontend/src/hooks/mutation/use-save-settings.ts index c8a433e9dc39..f335fd83eccf 100644 --- a/frontend/src/hooks/mutation/use-save-settings.ts +++ b/frontend/src/hooks/mutation/use-save-settings.ts @@ -1,43 +1,29 @@ import { useMutation, useQueryClient } from "@tanstack/react-query"; import { usePostHog } from "posthog-js/react"; import { DEFAULT_SETTINGS } from "#/services/settings"; -import SettingsService from "#/settings-service/settings-service.api"; -import { PostSettings } from "#/types/settings"; -import { PostApiSettings } from "#/settings-service/settings.types"; +import SettingsService from "#/api/settings-service/settings-service.api"; +import { Settings } from "#/types/settings"; import { useSettings } from "../query/use-settings"; -const saveSettingsMutationFn = async (settings: Partial) => { - const apiSettings: Partial = { - llm_model: settings.LLM_MODEL, - llm_base_url: settings.LLM_BASE_URL, - agent: settings.AGENT || DEFAULT_SETTINGS.AGENT, - language: settings.LANGUAGE || DEFAULT_SETTINGS.LANGUAGE, - confirmation_mode: settings.CONFIRMATION_MODE, - security_analyzer: settings.SECURITY_ANALYZER, +const saveSettingsMutationFn = async (settings: Partial) => { + const settingsToSave: Partial = { + ...settings, + agent: settings.agent || DEFAULT_SETTINGS.agent, + language: settings.language || DEFAULT_SETTINGS.language, llm_api_key: settings.llm_api_key === "" ? "" : settings.llm_api_key?.trim() || undefined, - remote_runtime_resource_factor: settings.REMOTE_RUNTIME_RESOURCE_FACTOR, - enable_default_condenser: settings.ENABLE_DEFAULT_CONDENSER, condenser_max_size: - settings.CONDENSER_MAX_SIZE ?? DEFAULT_SETTINGS.CONDENSER_MAX_SIZE, - enable_sound_notifications: settings.ENABLE_SOUND_NOTIFICATIONS, - user_consents_to_analytics: settings.user_consents_to_analytics, - provider_tokens_set: settings.PROVIDER_TOKENS_SET, - mcp_config: settings.MCP_CONFIG, - enable_proactive_conversation_starters: - settings.ENABLE_PROACTIVE_CONVERSATION_STARTERS, - enable_solvability_analysis: settings.ENABLE_SOLVABILITY_ANALYSIS, - search_api_key: settings.SEARCH_API_KEY?.trim() || "", - max_budget_per_task: settings.MAX_BUDGET_PER_TASK, + settings.condenser_max_size ?? 
DEFAULT_SETTINGS.condenser_max_size, + search_api_key: settings.search_api_key?.trim() || "", git_user_name: - settings.GIT_USER_NAME?.trim() || DEFAULT_SETTINGS.GIT_USER_NAME, + settings.git_user_name?.trim() || DEFAULT_SETTINGS.git_user_name, git_user_email: - settings.GIT_USER_EMAIL?.trim() || DEFAULT_SETTINGS.GIT_USER_EMAIL, + settings.git_user_email?.trim() || DEFAULT_SETTINGS.git_user_email, }; - await SettingsService.saveSettings(apiSettings); + await SettingsService.saveSettings(settingsToSave); }; export const useSaveSettings = () => { @@ -46,18 +32,18 @@ export const useSaveSettings = () => { const { data: currentSettings } = useSettings(); return useMutation({ - mutationFn: async (settings: Partial) => { + mutationFn: async (settings: Partial) => { const newSettings = { ...currentSettings, ...settings }; // Track MCP configuration changes if ( - settings.MCP_CONFIG && - currentSettings?.MCP_CONFIG !== settings.MCP_CONFIG + settings.mcp_config && + currentSettings?.mcp_config !== settings.mcp_config ) { - const hasMcpConfig = !!settings.MCP_CONFIG; - const sseServersCount = settings.MCP_CONFIG?.sse_servers?.length || 0; + const hasMcpConfig = !!settings.mcp_config; + const sseServersCount = settings.mcp_config?.sse_servers?.length || 0; const stdioServersCount = - settings.MCP_CONFIG?.stdio_servers?.length || 0; + settings.mcp_config?.stdio_servers?.length || 0; // Track MCP configuration usage posthog.capture("mcp_config_updated", { diff --git a/frontend/src/hooks/mutation/use-update-mcp-server.ts b/frontend/src/hooks/mutation/use-update-mcp-server.ts index 83ef5dfcf3fa..558997b50000 100644 --- a/frontend/src/hooks/mutation/use-update-mcp-server.ts +++ b/frontend/src/hooks/mutation/use-update-mcp-server.ts @@ -1,6 +1,6 @@ import { useMutation, useQueryClient } from "@tanstack/react-query"; import { useSettings } from "#/hooks/query/use-settings"; -import SettingsService from "#/settings-service/settings-service.api"; +import SettingsService from "#/api/settings-service/settings-service.api"; import { MCPSSEServer, MCPStdioServer, MCPSHTTPServer } from "#/types/settings"; type MCPServerType = "sse" | "stdio" | "shttp"; @@ -28,9 +28,9 @@ export function useUpdateMcpServer() { serverId: string; server: MCPServerConfig; }): Promise => { - if (!settings?.MCP_CONFIG) return; + if (!settings?.mcp_config) return; - const newConfig = { ...settings.MCP_CONFIG }; + const newConfig = { ...settings.mcp_config }; const [serverType, indexStr] = serverId.split("-"); const index = parseInt(indexStr, 10); @@ -59,6 +59,7 @@ export function useUpdateMcpServer() { const apiSettings = { mcp_config: newConfig, + v1_enabled: settings.v1_enabled, }; await SettingsService.saveSettings(apiSettings); diff --git a/frontend/src/hooks/query/use-balance.ts b/frontend/src/hooks/query/use-balance.ts index 1d89454f74f0..1bc7075e9f6d 100644 --- a/frontend/src/hooks/query/use-balance.ts +++ b/frontend/src/hooks/query/use-balance.ts @@ -13,6 +13,6 @@ export const useBalance = () => { enabled: !isOnTosPage && config?.APP_MODE === "saas" && - config?.FEATURE_FLAGS.ENABLE_BILLING, + config?.FEATURE_FLAGS?.ENABLE_BILLING, }); }; diff --git a/frontend/src/hooks/query/use-conversation-microagents.ts b/frontend/src/hooks/query/use-conversation-skills.ts similarity index 62% rename from frontend/src/hooks/query/use-conversation-microagents.ts rename to frontend/src/hooks/query/use-conversation-skills.ts index d51b2b311dc0..43cf23bd379f 100644 --- a/frontend/src/hooks/query/use-conversation-microagents.ts +++ 
b/frontend/src/hooks/query/use-conversation-skills.ts @@ -1,19 +1,29 @@ import { useQuery } from "@tanstack/react-query"; import ConversationService from "#/api/conversation-service/conversation-service.api"; +import V1ConversationService from "#/api/conversation-service/v1-conversation-service.api"; import { useConversationId } from "../use-conversation-id"; import { AgentState } from "#/types/agent-state"; import { useAgentState } from "#/hooks/use-agent-state"; +import { useSettings } from "./use-settings"; -export const useConversationMicroagents = () => { +export const useConversationSkills = () => { const { conversationId } = useConversationId(); const { curAgentState } = useAgentState(); + const { data: settings } = useSettings(); return useQuery({ - queryKey: ["conversation", conversationId, "microagents"], + queryKey: ["conversation", conversationId, "skills", settings?.v1_enabled], queryFn: async () => { if (!conversationId) { throw new Error("No conversation ID provided"); } + + // Check if V1 is enabled and use the appropriate API + if (settings?.v1_enabled) { + const data = await V1ConversationService.getSkills(conversationId); + return data.skills; + } + const data = await ConversationService.getMicroagents(conversationId); return data.microagents; }, diff --git a/frontend/src/hooks/query/use-llm-api-key.ts b/frontend/src/hooks/query/use-llm-api-key.ts index 5dcea9f71422..58dee11411c5 100644 --- a/frontend/src/hooks/query/use-llm-api-key.ts +++ b/frontend/src/hooks/query/use-llm-api-key.ts @@ -1,4 +1,4 @@ -import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"; +import { useQuery } from "@tanstack/react-query"; import { openHands } from "#/api/open-hands-axios"; import { useConfig } from "./use-config"; @@ -23,20 +23,3 @@ export function useLlmApiKey() { gcTime: 1000 * 60 * 15, // 15 minutes }); } - -export function useRefreshLlmApiKey() { - const queryClient = useQueryClient(); - - return useMutation({ - mutationFn: async () => { - const { data } = await openHands.post( - "/api/keys/llm/byor/refresh", - ); - return data; - }, - onSuccess: () => { - // Invalidate the LLM API key query to trigger a refetch - queryClient.invalidateQueries({ queryKey: [LLM_API_KEY_QUERY_KEY] }); - }, - }); -} diff --git a/frontend/src/hooks/query/use-microagent-management-conversations.ts b/frontend/src/hooks/query/use-microagent-management-conversations.ts index 4c83ca2f75a6..947cbcf5092a 100644 --- a/frontend/src/hooks/query/use-microagent-management-conversations.ts +++ b/frontend/src/hooks/query/use-microagent-management-conversations.ts @@ -1,5 +1,5 @@ import { useQuery } from "@tanstack/react-query"; -import MicroagentManagementService from "#/ui/microagent-management-service/microagent-management-service.api"; +import MicroagentManagementService from "#/api/microagent-management-service/microagent-management-service.api"; export const useMicroagentManagementConversations = ( selectedRepository: string, diff --git a/frontend/src/hooks/query/use-settings.ts b/frontend/src/hooks/query/use-settings.ts index 74a516f4a6b4..faf34d5dae35 100644 --- a/frontend/src/hooks/query/use-settings.ts +++ b/frontend/src/hooks/query/use-settings.ts @@ -1,41 +1,23 @@ import { useQuery } from "@tanstack/react-query"; -import SettingsService from "#/settings-service/settings-service.api"; +import SettingsService from "#/api/settings-service/settings-service.api"; import { DEFAULT_SETTINGS } from "#/services/settings"; import { useIsOnTosPage } from "#/hooks/use-is-on-tos-page"; import { 
Settings } from "#/types/settings"; import { useIsAuthed } from "./use-is-authed"; const getSettingsQueryFn = async (): Promise => { - const apiSettings = await SettingsService.getSettings(); + const settings = await SettingsService.getSettings(); return { - LLM_MODEL: apiSettings.llm_model, - LLM_BASE_URL: apiSettings.llm_base_url, - AGENT: apiSettings.agent, - LANGUAGE: apiSettings.language, - CONFIRMATION_MODE: apiSettings.confirmation_mode, - SECURITY_ANALYZER: apiSettings.security_analyzer, - LLM_API_KEY_SET: apiSettings.llm_api_key_set, - SEARCH_API_KEY_SET: apiSettings.search_api_key_set, - REMOTE_RUNTIME_RESOURCE_FACTOR: apiSettings.remote_runtime_resource_factor, - PROVIDER_TOKENS_SET: apiSettings.provider_tokens_set, - ENABLE_DEFAULT_CONDENSER: apiSettings.enable_default_condenser, - CONDENSER_MAX_SIZE: - apiSettings.condenser_max_size ?? DEFAULT_SETTINGS.CONDENSER_MAX_SIZE, - ENABLE_SOUND_NOTIFICATIONS: apiSettings.enable_sound_notifications, - ENABLE_PROACTIVE_CONVERSATION_STARTERS: - apiSettings.enable_proactive_conversation_starters, - ENABLE_SOLVABILITY_ANALYSIS: apiSettings.enable_solvability_analysis, - USER_CONSENTS_TO_ANALYTICS: apiSettings.user_consents_to_analytics, - SEARCH_API_KEY: apiSettings.search_api_key || "", - MAX_BUDGET_PER_TASK: apiSettings.max_budget_per_task, - EMAIL: apiSettings.email || "", - EMAIL_VERIFIED: apiSettings.email_verified, - MCP_CONFIG: apiSettings.mcp_config, - GIT_USER_NAME: apiSettings.git_user_name || DEFAULT_SETTINGS.GIT_USER_NAME, - GIT_USER_EMAIL: - apiSettings.git_user_email || DEFAULT_SETTINGS.GIT_USER_EMAIL, - IS_NEW_USER: false, + ...settings, + condenser_max_size: + settings.condenser_max_size ?? DEFAULT_SETTINGS.condenser_max_size, + search_api_key: settings.search_api_key || "", + email: settings.email || "", + git_user_name: settings.git_user_name || DEFAULT_SETTINGS.git_user_name, + git_user_email: settings.git_user_email || DEFAULT_SETTINGS.git_user_email, + is_new_user: false, + v1_enabled: settings.v1_enabled ?? 
DEFAULT_SETTINGS.v1_enabled, }; }; diff --git a/frontend/src/hooks/query/use-start-tasks.ts b/frontend/src/hooks/query/use-start-tasks.ts index 833ce86258db..3fb1e8d47d64 100644 --- a/frontend/src/hooks/query/use-start-tasks.ts +++ b/frontend/src/hooks/query/use-start-tasks.ts @@ -1,6 +1,6 @@ import { useQuery } from "@tanstack/react-query"; import V1ConversationService from "#/api/conversation-service/v1-conversation-service.api"; -import { USE_V1_CONVERSATION_API } from "#/utils/feature-flags"; +import { useSettings } from "#/hooks/query/use-settings"; /** * Hook to fetch in-progress V1 conversation start tasks @@ -13,13 +13,17 @@ import { USE_V1_CONVERSATION_API } from "#/utils/feature-flags"; * @param limit Maximum number of tasks to return (max 100) * @returns Query result with array of in-progress start tasks */ -export const useStartTasks = (limit = 10) => - useQuery({ +export const useStartTasks = (limit = 10) => { + const { data: settings } = useSettings(); + const isV1Enabled = settings?.v1_enabled; + + return useQuery({ queryKey: ["start-tasks", "search", limit], queryFn: () => V1ConversationService.searchStartTasks(limit), - enabled: USE_V1_CONVERSATION_API(), + enabled: isV1Enabled, select: (tasks) => tasks.filter( (task) => task.status !== "READY" && task.status !== "ERROR", ), }); +}; diff --git a/frontend/src/hooks/query/use-unified-get-git-changes.ts b/frontend/src/hooks/query/use-unified-get-git-changes.ts index ae5600469ac0..6b0856031c6b 100644 --- a/frontend/src/hooks/query/use-unified-get-git-changes.ts +++ b/frontend/src/hooks/query/use-unified-get-git-changes.ts @@ -103,5 +103,6 @@ export const useUnifiedGetGitChanges = () => { isSuccess: result.isSuccess, isError: result.isError, error: result.error, + refetch: result.refetch, }; }; diff --git a/frontend/src/hooks/use-auto-resize.ts b/frontend/src/hooks/use-auto-resize.ts index 6a86784e0f0e..52546d78eccd 100644 --- a/frontend/src/hooks/use-auto-resize.ts +++ b/frontend/src/hooks/use-auto-resize.ts @@ -1,5 +1,5 @@ import { useCallback, useEffect, RefObject, useRef } from "react"; -import { IMessageToSend } from "#/state/conversation-store"; +import { IMessageToSend } from "#/stores/conversation-store"; import { EPS } from "#/utils/constants"; import { getStyleHeightPx, setStyleHeightPx } from "#/utils/utils"; import { useDragResize } from "./use-drag-resize"; diff --git a/frontend/src/hooks/use-conversation-name-context-menu.ts b/frontend/src/hooks/use-conversation-name-context-menu.ts index 0e2c3e837ec4..6072d5331e9d 100644 --- a/frontend/src/hooks/use-conversation-name-context-menu.ts +++ b/frontend/src/hooks/use-conversation-name-context-menu.ts @@ -41,8 +41,7 @@ export function useConversationNameContextMenu({ const [metricsModalVisible, setMetricsModalVisible] = React.useState(false); const [systemModalVisible, setSystemModalVisible] = React.useState(false); - const [microagentsModalVisible, setMicroagentsModalVisible] = - React.useState(false); + const [skillsModalVisible, setSkillsModalVisible] = React.useState(false); const [confirmDeleteModalVisible, setConfirmDeleteModalVisible] = React.useState(false); const [confirmStopModalVisible, setConfirmStopModalVisible] = @@ -161,11 +160,9 @@ export function useConversationNameContextMenu({ onContextMenuToggle?.(false); }; - const handleShowMicroagents = ( - event: React.MouseEvent, - ) => { + const handleShowSkills = (event: React.MouseEvent) => { event.stopPropagation(); - setMicroagentsModalVisible(true); + setSkillsModalVisible(true); 
onContextMenuToggle?.(false); }; @@ -178,7 +175,7 @@ export function useConversationNameContextMenu({ handleDownloadViaVSCode, handleDisplayCost, handleShowAgentTools, - handleShowMicroagents, + handleShowSkills, handleConfirmDelete, handleConfirmStop, @@ -187,8 +184,8 @@ export function useConversationNameContextMenu({ setMetricsModalVisible, systemModalVisible, setSystemModalVisible, - microagentsModalVisible, - setMicroagentsModalVisible, + skillsModalVisible, + setSkillsModalVisible, confirmDeleteModalVisible, setConfirmDeleteModalVisible, confirmStopModalVisible, @@ -204,6 +201,6 @@ export function useConversationNameContextMenu({ shouldShowExport: Boolean(conversationId && showOptions), shouldShowDisplayCost: showOptions, shouldShowAgentTools: Boolean(showOptions && systemMessage), - shouldShowMicroagents: Boolean(showOptions && conversationId), + shouldShowSkills: Boolean(showOptions && conversationId), }; } diff --git a/frontend/src/hooks/use-handle-plan-click.ts b/frontend/src/hooks/use-handle-plan-click.ts new file mode 100644 index 000000000000..150766581276 --- /dev/null +++ b/frontend/src/hooks/use-handle-plan-click.ts @@ -0,0 +1,71 @@ +import { useCallback } from "react"; +import { useTranslation } from "react-i18next"; +import { I18nKey } from "#/i18n/declaration"; +import { useConversationStore } from "#/stores/conversation-store"; +import { useActiveConversation } from "#/hooks/query/use-active-conversation"; +import { useCreateConversation } from "#/hooks/mutation/use-create-conversation"; +import { displaySuccessToast } from "#/utils/custom-toast-handlers"; + +/** + * Custom hook that encapsulates the logic for handling plan creation. + * Returns a function that can be called to create a plan conversation and + * the pending state of the conversation creation. 
+ * + * @returns An object containing handlePlanClick function and isCreatingConversation boolean + */ +export const useHandlePlanClick = () => { + const { t } = useTranslation(); + const { setConversationMode, setSubConversationTaskId } = + useConversationStore(); + const { data: conversation } = useActiveConversation(); + const { mutate: createConversation, isPending: isCreatingConversation } = + useCreateConversation(); + + const handlePlanClick = useCallback( + (event?: React.MouseEvent | KeyboardEvent) => { + event?.preventDefault(); + event?.stopPropagation(); + + // Set conversation mode to "plan" immediately + setConversationMode("plan"); + + // Check if sub_conversation_ids is not empty + if ( + (conversation?.sub_conversation_ids && + conversation.sub_conversation_ids.length > 0) || + !conversation?.conversation_id + ) { + // Do nothing if both conditions are true + return; + } + + // Create a new sub-conversation if we have a current conversation ID + createConversation( + { + parentConversationId: conversation.conversation_id, + agentType: "plan", + }, + { + onSuccess: (data) => { + displaySuccessToast( + t(I18nKey.PLANNING_AGENTT$PLANNING_AGENT_INITIALIZED), + ); + // Track the task ID to poll for sub-conversation creation + if (data.v1_task_id) { + setSubConversationTaskId(data.v1_task_id); + } + }, + }, + ); + }, + [ + conversation, + createConversation, + setConversationMode, + setSubConversationTaskId, + t, + ], + ); + + return { handlePlanClick, isCreatingConversation }; +}; diff --git a/frontend/src/hooks/use-send-message.ts b/frontend/src/hooks/use-send-message.ts index 1e1d627181d4..c6655b823076 100644 --- a/frontend/src/hooks/use-send-message.ts +++ b/frontend/src/hooks/use-send-message.ts @@ -41,13 +41,11 @@ export function useSendMessage() { }, ]; - // Add images if present + // Add images if present - using SDK's ImageContent format if (args.image_urls && args.image_urls.length > 0) { - args.image_urls.forEach((url) => { - content.push({ - type: "image_url", - image_url: { url }, - }); + content.push({ + type: "image", + image_urls: args.image_urls, }); } diff --git a/frontend/src/hooks/use-settings-nav-items.ts b/frontend/src/hooks/use-settings-nav-items.ts new file mode 100644 index 000000000000..aa67e8cb9a75 --- /dev/null +++ b/frontend/src/hooks/use-settings-nav-items.ts @@ -0,0 +1,15 @@ +import { useConfig } from "#/hooks/query/use-config"; +import { SAAS_NAV_ITEMS, OSS_NAV_ITEMS } from "#/constants/settings-nav"; + +export function useSettingsNavItems() { + const { data: config } = useConfig(); + + const shouldHideLlmSettings = !!config?.FEATURE_FLAGS?.HIDE_LLM_SETTINGS; + const isSaasMode = config?.APP_MODE === "saas"; + + const items = isSaasMode ? SAAS_NAV_ITEMS : OSS_NAV_ITEMS; + + return shouldHideLlmSettings + ? 
items.filter((item) => item.to !== "/settings") + : items; +} diff --git a/frontend/src/hooks/use-sync-posthog-consent.ts b/frontend/src/hooks/use-sync-posthog-consent.ts index 615aa9a1bf8f..50321227940c 100644 --- a/frontend/src/hooks/use-sync-posthog-consent.ts +++ b/frontend/src/hooks/use-sync-posthog-consent.ts @@ -19,7 +19,7 @@ export const useSyncPostHogConsent = () => { return; } - const backendConsent = settings.USER_CONSENTS_TO_ANALYTICS; + const backendConsent = settings.user_consents_to_analytics; // Only sync if there's a backend preference set if (backendConsent !== null) { diff --git a/frontend/src/hooks/use-terminal.ts b/frontend/src/hooks/use-terminal.ts index b5ffb6baf9f7..caa2e42a15d0 100644 --- a/frontend/src/hooks/use-terminal.ts +++ b/frontend/src/hooks/use-terminal.ts @@ -1,7 +1,7 @@ import { FitAddon } from "@xterm/addon-fit"; import { Terminal } from "@xterm/xterm"; import React from "react"; -import { Command, useCommandStore } from "#/state/command-store"; +import { Command, useCommandStore } from "#/stores/command-store"; import { parseTerminalOutput } from "#/utils/parse-terminal-output"; /* @@ -29,6 +29,47 @@ const renderCommand = ( } }; +/** + * Check if the terminal is ready for fit operations. + * This prevents the "Cannot read properties of undefined (reading 'dimensions')" error + * that occurs when fit() is called on a terminal that is hidden, disposed, or not fully initialized. + */ +const canFitTerminal = ( + terminalInstance: Terminal | null, + fitAddonInstance: FitAddon | null, + containerElement: HTMLDivElement | null, +): boolean => { + // Check terminal and fitAddon exist + if (!terminalInstance || !fitAddonInstance) { + return false; + } + + // Check container element exists + if (!containerElement) { + return false; + } + + // Check element is visible (not display: none) + // When display is none, offsetParent is null (except for fixed/body elements) + const computedStyle = window.getComputedStyle(containerElement); + if (computedStyle.display === "none") { + return false; + } + + // Check element has dimensions + const { clientWidth, clientHeight } = containerElement; + if (clientWidth === 0 || clientHeight === 0) { + return false; + } + + // Check terminal has been opened (element property is set after open()) + if (!terminalInstance.element) { + return false; + } + + return true; +}; + // Create a persistent reference that survives component unmounts // This ensures terminal history is preserved when navigating away and back const persistentLastCommandIndex = { current: 0 }; @@ -39,12 +80,13 @@ export const useTerminal = () => { const fitAddon = React.useRef(null); const ref = React.useRef(null); const lastCommandIndex = persistentLastCommandIndex; // Use the persistent reference + const isDisposed = React.useRef(false); const createTerminal = () => new Terminal({ fontFamily: "Menlo, Monaco, 'Courier New', monospace", fontSize: 14, - scrollback: 1000, + scrollback: 10000, scrollSensitivity: 1, fastScrollModifier: "alt", fastScrollSensitivity: 5, @@ -55,6 +97,15 @@ export const useTerminal = () => { }, }); + const fitTerminalSafely = React.useCallback(() => { + if (isDisposed.current) { + return; + } + if (canFitTerminal(terminal.current, fitAddon.current, ref.current)) { + fitAddon.current!.fit(); + } + }, []); + const initializeTerminal = () => { if (terminal.current) { if (fitAddon.current) terminal.current.loadAddon(fitAddon.current); @@ -62,12 +113,14 @@ export const useTerminal = () => { terminal.current.open(ref.current); // Hide cursor 
for read-only terminal using ANSI escape sequence terminal.current.write("\x1b[?25l"); + fitTerminalSafely(); } } }; // Initialize terminal and handle cleanup React.useEffect(() => { + isDisposed.current = false; terminal.current = createTerminal(); fitAddon.current = new FitAddon(); @@ -90,6 +143,7 @@ export const useTerminal = () => { } return () => { + isDisposed.current = true; terminal.current?.dispose(); lastCommandIndex.current = 0; }; @@ -117,7 +171,10 @@ export const useTerminal = () => { let resizeObserver: ResizeObserver | null = null; resizeObserver = new ResizeObserver(() => { - fitAddon.current?.fit(); + // Use requestAnimationFrame to debounce resize events and ensure DOM is ready + requestAnimationFrame(() => { + fitTerminalSafely(); + }); }); if (ref.current) { @@ -127,7 +184,7 @@ export const useTerminal = () => { return () => { resizeObserver?.disconnect(); }; - }, []); + }, [fitTerminalSafely]); return ref; }; diff --git a/frontend/src/hooks/use-tracking.ts b/frontend/src/hooks/use-tracking.ts index 0dfc0f0705b7..d04cdbb81a63 100644 --- a/frontend/src/hooks/use-tracking.ts +++ b/frontend/src/hooks/use-tracking.ts @@ -17,7 +17,7 @@ export const useTracking = () => { app_surface: config?.APP_MODE || "unknown", plan_tier: null, current_url: window.location.href, - user_email: settings?.EMAIL || settings?.GIT_USER_EMAIL || null, + user_email: settings?.email || settings?.git_user_email || null, }; const trackLoginButtonClick = ({ provider }: { provider: Provider }) => { diff --git a/frontend/src/hooks/use-user-providers.ts b/frontend/src/hooks/use-user-providers.ts index d60102c2e046..c09130990bcd 100644 --- a/frontend/src/hooks/use-user-providers.ts +++ b/frontend/src/hooks/use-user-providers.ts @@ -6,8 +6,8 @@ export const useUserProviders = () => { const { data: settings, isLoading: isLoadingSettings } = useSettings(); const providers = React.useMemo( - () => convertRawProvidersToList(settings?.PROVIDER_TOKENS_SET), - [settings?.PROVIDER_TOKENS_SET], + () => convertRawProvidersToList(settings?.provider_tokens_set), + [settings?.provider_tokens_set], ); return { diff --git a/frontend/src/i18n/declaration.ts b/frontend/src/i18n/declaration.ts index b62dd9c1d925..1b330730d942 100644 --- a/frontend/src/i18n/declaration.ts +++ b/frontend/src/i18n/declaration.ts @@ -436,6 +436,8 @@ export enum I18nKey { BUTTON$CREATE = "BUTTON$CREATE", BUTTON$DELETE = "BUTTON$DELETE", BUTTON$COPY_TO_CLIPBOARD = "BUTTON$COPY_TO_CLIPBOARD", + BUTTON$HOME = "BUTTON$HOME", + BUTTON$OPEN_IN_NEW_TAB = "BUTTON$OPEN_IN_NEW_TAB", BUTTON$REFRESH = "BUTTON$REFRESH", ERROR$REQUIRED_FIELD = "ERROR$REQUIRED_FIELD", PLANNER$EMPTY_MESSAGE = "PLANNER$EMPTY_MESSAGE", @@ -532,6 +534,8 @@ export enum I18nKey { SUGGESTIONS$ADD_DOCS = "SUGGESTIONS$ADD_DOCS", SUGGESTIONS$ADD_DOCKERFILE = "SUGGESTIONS$ADD_DOCKERFILE", STATUS$CONNECTED = "STATUS$CONNECTED", + STATUS$CONNECTION_LOST = "STATUS$CONNECTION_LOST", + STATUS$DISCONNECTED_REFRESH_PAGE = "STATUS$DISCONNECTED_REFRESH_PAGE", BROWSER$NO_PAGE_LOADED = "BROWSER$NO_PAGE_LOADED", USER$AVATAR_PLACEHOLDER = "USER$AVATAR_PLACEHOLDER", ACCOUNT_SETTINGS$LOGOUT = "ACCOUNT_SETTINGS$LOGOUT", @@ -638,17 +642,16 @@ export enum I18nKey { TOS$CONTINUE = "TOS$CONTINUE", TOS$ERROR_ACCEPTING = "TOS$ERROR_ACCEPTING", TIPS$CUSTOMIZE_MICROAGENT = "TIPS$CUSTOMIZE_MICROAGENT", - CONVERSATION$SHOW_MICROAGENTS = "CONVERSATION$SHOW_MICROAGENTS", - CONVERSATION$NO_MICROAGENTS = "CONVERSATION$NO_MICROAGENTS", + CONVERSATION$NO_SKILLS = "CONVERSATION$NO_SKILLS", 
CONVERSATION$FAILED_TO_FETCH_MICROAGENTS = "CONVERSATION$FAILED_TO_FETCH_MICROAGENTS", MICROAGENTS_MODAL$TITLE = "MICROAGENTS_MODAL$TITLE", - MICROAGENTS_MODAL$WARNING = "MICROAGENTS_MODAL$WARNING", - MICROAGENTS_MODAL$TRIGGERS = "MICROAGENTS_MODAL$TRIGGERS", + SKILLS_MODAL$WARNING = "SKILLS_MODAL$WARNING", + COMMON$TRIGGERS = "COMMON$TRIGGERS", MICROAGENTS_MODAL$INPUTS = "MICROAGENTS_MODAL$INPUTS", MICROAGENTS_MODAL$TOOLS = "MICROAGENTS_MODAL$TOOLS", - MICROAGENTS_MODAL$CONTENT = "MICROAGENTS_MODAL$CONTENT", - MICROAGENTS_MODAL$NO_CONTENT = "MICROAGENTS_MODAL$NO_CONTENT", - MICROAGENTS_MODAL$FETCH_ERROR = "MICROAGENTS_MODAL$FETCH_ERROR", + COMMON$CONTENT = "COMMON$CONTENT", + SKILLS_MODAL$NO_CONTENT = "SKILLS_MODAL$NO_CONTENT", + COMMON$FETCH_ERROR = "COMMON$FETCH_ERROR", TIPS$SETUP_SCRIPT = "TIPS$SETUP_SCRIPT", TIPS$VSCODE_INSTANCE = "TIPS$VSCODE_INSTANCE", TIPS$SAVE_WORK = "TIPS$SAVE_WORK", @@ -954,4 +957,7 @@ export enum I18nKey { COMMON$CODE_AGENT_DESCRIPTION = "COMMON$CODE_AGENT_DESCRIPTION", COMMON$PLAN_AGENT_DESCRIPTION = "COMMON$PLAN_AGENT_DESCRIPTION", PLANNING_AGENTT$PLANNING_AGENT_INITIALIZED = "PLANNING_AGENTT$PLANNING_AGENT_INITIALIZED", + OBSERVATION_MESSAGE$SKILL_READY = "OBSERVATION_MESSAGE$SKILL_READY", + CONVERSATION$SHOW_SKILLS = "CONVERSATION$SHOW_SKILLS", + SKILLS_MODAL$TITLE = "SKILLS_MODAL$TITLE", } diff --git a/frontend/src/i18n/translation.json b/frontend/src/i18n/translation.json index c43bb8dc07eb..a421de5ddfc4 100644 --- a/frontend/src/i18n/translation.json +++ b/frontend/src/i18n/translation.json @@ -6975,6 +6975,38 @@ "es": "Copiar al portapapeles", "tr": "Panoya Kopyala" }, + "BUTTON$HOME": { + "en": "Home", + "ja": "ホーム", + "zh-CN": "主页", + "zh-TW": "首頁", + "ko-KR": "홈", + "no": "Hjem", + "it": "Home", + "pt": "Início", + "es": "Inicio", + "ar": "الرئيسية", + "fr": "Accueil", + "tr": "Ana Sayfa", + "de": "Startseite", + "uk": "Головна" + }, + "BUTTON$OPEN_IN_NEW_TAB": { + "en": "Open in New Tab", + "ja": "新しいタブで開く", + "zh-CN": "在新标签页中打开", + "zh-TW": "在新分頁中開啟", + "ko-KR": "새 탭에서 열기", + "no": "Åpne i ny fane", + "it": "Apri in una nuova scheda", + "pt": "Abrir em nova aba", + "es": "Abrir en nueva pestaña", + "ar": "فتح في علامة تبويب جديدة", + "fr": "Ouvrir dans un nouvel onglet", + "tr": "Yeni Sekmede Aç", + "de": "In neuem Tab öffnen", + "uk": "Відкрити в новій вкладці" + }, "BUTTON$REFRESH": { "en": "Refresh", "ja": "更新", @@ -8511,6 +8543,38 @@ "tr": "Bağlandı", "uk": "Підключено" }, + "STATUS$CONNECTION_LOST": { + "en": "Connection lost", + "ja": "接続が切断されました", + "zh-CN": "连接已断开", + "zh-TW": "連接已斷開", + "ko-KR": "연결이 끊어졌습니다", + "de": "Verbindung verloren", + "no": "Tilkobling mistet", + "it": "Connessione persa", + "pt": "Conexão perdida", + "es": "Conexión perdida", + "ar": "فُقد الاتصال", + "fr": "Connexion perdue", + "tr": "Bağlantı kesildi", + "uk": "Втрачено з'єднання" + }, + "STATUS$DISCONNECTED_REFRESH_PAGE": { + "en": "Disconnected. Please refresh the page", + "ja": "切断されました。ページを更新してください", + "zh-CN": "已断开连接。请刷新页面", + "zh-TW": "已斷開連接。請重新整理頁面", + "ko-KR": "연결이 끊어졌습니다. 페이지를 새로고침하세요", + "de": "Getrennt. Bitte aktualisieren Sie die Seite", + "no": "Koblet fra. Vennligst oppdater siden", + "it": "Disconnesso. Si prega di aggiornare la pagina", + "pt": "Desconectado. Por favor, atualize a página", + "es": "Desconectado. Por favor, actualice la página", + "ar": "تم قطع الاتصال. يرجى تحديث الصفحة", + "fr": "Déconnecté. Veuillez actualiser la page", + "tr": "Bağlantı kesildi. Lütfen sayfayı yenileyin", + "uk": "Відключено. 
Будь ласка, оновіть сторінку" + }, "BROWSER$NO_PAGE_LOADED": { "en": "No page loaded", "ja": "ブラウザは空です", @@ -10207,37 +10271,21 @@ "tr": "Kullanılabilir bir mikro ajan kullanarak OpenHands'i deponuz için özelleştirebilirsiniz. OpenHands'ten deponun açıklamasını, kodun nasıl çalıştırılacağı dahil, .openhands/microagents/repo.md dosyasına koymasını isteyin.", "uk": "Ви можете налаштувати OpenHands для свого репозиторію за допомогою доступного мікроагента. Попросіть OpenHands розмістити опис репозиторію, включаючи інформацію про те, як запустити код, у файлі .openhands/microagents/repo.md." }, - "CONVERSATION$SHOW_MICROAGENTS": { - "en": "Show Available Microagents", - "ja": "利用可能なマイクロエージェントを表示", - "zh-CN": "显示可用微代理", - "zh-TW": "顯示可用微代理", - "ko-KR": "사용 가능한 마이크로에이전트 표시", - "no": "Vis tilgjengelige mikroagenter", - "ar": "عرض الوكلاء المصغرين المتاحة", - "de": "Verfügbare Mikroagenten anzeigen", - "fr": "Afficher les micro-agents disponibles", - "it": "Mostra microagenti disponibili", - "pt": "Mostrar microagentes disponíveis", - "es": "Mostrar microagentes disponibles", - "tr": "Kullanılabilir mikro ajanları göster", - "uk": "Показати доступних мікроагентів" - }, - "CONVERSATION$NO_MICROAGENTS": { - "en": "No available microagents found for this conversation.", - "ja": "この会話用の利用可能なマイクロエージェントが見つかりませんでした。", - "zh-CN": "未找到此对话的可用微代理。", - "zh-TW": "未找到此對話的可用微代理。", - "ko-KR": "이 대화에 대한 사용 가능한 마이크로에이전트를 찾을 수 없습니다.", - "no": "Ingen tilgjengelige mikroagenter funnet for denne samtalen.", - "ar": "لم يتم العثور على وكلاء مصغرين متاحة لهذه المحادثة.", - "de": "Keine verfügbaren Mikroagenten für dieses Gespräch gefunden.", - "fr": "Aucun micro-agent disponible trouvé pour cette conversation.", - "it": "Nessun microagente disponibile trovato per questa conversazione.", - "pt": "Nenhum microagente disponível encontrado para esta conversa.", - "es": "No se encontraron microagentes disponibles para esta conversación.", - "tr": "Bu konuşma için kullanılabilir mikro ajan bulunamadı.", - "uk": "Для цієї розмови не знайдено доступних мікроагентів." + "CONVERSATION$NO_SKILLS": { + "en": "No available skills found for this conversation.", + "ja": "この会話には利用可能なスキルが見つかりません。", + "zh-CN": "本会话未找到可用技能。", + "zh-TW": "此對話中未找到可用技能。", + "ko-KR": "이 대화에서 사용 가능한 스킬을 찾을 수 없습니다.", + "no": "Ingen tilgjengelige ferdigheter ble funnet for denne samtalen.", + "ar": "لم يتم العثور على مهارات متاحة لهذه المحادثة.", + "de": "Für diese Unterhaltung wurden keine verfügbaren Skills gefunden.", + "fr": "Aucune compétence disponible trouvée pour cette conversation.", + "it": "Nessuna abilità disponibile trovata per questa conversazione.", + "pt": "Nenhuma habilidade disponível encontrada para esta conversa.", + "es": "No se encontraron habilidades disponibles para esta conversación.", + "tr": "Bu sohbet için kullanılabilir yetenek bulunamadı.", + "uk": "У цій розмові не знайдено доступних навичок." 
}, "CONVERSATION$FAILED_TO_FETCH_MICROAGENTS": { "en": "Failed to fetch available microagents", @@ -10271,23 +10319,23 @@ "tr": "Kullanılabilir mikro ajanlar", "uk": "Доступні мікроагенти" }, - "MICROAGENTS_MODAL$WARNING": { - "en": "If you update the microagents, you will need to stop the conversation and then click on the refresh button to see the changes.", - "ja": "マイクロエージェントを更新する場合、会話を停止してから更新ボタンをクリックして変更を確認する必要があります。", - "zh-CN": "如果您更新微代理,您需要停止对话,然后点击刷新按钮以查看更改。", - "zh-TW": "如果您更新微代理,您需要停止對話,然後點擊重新整理按鈕以查看更改。", - "ko-KR": "마이크로에이전트를 업데이트하는 경우 대화를 중지한 후 새로고침 버튼을 클릭하여 변경사항을 확인해야 합니다.", - "no": "Hvis du oppdaterer mikroagentene, må du stoppe samtalen og deretter klikke på oppdater-knappen for å se endringene.", - "ar": "إذا قمت بتحديث الوكلاء المصغرين، فستحتاج إلى إيقاف المحادثة ثم النقر على زر التحديث لرؤية التغييرات.", - "de": "Wenn Sie die Mikroagenten aktualisieren, müssen Sie das Gespräch beenden und dann auf die Aktualisieren-Schaltfläche klicken, um die Änderungen zu sehen.", - "fr": "Si vous mettez à jour les micro-agents, vous devrez arrêter la conversation puis cliquer sur le bouton actualiser pour voir les changements.", - "it": "Se aggiorni i microagenti, dovrai fermare la conversazione e poi cliccare sul pulsante aggiorna per vedere le modifiche.", - "pt": "Se você atualizar os microagentes, precisará parar a conversa e depois clicar no botão atualizar para ver as alterações.", - "es": "Si actualiza los microagentes, necesitará detener la conversación y luego hacer clic en el botón actualizar para ver los cambios.", - "tr": "Mikro ajanları güncellerseniz, konuşmayı durdurmanız ve ardından değişiklikleri görmek için yenile düğmesine tıklamanız gerekecektir.", - "uk": "Якщо ви оновите мікроагенти, вам потрібно буде зупинити розмову, а потім натиснути кнопку оновлення, щоб побачити зміни." - }, - "MICROAGENTS_MODAL$TRIGGERS": { + "SKILLS_MODAL$WARNING": { + "en": "If you update the skills, you will need to stop the conversation and then click on the refresh button to see the changes.", + "ja": "スキルを更新する場合、会話を停止し、その後、更新ボタンをクリックして変更を反映させる必要があります。", + "zh-CN": "如果您更新技能,需要先停止对话,然后点击刷新按钮以查看更改。", + "zh-TW": "如果您更新技能,需要先停止對話,然後點擊刷新按鈕以查看更改。", + "ko-KR": "스킬을 업데이트하면 대화를 중단한 후 새로 고침 버튼을 클릭해야 변경 사항을 볼 수 있습니다.", + "no": "Hvis du oppdaterer ferdighetene, må du stoppe samtalen og deretter klikke på oppdateringsknappen for å se endringene.", + "ar": "إذا قمت بتحديث المهارات، ستحتاج إلى إيقاف المحادثة ثم النقر على زر التحديث لرؤية التغييرات.", + "de": "Wenn Sie die Fähigkeiten aktualisieren, müssen Sie das Gespräch beenden und dann auf die Schaltfläche 'Aktualisieren' klicken, um die Änderungen zu sehen.", + "fr": "Si vous mettez à jour les compétences, vous devrez arrêter la conversation, puis cliquer sur le bouton d’actualisation pour voir les modifications.", + "it": "Se aggiorni le competenze, dovrai interrompere la conversazione e poi cliccare sul pulsante di aggiornamento per vedere le modifiche.", + "pt": "Se você atualizar as habilidades, precisará interromper a conversa e clicar no botão de atualizar para ver as mudanças.", + "es": "Si actualizas las habilidades, deberás detener la conversación y luego hacer clic en el botón de actualizar para ver los cambios.", + "tr": "Yetenekleri güncellerseniz, değişiklikleri görmek için sohbeti durdurmalı ve ardından yenile düğmesine tıklamalısınız.", + "uk": "Якщо ви оновите навички, вам потрібно буде зупинити розмову, а потім натиснути кнопку оновлення, щоб побачити зміни." 
+ }, + "COMMON$TRIGGERS": { "en": "Triggers", "ja": "トリガー", "zh-CN": "触发器", @@ -10335,7 +10383,7 @@ "tr": "Araçlar", "uk": "Інструменти" }, - "MICROAGENTS_MODAL$CONTENT": { + "COMMON$CONTENT": { "en": "Content", "ja": "コンテンツ", "zh-CN": "内容", @@ -10351,37 +10399,37 @@ "tr": "İçerik", "uk": "Вміст" }, - "MICROAGENTS_MODAL$NO_CONTENT": { - "en": "Microagent has no content", - "ja": "マイクロエージェントにコンテンツがありません", - "zh-CN": "微代理没有内容", - "zh-TW": "微代理沒有內容", - "ko-KR": "마이크로에이전트에 콘텐츠가 없습니다", - "no": "Mikroagenten har ikke innhold", - "ar": "الوكيل المصغر ليس لديه محتوى", - "de": "Mikroagent hat keinen Inhalt", - "fr": "Le micro-agent n'a pas de contenu", - "it": "Il microagente non ha contenuto", - "pt": "Microagente não tem conteúdo", - "es": "El microagente no tiene contenido", - "tr": "Mikroajanın içeriği yok", - "uk": "Мікроагент не має вмісту" - }, - "MICROAGENTS_MODAL$FETCH_ERROR": { - "en": "Failed to fetch microagents. Please try again later.", - "ja": "マイクロエージェントの取得に失敗しました。後でもう一度お試しください。", - "zh-CN": "获取微代理失败。请稍后再试。", - "zh-TW": "獲取微代理失敗。請稍後再試。", - "ko-KR": "마이크로에이전트를 가져오지 못했습니다. 나중에 다시 시도해 주세요.", - "no": "Kunne ikke hente mikroagenter. Prøv igjen senere.", - "ar": "فشل في جلب الوكلاء المصغرين. يرجى المحاولة مرة أخرى لاحقًا.", - "de": "Mikroagenten konnten nicht abgerufen werden. Bitte versuchen Sie es später erneut.", - "fr": "Échec de la récupération des micro-agents. Veuillez réessayer plus tard.", - "it": "Impossibile recuperare i microagenti. Riprova più tardi.", - "pt": "Falha ao buscar microagentes. Por favor, tente novamente mais tarde.", - "es": "Error al obtener microagentes. Por favor, inténtelo de nuevo más tarde.", - "tr": "Mikroajanlar getirilemedi. Lütfen daha sonra tekrar deneyin.", - "uk": "Не вдалося отримати мікроагентів. Будь ласка, спробуйте пізніше." + "SKILLS_MODAL$NO_CONTENT": { + "en": "Skill has no content", + "ja": "スキルにはコンテンツがありません", + "zh-CN": "技能没有内容", + "zh-TW": "技能沒有內容", + "ko-KR": "스킬에 컨텐츠가 없습니다", + "no": "Ferdighet har ikke noe innhold", + "ar": "المهارة ليس لديها محتوى", + "de": "Die Fähigkeit hat keinen Inhalt", + "fr": "La compétence n'a pas de contenu", + "it": "La competenza non ha contenuti", + "pt": "A habilidade não possui conteúdo", + "es": "La habilidad no tiene contenido", + "tr": "Beceride içerik yok", + "uk": "У навички немає вмісту" + }, + "COMMON$FETCH_ERROR": { + "en": "Failed to fetch skills. Please try again later.", + "ja": "スキルの取得に失敗しました。後でもう一度お試しください。", + "zh-CN": "获取技能失败。请稍后再试。", + "zh-TW": "取得技能失敗。請稍後再試。", + "ko-KR": "스킬을 가져오지 못했습니다. 나중에 다시 시도해주세요.", + "no": "Kunne ikke hente ferdigheter. Prøv igjen senere.", + "ar": "فشل في جلب المهارات. يرجى المحاولة لاحقًا.", + "de": "Die Fähigkeiten konnten nicht abgerufen werden. Bitte versuchen Sie es später erneut.", + "fr": "Échec de la récupération des compétences. Veuillez réessayer plus tard.", + "it": "Impossibile recuperare le competenze. Riprova più tardi.", + "pt": "Falha ao buscar as habilidades. Por favor, tente novamente mais tarde.", + "es": "No se pudieron obtener las habilidades. Por favor, inténtalo de nuevo más tarde.", + "tr": "Beceriler alınamadı. Lütfen daha sonra tekrar deneyin.", + "uk": "Не вдалося отримати навички. Будь ласка, спробуйте пізніше." 
}, "TIPS$SETUP_SCRIPT": { "en": "You can add .openhands/setup.sh to your repository to automatically run a setup script every time you start an OpenHands conversation.", @@ -15262,5 +15310,53 @@ "tr": "Planlama ajanı başlatıldı", "de": "Planungsagent wurde initialisiert", "uk": "Агент планування ініціалізовано" + }, + "OBSERVATION_MESSAGE$SKILL_READY": { + "en": "Skill Ready", + "ja": "スキル準備完了", + "zh-CN": "技能已就绪", + "zh-TW": "技能已就緒", + "ko-KR": "스킬 준비 완료", + "no": "Ferdighet klar", + "it": "Abilità pronta", + "pt": "Habilidade pronta", + "es": "Habilidad lista", + "ar": "المهارة جاهزة", + "fr": "Compétence prête", + "tr": "Yetenek hazır", + "de": "Fähigkeit bereit", + "uk": "Навичка готова" + }, + "CONVERSATION$SHOW_SKILLS": { + "en": "Show Available Skills", + "ja": "利用可能なスキルを表示", + "zh-CN": "显示可用技能", + "zh-TW": "顯示可用技能", + "ko-KR": "사용 가능한 스킬 표시", + "no": "Vis tilgjengelige ferdigheter", + "ar": "عرض المهارات المتاحة", + "de": "Verfügbare Fähigkeiten anzeigen", + "fr": "Afficher les compétences disponibles", + "it": "Mostra abilità disponibili", + "pt": "Mostrar habilidades disponíveis", + "es": "Mostrar habilidades disponibles", + "tr": "Kullanılabilir yetenekleri göster", + "uk": "Показати доступні навички" + }, + "SKILLS_MODAL$TITLE": { + "en": "Available Skills", + "ja": "利用可能なスキル", + "zh-CN": "可用技能", + "zh-TW": "可用技能", + "ko-KR": "사용 가능한 스킬", + "no": "Tilgjengelige ferdigheter", + "ar": "المهارات المتاحة", + "de": "Verfügbare Fähigkeiten", + "fr": "Compétences disponibles", + "it": "Abilità disponibili", + "pt": "Habilidades disponíveis", + "es": "Habilidades disponibles", + "tr": "Kullanılabilir yetenekler", + "uk": "Доступні навички" } } diff --git a/frontend/src/icons/loading.svg b/frontend/src/icons/loading.svg index 2da678957f05..a5217fd60874 100644 --- a/frontend/src/icons/loading.svg +++ b/frontend/src/icons/loading.svg @@ -1,3 +1,3 @@ - - + + diff --git a/frontend/src/icons/u-refresh.svg b/frontend/src/icons/u-refresh.svg new file mode 100644 index 000000000000..9e3a2051d262 --- /dev/null +++ b/frontend/src/icons/u-refresh.svg @@ -0,0 +1,3 @@ + + + diff --git a/frontend/src/mocks/analytics-handlers.ts b/frontend/src/mocks/analytics-handlers.ts new file mode 100644 index 000000000000..09b3ac0c60a6 --- /dev/null +++ b/frontend/src/mocks/analytics-handlers.ts @@ -0,0 +1,7 @@ +import { http, HttpResponse } from "msw"; + +export const ANALYTICS_HANDLERS = [ + http.post("https://us.i.posthog.com/e", async () => + HttpResponse.json(null, { status: 200 }), + ), +]; diff --git a/frontend/src/mocks/auth-handlers.ts b/frontend/src/mocks/auth-handlers.ts new file mode 100644 index 000000000000..bb4baf2397f9 --- /dev/null +++ b/frontend/src/mocks/auth-handlers.ts @@ -0,0 +1,23 @@ +import { http, HttpResponse } from "msw"; +import { GitUser } from "#/types/git"; + +export const AUTH_HANDLERS = [ + http.get("/api/user/info", () => { + const user: GitUser = { + id: "1", + login: "octocat", + avatar_url: "https://avatars.githubusercontent.com/u/583231?v=4", + company: "GitHub", + email: "placeholder@placeholder.placeholder", + name: "monalisa octocat", + }; + + return HttpResponse.json(user); + }), + + http.post("/api/authenticate", async () => + HttpResponse.json({ message: "Authenticated" }), + ), + + http.post("/api/logout", () => HttpResponse.json(null, { status: 200 })), +]; diff --git a/frontend/src/mocks/conversation-handlers.ts b/frontend/src/mocks/conversation-handlers.ts new file mode 100644 index 000000000000..1ec536fd92c8 --- /dev/null +++ 
b/frontend/src/mocks/conversation-handlers.ts @@ -0,0 +1,118 @@ +import { http, delay, HttpResponse } from "msw"; +import { Conversation, ResultSet } from "#/api/open-hands.types"; + +const conversations: Conversation[] = [ + { + conversation_id: "1", + title: "My New Project", + selected_repository: null, + git_provider: null, + selected_branch: null, + last_updated_at: new Date().toISOString(), + created_at: new Date().toISOString(), + status: "RUNNING", + runtime_status: "STATUS$READY", + url: null, + session_api_key: null, + }, + { + conversation_id: "2", + title: "Repo Testing", + selected_repository: "octocat/hello-world", + git_provider: "github", + selected_branch: null, + last_updated_at: new Date( + Date.now() - 2 * 24 * 60 * 60 * 1000, + ).toISOString(), + created_at: new Date(Date.now() - 2 * 24 * 60 * 60 * 1000).toISOString(), + status: "STOPPED", + runtime_status: null, + url: null, + session_api_key: null, + }, + { + conversation_id: "3", + title: "Another Project", + selected_repository: "octocat/earth", + git_provider: null, + selected_branch: "main", + last_updated_at: new Date( + Date.now() - 5 * 24 * 60 * 60 * 1000, + ).toISOString(), + created_at: new Date(Date.now() - 5 * 24 * 60 * 60 * 1000).toISOString(), + status: "STOPPED", + runtime_status: null, + url: null, + session_api_key: null, + }, +]; + +const CONVERSATIONS = new Map( + conversations.map((c) => [c.conversation_id, c]), +); + +export const CONVERSATION_HANDLERS = [ + http.get("/api/conversations", async () => { + const values = Array.from(CONVERSATIONS.values()); + const results: ResultSet = { + results: values, + next_page_id: null, + }; + return HttpResponse.json(results); + }), + + http.get("/api/conversations/:conversationId", async ({ params }) => { + const conversationId = params.conversationId as string; + const project = CONVERSATIONS.get(conversationId); + if (project) return HttpResponse.json(project); + return HttpResponse.json(null, { status: 404 }); + }), + + http.post("/api/conversations", async () => { + await delay(); + const conversation: Conversation = { + conversation_id: (Math.random() * 100).toString(), + title: "New Conversation", + selected_repository: null, + git_provider: null, + selected_branch: null, + last_updated_at: new Date().toISOString(), + created_at: new Date().toISOString(), + status: "RUNNING", + runtime_status: "STATUS$READY", + url: null, + session_api_key: null, + }; + CONVERSATIONS.set(conversation.conversation_id, conversation); + return HttpResponse.json(conversation, { status: 201 }); + }), + + http.patch( + "/api/conversations/:conversationId", + async ({ params, request }) => { + const conversationId = params.conversationId as string; + const conversation = CONVERSATIONS.get(conversationId); + + if (conversation) { + const body = await request.json(); + if (typeof body === "object" && body?.title) { + CONVERSATIONS.set(conversationId, { + ...conversation, + title: body.title, + }); + return HttpResponse.json(null, { status: 200 }); + } + } + return HttpResponse.json(null, { status: 404 }); + }, + ), + + http.delete("/api/conversations/:conversationId", async ({ params }) => { + const conversationId = params.conversationId as string; + if (CONVERSATIONS.has(conversationId)) { + CONVERSATIONS.delete(conversationId); + return HttpResponse.json(null, { status: 200 }); + } + return HttpResponse.json(null, { status: 404 }); + }), +]; diff --git a/frontend/src/mocks/feedback-handlers.ts b/frontend/src/mocks/feedback-handlers.ts new file mode 100644 index 
000000000000..8e4e602b33bd --- /dev/null +++ b/frontend/src/mocks/feedback-handlers.ts @@ -0,0 +1,15 @@ +import { http, delay, HttpResponse } from "msw"; + +export const FEEDBACK_HANDLERS = [ + http.post("/api/submit-feedback", async () => { + await delay(1200); + return HttpResponse.json({ + statusCode: 200, + body: { message: "Success", link: "fake-url.com", password: "abc123" }, + }); + }), + + http.post("/api/submit-feedback", async () => + HttpResponse.json({ statusCode: 200 }, { status: 200 }), + ), +]; diff --git a/frontend/src/mocks/handlers.ts b/frontend/src/mocks/handlers.ts index 04f4c03ca9dc..999903ba93dd 100644 --- a/frontend/src/mocks/handlers.ts +++ b/frontend/src/mocks/handlers.ts @@ -1,146 +1,17 @@ -import { delay, http, HttpResponse } from "msw"; -import { GetConfigResponse } from "#/api/option-service/option.types"; -import { Conversation, ResultSet } from "#/api/open-hands.types"; -import { DEFAULT_SETTINGS } from "#/services/settings"; import { STRIPE_BILLING_HANDLERS } from "./billing-handlers"; -import { Provider } from "#/types/settings"; -import { - ApiSettings, - PostApiSettings, -} from "#/settings-service/settings.types"; import { FILE_SERVICE_HANDLERS } from "./file-service-handlers"; -import { GitUser } from "#/types/git"; import { TASK_SUGGESTIONS_HANDLERS } from "./task-suggestions-handlers"; import { SECRETS_HANDLERS } from "./secrets-handlers"; import { GIT_REPOSITORY_HANDLERS } from "./git-repository-handlers"; - -export const MOCK_DEFAULT_USER_SETTINGS: ApiSettings | PostApiSettings = { - llm_model: DEFAULT_SETTINGS.LLM_MODEL, - llm_base_url: DEFAULT_SETTINGS.LLM_BASE_URL, - llm_api_key: null, - llm_api_key_set: DEFAULT_SETTINGS.LLM_API_KEY_SET, - search_api_key_set: DEFAULT_SETTINGS.SEARCH_API_KEY_SET, - agent: DEFAULT_SETTINGS.AGENT, - language: DEFAULT_SETTINGS.LANGUAGE, - confirmation_mode: DEFAULT_SETTINGS.CONFIRMATION_MODE, - security_analyzer: DEFAULT_SETTINGS.SECURITY_ANALYZER, - remote_runtime_resource_factor: - DEFAULT_SETTINGS.REMOTE_RUNTIME_RESOURCE_FACTOR, - provider_tokens_set: {}, - enable_default_condenser: DEFAULT_SETTINGS.ENABLE_DEFAULT_CONDENSER, - condenser_max_size: DEFAULT_SETTINGS.CONDENSER_MAX_SIZE, - enable_sound_notifications: DEFAULT_SETTINGS.ENABLE_SOUND_NOTIFICATIONS, - enable_proactive_conversation_starters: - DEFAULT_SETTINGS.ENABLE_PROACTIVE_CONVERSATION_STARTERS, - enable_solvability_analysis: DEFAULT_SETTINGS.ENABLE_SOLVABILITY_ANALYSIS, - user_consents_to_analytics: DEFAULT_SETTINGS.USER_CONSENTS_TO_ANALYTICS, - max_budget_per_task: DEFAULT_SETTINGS.MAX_BUDGET_PER_TASK, -}; - -const MOCK_USER_PREFERENCES: { - settings: ApiSettings | PostApiSettings | null; -} = { - settings: null, -}; - -/** - * Set the user settings to the default settings - * - * Useful for resetting the settings in tests - */ -export const resetTestHandlersMockSettings = () => { - MOCK_USER_PREFERENCES.settings = MOCK_DEFAULT_USER_SETTINGS; -}; - -const conversations: Conversation[] = [ - { - conversation_id: "1", - title: "My New Project", - selected_repository: null, - git_provider: null, - selected_branch: null, - last_updated_at: new Date().toISOString(), - created_at: new Date().toISOString(), - status: "RUNNING", - runtime_status: "STATUS$READY", - url: null, - session_api_key: null, - }, - { - conversation_id: "2", - title: "Repo Testing", - selected_repository: "octocat/hello-world", - git_provider: "github", - selected_branch: null, - // 2 days ago - last_updated_at: new Date( - Date.now() - 2 * 24 * 60 * 60 * 1000, - ).toISOString(), - 
created_at: new Date(Date.now() - 2 * 24 * 60 * 60 * 1000).toISOString(), - status: "STOPPED", - runtime_status: null, - url: null, - session_api_key: null, - }, - { - conversation_id: "3", - title: "Another Project", - selected_repository: "octocat/earth", - git_provider: null, - selected_branch: "main", - // 5 days ago - last_updated_at: new Date( - Date.now() - 5 * 24 * 60 * 60 * 1000, - ).toISOString(), - created_at: new Date(Date.now() - 5 * 24 * 60 * 60 * 1000).toISOString(), - status: "STOPPED", - runtime_status: null, - url: null, - session_api_key: null, - }, -]; - -const CONVERSATIONS = new Map( - conversations.map((conversation) => [ - conversation.conversation_id, - conversation, - ]), -); - -const openHandsHandlers = [ - http.get("/api/options/models", async () => - HttpResponse.json([ - "gpt-3.5-turbo", - "gpt-4o", - "gpt-4o-mini", - "anthropic/claude-3.5", - "anthropic/claude-sonnet-4-20250514", - "anthropic/claude-sonnet-4-5-20250929", - "anthropic/claude-haiku-4-5-20251001", - "openhands/claude-sonnet-4-20250514", - "openhands/claude-sonnet-4-5-20250929", - "openhands/claude-haiku-4-5-20251001", - "sambanova/Meta-Llama-3.1-8B-Instruct", - ]), - ), - - http.get("/api/options/agents", async () => - HttpResponse.json(["CodeActAgent", "CoActAgent"]), - ), - - http.get("/api/options/security-analyzers", async () => - HttpResponse.json(["llm", "none"]), - ), - - http.post("http://localhost:3001/api/submit-feedback", async () => { - await delay(1200); - - return HttpResponse.json({ - statusCode: 200, - body: { message: "Success", link: "fake-url.com", password: "abc123" }, - }); - }), -]; +import { + SETTINGS_HANDLERS, + MOCK_DEFAULT_USER_SETTINGS, + resetTestHandlersMockSettings, +} from "./settings-handlers"; +import { CONVERSATION_HANDLERS } from "./conversation-handlers"; +import { AUTH_HANDLERS } from "./auth-handlers"; +import { FEEDBACK_HANDLERS } from "./feedback-handlers"; +import { ANALYTICS_HANDLERS } from "./analytics-handlers"; export const handlers = [ ...STRIPE_BILLING_HANDLERS, @@ -148,192 +19,11 @@ export const handlers = [ ...TASK_SUGGESTIONS_HANDLERS, ...SECRETS_HANDLERS, ...GIT_REPOSITORY_HANDLERS, - ...openHandsHandlers, - http.get("/api/user/info", () => { - const user: GitUser = { - id: "1", - login: "octocat", - avatar_url: "https://avatars.githubusercontent.com/u/583231?v=4", - company: "GitHub", - email: "placeholder@placeholder.placeholder", - name: "monalisa octocat", - }; - - return HttpResponse.json(user); - }), - http.post("http://localhost:3001/api/submit-feedback", async () => - HttpResponse.json({ statusCode: 200 }, { status: 200 }), - ), - http.post("https://us.i.posthog.com/e", async () => - HttpResponse.json(null, { status: 200 }), - ), - http.get("/api/options/config", () => { - const mockSaas = import.meta.env.VITE_MOCK_SAAS === "true"; - - const config: GetConfigResponse = { - APP_MODE: mockSaas ? 
"saas" : "oss", - GITHUB_CLIENT_ID: "fake-github-client-id", - POSTHOG_CLIENT_KEY: "fake-posthog-client-key", - FEATURE_FLAGS: { - ENABLE_BILLING: false, - HIDE_LLM_SETTINGS: mockSaas, - ENABLE_JIRA: false, - ENABLE_JIRA_DC: false, - ENABLE_LINEAR: false, - }, - // Uncomment the following to test the maintenance banner - // MAINTENANCE: { - // startTime: "2024-01-15T10:00:00-05:00", // EST timestamp - // }, - }; - - return HttpResponse.json(config); - }), - http.get("/api/settings", async () => { - await delay(); - - const { settings } = MOCK_USER_PREFERENCES; - - if (!settings) return HttpResponse.json(null, { status: 404 }); - - return HttpResponse.json(settings); - }), - http.post("/api/settings", async ({ request }) => { - await delay(); - const body = await request.json(); - - if (body) { - const current = MOCK_USER_PREFERENCES.settings || { - ...MOCK_DEFAULT_USER_SETTINGS, - }; - // Persist new values over current/mock defaults - MOCK_USER_PREFERENCES.settings = { - ...current, - ...(body as Partial), - }; - return HttpResponse.json(null, { status: 200 }); - } - - return HttpResponse.json(null, { status: 400 }); - }), - - http.post("/api/authenticate", async () => - HttpResponse.json({ message: "Authenticated" }), - ), - - http.get("/api/conversations", async () => { - const values = Array.from(CONVERSATIONS.values()); - const results: ResultSet = { - results: values, - next_page_id: null, - }; - - return HttpResponse.json(results, { status: 200 }); - }), - - http.delete("/api/conversations/:conversationId", async ({ params }) => { - const { conversationId } = params; - - if (typeof conversationId === "string") { - CONVERSATIONS.delete(conversationId); - return HttpResponse.json(null, { status: 200 }); - } - - return HttpResponse.json(null, { status: 404 }); - }), - - http.patch( - "/api/conversations/:conversationId", - async ({ params, request }) => { - const { conversationId } = params; - - if (typeof conversationId === "string") { - const conversation = CONVERSATIONS.get(conversationId); - - if (conversation) { - const body = await request.json(); - if (typeof body === "object" && body?.title) { - CONVERSATIONS.set(conversationId, { - ...conversation, - title: body.title, - }); - return HttpResponse.json(null, { status: 200 }); - } - } - } - - return HttpResponse.json(null, { status: 404 }); - }, - ), - - http.post("/api/conversations", async () => { - await delay(); - - const conversation: Conversation = { - conversation_id: (Math.random() * 100).toString(), - title: "New Conversation", - selected_repository: null, - git_provider: null, - selected_branch: null, - last_updated_at: new Date().toISOString(), - created_at: new Date().toISOString(), - status: "RUNNING", - runtime_status: "STATUS$READY", - url: null, - session_api_key: null, - }; - - CONVERSATIONS.set(conversation.conversation_id, conversation); - return HttpResponse.json(conversation, { status: 201 }); - }), - - http.get("/api/conversations/:conversationId", async ({ params }) => { - const { conversationId } = params; - - if (typeof conversationId === "string") { - const project = CONVERSATIONS.get(conversationId); - - if (project) { - return HttpResponse.json(project, { status: 200 }); - } - } - - return HttpResponse.json(null, { status: 404 }); - }), - - http.post("/api/logout", () => HttpResponse.json(null, { status: 200 })), - - http.post("/api/reset-settings", async () => { - await delay(); - MOCK_USER_PREFERENCES.settings = { ...MOCK_DEFAULT_USER_SETTINGS }; - return HttpResponse.json(null, { status: 200 }); - 
}), - - http.post("/api/add-git-providers", async ({ request }) => { - const body = await request.json(); - - if (typeof body === "object" && body?.provider_tokens) { - const rawTokens = body.provider_tokens as Record< - string, - { token?: string } - >; - - const providerTokensSet: Partial> = - Object.fromEntries( - Object.entries(rawTokens) - .filter(([, val]) => val && val.token) - .map(([provider]) => [provider as Provider, ""]), - ); - - const newSettings = { - ...(MOCK_USER_PREFERENCES.settings ?? MOCK_DEFAULT_USER_SETTINGS), - provider_tokens_set: providerTokensSet, - }; - MOCK_USER_PREFERENCES.settings = newSettings; - - return HttpResponse.json(true, { status: 200 }); - } - - return HttpResponse.json(null, { status: 400 }); - }), + ...SETTINGS_HANDLERS, + ...CONVERSATION_HANDLERS, + ...AUTH_HANDLERS, + ...FEEDBACK_HANDLERS, + ...ANALYTICS_HANDLERS, ]; + +export { MOCK_DEFAULT_USER_SETTINGS, resetTestHandlersMockSettings }; diff --git a/frontend/src/mocks/mock-ws-helpers.ts b/frontend/src/mocks/mock-ws-helpers.ts index 512045766f5c..ae4e214943fb 100644 --- a/frontend/src/mocks/mock-ws-helpers.ts +++ b/frontend/src/mocks/mock-ws-helpers.ts @@ -184,3 +184,55 @@ export const createMockExecuteBashObservationEvent = ( }, action_id: "bash-action-123", }); + +/** + * Creates a mock BrowserObservation event for testing browser state handling + */ +export const createMockBrowserObservationEvent = ( + screenshotData: string | null = "base64-screenshot-data", + output: string = "Browser action completed", + error: string | null = null, +) => ({ + id: "browser-obs-123", + timestamp: new Date().toISOString(), + source: "environment", + tool_name: "browser_navigate", + tool_call_id: "browser-call-456", + observation: { + kind: "BrowserObservation", + output, + error, + screenshot_data: screenshotData, + }, + action_id: "browser-action-123", +}); + +/** + * Creates a mock BrowserNavigateAction event for testing browser URL extraction + */ +export const createMockBrowserNavigateActionEvent = ( + url: string = "https://example.com", +) => ({ + id: "browser-action-123", + timestamp: new Date().toISOString(), + source: "agent", + thought: [{ type: "text", text: "Navigating to URL" }], + thinking_blocks: [], + action: { + kind: "BrowserNavigateAction", + url, + new_tab: false, + }, + tool_name: "browser_navigate", + tool_call_id: "browser-call-456", + tool_call: { + id: "browser-call-456", + type: "function", + function: { + name: "browser_navigate", + arguments: JSON.stringify({ url, new_tab: false }), + }, + }, + llm_response_id: "llm-response-789", + security_risk: { level: "low" }, +}); diff --git a/frontend/src/mocks/secrets-handlers.ts b/frontend/src/mocks/secrets-handlers.ts index 3d5570943a26..18c4dc98fdeb 100644 --- a/frontend/src/mocks/secrets-handlers.ts +++ b/frontend/src/mocks/secrets-handlers.ts @@ -34,7 +34,7 @@ export const SECRETS_HANDLERS = [ http.post("/api/secrets", async ({ request }) => { const body = (await request.json()) as CustomSecret; - if (typeof body === "object" && body && body.name) { + if (typeof body === "object" && body?.name) { secrets.set(body.name, body); return HttpResponse.json(true); } @@ -48,7 +48,7 @@ export const SECRETS_HANDLERS = [ if (typeof id === "string" && typeof body === "object") { const secret = secrets.get(id); - if (secret && body && body.name) { + if (secret && body?.name) { const newSecret: CustomSecret = { ...secret, ...body }; secrets.delete(id); secrets.set(body.name, newSecret); diff --git a/frontend/src/mocks/settings-handlers.ts 
b/frontend/src/mocks/settings-handlers.ts new file mode 100644 index 000000000000..00de1e9c5d97 --- /dev/null +++ b/frontend/src/mocks/settings-handlers.ts @@ -0,0 +1,153 @@ +import { http, delay, HttpResponse } from "msw"; +import { GetConfigResponse } from "#/api/option-service/option.types"; +import { DEFAULT_SETTINGS } from "#/services/settings"; +import { Provider, Settings } from "#/types/settings"; + +export const MOCK_DEFAULT_USER_SETTINGS: Settings = { + llm_model: DEFAULT_SETTINGS.llm_model, + llm_base_url: DEFAULT_SETTINGS.llm_base_url, + llm_api_key: null, + llm_api_key_set: DEFAULT_SETTINGS.llm_api_key_set, + search_api_key_set: DEFAULT_SETTINGS.search_api_key_set, + agent: DEFAULT_SETTINGS.agent, + language: DEFAULT_SETTINGS.language, + confirmation_mode: DEFAULT_SETTINGS.confirmation_mode, + security_analyzer: DEFAULT_SETTINGS.security_analyzer, + remote_runtime_resource_factor: + DEFAULT_SETTINGS.remote_runtime_resource_factor, + provider_tokens_set: {}, + enable_default_condenser: DEFAULT_SETTINGS.enable_default_condenser, + condenser_max_size: DEFAULT_SETTINGS.condenser_max_size, + enable_sound_notifications: DEFAULT_SETTINGS.enable_sound_notifications, + enable_proactive_conversation_starters: + DEFAULT_SETTINGS.enable_proactive_conversation_starters, + enable_solvability_analysis: DEFAULT_SETTINGS.enable_solvability_analysis, + user_consents_to_analytics: DEFAULT_SETTINGS.user_consents_to_analytics, + max_budget_per_task: DEFAULT_SETTINGS.max_budget_per_task, +}; + +const MOCK_USER_PREFERENCES: { + settings: Settings | null; +} = { + settings: null, +}; + +// Reset mock +export const resetTestHandlersMockSettings = () => { + MOCK_USER_PREFERENCES.settings = MOCK_DEFAULT_USER_SETTINGS; +}; + +// --- Handlers for options/config/settings --- + +export const SETTINGS_HANDLERS = [ + http.get("/api/options/models", async () => + HttpResponse.json([ + "gpt-3.5-turbo", + "gpt-4o", + "gpt-4o-mini", + "anthropic/claude-3.5", + "anthropic/claude-sonnet-4-20250514", + "anthropic/claude-sonnet-4-5-20250929", + "anthropic/claude-haiku-4-5-20251001", + "anthropic/claude-opus-4-5-20251101", + "openhands/claude-sonnet-4-20250514", + "openhands/claude-sonnet-4-5-20250929", + "openhands/claude-haiku-4-5-20251001", + "openhands/claude-opus-4-5-20251101", + "sambanova/Meta-Llama-3.1-8B-Instruct", + ]), + ), + + http.get("/api/options/agents", async () => + HttpResponse.json(["CodeActAgent", "CoActAgent"]), + ), + + http.get("/api/options/security-analyzers", async () => + HttpResponse.json(["llm", "none"]), + ), + + http.get("/api/options/config", () => { + const mockSaas = import.meta.env.VITE_MOCK_SAAS === "true"; + + const config: GetConfigResponse = { + APP_MODE: mockSaas ? 
"saas" : "oss", + GITHUB_CLIENT_ID: "fake-github-client-id", + POSTHOG_CLIENT_KEY: "fake-posthog-client-key", + FEATURE_FLAGS: { + ENABLE_BILLING: false, + HIDE_LLM_SETTINGS: mockSaas, + ENABLE_JIRA: false, + ENABLE_JIRA_DC: false, + ENABLE_LINEAR: false, + }, + // Uncomment the following to test the maintenance banner + // MAINTENANCE: { + // startTime: "2024-01-15T10:00:00-05:00", // EST timestamp + // }, + }; + + return HttpResponse.json(config); + }), + + http.get("/api/settings", async () => { + await delay(); + const { settings } = MOCK_USER_PREFERENCES; + + if (!settings) return HttpResponse.json(null, { status: 404 }); + + return HttpResponse.json(settings); + }), + + http.post("/api/settings", async ({ request }) => { + await delay(); + const body = await request.json(); + + if (body) { + const current = MOCK_USER_PREFERENCES.settings || { + ...MOCK_DEFAULT_USER_SETTINGS, + }; + + MOCK_USER_PREFERENCES.settings = { + ...current, + ...(body as Partial), + }; + + return HttpResponse.json(null, { status: 200 }); + } + + return HttpResponse.json(null, { status: 400 }); + }), + + http.post("/api/reset-settings", async () => { + await delay(); + MOCK_USER_PREFERENCES.settings = { ...MOCK_DEFAULT_USER_SETTINGS }; + return HttpResponse.json(null, { status: 200 }); + }), + + http.post("/api/add-git-providers", async ({ request }) => { + const body = await request.json(); + + if (typeof body === "object" && body?.provider_tokens) { + const rawTokens = body.provider_tokens as Record< + string, + { token?: string } + >; + + const providerTokensSet: Partial> = + Object.fromEntries( + Object.entries(rawTokens) + .filter(([, val]) => val?.token) + .map(([provider]) => [provider as Provider, ""]), + ); + + MOCK_USER_PREFERENCES.settings = { + ...(MOCK_USER_PREFERENCES.settings || MOCK_DEFAULT_USER_SETTINGS), + provider_tokens_set: providerTokensSet, + }; + + return HttpResponse.json(true, { status: 200 }); + } + + return HttpResponse.json(null, { status: 400 }); + }), +]; diff --git a/frontend/src/routes.ts b/frontend/src/routes.ts index 4c3c48adc543..ecee511688ff 100644 --- a/frontend/src/routes.ts +++ b/frontend/src/routes.ts @@ -21,5 +21,6 @@ export default [ ]), route("conversations/:conversationId", "routes/conversation.tsx"), route("microagent-management", "routes/microagent-management.tsx"), + route("oauth/device/verify", "routes/device-verify.tsx"), ]), ] satisfies RouteConfig; diff --git a/frontend/src/routes/accept-tos.tsx b/frontend/src/routes/accept-tos.tsx index f723f2a5f659..a3732273e346 100644 --- a/frontend/src/routes/accept-tos.tsx +++ b/frontend/src/routes/accept-tos.tsx @@ -1,66 +1,27 @@ import React from "react"; import { useTranslation } from "react-i18next"; -import { useNavigate, useSearchParams } from "react-router"; -import { useMutation } from "@tanstack/react-query"; -import { usePostHog } from "posthog-js/react"; +import { useSearchParams } from "react-router"; import { I18nKey } from "#/i18n/declaration"; import OpenHandsLogo from "#/assets/branding/openhands-logo.svg?react"; import { TOSCheckbox } from "#/components/features/waitlist/tos-checkbox"; import { BrandButton } from "#/components/features/settings/brand-button"; -import { handleCaptureConsent } from "#/utils/handle-capture-consent"; -import { openHands } from "#/api/open-hands-axios"; import { ModalBackdrop } from "#/components/shared/modals/modal-backdrop"; -import { useTracking } from "#/hooks/use-tracking"; +import { useAcceptTos } from "#/hooks/mutation/use-accept-tos"; export default function 
AcceptTOS() { - const posthog = usePostHog(); const { t } = useTranslation(); - const navigate = useNavigate(); const [searchParams] = useSearchParams(); const [isTosAccepted, setIsTosAccepted] = React.useState(false); - const { trackUserSignupCompleted } = useTracking(); // Get the redirect URL from the query parameters const redirectUrl = searchParams.get("redirect_url") || "/"; // Use mutation for accepting TOS - const { mutate: acceptTOS, isPending: isSubmitting } = useMutation({ - mutationFn: async () => { - // Set consent for analytics - handleCaptureConsent(posthog, true); - - // Call the API to record TOS acceptance in the database - return openHands.post("/api/accept_tos", { - redirect_url: redirectUrl, - }); - }, - onSuccess: (response) => { - // Track user signup completion - trackUserSignupCompleted(); - - // Get the redirect URL from the response - const finalRedirectUrl = response.data.redirect_url || redirectUrl; - - // Check if the redirect URL is an external URL (starts with http or https) - if ( - finalRedirectUrl.startsWith("http://") || - finalRedirectUrl.startsWith("https://") - ) { - // For external URLs, redirect using window.location - window.location.href = finalRedirectUrl; - } else { - // For internal routes, use navigate - navigate(finalRedirectUrl); - } - }, - onError: () => { - window.location.href = "/"; - }, - }); + const { mutate: acceptTOS, isPending: isSubmitting } = useAcceptTos(); const handleAcceptTOS = () => { if (isTosAccepted && !isSubmitting) { - acceptTOS(); + acceptTOS({ redirectUrl }); } }; diff --git a/frontend/src/routes/app-settings.tsx b/frontend/src/routes/app-settings.tsx index 4206141cbe51..a8524cc9898b 100644 --- a/frontend/src/routes/app-settings.tsx +++ b/frontend/src/routes/app-settings.tsx @@ -56,7 +56,7 @@ function AppSettingsScreen() { const languageValue = AvailableLanguages.find( ({ label }) => label === languageLabel, )?.value; - const language = languageValue || DEFAULT_SETTINGS.LANGUAGE; + const language = languageValue || DEFAULT_SETTINGS.language; const enableAnalytics = formData.get("enable-analytics-switch")?.toString() === "on"; @@ -77,21 +77,21 @@ function AppSettingsScreen() { const gitUserName = formData.get("git-user-name-input")?.toString() || - DEFAULT_SETTINGS.GIT_USER_NAME; + DEFAULT_SETTINGS.git_user_name; const gitUserEmail = formData.get("git-user-email-input")?.toString() || - DEFAULT_SETTINGS.GIT_USER_EMAIL; + DEFAULT_SETTINGS.git_user_email; saveSettings( { - LANGUAGE: language, + language, user_consents_to_analytics: enableAnalytics, - ENABLE_SOUND_NOTIFICATIONS: enableSoundNotifications, - ENABLE_PROACTIVE_CONVERSATION_STARTERS: enableProactiveConversations, - ENABLE_SOLVABILITY_ANALYSIS: enableSolvabilityAnalysis, - MAX_BUDGET_PER_TASK: maxBudgetPerTask, - GIT_USER_NAME: gitUserName, - GIT_USER_EMAIL: gitUserEmail, + enable_sound_notifications: enableSoundNotifications, + enable_proactive_conversation_starters: enableProactiveConversations, + enable_solvability_analysis: enableSolvabilityAnalysis, + max_budget_per_task: maxBudgetPerTask, + git_user_name: gitUserName, + git_user_email: gitUserEmail, }, { onSuccess: () => { @@ -120,7 +120,7 @@ function AppSettingsScreen() { ({ label: langValue }) => langValue === value, )?.label; const currentLanguage = AvailableLanguages.find( - ({ value: langValue }) => langValue === settings?.LANGUAGE, + ({ value: langValue }) => langValue === settings?.language, )?.label; setLanguageInputHasChanged(selectedLanguage !== currentLanguage); @@ -128,12 +128,12 @@ function 
AppSettingsScreen() { const checkIfAnalyticsSwitchHasChanged = (checked: boolean) => { // Treat null as true since analytics is opt-in by default - const currentAnalytics = settings?.USER_CONSENTS_TO_ANALYTICS ?? true; + const currentAnalytics = settings?.user_consents_to_analytics ?? true; setAnalyticsSwitchHasChanged(checked !== currentAnalytics); }; const checkIfSoundNotificationsSwitchHasChanged = (checked: boolean) => { - const currentSoundNotifications = !!settings?.ENABLE_SOUND_NOTIFICATIONS; + const currentSoundNotifications = !!settings?.enable_sound_notifications; setSoundNotificationsSwitchHasChanged( checked !== currentSoundNotifications, ); @@ -141,14 +141,14 @@ function AppSettingsScreen() { const checkIfProactiveConversationsSwitchHasChanged = (checked: boolean) => { const currentProactiveConversations = - !!settings?.ENABLE_PROACTIVE_CONVERSATION_STARTERS; + !!settings?.enable_proactive_conversation_starters; setProactiveConversationsSwitchHasChanged( checked !== currentProactiveConversations, ); }; const checkIfSolvabilityAnalysisSwitchHasChanged = (checked: boolean) => { - const currentSolvabilityAnalysis = !!settings?.ENABLE_SOLVABILITY_ANALYSIS; + const currentSolvabilityAnalysis = !!settings?.enable_solvability_analysis; setSolvabilityAnalysisSwitchHasChanged( checked !== currentSolvabilityAnalysis, ); @@ -156,17 +156,17 @@ function AppSettingsScreen() { const checkIfMaxBudgetPerTaskHasChanged = (value: string) => { const newValue = parseMaxBudgetPerTask(value); - const currentValue = settings?.MAX_BUDGET_PER_TASK; + const currentValue = settings?.max_budget_per_task; setMaxBudgetPerTaskHasChanged(newValue !== currentValue); }; const checkIfGitUserNameHasChanged = (value: string) => { - const currentValue = settings?.GIT_USER_NAME; + const currentValue = settings?.git_user_name; setGitUserNameHasChanged(value !== currentValue); }; const checkIfGitUserEmailHasChanged = (value: string) => { - const currentValue = settings?.GIT_USER_EMAIL; + const currentValue = settings?.git_user_email; setGitUserEmailHasChanged(value !== currentValue); }; @@ -193,14 +193,14 @@ function AppSettingsScreen() {
    {t(I18nKey.ANALYTICS$SEND_ANONYMOUS_DATA)} @@ -209,7 +209,7 @@ function AppSettingsScreen() { {t(I18nKey.SETTINGS$SOUND_NOTIFICATIONS)} @@ -220,7 +220,7 @@ function AppSettingsScreen() { testId="enable-proactive-conversations-switch" name="enable-proactive-conversations-switch" defaultIsToggled={ - !!settings.ENABLE_PROACTIVE_CONVERSATION_STARTERS + !!settings.enable_proactive_conversation_starters } onToggle={checkIfProactiveConversationsSwitchHasChanged} > @@ -232,25 +232,27 @@ function AppSettingsScreen() { {t(I18nKey.SETTINGS$SOLVABILITY_ANALYSIS)} )} - + {!settings?.v1_enabled && ( + + )}

    @@ -265,7 +267,7 @@ function AppSettingsScreen() { name="git-user-name-input" type="text" label={t(I18nKey.SETTINGS$GIT_USERNAME)} - defaultValue={settings.GIT_USER_NAME || ""} + defaultValue={settings.git_user_name || ""} onChange={checkIfGitUserNameHasChanged} placeholder="Username for git commits" className="w-full max-w-[680px]" @@ -275,7 +277,7 @@ function AppSettingsScreen() { name="git-user-email-input" type="email" label={t(I18nKey.SETTINGS$GIT_EMAIL)} - defaultValue={settings.GIT_USER_EMAIL || ""} + defaultValue={settings.git_user_email || ""} onChange={checkIfGitUserEmailHasChanged} placeholder="Email for git commits" className="w-full max-w-[680px]" diff --git a/frontend/src/routes/billing.tsx b/frontend/src/routes/billing.tsx index c004d93dee31..05d23fe276d6 100644 --- a/frontend/src/routes/billing.tsx +++ b/frontend/src/routes/billing.tsx @@ -30,11 +30,12 @@ function BillingSettingsScreen() { } displaySuccessToast(t(I18nKey.PAYMENT$SUCCESS)); + + setSearchParams({}); } else if (checkoutStatus === "cancel") { displayErrorToast(t(I18nKey.PAYMENT$CANCELLED)); + setSearchParams({}); } - - setSearchParams({}); }, [checkoutStatus, searchParams, setSearchParams, t, trackCreditsPurchased]); return ; diff --git a/frontend/src/routes/conversation.tsx b/frontend/src/routes/conversation.tsx index ec19051530fe..0237878e1e0f 100644 --- a/frontend/src/routes/conversation.tsx +++ b/frontend/src/routes/conversation.tsx @@ -3,8 +3,8 @@ import { useNavigate } from "react-router"; import { useTranslation } from "react-i18next"; import { useConversationId } from "#/hooks/use-conversation-id"; -import { useCommandStore } from "#/state/command-store"; -import { useConversationStore } from "#/state/conversation-store"; +import { useCommandStore } from "#/stores/command-store"; +import { useConversationStore } from "#/stores/conversation-store"; import { useAgentStore } from "#/stores/agent-store"; import { AgentState } from "#/types/agent-state"; diff --git a/frontend/src/routes/device-verify.tsx b/frontend/src/routes/device-verify.tsx new file mode 100644 index 000000000000..f306d660a509 --- /dev/null +++ b/frontend/src/routes/device-verify.tsx @@ -0,0 +1,274 @@ +/* eslint-disable i18next/no-literal-string */ +import React, { useState } from "react"; +import { useSearchParams } from "react-router"; +import { useIsAuthed } from "#/hooks/query/use-is-authed"; + +export default function DeviceVerify() { + const [searchParams] = useSearchParams(); + const { data: isAuthed, isLoading: isAuthLoading } = useIsAuthed(); + const [verificationResult, setVerificationResult] = useState<{ + success: boolean; + message: string; + } | null>(null); + const [isProcessing, setIsProcessing] = useState(false); + + // Get user_code from URL parameters + const userCode = searchParams.get("user_code"); + + const processDeviceVerification = async (code: string) => { + try { + setIsProcessing(true); + + // Call the backend API endpoint to process device verification + const response = await fetch("/oauth/device/verify-authenticated", { + method: "POST", + headers: { + "Content-Type": "application/x-www-form-urlencoded", + }, + body: `user_code=${encodeURIComponent(code)}`, + credentials: "include", // Include cookies for authentication + }); + + if (response.ok) { + // Show success message + setVerificationResult({ + success: true, + message: + "Device authorized successfully! 
You can now return to your CLI and close this window.", + }); + } else { + const errorText = await response.text(); + setVerificationResult({ + success: false, + message: errorText || "Failed to authorize device. Please try again.", + }); + } + } catch (error) { + setVerificationResult({ + success: false, + message: + "An error occurred while authorizing the device. Please try again.", + }); + } finally { + setIsProcessing(false); + } + }; + + // Remove automatic verification - require explicit user consent + + const handleManualSubmit = (event: React.FormEvent<HTMLFormElement>) => { + event.preventDefault(); + const formData = new FormData(event.currentTarget); + const code = formData.get("user_code") as string; + if (code && isAuthed) { + processDeviceVerification(code); + } + }; + + // Show verification result if we have one + if (verificationResult) { + return ( +
    +
    +
    +
    + {verificationResult.success ? ( + + + + ) : ( + + + + )} +
    +

    + {verificationResult.success ? "Success!" : "Error"} +

    +

    + {verificationResult.message} +

    + {!verificationResult.success && ( + + )} +
    +
    +
    + ); + } + + // Show processing state + if (isProcessing) { + return ( +
    +
    +
    +
    +

    + Processing device verification... +

    +
    +
    +
    + ); + } + + // Show device authorization confirmation if user is authenticated and code is provided + if (isAuthed && userCode) { + return ( +
    +
    +

    + Device Authorization Request +

    +
    +

    Device Code:

    +

    + {userCode} +

    +
    +
    +
    + + + +
    +

    + Security Notice +

    +

    + Only authorize this device if you initiated this request from + your CLI or application. +

    +
    +
    +
    +

    + Do you want to authorize this device to access your OpenHands + account? +

    +
    + + +
    +
    +
    + ); + } + + // Show manual code entry form if no code in URL but user is authenticated + if (isAuthed && !userCode) { + return ( +
    +
    +

    + Device Authorization +

    +

    + Enter the code displayed on your device: +

    +
    +
    + + +
    + +
    +
    +
    + ); + } + + // Show loading state while checking authentication + if (isAuthLoading) { + return ( +
    +
    +
    +

    + Processing device verification... +

    +
    +
    + ); + } + + // Show authentication required message (this will trigger the auth modal via root layout) + return ( +
    +
    +

    Authentication Required

    +

    + Please sign in to authorize your device. +

    +
    +
    + ); +} diff --git a/frontend/src/routes/git-settings.tsx b/frontend/src/routes/git-settings.tsx index 89a25b9828ce..69a7838c10f8 100644 --- a/frontend/src/routes/git-settings.tsx +++ b/frontend/src/routes/git-settings.tsx @@ -9,7 +9,6 @@ import { GitLabTokenInput } from "#/components/features/settings/git-settings/gi import { BitbucketTokenInput } from "#/components/features/settings/git-settings/bitbucket-token-input"; import { AzureDevOpsTokenInput } from "#/components/features/settings/git-settings/azure-devops-token-input"; import { ConfigureGitHubRepositoriesAnchor } from "#/components/features/settings/git-settings/configure-github-repositories-anchor"; -import { ConfigureAzureDevOpsAnchor } from "#/components/features/settings/git-settings/configure-azure-devops-anchor"; import { InstallSlackAppAnchor } from "#/components/features/settings/git-settings/install-slack-app-anchor"; import { I18nKey } from "#/i18n/declaration"; import { @@ -51,10 +50,10 @@ function GitSettingsScreen() { const [azureDevOpsHostInputHasValue, setAzureDevOpsHostInputHasValue] = React.useState(false); - const existingGithubHost = settings?.PROVIDER_TOKENS_SET.github; - const existingGitlabHost = settings?.PROVIDER_TOKENS_SET.gitlab; - const existingBitbucketHost = settings?.PROVIDER_TOKENS_SET.bitbucket; - const existingAzureDevOpsHost = settings?.PROVIDER_TOKENS_SET.azure_devops; + const existingGithubHost = settings?.provider_tokens_set.github; + const existingGitlabHost = settings?.provider_tokens_set.gitlab; + const existingBitbucketHost = settings?.provider_tokens_set.bitbucket; + const existingAzureDevOpsHost = settings?.provider_tokens_set.azure_devops; const isSaas = config?.APP_MODE === "saas"; const isGitHubTokenSet = providers.includes("github"); @@ -153,18 +152,6 @@ function GitSettingsScreen() { )} - {shouldRenderExternalConfigureButtons && !isLoading && ( - <> -
    -

    - {t(I18nKey.SETTINGS$AZURE_DEVOPS)} -

    - -
    -
    - - )} - {shouldRenderExternalConfigureButtons && !isLoading && ( <>
    diff --git a/frontend/src/routes/llm-settings.tsx b/frontend/src/routes/llm-settings.tsx index ed89d03882bc..d793e568762f 100644 --- a/frontend/src/routes/llm-settings.tsx +++ b/frontend/src/routes/llm-settings.tsx @@ -91,26 +91,55 @@ function LlmSettingsScreen() { // Track confirmation mode state to control security analyzer visibility const [confirmationModeEnabled, setConfirmationModeEnabled] = React.useState( - settings?.CONFIRMATION_MODE ?? DEFAULT_SETTINGS.CONFIRMATION_MODE, + settings?.confirmation_mode ?? DEFAULT_SETTINGS.confirmation_mode, ); // Track selected security analyzer for form submission const [selectedSecurityAnalyzer, setSelectedSecurityAnalyzer] = React.useState( - settings?.SECURITY_ANALYZER === null + settings?.security_analyzer === null ? "none" - : (settings?.SECURITY_ANALYZER ?? DEFAULT_SETTINGS.SECURITY_ANALYZER), + : (settings?.security_analyzer ?? DEFAULT_SETTINGS.security_analyzer), ); + const [selectedProvider, setSelectedProvider] = React.useState( + null, + ); + const modelsAndProviders = organizeModelsAndProviders( resources?.models || [], ); + // Determine if we should hide the API key input and use OpenHands-managed key (when using OpenHands provider in SaaS mode) + const currentModel = currentSelectedModel || settings?.llm_model; + + const isSaasMode = config?.APP_MODE === "saas"; + + const isOpenHandsProvider = () => { + if (view === "basic") { + return selectedProvider === "openhands"; + } + + if (view === "advanced") { + if (dirtyInputs.model) { + return currentModel?.startsWith("openhands/"); + } + return settings?.llm_model?.startsWith("openhands/"); + } + + return false; + }; + + const shouldUseOpenHandsKey = isOpenHandsProvider() && isSaasMode; + + // Determine if we should hide the agent dropdown when V1 conversation API is enabled + const isV1Enabled = settings?.v1_enabled; + React.useEffect(() => { const determineWhetherToToggleAdvancedSettings = () => { if (resources && settings) { return ( - isCustomModel(resources.models, settings.LLM_MODEL) || + isCustomModel(resources.models, settings.llm_model) || hasAdvancedSettingsSet({ ...settings, }) @@ -128,24 +157,24 @@ function LlmSettingsScreen() { // Initialize currentSelectedModel with the current settings React.useEffect(() => { - if (settings?.LLM_MODEL) { - setCurrentSelectedModel(settings.LLM_MODEL); + if (settings?.llm_model) { + setCurrentSelectedModel(settings.llm_model); } - }, [settings?.LLM_MODEL]); + }, [settings?.llm_model]); // Update confirmation mode state when settings change React.useEffect(() => { - if (settings?.CONFIRMATION_MODE !== undefined) { - setConfirmationModeEnabled(settings.CONFIRMATION_MODE); + if (settings?.confirmation_mode !== undefined) { + setConfirmationModeEnabled(settings.confirmation_mode); } - }, [settings?.CONFIRMATION_MODE]); + }, [settings?.confirmation_mode]); // Update selected security analyzer state when settings change React.useEffect(() => { - if (settings?.SECURITY_ANALYZER !== undefined) { - setSelectedSecurityAnalyzer(settings.SECURITY_ANALYZER || "none"); + if (settings?.security_analyzer !== undefined) { + setSelectedSecurityAnalyzer(settings.security_analyzer || "none"); } - }, [settings?.SECURITY_ANALYZER]); + }, [settings?.security_analyzer]); // Handle URL parameters for SaaS subscription redirects React.useEffect(() => { @@ -196,21 +225,24 @@ function LlmSettingsScreen() { const fullLlmModel = provider && model && `${provider}/${model}`; + // Use OpenHands-managed key for OpenHands provider in SaaS mode + const finalApiKey = 
shouldUseOpenHandsKey ? null : apiKey; + saveSettings( { - LLM_MODEL: fullLlmModel, - llm_api_key: apiKey || null, - SEARCH_API_KEY: searchApiKey || "", - CONFIRMATION_MODE: confirmationMode, - SECURITY_ANALYZER: + llm_model: fullLlmModel, + llm_api_key: finalApiKey || null, + search_api_key: searchApiKey || "", + confirmation_mode: confirmationMode, + security_analyzer: securityAnalyzer === "none" ? null - : securityAnalyzer || DEFAULT_SETTINGS.SECURITY_ANALYZER, + : securityAnalyzer || DEFAULT_SETTINGS.security_analyzer, // reset advanced settings - LLM_BASE_URL: DEFAULT_SETTINGS.LLM_BASE_URL, - AGENT: DEFAULT_SETTINGS.AGENT, - ENABLE_DEFAULT_CONDENSER: DEFAULT_SETTINGS.ENABLE_DEFAULT_CONDENSER, + llm_base_url: DEFAULT_SETTINGS.llm_base_url, + agent: DEFAULT_SETTINGS.agent, + enable_default_condenser: DEFAULT_SETTINGS.enable_default_condenser, }, { onSuccess: handleSuccessfulMutation, @@ -244,21 +276,24 @@ function LlmSettingsScreen() { .get("security-analyzer-input") ?.toString(); + // Use OpenHands-managed key for OpenHands provider in SaaS mode + const finalApiKey = shouldUseOpenHandsKey ? null : apiKey; + saveSettings( { - LLM_MODEL: model, - LLM_BASE_URL: baseUrl, - llm_api_key: apiKey || null, - SEARCH_API_KEY: searchApiKey || "", - AGENT: agent, - CONFIRMATION_MODE: confirmationMode, - ENABLE_DEFAULT_CONDENSER: enableDefaultCondenser, - CONDENSER_MAX_SIZE: - condenserMaxSize ?? DEFAULT_SETTINGS.CONDENSER_MAX_SIZE, - SECURITY_ANALYZER: + llm_model: model, + llm_base_url: baseUrl, + llm_api_key: finalApiKey || null, + search_api_key: searchApiKey || "", + agent, + confirmation_mode: confirmationMode, + enable_default_condenser: enableDefaultCondenser, + condenser_max_size: + condenserMaxSize ?? DEFAULT_SETTINGS.condenser_max_size, + security_analyzer: securityAnalyzer === "none" ? 
null - : securityAnalyzer || DEFAULT_SETTINGS.SECURITY_ANALYZER, + : securityAnalyzer || DEFAULT_SETTINGS.security_analyzer, }, { onSuccess: handleSuccessfulMutation, @@ -282,10 +317,13 @@ function LlmSettingsScreen() { }); }; - const handleModelIsDirty = (model: string | null) => { + const handleModelIsDirty = ( + provider: string | null, + model: string | null, + ) => { // openai providers are special case; see ModelSelector // component for details - const modelIsDirty = model !== settings?.LLM_MODEL.replace("openai/", ""); + const modelIsDirty = model !== settings?.llm_model.replace("openai/", ""); setDirtyInputs((prev) => ({ ...prev, model: modelIsDirty, @@ -293,6 +331,15 @@ function LlmSettingsScreen() { // Track the currently selected model for help text display setCurrentSelectedModel(model); + setSelectedProvider(provider); + }; + + const onDefaultValuesChanged = ( + provider: string | null, + model: string | null, + ) => { + setSelectedProvider(provider); + setCurrentSelectedModel(model); }; const handleApiKeyIsDirty = (apiKey: string) => { @@ -304,7 +351,7 @@ function LlmSettingsScreen() { }; const handleSearchApiKeyIsDirty = (searchApiKey: string) => { - const searchApiKeyIsDirty = searchApiKey !== settings?.SEARCH_API_KEY; + const searchApiKeyIsDirty = searchApiKey !== settings?.search_api_key; setDirtyInputs((prev) => ({ ...prev, searchApiKey: searchApiKeyIsDirty, @@ -312,7 +359,7 @@ function LlmSettingsScreen() { }; const handleCustomModelIsDirty = (model: string) => { - const modelIsDirty = model !== settings?.LLM_MODEL && model !== ""; + const modelIsDirty = model !== settings?.llm_model && model !== ""; setDirtyInputs((prev) => ({ ...prev, model: modelIsDirty, @@ -323,7 +370,7 @@ function LlmSettingsScreen() { }; const handleBaseUrlIsDirty = (baseUrl: string) => { - const baseUrlIsDirty = baseUrl !== settings?.LLM_BASE_URL; + const baseUrlIsDirty = baseUrl !== settings?.llm_base_url; setDirtyInputs((prev) => ({ ...prev, baseUrl: baseUrlIsDirty, @@ -331,7 +378,7 @@ function LlmSettingsScreen() { }; const handleAgentIsDirty = (agent: string) => { - const agentIsDirty = agent !== settings?.AGENT && agent !== ""; + const agentIsDirty = agent !== settings?.agent && agent !== ""; setDirtyInputs((prev) => ({ ...prev, agent: agentIsDirty, @@ -339,7 +386,7 @@ function LlmSettingsScreen() { }; const handleConfirmationModeIsDirty = (isToggled: boolean) => { - const confirmationModeIsDirty = isToggled !== settings?.CONFIRMATION_MODE; + const confirmationModeIsDirty = isToggled !== settings?.confirmation_mode; setDirtyInputs((prev) => ({ ...prev, confirmationMode: confirmationModeIsDirty, @@ -348,7 +395,7 @@ function LlmSettingsScreen() { // When confirmation mode is enabled, set default security analyzer to "llm" if not already set if (isToggled && !selectedSecurityAnalyzer) { - setSelectedSecurityAnalyzer(DEFAULT_SETTINGS.SECURITY_ANALYZER); + setSelectedSecurityAnalyzer(DEFAULT_SETTINGS.security_analyzer); setDirtyInputs((prev) => ({ ...prev, securityAnalyzer: true, @@ -358,7 +405,7 @@ function LlmSettingsScreen() { const handleEnableDefaultCondenserIsDirty = (isToggled: boolean) => { const enableDefaultCondenserIsDirty = - isToggled !== settings?.ENABLE_DEFAULT_CONDENSER; + isToggled !== settings?.enable_default_condenser; setDirtyInputs((prev) => ({ ...prev, enableDefaultCondenser: enableDefaultCondenserIsDirty, @@ -369,8 +416,8 @@ function LlmSettingsScreen() { const parsed = value ? Number.parseInt(value, 10) : undefined; const bounded = parsed !== undefined ? 
Math.max(20, parsed) : undefined; const condenserMaxSizeIsDirty = - (bounded ?? DEFAULT_SETTINGS.CONDENSER_MAX_SIZE) !== - (settings?.CONDENSER_MAX_SIZE ?? DEFAULT_SETTINGS.CONDENSER_MAX_SIZE); + (bounded ?? DEFAULT_SETTINGS.condenser_max_size) !== + (settings?.condenser_max_size ?? DEFAULT_SETTINGS.condenser_max_size); setDirtyInputs((prev) => ({ ...prev, condenserMaxSize: condenserMaxSizeIsDirty, @@ -379,7 +426,7 @@ function LlmSettingsScreen() { const handleSecurityAnalyzerIsDirty = (securityAnalyzer: string) => { const securityAnalyzerIsDirty = - securityAnalyzer !== settings?.SECURITY_ANALYZER; + securityAnalyzer !== settings?.security_analyzer; setDirtyInputs((prev) => ({ ...prev, securityAnalyzer: securityAnalyzerIsDirty, @@ -406,6 +453,10 @@ function LlmSettingsScreen() { label: t(I18nKey.SETTINGS$SECURITY_ANALYZER_NONE), }); + if (isV1Enabled) { + return orderedItems; + } + // Add Invariant analyzer third if (analyzers.includes("invariant")) { orderedItems.push({ @@ -461,38 +512,43 @@ function LlmSettingsScreen() { <> - {(settings.LLM_MODEL?.startsWith("openhands/") || + {(settings.llm_model?.startsWith("openhands/") || currentSelectedModel?.startsWith("openhands/")) && ( )} )} - " : ""} - onChange={handleApiKeyIsDirty} - startContent={ - settings.LLM_API_KEY_SET && ( - - ) - } - /> + {!shouldUseOpenHandsKey && ( + <> + " : ""} + onChange={handleApiKeyIsDirty} + startContent={ + settings.llm_api_key_set && ( + + ) + } + /> - + + + )}
    )} @@ -505,13 +561,13 @@ function LlmSettingsScreen() { testId="llm-custom-model-input" name="llm-custom-model-input" label={t(I18nKey.SETTINGS$CUSTOM_MODEL)} - defaultValue={settings.LLM_MODEL || DEFAULT_OPENHANDS_MODEL} + defaultValue={settings.llm_model || DEFAULT_OPENHANDS_MODEL} placeholder={DEFAULT_OPENHANDS_MODEL} type="text" className="w-full max-w-[680px]" onChange={handleCustomModelIsDirty} /> - {(settings.LLM_MODEL?.startsWith("openhands/") || + {(settings.llm_model?.startsWith("openhands/") || currentSelectedModel?.startsWith("openhands/")) && ( )} @@ -520,33 +576,37 @@ function LlmSettingsScreen() { testId="base-url-input" name="base-url-input" label={t(I18nKey.SETTINGS$BASE_URL)} - defaultValue={settings.LLM_BASE_URL} + defaultValue={settings.llm_base_url} placeholder="https://api.openai.com" type="text" className="w-full max-w-[680px]" onChange={handleBaseUrlIsDirty} /> - " : ""} - onChange={handleApiKeyIsDirty} - startContent={ - settings.LLM_API_KEY_SET && ( - - ) - } - /> - + {!shouldUseOpenHandsKey && ( + <> + " : ""} + onChange={handleApiKeyIsDirty} + startContent={ + settings.llm_api_key_set && ( + + ) + } + /> + + + )} {config?.APP_MODE !== "saas" && ( <> @@ -556,12 +616,12 @@ function LlmSettingsScreen() { label={t(I18nKey.SETTINGS$SEARCH_API_KEY)} type="password" className="w-full max-w-[680px]" - defaultValue={settings.SEARCH_API_KEY || ""} + defaultValue={settings.search_api_key || ""} onChange={handleSearchApiKeyIsDirty} placeholder={t(I18nKey.API$TVLY_KEY_EXAMPLE)} startContent={ - settings.SEARCH_API_KEY_SET && ( - + settings.search_api_key_set && ( + ) } /> @@ -573,21 +633,23 @@ function LlmSettingsScreen() { href="https://tavily.com/" /> - ({ - key: agent, - label: agent, // TODO: Add i18n support for agent names - })) || [] - } - defaultSelectedKey={settings.AGENT} - isClearable={false} - onInputChange={handleAgentIsDirty} - wrapperClassName="w-full max-w-[680px]" - /> + {!isV1Enabled && ( + ({ + key: agent, + label: agent, // TODO: Add i18n support for agent names + })) || [] + } + defaultSelectedKey={settings.agent} + isClearable={false} + onInputChange={handleAgentIsDirty} + wrapperClassName="w-full max-w-[680px]" + /> + )} )} @@ -600,11 +662,11 @@ function LlmSettingsScreen() { step={1} label={t(I18nKey.SETTINGS$CONDENSER_MAX_SIZE)} defaultValue={( - settings.CONDENSER_MAX_SIZE ?? - DEFAULT_SETTINGS.CONDENSER_MAX_SIZE + settings.condenser_max_size ?? + DEFAULT_SETTINGS.condenser_max_size )?.toString()} onChange={(value) => handleCondenserMaxSizeIsDirty(value)} - isDisabled={!settings.ENABLE_DEFAULT_CONDENSER} + isDisabled={!settings.enable_default_condenser} />

    {t(I18nKey.SETTINGS$CONDENSER_MAX_SIZE_TOOLTIP)} @@ -614,7 +676,7 @@ function LlmSettingsScreen() { {t(I18nKey.SETTINGS$ENABLE_MEMORY_CONDENSATION)} @@ -626,7 +688,7 @@ function LlmSettingsScreen() { testId="enable-confirmation-mode-switch" name="enable-confirmation-mode-switch" onToggle={handleConfirmationModeIsDirty} - defaultIsToggled={settings.CONFIRMATION_MODE} + defaultIsToggled={settings.confirmation_mode} isBeta > {t(I18nKey.SETTINGS$CONFIRMATION_MODE)} diff --git a/frontend/src/routes/mcp-settings.tsx b/frontend/src/routes/mcp-settings.tsx index 0a4224182bd0..e308b45228cc 100644 --- a/frontend/src/routes/mcp-settings.tsx +++ b/frontend/src/routes/mcp-settings.tsx @@ -41,7 +41,7 @@ function MCPSettingsScreen() { useState(false); const [serverToDelete, setServerToDelete] = useState(null); - const mcpConfig: MCPConfig = settings?.MCP_CONFIG || { + const mcpConfig: MCPConfig = settings?.mcp_config || { sse_servers: [], stdio_servers: [], shttp_servers: [], diff --git a/frontend/src/routes/planner-tab.tsx b/frontend/src/routes/planner-tab.tsx index a3002c665119..f17a0acbc52a 100644 --- a/frontend/src/routes/planner-tab.tsx +++ b/frontend/src/routes/planner-tab.tsx @@ -1,49 +1,31 @@ +import React from "react"; import { useTranslation } from "react-i18next"; -import Markdown from "react-markdown"; -import remarkGfm from "remark-gfm"; -import remarkBreaks from "remark-breaks"; import { I18nKey } from "#/i18n/declaration"; import LessonPlanIcon from "#/icons/lesson-plan.svg?react"; -import { useConversationStore } from "#/state/conversation-store"; -import { code } from "#/components/features/markdown/code"; -import { ul, ol } from "#/components/features/markdown/list"; -import { paragraph } from "#/components/features/markdown/paragraph"; -import { anchor } from "#/components/features/markdown/anchor"; -import { - h1, - h2, - h3, - h4, - h5, - h6, -} from "#/components/features/markdown/headings"; +import { useConversationStore } from "#/stores/conversation-store"; +import { useScrollToBottom } from "#/hooks/use-scroll-to-bottom"; +import { MarkdownRenderer } from "#/components/features/markdown/markdown-renderer"; +import { useHandlePlanClick } from "#/hooks/use-handle-plan-click"; function PlannerTab() { const { t } = useTranslation(); + const { scrollRef: scrollContainerRef, onChatBodyScroll } = useScrollToBottom( + React.useRef(null), + ); - const { planContent, setConversationMode } = useConversationStore(); + const { planContent } = useConversationStore(); + const { handlePlanClick } = useHandlePlanClick(); - if (planContent) { + if (planContent !== null && planContent !== undefined) { return ( -

    - +
    onChatBodyScroll(e.currentTarget)} + className="flex flex-col w-full h-full p-4 overflow-auto" + > + {planContent} - +
    ); } @@ -56,7 +38,7 @@ function PlannerTab() {
    ); } diff --git a/frontend/src/routes/served-tab.tsx b/frontend/src/routes/served-tab.tsx index f2f6b2688383..b6abb5b3d3ca 100644 --- a/frontend/src/routes/served-tab.tsx +++ b/frontend/src/routes/served-tab.tsx @@ -65,6 +65,7 @@ function ServedApp() { type="button" onClick={() => window.open(fullUrl, "_blank")} className="text-sm" + aria-label={t(I18nKey.BUTTON$OPEN_IN_NEW_TAB)} > @@ -72,11 +73,17 @@ function ServedApp() { type="button" onClick={() => setRefreshKey((prev) => prev + 1)} className="text-sm" + aria-label={t(I18nKey.BUTTON$REFRESH)} > -
    diff --git a/frontend/src/routes/settings.tsx b/frontend/src/routes/settings.tsx index 19370245b330..4f35595d1319 100644 --- a/frontend/src/routes/settings.tsx +++ b/frontend/src/routes/settings.tsx @@ -1,14 +1,13 @@ import { useMemo } from "react"; import { Outlet, redirect, useLocation } from "react-router"; import { useTranslation } from "react-i18next"; -import { useConfig } from "#/hooks/query/use-config"; import { Route } from "./+types/settings"; import OptionService from "#/api/option-service/option-service.api"; import { queryClient } from "#/query-client-config"; import { GetConfigResponse } from "#/api/option-service/option.types"; -import { SAAS_NAV_ITEMS, OSS_NAV_ITEMS } from "#/constants/settings-nav"; -import { Typography } from "#/ui/typography"; import { SettingsLayout } from "#/components/features/settings/settings-layout"; +import { Typography } from "#/ui/typography"; +import { useSettingsNavItems } from "#/hooks/use-settings-nav-items"; const SAAS_ONLY_PATHS = [ "/settings/user", @@ -33,32 +32,26 @@ export const clientLoader = async ({ request }: Route.ClientLoaderArgs) => { // if in OSS mode, do not allow access to saas-only paths return redirect("/settings"); } + // If LLM settings are hidden and user tries to access the LLM settings page + if (config?.FEATURE_FLAGS?.HIDE_LLM_SETTINGS && pathname === "/settings") { + // Redirect to the first available settings page + return isSaas ? redirect("/settings/user") : redirect("/settings/mcp"); + } return null; }; function SettingsScreen() { const { t } = useTranslation(); - const { data: config } = useConfig(); const location = useLocation(); - - const isSaas = config?.APP_MODE === "saas"; - - // Navigation items configuration - const navItems = useMemo(() => { - const items = []; - if (isSaas) { - items.push(...SAAS_NAV_ITEMS); - } else { - items.push(...OSS_NAV_ITEMS); - } - return items; - }, [isSaas]); - + const navItems = useSettingsNavItems(); // Current section title for the main content area const currentSectionTitle = useMemo(() => { const currentItem = navItems.find((item) => item.to === location.pathname); - return currentItem ? currentItem.text : "SETTINGS$NAV_LLM"; + // Default to the first available navigation item if current page is not found + return currentItem + ? currentItem.text + : (navItems[0]?.text ?? 
"SETTINGS$TITLE"); }, [navItems, location.pathname]); return ( diff --git a/frontend/src/routes/user-settings.tsx b/frontend/src/routes/user-settings.tsx index 93366574b051..cddc38466ebf 100644 --- a/frontend/src/routes/user-settings.tsx +++ b/frontend/src/routes/user-settings.tsx @@ -122,12 +122,12 @@ function UserSettingsScreen() { const prevVerificationStatusRef = useRef(undefined); useEffect(() => { - if (settings?.EMAIL) { - setEmail(settings.EMAIL); - setOriginalEmail(settings.EMAIL); - setIsEmailValid(EMAIL_REGEX.test(settings.EMAIL)); + if (settings?.email) { + setEmail(settings.email); + setOriginalEmail(settings.email); + setIsEmailValid(EMAIL_REGEX.test(settings.email)); } - }, [settings?.EMAIL]); + }, [settings?.email]); useEffect(() => { if (pollingIntervalRef.current) { @@ -137,7 +137,7 @@ function UserSettingsScreen() { if ( prevVerificationStatusRef.current === false && - settings?.EMAIL_VERIFIED === true + settings?.email_verified === true ) { // Display toast notification instead of setting state displaySuccessToast(t("SETTINGS$EMAIL_VERIFIED_SUCCESSFULLY")); @@ -146,9 +146,9 @@ function UserSettingsScreen() { }, 2000); } - prevVerificationStatusRef.current = settings?.EMAIL_VERIFIED; + prevVerificationStatusRef.current = settings?.email_verified; - if (settings?.EMAIL_VERIFIED === false) { + if (settings?.email_verified === false) { pollingIntervalRef.current = window.setInterval(() => { refetch(); }, 5000); @@ -160,7 +160,7 @@ function UserSettingsScreen() { pollingIntervalRef.current = null; } }; - }, [settings?.EMAIL_VERIFIED, refetch, queryClient, t]); + }, [settings?.email_verified, refetch, queryClient, t]); const handleEmailChange = (e: React.ChangeEvent) => { const newEmail = e.target.value; @@ -215,10 +215,10 @@ function UserSettingsScreen() { isSaving={isSaving} isResendingVerification={isResendingVerification} isEmailChanged={isEmailChanged} - emailVerified={settings?.EMAIL_VERIFIED} + emailVerified={settings?.email_verified} isEmailValid={isEmailValid} > - {settings?.EMAIL_VERIFIED === false && } + {settings?.email_verified === false && } )}
    diff --git a/frontend/src/routes/vscode-tab.tsx b/frontend/src/routes/vscode-tab.tsx index 0d64180c1d2f..e1bb2e8fe4cb 100644 --- a/frontend/src/routes/vscode-tab.tsx +++ b/frontend/src/routes/vscode-tab.tsx @@ -51,7 +51,7 @@ function VSCodeTab() { ); } - if (error || (data && data.error) || !data?.url || iframeError) { + if (error || data?.error || !data?.url || iframeError) { return (
    {iframeError || diff --git a/frontend/src/services/actions.ts b/frontend/src/services/actions.ts index 86b89106ff09..6f03c526e124 100644 --- a/frontend/src/services/actions.ts +++ b/frontend/src/services/actions.ts @@ -1,6 +1,6 @@ import { trackError } from "#/utils/error-handler"; import useMetricsStore from "#/stores/metrics-store"; -import { useStatusStore } from "#/state/status-store"; +import { useStatusStore } from "#/stores/status-store"; import ActionType from "#/types/action-type"; import { ActionMessage, @@ -8,7 +8,7 @@ import { StatusMessage, } from "#/types/message"; import { handleObservationMessage } from "./observations"; -import { useCommandStore } from "#/state/command-store"; +import { useCommandStore } from "#/stores/command-store"; import { queryClient } from "#/query-client-config"; import { ActionSecurityRisk, diff --git a/frontend/src/services/observations.ts b/frontend/src/services/observations.ts index 40cc1daa8a27..8f1d8d3b4119 100644 --- a/frontend/src/services/observations.ts +++ b/frontend/src/services/observations.ts @@ -1,5 +1,5 @@ import { ObservationMessage } from "#/types/message"; -import { useCommandStore } from "#/state/command-store"; +import { useCommandStore } from "#/stores/command-store"; import ObservationType from "#/types/observation-type"; import { useBrowserStore } from "#/stores/browser-store"; import { useAgentStore } from "#/stores/agent-store"; diff --git a/frontend/src/services/settings.ts b/frontend/src/services/settings.ts index f7cad15b43a9..e4a04b1e87f2 100644 --- a/frontend/src/services/settings.ts +++ b/frontend/src/services/settings.ts @@ -3,34 +3,36 @@ import { Settings } from "#/types/settings"; export const LATEST_SETTINGS_VERSION = 5; export const DEFAULT_SETTINGS: Settings = { - LLM_MODEL: "openhands/claude-sonnet-4-20250514", - LLM_BASE_URL: "", - AGENT: "CodeActAgent", - LANGUAGE: "en", - LLM_API_KEY_SET: false, - SEARCH_API_KEY_SET: false, - CONFIRMATION_MODE: false, - SECURITY_ANALYZER: "llm", - REMOTE_RUNTIME_RESOURCE_FACTOR: 1, - PROVIDER_TOKENS_SET: {}, - ENABLE_DEFAULT_CONDENSER: true, - CONDENSER_MAX_SIZE: 120, - ENABLE_SOUND_NOTIFICATIONS: false, - USER_CONSENTS_TO_ANALYTICS: false, - ENABLE_PROACTIVE_CONVERSATION_STARTERS: false, - ENABLE_SOLVABILITY_ANALYSIS: false, - SEARCH_API_KEY: "", - IS_NEW_USER: true, - MAX_BUDGET_PER_TASK: null, - EMAIL: "", - EMAIL_VERIFIED: true, // Default to true to avoid restricting access unnecessarily - MCP_CONFIG: { + llm_model: "openhands/claude-opus-4-5-20251101", + llm_base_url: "", + agent: "CodeActAgent", + language: "en", + llm_api_key: null, + llm_api_key_set: false, + search_api_key_set: false, + confirmation_mode: false, + security_analyzer: "llm", + remote_runtime_resource_factor: 1, + provider_tokens_set: {}, + enable_default_condenser: true, + condenser_max_size: 120, + enable_sound_notifications: false, + user_consents_to_analytics: false, + enable_proactive_conversation_starters: false, + enable_solvability_analysis: false, + search_api_key: "", + is_new_user: true, + max_budget_per_task: null, + email: "", + email_verified: true, // Default to true to avoid restricting access unnecessarily + mcp_config: { sse_servers: [], stdio_servers: [], shttp_servers: [], }, - GIT_USER_NAME: "openhands", - GIT_USER_EMAIL: "openhands@all-hands.dev", + git_user_name: "openhands", + git_user_email: "openhands@all-hands.dev", + v1_enabled: false, }; /** diff --git a/frontend/src/settings-service/settings.types.ts b/frontend/src/settings-service/settings.types.ts deleted file mode 
100644 index bdd1610f4923..000000000000 --- a/frontend/src/settings-service/settings.types.ts +++ /dev/null @@ -1,53 +0,0 @@ -import { Provider } from "#/types/settings"; - -export type ApiSettings = { - llm_model: string; - llm_base_url: string; - agent: string; - language: string; - llm_api_key: string | null; - llm_api_key_set: boolean; - search_api_key_set: boolean; - confirmation_mode: boolean; - security_analyzer: string | null; - remote_runtime_resource_factor: number | null; - enable_default_condenser: boolean; - // Max size for condenser in backend settings - condenser_max_size: number | null; - enable_sound_notifications: boolean; - enable_proactive_conversation_starters: boolean; - enable_solvability_analysis: boolean; - user_consents_to_analytics: boolean | null; - search_api_key?: string; - provider_tokens_set: Partial>; - max_budget_per_task: number | null; - mcp_config?: { - sse_servers: (string | { url: string; api_key?: string })[]; - stdio_servers: { - name: string; - command: string; - args?: string[]; - env?: Record; - }[]; - shttp_servers: (string | { url: string; api_key?: string })[]; - }; - email?: string; - email_verified?: boolean; - git_user_name?: string; - git_user_email?: string; -}; - -export type PostApiSettings = ApiSettings & { - user_consents_to_analytics: boolean | null; - search_api_key?: string; - mcp_config?: { - sse_servers: (string | { url: string; api_key?: string })[]; - stdio_servers: { - name: string; - command: string; - args?: string[]; - env?: Record; - }[]; - shttp_servers: (string | { url: string; api_key?: string })[]; - }; -}; diff --git a/frontend/src/state/command-store.ts b/frontend/src/stores/command-store.ts similarity index 100% rename from frontend/src/state/command-store.ts rename to frontend/src/stores/command-store.ts diff --git a/frontend/src/state/conversation-store.ts b/frontend/src/stores/conversation-store.ts similarity index 68% rename from frontend/src/state/conversation-store.ts rename to frontend/src/stores/conversation-store.ts index 77186ce69c9a..a8edd16f6a44 100644 --- a/frontend/src/state/conversation-store.ts +++ b/frontend/src/stores/conversation-store.ts @@ -56,14 +56,53 @@ interface ConversationActions { setHasRightPanelToggled: (hasRightPanelToggled: boolean) => void; setConversationMode: (conversationMode: ConversationMode) => void; setSubConversationTaskId: (taskId: string | null) => void; + setPlanContent: (planContent: string | null) => void; } type ConversationStore = ConversationState & ConversationActions; -// Helper function to get initial right panel state from localStorage +const getConversationIdFromLocation = (): string | null => { + if (typeof window === "undefined") { + return null; + } + + const match = window.location.pathname.match(/\/conversations\/([^/]+)/); + return match ? match[1] : null; +}; + +const parseStoredBoolean = (value: string | null): boolean | null => { + if (value === null) { + return null; + } + + try { + return JSON.parse(value); + } catch { + return null; + } +}; + const getInitialRightPanelState = (): boolean => { - const stored = localStorage.getItem("conversation-right-panel-shown"); - return stored !== null ? JSON.parse(stored) : true; + if (typeof window === "undefined") { + return true; + } + + const conversationId = getConversationIdFromLocation(); + const keysToCheck = conversationId + ? 
[`conversation-right-panel-shown-${conversationId}`] + : []; + + // Fallback to legacy global key for users who haven't switched tabs yet + keysToCheck.push("conversation-right-panel-shown"); + + for (const key of keysToCheck) { + const parsed = parseStoredBoolean(localStorage.getItem(key)); + if (parsed !== null) { + return parsed; + } + } + + return true; }; export const useConversationStore = create()( @@ -81,91 +120,7 @@ export const useConversationStore = create()( submittedMessage: null, shouldHideSuggestions: false, hasRightPanelToggled: true, - planContent: ` -# Improve Developer Onboarding and Examples - -## Overview - -Based on the analysis of Browser-Use's current documentation and examples, this plan addresses gaps in developer onboarding by creating a progressive learning path, troubleshooting resources, and practical examples that address real-world scenarios (like the LM Studio/local LLM integration issues encountered). - -## Current State Analysis - -**Strengths:** - -- Good quickstart documentation in \`docs/quickstart.mdx\` -- Extensive examples across multiple categories (60+ example files) -- Well-structured docs with multiple LLM provider examples -- Active community support via Discord - -**Gaps Identified:** - -- No progressive tutorial series that builds complexity gradually -- Limited troubleshooting documentation for common issues -- Sparse comments in example files explaining what's happening -- Local LLM setup (Ollama/LM Studio) not prominently featured -- No "first 10 minutes" success path -- Missing visual/conceptual architecture guides for beginners -- Error messages don't always point to solutions - -## Proposed Improvements - -### 1. Create Interactive Tutorial Series (\`examples/tutorials/\`) - -**New folder structure:** - -\`\`\` -examples/tutorials/ -├── README.md # Tutorial overview and prerequisites -├── 00_hello_world.py # Absolute minimal example -├── 01_your_first_search.py # Basic search with detailed comments -├── 02_understanding_actions.py # How actions work -├── 03_data_extraction_basics.py # Extract data step-by-step -├── 04_error_handling.py # Common errors and solutions -├── 05_custom_tools_intro.py # First custom tool -├── 06_local_llm_setup.py # Ollama/LM Studio complete guide -└── 07_debugging_tips.py # Debugging strategies -\`\`\` - -**Key Features:** - -- Each file 50–80 lines max -- Extensive inline comments explaining every concept -- Clear learning objectives at the top of each file -- "What you'll learn" and "Prerequisites" sections -- Common pitfalls highlighted -- Expected output shown in comments - -### 2. Troubleshooting Guide (\`docs/troubleshooting.mdx\`) - -**Sections:** - -- Installation issues (Chromium, dependencies, virtual environments) -- LLM provider connection errors (API keys, timeouts, rate limits) -- Local LLM setup (Ollama vs LM Studio, model compatibility) -- Browser automation issues (element not found, timeout errors) -- Common error messages with solutions -- Performance optimization tips -- When to ask for help (Discord/GitHub) - -**Format:** - -**Error: "LLM call timed out after 60 seconds"** - -**What it means:** -The model took too long to respond - -**Common causes:** - -1. Model is too slow for the task -2. LM Studio/Ollama not responding properly -3. 
Complex page overwhelming the model - -**Solutions:** - -- Use flash_mode for faster execution -- Try a faster model (Gemini Flash, GPT-4 Turbo Mini) -- Simplify the task -- Check model server logs`, + planContent: null, conversationMode: "code", subConversationTaskId: null, @@ -304,6 +259,7 @@ The model took too long to respond shouldHideSuggestions: false, conversationMode: "code", subConversationTaskId: null, + planContent: null, }, false, "resetConversationState", @@ -317,6 +273,9 @@ The model took too long to respond setSubConversationTaskId: (subConversationTaskId) => set({ subConversationTaskId }, false, "setSubConversationTaskId"), + + setPlanContent: (planContent) => + set({ planContent }, false, "setPlanContent"), }), { name: "conversation-store", diff --git a/frontend/src/stores/home-store.ts b/frontend/src/stores/home-store.ts index 3ec2ed2c26a4..6289f65f0160 100644 --- a/frontend/src/stores/home-store.ts +++ b/frontend/src/stores/home-store.ts @@ -1,21 +1,26 @@ import { create } from "zustand"; import { persist, createJSONStorage } from "zustand/middleware"; import { GitRepository } from "#/types/git"; +import { Provider } from "#/types/settings"; interface HomeState { recentRepositories: GitRepository[]; + lastSelectedProvider: Provider | null; } interface HomeActions { addRecentRepository: (repository: GitRepository) => void; clearRecentRepositories: () => void; getRecentRepositories: () => GitRepository[]; + setLastSelectedProvider: (provider: Provider | null) => void; + getLastSelectedProvider: () => Provider | null; } type HomeStore = HomeState & HomeActions; const initialState: HomeState = { recentRepositories: [], + lastSelectedProvider: null, }; export const useHomeStore = create()( @@ -44,6 +49,13 @@ export const useHomeStore = create()( })), getRecentRepositories: () => get().recentRepositories, + + setLastSelectedProvider: (provider: Provider | null) => + set(() => ({ + lastSelectedProvider: provider, + })), + + getLastSelectedProvider: () => get().lastSelectedProvider, }), { name: "home-store", // unique name for localStorage diff --git a/frontend/src/state/microagent-management-store.ts b/frontend/src/stores/microagent-management-store.ts similarity index 100% rename from frontend/src/state/microagent-management-store.ts rename to frontend/src/stores/microagent-management-store.ts diff --git a/frontend/src/state/status-store.ts b/frontend/src/stores/status-store.ts similarity index 100% rename from frontend/src/state/status-store.ts rename to frontend/src/stores/status-store.ts diff --git a/frontend/src/stores/use-event-store.ts b/frontend/src/stores/use-event-store.ts index 307f4ced0d7b..2d8ecf0a3b6c 100644 --- a/frontend/src/stores/use-event-store.ts +++ b/frontend/src/stores/use-event-store.ts @@ -5,7 +5,9 @@ import { OpenHandsParsedEvent } from "#/types/core"; import { isV1Event } from "#/types/v1/type-guards"; // While we transition to v1 events, our store can handle both v0 and v1 events -type OHEvent = OpenHandsEvent | OpenHandsParsedEvent; +type OHEvent = (OpenHandsEvent | OpenHandsParsedEvent) & { + isFromPlanningAgent?: boolean; +}; interface EventState { events: OHEvent[]; diff --git a/frontend/src/tailwind.css b/frontend/src/tailwind.css index 8228f6b15440..16732885646a 100644 --- a/frontend/src/tailwind.css +++ b/frontend/src/tailwind.css @@ -318,8 +318,8 @@ background: transparent !important; } -/* Ensure all xterm elements have transparent backgrounds */ -.xterm * { +/* Ensure all xterm DOM elements have transparent backgrounds. 
Exclude canvas elements */ +.xterm { background: transparent !important; } diff --git a/frontend/src/types/core/actions.ts b/frontend/src/types/core/actions.ts index 89852f16e31e..bb80971e3285 100644 --- a/frontend/src/types/core/actions.ts +++ b/frontend/src/types/core/actions.ts @@ -31,8 +31,7 @@ export interface CommandAction extends OpenHandsActionEvent<"run"> { }; } -export interface AssistantMessageAction - extends OpenHandsActionEvent<"message"> { +export interface AssistantMessageAction extends OpenHandsActionEvent<"message"> { source: "agent"; args: { thought: string; @@ -87,8 +86,7 @@ export interface BrowseAction extends OpenHandsActionEvent<"browse"> { }; } -export interface BrowseInteractiveAction - extends OpenHandsActionEvent<"browse_interactive"> { +export interface BrowseInteractiveAction extends OpenHandsActionEvent<"browse_interactive"> { source: "agent"; timeout: number; args: { @@ -162,8 +160,7 @@ export interface MCPAction extends OpenHandsActionEvent<"call_tool_mcp"> { }; } -export interface TaskTrackingAction - extends OpenHandsActionEvent<"task_tracking"> { +export interface TaskTrackingAction extends OpenHandsActionEvent<"task_tracking"> { source: "agent"; args: { command: string; diff --git a/frontend/src/types/core/base.ts b/frontend/src/types/core/base.ts index 4014d2bbb5fa..e305bf7d4d6d 100644 --- a/frontend/src/types/core/base.ts +++ b/frontend/src/types/core/base.ts @@ -30,14 +30,16 @@ interface OpenHandsBaseEvent { timestamp: string; // ISO 8601 } -export interface OpenHandsActionEvent - extends OpenHandsBaseEvent { +export interface OpenHandsActionEvent< + T extends OpenHandsEventType, +> extends OpenHandsBaseEvent { action: T; args: Record; } -export interface OpenHandsObservationEvent - extends OpenHandsBaseEvent { +export interface OpenHandsObservationEvent< + T extends OpenHandsEventType, +> extends OpenHandsBaseEvent { cause: number; observation: T; content: string; diff --git a/frontend/src/types/core/observations.ts b/frontend/src/types/core/observations.ts index 01a73ec81beb..2741926fdaf7 100644 --- a/frontend/src/types/core/observations.ts +++ b/frontend/src/types/core/observations.ts @@ -1,8 +1,7 @@ import { AgentState } from "../agent-state"; import { OpenHandsObservationEvent } from "./base"; -export interface AgentStateChangeObservation - extends OpenHandsObservationEvent<"agent_state_changed"> { +export interface AgentStateChangeObservation extends OpenHandsObservationEvent<"agent_state_changed"> { source: "agent"; extras: { agent_state: AgentState; @@ -19,8 +18,7 @@ export interface CommandObservation extends OpenHandsObservationEvent<"run"> { }; } -export interface IPythonObservation - extends OpenHandsObservationEvent<"run_ipython"> { +export interface IPythonObservation extends OpenHandsObservationEvent<"run_ipython"> { source: "agent"; extras: { code: string; @@ -28,8 +26,7 @@ export interface IPythonObservation }; } -export interface DelegateObservation - extends OpenHandsObservationEvent<"delegate"> { +export interface DelegateObservation extends OpenHandsObservationEvent<"delegate"> { source: "agent"; extras: { outputs: Record; @@ -53,8 +50,7 @@ export interface BrowseObservation extends OpenHandsObservationEvent<"browse"> { }; } -export interface BrowseInteractiveObservation - extends OpenHandsObservationEvent<"browse_interactive"> { +export interface BrowseInteractiveObservation extends OpenHandsObservationEvent<"browse_interactive"> { source: "agent"; extras: { url: string; @@ -103,8 +99,7 @@ export interface ErrorObservation 
extends OpenHandsObservationEvent<"error"> { }; } -export interface AgentThinkObservation - extends OpenHandsObservationEvent<"think"> { +export interface AgentThinkObservation extends OpenHandsObservationEvent<"think"> { source: "agent"; extras: { thought: string; @@ -141,14 +136,12 @@ export interface MCPObservation extends OpenHandsObservationEvent<"mcp"> { }; } -export interface UserRejectedObservation - extends OpenHandsObservationEvent<"user_rejected"> { +export interface UserRejectedObservation extends OpenHandsObservationEvent<"user_rejected"> { source: "agent"; extras: Record; } -export interface TaskTrackingObservation - extends OpenHandsObservationEvent<"task_tracking"> { +export interface TaskTrackingObservation extends OpenHandsObservationEvent<"task_tracking"> { source: "agent"; extras: { command: string; diff --git a/frontend/src/types/settings.ts b/frontend/src/types/settings.ts index f76fcaa19a95..e5db0296bd1b 100644 --- a/frontend/src/types/settings.ts +++ b/frontend/src/types/settings.ts @@ -38,36 +38,31 @@ export type MCPConfig = { }; export type Settings = { - LLM_MODEL: string; - LLM_BASE_URL: string; - AGENT: string; - LANGUAGE: string; - LLM_API_KEY_SET: boolean; - SEARCH_API_KEY_SET: boolean; - CONFIRMATION_MODE: boolean; - SECURITY_ANALYZER: string | null; - REMOTE_RUNTIME_RESOURCE_FACTOR: number | null; - PROVIDER_TOKENS_SET: Partial>; - ENABLE_DEFAULT_CONDENSER: boolean; + llm_model: string; + llm_base_url: string; + agent: string; + language: string; + llm_api_key: string | null; + llm_api_key_set: boolean; + search_api_key_set: boolean; + confirmation_mode: boolean; + security_analyzer: string | null; + remote_runtime_resource_factor: number | null; + provider_tokens_set: Partial>; + enable_default_condenser: boolean; // Maximum number of events before the condenser runs - CONDENSER_MAX_SIZE: number | null; - ENABLE_SOUND_NOTIFICATIONS: boolean; - ENABLE_PROACTIVE_CONVERSATION_STARTERS: boolean; - ENABLE_SOLVABILITY_ANALYSIS: boolean; - USER_CONSENTS_TO_ANALYTICS: boolean | null; - SEARCH_API_KEY?: string; - IS_NEW_USER?: boolean; - MCP_CONFIG?: MCPConfig; - MAX_BUDGET_PER_TASK: number | null; - EMAIL?: string; - EMAIL_VERIFIED?: boolean; - GIT_USER_NAME?: string; - GIT_USER_EMAIL?: string; -}; - -export type PostSettings = Settings & { + condenser_max_size: number | null; + enable_sound_notifications: boolean; + enable_proactive_conversation_starters: boolean; + enable_solvability_analysis: boolean; user_consents_to_analytics: boolean | null; - llm_api_key?: string | null; search_api_key?: string; + is_new_user?: boolean; mcp_config?: MCPConfig; + max_budget_per_task: number | null; + email?: string; + email_verified?: boolean; + git_user_name?: string; + git_user_email?: string; + v1_enabled?: boolean; }; diff --git a/frontend/src/types/v1/core/base/action.ts b/frontend/src/types/v1/core/base/action.ts index ce08d5a1b99a..8d3ec41bff48 100644 --- a/frontend/src/types/v1/core/base/action.ts +++ b/frontend/src/types/v1/core/base/action.ts @@ -41,6 +41,25 @@ export interface ExecuteBashAction extends ActionBase<"ExecuteBashAction"> { reset: boolean; } +export interface TerminalAction extends ActionBase<"TerminalAction"> { + /** + * The terminal command to execute. + */ + command: string; + /** + * If True, the command is an input to the running process. If False, the command is executed directly. + */ + is_input: boolean; + /** + * Optional max time limit (seconds) for the command. 
+ */ + timeout: number | null; + /** + * If True, reset the terminal session before running the command. + */ + reset: boolean; +} + export interface FileEditorAction extends ActionBase<"FileEditorAction"> { /** * The commands to run. Allowed options are: `view`, `create`, `str_replace`, `insert`, `undo_edit`. @@ -72,8 +91,7 @@ export interface FileEditorAction extends ActionBase<"FileEditorAction"> { view_range: [number, number] | null; } -export interface StrReplaceEditorAction - extends ActionBase<"StrReplaceEditorAction"> { +export interface StrReplaceEditorAction extends ActionBase<"StrReplaceEditorAction"> { /** * The commands to run. Allowed options are: `view`, `create`, `str_replace`, `insert`, `undo_edit`. */ @@ -115,8 +133,7 @@ export interface TaskTrackerAction extends ActionBase<"TaskTrackerAction"> { task_list: TaskItem[]; } -export interface BrowserNavigateAction - extends ActionBase<"BrowserNavigateAction"> { +export interface BrowserNavigateAction extends ActionBase<"BrowserNavigateAction"> { /** * The URL to navigate to */ @@ -149,16 +166,14 @@ export interface BrowserTypeAction extends ActionBase<"BrowserTypeAction"> { text: string; } -export interface BrowserGetStateAction - extends ActionBase<"BrowserGetStateAction"> { +export interface BrowserGetStateAction extends ActionBase<"BrowserGetStateAction"> { /** * Whether to include a screenshot of the current page. Default: False */ include_screenshot: boolean; } -export interface BrowserGetContentAction - extends ActionBase<"BrowserGetContentAction"> { +export interface BrowserGetContentAction extends ActionBase<"BrowserGetContentAction"> { /** * Whether to include links in the content (default: False) */ @@ -180,21 +195,18 @@ export interface BrowserGoBackAction extends ActionBase<"BrowserGoBackAction"> { // No additional properties - this action has no parameters } -export interface BrowserListTabsAction - extends ActionBase<"BrowserListTabsAction"> { +export interface BrowserListTabsAction extends ActionBase<"BrowserListTabsAction"> { // No additional properties - this action has no parameters } -export interface BrowserSwitchTabAction - extends ActionBase<"BrowserSwitchTabAction"> { +export interface BrowserSwitchTabAction extends ActionBase<"BrowserSwitchTabAction"> { /** * 4 Character Tab ID of the tab to switch to (from browser_list_tabs) */ tab_id: string; } -export interface BrowserCloseTabAction - extends ActionBase<"BrowserCloseTabAction"> { +export interface BrowserCloseTabAction extends ActionBase<"BrowserCloseTabAction"> { /** * 4 Character Tab ID of the tab to close (from browser_list_tabs) */ @@ -206,6 +218,7 @@ export type Action = | FinishAction | ThinkAction | ExecuteBashAction + | TerminalAction | FileEditorAction | StrReplaceEditorAction | TaskTrackerAction diff --git a/frontend/src/types/v1/core/base/base.ts b/frontend/src/types/v1/core/base/base.ts index 5925e8599d43..7704f1105de5 100644 --- a/frontend/src/types/v1/core/base/base.ts +++ b/frontend/src/types/v1/core/base/base.ts @@ -3,9 +3,11 @@ type EventType = | "Finish" | "Think" | "ExecuteBash" + | "Terminal" | "FileEditor" | "StrReplaceEditor" - | "TaskTracker"; + | "TaskTracker" + | "PlanningFileEditor"; type ActionOnlyType = | "BrowserNavigate" @@ -24,7 +26,8 @@ type ObservationOnlyType = "Browser"; type ActionEventType = `${ActionOnlyType}Action` | `${EventType}Action`; type ObservationEventType = | `${ObservationOnlyType}Observation` - | `${EventType}Observation`; + | `${EventType}Observation` + | "TerminalObservation"; export interface 
ActionBase { kind: T; diff --git a/frontend/src/types/v1/core/base/observation.ts b/frontend/src/types/v1/core/base/observation.ts index e406e30593fb..a1c8a1a48d25 100644 --- a/frontend/src/types/v1/core/base/observation.ts +++ b/frontend/src/types/v1/core/base/observation.ts @@ -6,8 +6,7 @@ import { ImageContent, } from "./common"; -export interface MCPToolObservation - extends ObservationBase<"MCPToolObservation"> { +export interface MCPToolObservation extends ObservationBase<"MCPToolObservation"> { /** * Content returned from the MCP tool converted to LLM Ready TextContent or ImageContent */ @@ -22,23 +21,25 @@ export interface MCPToolObservation tool_name: string; } -export interface FinishObservation - extends ObservationBase<"FinishObservation"> { +export interface FinishObservation extends ObservationBase<"FinishObservation"> { /** - * Final message sent to the user + * Content returned from the finish action as a list of TextContent/ImageContent objects. */ - message: string; + content: Array; + /** + * Whether the finish action resulted in an error + */ + is_error: boolean; } export interface ThinkObservation extends ObservationBase<"ThinkObservation"> { /** * Confirmation message. DEFAULT: "Your thought has been logged." */ - content: string; + content: Array; } -export interface BrowserObservation - extends ObservationBase<"BrowserObservation"> { +export interface BrowserObservation extends ObservationBase<"BrowserObservation"> { /** * The output message from the browser operation */ @@ -53,8 +54,7 @@ export interface BrowserObservation screenshot_data: string | null; } -export interface ExecuteBashObservation - extends ObservationBase<"ExecuteBashObservation"> { +export interface ExecuteBashObservation extends ObservationBase<"ExecuteBashObservation"> { /** * Content returned from the tool as a list of TextContent/ImageContent objects. */ @@ -81,8 +81,34 @@ export interface ExecuteBashObservation metadata: CmdOutputMetadata; } -export interface FileEditorObservation - extends ObservationBase<"FileEditorObservation"> { +export interface TerminalObservation extends ObservationBase<"TerminalObservation"> { + /** + * Content returned from the terminal as a list of TextContent/ImageContent objects. + */ + content: Array; + /** + * The bash command that was executed. + */ + command: string | null; + /** + * The exit code of the command if it has finished. + */ + exit_code: number | null; + /** + * Whether the command execution produced an error. + */ + is_error: boolean; + /** + * Whether the command execution timed out. + */ + timeout: boolean; + /** + * Additional metadata captured from the shell after command execution. + */ + metadata: CmdOutputMetadata; +} + +export interface FileEditorObservation extends ObservationBase<"FileEditorObservation"> { /** * The commands to run. Allowed options are: `view`, `create`, `str_replace`, `insert`, `undo_edit`. */ @@ -114,8 +140,7 @@ export interface FileEditorObservation } // Keep StrReplaceEditorObservation as a separate interface for backward compatibility -export interface StrReplaceEditorObservation - extends ObservationBase<"StrReplaceEditorObservation"> { +export interface StrReplaceEditorObservation extends ObservationBase<"StrReplaceEditorObservation"> { /** * The commands to run. Allowed options are: `view`, `create`, `str_replace`, `insert`, `undo_edit`. 
*/ @@ -146,8 +171,7 @@ export interface StrReplaceEditorObservation error: string | null; } -export interface TaskTrackerObservation - extends ObservationBase<"TaskTrackerObservation"> { +export interface TaskTrackerObservation extends ObservationBase<"TaskTrackerObservation"> { /** * The formatted task list or status message. */ @@ -162,12 +186,45 @@ export interface TaskTrackerObservation task_list: TaskItem[]; } +export interface PlanningFileEditorObservation extends ObservationBase<"PlanningFileEditorObservation"> { + /** + * Content returned from the tool as a list of TextContent/ImageContent objects. + */ + content: Array; + /** + * Whether the call resulted in an error. + */ + is_error: boolean; + /** + * The commands to run. Allowed options are: `view`, `create`, `str_replace`, `insert`, `undo_edit`. + */ + command: "view" | "create" | "str_replace" | "insert" | "undo_edit"; + /** + * The file path that was edited. + */ + path: string | null; + /** + * Indicates if the file previously existed. If not, it was created. + */ + prev_exist: boolean; + /** + * The content of the file before the edit. + */ + old_content: string | null; + /** + * The content of the file after the edit. + */ + new_content: string | null; +} + export type Observation = | MCPToolObservation | FinishObservation | ThinkObservation | BrowserObservation | ExecuteBashObservation + | TerminalObservation | FileEditorObservation | StrReplaceEditorObservation - | TaskTrackerObservation; + | TaskTrackerObservation + | PlanningFileEditorObservation; diff --git a/frontend/src/types/v1/core/events/conversation-state-event.ts b/frontend/src/types/v1/core/events/conversation-state-event.ts index 81b3640dfa1a..93679d667170 100644 --- a/frontend/src/types/v1/core/events/conversation-state-event.ts +++ b/frontend/src/types/v1/core/events/conversation-state-event.ts @@ -1,11 +1,63 @@ import { BaseEvent } from "../base/event"; import { V1ExecutionStatus } from "../base/common"; +/** + * Token usage metrics for LLM calls + */ +export interface TokenUsage { + model: string; + prompt_tokens: number; + completion_tokens: number; + cache_read_tokens: number; + cache_write_tokens: number; + reasoning_tokens: number; + context_window: number; + per_turn_token: number; + response_id: string; +} + +/** + * LLM metrics for a specific component (agent or condenser) + */ +export interface LLMMetrics { + model_name: string; + accumulated_cost: number; + max_budget_per_task: number | null; + accumulated_token_usage: TokenUsage; + costs: Array<{ + model: string; + cost: number; + timestamp: number; + }>; + response_latencies: Array<{ + model: string; + latency: number; + response_id: string; + }>; + token_usages: TokenUsage[]; +} + +/** + * Usage metrics mapping for different components + */ +export interface UsageToMetrics { + agent: LLMMetrics; + condenser: LLMMetrics; +} + +/** + * Stats containing usage metrics + */ +export interface ConversationStats { + usage_to_metrics: UsageToMetrics; +} + /** * Conversation state value types */ export interface ConversationState { execution_status: V1ExecutionStatus; + stats?: ConversationStats; // Add other conversation state fields here as needed } @@ -19,32 +71,37 @@ interface ConversationStateUpdateEventBase extends BaseEvent { * Unique key for this state update event. * Can be "full_state" for full state snapshots or field names for partial updates. 
*/ - key: "full_state" | "execution_status"; // Extend with other keys as needed + key: "full_state" | "execution_status" | "stats"; // Extend with other keys as needed /** * Conversation state updates */ - value: ConversationState | V1ExecutionStatus; + value: ConversationState | V1ExecutionStatus | ConversationStats; } // Narrowed interfaces for full state update event -export interface ConversationStateUpdateEventFullState - extends ConversationStateUpdateEventBase { +export interface ConversationStateUpdateEventFullState extends ConversationStateUpdateEventBase { key: "full_state"; value: ConversationState; } // Narrowed interface for agent status update event -export interface ConversationStateUpdateEventAgentStatus - extends ConversationStateUpdateEventBase { +export interface ConversationStateUpdateEventAgentStatus extends ConversationStateUpdateEventBase { key: "execution_status"; value: V1ExecutionStatus; } +// Narrowed interface for stats update event +export interface ConversationStateUpdateEventStats extends ConversationStateUpdateEventBase { + key: "stats"; + value: ConversationStats; +} + // Conversation state update event - contains conversation state updates export type ConversationStateUpdateEvent = | ConversationStateUpdateEventFullState - | ConversationStateUpdateEventAgentStatus; + | ConversationStateUpdateEventAgentStatus + | ConversationStateUpdateEventStats; // Conversation error event - contains error information export interface ConversationErrorEvent extends BaseEvent { diff --git a/frontend/src/types/v1/core/events/observation-event.ts b/frontend/src/types/v1/core/events/observation-event.ts index 62750d72898c..bf4e22a70983 100644 --- a/frontend/src/types/v1/core/events/observation-event.ts +++ b/frontend/src/types/v1/core/events/observation-event.ts @@ -21,8 +21,9 @@ export interface ObservationBaseEvent extends BaseEvent { } // Main observation event interface -export interface ObservationEvent - extends ObservationBaseEvent { +export interface ObservationEvent< + T extends Observation = Observation, +> extends ObservationBaseEvent { /** * The observation (tool call) sent to LLM */ diff --git a/frontend/src/types/v1/type-guards.ts b/frontend/src/types/v1/type-guards.ts index b479e4697b5a..dec181620923 100644 --- a/frontend/src/types/v1/type-guards.ts +++ b/frontend/src/types/v1/type-guards.ts @@ -3,7 +3,12 @@ import { ObservationEvent, BaseEvent, ExecuteBashAction, + TerminalAction, ExecuteBashObservation, + PlanningFileEditorObservation, + TerminalObservation, + BrowserObservation, + BrowserNavigateAction, } from "./core"; import { AgentErrorEvent } from "./core/events/observation-event"; import { MessageEvent } from "./core/events/message-event"; @@ -12,6 +17,7 @@ import { ConversationStateUpdateEvent, ConversationStateUpdateEventAgentStatus, ConversationStateUpdateEventFullState, + ConversationStateUpdateEventStats, ConversationErrorEvent, } from "./core/events/conversation-state-event"; import { SystemPromptEvent } from "./core/events/system-event"; @@ -48,7 +54,10 @@ export const isObservationEvent = ( ): event is ObservationEvent => event.source === "environment" && "action_id" in event && - "observation" in event; + "observation" in event && + event.observation !== null && + typeof event.observation === "object" && + "kind" in event.observation; /** * Type guard function to check if an event is an agent error event @@ -88,6 +97,9 @@ export const isUserMessageEvent = ( export const isActionEvent = (event: OpenHandsEvent): event is ActionEvent => 
event.source === "agent" && "action" in event && + event.action !== null && + typeof event.action === "object" && + "kind" in event.action && "tool_name" in event && "tool_call_id" in event && typeof event.tool_name === "string" && @@ -98,17 +110,45 @@ export const isActionEvent = (event: OpenHandsEvent): event is ActionEvent => */ export const isExecuteBashActionEvent = ( event: OpenHandsEvent, -): event is ActionEvent => - isActionEvent(event) && event.action.kind === "ExecuteBashAction"; +): event is ActionEvent => + isActionEvent(event) && + (event.action.kind === "ExecuteBashAction" || + event.action.kind === "TerminalAction"); /** - * Type guard function to check if an observation event is an ExecuteBashObservation + * Type guard function to check if an observation event contains terminal output */ export const isExecuteBashObservationEvent = ( event: OpenHandsEvent, -): event is ObservationEvent => +): event is ObservationEvent => isObservationEvent(event) && - event.observation.kind === "ExecuteBashObservation"; + (event.observation.kind === "ExecuteBashObservation" || + event.observation.kind === "TerminalObservation"); + +/** + * Type guard function to check if an observation event is a PlanningFileEditorObservation + */ +export const isPlanningFileEditorObservationEvent = ( + event: OpenHandsEvent, +): event is ObservationEvent => + isObservationEvent(event) && + event.observation.kind === "PlanningFileEditorObservation"; + +/** + * Type guard function to check if an observation event is a BrowserObservation + */ +export const isBrowserObservationEvent = ( + event: OpenHandsEvent, +): event is ObservationEvent => + isObservationEvent(event) && event.observation.kind === "BrowserObservation"; + +/** + * Type guard function to check if an action event is a BrowserNavigateAction + */ +export const isBrowserNavigateActionEvent = ( + event: OpenHandsEvent, +): event is ActionEvent => + isActionEvent(event) && event.action.kind === "BrowserNavigateAction"; /** * Type guard function to check if an event is a system prompt event @@ -139,6 +179,10 @@ export const isAgentStatusConversationStateUpdateEvent = ( ): event is ConversationStateUpdateEventAgentStatus => event.key === "execution_status"; +export const isStatsConversationStateUpdateEvent = ( + event: ConversationStateUpdateEvent, +): event is ConversationStateUpdateEventStats => event.key === "stats"; + /** * Type guard function to check if an event is a conversation error event */ diff --git a/frontend/src/utils/extract-model-and-provider.ts b/frontend/src/utils/extract-model-and-provider.ts index 93ef12d8bf16..ab0836079f5b 100644 --- a/frontend/src/utils/extract-model-and-provider.ts +++ b/frontend/src/utils/extract-model-and-provider.ts @@ -16,7 +16,7 @@ import { * splitIsActuallyVersion(split) // returns true */ const splitIsActuallyVersion = (split: string[]) => - split[1] && split[1][0] && isNumber(split[1][0]); + split[1]?.[0] && isNumber(split[1][0]); /** * Given a model string, extract the provider and model name. Currently the supported separators are "/" and "." 
diff --git a/frontend/src/utils/feature-flags.ts b/frontend/src/utils/feature-flags.ts index acbe83d7d7e5..0f38a4d7eace 100644 --- a/frontend/src/utils/feature-flags.ts +++ b/frontend/src/utils/feature-flags.ts @@ -17,6 +17,4 @@ export const HIDE_LLM_SETTINGS = () => loadFeatureFlag("HIDE_LLM_SETTINGS"); export const VSCODE_IN_NEW_TAB = () => loadFeatureFlag("VSCODE_IN_NEW_TAB"); export const ENABLE_TRAJECTORY_REPLAY = () => loadFeatureFlag("TRAJECTORY_REPLAY"); -export const USE_V1_CONVERSATION_API = () => - loadFeatureFlag("USE_V1_CONVERSATION_API"); export const USE_PLANNING_AGENT = () => loadFeatureFlag("USE_PLANNING_AGENT"); diff --git a/frontend/src/utils/format-time-delta.ts b/frontend/src/utils/format-time-delta.ts index 8f2425a234fe..6785d9c845cc 100644 --- a/frontend/src/utils/format-time-delta.ts +++ b/frontend/src/utils/format-time-delta.ts @@ -1,16 +1,45 @@ +/** + * Parses a date string as UTC if it doesn't have a timezone indicator. + * This fixes the issue where ISO strings without timezone info are interpreted as local time. + * @param dateString ISO 8601 date string + * @returns Date object parsed as UTC + * + * @example + * parseDateAsUTC("2025-12-01T11:53:37.273886"); // Parsed as UTC + * parseDateAsUTC("2025-12-01T11:53:37.273886Z"); // Already has timezone, parsed correctly + * parseDateAsUTC("2025-12-01T11:53:37+00:00"); // Already has timezone, parsed correctly + */ +const parseDateAsUTC = (dateString: string): Date => { + // Check if the string already has a timezone indicator + // Look for 'Z' (UTC), '+' (positive offset), or '-' after the time part (negative offset) + const hasTimezone = + dateString.includes("Z") || dateString.match(/[+-]\d{2}:\d{2}$/) !== null; + + if (hasTimezone) { + // Already has timezone info, parse normally + return new Date(dateString); + } + + // No timezone indicator - append 'Z' to force UTC parsing + return new Date(`${dateString}Z`); +}; + /** * Formats a date into a compact string representing the time delta between the given date and the current date. - * @param date The date to format + * @param date The date to format (Date object or ISO 8601 string) * @returns A compact string representing the time delta between the given date and the current date * * @example * // now is 2024-01-01T00:00:00Z * formatTimeDelta(new Date("2023-12-31T23:59:59Z")); // "1s" - * formatTimeDelta(new Date("2022-01-01T00:00:00Z")); // "2y" + * formatTimeDelta("2023-12-31T23:59:59Z"); // "1s" + * formatTimeDelta("2025-12-01T11:53:37.273886"); // Parsed as UTC automatically */ -export const formatTimeDelta = (date: Date) => { +export const formatTimeDelta = (date: Date | string) => { + // Parse string dates as UTC if needed, or use Date object directly + const dateObj = typeof date === "string" ? 
parseDateAsUTC(date) : date; const now = new Date(); - const delta = now.getTime() - date.getTime(); + const delta = now.getTime() - dateObj.getTime(); const seconds = Math.floor(delta / 1000); const minutes = Math.floor(seconds / 60); diff --git a/frontend/src/utils/has-advanced-settings-set.ts b/frontend/src/utils/has-advanced-settings-set.ts index 8cf3f10a393d..b87342523917 100644 --- a/frontend/src/utils/has-advanced-settings-set.ts +++ b/frontend/src/utils/has-advanced-settings-set.ts @@ -3,4 +3,4 @@ import { Settings } from "#/types/settings"; export const hasAdvancedSettingsSet = (settings: Partial): boolean => Object.keys(settings).length > 0 && - (!!settings.LLM_BASE_URL || settings.AGENT !== DEFAULT_SETTINGS.AGENT); + (!!settings.llm_base_url || settings.agent !== DEFAULT_SETTINGS.agent); diff --git a/frontend/src/utils/parse-terminal-output.ts b/frontend/src/utils/parse-terminal-output.ts index a6ccc73cfc58..1cd54eb8581a 100644 --- a/frontend/src/utils/parse-terminal-output.ts +++ b/frontend/src/utils/parse-terminal-output.ts @@ -1,3 +1,5 @@ +const START = "[Python Interpreter: "; + /** * Parses the raw output from the terminal into the command and symbol * @param raw The raw output to be displayed in the terminal @@ -13,9 +15,14 @@ * console.log(parsed.symbol); // openhands@659478cb008c:/workspace $ */ export const parseTerminalOutput = (raw: string) => { - const envRegex = /(.*)\[Python Interpreter: (.*)\]/s; - const match = raw.match(envRegex); - - if (!match) return raw; - return match[1]?.trim() || ""; + const start = raw.indexOf(START); + if (start < 0) { + return raw; + } + const offset = start + START.length; + const end = raw.indexOf("]", offset); + if (end <= offset) { + return raw; + } + return raw.substring(0, start).trim(); }; diff --git a/frontend/src/utils/reo.ts b/frontend/src/utils/reo.ts index 9f76c98d314f..b2b8773ec804 100644 --- a/frontend/src/utils/reo.ts +++ b/frontend/src/utils/reo.ts @@ -4,6 +4,8 @@ * Using CDN approach for better TypeScript compatibility */ +import EventLogger from "./event-logger"; + export interface ReoIdentity { username: string; type: "github" | "email"; @@ -41,7 +43,7 @@ class ReoService { this.initialized = true; } } catch (error) { - console.error("Failed to initialize Reo.dev tracking:", error); + EventLogger.error(`Failed to initialize Reo.dev tracking: ${error}`); } } @@ -78,7 +80,7 @@ class ReoService { */ identify(identity: ReoIdentity): void { if (!this.initialized) { - console.warn("Reo.dev not initialized. Call init() first."); + EventLogger.warning("Reo.dev not initialized. 
Call init() first."); return; } @@ -87,7 +89,7 @@ class ReoService { window.Reo.identify(identity); } } catch (error) { - console.error("Failed to identify user in Reo.dev:", error); + EventLogger.error(`Failed to identify user in Reo.dev: ${error}`); } } diff --git a/frontend/src/utils/settings-utils.ts b/frontend/src/utils/settings-utils.ts index ca56b251707e..4259226d7718 100644 --- a/frontend/src/utils/settings-utils.ts +++ b/frontend/src/utils/settings-utils.ts @@ -67,9 +67,7 @@ export const parseMaxBudgetPerTask = (value: string): number | null => { : null; }; -export const extractSettings = ( - formData: FormData, -): Partial & { llm_api_key?: string | null } => { +export const extractSettings = (formData: FormData): Partial => { const { LLM_MODEL, LLM_API_KEY, AGENT, LANGUAGE } = extractBasicFormData(formData); @@ -82,14 +80,14 @@ export const extractSettings = ( } = extractAdvancedFormData(formData); return { - LLM_MODEL: CUSTOM_LLM_MODEL || LLM_MODEL, - LLM_API_KEY_SET: !!LLM_API_KEY, - AGENT, - LANGUAGE, - LLM_BASE_URL, - CONFIRMATION_MODE, - SECURITY_ANALYZER, - ENABLE_DEFAULT_CONDENSER, + llm_model: CUSTOM_LLM_MODEL || LLM_MODEL, + llm_api_key_set: !!LLM_API_KEY, + agent: AGENT, + language: LANGUAGE, + llm_base_url: LLM_BASE_URL, + confirmation_mode: CONFIRMATION_MODE, + security_analyzer: SECURITY_ANALYZER, + enable_default_condenser: ENABLE_DEFAULT_CONDENSER, llm_api_key: LLM_API_KEY, }; }; diff --git a/frontend/src/utils/utils.ts b/frontend/src/utils/utils.ts index 620fb2c444ba..69ff7aae5f01 100644 --- a/frontend/src/utils/utils.ts +++ b/frontend/src/utils/utils.ts @@ -606,10 +606,15 @@ export const shouldIncludeRepository = ( * @returns The query string for searching OpenHands repositories */ export const getOpenHandsQuery = (provider: Provider | null): string => { - if (provider === "gitlab") { - return "openhands-config"; - } - return ".openhands"; + const providerRepositorySuffix: Record = { + gitlab: "openhands-config", + azure_devops: "openhands-config", + default: ".openhands", + } as const; + + return provider && provider in providerRepositorySuffix + ? 
providerRepositorySuffix[provider] + : providerRepositorySuffix.default; }; /** @@ -621,12 +626,7 @@ export const getOpenHandsQuery = (provider: Provider | null): string => { export const hasOpenHandsSuffix = ( repo: GitRepository, provider: Provider | null, -): boolean => { - if (provider === "gitlab") { - return repo.full_name.endsWith("/openhands-config"); - } - return repo.full_name.endsWith("/.openhands"); -}; +): boolean => repo.full_name.endsWith(`/${getOpenHandsQuery(provider)}`); /** * Build headers for V1 API requests that require session authentication diff --git a/frontend/src/utils/verified-models.ts b/frontend/src/utils/verified-models.ts index 12453c6c8642..dcf5f7251766 100644 --- a/frontend/src/utils/verified-models.ts +++ b/frontend/src/utils/verified-models.ts @@ -19,6 +19,7 @@ export const VERIFIED_MODELS = [ "claude-haiku-4-5-20251001", "claude-opus-4-20250514", "claude-opus-4-1-20250805", + "claude-opus-4-5-20251101", "gemini-2.5-pro", "o4-mini", "deepseek-chat", @@ -59,6 +60,7 @@ export const VERIFIED_ANTHROPIC_MODELS = [ "claude-haiku-4-5-20251001", "claude-opus-4-20250514", "claude-opus-4-1-20250805", + "claude-opus-4-5-20251101", ]; // LiteLLM does not return the compatible Mistral models with the provider, so we list them here to set them ourselves @@ -79,6 +81,7 @@ export const VERIFIED_OPENHANDS_MODELS = [ "gpt-5-mini-2025-08-07", "claude-opus-4-20250514", "claude-opus-4-1-20250805", + "claude-opus-4-5-20251101", "gemini-2.5-pro", "o3", "o4-mini", @@ -90,4 +93,4 @@ export const VERIFIED_OPENHANDS_MODELS = [ ]; // Default model for OpenHands provider -export const DEFAULT_OPENHANDS_MODEL = "openhands/claude-sonnet-4-20250514"; +export const DEFAULT_OPENHANDS_MODEL = "openhands/claude-opus-4-5-20251101"; diff --git a/frontend/src/utils/websocket-url.ts b/frontend/src/utils/websocket-url.ts index fa6b907d0e11..a0aebf4151cb 100644 --- a/frontend/src/utils/websocket-url.ts +++ b/frontend/src/utils/websocket-url.ts @@ -9,6 +9,17 @@ export function extractBaseHost( if (conversationUrl && !conversationUrl.startsWith("/")) { try { const url = new URL(conversationUrl); + // If the backend returns a localhost URL but the UI is accessed via + // another hostname (e.g., from a remote machine), swap the hostname + // while preserving the backend-provided port so the socket remains + // reachable. + if ( + ["localhost", "127.0.0.1"].includes(url.hostname) && + window.location.hostname !== url.hostname + ) { + return `${window.location.hostname}${url.port ? `:${url.port}` : ""}`; + } + return url.host; // e.g., "localhost:3000" } catch { return window.location.host; diff --git a/frontend/tests/avatar-menu.spec.ts b/frontend/tests/avatar-menu.spec.ts new file mode 100644 index 000000000000..a7ef4efe4044 --- /dev/null +++ b/frontend/tests/avatar-menu.spec.ts @@ -0,0 +1,48 @@ +import test, { expect } from "@playwright/test"; + +/** + * Test for issue #11933: Avatar context menu closes when moving cursor diagonally + * + * This test verifies that the user can move their cursor diagonally from the + * avatar to the context menu without the menu closing unexpectedly. 
+ */ +test("avatar context menu stays open when moving cursor diagonally to menu", async ({ + page, + browserName, +}) => { + // Skip on WebKit - Playwright's mouse.move() doesn't reliably trigger CSS hover states + test.skip(browserName === "webkit", "Playwright hover simulation unreliable"); + + await page.goto("/"); + + // Get the user avatar button + const userAvatar = page.getByTestId("user-avatar"); + await expect(userAvatar).toBeVisible(); + + // Get avatar bounding box first + const avatarBox = await userAvatar.boundingBox(); + if (!avatarBox) { + throw new Error("Could not get bounding box for avatar"); + } + + // Use mouse.move to hover (not .hover() which may trigger click) + const avatarCenterX = avatarBox.x + avatarBox.width / 2; + const avatarCenterY = avatarBox.y + avatarBox.height / 2; + await page.mouse.move(avatarCenterX, avatarCenterY); + + // The context menu should appear via CSS group-hover + const contextMenu = page.getByTestId("account-settings-context-menu"); + await expect(contextMenu).toBeVisible(); + + // Move UP from the LEFT side of the avatar - simulating diagonal movement + // toward the menu (which is to the right). This exits the hover zone. + const leftX = avatarBox.x + 2; + const aboveY = avatarBox.y - 50; + await page.mouse.move(leftX, aboveY); + + // The menu uses opacity-0/opacity-100 for visibility via CSS. + // Use toHaveCSS which auto-retries, avoiding flaky waitForTimeout. + // The menu should remain visible (opacity 1) to allow diagonal access to it. + const menuWrapper = contextMenu.locator(".."); + await expect(menuWrapper).toHaveCSS("opacity", "1"); +}); diff --git a/frontend/tests/conversation-panel.test.ts b/frontend/tests/conversation-panel.test.ts deleted file mode 100644 index 6e3f58cd458f..000000000000 --- a/frontend/tests/conversation-panel.test.ts +++ /dev/null @@ -1,134 +0,0 @@ -import test, { expect, Page } from "@playwright/test"; - -const toggleConversationPanel = async (page: Page) => { - const panel = page.getByTestId("conversation-panel"); - await page.waitForTimeout(1000); // Wait for state to stabilize - const panelIsVisible = await panel.isVisible(); - - if (!panelIsVisible) { - const conversationPanelButton = page.getByTestId( - "toggle-conversation-panel", - ); - await conversationPanelButton.click(); - } - - return page.getByTestId("conversation-panel"); -}; - -const selectConversationCard = async (page: Page, index: number) => { - const panel = await toggleConversationPanel(page); - - // select a conversation - const conversationItem = panel.getByTestId("conversation-card").nth(index); - await conversationItem.click(); - - // panel should close - await expect(panel).not.toBeVisible(); - - await page.waitForURL(`/conversations/${index + 1}`); - expect(page.url()).toBe(`http://localhost:3001/conversations/${index + 1}`); -}; - -test.beforeEach(async ({ page }) => { - await page.goto("/"); -}); - -test("should only display the create new conversation button when in a conversation", async ({ - page, -}) => { - const panel = page.getByTestId("conversation-panel"); - - const newProjectButton = panel.getByTestId("new-conversation-button"); - await expect(newProjectButton).not.toBeVisible(); - - await page.goto("/conversations/1"); - await expect(newProjectButton).toBeVisible(); -}); - -test("redirect to /conversation with the session id as a path param when clicking on a conversation card", async ({ - page, -}) => { - const panel = page.getByTestId("conversation-panel"); - - // select a conversation - const conversationItem = 
panel.getByTestId("conversation-card").first(); - await conversationItem.click(); - - // panel should close - expect(panel).not.toBeVisible(); - - await page.waitForURL("/conversations/1"); - expect(page.url()).toBe("http://localhost:3001/conversations/1"); -}); - -test("redirect to the home screen if the current session was deleted", async ({ - page, -}) => { - await page.goto("/conversations/1"); - await page.waitForURL("/conversations/1"); - - const panel = page.getByTestId("conversation-panel"); - const firstCard = panel.getByTestId("conversation-card").first(); - - const ellipsisButton = firstCard.getByTestId("ellipsis-button"); - await ellipsisButton.click(); - - const deleteButton = firstCard.getByTestId("delete-button"); - await deleteButton.click(); - - // confirm modal - const confirmButton = page.getByText("Confirm"); - await confirmButton.click(); - - await page.waitForURL("/"); -}); - -test("load relevant files in the file explorer", async ({ page }) => { - await selectConversationCard(page, 0); - - // check if the file explorer has the correct files - const fileExplorer = page.getByTestId("file-explorer"); - - await expect(fileExplorer.getByText("file1.txt")).toBeVisible(); - await expect(fileExplorer.getByText("file2.txt")).toBeVisible(); - await expect(fileExplorer.getByText("file3.txt")).toBeVisible(); - - await selectConversationCard(page, 2); - - // check if the file explorer has the correct files - expect(fileExplorer.getByText("reboot_skynet.exe")).toBeVisible(); - expect(fileExplorer.getByText("target_list.txt")).toBeVisible(); - expect(fileExplorer.getByText("terminator_blueprint.txt")).toBeVisible(); -}); - -test("should redirect to home screen if conversation deos not exist", async ({ - page, -}) => { - await page.goto("/conversations/9999"); - await page.waitForURL("/"); -}); - -test("display the conversation details during a conversation", async ({ - page, -}) => { - const conversationPanelButton = page.getByTestId("toggle-conversation-panel"); - await expect(conversationPanelButton).toBeVisible(); - await conversationPanelButton.click(); - - const panel = page.getByTestId("conversation-panel"); - - // select a conversation - const conversationItem = panel.getByTestId("conversation-card").first(); - await conversationItem.click(); - - // panel should close - await expect(panel).not.toBeVisible(); - - await page.waitForURL("/conversations/1"); - expect(page.url()).toBe("http://localhost:3001/conversations/1"); - - const conversationDetails = page.getByTestId("conversation-card"); - - await expect(conversationDetails).toBeVisible(); - await expect(conversationDetails).toHaveText("Conversation 1"); -}); diff --git a/frontend/tests/helpers/confirm-settings.ts b/frontend/tests/helpers/confirm-settings.ts deleted file mode 100644 index ca82edd35a15..000000000000 --- a/frontend/tests/helpers/confirm-settings.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Page } from "@playwright/test"; - -export const confirmSettings = async (page: Page) => { - const confirmPreferenceButton = page.getByRole("button", { - name: /confirm preferences/i, - }); - await confirmPreferenceButton.click(); - - const configSaveButton = page - .getByRole("button", { - name: /save/i, - }) - .first(); - await configSaveButton.click(); - - const confirmChanges = page.getByRole("button", { - name: /yes, close settings/i, - }); - await confirmChanges.click(); -}; diff --git a/frontend/tests/placeholder.spec.ts b/frontend/tests/placeholder.spec.ts new file mode 100644 index 000000000000..48e76b587ecb --- 
/dev/null +++ b/frontend/tests/placeholder.spec.ts @@ -0,0 +1,4 @@ +import { test } from "@playwright/test"; + +// Placeholder test to ensure CI passes until real E2E tests are added +test("placeholder", () => {}); diff --git a/frontend/tests/redirect.spec.ts b/frontend/tests/redirect.spec.ts deleted file mode 100644 index 8425345ba624..000000000000 --- a/frontend/tests/redirect.spec.ts +++ /dev/null @@ -1,66 +0,0 @@ -import { expect, test } from "@playwright/test"; -import path from "path"; -import { fileURLToPath } from "url"; - -const filename = fileURLToPath(import.meta.url); -const dirname = path.dirname(filename); - -test.beforeEach(async ({ page }) => { - await page.goto("/"); -}); - -test("should redirect to /conversations after uploading a project zip", async ({ - page, -}) => { - const fileInput = page.getByLabel("Upload a .zip"); - const filePath = path.join(dirname, "fixtures/project.zip"); - await fileInput.setInputFiles(filePath); - - await page.waitForURL(/\/conversations\/\d+/); -}); - -test("should redirect to /conversations after selecting a repo", async ({ - page, -}) => { - // enter a github token to view the repositories - const connectToGitHubButton = page.getByRole("button", { - name: /connect to github/i, - }); - await connectToGitHubButton.click(); - const tokenInput = page.getByLabel(/github token\*/i); - await tokenInput.fill("fake-token"); - - const submitButton = page.getByTestId("connect-to-github"); - await submitButton.click(); - - // select a repository - const repoDropdown = page.getByLabel(/github repository/i); - await repoDropdown.click(); - - const repoItem = page.getByTestId("github-repo-item").first(); - await repoItem.click(); - - await page.waitForURL(/\/conversations\/\d+/); -}); - -// FIXME: This fails because the MSW WS mocks change state too quickly, -// missing the OPENING status where the initial query is rendered. 
-test.skip("should redirect the user to /conversation with their initial query after selecting a project", async ({ - page, -}) => { - // enter query - const testQuery = "this is my test query"; - const textbox = page.getByPlaceholder(/what do you want to build/i); - expect(textbox).not.toBeNull(); - await textbox.fill(testQuery); - - const fileInput = page.getByLabel("Upload a .zip"); - const filePath = path.join(dirname, "fixtures/project.zip"); - await fileInput.setInputFiles(filePath); - - await page.waitForURL("/conversation"); - - // get user message - const userMessage = page.getByTestId("user-message"); - expect(await userMessage.textContent()).toBe(testQuery); -}); diff --git a/frontend/tests/repo-selection-form.test.tsx b/frontend/tests/repo-selection-form.test.tsx deleted file mode 100644 index 24666d49fc23..000000000000 --- a/frontend/tests/repo-selection-form.test.tsx +++ /dev/null @@ -1,130 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from "vitest"; -import { render, screen, fireEvent } from "@testing-library/react"; -import { RepositorySelectionForm } from "../src/components/features/home/repo-selection-form"; -import { useUserRepositories } from "../src/hooks/query/use-user-repositories"; -import { useRepositoryBranches } from "../src/hooks/query/use-repository-branches"; -import { useCreateConversation } from "../src/hooks/mutation/use-create-conversation"; -import { useIsCreatingConversation } from "../src/hooks/use-is-creating-conversation"; - -// Mock the hooks -vi.mock("../src/hooks/query/use-user-repositories"); -vi.mock("../src/hooks/query/use-repository-branches"); -vi.mock("../src/hooks/mutation/use-create-conversation"); -vi.mock("../src/hooks/use-is-creating-conversation"); -vi.mock("react-i18next", () => ({ - useTranslation: () => ({ - t: (key: string) => key, - }), -})); - -describe("RepositorySelectionForm", () => { - const mockOnRepoSelection = vi.fn(); - - beforeEach(() => { - vi.resetAllMocks(); - - // Mock the hooks with default values - (useUserRepositories as any).mockReturnValue({ - data: [ - { id: "1", full_name: "test/repo1" }, - { id: "2", full_name: "test/repo2" } - ], - isLoading: false, - isError: false, - }); - - (useRepositoryBranches as any).mockReturnValue({ - data: [ - { name: "main" }, - { name: "develop" } - ], - isLoading: false, - isError: false, - }); - - (useCreateConversation as any).mockReturnValue({ - mutate: vi.fn(() => (useIsCreatingConversation as any).mockReturnValue(true)), - isPending: false, - isSuccess: false, - }); - - (useIsCreatingConversation as any).mockReturnValue(false); - }); - - it("should clear selected branch when input is empty", async () => { - render(); - - // First select a repository to enable the branch dropdown - const repoDropdown = screen.getByTestId("repository-dropdown"); - fireEvent.change(repoDropdown, { target: { value: "test/repo1" } }); - - // Get the branch dropdown and verify it's enabled - const branchDropdown = screen.getByTestId("branch-dropdown"); - expect(branchDropdown).not.toBeDisabled(); - - // Simulate deleting all text in the branch input - fireEvent.change(branchDropdown, { target: { value: "" } }); - - // Verify the branch input is cleared (no selected branch) - expect(branchDropdown).toHaveValue(""); - }); - - it("should clear selected branch when input contains only whitespace", async () => { - render(); - - // First select a repository to enable the branch dropdown - const repoDropdown = screen.getByTestId("repository-dropdown"); - fireEvent.change(repoDropdown, { 
target: { value: "test/repo1" } }); - - // Get the branch dropdown and verify it's enabled - const branchDropdown = screen.getByTestId("branch-dropdown"); - expect(branchDropdown).not.toBeDisabled(); - - // Simulate entering only whitespace in the branch input - fireEvent.change(branchDropdown, { target: { value: " " } }); - - // Verify the branch input is cleared (no selected branch) - expect(branchDropdown).toHaveValue(""); - }); - - it("should keep branch empty after being cleared even with auto-selection", async () => { - render(); - - // First select a repository to enable the branch dropdown - const repoDropdown = screen.getByTestId("repository-dropdown"); - fireEvent.change(repoDropdown, { target: { value: "test/repo1" } }); - - // Get the branch dropdown and verify it's enabled - const branchDropdown = screen.getByTestId("branch-dropdown"); - expect(branchDropdown).not.toBeDisabled(); - - // The branch should be auto-selected to "main" initially - expect(branchDropdown).toHaveValue("main"); - - // Simulate deleting all text in the branch input - fireEvent.change(branchDropdown, { target: { value: "" } }); - - // Verify the branch input is cleared (no selected branch) - expect(branchDropdown).toHaveValue(""); - - // Trigger a re-render by changing something else - fireEvent.change(repoDropdown, { target: { value: "test/repo2" } }); - fireEvent.change(repoDropdown, { target: { value: "test/repo1" } }); - - // The branch should be auto-selected to "main" again after repo change - expect(branchDropdown).toHaveValue("main"); - - // Clear it again - fireEvent.change(branchDropdown, { target: { value: "" } }); - - // Verify it stays empty - expect(branchDropdown).toHaveValue(""); - - // Simulate a component update without changing repos - // This would normally trigger the useEffect if our fix wasn't working - fireEvent.blur(branchDropdown); - - // Verify it still stays empty - expect(branchDropdown).toHaveValue(""); - }); -}); diff --git a/frontend/tests/settings.spec.ts b/frontend/tests/settings.spec.ts deleted file mode 100644 index e4c4ce3b35df..000000000000 --- a/frontend/tests/settings.spec.ts +++ /dev/null @@ -1,17 +0,0 @@ -import test, { expect } from "@playwright/test"; - -test("do not navigate to /settings/billing if not SaaS mode", async ({ - page, -}) => { - await page.goto("/settings/billing"); - await expect(page.getByTestId("settings-screen")).toBeVisible(); - expect(page.url()).toBe("http://localhost:3001/settings"); -}); - -// FIXME: This test is failing because the config is not being set to SaaS mode -// since MSW is always returning APP_MODE as "oss" -test.skip("navigate to /settings/billing if SaaS mode", async ({ page }) => { - await page.goto("/settings/billing"); - await expect(page.getByTestId("settings-screen")).toBeVisible(); - expect(page.url()).toBe("http://localhost:3001/settings/billing"); -}); diff --git a/openhands/README.md b/openhands/README.md index 5864a39b0e12..93f06f26b3ba 100644 --- a/openhands/README.md +++ b/openhands/README.md @@ -2,8 +2,7 @@ This directory contains the core components of OpenHands. -This diagram provides an overview of the roles of each component and how they communicate and collaborate. -![OpenHands System Architecture Diagram (July 4, 2024)](../docs/static/img/system_architecture_overview.png) +For an overview of the system architecture, see the [architecture documentation](https://docs.openhands.dev/usage/architecture/backend) (v0 backend architecture). 
## Classes diff --git a/openhands/agenthub/codeact_agent/codeact_agent.py b/openhands/agenthub/codeact_agent/codeact_agent.py index 85e5f88cbcb7..9dd814e9cf77 100644 --- a/openhands/agenthub/codeact_agent/codeact_agent.py +++ b/openhands/agenthub/codeact_agent/codeact_agent.py @@ -194,9 +194,12 @@ def step(self, state: State) -> 'Action': # event we'll just return that instead of an action. The controller will # immediately ask the agent to step again with the new view. condensed_history: list[Event] = [] + # Track which event IDs have been forgotten/condensed + forgotten_event_ids: set[int] = set() match self.condenser.condensed_history(state): - case View(events=events): + case View(events=events, forgotten_event_ids=forgotten_ids): condensed_history = events + forgotten_event_ids = forgotten_ids case Condensation(action=condensation_action): return condensation_action @@ -206,7 +209,9 @@ def step(self, state: State) -> 'Action': ) initial_user_message = self._get_initial_user_message(state.history) - messages = self._get_messages(condensed_history, initial_user_message) + messages = self._get_messages( + condensed_history, initial_user_message, forgotten_event_ids + ) params: dict = { 'messages': messages, } @@ -245,7 +250,10 @@ def _get_initial_user_message(self, history: list[Event]) -> MessageAction: return initial_user_message def _get_messages( - self, events: list[Event], initial_user_message: MessageAction + self, + events: list[Event], + initial_user_message: MessageAction, + forgotten_event_ids: set[int], ) -> list[Message]: """Constructs the message history for the LLM conversation. @@ -284,6 +292,7 @@ def _get_messages( messages = self.conversation_memory.process_events( condensed_history=events, initial_user_action=initial_user_message, + forgotten_event_ids=forgotten_event_ids, max_message_chars=self.llm.config.max_message_chars, vision_is_active=self.llm.vision_is_active(), ) diff --git a/openhands/app_server/app_conversation/app_conversation_info_service.py b/openhands/app_server/app_conversation/app_conversation_info_service.py index 22305c1ff01a..8e9f1ffe6828 100644 --- a/openhands/app_server/app_conversation/app_conversation_info_service.py +++ b/openhands/app_server/app_conversation/app_conversation_info_service.py @@ -9,6 +9,7 @@ AppConversationSortOrder, ) from openhands.app_server.services.injector import Injector +from openhands.sdk.event import ConversationStateUpdateEvent from openhands.sdk.utils.models import DiscriminatedUnionMixin @@ -92,6 +93,19 @@ async def save_app_conversation_info( Return the stored info """ + @abstractmethod + async def process_stats_event( + self, + event: ConversationStateUpdateEvent, + conversation_id: UUID, + ) -> None: + """Process a stats event and update conversation statistics. 
+ + Args: + event: The ConversationStateUpdateEvent with key='stats' + conversation_id: The ID of the conversation to update + """ + class AppConversationInfoServiceInjector( DiscriminatedUnionMixin, Injector[AppConversationInfoService], ABC diff --git a/openhands/app_server/app_conversation/app_conversation_models.py b/openhands/app_server/app_conversation/app_conversation_models.py index dde2ccc8796b..1c0ba914cb35 100644 --- a/openhands/app_server/app_conversation/app_conversation_models.py +++ b/openhands/app_server/app_conversation/app_conversation_models.py @@ -1,6 +1,7 @@ from datetime import datetime from enum import Enum -from uuid import uuid4 +from typing import Literal +from uuid import UUID, uuid4 from pydantic import BaseModel, Field @@ -97,7 +98,9 @@ class AppConversationStartRequest(BaseModel): """ sandbox_id: str | None = Field(default=None) + conversation_id: UUID | None = Field(default=None) initial_message: SendMessageRequest | None = None + system_message_suffix: str | None = None processors: list[EventCallbackProcessor] | None = Field(default=None) llm_model: str | None = None @@ -159,3 +162,12 @@ class AppConversationStartTask(BaseModel): class AppConversationStartTaskPage(BaseModel): items: list[AppConversationStartTask] next_page_id: str | None = None + + +class SkillResponse(BaseModel): + """Response model for skills endpoint.""" + + name: str + type: Literal['repo', 'knowledge'] + content: str + triggers: list[str] = [] diff --git a/openhands/app_server/app_conversation/app_conversation_router.py b/openhands/app_server/app_conversation/app_conversation_router.py index b66d9983621b..a7a0414e3118 100644 --- a/openhands/app_server/app_conversation/app_conversation_router.py +++ b/openhands/app_server/app_conversation/app_conversation_router.py @@ -1,9 +1,12 @@ """Sandboxed Conversation router for OpenHands Server.""" import asyncio +import logging +import os import sys +import tempfile from datetime import datetime -from typing import Annotated, AsyncGenerator +from typing import Annotated, AsyncGenerator, Literal from uuid import UUID import httpx @@ -26,8 +29,8 @@ async def anext(async_iterator): return await async_iterator.__anext__() -from fastapi import APIRouter, Query, Request -from fastapi.responses import StreamingResponse +from fastapi import APIRouter, Query, Request, status +from fastapi.responses import JSONResponse, StreamingResponse from sqlalchemy.ext.asyncio import AsyncSession from openhands.app_server.app_conversation.app_conversation_models import ( @@ -37,10 +40,14 @@ async def anext(async_iterator): AppConversationStartTask, AppConversationStartTaskPage, AppConversationStartTaskSortOrder, + SkillResponse, ) from openhands.app_server.app_conversation.app_conversation_service import ( AppConversationService, ) +from openhands.app_server.app_conversation.app_conversation_service_base import ( + AppConversationServiceBase, +) from openhands.app_server.app_conversation.app_conversation_start_task_service import ( AppConversationStartTaskService, ) @@ -49,11 +56,25 @@ async def anext(async_iterator): depends_app_conversation_start_task_service, depends_db_session, depends_httpx_client, + depends_sandbox_service, + depends_sandbox_spec_service, depends_user_context, get_app_conversation_service, ) +from openhands.app_server.sandbox.sandbox_models import ( + AGENT_SERVER, + SandboxStatus, +) +from openhands.app_server.sandbox.sandbox_service import SandboxService +from openhands.app_server.sandbox.sandbox_spec_service import SandboxSpecService +from 
openhands.app_server.utils.docker_utils import ( + replace_localhost_hostname_for_docker, +) +from openhands.sdk.context.skills import KeywordTrigger, TaskTrigger +from openhands.sdk.workspace.remote.async_remote_workspace import AsyncRemoteWorkspace router = APIRouter(prefix='/app-conversations', tags=['Conversations']) +logger = logging.getLogger(__name__) app_conversation_service_dependency = depends_app_conversation_service() app_conversation_start_task_service_dependency = ( depends_app_conversation_start_task_service() @@ -61,6 +82,8 @@ async def anext(async_iterator): user_context_dependency = depends_user_context() db_session_dependency = depends_db_session() httpx_client_dependency = depends_httpx_client() +sandbox_service_dependency = depends_sandbox_service() +sandbox_spec_service_dependency = depends_sandbox_spec_service() # Read methods @@ -289,6 +312,240 @@ async def batch_get_app_conversation_start_tasks( return start_tasks +@router.get('/{conversation_id}/file') +async def read_conversation_file( + conversation_id: UUID, + file_path: Annotated[ + str, + Query(title='Path to the file to read within the sandbox workspace'), + ] = '/workspace/project/PLAN.md', + app_conversation_service: AppConversationService = ( + app_conversation_service_dependency + ), + sandbox_service: SandboxService = sandbox_service_dependency, + sandbox_spec_service: SandboxSpecService = sandbox_spec_service_dependency, +) -> str: + """Read a file from a specific conversation's sandbox workspace. + + Returns the content of the file at the specified path if it exists, otherwise returns an empty string. + + Args: + conversation_id: The UUID of the conversation + file_path: Path to the file to read within the sandbox workspace + + Returns: + The content of the file or an empty string if the file doesn't exist + """ + # Get the conversation info + conversation = await app_conversation_service.get_app_conversation(conversation_id) + if not conversation: + return '' + + # Get the sandbox info + sandbox = await sandbox_service.get_sandbox(conversation.sandbox_id) + if not sandbox or sandbox.status != SandboxStatus.RUNNING: + return '' + + # Get the sandbox spec to find the working directory + sandbox_spec = await sandbox_spec_service.get_sandbox_spec(sandbox.sandbox_spec_id) + if not sandbox_spec: + return '' + + # Get the agent server URL + if not sandbox.exposed_urls: + return '' + + agent_server_url = None + for exposed_url in sandbox.exposed_urls: + if exposed_url.name == AGENT_SERVER: + agent_server_url = exposed_url.url + break + + if not agent_server_url: + return '' + + agent_server_url = replace_localhost_hostname_for_docker(agent_server_url) + + # Create remote workspace + remote_workspace = AsyncRemoteWorkspace( + host=agent_server_url, + api_key=sandbox.session_api_key, + working_dir=sandbox_spec.working_dir, + ) + + # Read the file at the specified path + temp_file_path = None + try: + # Create a temporary file path to download the remote file + with tempfile.NamedTemporaryFile(mode='w+b', delete=False) as temp_file: + temp_file_path = temp_file.name + + # Download the file from remote system + result = await remote_workspace.file_download( + source_path=file_path, + destination_path=temp_file_path, + ) + + if result.success: + # Read the content from the temporary file + with open(temp_file_path, 'rb') as f: + content = f.read() + # Decode bytes to string + return content.decode('utf-8') + except Exception: + # If there's any error reading the file, return empty string + pass + finally: + # 
Clean up the temporary file + if temp_file_path: + try: + os.unlink(temp_file_path) + except Exception: + # Ignore errors during cleanup + pass + + return '' + + +@router.get('/{conversation_id}/skills') +async def get_conversation_skills( + conversation_id: UUID, + app_conversation_service: AppConversationService = ( + app_conversation_service_dependency + ), + sandbox_service: SandboxService = sandbox_service_dependency, + sandbox_spec_service: SandboxSpecService = sandbox_spec_service_dependency, +) -> JSONResponse: + """Get all skills associated with the conversation. + + This endpoint returns all skills that are loaded for the v1 conversation. + Skills are loaded from multiple sources: + - Sandbox skills (exposed URLs) + - Global skills (OpenHands/skills/) + - User skills (~/.openhands/skills/) + - Organization skills (org/.openhands repository) + - Repository skills (repo/.openhands/skills/ or .openhands/microagents/) + + Returns: + JSONResponse: A JSON response containing the list of skills. + """ + try: + # Get the conversation info + conversation = await app_conversation_service.get_app_conversation( + conversation_id + ) + if not conversation: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content={'error': f'Conversation {conversation_id} not found'}, + ) + + # Get the sandbox info + sandbox = await sandbox_service.get_sandbox(conversation.sandbox_id) + if not sandbox or sandbox.status != SandboxStatus.RUNNING: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content={ + 'error': f'Sandbox not found or not running for conversation {conversation_id}' + }, + ) + + # Get the sandbox spec to find the working directory + sandbox_spec = await sandbox_spec_service.get_sandbox_spec( + sandbox.sandbox_spec_id + ) + if not sandbox_spec: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content={'error': 'Sandbox spec not found'}, + ) + + # Get the agent server URL + if not sandbox.exposed_urls: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content={'error': 'No agent server URL found for sandbox'}, + ) + + agent_server_url = None + for exposed_url in sandbox.exposed_urls: + if exposed_url.name == AGENT_SERVER: + agent_server_url = exposed_url.url + break + + if not agent_server_url: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content={'error': 'Agent server URL not found in sandbox'}, + ) + + agent_server_url = replace_localhost_hostname_for_docker(agent_server_url) + + # Create remote workspace + remote_workspace = AsyncRemoteWorkspace( + host=agent_server_url, + api_key=sandbox.session_api_key, + working_dir=sandbox_spec.working_dir, + ) + + # Load skills from all sources + logger.info(f'Loading skills for conversation {conversation_id}') + + # Prefer the shared loader to avoid duplication; otherwise return empty list. 
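+        # If the injected service is not an AppConversationServiceBase subclass, there is no shared loader to call, so the endpoint reports an empty skill list instead of failing.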
+ all_skills: list = [] + if isinstance(app_conversation_service, AppConversationServiceBase): + all_skills = await app_conversation_service.load_and_merge_all_skills( + sandbox, + remote_workspace, + conversation.selected_repository, + sandbox_spec.working_dir, + ) + + logger.info( + f'Loaded {len(all_skills)} skills for conversation {conversation_id}: ' + f'{[s.name for s in all_skills]}' + ) + + # Transform skills to response format + skills_response = [] + for skill in all_skills: + # Determine type based on trigger + skill_type: Literal['repo', 'knowledge'] + if skill.trigger is None: + skill_type = 'repo' + else: + skill_type = 'knowledge' + + # Extract triggers + triggers = [] + if isinstance(skill.trigger, (KeywordTrigger, TaskTrigger)): + if hasattr(skill.trigger, 'keywords'): + triggers = skill.trigger.keywords + elif hasattr(skill.trigger, 'triggers'): + triggers = skill.trigger.triggers + + skills_response.append( + SkillResponse( + name=skill.name, + type=skill_type, + content=skill.content, + triggers=triggers, + ) + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content={'skills': [s.model_dump() for s in skills_response]}, + ) + + except Exception as e: + logger.error(f'Error getting skills for conversation {conversation_id}: {e}') + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content={'error': f'Error getting skills: {str(e)}'}, + ) + + async def _consume_remaining( async_iter, db_session: AsyncSession, httpx_client: httpx.AsyncClient ): diff --git a/openhands/app_server/app_conversation/app_conversation_service_base.py b/openhands/app_server/app_conversation/app_conversation_service_base.py index 2027426ac22d..aa6add73fe4e 100644 --- a/openhands/app_server/app_conversation/app_conversation_service_base.py +++ b/openhands/app_server/app_conversation/app_conversation_service_base.py @@ -4,11 +4,16 @@ from abc import ABC from dataclasses import dataclass from pathlib import Path -from typing import AsyncGenerator +from typing import TYPE_CHECKING, AsyncGenerator +from uuid import UUID + +if TYPE_CHECKING: + import httpx import base62 from openhands.app_server.app_conversation.app_conversation_models import ( + AgentType, AppConversationStartTask, AppConversationStartTaskStatus, ) @@ -17,6 +22,7 @@ ) from openhands.app_server.app_conversation.skill_loader import ( load_global_skills, + load_org_skills, load_repo_skills, load_sandbox_skills, merge_skills, @@ -25,7 +31,17 @@ from openhands.app_server.user.user_context import UserContext from openhands.sdk import Agent from openhands.sdk.context.agent_context import AgentContext +from openhands.sdk.context.condenser import LLMSummarizingCondenser from openhands.sdk.context.skills import load_user_skills +from openhands.sdk.llm import LLM +from openhands.sdk.security.analyzer import SecurityAnalyzerBase +from openhands.sdk.security.confirmation_policy import ( + AlwaysConfirm, + ConfirmationPolicyBase, + ConfirmRisky, + NeverConfirm, +) +from openhands.sdk.security.llm_analyzer import LLMSecurityAnalyzer from openhands.sdk.workspace.remote.async_remote_workspace import AsyncRemoteWorkspace _logger = logging.getLogger(__name__) @@ -42,7 +58,7 @@ class AppConversationServiceBase(AppConversationService, ABC): init_git_in_empty_workspace: bool user_context: UserContext - async def _load_and_merge_all_skills( + async def load_and_merge_all_skills( self, sandbox: SandboxInfo, remote_workspace: AsyncRemoteWorkspace, @@ -79,13 +95,20 @@ async def _load_and_merge_all_skills( except Exception 
as e: _logger.warning(f'Failed to load user skills: {str(e)}') user_skills = [] + + # Load organization-level skills + org_skills = await load_org_skills( + remote_workspace, selected_repository, working_dir, self.user_context + ) + repo_skills = await load_repo_skills( remote_workspace, selected_repository, working_dir ) # Merge all skills (later lists override earlier ones) + # Precedence: sandbox < global < user < org < repo all_skills = merge_skills( - [sandbox_skills, global_skills, user_skills, repo_skills] + [sandbox_skills, global_skills, user_skills, org_skills, repo_skills] ) _logger.info( @@ -146,7 +169,7 @@ async def _load_skills_and_update_agent( Updated agent with skills loaded into context """ # Load and merge all skills - all_skills = await self._load_and_merge_all_skills( + all_skills = await self.load_and_merge_all_skills( sandbox, remote_workspace, selected_repository, working_dir ) @@ -175,13 +198,50 @@ async def run_setup_scripts( task.status = AppConversationStartTaskStatus.SETTING_UP_SKILLS yield task - await self._load_and_merge_all_skills( + await self.load_and_merge_all_skills( sandbox, workspace, task.request.selected_repository, workspace.working_dir, ) + async def _configure_git_user_settings( + self, + workspace: AsyncRemoteWorkspace, + ) -> None: + """Configure git global user settings from user preferences. + + Reads git_user_name and git_user_email from user settings and + configures them as git global settings in the workspace. + + Args: + workspace: The remote workspace to configure git settings in. + """ + try: + user_info = await self.user_context.get_user_info() + + if user_info.git_user_name: + cmd = f'git config --global user.name "{user_info.git_user_name}"' + result = await workspace.execute_command(cmd, workspace.working_dir) + if result.exit_code: + _logger.warning(f'Git config user.name failed: {result.stderr}') + else: + _logger.info( + f'Git configured with user.name={user_info.git_user_name}' + ) + + if user_info.git_user_email: + cmd = f'git config --global user.email "{user_info.git_user_email}"' + result = await workspace.execute_command(cmd, workspace.working_dir) + if result.exit_code: + _logger.warning(f'Git config user.email failed: {result.stderr}') + else: + _logger.info( + f'Git configured with user.email={user_info.git_user_email}' + ) + except Exception as e: + _logger.warning(f'Failed to configure git user settings: {e}') + async def clone_or_init_git_repo( self, task: AppConversationStartTask, @@ -197,6 +257,9 @@ async def clone_or_init_git_repo( if result.exit_code: _logger.warning(f'mkdir failed: {result.stderr}') + # Configure git user settings from user preferences + await self._configure_git_user_settings(workspace) + if not request.selected_repository: if self.init_git_in_empty_workspace: _logger.debug('Initializing a new git repository in the workspace.') @@ -221,7 +284,9 @@ async def clone_or_init_git_repo( # Clone the repo - this is the slow part! 
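        # The extra argument (120) passed to execute_command below is presumably a timeout in seconds, giving large repositories more time to finish cloning.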
clone_command = f'git clone {remote_repo_url} {dir_name}' - result = await workspace.execute_command(clone_command, workspace.working_dir) + result = await workspace.execute_command( + clone_command, workspace.working_dir, 120 + ) if result.exit_code: _logger.warning(f'Git clone failed: {result.stderr}') @@ -233,7 +298,10 @@ async def clone_or_init_git_repo( random_str = base62.encodebytes(os.urandom(16)) openhands_workspace_branch = f'openhands-workspace-{random_str}' checkout_command = f'git checkout -b {openhands_workspace_branch}' - await workspace.execute_command(checkout_command, workspace.working_dir) + git_dir = Path(workspace.working_dir) / dir_name + result = await workspace.execute_command(checkout_command, git_dir) + if result.exit_code: + _logger.warning(f'Git checkout failed: {result.stderr}') async def maybe_run_setup_script( self, @@ -295,3 +363,131 @@ async def maybe_setup_git_hooks( return _logger.info('Git pre-commit hook installed successfully') + + def _create_condenser( + self, + llm: LLM, + agent_type: AgentType, + condenser_max_size: int | None, + ) -> LLMSummarizingCondenser: + """Create a condenser based on user settings and agent type. + + Args: + llm: The LLM instance to use for condensation + agent_type: Type of agent (PLAN or DEFAULT) + condenser_max_size: condenser_max_size setting + + Returns: + Configured LLMSummarizingCondenser instance + """ + # LLMSummarizingCondenser has defaults: max_size=120, keep_first=4 + condenser_kwargs = { + 'llm': llm.model_copy( + update={ + 'usage_id': ( + 'condenser' + if agent_type == AgentType.DEFAULT + else 'planning_condenser' + ) + } + ), + } + # Only override max_size if user has a custom value + if condenser_max_size is not None: + condenser_kwargs['max_size'] = condenser_max_size + + condenser = LLMSummarizingCondenser(**condenser_kwargs) + + return condenser + + def _create_security_analyzer_from_string( + self, security_analyzer_str: str | None + ) -> SecurityAnalyzerBase | None: + """Convert security analyzer string from settings to SecurityAnalyzerBase instance. + + Args: + security_analyzer_str: String value from settings. Valid values: + - "llm" -> LLMSecurityAnalyzer + - "none" or None -> None + - Other values -> None (unsupported analyzers are ignored) + + Returns: + SecurityAnalyzerBase instance or None + """ + if not security_analyzer_str or security_analyzer_str.lower() == 'none': + return None + + if security_analyzer_str.lower() == 'llm': + return LLMSecurityAnalyzer() + + # For unknown values, log a warning and return None + _logger.warning( + f'Unknown security analyzer value: {security_analyzer_str}. ' + 'Supported values: "llm", "none". Defaulting to None.' + ) + return None + + def _select_confirmation_policy( + self, confirmation_mode: bool, security_analyzer: str | None + ) -> ConfirmationPolicyBase: + """Choose confirmation policy using only mode flag and analyzer string.""" + if not confirmation_mode: + return NeverConfirm() + + analyzer_kind = (security_analyzer or '').lower() + if analyzer_kind == 'llm': + return ConfirmRisky() + + return AlwaysConfirm() + + async def _set_security_analyzer_from_settings( + self, + agent_server_url: str, + session_api_key: str | None, + conversation_id: UUID, + security_analyzer_str: str | None, + httpx_client: 'httpx.AsyncClient', + ) -> None: + """Set security analyzer on conversation using only the analyzer string. 
+ + Args: + agent_server_url: URL of the agent server + session_api_key: Session API key for authentication + conversation_id: ID of the conversation to update + security_analyzer_str: String value from settings + httpx_client: HTTP client for making API requests + """ + + if session_api_key is None: + return + + security_analyzer = self._create_security_analyzer_from_string( + security_analyzer_str + ) + + # Only make API call if we have a security analyzer to set + # (None is the default, so we can skip the call if it's None) + if security_analyzer is None: + return + + try: + # Prepare the request payload + payload = {'security_analyzer': security_analyzer.model_dump()} + + # Call agent server API to set security analyzer + response = await httpx_client.post( + f'{agent_server_url}/api/conversations/{conversation_id}/security_analyzer', + json=payload, + headers={'X-Session-API-Key': session_api_key}, + timeout=30.0, + ) + response.raise_for_status() + _logger.info( + f'Successfully set security analyzer for conversation {conversation_id}' + ) + except Exception as e: + # Log error but don't fail conversation creation + _logger.warning( + f'Failed to set security analyzer for conversation {conversation_id}: {e}', + exc_info=True, + ) diff --git a/openhands/app_server/app_conversation/live_status_app_conversation_service.py b/openhands/app_server/app_conversation/live_status_app_conversation_service.py index 5c81e4841c06..db30710f7626 100644 --- a/openhands/app_server/app_conversation/live_status_app_conversation_service.py +++ b/openhands/app_server/app_conversation/live_status_app_conversation_service.py @@ -4,16 +4,15 @@ from dataclasses import dataclass from datetime import datetime, timedelta from time import time -from typing import AsyncGenerator, Sequence +from typing import Any, AsyncGenerator, Sequence from uuid import UUID, uuid4 import httpx from fastapi import Request -from pydantic import Field, TypeAdapter +from pydantic import Field, SecretStr, TypeAdapter from openhands.agent_server.models import ( ConversationInfo, - NeverConfirm, SendMessageRequest, StartConversationRequest, ) @@ -63,22 +62,27 @@ from openhands.app_server.services.injector import InjectorState from openhands.app_server.services.jwt_service import JwtService from openhands.app_server.user.user_context import UserContext +from openhands.app_server.user.user_models import UserInfo from openhands.app_server.utils.docker_utils import ( replace_localhost_hostname_for_docker, ) from openhands.experiments.experiment_manager import ExperimentManagerImpl from openhands.integrations.provider import ProviderType -from openhands.sdk import LocalWorkspace -from openhands.sdk.conversation.secret_source import LookupSecret, StaticSecret +from openhands.sdk import Agent, AgentContext, LocalWorkspace from openhands.sdk.llm import LLM -from openhands.sdk.security.confirmation_policy import AlwaysConfirm +from openhands.sdk.secret import LookupSecret, StaticSecret from openhands.sdk.workspace.remote.async_remote_workspace import AsyncRemoteWorkspace -from openhands.tools.preset.default import get_default_agent -from openhands.tools.preset.planning import get_planning_agent +from openhands.server.types import AppMode +from openhands.tools.preset.default import ( + get_default_tools, +) +from openhands.tools.preset.planning import ( + format_plan_structure, + get_planning_tools, +) _conversation_info_type_adapter = TypeAdapter(list[ConversationInfo | None]) _logger = logging.getLogger(__name__) -GIT_TOKEN = 'GIT_TOKEN' 
@dataclass @@ -96,7 +100,11 @@ class LiveStatusAppConversationService(AppConversationServiceBase): sandbox_startup_poll_frequency: int httpx_client: httpx.AsyncClient web_url: str | None + openhands_provider_base_url: str | None access_token_hard_timeout: timedelta | None + app_mode: str | None = None + keycloak_auth_cookie: str | None = None + tavily_api_key: str | None = None async def search_app_conversations( self, @@ -228,10 +236,12 @@ async def _start_app_conversation( await self._build_start_conversation_request_for_user( sandbox, request.initial_message, + request.system_message_suffix, request.git_provider, sandbox_spec.working_dir, request.agent_type, request.llm_model, + request.conversation_id, remote_workspace=remote_workspace, selected_repository=request.selected_repository, ) @@ -260,7 +270,7 @@ async def _start_app_conversation( user_id = await self.user_context.get_user_id() app_conversation_info = AppConversationInfo( id=info.id, - title=f'Conversation {info.id.hex}', + title=f'Conversation {info.id.hex[:5]}', sandbox_id=sandbox.id, created_by_user_id=user_id, llm_model=start_conversation_request.agent.llm.model, @@ -277,21 +287,33 @@ async def _start_app_conversation( ) # Setup default processors - processors = request.processors - if processors is None: - processors = [SetTitleCallbackProcessor()] + processors = request.processors or [] + + # Always ensure SetTitleCallbackProcessor is included + has_set_title_processor = any( + isinstance(processor, SetTitleCallbackProcessor) + for processor in processors + ) + if not has_set_title_processor: + processors.append(SetTitleCallbackProcessor()) # Save processors - await asyncio.gather( - *[ - self.event_callback_service.save_event_callback( - EventCallback( - conversation_id=info.id, - processor=processor, - ) + for processor in processors: + await self.event_callback_service.save_event_callback( + EventCallback( + conversation_id=info.id, + processor=processor, ) - for processor in processors - ] + ) + + # Set security analyzer from settings + user = await self.user_context.get_user_info() + await self._set_security_analyzer_from_settings( + agent_server_url, + sandbox.session_api_key, + info.id, + user.security_analyzer, + self.httpx_client, ) # Update the start task @@ -455,7 +477,11 @@ async def _wait_for_sandbox_start( if sandbox.status in (None, SandboxStatus.ERROR): raise SandboxError(f'Sandbox status: {sandbox.status}') if sandbox.status == SandboxStatus.RUNNING: - return + # There are still bugs in the remote runtime - they report running while still just + # starting resulting in a race condition. Manually check that it is actually + # running. + if await self._check_agent_server_alive(sandbox): + return if sandbox.status != SandboxStatus.STARTING: raise SandboxError(f'Sandbox not startable: {sandbox.id}') @@ -468,9 +494,19 @@ async def _wait_for_sandbox_start( if sandbox.status not in (SandboxStatus.STARTING, SandboxStatus.RUNNING): raise SandboxError(f'Sandbox not startable: {sandbox.id}') if sandbox_info.status == SandboxStatus.RUNNING: - return + # There are still bugs in the remote runtime - they report running while still just + # starting resulting in a race condition. Manually check that it is actually + # running. 
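+                # _check_agent_server_alive probes the agent server's /alive endpoint; only a successful response is treated as actually running.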
+ if await self._check_agent_server_alive(sandbox_info): + return raise SandboxError(f'Sandbox failed to start: {sandbox.id}') + async def _check_agent_server_alive(self, sandbox_info: SandboxInfo) -> bool: + agent_server_url = self._get_agent_server_url(sandbox_info) + url = f'{agent_server_url.rstrip("/")}/alive' + response = await self.httpx_client.get(url) + return response.is_success + def _get_agent_server_url(self, sandbox: SandboxInfo) -> str: """Get agent server url for running sandbox.""" exposed_urls = sandbox.exposed_urls @@ -513,68 +549,376 @@ def _inherit_configuration_from_parent( if not request.llm_model and parent_info.llm_model: request.llm_model = parent_info.llm_model - async def _build_start_conversation_request_for_user( - self, - sandbox: SandboxInfo, - initial_message: SendMessageRequest | None, - git_provider: ProviderType | None, - working_dir: str, - agent_type: AgentType = AgentType.DEFAULT, - llm_model: str | None = None, - remote_workspace: AsyncRemoteWorkspace | None = None, - selected_repository: str | None = None, - ) -> StartConversationRequest: - user = await self.user_context.get_user_info() + async def _setup_secrets_for_git_providers(self, user: UserInfo) -> dict: + """Set up secrets for all git provider authentication. - # Set up a secret for the git token + Args: + user: User information containing authentication details + + Returns: + Dictionary of secrets for the conversation + """ secrets = await self.user_context.get_secrets() - if git_provider: + + # Get all provider tokens from user authentication + provider_tokens = await self.user_context.get_provider_tokens() + if not provider_tokens: + return secrets + + # Create secrets for each provider token + for provider_type, provider_token in provider_tokens.items(): + if not provider_token.token: + continue + + secret_name = f'{provider_type.name}_TOKEN' + if self.web_url: - # If there is a web url, then we create an access token to access it. - # For security reasons, we are explicit here - only this user, and - # only this provider, with a timeout + # Create an access token for web-based authentication access_token = self.jwt_service.create_jws_token( payload={ 'user_id': user.id, - 'provider_type': git_provider.value, + 'provider_type': provider_type.value, }, expires_in=self.access_token_hard_timeout, ) - secrets[GIT_TOKEN] = LookupSecret( + headers = {'X-Access-Token': access_token} + + # Include keycloak_auth cookie in headers if app_mode is SaaS + if self.app_mode == 'saas' and self.keycloak_auth_cookie: + headers['Cookie'] = f'keycloak_auth={self.keycloak_auth_cookie}' + + secrets[secret_name] = LookupSecret( url=self.web_url + '/api/v1/webhooks/secrets', - headers={'X-Access-Token': access_token}, + headers=headers, ) else: - # If there is no URL specified where the sandbox can access the app server - # then we supply a static secret with the most recent value. Depending - # on the type, this may eventually expire. 
- static_token = await self.user_context.get_latest_token(git_provider) + # Use static token for environments without web URL access + static_token = await self.user_context.get_latest_token(provider_type) if static_token: - secrets[GIT_TOKEN] = StaticSecret(value=static_token) + secrets[secret_name] = StaticSecret(value=static_token) - workspace = LocalWorkspace(working_dir=working_dir) + return secrets - # Use provided llm_model if available, otherwise fall back to user's default + def _configure_llm(self, user: UserInfo, llm_model: str | None) -> LLM: + """Configure LLM settings. + + Args: + user: User information containing LLM preferences + llm_model: Optional specific model to use, falls back to user default + + Returns: + Configured LLM instance + """ model = llm_model or user.llm_model - llm = LLM( + base_url = user.llm_base_url + if model and model.startswith('openhands/'): + base_url = user.llm_base_url or self.openhands_provider_base_url + + return LLM( model=model, - base_url=user.llm_base_url, + base_url=base_url, api_key=user.llm_api_key, usage_id='agent', ) - # The agent gets passed initial instructions - # Select agent based on agent_type + + async def _get_tavily_api_key(self, user: UserInfo) -> str | None: + """Get Tavily search API key, prioritizing user's key over service key. + + Args: + user: User information + + Returns: + Tavily API key if available, None otherwise + """ + # Get the actual API key values, prioritizing user's key over service key + user_search_key = None + if user.search_api_key: + key_value = user.search_api_key.get_secret_value() + if key_value and key_value.strip(): + user_search_key = key_value + + service_tavily_key = None + if self.tavily_api_key: + # tavily_api_key is already a string (extracted in the factory method) + if self.tavily_api_key.strip(): + service_tavily_key = self.tavily_api_key + + return user_search_key or service_tavily_key + + async def _add_system_mcp_servers( + self, mcp_servers: dict[str, Any], user: UserInfo + ) -> None: + """Add system-generated MCP servers (default OpenHands server and Tavily). + + Args: + mcp_servers: Dictionary to add servers to + user: User information for API keys + """ + if not self.web_url: + return + + # Add default OpenHands MCP server + mcp_url = f'{self.web_url}/mcp/mcp' + mcp_servers['default'] = {'url': mcp_url} + + # Add API key if available + mcp_api_key = await self.user_context.get_mcp_api_key() + if mcp_api_key: + mcp_servers['default']['headers'] = { + 'X-Session-API-Key': mcp_api_key, + } + + # Add Tavily search if API key is available + tavily_api_key = await self._get_tavily_api_key(user) + if tavily_api_key: + _logger.info('Adding search engine to MCP config') + mcp_servers['tavily'] = { + 'url': f'https://mcp.tavily.com/mcp/?tavilyApiKey={tavily_api_key}' + } + else: + _logger.info('No search engine API key found, skipping search engine') + + def _add_custom_sse_servers( + self, mcp_servers: dict[str, Any], sse_servers: list + ) -> None: + """Add custom SSE MCP servers from user configuration. 
+ + Args: + mcp_servers: Dictionary to add servers to + sse_servers: List of SSE server configurations + """ + for sse_server in sse_servers: + server_config = { + 'url': sse_server.url, + 'transport': 'sse', + } + if sse_server.api_key: + server_config['headers'] = { + 'Authorization': f'Bearer {sse_server.api_key}' + } + + # Generate unique server name using UUID + # TODO: Let the users specify the server name + server_name = f'sse_{uuid4().hex[:8]}' + mcp_servers[server_name] = server_config + _logger.debug( + f'Added custom SSE server: {server_name} for {sse_server.url}' + ) + + def _add_custom_shttp_servers( + self, mcp_servers: dict[str, Any], shttp_servers: list + ) -> None: + """Add custom SHTTP MCP servers from user configuration. + + Args: + mcp_servers: Dictionary to add servers to + shttp_servers: List of SHTTP server configurations + """ + for shttp_server in shttp_servers: + server_config = { + 'url': shttp_server.url, + 'transport': 'streamable-http', + } + if shttp_server.api_key: + server_config['headers'] = { + 'Authorization': f'Bearer {shttp_server.api_key}' + } + if shttp_server.timeout: + server_config['timeout'] = shttp_server.timeout + + # Generate unique server name using UUID + # TODO: Let the users specify the server name + server_name = f'shttp_{uuid4().hex[:8]}' + mcp_servers[server_name] = server_config + _logger.debug( + f'Added custom SHTTP server: {server_name} for {shttp_server.url}' + ) + + def _add_custom_stdio_servers( + self, mcp_servers: dict[str, Any], stdio_servers: list + ) -> None: + """Add custom STDIO MCP servers from user configuration. + + Args: + mcp_servers: Dictionary to add servers to + stdio_servers: List of STDIO server configurations + """ + for stdio_server in stdio_servers: + server_config = { + 'command': stdio_server.command, + 'args': stdio_server.args, + } + if stdio_server.env: + server_config['env'] = stdio_server.env + + # STDIO servers have an explicit name field + mcp_servers[stdio_server.name] = server_config + _logger.debug(f'Added custom STDIO server: {stdio_server.name}') + + def _merge_custom_mcp_config( + self, mcp_servers: dict[str, Any], user: UserInfo + ) -> None: + """Merge custom MCP configuration from user settings. 
+ + Args: + mcp_servers: Dictionary to add servers to + user: User information containing custom MCP config + """ + if not user.mcp_config: + return + + try: + sse_count = len(user.mcp_config.sse_servers) + shttp_count = len(user.mcp_config.shttp_servers) + stdio_count = len(user.mcp_config.stdio_servers) + + _logger.info( + f'Loading custom MCP config from user settings: ' + f'{sse_count} SSE, {shttp_count} SHTTP, {stdio_count} STDIO servers' + ) + + # Add each type of custom server + self._add_custom_sse_servers(mcp_servers, user.mcp_config.sse_servers) + self._add_custom_shttp_servers(mcp_servers, user.mcp_config.shttp_servers) + self._add_custom_stdio_servers(mcp_servers, user.mcp_config.stdio_servers) + + _logger.info( + f'Successfully merged custom MCP config: added {sse_count} SSE, ' + f'{shttp_count} SHTTP, and {stdio_count} STDIO servers' + ) + + except Exception as e: + _logger.error( + f'Error loading custom MCP config from user settings: {e}', + exc_info=True, + ) + # Continue with system config only, don't fail conversation startup + _logger.warning( + 'Continuing with system-generated MCP config only due to custom config error' + ) + + async def _configure_llm_and_mcp( + self, user: UserInfo, llm_model: str | None + ) -> tuple[LLM, dict]: + """Configure LLM and MCP (Model Context Protocol) settings. + + Args: + user: User information containing LLM preferences + llm_model: Optional specific model to use, falls back to user default + + Returns: + Tuple of (configured LLM instance, MCP config dictionary) + """ + # Configure LLM + llm = self._configure_llm(user, llm_model) + + # Configure MCP - SDK expects format: {'mcpServers': {'server_name': {...}}} + mcp_servers: dict[str, Any] = {} + + # Add system-generated servers (default + tavily) + await self._add_system_mcp_servers(mcp_servers, user) + + # Merge custom servers from user settings + self._merge_custom_mcp_config(mcp_servers, user) + + # Wrap in the mcpServers structure required by the SDK + mcp_config = {'mcpServers': mcp_servers} if mcp_servers else {} + _logger.info(f'Final MCP configuration: {mcp_config}') + + return llm, mcp_config + + def _create_agent_with_context( + self, + llm: LLM, + agent_type: AgentType, + system_message_suffix: str | None, + mcp_config: dict, + condenser_max_size: int | None, + secrets: dict | None = None, + ) -> Agent: + """Create an agent with appropriate tools and context based on agent type. 
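+ PLAN agents are built with the planning tools and the planning system prompt; all other agent types get the default tool set with the browser enabled.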
+ + Args: + llm: Configured LLM instance + agent_type: Type of agent to create (PLAN or DEFAULT) + system_message_suffix: Optional suffix for system messages + mcp_config: MCP configuration dictionary + condenser_max_size: condenser_max_size setting + secrets: Optional dictionary of secrets for authentication + + Returns: + Configured Agent instance with context + """ + # Create condenser with user's settings + condenser = self._create_condenser(llm, agent_type, condenser_max_size) + + # Create agent based on type if agent_type == AgentType.PLAN: - agent = get_planning_agent(llm=llm) + agent = Agent( + llm=llm, + tools=get_planning_tools(), + system_prompt_filename='system_prompt_planning.j2', + system_prompt_kwargs={'plan_structure': format_plan_structure()}, + condenser=condenser, + security_analyzer=None, + mcp_config=mcp_config, + ) else: - agent = get_default_agent(llm=llm) + agent = Agent( + llm=llm, + tools=get_default_tools(enable_browser=True), + system_prompt_kwargs={'cli_mode': False}, + condenser=condenser, + mcp_config=mcp_config, + ) + + # Add agent context + agent_context = AgentContext( + system_message_suffix=system_message_suffix, secrets=secrets + ) + agent = agent.model_copy(update={'agent_context': agent_context}) + + return agent + + async def _finalize_conversation_request( + self, + agent: Agent, + conversation_id: UUID | None, + user: UserInfo, + workspace: LocalWorkspace, + initial_message: SendMessageRequest | None, + secrets: dict, + sandbox: SandboxInfo, + remote_workspace: AsyncRemoteWorkspace | None, + selected_repository: str | None, + working_dir: str, + ) -> StartConversationRequest: + """Finalize the conversation request with experiment variants and skills. - conversation_id = uuid4() + Args: + agent: The configured agent + conversation_id: Optional conversation ID, generates new one if None + user: User information + workspace: Local workspace instance + initial_message: Optional initial message for the conversation + secrets: Dictionary of secrets for authentication + sandbox: Sandbox information + remote_workspace: Optional remote workspace for skills loading + selected_repository: Optional repository name + working_dir: Working directory path + + Returns: + Complete StartConversationRequest ready for use + """ + # Generate conversation ID if not provided + conversation_id = conversation_id or uuid4() + + # Apply experiment variants agent = ExperimentManagerImpl.run_agent_variant_tests__v1( user.id, conversation_id, agent ) - # Load and merge all skills if remote_workspace is available + # Load and merge skills if remote workspace is available if remote_workspace: try: agent = await self._load_skills_and_update_agent( @@ -584,17 +928,71 @@ async def _build_start_conversation_request_for_user( _logger.warning(f'Failed to load skills: {e}', exc_info=True) # Continue without skills - don't fail conversation startup - start_conversation_request = StartConversationRequest( + # Create and return the final request + return StartConversationRequest( conversation_id=conversation_id, agent=agent, workspace=workspace, - confirmation_policy=( - AlwaysConfirm() if user.confirmation_mode else NeverConfirm() + confirmation_policy=self._select_confirmation_policy( + bool(user.confirmation_mode), user.security_analyzer ), initial_message=initial_message, secrets=secrets, ) - return start_conversation_request + + async def _build_start_conversation_request_for_user( + self, + sandbox: SandboxInfo, + initial_message: SendMessageRequest | None, + system_message_suffix: 
str | None, + git_provider: ProviderType | None, + working_dir: str, + agent_type: AgentType = AgentType.DEFAULT, + llm_model: str | None = None, + conversation_id: UUID | None = None, + remote_workspace: AsyncRemoteWorkspace | None = None, + selected_repository: str | None = None, + ) -> StartConversationRequest: + """Build a complete conversation request for a user. + + This method orchestrates the creation of a conversation request by: + 1. Setting up git provider secrets + 2. Configuring LLM and MCP settings + 3. Creating an agent with appropriate context + 4. Finalizing the request with skills and experiment variants + """ + user = await self.user_context.get_user_info() + workspace = LocalWorkspace(working_dir=working_dir) + + # Set up secrets for all git providers + secrets = await self._setup_secrets_for_git_providers(user) + + # Configure LLM and MCP + llm, mcp_config = await self._configure_llm_and_mcp(user, llm_model) + + # Create agent with context + agent = self._create_agent_with_context( + llm, + agent_type, + system_message_suffix, + mcp_config, + user.condenser_max_size, + secrets=secrets, + ) + + # Finalize and return the conversation request + return await self._finalize_conversation_request( + agent, + conversation_id, + user, + workspace, + initial_message, + secrets, + sandbox, + remote_workspace, + selected_repository, + working_dir, + ) async def update_agent_server_conversation_title( self, @@ -799,6 +1197,10 @@ class LiveStatusAppConversationServiceInjector(AppConversationServiceInjector): 'be retrieved by a sandboxed conversation.' ), ) + tavily_api_key: SecretStr | None = Field( + default=None, + description='The Tavily Search API key to add to MCP integration', + ) async def inject( self, state: InjectorState, request: Request | None = None @@ -841,6 +1243,29 @@ async def inject( if isinstance(sandbox_service, DockerSandboxService): web_url = f'http://host.docker.internal:{sandbox_service.host_port}' + # Get app_mode and keycloak_auth cookie for SaaS mode + app_mode = None + keycloak_auth_cookie = None + try: + from openhands.server.shared import server_config + + app_mode = ( + server_config.app_mode.value if server_config.app_mode else None + ) + if request and server_config.app_mode == AppMode.SAAS: + keycloak_auth_cookie = request.cookies.get('keycloak_auth') + except (ImportError, AttributeError): + # If server_config is not available (e.g., in tests), continue without it + pass + + # We supply the global tavily key only if the app mode is not SAAS, where + # currently the search endpoints are patched into the app server instead + # so the tavily key does not need to be shared + if self.tavily_api_key and app_mode != AppMode.SAAS: + tavily_api_key = self.tavily_api_key.get_secret_value() + else: + tavily_api_key = None + yield LiveStatusAppConversationService( init_git_in_empty_workspace=self.init_git_in_empty_workspace, user_context=user_context, @@ -854,5 +1279,9 @@ async def inject( sandbox_startup_poll_frequency=self.sandbox_startup_poll_frequency, httpx_client=httpx_client, web_url=web_url, + openhands_provider_base_url=config.openhands_provider_base_url, access_token_hard_timeout=access_token_hard_timeout, + app_mode=app_mode, + keycloak_auth_cookie=keycloak_auth_cookie, + tavily_api_key=tavily_api_key, ) diff --git a/openhands/app_server/app_conversation/skill_loader.py b/openhands/app_server/app_conversation/skill_loader.py index d8fca7cfc3a9..d237ff05426b 100644 --- a/openhands/app_server/app_conversation/skill_loader.py +++ 
b/openhands/app_server/app_conversation/skill_loader.py @@ -14,6 +14,9 @@ import openhands from openhands.app_server.sandbox.sandbox_models import SandboxInfo +from openhands.app_server.user.user_context import UserContext +from openhands.integrations.provider import ProviderType +from openhands.integrations.service_types import AuthenticationError from openhands.sdk.context.skills import Skill from openhands.sdk.workspace.remote.async_remote_workspace import AsyncRemoteWorkspace @@ -119,6 +122,96 @@ def _determine_repo_root(working_dir: str, selected_repository: str | None) -> s return working_dir +async def _is_gitlab_repository(repo_name: str, user_context: UserContext) -> bool: + """Check if a repository is hosted on GitLab. + + Args: + repo_name: Repository name (e.g., "gitlab.com/org/repo" or "org/repo") + user_context: UserContext to access provider handler + + Returns: + True if the repository is hosted on GitLab, False otherwise + """ + try: + provider_handler = await user_context.get_provider_handler() # type: ignore[attr-defined] + repository = await provider_handler.verify_repo_provider(repo_name) + return repository.git_provider == ProviderType.GITLAB + except Exception: + # If we can't determine the provider, assume it's not GitLab + # This is a safe fallback since we'll just use the default .openhands + return False + + +async def _is_azure_devops_repository( + repo_name: str, user_context: UserContext +) -> bool: + """Check if a repository is hosted on Azure DevOps. + + Args: + repo_name: Repository name (e.g., "org/project/repo") + user_context: UserContext to access provider handler + + Returns: + True if the repository is hosted on Azure DevOps, False otherwise + """ + try: + provider_handler = await user_context.get_provider_handler() # type: ignore[attr-defined] + repository = await provider_handler.verify_repo_provider(repo_name) + return repository.git_provider == ProviderType.AZURE_DEVOPS + except Exception: + # If we can't determine the provider, assume it's not Azure DevOps + return False + + +async def _determine_org_repo_path( + selected_repository: str, user_context: UserContext +) -> tuple[str, str]: + """Determine the organization repository path and organization name. 
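+ Provider detection failures are swallowed and the path falls back to the GitHub-style '.openhands' layout.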
+ + Args: + selected_repository: Repository name (e.g., 'owner/repo' or 'org/project/repo') + user_context: UserContext to access provider handler + + Returns: + Tuple of (org_repo_path, org_name) where: + - org_repo_path: Full path to org-level config repo + - org_name: Organization name extracted from repository + + Examples: + - GitHub/Bitbucket: ('owner/.openhands', 'owner') + - GitLab: ('owner/openhands-config', 'owner') + - Azure DevOps: ('org/openhands-config/openhands-config', 'org') + """ + repo_parts = selected_repository.split('/') + + # Determine repository type + is_azure_devops = await _is_azure_devops_repository( + selected_repository, user_context + ) + is_gitlab = await _is_gitlab_repository(selected_repository, user_context) + + # Extract the org/user name + # Azure DevOps format: org/project/repo (3 parts) - extract org (first part) + # GitHub/GitLab/Bitbucket format: owner/repo (2 parts) - extract owner (first part) + if is_azure_devops and len(repo_parts) >= 3: + org_name = repo_parts[0] # Get org from org/project/repo + else: + org_name = repo_parts[-2] # Get owner from owner/repo + + # For GitLab and Azure DevOps, use openhands-config (since .openhands is not a valid repo name) + # For other providers, use .openhands + if is_gitlab: + org_openhands_repo = f'{org_name}/openhands-config' + elif is_azure_devops: + # Azure DevOps format: org/project/repo + # For org-level config, use: org/openhands-config/openhands-config + org_openhands_repo = f'{org_name}/openhands-config/openhands-config' + else: + org_openhands_repo = f'{org_name}/.openhands' + + return org_openhands_repo, org_name + + async def _read_file_from_workspace( workspace: AsyncRemoteWorkspace, file_path: str, working_dir: str ) -> str | None: @@ -322,6 +415,248 @@ async def load_repo_skills( return [] +def _validate_repository_for_org_skills(selected_repository: str) -> bool: + """Validate that the repository path has sufficient parts for org skills. + + Args: + selected_repository: Repository name (e.g., 'owner/repo') + + Returns: + True if repository is valid for org skills loading, False otherwise + """ + repo_parts = selected_repository.split('/') + if len(repo_parts) < 2: + _logger.warning( + f'Repository path has insufficient parts ({len(repo_parts)} < 2), skipping org-level skills' + ) + return False + return True + + +async def _get_org_repository_url( + org_openhands_repo: str, user_context: UserContext +) -> str | None: + """Get authenticated Git URL for organization repository. + + Args: + org_openhands_repo: Organization repository path + user_context: UserContext to access authentication + + Returns: + Authenticated Git URL if successful, None otherwise + """ + try: + remote_url = await user_context.get_authenticated_git_url(org_openhands_repo) + return remote_url + except AuthenticationError as e: + _logger.debug( + f'org-level skill directory {org_openhands_repo} not found: {str(e)}' + ) + return None + except Exception as e: + _logger.debug( + f'Failed to get authenticated URL for {org_openhands_repo}: {str(e)}' + ) + return None + + +async def _clone_org_repository( + workspace: AsyncRemoteWorkspace, + remote_url: str, + org_repo_dir: str, + working_dir: str, + org_openhands_repo: str, +) -> bool: + """Clone organization repository to temporary directory. 
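+ Performs a shallow clone (--depth 1) with GIT_TERMINAL_PROMPT=0 so a missing or inaccessible org repo fails quickly instead of prompting for credentials.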
+ + Args: + workspace: AsyncRemoteWorkspace to execute commands + remote_url: Authenticated Git URL + org_repo_dir: Temporary directory path for cloning + working_dir: Working directory for command execution + org_openhands_repo: Organization repository path (for logging) + + Returns: + True if clone successful, False otherwise + """ + _logger.debug(f'Creating temporary directory for org repo: {org_repo_dir}') + + # Clone the repo (shallow clone for efficiency) + clone_cmd = f'GIT_TERMINAL_PROMPT=0 git clone --depth 1 {remote_url} {org_repo_dir}' + _logger.info('Executing clone command for org-level repo') + + result = await workspace.execute_command(clone_cmd, working_dir, timeout=120.0) + + if result.exit_code != 0: + _logger.info( + f'No org-level skills found at {org_openhands_repo} (exit_code: {result.exit_code})' + ) + _logger.debug(f'Clone command output: {result.stderr}') + return False + + _logger.info(f'Successfully cloned org-level skills from {org_openhands_repo}') + return True + + +async def _load_skills_from_org_directories( + workspace: AsyncRemoteWorkspace, org_repo_dir: str, working_dir: str +) -> tuple[list[Skill], list[Skill]]: + """Load skills from both skills/ and microagents/ directories in org repo. + + Args: + workspace: AsyncRemoteWorkspace to execute commands + org_repo_dir: Path to cloned organization repository + working_dir: Working directory for command execution + + Returns: + Tuple of (skills_dir_skills, microagents_dir_skills) + """ + skills_dir = f'{org_repo_dir}/skills' + skills_dir_skills = await _find_and_load_skill_md_files( + workspace, skills_dir, working_dir + ) + + microagents_dir = f'{org_repo_dir}/microagents' + microagents_dir_skills = await _find_and_load_skill_md_files( + workspace, microagents_dir, working_dir + ) + + return skills_dir_skills, microagents_dir_skills + + +def _merge_org_skills_with_precedence( + skills_dir_skills: list[Skill], microagents_dir_skills: list[Skill] +) -> list[Skill]: + """Merge skills from skills/ and microagents/ with proper precedence. + + Precedence: skills/ > microagents/ (skills/ overrides microagents/ for same name) + + Args: + skills_dir_skills: Skills loaded from skills/ directory + microagents_dir_skills: Skills loaded from microagents/ directory + + Returns: + Merged list of skills with proper precedence applied + """ + skills_by_name = {} + for skill in microagents_dir_skills + skills_dir_skills: + # Later sources (skills/) override earlier ones (microagents/) + if skill.name not in skills_by_name: + skills_by_name[skill.name] = skill + else: + _logger.debug( + f'Overriding org skill "{skill.name}" from microagents/ with skills/' + ) + skills_by_name[skill.name] = skill + + return list(skills_by_name.values()) + + +async def _cleanup_org_repository( + workspace: AsyncRemoteWorkspace, org_repo_dir: str, working_dir: str +) -> None: + """Clean up cloned organization repository directory. + + Args: + workspace: AsyncRemoteWorkspace to execute commands + org_repo_dir: Path to cloned organization repository + working_dir: Working directory for command execution + """ + cleanup_cmd = f'rm -rf {org_repo_dir}' + await workspace.execute_command(cleanup_cmd, working_dir, timeout=10.0) + + +async def load_org_skills( + workspace: AsyncRemoteWorkspace, + selected_repository: str | None, + working_dir: str, + user_context: UserContext, +) -> list[Skill]: + """Load organization-level skills from the organization repository. 
+ + For example, if the repository is github.com/acme-co/api, this will check if + github.com/acme-co/.openhands exists. If it does, it will clone it and load + the skills from both the ./skills/ and ./microagents/ folders. + + For GitLab repositories, it will use openhands-config instead of .openhands + since GitLab doesn't support repository names starting with non-alphanumeric + characters. + + For Azure DevOps repositories, it will use org/openhands-config/openhands-config + format to match Azure DevOps's three-part repository structure (org/project/repo). + + Args: + workspace: AsyncRemoteWorkspace to execute commands in the sandbox + selected_repository: Repository name (e.g., 'owner/repo') or None + working_dir: Working directory path + user_context: UserContext to access provider handler and authentication + + Returns: + List of Skill objects loaded from organization repository. + Returns empty list if no repository selected or on errors. + """ + if not selected_repository: + return [] + + try: + _logger.debug( + f'Starting org-level skill loading for repository: {selected_repository}' + ) + + # Validate repository path + if not _validate_repository_for_org_skills(selected_repository): + return [] + + # Determine organization repository path + org_openhands_repo, org_name = await _determine_org_repo_path( + selected_repository, user_context + ) + + _logger.info(f'Checking for org-level skills at {org_openhands_repo}') + + # Get authenticated URL for org repository + remote_url = await _get_org_repository_url(org_openhands_repo, user_context) + if not remote_url: + return [] + + # Clone the organization repository + org_repo_dir = f'{working_dir}/_org_openhands_{org_name}' + clone_success = await _clone_org_repository( + workspace, remote_url, org_repo_dir, working_dir, org_openhands_repo + ) + if not clone_success: + return [] + + # Load skills from both skills/ and microagents/ directories + ( + skills_dir_skills, + microagents_dir_skills, + ) = await _load_skills_from_org_directories( + workspace, org_repo_dir, working_dir + ) + + # Merge skills with proper precedence + loaded_skills = _merge_org_skills_with_precedence( + skills_dir_skills, microagents_dir_skills + ) + + _logger.info( + f'Loaded {len(loaded_skills)} skills from org-level repository {org_openhands_repo}: {[s.name for s in loaded_skills]}' + ) + + # Clean up the org repo directory + await _cleanup_org_repository(workspace, org_repo_dir, working_dir) + + return loaded_skills + + except AuthenticationError as e: + _logger.debug(f'org-level skill directory not found: {str(e)}') + return [] + except Exception as e: + _logger.warning(f'Failed to load org-level skills: {str(e)}') + return [] + + def merge_skills(skill_lists: list[list[Skill]]) -> list[Skill]: """Merge multiple skill lists, avoiding duplicates by name. 
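A minimal sketch of the provider-specific org-config repository naming that the skill_loader helpers above document; the function name and provider strings here are illustrative only:

def org_config_repo(owner: str, provider: str) -> str:
    # GitLab does not support repository names starting with a non-alphanumeric
    # character, so it uses 'openhands-config' instead of '.openhands'.
    if provider == 'gitlab':
        return f'{owner}/openhands-config'
    if provider == 'azure_devops':
        # Azure DevOps repositories are addressed as org/project/repo.
        return f'{owner}/openhands-config/openhands-config'
    # GitHub and Bitbucket use the conventional owner/.openhands repository.
    return f'{owner}/.openhands'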
diff --git a/openhands/app_server/app_conversation/sql_app_conversation_info_service.py b/openhands/app_server/app_conversation/sql_app_conversation_info_service.py index e411224d5c3e..83e2d1915b47 100644 --- a/openhands/app_server/app_conversation/sql_app_conversation_info_service.py +++ b/openhands/app_server/app_conversation/sql_app_conversation_info_service.py @@ -45,6 +45,8 @@ create_json_type_decorator, ) from openhands.integrations.provider import ProviderType +from openhands.sdk.conversation.conversation_stats import ConversationStats +from openhands.sdk.event import ConversationStateUpdateEvent from openhands.sdk.llm import MetricsSnapshot from openhands.sdk.llm.utils.metrics import TokenUsage from openhands.storage.data_models.conversation_metadata import ConversationTrigger @@ -354,6 +356,130 @@ async def save_app_conversation_info( await self.db_session.commit() return info + async def update_conversation_statistics( + self, conversation_id: UUID, stats: ConversationStats + ) -> None: + """Update conversation statistics from stats event data. + + Args: + conversation_id: The ID of the conversation to update + stats: ConversationStats object containing usage_to_metrics data from stats event + """ + # Extract agent metrics from usage_to_metrics + usage_to_metrics = stats.usage_to_metrics + agent_metrics = usage_to_metrics.get('agent') + + if not agent_metrics: + logger.debug( + 'No agent metrics found in stats for conversation %s', conversation_id + ) + return + + # Query existing record using secure select (filters for V1 and user if available) + query = await self._secure_select() + query = query.where( + StoredConversationMetadata.conversation_id == str(conversation_id) + ) + result = await self.db_session.execute(query) + stored = result.scalar_one_or_none() + + if not stored: + logger.debug( + 'Conversation %s not found or not accessible, skipping statistics update', + conversation_id, + ) + return + + # Extract accumulated_cost and max_budget_per_task from Metrics object + accumulated_cost = agent_metrics.accumulated_cost + max_budget_per_task = agent_metrics.max_budget_per_task + + # Extract accumulated_token_usage from Metrics object + accumulated_token_usage = agent_metrics.accumulated_token_usage + if accumulated_token_usage: + prompt_tokens = accumulated_token_usage.prompt_tokens + completion_tokens = accumulated_token_usage.completion_tokens + cache_read_tokens = accumulated_token_usage.cache_read_tokens + cache_write_tokens = accumulated_token_usage.cache_write_tokens + reasoning_tokens = accumulated_token_usage.reasoning_tokens + context_window = accumulated_token_usage.context_window + per_turn_token = accumulated_token_usage.per_turn_token + else: + prompt_tokens = None + completion_tokens = None + cache_read_tokens = None + cache_write_tokens = None + reasoning_tokens = None + context_window = None + per_turn_token = None + + # Update fields only if values are provided (not None) + if accumulated_cost is not None: + stored.accumulated_cost = accumulated_cost + if max_budget_per_task is not None: + stored.max_budget_per_task = max_budget_per_task + if prompt_tokens is not None: + stored.prompt_tokens = prompt_tokens + if completion_tokens is not None: + stored.completion_tokens = completion_tokens + if cache_read_tokens is not None: + stored.cache_read_tokens = cache_read_tokens + if cache_write_tokens is not None: + stored.cache_write_tokens = cache_write_tokens + if reasoning_tokens is not None: + stored.reasoning_tokens = reasoning_tokens + if context_window is 
not None: + stored.context_window = context_window + if per_turn_token is not None: + stored.per_turn_token = per_turn_token + + # Update last_updated_at timestamp + stored.last_updated_at = utc_now() + + await self.db_session.commit() + + async def process_stats_event( + self, + event: ConversationStateUpdateEvent, + conversation_id: UUID, + ) -> None: + """Process a stats event and update conversation statistics. + + Args: + event: The ConversationStateUpdateEvent with key='stats' + conversation_id: The ID of the conversation to update + """ + try: + # Parse event value into ConversationStats model for type safety + # event.value can be a dict (from JSON deserialization) or a ConversationStats object + event_value = event.value + conversation_stats: ConversationStats | None = None + + if isinstance(event_value, ConversationStats): + # Already a ConversationStats object + conversation_stats = event_value + elif isinstance(event_value, dict): + # Parse dict into ConversationStats model + # This validates the structure and ensures type safety + conversation_stats = ConversationStats.model_validate(event_value) + elif hasattr(event_value, 'usage_to_metrics'): + # Handle objects with usage_to_metrics attribute (e.g., from tests) + # Convert to dict first, then validate + stats_dict = {'usage_to_metrics': event_value.usage_to_metrics} + conversation_stats = ConversationStats.model_validate(stats_dict) + + if conversation_stats and conversation_stats.usage_to_metrics: + # Pass ConversationStats object directly for type safety + await self.update_conversation_statistics( + conversation_id, conversation_stats + ) + except Exception: + logger.exception( + 'Error updating conversation statistics for conversation %s', + conversation_id, + stack_info=True, + ) + async def _secure_select(self): query = select(StoredConversationMetadata).where( StoredConversationMetadata.conversation_version == 'V1' diff --git a/openhands/app_server/config.py b/openhands/app_server/config.py index 2dd50d7fa714..3c40806af015 100644 --- a/openhands/app_server/config.py +++ b/openhands/app_server/config.py @@ -6,9 +6,11 @@ import httpx from fastapi import Depends, Request -from pydantic import Field +from pydantic import Field, SecretStr from sqlalchemy.ext.asyncio import AsyncSession +# Import the event_callback module to ensure all processors are registered +import openhands.app_server.event_callback # noqa: F401 from openhands.agent_server.env_parser import from_env from openhands.app_server.app_conversation.app_conversation_info_service import ( AppConversationInfoService, @@ -72,6 +74,11 @@ def get_default_web_url() -> str | None: return f'https://{web_host}' +def get_openhands_provider_base_url() -> str | None: + """Return the base URL for the OpenHands provider, if configured.""" + return os.getenv('OPENHANDS_PROVIDER_BASE_URL') or None + + def _get_default_lifespan(): # Check legacy parameters for saas mode. 
If we are in SAAS mode do not apply # OSS alembic migrations @@ -86,6 +93,10 @@ class AppServerConfig(OpenHandsModel): default_factory=get_default_web_url, description='The URL where OpenHands is running (e.g., http://localhost:3000)', ) + openhands_provider_base_url: str | None = Field( + default_factory=get_openhands_provider_base_url, + description='Base URL for the OpenHands provider', + ) # Dependency Injection Injectors event: EventServiceInjector | None = None event_callback: EventCallbackServiceInjector | None = None @@ -183,7 +194,13 @@ def config_from_env() -> AppServerConfig: ) if config.app_conversation is None: - config.app_conversation = LiveStatusAppConversationServiceInjector() + tavily_api_key = None + tavily_api_key_str = os.getenv('TAVILY_API_KEY') or os.getenv('SEARCH_API_KEY') + if tavily_api_key_str: + tavily_api_key = SecretStr(tavily_api_key_str) + config.app_conversation = LiveStatusAppConversationServiceInjector( + tavily_api_key=tavily_api_key + ) if config.user is None: config.user = AuthUserContextInjector() diff --git a/openhands/app_server/event_callback/__init__.py b/openhands/app_server/event_callback/__init__.py new file mode 100644 index 000000000000..41be0a732049 --- /dev/null +++ b/openhands/app_server/event_callback/__init__.py @@ -0,0 +1,21 @@ +"""Event callback system for OpenHands. + +This module provides the event callback system that allows processors to be +registered and executed when specific events occur during conversations. + +All callback processors must be imported here to ensure they are registered +with the discriminated union system used by Pydantic for validation. +""" + +# Import base classes and processors without circular dependencies +from .event_callback_models import EventCallbackProcessor, LoggingCallbackProcessor +from .github_v1_callback_processor import GithubV1CallbackProcessor + +# Note: SetTitleCallbackProcessor is not imported here to avoid circular imports +# It will be registered when imported elsewhere in the application + +__all__ = [ + 'EventCallbackProcessor', + 'LoggingCallbackProcessor', + 'GithubV1CallbackProcessor', +] diff --git a/openhands/app_server/event_callback/github_v1_callback_processor.py b/openhands/app_server/event_callback/github_v1_callback_processor.py new file mode 100644 index 000000000000..1a83bed9c0cd --- /dev/null +++ b/openhands/app_server/event_callback/github_v1_callback_processor.py @@ -0,0 +1,296 @@ +import logging +import os +from typing import Any +from uuid import UUID + +import httpx +from github import Github, GithubIntegration +from pydantic import Field + +from openhands.agent_server.models import AskAgentRequest, AskAgentResponse +from openhands.app_server.event_callback.event_callback_models import ( + EventCallback, + EventCallbackProcessor, +) +from openhands.app_server.event_callback.event_callback_result_models import ( + EventCallbackResult, + EventCallbackResultStatus, +) +from openhands.app_server.event_callback.util import ( + ensure_conversation_found, + ensure_running_sandbox, + get_agent_server_url_from_sandbox, + get_conversation_url, + get_prompt_template, +) +from openhands.sdk import Event +from openhands.sdk.event import ConversationStateUpdateEvent + +_logger = logging.getLogger(__name__) + + +class GithubV1CallbackProcessor(EventCallbackProcessor): + """Callback processor for GitHub V1 integrations.""" + + github_view_data: dict[str, Any] = Field(default_factory=dict) + should_request_summary: bool = Field(default=True) + should_extract: bool = 
Field(default=True) + inline_pr_comment: bool = Field(default=False) + + async def __call__( + self, + conversation_id: UUID, + callback: EventCallback, + event: Event, + ) -> EventCallbackResult | None: + """Process events for GitHub V1 integration.""" + + # Only handle ConversationStateUpdateEvent + if not isinstance(event, ConversationStateUpdateEvent): + return None + + # Only act when execution has finished + if not (event.key == 'execution_status' and event.value == 'finished'): + return None + + _logger.info('[GitHub V1] Callback agent state was %s', event) + _logger.info( + '[GitHub V1] Should request summary: %s', self.should_request_summary + ) + + if not self.should_request_summary: + return None + + self.should_request_summary = False + + try: + summary = await self._request_summary(conversation_id) + await self._post_summary_to_github(summary) + + return EventCallbackResult( + status=EventCallbackResultStatus.SUCCESS, + event_callback_id=callback.id, + event_id=event.id, + conversation_id=conversation_id, + detail=summary, + ) + except Exception as e: + _logger.exception('[GitHub V1] Error processing callback: %s', e) + + # Only try to post error to GitHub if we have basic requirements + try: + # Check if we have installation ID and credentials before posting + if ( + self.github_view_data.get('installation_id') + and os.getenv('GITHUB_APP_CLIENT_ID') + and os.getenv('GITHUB_APP_PRIVATE_KEY') + ): + await self._post_summary_to_github( + f'OpenHands encountered an error: **{str(e)}**.\n\n' + f'[See the conversation]({get_conversation_url().format(conversation_id)})' + 'for more information.' + ) + except Exception as post_error: + _logger.warning( + '[GitHub V1] Failed to post error message to GitHub: %s', post_error + ) + + return EventCallbackResult( + status=EventCallbackResultStatus.ERROR, + event_callback_id=callback.id, + event_id=event.id, + conversation_id=conversation_id, + detail=str(e), + ) + + # ------------------------------------------------------------------------- + # GitHub helpers + # ------------------------------------------------------------------------- + + def _get_installation_access_token(self) -> str: + installation_id = self.github_view_data.get('installation_id') + + if not installation_id: + raise ValueError( + f'Missing installation ID for GitHub payload: {self.github_view_data}' + ) + + github_app_client_id = os.getenv('GITHUB_APP_CLIENT_ID', '').strip() + github_app_private_key = os.getenv('GITHUB_APP_PRIVATE_KEY', '').replace( + '\\n', '\n' + ) + + if not github_app_client_id or not github_app_private_key: + raise ValueError('GitHub App credentials are not configured') + + github_integration = GithubIntegration( + github_app_client_id, + github_app_private_key, + ) + token_data = github_integration.get_access_token(installation_id) + return token_data.token + + async def _post_summary_to_github(self, summary: str) -> None: + """Post a summary comment to the configured GitHub issue.""" + installation_token = self._get_installation_access_token() + + if not installation_token: + raise RuntimeError('Missing GitHub credentials') + + full_repo_name = self.github_view_data['full_repo_name'] + issue_number = self.github_view_data['issue_number'] + + if self.inline_pr_comment: + with Github(installation_token) as github_client: + repo = github_client.get_repo(full_repo_name) + pr = repo.get_pull(issue_number) + pr.create_review_comment_reply( + comment_id=self.github_view_data.get('comment_id', ''), body=summary + ) + return + + with 
Github(installation_token) as github_client: + repo = github_client.get_repo(full_repo_name) + issue = repo.get_issue(number=issue_number) + issue.create_comment(summary) + + # ------------------------------------------------------------------------- + # Agent / sandbox helpers + # ------------------------------------------------------------------------- + + async def _ask_question( + self, + httpx_client: httpx.AsyncClient, + agent_server_url: str, + conversation_id: UUID, + session_api_key: str, + message_content: str, + ) -> str: + """Send a message to the agent server via the V1 API and return response text.""" + send_message_request = AskAgentRequest(question=message_content) + + url = ( + f'{agent_server_url.rstrip("/")}' + f'/api/conversations/{conversation_id}/ask_agent' + ) + headers = {'X-Session-API-Key': session_api_key} + payload = send_message_request.model_dump() + + try: + response = await httpx_client.post( + url, + json=payload, + headers=headers, + timeout=30.0, + ) + response.raise_for_status() + + agent_response = AskAgentResponse.model_validate(response.json()) + return agent_response.response + + except httpx.HTTPStatusError as e: + error_detail = f'HTTP {e.response.status_code} error' + try: + error_body = e.response.text + if error_body: + error_detail += f': {error_body}' + except Exception: # noqa: BLE001 + pass + + _logger.error( + '[GitHub V1] HTTP error sending message to %s: %s. ' + 'Request payload: %s. Response headers: %s', + url, + error_detail, + payload, + dict(e.response.headers), + exc_info=True, + ) + raise Exception(f'Failed to send message to agent server: {error_detail}') + + except httpx.TimeoutException: + error_detail = f'Request timeout after 30 seconds to {url}' + _logger.error( + '[GitHub V1] %s. Request payload: %s', + error_detail, + payload, + exc_info=True, + ) + raise Exception(error_detail) + + except httpx.RequestError as e: + error_detail = f'Request error to {url}: {str(e)}' + _logger.error( + '[GitHub V1] %s. Request payload: %s', + error_detail, + payload, + exc_info=True, + ) + raise Exception(error_detail) + + # ------------------------------------------------------------------------- + # Summary orchestration + # ------------------------------------------------------------------------- + + async def _request_summary(self, conversation_id: UUID) -> str: + """ + Ask the agent to produce a summary of its work and return the agent response. + + NOTE: This method now returns a string (the agent server's response text) + and raises exceptions on errors. The wrapping into EventCallbackResult + is handled by __call__. + """ + # Import services within the method to avoid circular imports + from openhands.app_server.config import ( + get_app_conversation_info_service, + get_httpx_client, + get_sandbox_service, + ) + from openhands.app_server.services.injector import InjectorState + from openhands.app_server.user.specifiy_user_context import ( + ADMIN, + USER_CONTEXT_ATTR, + ) + + # Create injector state for dependency injection + state = InjectorState() + setattr(state, USER_CONTEXT_ATTR, ADMIN) + + async with ( + get_app_conversation_info_service(state) as app_conversation_info_service, + get_sandbox_service(state) as sandbox_service, + get_httpx_client(state) as httpx_client, + ): + # 1. Conversation lookup + app_conversation_info = ensure_conversation_found( + await app_conversation_info_service.get_app_conversation_info( + conversation_id + ), + conversation_id, + ) + + # 2. 
Sandbox lookup + validation + sandbox = ensure_running_sandbox( + await sandbox_service.get_sandbox(app_conversation_info.sandbox_id), + app_conversation_info.sandbox_id, + ) + + assert sandbox.session_api_key is not None, ( + f'No session API key for sandbox: {sandbox.id}' + ) + + # 3. URL + instruction + agent_server_url = get_agent_server_url_from_sandbox(sandbox) + + # Prepare message based on agent state + message_content = get_prompt_template('summary_prompt.j2') + + # Ask the agent and return the response text + return await self._ask_question( + httpx_client=httpx_client, + agent_server_url=agent_server_url, + conversation_id=conversation_id, + session_api_key=sandbox.session_api_key, + message_content=message_content, + ) diff --git a/openhands/app_server/event_callback/sql_event_callback_service.py b/openhands/app_server/event_callback/sql_event_callback_service.py index 37e5bce111d2..c45416c37c78 100644 --- a/openhands/app_server/event_callback/sql_event_callback_service.py +++ b/openhands/app_server/event_callback/sql_event_callback_service.py @@ -6,7 +6,6 @@ import asyncio import logging from dataclasses import dataclass -from datetime import datetime from typing import AsyncGenerator from uuid import UUID @@ -15,6 +14,7 @@ from sqlalchemy import Column, Enum, String, and_, func, or_, select from sqlalchemy.ext.asyncio import AsyncSession +from openhands.agent_server.utils import utc_now from openhands.app_server.event_callback.event_callback_models import ( CreateEventCallbackRequest, EventCallback, @@ -177,7 +177,7 @@ async def search_event_callbacks( return EventCallbackPage(items=callbacks, next_page_id=next_page_id) async def save_event_callback(self, event_callback: EventCallback) -> EventCallback: - event_callback.updated_at = datetime.now() + event_callback.updated_at = utc_now() stored_callback = StoredEventCallback(**event_callback.model_dump()) await self.db_session.merge(stored_callback) return event_callback @@ -209,6 +209,10 @@ async def execute_callbacks(self, conversation_id: UUID, event: Event) -> None: for callback in callbacks ] ) + + # Persist any changes callbacks may have made to themselves + for callback in callbacks: + await self.save_event_callback(callback) await self.db_session.commit() async def execute_callback( diff --git a/openhands/app_server/event_callback/util.py b/openhands/app_server/event_callback/util.py new file mode 100644 index 000000000000..1c9e56893545 --- /dev/null +++ b/openhands/app_server/event_callback/util.py @@ -0,0 +1,81 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING +from uuid import UUID + +from openhands.app_server.sandbox.sandbox_models import ( + AGENT_SERVER, + SandboxInfo, + SandboxStatus, +) +from openhands.app_server.utils.docker_utils import ( + replace_localhost_hostname_for_docker, +) + +if TYPE_CHECKING: + from openhands.app_server.app_conversation.app_conversation_models import ( + AppConversationInfo, + ) + + +def get_conversation_url() -> str: + from openhands.app_server.config import get_global_config + + web_url = get_global_config().web_url + conversation_prefix = 'conversations/{}' + conversation_url = f'{web_url}/{conversation_prefix}' + return conversation_url + + +def ensure_conversation_found( + app_conversation_info: AppConversationInfo | None, conversation_id: UUID +) -> AppConversationInfo: + """Ensure conversation info exists, otherwise raise a clear error.""" + if not app_conversation_info: + raise 
RuntimeError(f'Conversation not found: {conversation_id}') + return app_conversation_info + + +def ensure_running_sandbox(sandbox: SandboxInfo | None, sandbox_id: str) -> SandboxInfo: + """Ensure sandbox exists, is running, and has a session API key.""" + if not sandbox: + raise RuntimeError(f'Sandbox not found: {sandbox_id}') + + if sandbox.status != SandboxStatus.RUNNING: + raise RuntimeError(f'Sandbox not running: {sandbox_id}') + + if not sandbox.session_api_key: + raise RuntimeError(f'No session API key for sandbox: {sandbox.id}') + + return sandbox + + +def get_agent_server_url_from_sandbox(sandbox: SandboxInfo) -> str: + """Return the agent server URL from sandbox exposed URLs.""" + exposed_urls = sandbox.exposed_urls + if not exposed_urls: + raise RuntimeError(f'No exposed URLs configured for sandbox {sandbox.id!r}') + + try: + agent_server_url = next( + exposed_url.url + for exposed_url in exposed_urls + if exposed_url.name == AGENT_SERVER + ) + except StopIteration: + raise RuntimeError( + f'No {AGENT_SERVER!r} URL found for sandbox {sandbox.id!r}' + ) from None + + return replace_localhost_hostname_for_docker(agent_server_url) + + +def get_prompt_template(template_name: str) -> str: + from jinja2 import Environment, FileSystemLoader + + jinja_env = Environment( + loader=FileSystemLoader('openhands/integrations/templates/resolver/') + ) + summary_instruction_template = jinja_env.get_template(template_name) + summary_instruction = summary_instruction_template.render() + return summary_instruction diff --git a/openhands/app_server/event_callback/webhook_router.py b/openhands/app_server/event_callback/webhook_router.py index 498ebd2fd26c..28236b732515 100644 --- a/openhands/app_server/event_callback/webhook_router.py +++ b/openhands/app_server/event_callback/webhook_router.py @@ -6,9 +6,10 @@ import pkgutil from uuid import UUID -from fastapi import APIRouter, Depends, HTTPException, status +from fastapi import APIRouter, Depends, HTTPException, Response, status from fastapi.security import APIKeyHeader from jwt import InvalidTokenError +from pydantic import SecretStr from openhands import tools # type: ignore[attr-defined] from openhands.agent_server.models import ConversationInfo, Success @@ -33,6 +34,7 @@ from openhands.app_server.sandbox.sandbox_service import SandboxService from openhands.app_server.services.injector import InjectorState from openhands.app_server.services.jwt_service import JwtService +from openhands.app_server.user.auth_user_context import AuthUserContext from openhands.app_server.user.specifiy_user_context import ( USER_CONTEXT_ATTR, SpecifyUserContext, @@ -41,6 +43,11 @@ from openhands.app_server.user.user_context import UserContext from openhands.integrations.provider import ProviderType from openhands.sdk import Event +from openhands.sdk.event import ConversationStateUpdateEvent +from openhands.server.user_auth.default_user_auth import DefaultUserAuth +from openhands.server.user_auth.user_auth import ( + get_for_user as get_user_auth_for_user, +) router = APIRouter(prefix='/webhooks', tags=['Webhooks']) sandbox_service_dependency = depends_sandbox_service() @@ -53,16 +60,22 @@ async def valid_sandbox( - sandbox_id: str, user_context: UserContext = Depends(as_admin), session_api_key: str = Depends( APIKeyHeader(name='X-Session-API-Key', auto_error=False) ), sandbox_service: SandboxService = sandbox_service_dependency, ) -> SandboxInfo: - sandbox_info = await sandbox_service.get_sandbox(sandbox_id) - if sandbox_info is None or sandbox_info.session_api_key != 
session_api_key: - raise HTTPException(status.HTTP_401_UNAUTHORIZED) + if session_api_key is None: + raise HTTPException( + status.HTTP_401_UNAUTHORIZED, detail='X-Session-API-Key header is required' + ) + + sandbox_info = await sandbox_service.get_sandbox_by_session_api_key(session_api_key) + if sandbox_info is None: + raise HTTPException( + status.HTTP_401_UNAUTHORIZED, detail='Invalid session API key' + ) return sandbox_info @@ -87,7 +100,7 @@ async def valid_conversation( return app_conversation_info -@router.post('/{sandbox_id}/conversations') +@router.post('/conversations') async def on_conversation_update( conversation_info: ConversationInfo, sandbox_info: SandboxInfo = Depends(valid_sandbox), @@ -118,7 +131,7 @@ async def on_conversation_update( return Success() -@router.post('/{sandbox_id}/events/{conversation_id}') +@router.post('/events/{conversation_id}') async def on_event( events: list[Event], conversation_id: UUID, @@ -138,6 +151,13 @@ async def on_event( *[event_service.save_event(conversation_id, event) for event in events] ) + # Process stats events for V1 conversations + for event in events: + if isinstance(event, ConversationStateUpdateEvent) and event.key == 'stats': + await app_conversation_info_service.process_stats_event( + event, conversation_id + ) + asyncio.create_task( _run_callbacks_in_bg_and_close( conversation_id, app_conversation_info.created_by_user_id, events @@ -154,23 +174,34 @@ async def on_event( async def get_secret( access_token: str = Depends(APIKeyHeader(name='X-Access-Token', auto_error=False)), jwt_service: JwtService = jwt_dependency, -) -> str: +) -> Response: """Given an access token, retrieve a user secret. The access token is limited by user and provider type, and may include a timeout, limiting the damage in the event that a token is ever leaked""" try: payload = jwt_service.verify_jws_token(access_token) user_id = payload['user_id'] - provider_type = ProviderType[payload['provider_type']] - user_injector = config.user - assert user_injector is not None - user_context = await user_injector.get_for_user(user_id) - secret = None - if user_context: - secret = await user_context.get_latest_token(provider_type) + provider_type = ProviderType(payload['provider_type']) + + # Get UserAuth for the user_id + if user_id: + user_auth = await get_user_auth_for_user(user_id) + else: + # OSS mode - use default user auth + user_auth = DefaultUserAuth() + + # Create UserContext directly + user_context = AuthUserContext(user_auth=user_auth) + + secret = await user_context.get_latest_token(provider_type) if secret is None: raise HTTPException(404, 'No such provider') - return secret + if isinstance(secret, SecretStr): + secret_value = secret.get_secret_value() + else: + secret_value = secret + + return Response(content=secret_value, media_type='text/plain') except InvalidTokenError: raise HTTPException(status.HTTP_401_UNAUTHORIZED) diff --git a/openhands/app_server/sandbox/docker_sandbox_service.py b/openhands/app_server/sandbox/docker_sandbox_service.py index ff6e0669aeab..a0aeddc0e601 100644 --- a/openhands/app_server/sandbox/docker_sandbox_service.py +++ b/openhands/app_server/sandbox/docker_sandbox_service.py @@ -217,7 +217,9 @@ async def search_sandboxes( sandboxes = [] for container in all_containers: - if container.name.startswith(self.container_name_prefix): + if container.name and container.name.startswith( + self.container_name_prefix + ): sandbox_info = await self._container_to_checked_sandbox_info( container ) @@ -258,6 +260,29 @@ async def 
get_sandbox(self, sandbox_id: str) -> SandboxInfo | None: except (NotFound, APIError): return None + async def get_sandbox_by_session_api_key( + self, session_api_key: str + ) -> SandboxInfo | None: + """Get a single sandbox by session API key.""" + try: + # Get all containers with our prefix + all_containers = self.docker_client.containers.list(all=True) + + for container in all_containers: + if container.name and container.name.startswith( + self.container_name_prefix + ): + # Check if this container has the matching session API key + env_vars = self._get_container_env_vars(container) + container_session_key = env_vars.get(SESSION_API_KEY_VARIABLE) + + if container_session_key == session_api_key: + return await self._container_to_checked_sandbox_info(container) + + return None + except (NotFound, APIError): + return None + async def start_sandbox(self, sandbox_spec_id: str | None = None) -> SandboxInfo: """Start a new sandbox.""" # Enforce sandbox limits by cleaning up old sandboxes @@ -283,8 +308,7 @@ async def start_sandbox(self, sandbox_spec_id: str | None = None) -> SandboxInfo env_vars = sandbox_spec.initial_env.copy() env_vars[SESSION_API_KEY_VARIABLE] = session_api_key env_vars[WEBHOOK_CALLBACK_VARIABLE] = ( - f'http://host.docker.internal:{self.host_port}' - f'/api/v1/webhooks/{container_name}' + f'http://host.docker.internal:{self.host_port}/api/v1/webhooks' ) # Prepare port mappings and add port environment variables diff --git a/openhands/app_server/sandbox/docker_sandbox_spec_service.py b/openhands/app_server/sandbox/docker_sandbox_spec_service.py index 7504cec53749..063b4e8a96c5 100644 --- a/openhands/app_server/sandbox/docker_sandbox_spec_service.py +++ b/openhands/app_server/sandbox/docker_sandbox_spec_service.py @@ -14,9 +14,9 @@ SandboxSpecInfo, ) from openhands.app_server.sandbox.sandbox_spec_service import ( - AGENT_SERVER_IMAGE, SandboxSpecService, SandboxSpecServiceInjector, + get_default_agent_server_image, ) from openhands.app_server.services.injector import InjectorState @@ -34,7 +34,7 @@ def get_docker_client() -> docker.DockerClient: def get_default_sandbox_specs(): return [ SandboxSpecInfo( - id=AGENT_SERVER_IMAGE, + id=get_default_agent_server_image(), command=['--port', '8000'], initial_env={ 'OPENVSCODE_SERVER_ROOT': '/openhands/.openvscode-server', @@ -42,6 +42,8 @@ def get_default_sandbox_specs(): 'LOG_JSON': 'true', 'OH_CONVERSATIONS_PATH': '/workspace/conversations', 'OH_BASH_EVENTS_DIR': '/workspace/bash_events', + 'PYTHONUNBUFFERED': '1', + 'ENV_LOG_LEVEL': '20', }, working_dir='/workspace/project', ) diff --git a/openhands/app_server/sandbox/process_sandbox_service.py b/openhands/app_server/sandbox/process_sandbox_service.py index 716c2e1b1916..200bf62c442b 100644 --- a/openhands/app_server/sandbox/process_sandbox_service.py +++ b/openhands/app_server/sandbox/process_sandbox_service.py @@ -275,6 +275,17 @@ async def get_sandbox(self, sandbox_id: str) -> SandboxInfo | None: return await self._process_to_sandbox_info(sandbox_id, process_info) + async def get_sandbox_by_session_api_key( + self, session_api_key: str + ) -> SandboxInfo | None: + """Get a single sandbox by session API key.""" + # Search through all processes to find one with matching session_api_key + for sandbox_id, process_info in _processes.items(): + if process_info.session_api_key == session_api_key: + return await self._process_to_sandbox_info(sandbox_id, process_info) + + return None + async def start_sandbox(self, sandbox_spec_id: str | None = None) -> SandboxInfo: """Start a new 
sandbox.""" # Get sandbox spec diff --git a/openhands/app_server/sandbox/process_sandbox_spec_service.py b/openhands/app_server/sandbox/process_sandbox_spec_service.py index b5476669f795..4e2e88a2f91d 100644 --- a/openhands/app_server/sandbox/process_sandbox_spec_service.py +++ b/openhands/app_server/sandbox/process_sandbox_spec_service.py @@ -10,9 +10,9 @@ SandboxSpecInfo, ) from openhands.app_server.sandbox.sandbox_spec_service import ( - AGENT_SERVER_IMAGE, SandboxSpecService, SandboxSpecServiceInjector, + get_default_agent_server_image, ) from openhands.app_server.services.injector import InjectorState @@ -20,7 +20,7 @@ def get_default_sandbox_specs(): return [ SandboxSpecInfo( - id=AGENT_SERVER_IMAGE, + id=get_default_agent_server_image(), command=['python', '-m', 'openhands.agent_server'], initial_env={ # VSCode disabled for now diff --git a/openhands/app_server/sandbox/remote_sandbox_service.py b/openhands/app_server/sandbox/remote_sandbox_service.py index dfa029462e41..076c47847898 100644 --- a/openhands/app_server/sandbox/remote_sandbox_service.py +++ b/openhands/app_server/sandbox/remote_sandbox_service.py @@ -44,6 +44,7 @@ from openhands.app_server.user.specifiy_user_context import ADMIN, USER_CONTEXT_ATTR from openhands.app_server.user.user_context import UserContext from openhands.app_server.utils.sql_utils import Base, UtcDateTime +from openhands.sdk.utils.paging import page_iterator _logger = logging.getLogger(__name__) WEBHOOK_CALLBACK_VARIABLE = 'OH_WEBHOOKS_0_BASE_URL' @@ -121,18 +122,9 @@ async def _send_runtime_api_request( _logger.error(f'HTTP error for URL {url}: {e}') raise - async def _to_sandbox_info( + def _to_sandbox_info( self, stored: StoredRemoteSandbox, runtime: dict[str, Any] | None = None - ) -> SandboxInfo: - # If we did not get passsed runtime data, load some - if runtime is None: - try: - runtime = await self._get_runtime(stored.id) - except Exception: - _logger.exception( - f'Error getting runtime: {stored.id}', stack_info=True - ) - + ): status = self._get_sandbox_status_from_runtime(runtime) # Get session_api_key and exposed urls @@ -232,6 +224,40 @@ async def _get_runtime(self, sandbox_id: str) -> dict[str, Any]: runtime_data = response.json() return runtime_data + async def _get_runtimes_batch( + self, sandbox_ids: list[str] + ) -> dict[str, dict[str, Any]]: + """Get multiple runtimes in a single batch request. + + Args: + sandbox_ids: List of sandbox IDs to fetch + + Returns: + Dictionary mapping sandbox_id to runtime data + """ + if not sandbox_ids: + return {} + + # Build query parameters for the batch endpoint + params = [('ids', sandbox_id) for sandbox_id in sandbox_ids] + + response = await self._send_runtime_api_request( + 'GET', + '/sessions/batch', + params=params, + ) + response.raise_for_status() + batch_data = response.json() + + # The batch endpoint should return a list of runtimes + # Convert to a dictionary keyed by session_id for easy lookup + runtimes_by_id = {} + for runtime in batch_data: + if runtime and 'session_id' in runtime: + runtimes_by_id[runtime['session_id']] = runtime + + return runtimes_by_id + async def _init_environment( self, sandbox_spec: SandboxSpecInfo, sandbox_id: str ) -> dict[str, str]: @@ -240,9 +266,7 @@ async def _init_environment( # If a public facing url is defined, add a callback to the agent server environment. 
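+ # Sandboxes now authenticate webhook calls with the X-Session-API-Key header, so the callback URL no longer carries a per-sandbox path segment.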
if self.web_url: - environment[WEBHOOK_CALLBACK_VARIABLE] = ( - f'{self.web_url}/api/v1/webhooks/{sandbox_id}' - ) + environment[WEBHOOK_CALLBACK_VARIABLE] = f'{self.web_url}/api/v1/webhooks' # We specify CORS settings only if there is a public facing url - otherwise # we are probably in local development and the only url in use is localhost environment[ALLOW_CORS_ORIGINS_VARIABLE] = self.web_url @@ -284,13 +308,15 @@ async def search_sandboxes( if has_more: next_page_id = str(offset + limit) - # Convert stored callbacks to domain models - items = await asyncio.gather( - *[ - self._to_sandbox_info(stored_sandbox) - for stored_sandbox in stored_sandboxes - ] - ) + # Batch fetch runtime data for all sandboxes + sandbox_ids = [stored_sandbox.id for stored_sandbox in stored_sandboxes] + runtimes_by_id = await self._get_runtimes_batch(sandbox_ids) + + # Convert stored sandboxes to domain models with runtime data + items = [ + self._to_sandbox_info(stored_sandbox, runtimes_by_id.get(stored_sandbox.id)) + for stored_sandbox in stored_sandboxes + ] return SandboxPage(items=items, next_page_id=next_page_id) @@ -299,7 +325,62 @@ async def get_sandbox(self, sandbox_id: str) -> Union[SandboxInfo, None]: stored_sandbox = await self._get_stored_sandbox(sandbox_id) if stored_sandbox is None: return None - return await self._to_sandbox_info(stored_sandbox) + + runtime = None + try: + runtime = await self._get_runtime(stored_sandbox.id) + except Exception: + _logger.exception( + f'Error getting runtime: {stored_sandbox.id}', stack_info=True + ) + + return self._to_sandbox_info(stored_sandbox, runtime) + + async def get_sandbox_by_session_api_key( + self, session_api_key: str + ) -> Union[SandboxInfo, None]: + """Get a single sandbox by session API key.""" + # TODO: We should definitely refactor this and store the session_api_key in + # the v1_remote_sandbox table + try: + response = await self._send_runtime_api_request( + 'GET', + '/list', + ) + response.raise_for_status() + content = response.json() + for runtime in content['runtimes']: + if session_api_key == runtime['session_api_key']: + query = await self._secure_select() + query = query.filter( + StoredRemoteSandbox.id == runtime.get('session_id') + ) + result = await self.db_session.execute(query) + sandbox = result.first() + if sandbox is None: + raise ValueError('sandbox_not_found') + return self._to_sandbox_info(sandbox, runtime) + except Exception: + _logger.exception( + 'Error getting sandbox from session_api_key', stack_info=True + ) + + # Get all stored sandboxes for the current user + stmt = await self._secure_select() + result = await self.db_session.execute(stmt) + stored_sandboxes = result.scalars().all() + + # Check each sandbox's runtime data for matching session_api_key + for stored_sandbox in stored_sandboxes: + try: + runtime = await self._get_runtime(stored_sandbox.id) + if runtime and runtime.get('session_api_key') == session_api_key: + return self._to_sandbox_info(stored_sandbox, runtime) + except Exception: + # Continue checking other sandboxes if one fails + continue + + return None async def start_sandbox(self, sandbox_spec_id: str | None = None) -> SandboxInfo: """Start a new sandbox by creating a remote runtime.""" @@ -367,7 +448,7 @@ async def start_sandbox(self, sandbox_spec_id: str | None = None) -> SandboxInfo # Hack - result doesn't contain this runtime_data['pod_status'] = 'pending' - return await self._to_sandbox_info(stored_sandbox, runtime_data) + return self._to_sandbox_info(stored_sandbox, runtime_data) except 
httpx.HTTPError as e: _logger.error(f'Failed to start sandbox: {e}') @@ -435,6 +516,81 @@ async def delete_sandbox(self, sandbox_id: str) -> bool: _logger.error(f'Error deleting sandbox {sandbox_id}: {e}') return False + async def pause_old_sandboxes(self, max_num_sandboxes: int) -> list[str]: + """Pause the oldest sandboxes if there are more than max_num_sandboxes running. + In a multi user environment, this will pause sandboxes only for the current user. + + Args: + max_num_sandboxes: Maximum number of sandboxes to keep running + + Returns: + List of sandbox IDs that were paused + """ + if max_num_sandboxes <= 0: + raise ValueError('max_num_sandboxes must be greater than 0') + + response = await self._send_runtime_api_request( + 'GET', + '/list', + ) + content = response.json() + running_session_ids = [ + runtime.get('session_id') for runtime in content['runtimes'] + ] + + query = await self._secure_select() + query = query.filter(StoredRemoteSandbox.id.in_(running_session_ids)).order_by( + StoredRemoteSandbox.created_at.desc() + ) + running_sandboxes = list(await self.db_session.execute(query)) + + # If we're within the limit, no cleanup needed + if len(running_sandboxes) <= max_num_sandboxes: + return [] + + # Determine how many to pause + num_to_pause = len(running_sandboxes) - max_num_sandboxes + sandboxes_to_pause = running_sandboxes[:num_to_pause] + + # Stop the oldest sandboxes + paused_sandbox_ids = [] + for sandbox in sandboxes_to_pause: + try: + success = await self.pause_sandbox(sandbox.id) + if success: + paused_sandbox_ids.append(sandbox.id) + except Exception: + # Continue trying to pause other sandboxes even if one fails + pass + + return paused_sandbox_ids + + async def batch_get_sandboxes( + self, sandbox_ids: list[str] + ) -> list[SandboxInfo | None]: + """Get a batch of sandboxes, returning None for any which were not found.""" + if not sandbox_ids: + return [] + query = await self._secure_select() + query = query.filter(StoredRemoteSandbox.id.in_(sandbox_ids)) + stored_remote_sandboxes = await self.db_session.execute(query) + stored_remote_sandboxes_by_id = { + stored_remote_sandbox[0].id: stored_remote_sandbox[0] + for stored_remote_sandbox in stored_remote_sandboxes + } + runtimes_by_id = await self._get_runtimes_batch( + list(stored_remote_sandboxes_by_id) + ) + results = [] + for sandbox_id in sandbox_ids: + stored_remote_sandbox = stored_remote_sandboxes_by_id.get(sandbox_id) + result = None + if stored_remote_sandbox: + runtime = runtimes_by_id.get(sandbox_id) + result = self._to_sandbox_info(stored_remote_sandbox, runtime) + results.append(result) + return results + def _build_service_url(url: str, service_name: str): scheme, host_and_path = url.split('://') @@ -485,32 +641,26 @@ async def poll_agent_servers(api_url: str, api_key: str, sleep_interval: int): get_event_callback_service(state) as event_callback_service, get_httpx_client(state) as httpx_client, ): - page_id = None matches = 0 - while True: - page = await app_conversation_info_service.search_app_conversation_info( - page_id=page_id + async for app_conversation_info in page_iterator( + app_conversation_info_service.search_app_conversation_info + ): + runtime = runtimes_by_sandbox_id.get( + app_conversation_info.sandbox_id ) - for app_conversation_info in page.items: - runtime = runtimes_by_sandbox_id.get( - app_conversation_info.sandbox_id + if runtime: + matches += 1 + await refresh_conversation( + app_conversation_info_service=app_conversation_info_service, + event_service=event_service, + 
event_callback_service=event_callback_service, + app_conversation_info=app_conversation_info, + runtime=runtime, + httpx_client=httpx_client, ) - if runtime: - matches += 1 - await refresh_conversation( - app_conversation_info_service=app_conversation_info_service, - event_service=event_service, - event_callback_service=event_callback_service, - app_conversation_info=app_conversation_info, - runtime=runtime, - httpx_client=httpx_client, - ) - page_id = page.next_page_id - if page_id is None: - _logger.debug( - f'Matched {len(runtimes_by_sandbox_id)} Runtimes with {matches} Conversations.' - ) - break + _logger.debug( + f'Matched {len(runtimes_by_sandbox_id)} Runtimes with {matches} Conversations.' + ) except Exception as exc: _logger.exception( @@ -564,37 +714,29 @@ async def refresh_conversation( event_url = ( f'{url}/api/conversations/{app_conversation_info.id.hex}/events/search' ) - page_id = None - while True: + + async def fetch_events_page(page_id: str | None = None) -> EventPage: + """Helper function to fetch a page of events from the agent server.""" params: dict[str, str] = {} if page_id: - params['page_id'] = page_id # type: ignore[unreachable] + params['page_id'] = page_id response = await httpx_client.get( event_url, params=params, headers={'X-Session-API-Key': runtime['session_api_key']}, ) response.raise_for_status() - page = EventPage.model_validate(response.json()) - - to_process = [] - for event in page.items: - existing = await event_service.get_event(event.id) - if existing is None: - await event_service.save_event(app_conversation_info.id, event) - to_process.append(event) + return EventPage.model_validate(response.json()) - for event in to_process: + async for event in page_iterator(fetch_events_page): + existing = await event_service.get_event(event.id) + if existing is None: + await event_service.save_event(app_conversation_info.id, event) await event_callback_service.execute_callbacks( app_conversation_info.id, event ) - page_id = page.next_page_id - if page_id is None: - _logger.debug( - f'Finished Refreshing Conversation {app_conversation_info.id}' - ) - break + _logger.debug(f'Finished Refreshing Conversation {app_conversation_info.id}') except Exception as exc: _logger.exception(f'Error Refreshing Conversation: {exc}', stack_info=True) diff --git a/openhands/app_server/sandbox/remote_sandbox_spec_service.py b/openhands/app_server/sandbox/remote_sandbox_spec_service.py index a2a7c58099cd..6228338d7287 100644 --- a/openhands/app_server/sandbox/remote_sandbox_spec_service.py +++ b/openhands/app_server/sandbox/remote_sandbox_spec_service.py @@ -10,9 +10,9 @@ SandboxSpecInfo, ) from openhands.app_server.sandbox.sandbox_spec_service import ( - AGENT_SERVER_IMAGE, SandboxSpecService, SandboxSpecServiceInjector, + get_default_agent_server_image, ) from openhands.app_server.services.injector import InjectorState @@ -20,7 +20,7 @@ def get_default_sandbox_specs(): return [ SandboxSpecInfo( - id=AGENT_SERVER_IMAGE, + id=get_default_agent_server_image(), command=['/usr/local/bin/openhands-agent-server', '--port', '60000'], initial_env={ 'OPENVSCODE_SERVER_ROOT': '/openhands/.openvscode-server', diff --git a/openhands/app_server/sandbox/sandbox_service.py b/openhands/app_server/sandbox/sandbox_service.py index 43393dfcf759..45274975d70c 100644 --- a/openhands/app_server/sandbox/sandbox_service.py +++ b/openhands/app_server/sandbox/sandbox_service.py @@ -8,6 +8,7 @@ ) from openhands.app_server.services.injector import Injector from openhands.sdk.utils.models import 
DiscriminatedUnionMixin +from openhands.sdk.utils.paging import page_iterator class SandboxService(ABC): @@ -25,6 +26,12 @@ async def search_sandboxes( async def get_sandbox(self, sandbox_id: str) -> SandboxInfo | None: """Get a single sandbox. Return None if the sandbox was not found.""" + @abstractmethod + async def get_sandbox_by_session_api_key( + self, session_api_key: str + ) -> SandboxInfo | None: + """Get a single sandbox by session API key. Return None if the sandbox was not found.""" + async def batch_get_sandboxes( self, sandbox_ids: list[str] ) -> list[SandboxInfo | None]: @@ -65,7 +72,7 @@ async def delete_sandbox(self, sandbox_id: str) -> bool: """ async def pause_old_sandboxes(self, max_num_sandboxes: int) -> list[str]: - """Stop the oldest sandboxes if there are more than max_num_sandboxes running. + """Pause the oldest sandboxes if there are more than max_num_sandboxes running. In a multi user environment, this will pause sandboxes only for the current user. Args: @@ -77,24 +84,11 @@ async def pause_old_sandboxes(self, max_num_sandboxes: int) -> list[str]: if max_num_sandboxes <= 0: raise ValueError('max_num_sandboxes must be greater than 0') - # Get all sandboxes (we'll search through all pages) - all_sandboxes = [] - page_id = None - - while True: - page = await self.search_sandboxes(page_id=page_id, limit=100) - all_sandboxes.extend(page.items) - - if page.next_page_id is None: - break - page_id = page.next_page_id - - # Filter to only running sandboxes - running_sandboxes = [ - sandbox - for sandbox in all_sandboxes - if sandbox.status == SandboxStatus.RUNNING - ] + # Get all running sandboxes (iterate through all pages) + running_sandboxes = [] + async for sandbox in page_iterator(self.search_sandboxes, limit=100): + if sandbox.status == SandboxStatus.RUNNING: + running_sandboxes.append(sandbox) # If we're within the limit, no cleanup needed if len(running_sandboxes) <= max_num_sandboxes: diff --git a/openhands/app_server/sandbox/sandbox_spec_service.py b/openhands/app_server/sandbox/sandbox_spec_service.py index 997cbe535149..fe9d1653a99e 100644 --- a/openhands/app_server/sandbox/sandbox_spec_service.py +++ b/openhands/app_server/sandbox/sandbox_spec_service.py @@ -1,4 +1,5 @@ import asyncio +import os from abc import ABC, abstractmethod from openhands.app_server.errors import SandboxError @@ -11,7 +12,7 @@ # The version of the agent server to use for deployments. 
# Typically this will be the same as the values from the pyproject.toml -AGENT_SERVER_IMAGE = 'ghcr.io/openhands/agent-server:15f565b-python' +AGENT_SERVER_IMAGE = 'ghcr.io/openhands/agent-server:97652be-python' class SandboxSpecService(ABC): @@ -57,3 +58,11 @@ class SandboxSpecServiceInjector( DiscriminatedUnionMixin, Injector[SandboxSpecService], ABC ): pass + + +def get_default_agent_server_image(): + agent_server_image_repository = os.getenv('AGENT_SERVER_IMAGE_REPOSITORY') + agent_server_image_tag = os.getenv('AGENT_SERVER_IMAGE_TAG') + if agent_server_image_repository and agent_server_image_tag: + return f'{agent_server_image_repository}:{agent_server_image_tag}' + return AGENT_SERVER_IMAGE diff --git a/openhands/app_server/user/auth_user_context.py b/openhands/app_server/user/auth_user_context.py index 53612364f5a3..4d6488842702 100644 --- a/openhands/app_server/user/auth_user_context.py +++ b/openhands/app_server/user/auth_user_context.py @@ -9,8 +9,12 @@ from openhands.app_server.user.specifiy_user_context import USER_CONTEXT_ATTR from openhands.app_server.user.user_context import UserContext, UserContextInjector from openhands.app_server.user.user_models import UserInfo -from openhands.integrations.provider import ProviderHandler, ProviderType -from openhands.sdk.conversation.secret_source import SecretSource, StaticSecret +from openhands.integrations.provider import ( + PROVIDER_TOKEN_TYPE, + ProviderHandler, + ProviderType, +) +from openhands.sdk.secret import SecretSource, StaticSecret from openhands.server.user_auth.user_auth import UserAuth, get_user_auth USER_AUTH_ATTR = 'user_auth' @@ -44,6 +48,9 @@ async def get_user_info(self) -> UserInfo: self._user_info = user_info return user_info + async def get_provider_tokens(self) -> PROVIDER_TOKEN_TYPE | None: + return await self.user_auth.get_provider_tokens() + async def get_provider_handler(self): provider_handler = self._provider_handler if not provider_handler: @@ -78,6 +85,10 @@ async def get_secrets(self) -> dict[str, SecretSource]: return results + async def get_mcp_api_key(self) -> str | None: + mcp_api_key = await self.user_auth.get_mcp_api_key() + return mcp_api_key + USER_ID_ATTR = 'user_id' diff --git a/openhands/app_server/user/specifiy_user_context.py b/openhands/app_server/user/specifiy_user_context.py index 0855b447bf69..51e62339723e 100644 --- a/openhands/app_server/user/specifiy_user_context.py +++ b/openhands/app_server/user/specifiy_user_context.py @@ -5,8 +5,8 @@ from openhands.app_server.errors import OpenHandsError from openhands.app_server.user.user_context import UserContext from openhands.app_server.user.user_models import UserInfo -from openhands.integrations.provider import ProviderType -from openhands.sdk.conversation.secret_source import SecretSource +from openhands.integrations.provider import PROVIDER_TOKEN_TYPE, ProviderType +from openhands.sdk.secret import SecretSource @dataclass(frozen=True) @@ -24,12 +24,18 @@ async def get_user_info(self) -> UserInfo: async def get_authenticated_git_url(self, repository: str) -> str: raise NotImplementedError() + async def get_provider_tokens(self) -> PROVIDER_TOKEN_TYPE | None: + raise NotImplementedError() + async def get_latest_token(self, provider_type: ProviderType) -> str | None: raise NotImplementedError() async def get_secrets(self) -> dict[str, SecretSource]: raise NotImplementedError() + async def get_mcp_api_key(self) -> str | None: + raise NotImplementedError() + USER_CONTEXT_ATTR = 'user_context' ADMIN = SpecifyUserContext(user_id=None) diff 
--git a/openhands/app_server/user/user_context.py b/openhands/app_server/user/user_context.py index 75fe957160f7..4102df5cf9c4 100644 --- a/openhands/app_server/user/user_context.py +++ b/openhands/app_server/user/user_context.py @@ -4,8 +4,8 @@ from openhands.app_server.user.user_models import ( UserInfo, ) -from openhands.integrations.provider import ProviderType -from openhands.sdk.conversation.secret_source import SecretSource +from openhands.integrations.provider import PROVIDER_TOKEN_TYPE, ProviderType +from openhands.sdk.secret import SecretSource from openhands.sdk.utils.models import DiscriminatedUnionMixin @@ -26,6 +26,10 @@ async def get_user_info(self) -> UserInfo: async def get_authenticated_git_url(self, repository: str) -> str: """Get the provider tokens for the user""" + @abstractmethod + async def get_provider_tokens(self) -> PROVIDER_TOKEN_TYPE | None: + """Get the latest tokens for all provider types""" + @abstractmethod async def get_latest_token(self, provider_type: ProviderType) -> str | None: """Get the latest token for the provider type given""" @@ -34,6 +38,10 @@ async def get_latest_token(self, provider_type: ProviderType) -> str | None: async def get_secrets(self) -> dict[str, SecretSource]: """Get custom secrets and github provider secrets for the conversation.""" + @abstractmethod + async def get_mcp_api_key(self) -> str | None: + """Get an MCP API Key.""" + class UserContextInjector(DiscriminatedUnionMixin, Injector[UserContext], ABC): """Injector for user contexts.""" diff --git a/openhands/app_server/utils/encryption_key.py b/openhands/app_server/utils/encryption_key.py index 5815bce20e58..62224e1da166 100644 --- a/openhands/app_server/utils/encryption_key.py +++ b/openhands/app_server/utils/encryption_key.py @@ -1,3 +1,4 @@ +import hashlib import os from datetime import datetime from pathlib import Path @@ -30,8 +31,14 @@ def get_default_encryption_keys(workspace_dir: Path) -> list[EncryptionKey]: """Generate default encryption keys.""" master_key = os.getenv('JWT_SECRET') if master_key: + # Derive a deterministic key ID from the secret itself. + # This ensures all pods using the same JWT_SECRET get the same key ID, + # which is critical for multi-pod deployments where tokens may be + # created by one pod and verified by another. 
+ key_id = base62.encodebytes(hashlib.sha256(master_key.encode()).digest()) return [ EncryptionKey( + id=key_id, key=SecretStr(master_key), active=True, notes='jwt secret master key', diff --git a/openhands/controller/agent_controller.py b/openhands/controller/agent_controller.py index 3f2ad876748b..0753f0a0a1ce 100644 --- a/openhands/controller/agent_controller.py +++ b/openhands/controller/agent_controller.py @@ -42,10 +42,6 @@ from openhands.core.logger import LOG_ALL_EVENTS from openhands.core.logger import openhands_logger as logger from openhands.core.schema import AgentState -from openhands.utils.posthog_tracker import ( - track_agent_task_completed, - track_credit_limit_reached, -) from openhands.events import ( EventSource, EventStream, @@ -713,20 +709,6 @@ async def set_agent_state_to(self, new_state: AgentState) -> None: EventSource.ENVIRONMENT, ) - # Track agent task completion in PostHog - if new_state == AgentState.FINISHED: - try: - # Get app_mode from environment, default to 'oss' - app_mode = os.environ.get('APP_MODE', 'oss') - track_agent_task_completed( - conversation_id=self.id, - user_id=self.user_id, - app_mode=app_mode, - ) - except Exception as e: - # Don't let tracking errors interrupt the agent - self.log('warning', f'Failed to track agent completion: {e}') - # Save state whenever agent state changes to ensure we don't lose state # in case of crashes or unexpected circumstances self.save_state() @@ -905,18 +887,6 @@ async def _step(self) -> None: self.state_tracker.run_control_flags() except Exception as e: logger.warning('Control flag limits hit') - # Track credit limit reached if it's a budget exception - if 'budget' in str(e).lower() and self.state.budget_flag: - try: - track_credit_limit_reached( - conversation_id=self.id, - user_id=self.user_id, - current_budget=self.state.budget_flag.current_value, - max_budget=self.state.budget_flag.max_value, - ) - except Exception as track_error: - # Don't let tracking errors interrupt the agent - self.log('warning', f'Failed to track credit limit: {track_error}') await self._react_to_exception(e) return @@ -974,6 +944,23 @@ async def _step(self) -> None: return else: raise LLMContextWindowExceedError() + # Check if this is a tool call validation error that should be recoverable + elif ( + isinstance(e, BadRequestError) + and 'tool call validation failed' in error_str + and ( + 'missing properties' in error_str + or 'missing required' in error_str + ) + ): + # Handle tool call validation errors from Groq as recoverable errors + self.event_stream.add_event( + ErrorObservation( + content=f'Tool call validation failed: {str(e)}. Please check the tool parameters and try again.', + ), + EventSource.AGENT, + ) + return else: raise e diff --git a/openhands/core/config/llm_config.py b/openhands/core/config/llm_config.py index 0089f9b27985..8a5f704b3695 100644 --- a/openhands/core/config/llm_config.py +++ b/openhands/core/config/llm_config.py @@ -50,7 +50,7 @@ class LLMConfig(BaseModel): completion_kwargs: Custom kwargs to pass to litellm.completion. 
""" - model: str = Field(default='claude-sonnet-4-20250514') + model: str = Field(default='claude-opus-4-5-20251101') api_key: SecretStr | None = Field(default=None) base_url: str | None = Field(default=None) api_version: str | None = Field(default=None) diff --git a/openhands/integrations/provider.py b/openhands/integrations/provider.py index b4289283e7fd..c260f23ee026 100644 --- a/openhands/integrations/provider.py +++ b/openhands/integrations/provider.py @@ -1,8 +1,9 @@ from __future__ import annotations import os +from collections.abc import Mapping from types import MappingProxyType -from typing import Annotated, Any, Coroutine, Literal, cast, overload +from typing import Any, Coroutine, Literal, cast, overload from urllib.parse import quote import httpx @@ -11,7 +12,6 @@ ConfigDict, Field, SecretStr, - WithJsonSchema, ) from openhands.core.logger import openhands_logger as logger @@ -95,16 +95,8 @@ def from_value(cls, secret_value: CustomSecret | dict[str, str]) -> CustomSecret raise ValueError('Unsupport Provider token type') -PROVIDER_TOKEN_TYPE = MappingProxyType[ProviderType, ProviderToken] -CUSTOM_SECRETS_TYPE = MappingProxyType[str, CustomSecret] -PROVIDER_TOKEN_TYPE_WITH_JSON_SCHEMA = Annotated[ - PROVIDER_TOKEN_TYPE, - WithJsonSchema({'type': 'object', 'additionalProperties': {'type': 'string'}}), -] -CUSTOM_SECRETS_TYPE_WITH_JSON_SCHEMA = Annotated[ - CUSTOM_SECRETS_TYPE, - WithJsonSchema({'type': 'object', 'additionalProperties': {'type': 'string'}}), -] +PROVIDER_TOKEN_TYPE = Mapping[ProviderType, ProviderToken] +CUSTOM_SECRETS_TYPE = Mapping[str, CustomSecret] class ProviderHandler: diff --git a/openhands/llm/fn_call_converter.py b/openhands/llm/fn_call_converter.py index 7de88245162e..826b278dc480 100644 --- a/openhands/llm/fn_call_converter.py +++ b/openhands/llm/fn_call_converter.py @@ -421,16 +421,12 @@ def convert_tool_call_to_string(tool_call: dict) -> str: f'Failed to parse arguments as JSON. Arguments: {tool_call["function"]["arguments"]}' ) from e for param_name, param_value in args.items(): - is_multiline = isinstance(param_value, str) and '\n' in param_value + # Don't add extra newlines - keep parameter value as-is ret += f'' - if is_multiline: - ret += '\n' if isinstance(param_value, list) or isinstance(param_value, dict): ret += json.dumps(param_value) else: ret += f'{param_value}' - if is_multiline: - ret += '\n' ret += '\n' ret += '' return ret diff --git a/openhands/llm/llm.py b/openhands/llm/llm.py index b94ed3bc2be6..150fa54925c1 100644 --- a/openhands/llm/llm.py +++ b/openhands/llm/llm.py @@ -188,12 +188,14 @@ def __init__( if 'claude-opus-4-1' in self.config.model.lower(): kwargs['thinking'] = {'type': 'disabled'} - # Anthropic constraint: Opus 4.1 and Sonnet 4 models cannot accept both temperature and top_p + # Anthropic constraint: Opus 4.1, Opus 4.5, and Sonnet 4 models cannot accept both temperature and top_p # Prefer temperature (drop top_p) if both are specified. 
_model_lower = self.config.model.lower() - # Apply to Opus 4.1 and Sonnet 4 models to avoid API errors + # Apply to Opus 4.1, Opus 4.5, and Sonnet 4 models to avoid API errors if ( - ('claude-opus-4-1' in _model_lower) or ('claude-sonnet-4' in _model_lower) + ('claude-opus-4-1' in _model_lower) + or ('claude-opus-4-5' in _model_lower) + or ('claude-sonnet-4' in _model_lower) ) and ('temperature' in kwargs and 'top_p' in kwargs): kwargs.pop('top_p', None) diff --git a/openhands/llm/model_features.py b/openhands/llm/model_features.py index a9857ffaca85..f592f0bb9892 100644 --- a/openhands/llm/model_features.py +++ b/openhands/llm/model_features.py @@ -132,6 +132,8 @@ class ModelFeatures: 'grok-code-fast-1', # DeepSeek R1 family 'deepseek-r1-0528*', + # Azure GPT-5 family + 'azure/gpt-5*', ] diff --git a/openhands/memory/conversation_memory.py b/openhands/memory/conversation_memory.py index 5ff6ec7e584f..5ae1a2cd715c 100644 --- a/openhands/memory/conversation_memory.py +++ b/openhands/memory/conversation_memory.py @@ -76,6 +76,7 @@ def process_events( self, condensed_history: list[Event], initial_user_action: MessageAction, + forgotten_event_ids: set[int] | None = None, max_message_chars: int | None = None, vision_is_active: bool = False, ) -> list[Message]: @@ -85,16 +86,23 @@ def process_events( Args: condensed_history: The condensed history of events to convert + initial_user_action: The initial user message action, if available. Used to ensure the conversation starts correctly. + forgotten_event_ids: Set of event IDs that have been forgotten/condensed. If the initial user action's ID + is in this set, it will not be re-inserted to prevent re-execution of old instructions. max_message_chars: The maximum number of characters in the content of an event included in the prompt to the LLM. Larger observations are truncated. vision_is_active: Whether vision is active in the LLM. If True, image URLs will be included. - initial_user_action: The initial user message action, if available. Used to ensure the conversation starts correctly. """ events = condensed_history + # Default to empty set if not provided + if forgotten_event_ids is None: + forgotten_event_ids = set() # Ensure the event list starts with SystemMessageAction, then MessageAction(source='user') self._ensure_system_message(events) - self._ensure_initial_user_message(events, initial_user_action) + self._ensure_initial_user_message( + events, initial_user_action, forgotten_event_ids + ) # log visual browsing status logger.debug(f'Visual browsing: {self.agent_config.enable_som_visual_browsing}') @@ -827,9 +835,23 @@ def _ensure_system_message(self, events: list[Event]) -> None: ) def _ensure_initial_user_message( - self, events: list[Event], initial_user_action: MessageAction + self, + events: list[Event], + initial_user_action: MessageAction, + forgotten_event_ids: set[int], ) -> None: - """Checks if the second event is a user MessageAction and inserts the provided one if needed.""" + """Checks if the second event is a user MessageAction and inserts the provided one if needed. + + IMPORTANT: If the initial user action has been condensed (its ID is in forgotten_event_ids), + we do NOT re-insert it. This prevents old instructions from being re-executed after + conversation condensation. The condensation summary already contains the context of + what was requested and completed. 
+ + Args: + events: The list of events to modify in-place + initial_user_action: The initial user message action from the full history + forgotten_event_ids: Set of event IDs that have been forgotten/condensed + """ if ( not events ): # Should have system message from previous step, but safety check @@ -837,6 +859,17 @@ def _ensure_initial_user_message( # Or raise? Let's log for now, _ensure_system_message should handle this. return + # Check if the initial user action has been condensed/forgotten. + # If so, we should NOT re-insert it to prevent re-execution of old instructions. + # The condensation summary already contains the context of what was requested. + initial_user_action_id = initial_user_action.id + if initial_user_action_id in forgotten_event_ids: + logger.info( + f'Initial user action (id={initial_user_action_id}) has been condensed. ' + 'Not re-inserting to prevent re-execution of old instructions.' + ) + return + # We expect events[0] to be SystemMessageAction after _ensure_system_message if len(events) == 1: # Only system message exists diff --git a/openhands/memory/view.py b/openhands/memory/view.py index 87a20b6340e5..81dd8bab5d63 100644 --- a/openhands/memory/view.py +++ b/openhands/memory/view.py @@ -18,6 +18,8 @@ class View(BaseModel): events: list[Event] unhandled_condensation_request: bool = False + # Set of event IDs that have been forgotten/condensed + forgotten_event_ids: set[int] = set() def __len__(self) -> int: return len(self.events) @@ -90,4 +92,5 @@ def from_events(events: list[Event]) -> View: return View( events=kept_events, unhandled_condensation_request=unhandled_condensation_request, + forgotten_event_ids=forgotten_event_ids, ) diff --git a/openhands/runtime/base.py b/openhands/runtime/base.py index 5eb5429f71ca..c7a332166b7d 100644 --- a/openhands/runtime/base.py +++ b/openhands/runtime/base.py @@ -76,6 +76,8 @@ call_sync_from_async, ) +DISABLE_VSCODE_PLUGIN = os.getenv('DISABLE_VSCODE_PLUGIN', 'false').lower() == 'true' + def _default_env_vars(sandbox_config: SandboxConfig) -> dict[str, str]: ret = {} @@ -153,9 +155,11 @@ def __init__( self.plugins = ( copy.deepcopy(plugins) if plugins is not None and len(plugins) > 0 else [] ) + # add VSCode plugin if not in headless mode - if not headless_mode: + if not headless_mode and not DISABLE_VSCODE_PLUGIN: self.plugins.append(VSCodeRequirement()) + logger.info(f'Loaded plugins for runtime {self.sid}: {self.plugins}') self.status_callback = status_callback self.attach_to_existing = attach_to_existing diff --git a/openhands/runtime/browser/browser_env.py b/openhands/runtime/browser/browser_env.py index 55e3ce18902a..c8d09d9c2bdd 100644 --- a/openhands/runtime/browser/browser_env.py +++ b/openhands/runtime/browser/browser_env.py @@ -1,8 +1,10 @@ import atexit import json import multiprocessing +import os import time import uuid +from pathlib import Path import browsergym.core # noqa F401 (we register the openended task as a gym environment) import gymnasium as gym @@ -67,6 +69,16 @@ def init_browser(self) -> None: raise BrowserInitException('Failed to start browser environment.') def browser_process(self) -> None: + def _is_local_runtime() -> bool: + runtime_flag = os.getenv('RUNTIME', '').lower() + return runtime_flag == 'local' + + # Default Playwright cache for local runs only; do not override in docker + if _is_local_runtime() and 'PLAYWRIGHT_BROWSERS_PATH' not in os.environ: + os.environ['PLAYWRIGHT_BROWSERS_PATH'] = str( + Path.home() / '.cache' / 'playwright' + ) + if self.eval_mode: assert 
self.browsergym_eval_env is not None logger.info('Initializing browser env for web browsing evaluation.') @@ -87,6 +99,11 @@ def browser_process(self) -> None: ) env = gym.make(self.browsergym_eval_env, tags_to_mark='all', timeout=100000) else: + downloads_path = os.getenv('BROWSERGYM_DOWNLOAD_DIR') + if not downloads_path and _is_local_runtime(): + downloads_path = str(Path.home() / '.cache' / 'browsergym-downloads') + if not downloads_path: + downloads_path = '/workspace/.downloads/' env = gym.make( 'browsergym/openended', task_kwargs={'start_url': 'about:blank', 'goal': 'PLACEHOLDER_GOAL'}, @@ -96,7 +113,7 @@ def browser_process(self) -> None: tags_to_mark='all', timeout=100000, pw_context_kwargs={'accept_downloads': True}, - pw_chromium_kwargs={'downloads_path': '/workspace/.downloads/'}, + pw_chromium_kwargs={'downloads_path': downloads_path}, ) obs, info = env.reset() diff --git a/openhands/runtime/builder/docker.py b/openhands/runtime/builder/docker.py index 39a7982cd518..5f0fb2027b26 100644 --- a/openhands/runtime/builder/docker.py +++ b/openhands/runtime/builder/docker.py @@ -19,8 +19,11 @@ def __init__(self, docker_client: docker.DockerClient): version_info = self.docker_client.version() server_version = version_info.get('Version', '').replace('-', '.') + components = version_info.get('Components') self.is_podman = ( - version_info.get('Components')[0].get('Name').startswith('Podman') + components is not None + and len(components) > 0 + and components[0].get('Name', '').startswith('Podman') ) if ( tuple(map(int, server_version.split('.')[:2])) < (18, 9) @@ -79,8 +82,11 @@ def build( self.docker_client = docker.from_env() version_info = self.docker_client.version() server_version = version_info.get('Version', '').split('+')[0].replace('-', '.') + components = version_info.get('Components') self.is_podman = ( - version_info.get('Components')[0].get('Name').startswith('Podman') + components is not None + and len(components) > 0 + and components[0].get('Name', '').startswith('Podman') ) if tuple(map(int, server_version.split('.'))) < (18, 9) and not self.is_podman: raise AgentRuntimeBuildError( diff --git a/openhands/runtime/impl/docker/containers.py b/openhands/runtime/impl/docker/containers.py index 25764b027488..32a5ba1353e2 100644 --- a/openhands/runtime/impl/docker/containers.py +++ b/openhands/runtime/impl/docker/containers.py @@ -7,7 +7,7 @@ def stop_all_containers(prefix: str) -> None: containers = docker_client.containers.list(all=True) for container in containers: try: - if container.name.startswith(prefix): + if container.name and container.name.startswith(prefix): container.stop() except docker.errors.APIError: pass diff --git a/openhands/runtime/impl/kubernetes/README.md b/openhands/runtime/impl/kubernetes/README.md index d16247389da0..36b7452d1ea8 100644 --- a/openhands/runtime/impl/kubernetes/README.md +++ b/openhands/runtime/impl/kubernetes/README.md @@ -40,7 +40,7 @@ Two configuration options are required to use the Kubernetes runtime: 2. 
**Runtime Container Image**: Specify the container image to use for the runtime environment ```toml [sandbox] - runtime_container_image = "docker.openhands.dev/openhands/runtime:0.62-nikolaik" + runtime_container_image = "docker.openhands.dev/openhands/runtime:1.0-nikolaik" ``` #### Additional Kubernetes Options diff --git a/openhands/runtime/impl/local/local_runtime.py b/openhands/runtime/impl/local/local_runtime.py index ed8d26996ae4..cf81b222ebd5 100644 --- a/openhands/runtime/impl/local/local_runtime.py +++ b/openhands/runtime/impl/local/local_runtime.py @@ -45,6 +45,8 @@ from openhands.utils.http_session import httpx_verify_option from openhands.utils.tenacity_stop import stop_if_should_exit +DISABLE_VSCODE_PLUGIN = os.getenv('DISABLE_VSCODE_PLUGIN', 'false').lower() == 'true' + @dataclass class ActionExecutionServerInfo: @@ -247,7 +249,22 @@ async def connect(self) -> None: ) else: # Set up workspace directory + # For local runtime, prefer a stable host path over /workspace defaults. + if ( + self.config.workspace_base is None + and self.config.runtime + and self.config.runtime.lower() == 'local' + ): + env_base = os.getenv('LOCAL_WORKSPACE_BASE') + if env_base: + self.config.workspace_base = os.path.abspath(env_base) + else: + self.config.workspace_base = os.path.abspath( + os.path.join(os.getcwd(), 'workspace', 'local') + ) + if self.config.workspace_base is not None: + os.makedirs(self.config.workspace_base, exist_ok=True) logger.warning( f'Workspace base path is set to {self.config.workspace_base}. ' 'It will be used as the path for the agent to run in. ' @@ -406,7 +423,7 @@ def setup(cls, config: OpenHandsConfig, headless_mode: bool = False): plugins = _get_plugins(config) # Copy the logic from Runtime where we add a VSCodePlugin on init if missing - if not headless_mode: + if not headless_mode and not DISABLE_VSCODE_PLUGIN: plugins.append(VSCodeRequirement()) for _ in range(initial_num_warm_servers): diff --git a/openhands/server/app.py b/openhands/server/app.py index d5135f23999d..5cee75b163a4 100644 --- a/openhands/server/app.py +++ b/openhands/server/app.py @@ -36,7 +36,7 @@ from openhands.server.types import AppMode from openhands.version import get_version -mcp_app = mcp_server.http_app(path='/mcp') +mcp_app = mcp_server.http_app(path='/mcp', stateless_http=True) def combine_lifespans(*lifespans): diff --git a/openhands/server/routes/git.py b/openhands/server/routes/git.py index 1401bb0dcd34..a6807a2e2a4a 100644 --- a/openhands/server/routes/git.py +++ b/openhands/server/routes/git.py @@ -26,13 +26,11 @@ ) from openhands.server.dependencies import get_dependencies from openhands.server.shared import server_config -from openhands.server.types import AppMode from openhands.server.user_auth import ( get_access_token, get_provider_tokens, get_user_id, ) -from openhands.utils.posthog_tracker import alias_user_identities app = APIRouter(prefix='/api/user', dependencies=get_dependencies()) @@ -119,14 +117,6 @@ async def get_user( try: user: User = await client.get_user() - - # Alias git provider login with Keycloak user ID in PostHog (SaaS mode only) - if user_id and user.login and server_config.app_mode == AppMode.SAAS: - alias_user_identities( - keycloak_user_id=user_id, - git_login=user.login, - ) - return user except UnknownException as e: diff --git a/openhands/server/routes/mcp.py b/openhands/server/routes/mcp.py index 929c66af5b9e..2d541d637c90 100644 --- a/openhands/server/routes/mcp.py +++ b/openhands/server/routes/mcp.py @@ -25,9 +25,7 @@ ) from 
openhands.storage.data_models.conversation_metadata import ConversationMetadata -mcp_server = FastMCP( - 'mcp', stateless_http=True, mask_error_details=True, dependencies=None -) +mcp_server = FastMCP('mcp', mask_error_details=True) HOST = f'https://{os.getenv("WEB_HOST", "app.all-hands.dev").strip()}' CONVERSATION_URL = HOST + '/conversations/{}' diff --git a/openhands/server/services/conversation_service.py b/openhands/server/services/conversation_service.py index 927e55ce5831..ac2e06b8cdd1 100644 --- a/openhands/server/services/conversation_service.py +++ b/openhands/server/services/conversation_service.py @@ -7,7 +7,7 @@ from openhands.events.action.message import MessageAction from openhands.experiments.experiment_manager import ExperimentManagerImpl from openhands.integrations.provider import ( - CUSTOM_SECRETS_TYPE_WITH_JSON_SCHEMA, + CUSTOM_SECRETS_TYPE, PROVIDER_TOKEN_TYPE, ProviderToken, ) @@ -73,7 +73,7 @@ async def initialize_conversation( async def start_conversation( user_id: str | None, git_provider_tokens: PROVIDER_TOKEN_TYPE | None, - custom_secrets: CUSTOM_SECRETS_TYPE_WITH_JSON_SCHEMA | None, + custom_secrets: CUSTOM_SECRETS_TYPE | None, initial_user_msg: str | None, image_urls: list[str] | None, replay_json: str | None, @@ -164,7 +164,7 @@ async def start_conversation( async def create_new_conversation( user_id: str | None, git_provider_tokens: PROVIDER_TOKEN_TYPE | None, - custom_secrets: CUSTOM_SECRETS_TYPE_WITH_JSON_SCHEMA | None, + custom_secrets: CUSTOM_SECRETS_TYPE | None, selected_repository: str | None, selected_branch: str | None, initial_user_msg: str | None, diff --git a/openhands/server/session/conversation_init_data.py b/openhands/server/session/conversation_init_data.py index cdf76db97702..c1bf660c2840 100644 --- a/openhands/server/session/conversation_init_data.py +++ b/openhands/server/session/conversation_init_data.py @@ -1,4 +1,7 @@ -from pydantic import ConfigDict, Field +from collections.abc import Mapping +from types import MappingProxyType + +from pydantic import ConfigDict, Field, field_validator from openhands.integrations.provider import CUSTOM_SECRETS_TYPE, PROVIDER_TOKEN_TYPE from openhands.integrations.service_types import ProviderType @@ -19,3 +22,17 @@ class ConversationInitData(Settings): model_config = ConfigDict( arbitrary_types_allowed=True, ) + + @field_validator('git_provider_tokens', 'custom_secrets') + @classmethod + def immutable_validator(cls, value: Mapping | None) -> MappingProxyType | None: + """Ensure git_provider_tokens and custom_secrets are always MappingProxyType. + + This validator converts any Mapping (including dict) to MappingProxyType, + ensuring type safety and immutability. If the value is None, it returns None. 
+ """ + if value is None: + return None + if isinstance(value, MappingProxyType): + return value + return MappingProxyType(value) diff --git a/openhands/server/user_auth/default_user_auth.py b/openhands/server/user_auth/default_user_auth.py index 2e0a7b5af992..8bc79af1561e 100644 --- a/openhands/server/user_auth/default_user_auth.py +++ b/openhands/server/user_auth/default_user_auth.py @@ -88,6 +88,9 @@ async def get_provider_tokens(self) -> PROVIDER_TOKEN_TYPE | None: return None return user_secrets.provider_tokens + async def get_mcp_api_key(self) -> str | None: + return None + @classmethod async def get_instance(cls, request: Request) -> UserAuth: user_auth = DefaultUserAuth() diff --git a/openhands/server/user_auth/user_auth.py b/openhands/server/user_auth/user_auth.py index e370d3247438..c61c9ceb8bfb 100644 --- a/openhands/server/user_auth/user_auth.py +++ b/openhands/server/user_auth/user_auth.py @@ -75,6 +75,10 @@ async def get_secrets(self) -> Secrets | None: def get_auth_type(self) -> AuthType | None: return None + @abstractmethod + async def get_mcp_api_key(self) -> str | None: + """Get an mcp api key for the user""" + @classmethod @abstractmethod async def get_instance(cls, request: Request) -> UserAuth: diff --git a/openhands/storage/data_models/secrets.py b/openhands/storage/data_models/secrets.py index ce5302e754af..69b60e9730d0 100644 --- a/openhands/storage/data_models/secrets.py +++ b/openhands/storage/data_models/secrets.py @@ -1,3 +1,4 @@ +from collections.abc import Mapping from types import MappingProxyType from typing import Any @@ -7,6 +8,7 @@ Field, SerializationInfo, field_serializer, + field_validator, model_validator, ) from pydantic.json import pydantic_encoder @@ -14,9 +16,7 @@ from openhands.events.stream import EventStream from openhands.integrations.provider import ( CUSTOM_SECRETS_TYPE, - CUSTOM_SECRETS_TYPE_WITH_JSON_SCHEMA, PROVIDER_TOKEN_TYPE, - PROVIDER_TOKEN_TYPE_WITH_JSON_SCHEMA, CustomSecret, ProviderToken, ) @@ -24,11 +24,11 @@ class Secrets(BaseModel): - provider_tokens: PROVIDER_TOKEN_TYPE_WITH_JSON_SCHEMA = Field( + provider_tokens: PROVIDER_TOKEN_TYPE = Field( default_factory=lambda: MappingProxyType({}) ) - custom_secrets: CUSTOM_SECRETS_TYPE_WITH_JSON_SCHEMA = Field( + custom_secrets: CUSTOM_SECRETS_TYPE = Field( default_factory=lambda: MappingProxyType({}) ) @@ -38,6 +38,11 @@ class Secrets(BaseModel): arbitrary_types_allowed=True, ) + @field_validator('provider_tokens', 'custom_secrets') + @classmethod + def immutable_validator(cls, value: Mapping) -> MappingProxyType: + return MappingProxyType(value) + @field_serializer('provider_tokens') def provider_tokens_serializer( self, provider_tokens: PROVIDER_TOKEN_TYPE, info: SerializationInfo diff --git a/openhands/storage/data_models/settings.py b/openhands/storage/data_models/settings.py index 72785c1822ab..0dc9b99e6238 100644 --- a/openhands/storage/data_models/settings.py +++ b/openhands/storage/data_models/settings.py @@ -1,5 +1,7 @@ from __future__ import annotations +import os + from pydantic import ( BaseModel, ConfigDict, @@ -48,6 +50,7 @@ class Settings(BaseModel): email_verified: bool | None = None git_user_name: str | None = None git_user_email: str | None = None + v1_enabled: bool | None = Field(default=bool(os.getenv('V1_ENABLED') == '1')) model_config = ConfigDict( validate_assignment=True, diff --git a/openhands/storage/settings/file_settings_store.py b/openhands/storage/settings/file_settings_store.py index 3acedeb16fe3..5b43bf6b80fd 100644 --- 
a/openhands/storage/settings/file_settings_store.py +++ b/openhands/storage/settings/file_settings_store.py @@ -21,6 +21,11 @@ async def load(self) -> Settings | None: json_str = await call_sync_from_async(self.file_store.read, self.path) kwargs = json.loads(json_str) settings = Settings(**kwargs) + + # Turn on V1 in OpenHands + # We can simplify / remove this as part of V0 removal + settings.v1_enabled = True + return settings except FileNotFoundError: return None diff --git a/openhands/utils/llm.py b/openhands/utils/llm.py index 9eeb7c539304..876a89000159 100644 --- a/openhands/utils/llm.py +++ b/openhands/utils/llm.py @@ -60,6 +60,7 @@ def get_supported_llm_models(config: OpenHandsConfig) -> list[str]: 'openhands/gpt-5-2025-08-07', 'openhands/gpt-5-mini-2025-08-07', 'openhands/claude-opus-4-20250514', + 'openhands/claude-opus-4-5-20251101', 'openhands/gemini-2.5-pro', 'openhands/o3', 'openhands/o4-mini', @@ -90,4 +91,4 @@ def get_supported_llm_models(config: OpenHandsConfig) -> list[str]: ] model_list = clarifai_models + model_list - return list(sorted(set(model_list))) + return sorted(set(model_list)) diff --git a/openhands/utils/posthog_tracker.py b/openhands/utils/posthog_tracker.py deleted file mode 100644 index c0859eddc717..000000000000 --- a/openhands/utils/posthog_tracker.py +++ /dev/null @@ -1,270 +0,0 @@ -"""PostHog tracking utilities for OpenHands events.""" - -import os - -from openhands.core.logger import openhands_logger as logger - -# Lazy import posthog to avoid import errors in environments where it's not installed -posthog = None - - -def _init_posthog(): - """Initialize PostHog client lazily.""" - global posthog - if posthog is None: - try: - import posthog as ph - - posthog = ph - posthog.api_key = os.environ.get( - 'POSTHOG_CLIENT_KEY', 'phc_3ESMmY9SgqEAGBB6sMGK5ayYHkeUuknH2vP6FmWH9RA' - ) - posthog.host = os.environ.get('POSTHOG_HOST', 'https://us.i.posthog.com') - except ImportError: - logger.warning( - 'PostHog not installed. Analytics tracking will be disabled.' - ) - posthog = None - - -def track_agent_task_completed( - conversation_id: str, - user_id: str | None = None, - app_mode: str | None = None, -) -> None: - """Track when an agent completes a task. - - Args: - conversation_id: The ID of the conversation/session - user_id: The ID of the user (optional, may be None for unauthenticated users) - app_mode: The application mode (saas/oss), optional - """ - _init_posthog() - - if posthog is None: - return - - # Use conversation_id as distinct_id if user_id is not available - # This ensures we can track completions even for anonymous users - distinct_id = user_id if user_id else f'conversation_{conversation_id}' - - try: - posthog.capture( - distinct_id=distinct_id, - event='agent_task_completed', - properties={ - 'conversation_id': conversation_id, - 'user_id': user_id, - 'app_mode': app_mode or 'unknown', - }, - ) - logger.debug( - 'posthog_track', - extra={ - 'event': 'agent_task_completed', - 'conversation_id': conversation_id, - 'user_id': user_id, - }, - ) - except Exception as e: - logger.warning( - f'Failed to track agent_task_completed to PostHog: {e}', - extra={ - 'conversation_id': conversation_id, - 'error': str(e), - }, - ) - - -def track_user_signup_completed( - user_id: str, - signup_timestamp: str, -) -> None: - """Track when a user completes signup by accepting TOS. 
- - Args: - user_id: The ID of the user (Keycloak user ID) - signup_timestamp: ISO format timestamp of when TOS was accepted - """ - _init_posthog() - - if posthog is None: - return - - try: - posthog.capture( - distinct_id=user_id, - event='user_signup_completed', - properties={ - 'user_id': user_id, - 'signup_timestamp': signup_timestamp, - }, - ) - logger.debug( - 'posthog_track', - extra={ - 'event': 'user_signup_completed', - 'user_id': user_id, - }, - ) - except Exception as e: - logger.warning( - f'Failed to track user_signup_completed to PostHog: {e}', - extra={ - 'user_id': user_id, - 'error': str(e), - }, - ) - - -def track_credit_limit_reached( - conversation_id: str, - user_id: str | None = None, - current_budget: float = 0.0, - max_budget: float = 0.0, -) -> None: - """Track when a user reaches their credit limit during a conversation. - - Args: - conversation_id: The ID of the conversation/session - user_id: The ID of the user (optional, may be None for unauthenticated users) - current_budget: The current budget spent - max_budget: The maximum budget allowed - """ - _init_posthog() - - if posthog is None: - return - - distinct_id = user_id if user_id else f'conversation_{conversation_id}' - - try: - posthog.capture( - distinct_id=distinct_id, - event='credit_limit_reached', - properties={ - 'conversation_id': conversation_id, - 'user_id': user_id, - 'current_budget': current_budget, - 'max_budget': max_budget, - }, - ) - logger.debug( - 'posthog_track', - extra={ - 'event': 'credit_limit_reached', - 'conversation_id': conversation_id, - 'user_id': user_id, - 'current_budget': current_budget, - 'max_budget': max_budget, - }, - ) - except Exception as e: - logger.warning( - f'Failed to track credit_limit_reached to PostHog: {e}', - extra={ - 'conversation_id': conversation_id, - 'error': str(e), - }, - ) - - -def track_credits_purchased( - user_id: str, - amount_usd: float, - credits_added: float, - stripe_session_id: str, -) -> None: - """Track when a user successfully purchases credits. - - Args: - user_id: The ID of the user (Keycloak user ID) - amount_usd: The amount paid in USD (cents converted to dollars) - credits_added: The number of credits added to the user's account - stripe_session_id: The Stripe checkout session ID - """ - _init_posthog() - - if posthog is None: - return - - try: - posthog.capture( - distinct_id=user_id, - event='credits_purchased', - properties={ - 'user_id': user_id, - 'amount_usd': amount_usd, - 'credits_added': credits_added, - 'stripe_session_id': stripe_session_id, - }, - ) - logger.debug( - 'posthog_track', - extra={ - 'event': 'credits_purchased', - 'user_id': user_id, - 'amount_usd': amount_usd, - 'credits_added': credits_added, - }, - ) - except Exception as e: - logger.warning( - f'Failed to track credits_purchased to PostHog: {e}', - extra={ - 'user_id': user_id, - 'error': str(e), - }, - ) - - -def alias_user_identities( - keycloak_user_id: str, - git_login: str, -) -> None: - """Alias a user's Keycloak ID with their git provider login for unified tracking. - - This allows PostHog to link events tracked from the frontend (using git provider login) - with events tracked from the backend (using Keycloak user ID). 
- - PostHog Python alias syntax: alias(previous_id, distinct_id) - - previous_id: The old/previous distinct ID that will be merged - - distinct_id: The new/canonical distinct ID to merge into - - For our use case: - - Git provider login is the previous_id (first used in frontend, before backend auth) - - Keycloak user ID is the distinct_id (canonical backend ID) - - Result: All events with git login will be merged into Keycloak user ID - - Args: - keycloak_user_id: The Keycloak user ID (canonical distinct_id) - git_login: The git provider username (GitHub/GitLab/Bitbucket) to merge - - Reference: - https://github.com/PostHog/posthog-python/blob/master/posthog/client.py - """ - _init_posthog() - - if posthog is None: - return - - try: - # Merge git provider login into Keycloak user ID - # posthog.alias(previous_id, distinct_id) - official Python SDK signature - posthog.alias(git_login, keycloak_user_id) - logger.debug( - 'posthog_alias', - extra={ - 'previous_id': git_login, - 'distinct_id': keycloak_user_id, - }, - ) - except Exception as e: - logger.warning( - f'Failed to alias user identities in PostHog: {e}', - extra={ - 'keycloak_user_id': keycloak_user_id, - 'git_login': git_login, - 'error': str(e), - }, - ) diff --git a/poetry.lock b/poetry.lock index 0cc47afea7d7..23789d328587 100644 --- a/poetry.lock +++ b/poetry.lock @@ -254,14 +254,14 @@ files = [ [[package]] name = "anthropic" -version = "0.72.0" +version = "0.75.0" description = "The official Python library for the anthropic API" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "anthropic-0.72.0-py3-none-any.whl", hash = "sha256:0e9f5a7582f038cab8efbb4c959e49ef654a56bfc7ba2da51b5a7b8a84de2e4d"}, - {file = "anthropic-0.72.0.tar.gz", hash = "sha256:8971fe76dcffc644f74ac3883069beb1527641115ae0d6eb8fa21c1ce4082f7a"}, + {file = "anthropic-0.75.0-py3-none-any.whl", hash = "sha256:ea8317271b6c15d80225a9f3c670152746e88805a7a61e14d4a374577164965b"}, + {file = "anthropic-0.75.0.tar.gz", hash = "sha256:e8607422f4ab616db2ea5baacc215dd5f028da99ce2f022e33c7c535b29f3dfb"}, ] [package.dependencies] @@ -1205,34 +1205,37 @@ botocore = ["botocore"] [[package]] name = "browser-use" -version = "0.8.0" +version = "0.10.1" description = "Make websites accessible for AI agents" optional = false python-versions = "<4.0,>=3.11" groups = ["main"] files = [ - {file = "browser_use-0.8.0-py3-none-any.whl", hash = "sha256:b7c299e38ec1c1aec42a236cc6ad2268a366226940d6ff9d88ed461afd5a1cc3"}, - {file = "browser_use-0.8.0.tar.gz", hash = "sha256:2136eb3251424f712a08ee379c9337237c2f93b29b566807db599cf94e6abb5e"}, + {file = "browser_use-0.10.1-py3-none-any.whl", hash = "sha256:96e603bfc71098175342cdcb0592519e6f244412e740f0254e4389fdd82a977f"}, + {file = "browser_use-0.10.1.tar.gz", hash = "sha256:5f211ecfdf1f9fd186160f10df70dedd661821231e30f1bce40939787abab223"}, ] [package.dependencies] aiohttp = "3.12.15" -anthropic = ">=0.68.1,<1.0.0" +anthropic = ">=0.72.1,<1.0.0" anyio = ">=4.9.0" authlib = ">=1.6.0" bubus = ">=1.5.6" -cdp-use = ">=1.4.0" +cdp-use = ">=1.4.4" +click = ">=8.1.8" +cloudpickle = ">=3.1.1" google-api-core = ">=2.25.0" google-api-python-client = ">=2.174.0" google-auth = ">=2.40.3" google-auth-oauthlib = ">=1.2.2" -google-genai = ">=1.29.0,<2.0.0" +google-genai = ">=1.50.0,<2.0.0" groq = ">=0.30.0" -html2text = ">=2025.4.15" httpx = ">=0.28.1" +inquirerpy = ">=0.3.4" +markdownify = ">=1.2.0" mcp = ">=1.10.1" ollama = ">=0.5.1" -openai = ">=1.99.2,<2.0.0" +openai = ">=2.7.2,<3.0.0" pillow 
= ">=11.2.1" portalocker = ">=2.7.0,<3.0.0" posthog = ">=3.7.0" @@ -1241,19 +1244,24 @@ pydantic = ">=2.11.5" pyobjc = {version = ">=11.0", markers = "platform_system == \"darwin\""} pyotp = ">=2.9.0" pypdf = ">=5.7.0" +python-docx = ">=1.2.0" python-dotenv = ">=1.0.1" reportlab = ">=4.0.0" requests = ">=2.32.3" +rich = ">=14.0.0" screeninfo = {version = ">=0.8.1", markers = "platform_system != \"darwin\""} typing-extensions = ">=4.12.2" uuid7 = ">=0.1.0" [package.extras] -all = ["agentmail (==0.0.59)", "boto3 (>=1.38.45)", "botocore (>=1.37.23)", "click (>=8.1.8)", "imgcat (>=0.6.0)", "langchain-openai (>=0.3.26)", "rich (>=14.0.0)", "textual (>=3.2.0)"] +all = ["agentmail (==0.0.59)", "boto3 (>=1.38.45)", "botocore (>=1.37.23)", "imgcat (>=0.6.0)", "langchain-openai (>=0.3.26)", "oci (>=2.126.4)", "textual (>=3.2.0)"] aws = ["boto3 (>=1.38.45)"] -cli = ["click (>=8.1.8)", "rich (>=14.0.0)", "textual (>=3.2.0)"] -eval = ["anyio (>=4.9.0)", "browserbase (==1.4.0)", "datamodel-code-generator (>=0.26.0)", "hyperbrowser (==0.47.0)", "lmnr[all] (==0.7.17)", "psutil (>=7.0.0)"] +cli = ["textual (>=3.2.0)"] +cli-oci = ["oci (>=2.126.4)", "textual (>=3.2.0)"] +code = ["matplotlib (>=3.9.0)", "numpy (>=2.3.2)", "pandas (>=2.2.0)", "tabulate (>=0.9.0)"] +eval = ["anyio (>=4.9.0)", "datamodel-code-generator (>=0.26.0)", "lmnr[all] (==0.7.17)", "psutil (>=7.0.0)"] examples = ["agentmail (==0.0.59)", "botocore (>=1.37.23)", "imgcat (>=0.6.0)", "langchain-openai (>=0.3.26)"] +oci = ["oci (>=2.126.4)"] video = ["imageio[ffmpeg] (>=2.37.0)", "numpy (>=2.3.2)"] [[package]] @@ -1494,14 +1502,14 @@ files = [ [[package]] name = "cdp-use" -version = "1.4.3" +version = "1.4.4" description = "Type safe generator/client library for CDP" optional = false python-versions = ">=3.11" groups = ["main"] files = [ - {file = "cdp_use-1.4.3-py3-none-any.whl", hash = "sha256:c48664604470c2579aa1e677c3e3e7e24c4f300c54804c093d935abb50479ecd"}, - {file = "cdp_use-1.4.3.tar.gz", hash = "sha256:9029c04bdc49fbd3939d2bf1988ad8d88e260729c7d5e35c2f6c87591f5a10e9"}, + {file = "cdp_use-1.4.4-py3-none-any.whl", hash = "sha256:e37e80e067db2653d6fdf953d4ff9e5d80d75daa27b7c6d48c0261cccbef73e1"}, + {file = "cdp_use-1.4.4.tar.gz", hash = "sha256:330a848b517006eb9ad1dc468aa6434d913cf0c6918610760c36c3fdfdba0fab"}, ] [package.dependencies] @@ -3802,28 +3810,28 @@ testing = ["pytest"] [[package]] name = "google-genai" -version = "1.45.0" +version = "1.53.0" description = "GenAI Python SDK" optional = false -python-versions = ">=3.9" +python-versions = ">=3.10" groups = ["main"] files = [ - {file = "google_genai-1.45.0-py3-none-any.whl", hash = "sha256:e755295063e5fd5a4c44acff782a569e37fa8f76a6c75d0ede3375c70d916b7f"}, - {file = "google_genai-1.45.0.tar.gz", hash = "sha256:96ec32ae99a30b5a1b54cb874b577ec6e41b5d5b808bf0f10ed4620e867f9386"}, + {file = "google_genai-1.53.0-py3-none-any.whl", hash = "sha256:65a3f99e5c03c372d872cda7419f5940e723374bb12a2f3ffd5e3e56e8eb2094"}, + {file = "google_genai-1.53.0.tar.gz", hash = "sha256:938a26d22f3fd32c6eeeb4276ef204ef82884e63af9842ce3eac05ceb39cbd8d"}, ] [package.dependencies] anyio = ">=4.8.0,<5.0.0" -google-auth = ">=2.14.1,<3.0.0" +google-auth = {version = ">=2.14.1,<3.0.0", extras = ["requests"]} httpx = ">=0.28.1,<1.0.0" -pydantic = ">=2.0.0,<3.0.0" +pydantic = ">=2.9.0,<3.0.0" requests = ">=2.28.1,<3.0.0" tenacity = ">=8.2.3,<9.2.0" typing-extensions = ">=4.11.0,<5.0.0" websockets = ">=13.0.0,<15.1.0" [package.extras] -aiohttp = ["aiohttp (<4.0.0)"] +aiohttp = ["aiohttp (<3.13.3)"] local-tokenizer = 
["protobuf", "sentencepiece (>=0.2.0)"] [[package]] @@ -3991,67 +3999,71 @@ protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4 [[package]] name = "grpcio" -version = "1.72.1" +version = "1.67.1" description = "HTTP/2-based RPC framework" optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" groups = ["main"] files = [ - {file = "grpcio-1.72.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:ce2706ff37be7a6de68fbc4c3f8dde247cab48cc70fee5fedfbc9cd923b4ee5a"}, - {file = "grpcio-1.72.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:7db9e15ee7618fbea748176a67d347f3100fa92d36acccd0e7eeb741bc82f72a"}, - {file = "grpcio-1.72.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:8d6e7764181ba4a8b74aa78c98a89c9f3441068ebcee5d6f14c44578214e0be3"}, - {file = "grpcio-1.72.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:237bb619ba33594006025e6f114f62e60d9563afd6f8e89633ee384868e26687"}, - {file = "grpcio-1.72.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7f1d8a442fd242aa432c8e1b8411c79ebc409dad2c637614d726e226ce9ed0c"}, - {file = "grpcio-1.72.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f2359bd4bba85bf94fd9ab8802671b9637a6803bb673d221157a11523a52e6a8"}, - {file = "grpcio-1.72.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3269cfca37570a420a57a785f2a5d4234c5b12aced55f8843dafced2d3f8c9a6"}, - {file = "grpcio-1.72.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:06c023d86398714d6257194c21f2bc0b58a53ce45cee87dd3c54c7932c590e17"}, - {file = "grpcio-1.72.1-cp310-cp310-win32.whl", hash = "sha256:06dbe54eeea5f9dfb3e7ca2ff66c715ff5fc96b07a1feb322122fe14cb42f6aa"}, - {file = "grpcio-1.72.1-cp310-cp310-win_amd64.whl", hash = "sha256:ba593aa2cd52f4468ba29668c83f893d88c128198d6b1273ca788ef53e3ae5fe"}, - {file = "grpcio-1.72.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:4e112c083f90c330b0eaa78a633fb206d49c20c443926e827f8cac9eb9d2ea32"}, - {file = "grpcio-1.72.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:c6f7e3275832adab7384193f78b8c1a98b82541562fa08d7244e8a6b4b5c78a4"}, - {file = "grpcio-1.72.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:dd03c8847c47ef7ac5455aafdfb5e553ecf84f228282bd6106762b379f27c25c"}, - {file = "grpcio-1.72.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7497dbdf220b88b66004e2630fb2b1627df5e279db970d3cc20f70d39dce978d"}, - {file = "grpcio-1.72.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95c2cde3ae8ae901317c049394ed8d3c6964de6b814ae65fc68636a7337b63aa"}, - {file = "grpcio-1.72.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7a66cef4bc1db81a54108a849e95650da640c9bc1901957bf7d3b1eeb3251ee8"}, - {file = "grpcio-1.72.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fc0435ad45d540597f78978e3fd5515b448193f51f9065fb67dda566336e0f5f"}, - {file = "grpcio-1.72.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:524bad78d610fa1f9f316d47b3aab1ff89d438ba952ee34e3e335ca80a27ba96"}, - {file = "grpcio-1.72.1-cp311-cp311-win32.whl", hash = "sha256:409ee0abf7e74bbf88941046142452cf3d1f3863d34e11e8fd2b07375170c730"}, - {file = "grpcio-1.72.1-cp311-cp311-win_amd64.whl", hash = "sha256:ea483e408fac55569c11158c3e6d6d6a8c3b0f798b68f1c10db9b22c5996e19b"}, - {file = "grpcio-1.72.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:65a5ef28e5852bd281c6d01a923906e8036736e95e370acab8626fcbec041e67"}, - {file = "grpcio-1.72.1-cp312-cp312-macosx_11_0_universal2.whl", hash = 
"sha256:9e5c594a6c779d674204fb9bdaa1e7b71666ff10b34a62e7769fc6868b5d7511"}, - {file = "grpcio-1.72.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:d324f4bdb990d852d79b38c59a12d24fcd47cf3b1a38f2e4d2b6d0b1031bc818"}, - {file = "grpcio-1.72.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:841db55dd29cf2f4121b853b2f89813a1b6175163fbb92c5945fb1b0ca259ef2"}, - {file = "grpcio-1.72.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00da930aa2711b955a538e835096aa365a4b7f2701bdc2ce1febb242a103f8a1"}, - {file = "grpcio-1.72.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4b657773480267fbb7ad733fa85abc103c52ab62e5bc97791faf82c53836eefc"}, - {file = "grpcio-1.72.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a08b483f17a6abca2578283a7ae3aa8d4d90347242b0de2898bdb27395c3f20b"}, - {file = "grpcio-1.72.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:299f3ea4e03c1d0548f4a174b48d612412f92c667f2100e30a079ab76fdaa813"}, - {file = "grpcio-1.72.1-cp312-cp312-win32.whl", hash = "sha256:addc721a3708ff789da1bf69876018dc730c1ec9d3d3cb6912776a00c535a5bc"}, - {file = "grpcio-1.72.1-cp312-cp312-win_amd64.whl", hash = "sha256:22ea2aa92a60dff231ba5fcd7f0220a33c2218e556009996f858eeafe294d1c2"}, - {file = "grpcio-1.72.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:294be6e9c323a197434569a41e0fb5b5aa0962fd5d55a3dc890ec5df985f611a"}, - {file = "grpcio-1.72.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:41ec164dac8df2862f67457d9cdf8d8f8b6a4ca475a3ed1ba6547fff98d93717"}, - {file = "grpcio-1.72.1-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:761736f75c6ddea3732d97eaabe70c616271f5f542a8be95515135fdd1a638f6"}, - {file = "grpcio-1.72.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:082003cb93618964c111c70d69b60ac0dc6566d4c254c9b2a775faa2965ba8f8"}, - {file = "grpcio-1.72.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8660f736da75424949c14f7c8b1ac60a25b2f37cabdec95181834b405373e8a7"}, - {file = "grpcio-1.72.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:2ada1abe2ad122b42407b2bfd79d6706a4940d4797f44bd740f5c98ca1ecda9b"}, - {file = "grpcio-1.72.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:0db2766d0c482ee740abbe7d00a06cc4fb54f7e5a24d3cf27c3352be18a2b1e8"}, - {file = "grpcio-1.72.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4bdb404d9c2187260b34e2b22783c204fba8a9023a166cf77376190d9cf5a08"}, - {file = "grpcio-1.72.1-cp313-cp313-win32.whl", hash = "sha256:bb64722c3124c906a5b66e50a90fd36442642f653ba88a24f67d08e94bca59f3"}, - {file = "grpcio-1.72.1-cp313-cp313-win_amd64.whl", hash = "sha256:329cc6ff5b431df9614340d3825b066a1ff0a5809a01ba2e976ef48c65a0490b"}, - {file = "grpcio-1.72.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:8941b83addd503c1982090b4631804d0ff1edbbc6c85c9c20ed503b1dc65fef9"}, - {file = "grpcio-1.72.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:d29b80290c5eda561a4c291d6d5b4315a2a5095ab37061118d6e0781858aca0a"}, - {file = "grpcio-1.72.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:4ca56d955564db749c9c6d75e9c4c777854e22b2482d247fb6c5a02d5f28ea78"}, - {file = "grpcio-1.72.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b08a3ef14d2b01eef13882c6d3a2d8fb5fcd73db81bd1e3ab69d4ee75215433a"}, - {file = "grpcio-1.72.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd7df49801b3b323e4a21047979e3834cd286b32ee5ceee46f5217826274721f"}, - {file = 
"grpcio-1.72.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9717617ba2ff65c058ef53b0d5e50f03e8350f0c5597f93bb5c980a31db990c8"}, - {file = "grpcio-1.72.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:212db80b1e8aa7792d51269bfb32164e2333a9bb273370ace3ed2a378505cb01"}, - {file = "grpcio-1.72.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a0d19947d4480af5f363f077f221e665931f479e2604280ac4eafe6daa71f77"}, - {file = "grpcio-1.72.1-cp39-cp39-win32.whl", hash = "sha256:7622ef647dc911ed010a817d9be501df4ae83495b8e5cdd35b555bdcf3880a3e"}, - {file = "grpcio-1.72.1-cp39-cp39-win_amd64.whl", hash = "sha256:f8d8fa7cd2a7f1b4207e215dec8bc07f1202682d9a216ebe028185c15faece30"}, - {file = "grpcio-1.72.1.tar.gz", hash = "sha256:87f62c94a40947cec1a0f91f95f5ba0aa8f799f23a1d42ae5be667b6b27b959c"}, + {file = "grpcio-1.67.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:8b0341d66a57f8a3119b77ab32207072be60c9bf79760fa609c5609f2deb1f3f"}, + {file = "grpcio-1.67.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:f5a27dddefe0e2357d3e617b9079b4bfdc91341a91565111a21ed6ebbc51b22d"}, + {file = "grpcio-1.67.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:43112046864317498a33bdc4797ae6a268c36345a910de9b9c17159d8346602f"}, + {file = "grpcio-1.67.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9b929f13677b10f63124c1a410994a401cdd85214ad83ab67cc077fc7e480f0"}, + {file = "grpcio-1.67.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7d1797a8a3845437d327145959a2c0c47c05947c9eef5ff1a4c80e499dcc6fa"}, + {file = "grpcio-1.67.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0489063974d1452436139501bf6b180f63d4977223ee87488fe36858c5725292"}, + {file = "grpcio-1.67.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9fd042de4a82e3e7aca44008ee2fb5da01b3e5adb316348c21980f7f58adc311"}, + {file = "grpcio-1.67.1-cp310-cp310-win32.whl", hash = "sha256:638354e698fd0c6c76b04540a850bf1db27b4d2515a19fcd5cf645c48d3eb1ed"}, + {file = "grpcio-1.67.1-cp310-cp310-win_amd64.whl", hash = "sha256:608d87d1bdabf9e2868b12338cd38a79969eaf920c89d698ead08f48de9c0f9e"}, + {file = "grpcio-1.67.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:7818c0454027ae3384235a65210bbf5464bd715450e30a3d40385453a85a70cb"}, + {file = "grpcio-1.67.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ea33986b70f83844cd00814cee4451055cd8cab36f00ac64a31f5bb09b31919e"}, + {file = "grpcio-1.67.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:c7a01337407dd89005527623a4a72c5c8e2894d22bead0895306b23c6695698f"}, + {file = "grpcio-1.67.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80b866f73224b0634f4312a4674c1be21b2b4afa73cb20953cbbb73a6b36c3cc"}, + {file = "grpcio-1.67.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9fff78ba10d4250bfc07a01bd6254a6d87dc67f9627adece85c0b2ed754fa96"}, + {file = "grpcio-1.67.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8a23cbcc5bb11ea7dc6163078be36c065db68d915c24f5faa4f872c573bb400f"}, + {file = "grpcio-1.67.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1a65b503d008f066e994f34f456e0647e5ceb34cfcec5ad180b1b44020ad4970"}, + {file = "grpcio-1.67.1-cp311-cp311-win32.whl", hash = "sha256:e29ca27bec8e163dca0c98084040edec3bc49afd10f18b412f483cc68c712744"}, + {file = "grpcio-1.67.1-cp311-cp311-win_amd64.whl", hash = "sha256:786a5b18544622bfb1e25cc08402bd44ea83edfb04b93798d85dca4d1a0b5be5"}, + {file = "grpcio-1.67.1-cp312-cp312-linux_armv7l.whl", hash = 
"sha256:267d1745894200e4c604958da5f856da6293f063327cb049a51fe67348e4f953"}, + {file = "grpcio-1.67.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:85f69fdc1d28ce7cff8de3f9c67db2b0ca9ba4449644488c1e0303c146135ddb"}, + {file = "grpcio-1.67.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f26b0b547eb8d00e195274cdfc63ce64c8fc2d3e2d00b12bf468ece41a0423a0"}, + {file = "grpcio-1.67.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4422581cdc628f77302270ff839a44f4c24fdc57887dc2a45b7e53d8fc2376af"}, + {file = "grpcio-1.67.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d7616d2ded471231c701489190379e0c311ee0a6c756f3c03e6a62b95a7146e"}, + {file = "grpcio-1.67.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8a00efecde9d6fcc3ab00c13f816313c040a28450e5e25739c24f432fc6d3c75"}, + {file = "grpcio-1.67.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:699e964923b70f3101393710793289e42845791ea07565654ada0969522d0a38"}, + {file = "grpcio-1.67.1-cp312-cp312-win32.whl", hash = "sha256:4e7b904484a634a0fff132958dabdb10d63e0927398273917da3ee103e8d1f78"}, + {file = "grpcio-1.67.1-cp312-cp312-win_amd64.whl", hash = "sha256:5721e66a594a6c4204458004852719b38f3d5522082be9061d6510b455c90afc"}, + {file = "grpcio-1.67.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:aa0162e56fd10a5547fac8774c4899fc3e18c1aa4a4759d0ce2cd00d3696ea6b"}, + {file = "grpcio-1.67.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:beee96c8c0b1a75d556fe57b92b58b4347c77a65781ee2ac749d550f2a365dc1"}, + {file = "grpcio-1.67.1-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:a93deda571a1bf94ec1f6fcda2872dad3ae538700d94dc283c672a3b508ba3af"}, + {file = "grpcio-1.67.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e6f255980afef598a9e64a24efce87b625e3e3c80a45162d111a461a9f92955"}, + {file = "grpcio-1.67.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e838cad2176ebd5d4a8bb03955138d6589ce9e2ce5d51c3ada34396dbd2dba8"}, + {file = "grpcio-1.67.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a6703916c43b1d468d0756c8077b12017a9fcb6a1ef13faf49e67d20d7ebda62"}, + {file = "grpcio-1.67.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:917e8d8994eed1d86b907ba2a61b9f0aef27a2155bca6cbb322430fc7135b7bb"}, + {file = "grpcio-1.67.1-cp313-cp313-win32.whl", hash = "sha256:e279330bef1744040db8fc432becc8a727b84f456ab62b744d3fdb83f327e121"}, + {file = "grpcio-1.67.1-cp313-cp313-win_amd64.whl", hash = "sha256:fa0c739ad8b1996bd24823950e3cb5152ae91fca1c09cc791190bf1627ffefba"}, + {file = "grpcio-1.67.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:178f5db771c4f9a9facb2ab37a434c46cb9be1a75e820f187ee3d1e7805c4f65"}, + {file = "grpcio-1.67.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0f3e49c738396e93b7ba9016e153eb09e0778e776df6090c1b8c91877cc1c426"}, + {file = "grpcio-1.67.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:24e8a26dbfc5274d7474c27759b54486b8de23c709d76695237515bc8b5baeab"}, + {file = "grpcio-1.67.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b6c16489326d79ead41689c4b84bc40d522c9a7617219f4ad94bc7f448c5085"}, + {file = "grpcio-1.67.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e6a4dcf5af7bbc36fd9f81c9f372e8ae580870a9e4b6eafe948cd334b81cf3"}, + {file = "grpcio-1.67.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:95b5f2b857856ed78d72da93cd7d09b6db8ef30102e5e7fe0961fe4d9f7d48e8"}, + {file = 
"grpcio-1.67.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b49359977c6ec9f5d0573ea4e0071ad278ef905aa74e420acc73fd28ce39e9ce"}, + {file = "grpcio-1.67.1-cp38-cp38-win32.whl", hash = "sha256:f5b76ff64aaac53fede0cc93abf57894ab2a7362986ba22243d06218b93efe46"}, + {file = "grpcio-1.67.1-cp38-cp38-win_amd64.whl", hash = "sha256:804c6457c3cd3ec04fe6006c739579b8d35c86ae3298ffca8de57b493524b771"}, + {file = "grpcio-1.67.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:a25bdea92b13ff4d7790962190bf6bf5c4639876e01c0f3dda70fc2769616335"}, + {file = "grpcio-1.67.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cdc491ae35a13535fd9196acb5afe1af37c8237df2e54427be3eecda3653127e"}, + {file = "grpcio-1.67.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:85f862069b86a305497e74d0dc43c02de3d1d184fc2c180993aa8aa86fbd19b8"}, + {file = "grpcio-1.67.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec74ef02010186185de82cc594058a3ccd8d86821842bbac9873fd4a2cf8be8d"}, + {file = "grpcio-1.67.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01f616a964e540638af5130469451cf580ba8c7329f45ca998ab66e0c7dcdb04"}, + {file = "grpcio-1.67.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:299b3d8c4f790c6bcca485f9963b4846dd92cf6f1b65d3697145d005c80f9fe8"}, + {file = "grpcio-1.67.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:60336bff760fbb47d7e86165408126f1dded184448e9a4c892189eb7c9d3f90f"}, + {file = "grpcio-1.67.1-cp39-cp39-win32.whl", hash = "sha256:5ed601c4c6008429e3d247ddb367fe8c7259c355757448d7c1ef7bd4a6739e8e"}, + {file = "grpcio-1.67.1-cp39-cp39-win_amd64.whl", hash = "sha256:5db70d32d6703b89912af16d6d45d78406374a8b8ef0d28140351dd0ec610e98"}, + {file = "grpcio-1.67.1.tar.gz", hash = "sha256:3dc2ed4cabea4dc14d5e708c2b426205956077cc5de419b4d4079315017e9732"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.72.1)"] +protobuf = ["grpcio-tools (>=1.67.1)"] [[package]] name = "grpcio-status" @@ -4434,6 +4446,25 @@ files = [ {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, ] +[[package]] +name = "inquirerpy" +version = "0.3.4" +description = "Python port of Inquirer.js (A collection of common interactive command-line user interfaces)" +optional = false +python-versions = ">=3.7,<4.0" +groups = ["main"] +files = [ + {file = "InquirerPy-0.3.4-py3-none-any.whl", hash = "sha256:c65fdfbac1fa00e3ee4fb10679f4d3ed7a012abf4833910e63c295827fe2a7d4"}, + {file = "InquirerPy-0.3.4.tar.gz", hash = "sha256:89d2ada0111f337483cb41ae31073108b2ec1e618a49d7110b0d7ade89fc197e"}, +] + +[package.dependencies] +pfzy = ">=0.3.1,<0.4.0" +prompt-toolkit = ">=3.0.1,<4.0.0" + +[package.extras] +docs = ["Sphinx (>=4.1.2,<5.0.0)", "furo (>=2021.8.17-beta.43,<2022.0.0)", "myst-parser (>=0.15.1,<0.16.0)", "sphinx-autobuild (>=2021.3.14,<2022.0.0)", "sphinx-copybutton (>=0.4.0,<0.5.0)"] + [[package]] name = "installer" version = "0.7.0" @@ -5609,25 +5640,26 @@ types-tqdm = "*" [[package]] name = "litellm" -version = "1.77.7" +version = "1.80.7" description = "Library to easily interface with LLM API providers" optional = false -python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8" +python-versions = "<4.0,>=3.9" groups = ["main"] files = [ - {file = "litellm-1.77.7-py3-none-any.whl", hash = "sha256:1b3a1b17bd521a0ad25226fb62a912602c803922aabb4a16adf83834673be574"}, - {file = "litellm-1.77.7.tar.gz", hash = 
"sha256:e3398fb2575b98726e787c0a1481daed5938d58cafdcd96fbca80c312221af3e"}, + {file = "litellm-1.80.7-py3-none-any.whl", hash = "sha256:f7d993f78c1e0e4e1202b2a925cc6540b55b6e5fb055dd342d88b145ab3102ed"}, + {file = "litellm-1.80.7.tar.gz", hash = "sha256:3977a8d195aef842d01c18bf9e22984829363c6a4b54daf9a43c9dd9f190b42c"}, ] [package.dependencies] aiohttp = ">=3.10" click = "*" fastuuid = ">=0.13.0" +grpcio = ">=1.62.3,<1.68.0" httpx = ">=0.23.0" importlib-metadata = ">=6.8.0" jinja2 = ">=3.1.2,<4.0.0" jsonschema = ">=4.22.0,<5.0.0" -openai = ">=1.99.5" +openai = ">=2.8.0" pydantic = ">=2.5.0,<3.0.0" python-dotenv = ">=0.2.0" tiktoken = ">=0.7.0" @@ -5635,22 +5667,22 @@ tokenizers = "*" [package.extras] caching = ["diskcache (>=5.6.1,<6.0.0)"] -extra-proxy = ["azure-identity (>=1.15.0,<2.0.0)", "azure-keyvault-secrets (>=4.8.0,<5.0.0)", "google-cloud-iam (>=2.19.1,<3.0.0)", "google-cloud-kms (>=2.21.3,<3.0.0)", "prisma (==0.11.0)", "redisvl (>=0.4.1,<0.5.0) ; python_version >= \"3.9\" and python_version < \"3.14\"", "resend (>=0.8.0,<0.9.0)"] +extra-proxy = ["azure-identity (>=1.15.0,<2.0.0) ; python_version >= \"3.9\"", "azure-keyvault-secrets (>=4.8.0,<5.0.0)", "google-cloud-iam (>=2.19.1,<3.0.0)", "google-cloud-kms (>=2.21.3,<3.0.0)", "prisma (==0.11.0)", "redisvl (>=0.4.1,<0.5.0) ; python_version >= \"3.9\" and python_version < \"3.14\"", "resend (>=0.8.0)"] mlflow = ["mlflow (>3.1.4) ; python_version >= \"3.10\""] -proxy = ["PyJWT (>=2.8.0,<3.0.0)", "apscheduler (>=3.10.4,<4.0.0)", "azure-identity (>=1.15.0,<2.0.0)", "azure-storage-blob (>=12.25.1,<13.0.0)", "backoff", "boto3 (==1.36.0)", "cryptography", "fastapi (>=0.115.5,<0.116.0)", "fastapi-sso (>=0.16.0,<0.17.0)", "gunicorn (>=23.0.0,<24.0.0)", "litellm-enterprise (==0.1.20)", "litellm-proxy-extras (==0.2.25)", "mcp (>=1.10.0,<2.0.0) ; python_version >= \"3.10\"", "orjson (>=3.9.7,<4.0.0)", "polars (>=1.31.0,<2.0.0) ; python_version >= \"3.10\"", "pynacl (>=1.5.0,<2.0.0)", "python-multipart (>=0.0.18,<0.0.19)", "pyyaml (>=6.0.1,<7.0.0)", "rich (==13.7.1)", "rq", "uvicorn (>=0.29.0,<0.30.0)", "uvloop (>=0.21.0,<0.22.0) ; sys_platform != \"win32\"", "websockets (>=13.1.0,<14.0.0)"] -semantic-router = ["semantic-router ; python_version >= \"3.9\""] +proxy = ["PyJWT (>=2.10.1,<3.0.0) ; python_version >= \"3.9\"", "apscheduler (>=3.10.4,<4.0.0)", "azure-identity (>=1.15.0,<2.0.0) ; python_version >= \"3.9\"", "azure-storage-blob (>=12.25.1,<13.0.0)", "backoff", "boto3 (==1.36.0)", "cryptography", "fastapi (>=0.120.1)", "fastapi-sso (>=0.16.0,<0.17.0)", "gunicorn (>=23.0.0,<24.0.0)", "litellm-enterprise (==0.1.22)", "litellm-proxy-extras (==0.4.9)", "mcp (>=1.21.2,<2.0.0) ; python_version >= \"3.10\"", "orjson (>=3.9.7,<4.0.0)", "polars (>=1.31.0,<2.0.0) ; python_version >= \"3.10\"", "pynacl (>=1.5.0,<2.0.0)", "python-multipart (>=0.0.18,<0.0.19)", "pyyaml (>=6.0.1,<7.0.0)", "rich (==13.7.1)", "rq", "soundfile (>=0.12.1,<0.13.0)", "uvicorn (>=0.31.1,<0.32.0)", "uvloop (>=0.21.0,<0.22.0) ; sys_platform != \"win32\"", "websockets (>=15.0.1,<16.0.0)"] +semantic-router = ["semantic-router (>=0.1.12) ; python_version >= \"3.9\" and python_version < \"3.14\""] utils = ["numpydoc"] [[package]] name = "lmnr" -version = "0.7.20" +version = "0.7.24" description = "Python SDK for Laminar" optional = false python-versions = "<4,>=3.10" groups = ["main"] files = [ - {file = "lmnr-0.7.20-py3-none-any.whl", hash = "sha256:5f9fa7444e6f96c25e097f66484ff29e632bdd1de0e9346948bf5595f4a8af38"}, - {file = "lmnr-0.7.20.tar.gz", hash = 
"sha256:1f484cd618db2d71af65f90a0b8b36d20d80dc91a5138b811575c8677bf7c4fd"}, + {file = "lmnr-0.7.24-py3-none-any.whl", hash = "sha256:ad780d4a62ece897048811f3368639c240a9329ab31027da8c96545137a3a08a"}, + {file = "lmnr-0.7.24.tar.gz", hash = "sha256:aa6973f46fc4ba95c9061c1feceb58afc02eb43c9376c21e32545371ff6123d7"}, ] [package.dependencies] @@ -5673,14 +5705,15 @@ tqdm = ">=4.0" [package.extras] alephalpha = ["opentelemetry-instrumentation-alephalpha (>=0.47.1)"] -all = ["opentelemetry-instrumentation-alephalpha (>=0.47.1)", "opentelemetry-instrumentation-bedrock (>=0.47.1)", "opentelemetry-instrumentation-chromadb (>=0.47.1)", "opentelemetry-instrumentation-cohere (>=0.47.1)", "opentelemetry-instrumentation-crewai (>=0.47.1)", "opentelemetry-instrumentation-haystack (>=0.47.1)", "opentelemetry-instrumentation-lancedb (>=0.47.1)", "opentelemetry-instrumentation-langchain (>=0.47.1)", "opentelemetry-instrumentation-llamaindex (>=0.47.1)", "opentelemetry-instrumentation-marqo (>=0.47.1)", "opentelemetry-instrumentation-mcp (>=0.47.1)", "opentelemetry-instrumentation-milvus (>=0.47.1)", "opentelemetry-instrumentation-mistralai (>=0.47.1)", "opentelemetry-instrumentation-ollama (>=0.47.1)", "opentelemetry-instrumentation-pinecone (>=0.47.1)", "opentelemetry-instrumentation-qdrant (>=0.47.1)", "opentelemetry-instrumentation-replicate (>=0.47.1)", "opentelemetry-instrumentation-sagemaker (>=0.47.1)", "opentelemetry-instrumentation-together (>=0.47.1)", "opentelemetry-instrumentation-transformers (>=0.47.1)", "opentelemetry-instrumentation-vertexai (>=0.47.1)", "opentelemetry-instrumentation-watsonx (>=0.47.1)", "opentelemetry-instrumentation-weaviate (>=0.47.1)"] +all = ["opentelemetry-instrumentation-alephalpha (>=0.47.1)", "opentelemetry-instrumentation-bedrock (>=0.47.1)", "opentelemetry-instrumentation-chromadb (>=0.47.1)", "opentelemetry-instrumentation-cohere (>=0.47.1)", "opentelemetry-instrumentation-crewai (>=0.47.1)", "opentelemetry-instrumentation-haystack (>=0.47.1)", "opentelemetry-instrumentation-lancedb (>=0.47.1)", "opentelemetry-instrumentation-langchain (>=0.47.1,<0.48.0)", "opentelemetry-instrumentation-llamaindex (>=0.47.1)", "opentelemetry-instrumentation-marqo (>=0.47.1)", "opentelemetry-instrumentation-mcp (>=0.47.1)", "opentelemetry-instrumentation-milvus (>=0.47.1)", "opentelemetry-instrumentation-mistralai (>=0.47.1)", "opentelemetry-instrumentation-ollama (>=0.47.1)", "opentelemetry-instrumentation-pinecone (>=0.47.1)", "opentelemetry-instrumentation-qdrant (>=0.47.1)", "opentelemetry-instrumentation-replicate (>=0.47.1)", "opentelemetry-instrumentation-sagemaker (>=0.47.1)", "opentelemetry-instrumentation-together (>=0.47.1)", "opentelemetry-instrumentation-transformers (>=0.47.1)", "opentelemetry-instrumentation-vertexai (>=0.47.1)", "opentelemetry-instrumentation-watsonx (>=0.47.1)", "opentelemetry-instrumentation-weaviate (>=0.47.1)"] bedrock = ["opentelemetry-instrumentation-bedrock (>=0.47.1)"] chromadb = ["opentelemetry-instrumentation-chromadb (>=0.47.1)"] +claude-agent-sdk = ["lmnr-claude-code-proxy (>=0.1.0a5)"] cohere = ["opentelemetry-instrumentation-cohere (>=0.47.1)"] crewai = ["opentelemetry-instrumentation-crewai (>=0.47.1)"] haystack = ["opentelemetry-instrumentation-haystack (>=0.47.1)"] lancedb = ["opentelemetry-instrumentation-lancedb (>=0.47.1)"] -langchain = ["opentelemetry-instrumentation-langchain (>=0.47.1)"] +langchain = ["opentelemetry-instrumentation-langchain (>=0.47.1,<0.48.0)"] llamaindex = ["opentelemetry-instrumentation-llamaindex 
(>=0.47.1)"] marqo = ["opentelemetry-instrumentation-marqo (>=0.47.1)"] mcp = ["opentelemetry-instrumentation-mcp (>=0.47.1)"] @@ -5783,8 +5816,11 @@ files = [ {file = "lxml-5.4.0-cp36-cp36m-win_amd64.whl", hash = "sha256:7ce1a171ec325192c6a636b64c94418e71a1964f56d002cc28122fceff0b6121"}, {file = "lxml-5.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:795f61bcaf8770e1b37eec24edf9771b307df3af74d1d6f27d812e15a9ff3872"}, {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29f451a4b614a7b5b6c2e043d7b64a15bd8304d7e767055e8ab68387a8cacf4e"}, + {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:891f7f991a68d20c75cb13c5c9142b2a3f9eb161f1f12a9489c82172d1f133c0"}, {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4aa412a82e460571fad592d0f93ce9935a20090029ba08eca05c614f99b0cc92"}, + {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:ac7ba71f9561cd7d7b55e1ea5511543c0282e2b6450f122672a2694621d63b7e"}, {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:c5d32f5284012deaccd37da1e2cd42f081feaa76981f0eaa474351b68df813c5"}, + {file = "lxml-5.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:ce31158630a6ac85bddd6b830cffd46085ff90498b397bd0a259f59d27a12188"}, {file = "lxml-5.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:31e63621e073e04697c1b2d23fcb89991790eef370ec37ce4d5d469f40924ed6"}, {file = "lxml-5.4.0-cp37-cp37m-win32.whl", hash = "sha256:be2ba4c3c5b7900246a8f866580700ef0d538f2ca32535e991027bdaba944063"}, {file = "lxml-5.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:09846782b1ef650b321484ad429217f5154da4d6e786636c38e434fa32e94e49"}, @@ -5905,14 +5941,14 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markdownify" -version = "1.1.0" +version = "1.2.2" description = "Convert HTML to markdown." 
optional = false python-versions = "*" groups = ["main"] files = [ - {file = "markdownify-1.1.0-py3-none-any.whl", hash = "sha256:32a5a08e9af02c8a6528942224c91b933b4bd2c7d078f9012943776fc313eeef"}, - {file = "markdownify-1.1.0.tar.gz", hash = "sha256:449c0bbbf1401c5112379619524f33b63490a8fa479456d41de9dc9e37560ebd"}, + {file = "markdownify-1.2.2-py3-none-any.whl", hash = "sha256:3f02d3cc52714084d6e589f70397b6fc9f2f3a8531481bf35e8cc39f975e186a"}, + {file = "markdownify-1.2.2.tar.gz", hash = "sha256:b274f1b5943180b031b699b199cbaeb1e2ac938b75851849a31fd0c3d6603d09"}, ] [package.dependencies] @@ -7188,28 +7224,28 @@ pydantic = ">=2.9" [[package]] name = "openai" -version = "1.99.9" +version = "2.8.0" description = "The official Python library for the openai API" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main", "evaluation"] files = [ - {file = "openai-1.99.9-py3-none-any.whl", hash = "sha256:9dbcdb425553bae1ac5d947147bebbd630d91bbfc7788394d4c4f3a35682ab3a"}, - {file = "openai-1.99.9.tar.gz", hash = "sha256:f2082d155b1ad22e83247c3de3958eb4255b20ccf4a1de2e6681b6957b554e92"}, + {file = "openai-2.8.0-py3-none-any.whl", hash = "sha256:ba975e347f6add2fe13529ccb94d54a578280e960765e5224c34b08d7e029ddf"}, + {file = "openai-2.8.0.tar.gz", hash = "sha256:4851908f6d6fcacbd47ba659c5ac084f7725b752b6bfa1e948b6fbfc111a6bad"}, ] [package.dependencies] anyio = ">=3.5.0,<5" distro = ">=1.7.0,<2" httpx = ">=0.23.0,<1" -jiter = ">=0.4.0,<1" +jiter = ">=0.10.0,<1" pydantic = ">=1.9.0,<3" sniffio = "*" tqdm = ">4" typing-extensions = ">=4.11,<5" [package.extras] -aiohttp = ["aiohttp", "httpx-aiohttp (>=0.1.8)"] +aiohttp = ["aiohttp", "httpx-aiohttp (>=0.1.9)"] datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] realtime = ["websockets (>=13,<16)"] voice-helpers = ["numpy (>=2.0.2)", "sounddevice (>=0.5.1)"] @@ -7344,54 +7380,46 @@ llama = ["llama-index (>=0.12.29,<0.13.0)", "llama-index-core (>=0.12.29,<0.13.0 [[package]] name = "openhands-agent-server" -version = "1.1.0" +version = "1.6.0" description = "OpenHands Agent Server - REST/WebSocket interface for OpenHands AI Agent" optional = false python-versions = ">=3.12" groups = ["main"] files = [ - {file = "openhands_agent_server-1.1.0-py3-none-any.whl", hash = "sha256:59a856883df23488c0723e47655ef21649a321fcd4709a25a4690866eff6ac88"}, - {file = "openhands_agent_server-1.1.0.tar.gz", hash = "sha256:e39bebd39afd45cfcfd765005e7c4e5409e46678bd7612ae20bae79f7057b935"}, + {file = "openhands_agent_server-1.6.0-py3-none-any.whl", hash = "sha256:e6ae865ac3e7a96b234e10a0faad23f6210e025bbf7721cb66bc7a71d160848c"}, + {file = "openhands_agent_server-1.6.0.tar.gz", hash = "sha256:44ce7694ae2d4bb0666d318ef13e6618bd4dc73022c60354839fe6130e67d02a"}, ] -develop = false [package.dependencies] aiosqlite = ">=0.19" alembic = ">=1.13" docker = ">=7.1,<8" fastapi = ">=0.104" +openhands-sdk = "*" pydantic = ">=2" sqlalchemy = ">=2" uvicorn = ">=0.31.1" websockets = ">=12" wsproto = ">=1.2.0" -[package.source] -type = "git" -url = "https://github.com/OpenHands/agent-sdk.git" -reference = "15f565b8ac38876e40dc05c08e2b04ccaae4a66d" -resolved_reference = "15f565b8ac38876e40dc05c08e2b04ccaae4a66d" -subdirectory = "openhands-agent-server" - [[package]] name = "openhands-sdk" -version = "1.1.0" +version = "1.6.0" description = "OpenHands SDK - Core functionality for building AI agents" optional = false python-versions = ">=3.12" groups = ["main"] files = [ - {file = "openhands_sdk-1.1.0-py3-none-any.whl", hash = 
"sha256:4a984ce1687a48cf99a67fdf3d37b116f8b2840743d4807810b5024af6a1d57e"}, - {file = "openhands_sdk-1.1.0.tar.gz", hash = "sha256:855e0d8f3657205e4119e50520c17e65b3358b1a923f7a051a82512a54bf426c"}, + {file = "openhands_sdk-1.6.0-py3-none-any.whl", hash = "sha256:94d2f87fb35406373da6728ae2d88584137f9e9b67fa0e940444c72f2e44e7d3"}, + {file = "openhands_sdk-1.6.0.tar.gz", hash = "sha256:f45742350e3874a7f5b08befc4a9d5adc7e4454f7ab5f8391c519eee3116090f"}, ] -develop = false [package.dependencies] deprecation = ">=2.1.0" fastmcp = ">=2.11.3" httpx = ">=0.27.0" -litellm = ">=1.77.7.dev9" -lmnr = ">=0.7.20" +litellm = ">=1.80.7" +lmnr = ">=0.7.24" pydantic = ">=2.11.7" python-frontmatter = ">=1.1.0" python-json-logger = ">=3.3.0" @@ -7401,25 +7429,17 @@ websockets = ">=12" [package.extras] boto3 = ["boto3 (>=1.35.0)"] -[package.source] -type = "git" -url = "https://github.com/OpenHands/agent-sdk.git" -reference = "15f565b8ac38876e40dc05c08e2b04ccaae4a66d" -resolved_reference = "15f565b8ac38876e40dc05c08e2b04ccaae4a66d" -subdirectory = "openhands-sdk" - [[package]] name = "openhands-tools" -version = "1.1.0" +version = "1.6.0" description = "OpenHands Tools - Runtime tools for AI agents" optional = false python-versions = ">=3.12" groups = ["main"] files = [ - {file = "openhands_tools-1.1.0-py3-none-any.whl", hash = "sha256:767d6746f05edade49263aa24450a037485a3dc23379f56917ef19aad22033f9"}, - {file = "openhands_tools-1.1.0.tar.gz", hash = "sha256:c2fadaa4f4e16e9a3df5781ea847565dcae7171584f09ef7c0e1d97c8dfc83f6"}, + {file = "openhands_tools-1.6.0-py3-none-any.whl", hash = "sha256:176556d44186536751b23fe052d3505492cc2afb8d52db20fb7a2cc0169cd57a"}, + {file = "openhands_tools-1.6.0.tar.gz", hash = "sha256:d07ba31050fd4a7891a4c48388aa53ce9f703e17064ddbd59146d6c77e5980b3"}, ] -develop = false [package.dependencies] bashlex = ">=0.18" @@ -7430,13 +7450,7 @@ func-timeout = ">=4.3.5" libtmux = ">=0.46.2" openhands-sdk = "*" pydantic = ">=2.11.7" - -[package.source] -type = "git" -url = "https://github.com/OpenHands/agent-sdk.git" -reference = "15f565b8ac38876e40dc05c08e2b04ccaae4a66d" -resolved_reference = "15f565b8ac38876e40dc05c08e2b04ccaae4a66d" -subdirectory = "openhands-tools" +tom-swe = ">=1.0.3" [[package]] name = "openpyxl" @@ -7949,6 +7963,21 @@ files = [ [package.dependencies] ptyprocess = ">=0.5" +[[package]] +name = "pfzy" +version = "0.3.4" +description = "Python port of the fzy fuzzy string matching algorithm" +optional = false +python-versions = ">=3.7,<4.0" +groups = ["main"] +files = [ + {file = "pfzy-0.3.4-py3-none-any.whl", hash = "sha256:5f50d5b2b3207fa72e7ec0ef08372ef652685470974a107d0d4999fc5a903a96"}, + {file = "pfzy-0.3.4.tar.gz", hash = "sha256:717ea765dd10b63618e7298b2d98efd819e0b30cd5905c9707223dceeb94b3f1"}, +] + +[package.extras] +docs = ["Sphinx (>=4.1.2,<5.0.0)", "furo (>=2021.8.17-beta.43,<2022.0.0)", "myst-parser (>=0.15.1,<0.16.0)", "sphinx-autobuild (>=2021.3.14,<2022.0.0)", "sphinx-copybutton (>=0.4.0,<0.5.0)"] + [[package]] name = "pg8000" version = "1.31.5" @@ -14990,6 +15019,31 @@ dev = ["tokenizers[testing]"] docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests", "ruff"] +[[package]] +name = "tom-swe" +version = "1.0.3" +description = "Theory of Mind modeling for Software Engineering assistants" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "tom_swe-1.0.3-py3-none-any.whl", hash = "sha256:7b1172b29eb5c8fb7f1975016e7b6a238511b9ac2a7a980bd400dcb4e29773f2"}, 
+ {file = "tom_swe-1.0.3.tar.gz", hash = "sha256:57c97d0104e563f15bd39edaf2aa6ac4c3e9444afd437fb92458700d22c6c0f5"}, +] + +[package.dependencies] +jinja2 = ">=3.0.0" +json-repair = ">=0.1.0" +litellm = ">=1.0.0" +pydantic = ">=2.0.0" +python-dotenv = ">=1.0.0" +tiktoken = ">=0.8.0" +tqdm = ">=4.65.0" + +[package.extras] +dev = ["aiofiles (>=23.0.0)", "black (>=22.0.0)", "datasets (>=2.0.0)", "fastapi (>=0.104.0)", "httpx (>=0.25.0)", "huggingface-hub (>=0.0.0)", "isort (>=5.0.0)", "mypy (>=1.0.0)", "numpy (>=1.24.0)", "pandas (>=2.0.0)", "pre-commit (>=3.6.0)", "pytest (>=7.0.0)", "pytest-cov (>=6.2.1)", "rich (>=13.0.0)", "ruff (>=0.3.0)", "typing-extensions (>=4.0.0)", "uvicorn (>=0.24.0)"] +search = ["bm25s (>=0.2.0)", "pystemmer (>=2.2.0)"] + [[package]] name = "toml" version = "0.10.2" @@ -16769,4 +16823,4 @@ third-party-runtimes = ["daytona", "e2b-code-interpreter", "modal", "runloop-api [metadata] lock-version = "2.1" python-versions = "^3.12,<3.14" -content-hash = "44c6c1f432337d216b70a6654fb0cd20410ddeb56485999859032aec53e90458" +content-hash = "9764f3b69ec8ed35feebd78a826bbc6bfa4ac6d5b56bc999be8bc738b644e538" diff --git a/pyproject.toml b/pyproject.toml index 75dada88a24d..c70c110dcc15 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,7 +6,7 @@ requires = [ [tool.poetry] name = "openhands-ai" -version = "0.62.0" +version = "1.0.0" description = "OpenHands: Code Less, Make More" authors = [ "OpenHands" ] license = "MIT" @@ -26,8 +26,8 @@ build = "build_vscode.py" # Build VSCode extension during Poetry build [tool.poetry.dependencies] python = "^3.12,<3.14" -litellm = ">=1.74.3, <1.78.0, !=1.64.4, !=1.67.*" # avoid 1.64.4 (known bug) & 1.67.* (known bug #10272) -openai = "1.99.9" # Pin due to litellm incompatibility with >=1.100.0 (BerriAI/litellm#13711) +litellm = ">=1.74.3, <=1.80.7, !=1.64.4, !=1.67.*" # avoid 1.64.4 (known bug) & 1.67.* (known bug #10272) +openai = "2.8.0" # Pin due to litellm incompatibility with >=1.100.0 (BerriAI/litellm#13711) aiohttp = ">=3.9.0,!=3.11.13" # Pin to avoid yanked version 3.11.13 google-genai = "*" # To use litellm with Gemini Pro API google-api-python-client = "^2.164.0" # For Google Sheets API @@ -116,9 +116,9 @@ pybase62 = "^1.0.0" #openhands-agent-server = { git = "https://github.com/OpenHands/agent-sdk.git", subdirectory = "openhands-agent-server", rev = "15f565b8ac38876e40dc05c08e2b04ccaae4a66d" } #openhands-sdk = { git = "https://github.com/OpenHands/agent-sdk.git", subdirectory = "openhands-sdk", rev = "15f565b8ac38876e40dc05c08e2b04ccaae4a66d" } #openhands-tools = { git = "https://github.com/OpenHands/agent-sdk.git", subdirectory = "openhands-tools", rev = "15f565b8ac38876e40dc05c08e2b04ccaae4a66d" } -openhands-sdk = "1.1.0" -openhands-agent-server = "1.1.0" -openhands-tools = "1.1.0" +openhands-sdk = "1.6.0" +openhands-agent-server = "1.6.0" +openhands-tools = "1.6.0" python-jose = { version = ">=3.3", extras = [ "cryptography" ] } sqlalchemy = { extras = [ "asyncio" ], version = "^2.0.40" } pg8000 = "^1.31.5" diff --git a/tests/unit/agenthub/test_agents.py b/tests/unit/agenthub/test_agents.py index 2a90dcb66830..09f28e991c86 100644 --- a/tests/unit/agenthub/test_agents.py +++ b/tests/unit/agenthub/test_agents.py @@ -393,7 +393,7 @@ def test_mismatched_tool_call_events_and_auto_add_system_message( # 2. The action message # 3. 
The observation message mock_state.history = [initial_user_message, action, observation] - messages = agent._get_messages(mock_state.history, initial_user_message) + messages = agent._get_messages(mock_state.history, initial_user_message, set()) assert len(messages) == 4 # System + initial user + action + observation assert messages[0].role == 'system' # First message should be the system message assert ( @@ -404,7 +404,7 @@ def test_mismatched_tool_call_events_and_auto_add_system_message( # The same should hold if the events are presented out-of-order mock_state.history = [initial_user_message, observation, action] - messages = agent._get_messages(mock_state.history, initial_user_message) + messages = agent._get_messages(mock_state.history, initial_user_message, set()) assert len(messages) == 4 assert messages[0].role == 'system' # First message should be the system message assert ( @@ -414,7 +414,7 @@ def test_mismatched_tool_call_events_and_auto_add_system_message( # If only one of the two events is present, then we should just get the system message # plus any valid message from the event mock_state.history = [initial_user_message, action] - messages = agent._get_messages(mock_state.history, initial_user_message) + messages = agent._get_messages(mock_state.history, initial_user_message, set()) assert ( len(messages) == 2 ) # System + initial user message, action is waiting for its observation @@ -422,7 +422,7 @@ def test_mismatched_tool_call_events_and_auto_add_system_message( assert messages[1].role == 'user' mock_state.history = [initial_user_message, observation] - messages = agent._get_messages(mock_state.history, initial_user_message) + messages = agent._get_messages(mock_state.history, initial_user_message, set()) assert ( len(messages) == 2 ) # System + initial user message, observation has no matching action diff --git a/tests/unit/agenthub/test_prompt_caching.py b/tests/unit/agenthub/test_prompt_caching.py index 60cc0bb16f84..2435b1320ace 100644 --- a/tests/unit/agenthub/test_prompt_caching.py +++ b/tests/unit/agenthub/test_prompt_caching.py @@ -80,7 +80,7 @@ def test_get_messages(codeact_agent: CodeActAgent): history.append(message_action_5) codeact_agent.reset() - messages = codeact_agent._get_messages(history, message_action_1) + messages = codeact_agent._get_messages(history, message_action_1, set()) assert ( len(messages) == 6 @@ -122,7 +122,7 @@ def test_get_messages_prompt_caching(codeact_agent: CodeActAgent): history.append(message_action_agent) codeact_agent.reset() - messages = codeact_agent._get_messages(history, initial_user_message) + messages = codeact_agent._get_messages(history, initial_user_message, set()) # Check that only the last two user messages have cache_prompt=True cached_user_messages = [ diff --git a/tests/unit/app_server/test_app_conversation_service_base.py b/tests/unit/app_server/test_app_conversation_service_base.py new file mode 100644 index 000000000000..db31d8d3d200 --- /dev/null +++ b/tests/unit/app_server/test_app_conversation_service_base.py @@ -0,0 +1,1266 @@ +"""Unit tests for git and security functionality in AppConversationServiceBase. + +This module tests the git-related functionality, specifically the clone_or_init_git_repo method +and the recent bug fixes for git checkout operations. 
+""" + +import subprocess +from types import MethodType +from unittest.mock import AsyncMock, MagicMock, Mock, patch +from uuid import uuid4 + +import pytest + +from openhands.app_server.app_conversation.app_conversation_models import AgentType +from openhands.app_server.app_conversation.app_conversation_service_base import ( + AppConversationServiceBase, +) +from openhands.app_server.sandbox.sandbox_models import SandboxInfo +from openhands.app_server.user.user_context import UserContext + + +class MockUserInfo: + """Mock class for UserInfo to simulate user settings.""" + + def __init__( + self, git_user_name: str | None = None, git_user_email: str | None = None + ): + self.git_user_name = git_user_name + self.git_user_email = git_user_email + + +class MockCommandResult: + """Mock class for command execution result.""" + + def __init__(self, exit_code: int = 0, stderr: str = ''): + self.exit_code = exit_code + self.stderr = stderr + + +class MockWorkspace: + """Mock class for AsyncRemoteWorkspace.""" + + def __init__(self, working_dir: str = '/workspace'): + self.working_dir = working_dir + self.execute_command = AsyncMock(return_value=MockCommandResult()) + + +class MockAppConversationServiceBase: + """Mock class to test git functionality without complex dependencies.""" + + def __init__(self): + self.logger = MagicMock() + + async def clone_or_init_git_repo( + self, + workspace_path: str, + repo_url: str, + branch: str = 'main', + timeout: int = 300, + ) -> bool: + """Clone or initialize a git repository. + + This is a simplified version of the actual method for testing purposes. + """ + try: + # Try to clone the repository + clone_result = subprocess.run( + ['git', 'clone', '--branch', branch, repo_url, workspace_path], + capture_output=True, + text=True, + timeout=timeout, + ) + + if clone_result.returncode == 0: + self.logger.info( + f'Successfully cloned repository {repo_url} to {workspace_path}' + ) + return True + + # If clone fails, try to checkout the branch + checkout_result = subprocess.run( + ['git', 'checkout', branch], + cwd=workspace_path, + capture_output=True, + text=True, + timeout=timeout, + ) + + if checkout_result.returncode == 0: + self.logger.info(f'Successfully checked out branch {branch}') + return True + else: + self.logger.error( + f'Failed to checkout branch {branch}: {checkout_result.stderr}' + ) + return False + + except subprocess.TimeoutExpired: + self.logger.error(f'Git operation timed out after {timeout} seconds') + return False + except Exception as e: + self.logger.error(f'Git operation failed: {str(e)}') + return False + + +@pytest.fixture +def service(): + """Create a mock service instance for testing.""" + return MockAppConversationServiceBase() + + +@pytest.mark.asyncio +async def test_clone_or_init_git_repo_successful_clone(service): + """Test successful git clone operation.""" + with patch('subprocess.run') as mock_run: + # Mock successful clone + mock_run.return_value = MagicMock(returncode=0, stderr='', stdout='Cloning...') + + result = await service.clone_or_init_git_repo( + workspace_path='/tmp/test_repo', + repo_url='https://github.com/test/repo.git', + branch='main', + timeout=300, + ) + + assert result is True + mock_run.assert_called_once_with( + [ + 'git', + 'clone', + '--branch', + 'main', + 'https://github.com/test/repo.git', + '/tmp/test_repo', + ], + capture_output=True, + text=True, + timeout=300, + ) + service.logger.info.assert_called_with( + 'Successfully cloned repository https://github.com/test/repo.git to /tmp/test_repo' + ) + 
+ +@pytest.mark.asyncio +async def test_clone_or_init_git_repo_clone_fails_checkout_succeeds(service): + """Test git clone fails but checkout succeeds.""" + with patch('subprocess.run') as mock_run: + # Mock clone failure, then checkout success + mock_run.side_effect = [ + MagicMock(returncode=1, stderr='Clone failed', stdout=''), # Clone fails + MagicMock( + returncode=0, stderr='', stdout='Switched to branch' + ), # Checkout succeeds + ] + + result = await service.clone_or_init_git_repo( + workspace_path='/tmp/test_repo', + repo_url='https://github.com/test/repo.git', + branch='feature-branch', + timeout=300, + ) + + assert result is True + assert mock_run.call_count == 2 + + # Check clone call + mock_run.assert_any_call( + [ + 'git', + 'clone', + '--branch', + 'feature-branch', + 'https://github.com/test/repo.git', + '/tmp/test_repo', + ], + capture_output=True, + text=True, + timeout=300, + ) + + # Check checkout call + mock_run.assert_any_call( + ['git', 'checkout', 'feature-branch'], + cwd='/tmp/test_repo', + capture_output=True, + text=True, + timeout=300, + ) + + service.logger.info.assert_called_with( + 'Successfully checked out branch feature-branch' + ) + + +@pytest.mark.asyncio +async def test_clone_or_init_git_repo_both_operations_fail(service): + """Test both git clone and checkout operations fail.""" + with patch('subprocess.run') as mock_run: + # Mock both operations failing + mock_run.side_effect = [ + MagicMock(returncode=1, stderr='Clone failed', stdout=''), # Clone fails + MagicMock( + returncode=1, stderr='Checkout failed', stdout='' + ), # Checkout fails + ] + + result = await service.clone_or_init_git_repo( + workspace_path='/tmp/test_repo', + repo_url='https://github.com/test/repo.git', + branch='nonexistent-branch', + timeout=300, + ) + + assert result is False + assert mock_run.call_count == 2 + service.logger.error.assert_called_with( + 'Failed to checkout branch nonexistent-branch: Checkout failed' + ) + + +@pytest.mark.asyncio +async def test_clone_or_init_git_repo_timeout(service): + """Test git operation timeout.""" + with patch('subprocess.run') as mock_run: + # Mock timeout exception + mock_run.side_effect = subprocess.TimeoutExpired( + cmd=['git', 'clone'], timeout=300 + ) + + result = await service.clone_or_init_git_repo( + workspace_path='/tmp/test_repo', + repo_url='https://github.com/test/repo.git', + branch='main', + timeout=300, + ) + + assert result is False + service.logger.error.assert_called_with( + 'Git operation timed out after 300 seconds' + ) + + +@pytest.mark.asyncio +async def test_clone_or_init_git_repo_exception(service): + """Test git operation with unexpected exception.""" + with patch('subprocess.run') as mock_run: + # Mock unexpected exception + mock_run.side_effect = Exception('Unexpected error') + + result = await service.clone_or_init_git_repo( + workspace_path='/tmp/test_repo', + repo_url='https://github.com/test/repo.git', + branch='main', + timeout=300, + ) + + assert result is False + service.logger.error.assert_called_with( + 'Git operation failed: Unexpected error' + ) + + +@pytest.mark.asyncio +async def test_clone_or_init_git_repo_custom_timeout(service): + """Test git operation with custom timeout.""" + with patch('subprocess.run') as mock_run: + # Mock successful clone with custom timeout + mock_run.return_value = MagicMock(returncode=0, stderr='', stdout='Cloning...') + + result = await service.clone_or_init_git_repo( + workspace_path='/tmp/test_repo', + repo_url='https://github.com/test/repo.git', + branch='main', + 
timeout=600, # Custom timeout + ) + + assert result is True + mock_run.assert_called_once_with( + [ + 'git', + 'clone', + '--branch', + 'main', + 'https://github.com/test/repo.git', + '/tmp/test_repo', + ], + capture_output=True, + text=True, + timeout=600, # Verify custom timeout is used + ) + + +@patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.LLMSummarizingCondenser' +) +def test_create_condenser_default_agent_with_none_max_size(mock_condenser_class): + """Test _create_condenser for DEFAULT agent with condenser_max_size = None uses default.""" + # Arrange + mock_user_context = Mock(spec=UserContext) + with patch.object( + AppConversationServiceBase, + '__abstractmethods__', + set(), + ): + service = AppConversationServiceBase( + init_git_in_empty_workspace=True, + user_context=mock_user_context, + ) + mock_llm = MagicMock() + mock_llm_copy = MagicMock() + mock_llm_copy.usage_id = 'condenser' + mock_llm.model_copy.return_value = mock_llm_copy + mock_condenser_instance = MagicMock() + mock_condenser_class.return_value = mock_condenser_instance + + # Act + service._create_condenser(mock_llm, AgentType.DEFAULT, None) + + # Assert + mock_condenser_class.assert_called_once() + call_kwargs = mock_condenser_class.call_args[1] + # When condenser_max_size is None, max_size should not be passed (uses SDK default of 120) + assert 'max_size' not in call_kwargs + # keep_first is never passed (uses SDK default of 4) + assert 'keep_first' not in call_kwargs + assert call_kwargs['llm'].usage_id == 'condenser' + mock_llm.model_copy.assert_called_once() + + +@patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.LLMSummarizingCondenser' +) +def test_create_condenser_default_agent_with_custom_max_size(mock_condenser_class): + """Test _create_condenser for DEFAULT agent with custom condenser_max_size.""" + # Arrange + mock_user_context = Mock(spec=UserContext) + with patch.object( + AppConversationServiceBase, + '__abstractmethods__', + set(), + ): + service = AppConversationServiceBase( + init_git_in_empty_workspace=True, + user_context=mock_user_context, + ) + mock_llm = MagicMock() + mock_llm_copy = MagicMock() + mock_llm_copy.usage_id = 'condenser' + mock_llm.model_copy.return_value = mock_llm_copy + mock_condenser_instance = MagicMock() + mock_condenser_class.return_value = mock_condenser_instance + + # Act + service._create_condenser(mock_llm, AgentType.DEFAULT, 150) + + # Assert + mock_condenser_class.assert_called_once() + call_kwargs = mock_condenser_class.call_args[1] + assert call_kwargs['max_size'] == 150 # Custom value should be used + # keep_first is never passed (uses SDK default of 4) + assert 'keep_first' not in call_kwargs + assert call_kwargs['llm'].usage_id == 'condenser' + mock_llm.model_copy.assert_called_once() + + +@patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.LLMSummarizingCondenser' +) +def test_create_condenser_plan_agent_with_none_max_size(mock_condenser_class): + """Test _create_condenser for PLAN agent with condenser_max_size = None uses default.""" + # Arrange + mock_user_context = Mock(spec=UserContext) + with patch.object( + AppConversationServiceBase, + '__abstractmethods__', + set(), + ): + service = AppConversationServiceBase( + init_git_in_empty_workspace=True, + user_context=mock_user_context, + ) + mock_llm = MagicMock() + mock_llm_copy = MagicMock() + mock_llm_copy.usage_id = 'planning_condenser' + mock_llm.model_copy.return_value = mock_llm_copy + mock_condenser_instance = 
MagicMock() + mock_condenser_class.return_value = mock_condenser_instance + + # Act + service._create_condenser(mock_llm, AgentType.PLAN, None) + + # Assert + mock_condenser_class.assert_called_once() + call_kwargs = mock_condenser_class.call_args[1] + # When condenser_max_size is None, max_size should not be passed (uses SDK default of 120) + assert 'max_size' not in call_kwargs + # keep_first is never passed (uses SDK default of 4) + assert 'keep_first' not in call_kwargs + assert call_kwargs['llm'].usage_id == 'planning_condenser' + mock_llm.model_copy.assert_called_once() + + +@patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.LLMSummarizingCondenser' +) +def test_create_condenser_plan_agent_with_custom_max_size(mock_condenser_class): + """Test _create_condenser for PLAN agent with custom condenser_max_size.""" + # Arrange + mock_user_context = Mock(spec=UserContext) + with patch.object( + AppConversationServiceBase, + '__abstractmethods__', + set(), + ): + service = AppConversationServiceBase( + init_git_in_empty_workspace=True, + user_context=mock_user_context, + ) + mock_llm = MagicMock() + mock_llm_copy = MagicMock() + mock_llm_copy.usage_id = 'planning_condenser' + mock_llm.model_copy.return_value = mock_llm_copy + mock_condenser_instance = MagicMock() + mock_condenser_class.return_value = mock_condenser_instance + + # Act + service._create_condenser(mock_llm, AgentType.PLAN, 200) + + # Assert + mock_condenser_class.assert_called_once() + call_kwargs = mock_condenser_class.call_args[1] + assert call_kwargs['max_size'] == 200 # Custom value should be used + # keep_first is never passed (uses SDK default of 4) + assert 'keep_first' not in call_kwargs + assert call_kwargs['llm'].usage_id == 'planning_condenser' + mock_llm.model_copy.assert_called_once() + + +# ============================================================================= +# Tests for security analyzer helpers +# ============================================================================= + + +@pytest.mark.parametrize('value', [None, '', 'none', 'NoNe']) +def test_create_security_analyzer_returns_none_for_empty_values(value): + """_create_security_analyzer_from_string returns None for empty/none values.""" + # Arrange + service, _ = _create_service_with_mock_user_context( + MockUserInfo(), bind_methods=('_create_security_analyzer_from_string',) + ) + + # Act + result = service._create_security_analyzer_from_string(value) + + # Assert + assert result is None + + +def test_create_security_analyzer_returns_llm_analyzer(): + """_create_security_analyzer_from_string returns LLMSecurityAnalyzer for llm string.""" + # Arrange + security_analyzer_str = 'llm' + service, _ = _create_service_with_mock_user_context( + MockUserInfo(), bind_methods=('_create_security_analyzer_from_string',) + ) + + # Act + result = service._create_security_analyzer_from_string(security_analyzer_str) + + # Assert + from openhands.sdk.security.llm_analyzer import LLMSecurityAnalyzer + + assert isinstance(result, LLMSecurityAnalyzer) + + +def test_create_security_analyzer_logs_warning_for_unknown_value(): + """_create_security_analyzer_from_string logs warning and returns None for unknown.""" + # Arrange + unknown_value = 'custom' + service, _ = _create_service_with_mock_user_context( + MockUserInfo(), bind_methods=('_create_security_analyzer_from_string',) + ) + + # Act + with patch( + 'openhands.app_server.app_conversation.app_conversation_service_base._logger' + ) as mock_logger: + result = 
service._create_security_analyzer_from_string(unknown_value) + + # Assert + assert result is None + mock_logger.warning.assert_called_once() + + +def test_select_confirmation_policy_when_disabled_returns_never_confirm(): + """_select_confirmation_policy returns NeverConfirm when confirmation_mode is False.""" + # Arrange + confirmation_mode = False + security_analyzer = 'llm' + service, _ = _create_service_with_mock_user_context( + MockUserInfo(), bind_methods=('_select_confirmation_policy',) + ) + + # Act + policy = service._select_confirmation_policy(confirmation_mode, security_analyzer) + + # Assert + from openhands.sdk.security.confirmation_policy import NeverConfirm + + assert isinstance(policy, NeverConfirm) + + +def test_select_confirmation_policy_llm_returns_confirm_risky(): + """_select_confirmation_policy uses ConfirmRisky when analyzer is llm.""" + # Arrange + confirmation_mode = True + security_analyzer = 'llm' + service, _ = _create_service_with_mock_user_context( + MockUserInfo(), bind_methods=('_select_confirmation_policy',) + ) + + # Act + policy = service._select_confirmation_policy(confirmation_mode, security_analyzer) + + # Assert + from openhands.sdk.security.confirmation_policy import ConfirmRisky + + assert isinstance(policy, ConfirmRisky) + + +@pytest.mark.parametrize('security_analyzer', [None, '', 'none', 'custom']) +def test_select_confirmation_policy_non_llm_returns_always_confirm( + security_analyzer, +): + """_select_confirmation_policy falls back to AlwaysConfirm for non-llm values.""" + # Arrange + confirmation_mode = True + service, _ = _create_service_with_mock_user_context( + MockUserInfo(), bind_methods=('_select_confirmation_policy',) + ) + + # Act + policy = service._select_confirmation_policy(confirmation_mode, security_analyzer) + + # Assert + from openhands.sdk.security.confirmation_policy import AlwaysConfirm + + assert isinstance(policy, AlwaysConfirm) + + +@pytest.mark.asyncio +async def test_set_security_analyzer_skips_when_no_session_key(): + """_set_security_analyzer_from_settings exits early without session_api_key.""" + # Arrange + agent_server_url = 'https://agent.example.com' + conversation_id = uuid4() + httpx_client = AsyncMock() + service, _ = _create_service_with_mock_user_context( + MockUserInfo(), + bind_methods=( + '_create_security_analyzer_from_string', + '_set_security_analyzer_from_settings', + ), + ) + + with patch.object(service, '_create_security_analyzer_from_string') as mock_create: + # Act + await service._set_security_analyzer_from_settings( + agent_server_url=agent_server_url, + session_api_key=None, + conversation_id=conversation_id, + security_analyzer_str='llm', + httpx_client=httpx_client, + ) + + # Assert + mock_create.assert_not_called() + httpx_client.post.assert_not_called() + + +@pytest.mark.asyncio +async def test_set_security_analyzer_skips_when_analyzer_none(): + """_set_security_analyzer_from_settings skips API call when analyzer resolves to None.""" + # Arrange + agent_server_url = 'https://agent.example.com' + session_api_key = 'session-key' + conversation_id = uuid4() + httpx_client = AsyncMock() + service, _ = _create_service_with_mock_user_context( + MockUserInfo(), + bind_methods=( + '_create_security_analyzer_from_string', + '_set_security_analyzer_from_settings', + ), + ) + + with patch.object( + service, '_create_security_analyzer_from_string', return_value=None + ) as mock_create: + # Act + await service._set_security_analyzer_from_settings( + agent_server_url=agent_server_url, + 
session_api_key=session_api_key, + conversation_id=conversation_id, + security_analyzer_str='none', + httpx_client=httpx_client, + ) + + # Assert + mock_create.assert_called_once_with('none') + httpx_client.post.assert_not_called() + + +class DummyAnalyzer: + """Simple analyzer stub for testing model_dump contract.""" + + def __init__(self, payload: dict): + self._payload = payload + + def model_dump(self) -> dict: + return self._payload + + +@pytest.mark.asyncio +async def test_set_security_analyzer_successfully_calls_agent_server(): + """_set_security_analyzer_from_settings posts analyzer payload when available.""" + # Arrange + agent_server_url = 'https://agent.example.com' + session_api_key = 'session-key' + conversation_id = uuid4() + analyzer_payload = {'type': 'llm'} + httpx_client = AsyncMock() + http_response = MagicMock() + http_response.raise_for_status = MagicMock() + httpx_client.post.return_value = http_response + service, _ = _create_service_with_mock_user_context( + MockUserInfo(), + bind_methods=( + '_create_security_analyzer_from_string', + '_set_security_analyzer_from_settings', + ), + ) + + analyzer = DummyAnalyzer(analyzer_payload) + + with ( + patch.object( + service, + '_create_security_analyzer_from_string', + return_value=analyzer, + ) as mock_create, + patch( + 'openhands.app_server.app_conversation.app_conversation_service_base._logger' + ) as mock_logger, + ): + # Act + await service._set_security_analyzer_from_settings( + agent_server_url=agent_server_url, + session_api_key=session_api_key, + conversation_id=conversation_id, + security_analyzer_str='llm', + httpx_client=httpx_client, + ) + + # Assert + mock_create.assert_called_once_with('llm') + httpx_client.post.assert_awaited_once_with( + f'{agent_server_url}/api/conversations/{conversation_id}/security_analyzer', + json={'security_analyzer': analyzer_payload}, + headers={'X-Session-API-Key': session_api_key}, + timeout=30.0, + ) + http_response.raise_for_status.assert_called_once() + mock_logger.info.assert_called() + + +@pytest.mark.asyncio +async def test_set_security_analyzer_logs_warning_on_failure(): + """_set_security_analyzer_from_settings warns but does not raise on errors.""" + # Arrange + agent_server_url = 'https://agent.example.com' + session_api_key = 'session-key' + conversation_id = uuid4() + analyzer_payload = {'type': 'llm'} + httpx_client = AsyncMock() + httpx_client.post.side_effect = RuntimeError('network down') + service, _ = _create_service_with_mock_user_context( + MockUserInfo(), + bind_methods=( + '_create_security_analyzer_from_string', + '_set_security_analyzer_from_settings', + ), + ) + + analyzer = DummyAnalyzer(analyzer_payload) + + with ( + patch.object( + service, + '_create_security_analyzer_from_string', + return_value=analyzer, + ) as mock_create, + patch( + 'openhands.app_server.app_conversation.app_conversation_service_base._logger' + ) as mock_logger, + ): + # Act + await service._set_security_analyzer_from_settings( + agent_server_url=agent_server_url, + session_api_key=session_api_key, + conversation_id=conversation_id, + security_analyzer_str='llm', + httpx_client=httpx_client, + ) + + # Assert + mock_create.assert_called_once_with('llm') + httpx_client.post.assert_awaited_once() + mock_logger.warning.assert_called() + + +# ============================================================================= +# Tests for _configure_git_user_settings +# ============================================================================= + + +def 
_create_service_with_mock_user_context( + user_info: MockUserInfo, bind_methods: tuple[str, ...] | None = None +) -> tuple: + """Create a mock service with selected real methods bound for testing. + + Uses MagicMock for the service but binds the real method for testing. + + Returns a tuple of (service, mock_user_context) for testing. + """ + mock_user_context = MagicMock() + mock_user_context.get_user_info = AsyncMock(return_value=user_info) + + # Create a simple mock service and set required attribute + service = MagicMock() + service.user_context = mock_user_context + methods_to_bind = ['_configure_git_user_settings'] + if bind_methods: + methods_to_bind.extend(bind_methods) + # Remove potential duplicates while keeping order + methods_to_bind = list(dict.fromkeys(methods_to_bind)) + + # Bind actual methods from the real class to test implementations directly + for method_name in methods_to_bind: + real_method = getattr(AppConversationServiceBase, method_name) + setattr(service, method_name, MethodType(real_method, service)) + + return service, mock_user_context + + +@pytest.fixture +def mock_workspace(): + """Create a mock workspace instance for testing.""" + return MockWorkspace(working_dir='/workspace/project') + + +@pytest.mark.asyncio +async def test_configure_git_user_settings_both_name_and_email(mock_workspace): + """Test configuring both git user name and email.""" + user_info = MockUserInfo( + git_user_name='Test User', git_user_email='test@example.com' + ) + service, mock_user_context = _create_service_with_mock_user_context(user_info) + + await service._configure_git_user_settings(mock_workspace) + + # Verify get_user_info was called + mock_user_context.get_user_info.assert_called_once() + + # Verify both git config commands were executed + assert mock_workspace.execute_command.call_count == 2 + + # Check git config user.name call + mock_workspace.execute_command.assert_any_call( + 'git config --global user.name "Test User"', '/workspace/project' + ) + + # Check git config user.email call + mock_workspace.execute_command.assert_any_call( + 'git config --global user.email "test@example.com"', '/workspace/project' + ) + + +@pytest.mark.asyncio +async def test_configure_git_user_settings_only_name(mock_workspace): + """Test configuring only git user name.""" + user_info = MockUserInfo(git_user_name='Test User', git_user_email=None) + service, _ = _create_service_with_mock_user_context(user_info) + + await service._configure_git_user_settings(mock_workspace) + + # Verify only user.name was configured + assert mock_workspace.execute_command.call_count == 1 + mock_workspace.execute_command.assert_called_once_with( + 'git config --global user.name "Test User"', '/workspace/project' + ) + + +@pytest.mark.asyncio +async def test_configure_git_user_settings_only_email(mock_workspace): + """Test configuring only git user email.""" + user_info = MockUserInfo(git_user_name=None, git_user_email='test@example.com') + service, _ = _create_service_with_mock_user_context(user_info) + + await service._configure_git_user_settings(mock_workspace) + + # Verify only user.email was configured + assert mock_workspace.execute_command.call_count == 1 + mock_workspace.execute_command.assert_called_once_with( + 'git config --global user.email "test@example.com"', '/workspace/project' + ) + + +@pytest.mark.asyncio +async def test_configure_git_user_settings_neither_set(mock_workspace): + """Test when neither git user name nor email is set.""" + user_info = MockUserInfo(git_user_name=None, 
git_user_email=None) + service, _ = _create_service_with_mock_user_context(user_info) + + await service._configure_git_user_settings(mock_workspace) + + # Verify no git config commands were executed + mock_workspace.execute_command.assert_not_called() + + +@pytest.mark.asyncio +async def test_configure_git_user_settings_empty_strings(mock_workspace): + """Test when git user name and email are empty strings.""" + user_info = MockUserInfo(git_user_name='', git_user_email='') + service, _ = _create_service_with_mock_user_context(user_info) + + await service._configure_git_user_settings(mock_workspace) + + # Empty strings are falsy, so no commands should be executed + mock_workspace.execute_command.assert_not_called() + + +@pytest.mark.asyncio +async def test_configure_git_user_settings_get_user_info_fails(mock_workspace): + """Test handling of exception when get_user_info fails.""" + user_info = MockUserInfo() + service, mock_user_context = _create_service_with_mock_user_context(user_info) + mock_user_context.get_user_info = AsyncMock( + side_effect=Exception('User info error') + ) + + # Should not raise exception, just log warning + await service._configure_git_user_settings(mock_workspace) + + # Verify no git config commands were executed + mock_workspace.execute_command.assert_not_called() + + +@pytest.mark.asyncio +async def test_configure_git_user_settings_name_command_fails(mock_workspace): + """Test handling when git config user.name command fails.""" + user_info = MockUserInfo( + git_user_name='Test User', git_user_email='test@example.com' + ) + service, _ = _create_service_with_mock_user_context(user_info) + + # Make the first command fail (user.name), second succeed (user.email) + mock_workspace.execute_command = AsyncMock( + side_effect=[ + MockCommandResult(exit_code=1, stderr='Permission denied'), + MockCommandResult(exit_code=0), + ] + ) + + # Should not raise exception + await service._configure_git_user_settings(mock_workspace) + + # Verify both commands were still attempted + assert mock_workspace.execute_command.call_count == 2 + + +@pytest.mark.asyncio +async def test_configure_git_user_settings_email_command_fails(mock_workspace): + """Test handling when git config user.email command fails.""" + user_info = MockUserInfo( + git_user_name='Test User', git_user_email='test@example.com' + ) + service, _ = _create_service_with_mock_user_context(user_info) + + # Make the first command succeed (user.name), second fail (user.email) + mock_workspace.execute_command = AsyncMock( + side_effect=[ + MockCommandResult(exit_code=0), + MockCommandResult(exit_code=1, stderr='Permission denied'), + ] + ) + + # Should not raise exception + await service._configure_git_user_settings(mock_workspace) + + # Verify both commands were still attempted + assert mock_workspace.execute_command.call_count == 2 + + +@pytest.mark.asyncio +async def test_configure_git_user_settings_special_characters_in_name(mock_workspace): + """Test git user name with special characters.""" + user_info = MockUserInfo( + git_user_name="Test O'Brien", git_user_email='test@example.com' + ) + service, _ = _create_service_with_mock_user_context(user_info) + + await service._configure_git_user_settings(mock_workspace) + + # Verify the name is passed with special characters + mock_workspace.execute_command.assert_any_call( + 'git config --global user.name "Test O\'Brien"', '/workspace/project' + ) + + +# ============================================================================= +# Tests for load_and_merge_all_skills with 
org skills +# ============================================================================= + + +class TestLoadAndMergeAllSkillsWithOrgSkills: + """Test load_and_merge_all_skills includes organization skills.""" + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_sandbox_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_global_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_user_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_org_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_repo_skills' + ) + async def test_load_and_merge_includes_org_skills( + self, + mock_load_repo, + mock_load_org, + mock_load_user, + mock_load_global, + mock_load_sandbox, + ): + """Test that load_and_merge_all_skills loads and merges org skills.""" + # Arrange + mock_user_context = Mock(spec=UserContext) + with patch.object( + AppConversationServiceBase, + '__abstractmethods__', + set(), + ): + service = AppConversationServiceBase( + init_git_in_empty_workspace=True, + user_context=mock_user_context, + ) + + sandbox = Mock(spec=SandboxInfo) + sandbox.exposed_urls = [] + remote_workspace = AsyncMock() + + # Create distinct mock skills for each source + sandbox_skill = Mock() + sandbox_skill.name = 'sandbox_skill' + global_skill = Mock() + global_skill.name = 'global_skill' + user_skill = Mock() + user_skill.name = 'user_skill' + org_skill = Mock() + org_skill.name = 'org_skill' + repo_skill = Mock() + repo_skill.name = 'repo_skill' + + mock_load_sandbox.return_value = [sandbox_skill] + mock_load_global.return_value = [global_skill] + mock_load_user.return_value = [user_skill] + mock_load_org.return_value = [org_skill] + mock_load_repo.return_value = [repo_skill] + + # Act + result = await service.load_and_merge_all_skills( + sandbox, remote_workspace, 'owner/repo', '/workspace' + ) + + # Assert + assert len(result) == 5 + names = {s.name for s in result} + assert names == { + 'sandbox_skill', + 'global_skill', + 'user_skill', + 'org_skill', + 'repo_skill', + } + mock_load_org.assert_called_once_with( + remote_workspace, 'owner/repo', '/workspace', mock_user_context + ) + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_sandbox_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_global_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_user_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_org_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_repo_skills' + ) + async def test_load_and_merge_org_skills_precedence( + self, + mock_load_repo, + mock_load_org, + mock_load_user, + mock_load_global, + mock_load_sandbox, + ): + """Test that org skills have correct precedence (higher than user, lower than repo).""" + # Arrange + mock_user_context = Mock(spec=UserContext) + with patch.object( + AppConversationServiceBase, + '__abstractmethods__', + set(), + ): + service = AppConversationServiceBase( + init_git_in_empty_workspace=True, + user_context=mock_user_context, + ) + + sandbox = Mock(spec=SandboxInfo) + sandbox.exposed_urls = [] + remote_workspace = AsyncMock() + + # Create skills with same name but different sources + 
user_skill = Mock() + user_skill.name = 'common_skill' + user_skill.source = 'user' + + org_skill = Mock() + org_skill.name = 'common_skill' + org_skill.source = 'org' + + repo_skill = Mock() + repo_skill.name = 'common_skill' + repo_skill.source = 'repo' + + mock_load_sandbox.return_value = [] + mock_load_global.return_value = [] + mock_load_user.return_value = [user_skill] + mock_load_org.return_value = [org_skill] + mock_load_repo.return_value = [repo_skill] + + # Act + result = await service.load_and_merge_all_skills( + sandbox, remote_workspace, 'owner/repo', '/workspace' + ) + + # Assert + # Should have only one skill with repo source (highest precedence) + assert len(result) == 1 + assert result[0].source == 'repo' + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_sandbox_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_global_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_user_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_org_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_repo_skills' + ) + async def test_load_and_merge_org_skills_override_user_skills( + self, + mock_load_repo, + mock_load_org, + mock_load_user, + mock_load_global, + mock_load_sandbox, + ): + """Test that org skills override user skills for same name.""" + # Arrange + mock_user_context = Mock(spec=UserContext) + with patch.object( + AppConversationServiceBase, + '__abstractmethods__', + set(), + ): + service = AppConversationServiceBase( + init_git_in_empty_workspace=True, + user_context=mock_user_context, + ) + + sandbox = Mock(spec=SandboxInfo) + sandbox.exposed_urls = [] + remote_workspace = AsyncMock() + + # Create skills with same name + user_skill = Mock() + user_skill.name = 'shared_skill' + user_skill.priority = 'low' + + org_skill = Mock() + org_skill.name = 'shared_skill' + org_skill.priority = 'high' + + mock_load_sandbox.return_value = [] + mock_load_global.return_value = [] + mock_load_user.return_value = [user_skill] + mock_load_org.return_value = [org_skill] + mock_load_repo.return_value = [] + + # Act + result = await service.load_and_merge_all_skills( + sandbox, remote_workspace, 'owner/repo', '/workspace' + ) + + # Assert + assert len(result) == 1 + assert result[0].priority == 'high' # Org skill should win + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_sandbox_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_global_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_user_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_org_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_repo_skills' + ) + async def test_load_and_merge_handles_org_skills_failure( + self, + mock_load_repo, + mock_load_org, + mock_load_user, + mock_load_global, + mock_load_sandbox, + ): + """Test that failure to load org skills doesn't break the overall process.""" + # Arrange + mock_user_context = Mock(spec=UserContext) + with patch.object( + AppConversationServiceBase, + '__abstractmethods__', + set(), + ): + service = AppConversationServiceBase( + init_git_in_empty_workspace=True, + user_context=mock_user_context, + ) 
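+            # Note: clearing __abstractmethods__ via patch.object above lets the
+            # abstract AppConversationServiceBase be instantiated directly, so its
+            # concrete skill-merging logic can be exercised without defining a
+            # throwaway subclass.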
+ + sandbox = Mock(spec=SandboxInfo) + sandbox.exposed_urls = [] + remote_workspace = AsyncMock() + + global_skill = Mock() + global_skill.name = 'global_skill' + repo_skill = Mock() + repo_skill.name = 'repo_skill' + + mock_load_sandbox.return_value = [] + mock_load_global.return_value = [global_skill] + mock_load_user.return_value = [] + mock_load_org.return_value = [] # Org skills failed/empty + mock_load_repo.return_value = [repo_skill] + + # Act + result = await service.load_and_merge_all_skills( + sandbox, remote_workspace, 'owner/repo', '/workspace' + ) + + # Assert + # Should still have skills from other sources + assert len(result) == 2 + names = {s.name for s in result} + assert names == {'global_skill', 'repo_skill'} + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_sandbox_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_global_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_user_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_org_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_repo_skills' + ) + async def test_load_and_merge_no_selected_repository( + self, + mock_load_repo, + mock_load_org, + mock_load_user, + mock_load_global, + mock_load_sandbox, + ): + """Test skill loading when no repository is selected.""" + # Arrange + mock_user_context = Mock(spec=UserContext) + with patch.object( + AppConversationServiceBase, + '__abstractmethods__', + set(), + ): + service = AppConversationServiceBase( + init_git_in_empty_workspace=True, + user_context=mock_user_context, + ) + + sandbox = Mock(spec=SandboxInfo) + sandbox.exposed_urls = [] + remote_workspace = AsyncMock() + + global_skill = Mock() + global_skill.name = 'global_skill' + + mock_load_sandbox.return_value = [] + mock_load_global.return_value = [global_skill] + mock_load_user.return_value = [] + mock_load_org.return_value = [] + mock_load_repo.return_value = [] + + # Act + result = await service.load_and_merge_all_skills( + sandbox, remote_workspace, None, '/workspace' + ) + + # Assert + assert len(result) == 1 + # Org skills should be called even with None repository + mock_load_org.assert_called_once_with( + remote_workspace, None, '/workspace', mock_user_context + ) diff --git a/tests/unit/app_server/test_app_conversation_skills_endpoint.py b/tests/unit/app_server/test_app_conversation_skills_endpoint.py new file mode 100644 index 000000000000..e84412bcd0aa --- /dev/null +++ b/tests/unit/app_server/test_app_conversation_skills_endpoint.py @@ -0,0 +1,503 @@ +"""Unit tests for the V1 skills endpoint in app_conversation_router. + +This module tests the GET /{conversation_id}/skills endpoint functionality, +following TDD best practices with AAA structure. 
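+The endpoint handler is invoked directly with mocked service dependencies rather
+than through an HTTP test client.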
+""" + +from unittest.mock import AsyncMock, MagicMock +from uuid import uuid4 + +import pytest +from fastapi import status + +from openhands.app_server.app_conversation.app_conversation_models import ( + AppConversation, +) +from openhands.app_server.app_conversation.app_conversation_router import ( + get_conversation_skills, +) +from openhands.app_server.app_conversation.app_conversation_service_base import ( + AppConversationServiceBase, +) +from openhands.app_server.sandbox.sandbox_models import ( + AGENT_SERVER, + ExposedUrl, + SandboxInfo, + SandboxStatus, +) +from openhands.app_server.sandbox.sandbox_spec_models import SandboxSpecInfo +from openhands.app_server.user.user_context import UserContext +from openhands.sdk.context.skills import KeywordTrigger, Skill, TaskTrigger + + +def _make_service_mock( + *, + user_context: UserContext, + conversation_return: AppConversation | None = None, + skills_return: list[Skill] | None = None, + raise_on_load: bool = False, +): + """Create a mock service that passes the isinstance check and returns the desired values.""" + + mock_cls = type('AppConversationServiceMock', (MagicMock,), {}) + AppConversationServiceBase.register(mock_cls) + + service = mock_cls() + service.user_context = user_context + service.get_app_conversation = AsyncMock(return_value=conversation_return) + + async def _load_skills(*_args, **_kwargs): + if raise_on_load: + raise Exception('Skill loading failed') + return skills_return or [] + + service.load_and_merge_all_skills = AsyncMock(side_effect=_load_skills) + return service + + +@pytest.mark.asyncio +class TestGetConversationSkills: + """Test suite for get_conversation_skills endpoint.""" + + async def test_get_skills_returns_repo_and_knowledge_skills(self): + """Test successful retrieval of both repo and knowledge skills. 
+ + Arrange: Setup conversation, sandbox, and skills with different types + Act: Call get_conversation_skills endpoint + Assert: Response contains both repo and knowledge skills with correct types + """ + # Arrange + conversation_id = uuid4() + sandbox_id = str(uuid4()) + working_dir = '/workspace' + + # Create mock conversation + mock_conversation = AppConversation( + id=conversation_id, + created_by_user_id='test-user', + sandbox_id=sandbox_id, + selected_repository='owner/repo', + sandbox_status=SandboxStatus.RUNNING, + ) + + # Create mock sandbox with agent server URL + mock_sandbox = SandboxInfo( + id=sandbox_id, + created_by_user_id='test-user', + status=SandboxStatus.RUNNING, + sandbox_spec_id=str(uuid4()), + session_api_key='test-api-key', + exposed_urls=[ + ExposedUrl(name=AGENT_SERVER, url='http://localhost:8000', port=8000) + ], + ) + + # Create mock sandbox spec + mock_sandbox_spec = SandboxSpecInfo( + id=str(uuid4()), command=None, working_dir=working_dir + ) + + # Create mock skills - repo skill (no trigger) + repo_skill = Skill( + name='repo_skill', + content='Repository skill content', + trigger=None, + ) + + # Create mock skills - knowledge skill (with KeywordTrigger) + knowledge_skill = Skill( + name='knowledge_skill', + content='Knowledge skill content', + trigger=KeywordTrigger(keywords=['test', 'help']), + ) + + # Mock services + mock_user_context = MagicMock(spec=UserContext) + mock_app_conversation_service = _make_service_mock( + user_context=mock_user_context, + conversation_return=mock_conversation, + skills_return=[repo_skill, knowledge_skill], + ) + + mock_sandbox_service = MagicMock() + mock_sandbox_service.get_sandbox = AsyncMock(return_value=mock_sandbox) + + mock_sandbox_spec_service = MagicMock() + mock_sandbox_spec_service.get_sandbox_spec = AsyncMock( + return_value=mock_sandbox_spec + ) + + # Act + response = await get_conversation_skills( + conversation_id=conversation_id, + app_conversation_service=mock_app_conversation_service, + sandbox_service=mock_sandbox_service, + sandbox_spec_service=mock_sandbox_spec_service, + ) + + # Assert + assert response.status_code == status.HTTP_200_OK + content = response.body.decode('utf-8') + import json + + data = json.loads(content) + assert 'skills' in data + assert len(data['skills']) == 2 + + # Check repo skill + repo_skill_data = next( + (s for s in data['skills'] if s['name'] == 'repo_skill'), None + ) + assert repo_skill_data is not None + assert repo_skill_data['type'] == 'repo' + assert repo_skill_data['content'] == 'Repository skill content' + assert repo_skill_data['triggers'] == [] + + # Check knowledge skill + knowledge_skill_data = next( + (s for s in data['skills'] if s['name'] == 'knowledge_skill'), None + ) + assert knowledge_skill_data is not None + assert knowledge_skill_data['type'] == 'knowledge' + assert knowledge_skill_data['content'] == 'Knowledge skill content' + assert knowledge_skill_data['triggers'] == ['test', 'help'] + + async def test_get_skills_returns_404_when_conversation_not_found(self): + """Test endpoint returns 404 when conversation doesn't exist. 
+ + Arrange: Setup mocks to return None for conversation + Act: Call get_conversation_skills endpoint + Assert: Response is 404 with appropriate error message + """ + # Arrange + conversation_id = uuid4() + + mock_user_context = MagicMock(spec=UserContext) + mock_app_conversation_service = _make_service_mock( + user_context=mock_user_context, + conversation_return=None, + ) + + mock_sandbox_service = MagicMock() + mock_sandbox_spec_service = MagicMock() + + # Act + response = await get_conversation_skills( + conversation_id=conversation_id, + app_conversation_service=mock_app_conversation_service, + sandbox_service=mock_sandbox_service, + sandbox_spec_service=mock_sandbox_spec_service, + ) + + # Assert + assert response.status_code == status.HTTP_404_NOT_FOUND + content = response.body.decode('utf-8') + import json + + data = json.loads(content) + assert 'error' in data + assert str(conversation_id) in data['error'] + + async def test_get_skills_returns_404_when_sandbox_not_found(self): + """Test endpoint returns 404 when sandbox doesn't exist. + + Arrange: Setup conversation but no sandbox + Act: Call get_conversation_skills endpoint + Assert: Response is 404 with sandbox error message + """ + # Arrange + conversation_id = uuid4() + sandbox_id = str(uuid4()) + + mock_conversation = AppConversation( + id=conversation_id, + created_by_user_id='test-user', + sandbox_id=sandbox_id, + sandbox_status=SandboxStatus.RUNNING, + ) + + mock_user_context = MagicMock(spec=UserContext) + mock_app_conversation_service = _make_service_mock( + user_context=mock_user_context, + conversation_return=mock_conversation, + ) + + mock_sandbox_service = MagicMock() + mock_sandbox_service.get_sandbox = AsyncMock(return_value=None) + + mock_sandbox_spec_service = MagicMock() + + # Act + response = await get_conversation_skills( + conversation_id=conversation_id, + app_conversation_service=mock_app_conversation_service, + sandbox_service=mock_sandbox_service, + sandbox_spec_service=mock_sandbox_spec_service, + ) + + # Assert + assert response.status_code == status.HTTP_404_NOT_FOUND + content = response.body.decode('utf-8') + import json + + data = json.loads(content) + assert 'error' in data + assert 'Sandbox not found' in data['error'] + + async def test_get_skills_returns_404_when_sandbox_not_running(self): + """Test endpoint returns 404 when sandbox is not in RUNNING state. 
+ + Arrange: Setup conversation with stopped sandbox + Act: Call get_conversation_skills endpoint + Assert: Response is 404 with sandbox not running message + """ + # Arrange + conversation_id = uuid4() + sandbox_id = str(uuid4()) + + mock_conversation = AppConversation( + id=conversation_id, + created_by_user_id='test-user', + sandbox_id=sandbox_id, + sandbox_status=SandboxStatus.PAUSED, + ) + + mock_sandbox = SandboxInfo( + id=sandbox_id, + created_by_user_id='test-user', + status=SandboxStatus.PAUSED, + sandbox_spec_id=str(uuid4()), + session_api_key='test-api-key', + ) + + mock_user_context = MagicMock(spec=UserContext) + mock_app_conversation_service = _make_service_mock( + user_context=mock_user_context, + conversation_return=mock_conversation, + ) + + mock_sandbox_service = MagicMock() + mock_sandbox_service.get_sandbox = AsyncMock(return_value=mock_sandbox) + + mock_sandbox_spec_service = MagicMock() + + # Act + response = await get_conversation_skills( + conversation_id=conversation_id, + app_conversation_service=mock_app_conversation_service, + sandbox_service=mock_sandbox_service, + sandbox_spec_service=mock_sandbox_spec_service, + ) + + # Assert + assert response.status_code == status.HTTP_404_NOT_FOUND + content = response.body.decode('utf-8') + import json + + data = json.loads(content) + assert 'error' in data + assert 'not running' in data['error'] + + async def test_get_skills_handles_task_trigger_skills(self): + """Test endpoint correctly handles skills with TaskTrigger. + + Arrange: Setup skill with TaskTrigger + Act: Call get_conversation_skills endpoint + Assert: Skill is categorized as knowledge type with correct triggers + """ + # Arrange + conversation_id = uuid4() + sandbox_id = str(uuid4()) + + mock_conversation = AppConversation( + id=conversation_id, + created_by_user_id='test-user', + sandbox_id=sandbox_id, + sandbox_status=SandboxStatus.RUNNING, + ) + + mock_sandbox = SandboxInfo( + id=sandbox_id, + created_by_user_id='test-user', + status=SandboxStatus.RUNNING, + sandbox_spec_id=str(uuid4()), + session_api_key='test-api-key', + exposed_urls=[ + ExposedUrl(name=AGENT_SERVER, url='http://localhost:8000', port=8000) + ], + ) + + mock_sandbox_spec = SandboxSpecInfo( + id=str(uuid4()), command=None, working_dir='/workspace' + ) + + # Create task skill with TaskTrigger + task_skill = Skill( + name='task_skill', + content='Task skill content', + trigger=TaskTrigger(triggers=['task', 'execute']), + ) + + mock_user_context = MagicMock(spec=UserContext) + mock_app_conversation_service = _make_service_mock( + user_context=mock_user_context, + conversation_return=mock_conversation, + skills_return=[task_skill], + ) + + mock_sandbox_service = MagicMock() + mock_sandbox_service.get_sandbox = AsyncMock(return_value=mock_sandbox) + + mock_sandbox_spec_service = MagicMock() + mock_sandbox_spec_service.get_sandbox_spec = AsyncMock( + return_value=mock_sandbox_spec + ) + + # Act + response = await get_conversation_skills( + conversation_id=conversation_id, + app_conversation_service=mock_app_conversation_service, + sandbox_service=mock_sandbox_service, + sandbox_spec_service=mock_sandbox_spec_service, + ) + + # Assert + assert response.status_code == status.HTTP_200_OK + content = response.body.decode('utf-8') + import json + + data = json.loads(content) + assert len(data['skills']) == 1 + skill_data = data['skills'][0] + assert skill_data['type'] == 'knowledge' + assert skill_data['triggers'] == ['task', 'execute'] + + async def 
test_get_skills_returns_500_on_skill_loading_error(self): + """Test endpoint returns 500 when skill loading fails. + + Arrange: Setup mocks to raise exception during skill loading + Act: Call get_conversation_skills endpoint + Assert: Response is 500 with error message + """ + # Arrange + conversation_id = uuid4() + sandbox_id = str(uuid4()) + + mock_conversation = AppConversation( + id=conversation_id, + created_by_user_id='test-user', + sandbox_id=sandbox_id, + sandbox_status=SandboxStatus.RUNNING, + ) + + mock_sandbox = SandboxInfo( + id=sandbox_id, + created_by_user_id='test-user', + status=SandboxStatus.RUNNING, + sandbox_spec_id=str(uuid4()), + session_api_key='test-api-key', + exposed_urls=[ + ExposedUrl(name=AGENT_SERVER, url='http://localhost:8000', port=8000) + ], + ) + + mock_sandbox_spec = SandboxSpecInfo( + id=str(uuid4()), command=None, working_dir='/workspace' + ) + + mock_user_context = MagicMock(spec=UserContext) + mock_app_conversation_service = _make_service_mock( + user_context=mock_user_context, + conversation_return=mock_conversation, + raise_on_load=True, + ) + + mock_sandbox_service = MagicMock() + mock_sandbox_service.get_sandbox = AsyncMock(return_value=mock_sandbox) + + mock_sandbox_spec_service = MagicMock() + mock_sandbox_spec_service.get_sandbox_spec = AsyncMock( + return_value=mock_sandbox_spec + ) + + # Act + response = await get_conversation_skills( + conversation_id=conversation_id, + app_conversation_service=mock_app_conversation_service, + sandbox_service=mock_sandbox_service, + sandbox_spec_service=mock_sandbox_spec_service, + ) + + # Assert + assert response.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR + content = response.body.decode('utf-8') + import json + + data = json.loads(content) + assert 'error' in data + assert 'Error getting skills' in data['error'] + + async def test_get_skills_returns_empty_list_when_no_skills_loaded(self): + """Test endpoint returns empty skills list when no skills are found. 
+ + Arrange: Setup all skill loaders to return empty lists + Act: Call get_conversation_skills endpoint + Assert: Response contains empty skills array + """ + # Arrange + conversation_id = uuid4() + sandbox_id = str(uuid4()) + + mock_conversation = AppConversation( + id=conversation_id, + created_by_user_id='test-user', + sandbox_id=sandbox_id, + sandbox_status=SandboxStatus.RUNNING, + ) + + mock_sandbox = SandboxInfo( + id=sandbox_id, + created_by_user_id='test-user', + status=SandboxStatus.RUNNING, + sandbox_spec_id=str(uuid4()), + session_api_key='test-api-key', + exposed_urls=[ + ExposedUrl(name=AGENT_SERVER, url='http://localhost:8000', port=8000) + ], + ) + + mock_sandbox_spec = SandboxSpecInfo( + id=str(uuid4()), command=None, working_dir='/workspace' + ) + + mock_user_context = MagicMock(spec=UserContext) + mock_app_conversation_service = _make_service_mock( + user_context=mock_user_context, + conversation_return=mock_conversation, + skills_return=[], + ) + + mock_sandbox_service = MagicMock() + mock_sandbox_service.get_sandbox = AsyncMock(return_value=mock_sandbox) + + mock_sandbox_spec_service = MagicMock() + mock_sandbox_spec_service.get_sandbox_spec = AsyncMock( + return_value=mock_sandbox_spec + ) + + # Act + response = await get_conversation_skills( + conversation_id=conversation_id, + app_conversation_service=mock_app_conversation_service, + sandbox_service=mock_sandbox_service, + sandbox_spec_service=mock_sandbox_spec_service, + ) + + # Assert + assert response.status_code == status.HTTP_200_OK + content = response.body.decode('utf-8') + import json + + data = json.loads(content) + assert 'skills' in data + assert len(data['skills']) == 0 diff --git a/tests/unit/app_server/test_github_v1_callback_processor.py b/tests/unit/app_server/test_github_v1_callback_processor.py new file mode 100644 index 000000000000..acf958a8e3d1 --- /dev/null +++ b/tests/unit/app_server/test_github_v1_callback_processor.py @@ -0,0 +1,771 @@ +""" +Tests for the GithubV1CallbackProcessor. 
+ +Covers: +- Event filtering +- Successful summary + GitHub posting +- Inline PR comments +- Error conditions (missing IDs/credentials, conversation/sandbox issues) +- Agent server HTTP/timeout errors +- Low-level helper methods +""" + +import os +from unittest.mock import AsyncMock, MagicMock, patch +from uuid import uuid4 + +import httpx +import pytest + +from openhands.app_server.app_conversation.app_conversation_models import ( + AppConversationInfo, +) +from openhands.app_server.event_callback.event_callback_models import EventCallback +from openhands.app_server.event_callback.event_callback_result_models import ( + EventCallbackResultStatus, +) +from openhands.app_server.event_callback.github_v1_callback_processor import ( + GithubV1CallbackProcessor, +) +from openhands.app_server.sandbox.sandbox_models import ( + ExposedUrl, + SandboxInfo, + SandboxStatus, +) +from openhands.events.action.message import MessageAction +from openhands.sdk.event import ConversationStateUpdateEvent + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + + +@pytest.fixture +def github_callback_processor(): + return GithubV1CallbackProcessor( + github_view_data={ + 'installation_id': 12345, + 'full_repo_name': 'test-owner/test-repo', + 'issue_number': 42, + }, + should_request_summary=True, + should_extract=True, + inline_pr_comment=False, + ) + + +@pytest.fixture +def github_callback_processor_inline(): + return GithubV1CallbackProcessor( + github_view_data={ + 'installation_id': 12345, + 'full_repo_name': 'test-owner/test-repo', + 'issue_number': 42, + 'comment_id': 'comment_123', + }, + should_request_summary=True, + should_extract=True, + inline_pr_comment=True, + ) + + +@pytest.fixture +def conversation_state_update_event(): + return ConversationStateUpdateEvent(key='execution_status', value='finished') + + +@pytest.fixture +def wrong_event(): + return MessageAction(content='Hello world') + + +@pytest.fixture +def wrong_state_event(): + return ConversationStateUpdateEvent(key='execution_status', value='running') + + +@pytest.fixture +def event_callback(): + return EventCallback( + id=uuid4(), + conversation_id=uuid4(), + processor=GithubV1CallbackProcessor(), + event_kind='ConversationStateUpdateEvent', + ) + + +@pytest.fixture +def mock_app_conversation_info(): + return AppConversationInfo( + conversation_id=uuid4(), + sandbox_id='sandbox_123', + title='Test Conversation', + created_by_user_id='test_user_123', + ) + + +@pytest.fixture +def mock_sandbox_info(): + return SandboxInfo( + id='sandbox_123', + status=SandboxStatus.RUNNING, + session_api_key='test_api_key', + created_by_user_id='test_user_123', + sandbox_spec_id='spec_123', + exposed_urls=[ + ExposedUrl(name='AGENT_SERVER', url='http://localhost:8000', port=8000), + ], + ) + + +# --------------------------------------------------------------------------- +# Helper for common service mocks +# --------------------------------------------------------------------------- + + +async def _setup_happy_path_services( + mock_get_app_conversation_info_service, + mock_get_sandbox_service, + mock_get_httpx_client, + app_conversation_info, + sandbox_info, + agent_response_text='Test summary from agent', +): + # app_conversation_info_service + mock_app_conversation_info_service = AsyncMock() + mock_app_conversation_info_service.get_app_conversation_info.return_value = ( + app_conversation_info + ) + 
mock_get_app_conversation_info_service.return_value.__aenter__.return_value = ( + mock_app_conversation_info_service + ) + + # sandbox_service + mock_sandbox_service = AsyncMock() + mock_sandbox_service.get_sandbox.return_value = sandbox_info + mock_get_sandbox_service.return_value.__aenter__.return_value = mock_sandbox_service + + # httpx_client + mock_httpx_client = AsyncMock() + mock_response = MagicMock() + mock_response.json.return_value = {'response': agent_response_text} + mock_response.raise_for_status.return_value = None + mock_httpx_client.post.return_value = mock_response + mock_get_httpx_client.return_value.__aenter__.return_value = mock_httpx_client + + return mock_httpx_client + + +# --------------------------------------------------------------------------- +# Tests +# --------------------------------------------------------------------------- + + +class TestGithubV1CallbackProcessor: + async def test_call_with_wrong_event_type( + self, github_callback_processor, wrong_event, event_callback + ): + result = await github_callback_processor( + conversation_id=uuid4(), + callback=event_callback, + event=wrong_event, + ) + assert result is None + + async def test_call_with_wrong_state_event( + self, github_callback_processor, wrong_state_event, event_callback + ): + result = await github_callback_processor( + conversation_id=uuid4(), + callback=event_callback, + event=wrong_state_event, + ) + assert result is None + + async def test_call_should_request_summary_false( + self, github_callback_processor, conversation_state_update_event, event_callback + ): + github_callback_processor.should_request_summary = False + + result = await github_callback_processor( + conversation_id=uuid4(), + callback=event_callback, + event=conversation_state_update_event, + ) + assert result is None + + # ------------------------------------------------------------------ # + # Successful paths + # ------------------------------------------------------------------ # + + @patch.dict( + os.environ, + { + 'GITHUB_APP_CLIENT_ID': 'test_client_id', + 'GITHUB_APP_PRIVATE_KEY': 'test_private_key', + }, + ) + @patch('openhands.app_server.config.get_app_conversation_info_service') + @patch('openhands.app_server.config.get_sandbox_service') + @patch('openhands.app_server.config.get_httpx_client') + @patch( + 'openhands.app_server.event_callback.github_v1_callback_processor.get_prompt_template' + ) + @patch( + 'openhands.app_server.event_callback.github_v1_callback_processor.GithubIntegration' + ) + @patch('openhands.app_server.event_callback.github_v1_callback_processor.Github') + async def test_successful_callback_execution( + self, + mock_github, + mock_github_integration, + mock_get_prompt_template, + mock_get_httpx_client, + mock_get_sandbox_service, + mock_get_app_conversation_info_service, + github_callback_processor, + conversation_state_update_event, + event_callback, + mock_app_conversation_info, + mock_sandbox_info, + ): + conversation_id = uuid4() + + # Common service mocks + mock_httpx_client = await _setup_happy_path_services( + mock_get_app_conversation_info_service, + mock_get_sandbox_service, + mock_get_httpx_client, + mock_app_conversation_info, + mock_sandbox_info, + ) + + mock_get_prompt_template.return_value = 'Please provide a summary' + + # GitHub integration + mock_token_data = MagicMock() + mock_token_data.token = 'test_access_token' + mock_integration_instance = MagicMock() + mock_integration_instance.get_access_token.return_value = mock_token_data + mock_github_integration.return_value = 
mock_integration_instance + + # GitHub API + mock_github_client = MagicMock() + mock_repo = MagicMock() + mock_issue = MagicMock() + mock_repo.get_issue.return_value = mock_issue + mock_github_client.get_repo.return_value = mock_repo + mock_github.return_value.__enter__.return_value = mock_github_client + + result = await github_callback_processor( + conversation_id=conversation_id, + callback=event_callback, + event=conversation_state_update_event, + ) + + assert result is not None + assert result.status == EventCallbackResultStatus.SUCCESS + assert result.event_callback_id == event_callback.id + assert result.event_id == conversation_state_update_event.id + assert result.conversation_id == conversation_id + assert result.detail == 'Test summary from agent' + assert github_callback_processor.should_request_summary is False + + mock_github_integration.assert_called_once_with( + 'test_client_id', 'test_private_key' + ) + mock_integration_instance.get_access_token.assert_called_once_with(12345) + + mock_github.assert_called_once_with('test_access_token') + mock_github_client.get_repo.assert_called_once_with('test-owner/test-repo') + mock_repo.get_issue.assert_called_once_with(number=42) + mock_issue.create_comment.assert_called_once_with('Test summary from agent') + + mock_httpx_client.post.assert_called_once() + url_arg, kwargs = mock_httpx_client.post.call_args + url = url_arg[0] if url_arg else kwargs['url'] + assert 'ask_agent' in url + assert kwargs['headers']['X-Session-API-Key'] == 'test_api_key' + assert kwargs['json']['question'] == 'Please provide a summary' + + @patch.dict( + os.environ, + { + 'GITHUB_APP_CLIENT_ID': 'test_client_id', + 'GITHUB_APP_PRIVATE_KEY': 'test_private_key', + }, + ) + @patch('openhands.app_server.config.get_app_conversation_info_service') + @patch('openhands.app_server.config.get_sandbox_service') + @patch('openhands.app_server.config.get_httpx_client') + @patch( + 'openhands.app_server.event_callback.github_v1_callback_processor.get_prompt_template' + ) + @patch( + 'openhands.app_server.event_callback.github_v1_callback_processor.GithubIntegration' + ) + @patch('openhands.app_server.event_callback.github_v1_callback_processor.Github') + async def test_successful_inline_pr_comment( + self, + mock_github, + mock_github_integration, + mock_get_prompt_template, + mock_get_httpx_client, + mock_get_sandbox_service, + mock_get_app_conversation_info_service, + github_callback_processor_inline, + conversation_state_update_event, + event_callback, + mock_app_conversation_info, + mock_sandbox_info, + ): + conversation_id = uuid4() + + await _setup_happy_path_services( + mock_get_app_conversation_info_service, + mock_get_sandbox_service, + mock_get_httpx_client, + mock_app_conversation_info, + mock_sandbox_info, + ) + + mock_get_prompt_template.return_value = 'Please provide a summary' + + mock_token_data = MagicMock() + mock_token_data.token = 'test_access_token' + mock_integration_instance = MagicMock() + mock_integration_instance.get_access_token.return_value = mock_token_data + mock_github_integration.return_value = mock_integration_instance + + mock_github_client = MagicMock() + mock_repo = MagicMock() + mock_pr = MagicMock() + mock_repo.get_pull.return_value = mock_pr + mock_github_client.get_repo.return_value = mock_repo + mock_github.return_value.__enter__.return_value = mock_github_client + + result = await github_callback_processor_inline( + conversation_id=conversation_id, + callback=event_callback, + event=conversation_state_update_event, + ) + + assert 
result is not None + assert result.status == EventCallbackResultStatus.SUCCESS + + mock_repo.get_pull.assert_called_once_with(42) + mock_pr.create_review_comment_reply.assert_called_once_with( + comment_id='comment_123', body='Test summary from agent' + ) + + # ------------------------------------------------------------------ # + # Error paths + # ------------------------------------------------------------------ # + + @patch('openhands.app_server.config.get_httpx_client') + @patch('openhands.app_server.config.get_sandbox_service') + @patch('openhands.app_server.config.get_app_conversation_info_service') + async def test_missing_installation_id( + self, + mock_get_app_conversation_info_service, + mock_get_sandbox_service, + mock_get_httpx_client, + conversation_state_update_event, + event_callback, + mock_app_conversation_info, + mock_sandbox_info, + ): + processor = GithubV1CallbackProcessor( + github_view_data={}, should_request_summary=True + ) + conversation_id = uuid4() + + await _setup_happy_path_services( + mock_get_app_conversation_info_service, + mock_get_sandbox_service, + mock_get_httpx_client, + mock_app_conversation_info, + mock_sandbox_info, + ) + + result = await processor( + conversation_id=conversation_id, + callback=event_callback, + event=conversation_state_update_event, + ) + + assert result is not None + assert result.status == EventCallbackResultStatus.ERROR + assert 'Missing installation ID' in result.detail + + @patch.dict(os.environ, {}, clear=True) + @patch('openhands.app_server.config.get_httpx_client') + @patch('openhands.app_server.config.get_sandbox_service') + @patch('openhands.app_server.config.get_app_conversation_info_service') + async def test_missing_github_credentials( + self, + mock_get_app_conversation_info_service, + mock_get_sandbox_service, + mock_get_httpx_client, + github_callback_processor, + conversation_state_update_event, + event_callback, + mock_app_conversation_info, + mock_sandbox_info, + ): + conversation_id = uuid4() + + await _setup_happy_path_services( + mock_get_app_conversation_info_service, + mock_get_sandbox_service, + mock_get_httpx_client, + mock_app_conversation_info, + mock_sandbox_info, + ) + + result = await github_callback_processor( + conversation_id=conversation_id, + callback=event_callback, + event=conversation_state_update_event, + ) + + assert result is not None + assert result.status == EventCallbackResultStatus.ERROR + assert 'GitHub App credentials are not configured' in result.detail + + @patch.dict( + os.environ, + { + 'GITHUB_APP_CLIENT_ID': 'test_client_id', + 'GITHUB_APP_PRIVATE_KEY': 'test_private_key', + }, + ) + @patch('openhands.app_server.config.get_app_conversation_info_service') + @patch('openhands.app_server.config.get_sandbox_service') + async def test_sandbox_not_running( + self, + mock_get_sandbox_service, + mock_get_app_conversation_info_service, + github_callback_processor, + conversation_state_update_event, + event_callback, + mock_app_conversation_info, + ): + conversation_id = uuid4() + + mock_app_conversation_info_service = AsyncMock() + mock_app_conversation_info_service.get_app_conversation_info.return_value = ( + mock_app_conversation_info + ) + mock_get_app_conversation_info_service.return_value.__aenter__.return_value = ( + mock_app_conversation_info_service + ) + + non_running_sandbox = SandboxInfo( + id='sandbox_123', + status=SandboxStatus.PAUSED, + session_api_key='test_api_key', + created_by_user_id='test_user_123', + sandbox_spec_id='spec_123', + ) + mock_sandbox_service = 
AsyncMock() + mock_sandbox_service.get_sandbox.return_value = non_running_sandbox + mock_get_sandbox_service.return_value.__aenter__.return_value = ( + mock_sandbox_service + ) + + result = await github_callback_processor( + conversation_id=conversation_id, + callback=event_callback, + event=conversation_state_update_event, + ) + + assert result is not None + assert result.status == EventCallbackResultStatus.ERROR + assert 'Sandbox not running' in result.detail + + @patch.dict( + os.environ, + { + 'GITHUB_APP_CLIENT_ID': 'test_client_id', + 'GITHUB_APP_PRIVATE_KEY': 'test_private_key', + }, + ) + @patch('openhands.app_server.config.get_app_conversation_info_service') + @patch('openhands.app_server.config.get_sandbox_service') + @patch('openhands.app_server.config.get_httpx_client') + @patch( + 'openhands.app_server.event_callback.github_v1_callback_processor.get_prompt_template' + ) + async def test_agent_server_http_error( + self, + mock_get_prompt_template, + mock_get_httpx_client, + mock_get_sandbox_service, + mock_get_app_conversation_info_service, + github_callback_processor, + conversation_state_update_event, + event_callback, + mock_app_conversation_info, + mock_sandbox_info, + ): + conversation_id = uuid4() + + # Set up happy path except httpx + await _setup_happy_path_services( + mock_get_app_conversation_info_service, + mock_get_sandbox_service, + mock_get_httpx_client, + mock_app_conversation_info, + mock_sandbox_info, + ) + + mock_get_prompt_template.return_value = 'Please provide a summary' + + mock_httpx_client = mock_get_httpx_client.return_value.__aenter__.return_value + mock_response = MagicMock() + mock_response.status_code = 500 + mock_response.text = 'Internal Server Error' + mock_response.headers = {} + mock_error = httpx.HTTPStatusError( + 'HTTP 500 error', request=MagicMock(), response=mock_response + ) + mock_httpx_client.post.side_effect = mock_error + + result = await github_callback_processor( + conversation_id=conversation_id, + callback=event_callback, + event=conversation_state_update_event, + ) + + assert result is not None + assert result.status == EventCallbackResultStatus.ERROR + assert 'Failed to send message to agent server' in result.detail + + @patch.dict( + os.environ, + { + 'GITHUB_APP_CLIENT_ID': 'test_client_id', + 'GITHUB_APP_PRIVATE_KEY': 'test_private_key', + }, + ) + @patch('openhands.app_server.config.get_app_conversation_info_service') + @patch('openhands.app_server.config.get_sandbox_service') + @patch('openhands.app_server.config.get_httpx_client') + @patch( + 'openhands.app_server.event_callback.github_v1_callback_processor.get_prompt_template' + ) + async def test_agent_server_timeout( + self, + mock_get_prompt_template, + mock_get_httpx_client, + mock_get_sandbox_service, + mock_get_app_conversation_info_service, + github_callback_processor, + conversation_state_update_event, + event_callback, + mock_app_conversation_info, + mock_sandbox_info, + ): + conversation_id = uuid4() + + await _setup_happy_path_services( + mock_get_app_conversation_info_service, + mock_get_sandbox_service, + mock_get_httpx_client, + mock_app_conversation_info, + mock_sandbox_info, + ) + + mock_get_prompt_template.return_value = 'Please provide a summary' + + mock_httpx_client = mock_get_httpx_client.return_value.__aenter__.return_value + mock_httpx_client.post.side_effect = httpx.TimeoutException('Request timeout') + + result = await github_callback_processor( + conversation_id=conversation_id, + callback=event_callback, + event=conversation_state_update_event, 
+ ) + + assert result is not None + assert result.status == EventCallbackResultStatus.ERROR + assert 'Request timeout after 30 seconds' in result.detail + + # ------------------------------------------------------------------ # + # Low-level helper tests + # ------------------------------------------------------------------ # + + def test_get_installation_access_token_missing_id(self): + processor = GithubV1CallbackProcessor(github_view_data={}) + + with pytest.raises(ValueError, match='Missing installation ID'): + processor._get_installation_access_token() + + @patch.dict(os.environ, {}, clear=True) + def test_get_installation_access_token_missing_credentials( + self, github_callback_processor + ): + with pytest.raises( + ValueError, match='GitHub App credentials are not configured' + ): + github_callback_processor._get_installation_access_token() + + @patch.dict( + os.environ, + { + 'GITHUB_APP_CLIENT_ID': 'test_client_id', + 'GITHUB_APP_PRIVATE_KEY': 'test_private_key\\nwith_newlines', + }, + ) + @patch( + 'openhands.app_server.event_callback.github_v1_callback_processor.GithubIntegration' + ) + def test_get_installation_access_token_success( + self, mock_github_integration, github_callback_processor + ): + mock_token_data = MagicMock() + mock_token_data.token = 'test_access_token' + mock_integration_instance = MagicMock() + mock_integration_instance.get_access_token.return_value = mock_token_data + mock_github_integration.return_value = mock_integration_instance + + token = github_callback_processor._get_installation_access_token() + + assert token == 'test_access_token' + mock_github_integration.assert_called_once_with( + 'test_client_id', 'test_private_key\nwith_newlines' + ) + mock_integration_instance.get_access_token.assert_called_once_with(12345) + + @patch('openhands.app_server.event_callback.github_v1_callback_processor.Github') + async def test_post_summary_to_github_issue_comment( + self, mock_github, github_callback_processor + ): + mock_github_client = MagicMock() + mock_repo = MagicMock() + mock_issue = MagicMock() + mock_repo.get_issue.return_value = mock_issue + mock_github_client.get_repo.return_value = mock_repo + mock_github.return_value.__enter__.return_value = mock_github_client + + with patch.object( + github_callback_processor, + '_get_installation_access_token', + return_value='test_token', + ): + await github_callback_processor._post_summary_to_github('Test summary') + + mock_github.assert_called_once_with('test_token') + mock_github_client.get_repo.assert_called_once_with('test-owner/test-repo') + mock_repo.get_issue.assert_called_once_with(number=42) + mock_issue.create_comment.assert_called_once_with('Test summary') + + @patch('openhands.app_server.event_callback.github_v1_callback_processor.Github') + async def test_post_summary_to_github_pr_comment( + self, mock_github, github_callback_processor_inline + ): + mock_github_client = MagicMock() + mock_repo = MagicMock() + mock_pr = MagicMock() + mock_repo.get_pull.return_value = mock_pr + mock_github_client.get_repo.return_value = mock_repo + mock_github.return_value.__enter__.return_value = mock_github_client + + with patch.object( + github_callback_processor_inline, + '_get_installation_access_token', + return_value='test_token', + ): + await github_callback_processor_inline._post_summary_to_github( + 'Test summary' + ) + + mock_github.assert_called_once_with('test_token') + mock_github_client.get_repo.assert_called_once_with('test-owner/test-repo') + mock_repo.get_pull.assert_called_once_with(42) + 
mock_pr.create_review_comment_reply.assert_called_once_with( + comment_id='comment_123', body='Test summary' + ) + + async def test_post_summary_to_github_missing_token( + self, github_callback_processor + ): + with patch.object( + github_callback_processor, '_get_installation_access_token', return_value='' + ): + with pytest.raises(RuntimeError, match='Missing GitHub credentials'): + await github_callback_processor._post_summary_to_github('Test summary') + + @patch.dict( + os.environ, + { + 'GITHUB_APP_CLIENT_ID': 'test_client_id', + 'GITHUB_APP_PRIVATE_KEY': 'test_private_key', + 'WEB_HOST': 'test.example.com', + }, + ) + @patch('openhands.app_server.config.get_httpx_client') + @patch('openhands.app_server.config.get_sandbox_service') + @patch('openhands.app_server.config.get_app_conversation_info_service') + async def test_exception_handling_posts_error_to_github( + self, + mock_get_app_conversation_info_service, + mock_get_sandbox_service, + mock_get_httpx_client, + github_callback_processor, + conversation_state_update_event, + event_callback, + mock_app_conversation_info, + mock_sandbox_info, + ): + conversation_id = uuid4() + + # happy-ish path, except httpx error + mock_httpx_client = await _setup_happy_path_services( + mock_get_app_conversation_info_service, + mock_get_sandbox_service, + mock_get_httpx_client, + mock_app_conversation_info, + mock_sandbox_info, + ) + mock_httpx_client.post.side_effect = Exception('Simulated agent server error') + + with ( + patch( + 'openhands.app_server.event_callback.github_v1_callback_processor.GithubIntegration' + ) as mock_github_integration, + patch( + 'openhands.app_server.event_callback.github_v1_callback_processor.Github' + ) as mock_github, + ): + mock_integration = MagicMock() + mock_github_integration.return_value = mock_integration + mock_integration.get_access_token.return_value.token = 'test_token' + + mock_gh = MagicMock() + mock_github.return_value.__enter__.return_value = mock_gh + mock_repo = MagicMock() + mock_issue = MagicMock() + mock_repo.get_issue.return_value = mock_issue + mock_gh.get_repo.return_value = mock_repo + + result = await github_callback_processor( + conversation_id=conversation_id, + callback=event_callback, + event=conversation_state_update_event, + ) + + assert result is not None + assert result.status == EventCallbackResultStatus.ERROR + assert 'Simulated agent server error' in result.detail + + mock_issue.create_comment.assert_called_once() + call_args = mock_issue.create_comment.call_args + error_comment = call_args[1].get('body') or call_args[0][0] + assert ( + 'OpenHands encountered an error: **Simulated agent server error**' + in error_comment + ) + assert f'conversations/{conversation_id}' in error_comment + assert 'for more information.' 
in error_comment diff --git a/tests/unit/app_server/test_live_status_app_conversation_service.py b/tests/unit/app_server/test_live_status_app_conversation_service.py new file mode 100644 index 000000000000..f662f331465d --- /dev/null +++ b/tests/unit/app_server/test_live_status_app_conversation_service.py @@ -0,0 +1,1356 @@ +"""Unit tests for the methods in LiveStatusAppConversationService.""" + +from unittest.mock import AsyncMock, Mock, patch +from uuid import UUID, uuid4 + +import pytest + +from openhands.agent_server.models import SendMessageRequest, StartConversationRequest +from openhands.app_server.app_conversation.app_conversation_models import ( + AgentType, + AppConversationStartRequest, +) +from openhands.app_server.app_conversation.live_status_app_conversation_service import ( + LiveStatusAppConversationService, +) +from openhands.app_server.sandbox.sandbox_models import ( + AGENT_SERVER, + ExposedUrl, + SandboxInfo, + SandboxStatus, +) +from openhands.app_server.sandbox.sandbox_spec_models import SandboxSpecInfo +from openhands.app_server.user.user_context import UserContext +from openhands.integrations.provider import ProviderType +from openhands.sdk import Agent +from openhands.sdk.llm import LLM +from openhands.sdk.secret import LookupSecret, StaticSecret +from openhands.sdk.workspace import LocalWorkspace +from openhands.sdk.workspace.remote.async_remote_workspace import AsyncRemoteWorkspace +from openhands.server.types import AppMode + + +class TestLiveStatusAppConversationService: + """Test cases for the methods in LiveStatusAppConversationService.""" + + def setup_method(self): + """Set up test fixtures.""" + # Create mock dependencies + self.mock_user_context = Mock(spec=UserContext) + self.mock_user_auth = Mock() + self.mock_user_context.user_auth = self.mock_user_auth + self.mock_jwt_service = Mock() + self.mock_sandbox_service = Mock() + self.mock_sandbox_spec_service = Mock() + self.mock_app_conversation_info_service = Mock() + self.mock_app_conversation_start_task_service = Mock() + self.mock_event_callback_service = Mock() + self.mock_httpx_client = Mock() + + # Create service instance + self.service = LiveStatusAppConversationService( + init_git_in_empty_workspace=True, + user_context=self.mock_user_context, + app_conversation_info_service=self.mock_app_conversation_info_service, + app_conversation_start_task_service=self.mock_app_conversation_start_task_service, + event_callback_service=self.mock_event_callback_service, + sandbox_service=self.mock_sandbox_service, + sandbox_spec_service=self.mock_sandbox_spec_service, + jwt_service=self.mock_jwt_service, + sandbox_startup_timeout=30, + sandbox_startup_poll_frequency=1, + httpx_client=self.mock_httpx_client, + web_url='https://test.example.com', + openhands_provider_base_url='https://provider.example.com', + access_token_hard_timeout=None, + app_mode='test', + keycloak_auth_cookie=None, + ) + + # Mock user info + self.mock_user = Mock() + self.mock_user.id = 'test_user_123' + self.mock_user.llm_model = 'gpt-4' + self.mock_user.llm_base_url = 'https://api.openai.com/v1' + self.mock_user.llm_api_key = 'test_api_key' + self.mock_user.confirmation_mode = False + self.mock_user.search_api_key = None # Default to None + self.mock_user.condenser_max_size = None # Default to None + self.mock_user.llm_base_url = 'https://api.openai.com/v1' + self.mock_user.mcp_config = None # Default to None to avoid error handling path + + # Mock sandbox + self.mock_sandbox = Mock(spec=SandboxInfo) + self.mock_sandbox.id = uuid4() + 
self.mock_sandbox.status = SandboxStatus.RUNNING + + @pytest.mark.asyncio + async def test_setup_secrets_for_git_providers_no_provider_tokens(self): + """Test _setup_secrets_for_git_providers with no provider tokens.""" + # Arrange + base_secrets = {'existing': 'secret'} + self.mock_user_context.get_secrets.return_value = base_secrets + self.mock_user_context.get_provider_tokens = AsyncMock(return_value=None) + + # Act + result = await self.service._setup_secrets_for_git_providers(self.mock_user) + + # Assert + assert result == base_secrets + self.mock_user_context.get_secrets.assert_called_once() + self.mock_user_context.get_provider_tokens.assert_called_once() + + @pytest.mark.asyncio + async def test_setup_secrets_for_git_providers_with_web_url(self): + """Test _setup_secrets_for_git_providers with web URL (creates access token).""" + # Arrange + from pydantic import SecretStr + + from openhands.integrations.provider import ProviderToken + + base_secrets = {} + self.mock_user_context.get_secrets.return_value = base_secrets + self.mock_jwt_service.create_jws_token.return_value = 'test_access_token' + + # Mock provider tokens + provider_tokens = { + ProviderType.GITHUB: ProviderToken(token=SecretStr('github_token')), + ProviderType.GITLAB: ProviderToken(token=SecretStr('gitlab_token')), + } + self.mock_user_context.get_provider_tokens = AsyncMock( + return_value=provider_tokens + ) + + # Act + result = await self.service._setup_secrets_for_git_providers(self.mock_user) + + # Assert + assert 'GITHUB_TOKEN' in result + assert 'GITLAB_TOKEN' in result + assert isinstance(result['GITHUB_TOKEN'], LookupSecret) + assert isinstance(result['GITLAB_TOKEN'], LookupSecret) + assert ( + result['GITHUB_TOKEN'].url + == 'https://test.example.com/api/v1/webhooks/secrets' + ) + assert result['GITHUB_TOKEN'].headers['X-Access-Token'] == 'test_access_token' + + # Should be called twice, once for each provider + assert self.mock_jwt_service.create_jws_token.call_count == 2 + + @pytest.mark.asyncio + async def test_setup_secrets_for_git_providers_with_saas_mode(self): + """Test _setup_secrets_for_git_providers with SaaS mode (includes keycloak cookie).""" + # Arrange + from pydantic import SecretStr + + from openhands.integrations.provider import ProviderToken + + self.service.app_mode = 'saas' + self.service.keycloak_auth_cookie = 'test_cookie' + base_secrets = {} + self.mock_user_context.get_secrets.return_value = base_secrets + self.mock_jwt_service.create_jws_token.return_value = 'test_access_token' + + # Mock provider tokens + provider_tokens = { + ProviderType.GITLAB: ProviderToken(token=SecretStr('gitlab_token')), + } + self.mock_user_context.get_provider_tokens = AsyncMock( + return_value=provider_tokens + ) + + # Act + result = await self.service._setup_secrets_for_git_providers(self.mock_user) + + # Assert + assert 'GITLAB_TOKEN' in result + lookup_secret = result['GITLAB_TOKEN'] + assert isinstance(lookup_secret, LookupSecret) + assert 'Cookie' in lookup_secret.headers + assert lookup_secret.headers['Cookie'] == 'keycloak_auth=test_cookie' + + @pytest.mark.asyncio + async def test_setup_secrets_for_git_providers_without_web_url(self): + """Test _setup_secrets_for_git_providers without web URL (uses static token).""" + # Arrange + from pydantic import SecretStr + + from openhands.integrations.provider import ProviderToken + + self.service.web_url = None + base_secrets = {} + self.mock_user_context.get_secrets.return_value = base_secrets + self.mock_user_context.get_latest_token.return_value = 
'static_token_value' + + # Mock provider tokens + provider_tokens = { + ProviderType.GITHUB: ProviderToken(token=SecretStr('github_token')), + } + self.mock_user_context.get_provider_tokens = AsyncMock( + return_value=provider_tokens + ) + + # Act + result = await self.service._setup_secrets_for_git_providers(self.mock_user) + + # Assert + assert 'GITHUB_TOKEN' in result + assert isinstance(result['GITHUB_TOKEN'], StaticSecret) + assert result['GITHUB_TOKEN'].value.get_secret_value() == 'static_token_value' + self.mock_user_context.get_latest_token.assert_called_once_with( + ProviderType.GITHUB + ) + + @pytest.mark.asyncio + async def test_setup_secrets_for_git_providers_no_static_token(self): + """Test _setup_secrets_for_git_providers when no static token is available.""" + # Arrange + from pydantic import SecretStr + + from openhands.integrations.provider import ProviderToken + + self.service.web_url = None + base_secrets = {} + self.mock_user_context.get_secrets.return_value = base_secrets + self.mock_user_context.get_latest_token.return_value = None + + # Mock provider tokens + provider_tokens = { + ProviderType.GITHUB: ProviderToken(token=SecretStr('github_token')), + } + self.mock_user_context.get_provider_tokens = AsyncMock( + return_value=provider_tokens + ) + + # Act + result = await self.service._setup_secrets_for_git_providers(self.mock_user) + + # Assert + assert 'GITHUB_TOKEN' not in result + assert result == base_secrets + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_with_custom_model(self): + """Test _configure_llm_and_mcp with custom LLM model.""" + # Arrange + custom_model = 'gpt-3.5-turbo' + self.mock_user_context.get_mcp_api_key.return_value = 'mcp_api_key' + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, custom_model + ) + + # Assert + assert isinstance(llm, LLM) + assert llm.model == custom_model + assert llm.base_url == self.mock_user.llm_base_url + assert llm.api_key.get_secret_value() == self.mock_user.llm_api_key + assert llm.usage_id == 'agent' + + assert 'mcpServers' in mcp_config + assert 'default' in mcp_config['mcpServers'] + assert ( + mcp_config['mcpServers']['default']['url'] + == 'https://test.example.com/mcp/mcp' + ) + assert ( + mcp_config['mcpServers']['default']['headers']['X-Session-API-Key'] + == 'mcp_api_key' + ) + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_openhands_model_prefers_user_base_url(self): + """openhands/* model uses user.llm_base_url when provided.""" + # Arrange + self.mock_user.llm_model = 'openhands/special' + self.mock_user.llm_base_url = 'https://user-llm.example.com' + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, _ = await self.service._configure_llm_and_mcp( + self.mock_user, self.mock_user.llm_model + ) + + # Assert + assert llm.base_url == 'https://user-llm.example.com' + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_openhands_model_uses_provider_default(self): + """openhands/* model falls back to configured provider base URL.""" + # Arrange + self.mock_user.llm_model = 'openhands/default' + self.mock_user.llm_base_url = None + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, _ = await self.service._configure_llm_and_mcp( + self.mock_user, self.mock_user.llm_model + ) + + # Assert + assert llm.base_url == 'https://provider.example.com' + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_openhands_model_no_base_urls(self): + """openhands/* model sets base_url to None 
when no sources available.""" + # Arrange + self.mock_user.llm_model = 'openhands/default' + self.mock_user.llm_base_url = None + self.service.openhands_provider_base_url = None + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, _ = await self.service._configure_llm_and_mcp( + self.mock_user, self.mock_user.llm_model + ) + + # Assert + assert llm.base_url == 'https://llm-proxy.app.all-hands.dev/' + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_non_openhands_model_ignores_provider(self): + """Non-openhands model ignores provider base URL and uses user base URL.""" + # Arrange + self.mock_user.llm_model = 'gpt-4' + self.mock_user.llm_base_url = 'https://user-llm.example.com' + self.service.openhands_provider_base_url = 'https://provider.example.com' + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, _ = await self.service._configure_llm_and_mcp(self.mock_user, None) + + # Assert + assert llm.base_url == 'https://user-llm.example.com' + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_with_user_default_model(self): + """Test _configure_llm_and_mcp using user's default model.""" + # Arrange + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + assert llm.model == self.mock_user.llm_model + assert 'mcpServers' in mcp_config + assert 'default' in mcp_config['mcpServers'] + assert 'headers' not in mcp_config['mcpServers']['default'] + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_without_web_url(self): + """Test _configure_llm_and_mcp without web URL (no MCP config).""" + # Arrange + self.service.web_url = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + assert isinstance(llm, LLM) + assert mcp_config == {} + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_tavily_with_user_search_api_key(self): + """Test _configure_llm_and_mcp adds tavily when user has search_api_key.""" + # Arrange + from pydantic import SecretStr + + self.mock_user.search_api_key = SecretStr('user_search_key') + self.mock_user_context.get_mcp_api_key.return_value = 'mcp_api_key' + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + assert isinstance(llm, LLM) + assert 'mcpServers' in mcp_config + assert 'default' in mcp_config['mcpServers'] + assert 'tavily' in mcp_config['mcpServers'] + assert ( + mcp_config['mcpServers']['tavily']['url'] + == 'https://mcp.tavily.com/mcp/?tavilyApiKey=user_search_key' + ) + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_tavily_with_env_tavily_key(self): + """Test _configure_llm_and_mcp adds tavily when service has tavily_api_key.""" + # Arrange + self.service.tavily_api_key = 'env_tavily_key' + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + assert isinstance(llm, LLM) + assert 'mcpServers' in mcp_config + assert 'default' in mcp_config['mcpServers'] + assert 'tavily' in mcp_config['mcpServers'] + assert ( + mcp_config['mcpServers']['tavily']['url'] + == 'https://mcp.tavily.com/mcp/?tavilyApiKey=env_tavily_key' + ) + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_tavily_user_key_takes_precedence(self): + """Test _configure_llm_and_mcp user search_api_key takes precedence over env 
key.""" + # Arrange + from pydantic import SecretStr + + self.mock_user.search_api_key = SecretStr('user_search_key') + self.service.tavily_api_key = 'env_tavily_key' + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + assert isinstance(llm, LLM) + assert 'mcpServers' in mcp_config + assert 'tavily' in mcp_config['mcpServers'] + assert ( + mcp_config['mcpServers']['tavily']['url'] + == 'https://mcp.tavily.com/mcp/?tavilyApiKey=user_search_key' + ) + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_no_tavily_without_keys(self): + """Test _configure_llm_and_mcp does not add tavily when no keys are available.""" + # Arrange + self.mock_user.search_api_key = None + self.service.tavily_api_key = None + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + assert isinstance(llm, LLM) + assert 'mcpServers' in mcp_config + assert 'default' in mcp_config['mcpServers'] + assert 'tavily' not in mcp_config['mcpServers'] + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_saas_mode_no_tavily_without_user_key(self): + """Test _configure_llm_and_mcp does not add tavily in SAAS mode without user search_api_key. + + In SAAS mode, the global tavily_api_key should not be passed to the service instance, + so tavily should only be added if the user has their own search_api_key. + """ + # Arrange - simulate SAAS mode where no global tavily key is available + self.service.app_mode = AppMode.SAAS.value + self.service.tavily_api_key = None # In SAAS mode, this should be None + self.mock_user.search_api_key = None + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + assert isinstance(llm, LLM) + assert 'mcpServers' in mcp_config + assert 'default' in mcp_config['mcpServers'] + assert 'tavily' not in mcp_config['mcpServers'] + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_saas_mode_with_user_search_key(self): + """Test _configure_llm_and_mcp adds tavily in SAAS mode when user has search_api_key. + + Even in SAAS mode, if the user has their own search_api_key, tavily should be added. 
+ """ + # Arrange - simulate SAAS mode with user having their own search key + from pydantic import SecretStr + + self.service.app_mode = AppMode.SAAS.value + self.service.tavily_api_key = None # In SAAS mode, this should be None + self.mock_user.search_api_key = SecretStr('user_search_key') + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + assert isinstance(llm, LLM) + assert 'mcpServers' in mcp_config + assert 'default' in mcp_config['mcpServers'] + assert 'tavily' in mcp_config['mcpServers'] + assert ( + mcp_config['mcpServers']['tavily']['url'] + == 'https://mcp.tavily.com/mcp/?tavilyApiKey=user_search_key' + ) + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_tavily_with_empty_user_search_key(self): + """Test _configure_llm_and_mcp handles empty user search_api_key correctly.""" + # Arrange + from pydantic import SecretStr + + self.mock_user.search_api_key = SecretStr('') # Empty string + self.service.tavily_api_key = 'env_tavily_key' + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + assert isinstance(llm, LLM) + assert 'mcpServers' in mcp_config + assert 'tavily' in mcp_config['mcpServers'] + # Should fall back to env key since user key is empty + assert ( + mcp_config['mcpServers']['tavily']['url'] + == 'https://mcp.tavily.com/mcp/?tavilyApiKey=env_tavily_key' + ) + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_tavily_with_whitespace_user_search_key(self): + """Test _configure_llm_and_mcp handles whitespace-only user search_api_key correctly.""" + # Arrange + from pydantic import SecretStr + + self.mock_user.search_api_key = SecretStr(' ') # Whitespace only + self.service.tavily_api_key = 'env_tavily_key' + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + assert isinstance(llm, LLM) + assert 'mcpServers' in mcp_config + assert 'tavily' in mcp_config['mcpServers'] + # Should fall back to env key since user key is whitespace only + assert ( + mcp_config['mcpServers']['tavily']['url'] + == 'https://mcp.tavily.com/mcp/?tavilyApiKey=env_tavily_key' + ) + + @patch( + 'openhands.app_server.app_conversation.live_status_app_conversation_service.get_planning_tools' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.AppConversationServiceBase._create_condenser' + ) + @patch( + 'openhands.app_server.app_conversation.live_status_app_conversation_service.format_plan_structure' + ) + def test_create_agent_with_context_planning_agent( + self, mock_format_plan, mock_create_condenser, mock_get_tools + ): + """Test _create_agent_with_context for planning agent type.""" + # Arrange + mock_llm = Mock(spec=LLM) + mock_llm.model_copy.return_value = mock_llm + mock_get_tools.return_value = [] + mock_condenser = Mock() + mock_create_condenser.return_value = mock_condenser + mock_format_plan.return_value = 'test_plan_structure' + mcp_config = {'default': {'url': 'test'}} + system_message_suffix = 'Test suffix' + + # Act + with patch( + 'openhands.app_server.app_conversation.live_status_app_conversation_service.Agent' + ) as mock_agent_class: + mock_agent_instance = Mock() + mock_agent_instance.model_copy.return_value = mock_agent_instance + mock_agent_class.return_value = 
mock_agent_instance + + self.service._create_agent_with_context( + mock_llm, + AgentType.PLAN, + system_message_suffix, + mcp_config, + self.mock_user.condenser_max_size, + ) + + # Assert + mock_agent_class.assert_called_once() + call_kwargs = mock_agent_class.call_args[1] + assert call_kwargs['llm'] == mock_llm + assert call_kwargs['system_prompt_filename'] == 'system_prompt_planning.j2' + assert ( + call_kwargs['system_prompt_kwargs']['plan_structure'] + == 'test_plan_structure' + ) + assert call_kwargs['mcp_config'] == mcp_config + assert call_kwargs['security_analyzer'] is None + assert call_kwargs['condenser'] == mock_condenser + mock_create_condenser.assert_called_once_with( + mock_llm, AgentType.PLAN, self.mock_user.condenser_max_size + ) + + @patch( + 'openhands.app_server.app_conversation.live_status_app_conversation_service.get_default_tools' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.AppConversationServiceBase._create_condenser' + ) + def test_create_agent_with_context_default_agent( + self, mock_create_condenser, mock_get_tools + ): + """Test _create_agent_with_context for default agent type.""" + # Arrange + mock_llm = Mock(spec=LLM) + mock_llm.model_copy.return_value = mock_llm + mock_get_tools.return_value = [] + mock_condenser = Mock() + mock_create_condenser.return_value = mock_condenser + mcp_config = {'default': {'url': 'test'}} + + # Act + with patch( + 'openhands.app_server.app_conversation.live_status_app_conversation_service.Agent' + ) as mock_agent_class: + mock_agent_instance = Mock() + mock_agent_instance.model_copy.return_value = mock_agent_instance + mock_agent_class.return_value = mock_agent_instance + + self.service._create_agent_with_context( + mock_llm, + AgentType.DEFAULT, + None, + mcp_config, + self.mock_user.condenser_max_size, + ) + + # Assert + mock_agent_class.assert_called_once() + call_kwargs = mock_agent_class.call_args[1] + assert call_kwargs['llm'] == mock_llm + assert call_kwargs['system_prompt_kwargs']['cli_mode'] is False + assert call_kwargs['mcp_config'] == mcp_config + assert call_kwargs['condenser'] == mock_condenser + mock_get_tools.assert_called_once_with(enable_browser=True) + mock_create_condenser.assert_called_once_with( + mock_llm, AgentType.DEFAULT, self.mock_user.condenser_max_size + ) + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.live_status_app_conversation_service.ExperimentManagerImpl' + ) + async def test_finalize_conversation_request_with_skills( + self, mock_experiment_manager + ): + """Test _finalize_conversation_request with skills loading.""" + # Arrange + mock_agent = Mock(spec=Agent) + mock_updated_agent = Mock(spec=Agent) + mock_experiment_manager.run_agent_variant_tests__v1.return_value = ( + mock_updated_agent + ) + + conversation_id = uuid4() + workspace = LocalWorkspace(working_dir='/test') + initial_message = Mock(spec=SendMessageRequest) + secrets = {'test': StaticSecret(value='secret')} + remote_workspace = Mock(spec=AsyncRemoteWorkspace) + + # Mock the skills loading method + self.service._load_skills_and_update_agent = AsyncMock( + return_value=mock_updated_agent + ) + + # Act + result = await self.service._finalize_conversation_request( + mock_agent, + conversation_id, + self.mock_user, + workspace, + initial_message, + secrets, + self.mock_sandbox, + remote_workspace, + 'test_repo', + '/test/dir', + ) + + # Assert + assert isinstance(result, StartConversationRequest) + assert result.conversation_id == conversation_id + assert 
result.agent == mock_updated_agent + assert result.workspace == workspace + assert result.initial_message == initial_message + assert result.secrets == secrets + + mock_experiment_manager.run_agent_variant_tests__v1.assert_called_once_with( + self.mock_user.id, conversation_id, mock_agent + ) + self.service._load_skills_and_update_agent.assert_called_once_with( + self.mock_sandbox, + mock_updated_agent, + remote_workspace, + 'test_repo', + '/test/dir', + ) + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.live_status_app_conversation_service.ExperimentManagerImpl' + ) + async def test_finalize_conversation_request_without_skills( + self, mock_experiment_manager + ): + """Test _finalize_conversation_request without remote workspace (no skills).""" + # Arrange + mock_agent = Mock(spec=Agent) + mock_updated_agent = Mock(spec=Agent) + mock_experiment_manager.run_agent_variant_tests__v1.return_value = ( + mock_updated_agent + ) + + workspace = LocalWorkspace(working_dir='/test') + secrets = {'test': StaticSecret(value='secret')} + + # Act + result = await self.service._finalize_conversation_request( + mock_agent, + None, + self.mock_user, + workspace, + None, + secrets, + self.mock_sandbox, + None, + None, + '/test/dir', + ) + + # Assert + assert isinstance(result, StartConversationRequest) + assert isinstance(result.conversation_id, UUID) + assert result.agent == mock_updated_agent + mock_experiment_manager.run_agent_variant_tests__v1.assert_called_once() + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.live_status_app_conversation_service.ExperimentManagerImpl' + ) + async def test_finalize_conversation_request_skills_loading_fails( + self, mock_experiment_manager + ): + """Test _finalize_conversation_request when skills loading fails.""" + # Arrange + mock_agent = Mock(spec=Agent) + mock_updated_agent = Mock(spec=Agent) + mock_experiment_manager.run_agent_variant_tests__v1.return_value = ( + mock_updated_agent + ) + + workspace = LocalWorkspace(working_dir='/test') + secrets = {'test': StaticSecret(value='secret')} + remote_workspace = Mock(spec=AsyncRemoteWorkspace) + + # Mock skills loading to raise an exception + self.service._load_skills_and_update_agent = AsyncMock( + side_effect=Exception('Skills loading failed') + ) + + # Act + with patch( + 'openhands.app_server.app_conversation.live_status_app_conversation_service._logger' + ) as mock_logger: + result = await self.service._finalize_conversation_request( + mock_agent, + None, + self.mock_user, + workspace, + None, + secrets, + self.mock_sandbox, + remote_workspace, + 'test_repo', + '/test/dir', + ) + + # Assert + assert isinstance(result, StartConversationRequest) + assert ( + result.agent == mock_updated_agent + ) # Should still use the experiment-modified agent + mock_logger.warning.assert_called_once() + + @pytest.mark.asyncio + async def test_build_start_conversation_request_for_user_integration(self): + """Test the main _build_start_conversation_request_for_user method integration.""" + # Arrange + self.mock_user_context.get_user_info.return_value = self.mock_user + + # Mock all the helper methods + mock_secrets = {'GITHUB_TOKEN': Mock()} + mock_llm = Mock(spec=LLM) + mock_mcp_config = {'default': {'url': 'test'}} + mock_agent = Mock(spec=Agent) + mock_final_request = Mock(spec=StartConversationRequest) + + self.service._setup_secrets_for_git_providers = AsyncMock( + return_value=mock_secrets + ) + self.service._configure_llm_and_mcp = AsyncMock( + return_value=(mock_llm, 
mock_mcp_config) + ) + self.service._create_agent_with_context = Mock(return_value=mock_agent) + self.service._finalize_conversation_request = AsyncMock( + return_value=mock_final_request + ) + + # Act + result = await self.service._build_start_conversation_request_for_user( + sandbox=self.mock_sandbox, + initial_message=None, + system_message_suffix='Test suffix', + git_provider=ProviderType.GITHUB, + working_dir='/test/dir', + agent_type=AgentType.DEFAULT, + llm_model='gpt-4', + conversation_id=None, + remote_workspace=None, + selected_repository='test/repo', + ) + + # Assert + assert result == mock_final_request + + self.service._setup_secrets_for_git_providers.assert_called_once_with( + self.mock_user + ) + self.service._configure_llm_and_mcp.assert_called_once_with( + self.mock_user, 'gpt-4' + ) + self.service._create_agent_with_context.assert_called_once_with( + mock_llm, + AgentType.DEFAULT, + 'Test suffix', + mock_mcp_config, + self.mock_user.condenser_max_size, + secrets=mock_secrets, + ) + self.service._finalize_conversation_request.assert_called_once() + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.live_status_app_conversation_service.AsyncRemoteWorkspace' + ) + @patch( + 'openhands.app_server.app_conversation.live_status_app_conversation_service.ConversationInfo' + ) + async def test_start_app_conversation_default_title_uses_first_five_characters( + self, mock_conversation_info_class, mock_remote_workspace_class + ): + """Test that v1 conversations use first 5 characters of conversation ID for default title.""" + # Arrange + conversation_id = uuid4() + conversation_id_hex = conversation_id.hex + expected_title = f'Conversation {conversation_id_hex[:5]}' + + # Mock user context + self.mock_user_context.get_user_id = AsyncMock(return_value='test_user_123') + self.mock_user_context.get_user_info = AsyncMock(return_value=self.mock_user) + + # Mock sandbox and sandbox spec + mock_sandbox_spec = Mock(spec=SandboxSpecInfo) + mock_sandbox_spec.working_dir = '/test/workspace' + self.mock_sandbox.sandbox_spec_id = str(uuid4()) + self.mock_sandbox.id = str(uuid4()) # Ensure sandbox.id is a string + self.mock_sandbox.session_api_key = 'test_session_key' + exposed_url = ExposedUrl( + name=AGENT_SERVER, url='http://agent-server:8000', port=60000 + ) + self.mock_sandbox.exposed_urls = [exposed_url] + + self.mock_sandbox_service.get_sandbox = AsyncMock( + return_value=self.mock_sandbox + ) + self.mock_sandbox_spec_service.get_sandbox_spec = AsyncMock( + return_value=mock_sandbox_spec + ) + + # Mock remote workspace + mock_remote_workspace = Mock() + mock_remote_workspace_class.return_value = mock_remote_workspace + + # Mock the wait for sandbox and setup scripts + async def mock_wait_for_sandbox(task): + task.sandbox_id = self.mock_sandbox.id + yield task + + async def mock_run_setup_scripts(task, sandbox, workspace): + yield task + + self.service._wait_for_sandbox_start = mock_wait_for_sandbox + self.service.run_setup_scripts = mock_run_setup_scripts + + # Mock build start conversation request + mock_agent = Mock(spec=Agent) + mock_agent.llm = Mock(spec=LLM) + mock_agent.llm.model = 'gpt-4' + mock_start_request = Mock(spec=StartConversationRequest) + mock_start_request.agent = mock_agent + mock_start_request.model_dump.return_value = {'test': 'data'} + + self.service._build_start_conversation_request_for_user = AsyncMock( + return_value=mock_start_request + ) + + # Mock ConversationInfo returned from agent server + mock_conversation_info = Mock() + 
mock_conversation_info.id = conversation_id + mock_conversation_info_class.model_validate.return_value = ( + mock_conversation_info + ) + + # Mock HTTP response from agent server + mock_response = Mock() + mock_response.json.return_value = {'id': str(conversation_id)} + mock_response.raise_for_status = Mock() + self.mock_httpx_client.post = AsyncMock(return_value=mock_response) + + # Mock event callback service + self.mock_event_callback_service.save_event_callback = AsyncMock() + + # Create request + request = AppConversationStartRequest() + + # Act + async for task in self.service._start_app_conversation(request): + # Consume all tasks to reach the point where title is set + pass + + # Assert + # Verify that save_app_conversation_info was called with the correct title format + self.mock_app_conversation_info_service.save_app_conversation_info.assert_called_once() + call_args = ( + self.mock_app_conversation_info_service.save_app_conversation_info.call_args + ) + saved_info = call_args[0][0] # First positional argument + + assert saved_info.title == expected_title, ( + f'Expected title to be "{expected_title}" (first 5 chars), ' + f'but got "{saved_info.title}"' + ) + assert saved_info.id == conversation_id + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_with_custom_sse_servers(self): + """Test _configure_llm_and_mcp merges custom SSE servers with UUID-based names.""" + # Arrange + + from openhands.core.config.mcp_config import MCPConfig, MCPSSEServerConfig + + self.mock_user.mcp_config = MCPConfig( + sse_servers=[ + MCPSSEServerConfig(url='https://linear.app/sse', api_key='linear_key'), + MCPSSEServerConfig(url='https://notion.com/sse'), + ] + ) + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + assert isinstance(llm, LLM) + assert 'mcpServers' in mcp_config + + # Should have default server + 2 custom SSE servers + mcp_servers = mcp_config['mcpServers'] + assert 'default' in mcp_servers + + # Find SSE servers (they have sse_ prefix) + sse_servers = {k: v for k, v in mcp_servers.items() if k.startswith('sse_')} + assert len(sse_servers) == 2 + + # Verify SSE server configurations + for server_name, server_config in sse_servers.items(): + assert server_name.startswith('sse_') + assert len(server_name) > 4 # Has UUID suffix + assert 'url' in server_config + assert 'transport' in server_config + assert server_config['transport'] == 'sse' + + # Check if this is the Linear server (has headers) + if 'headers' in server_config: + assert server_config['headers']['Authorization'] == 'Bearer linear_key' + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_with_custom_shttp_servers(self): + """Test _configure_llm_and_mcp merges custom SHTTP servers with timeout.""" + # Arrange + from openhands.core.config.mcp_config import MCPConfig, MCPSHTTPServerConfig + + self.mock_user.mcp_config = MCPConfig( + shttp_servers=[ + MCPSHTTPServerConfig( + url='https://example.com/mcp', + api_key='test_key', + timeout=120, + ) + ] + ) + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + assert isinstance(llm, LLM) + mcp_servers = mcp_config['mcpServers'] + + # Find SHTTP servers + shttp_servers = {k: v for k, v in mcp_servers.items() if k.startswith('shttp_')} + assert len(shttp_servers) == 1 + + server_config = list(shttp_servers.values())[0] + 
assert server_config['url'] == 'https://example.com/mcp' + assert server_config['transport'] == 'streamable-http' + assert server_config['headers']['Authorization'] == 'Bearer test_key' + assert server_config['timeout'] == 120 + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_with_custom_stdio_servers(self): + """Test _configure_llm_and_mcp merges custom STDIO servers with explicit names.""" + # Arrange + from openhands.core.config.mcp_config import MCPConfig, MCPStdioServerConfig + + self.mock_user.mcp_config = MCPConfig( + stdio_servers=[ + MCPStdioServerConfig( + name='my-custom-server', + command='npx', + args=['-y', 'my-package'], + env={'API_KEY': 'secret'}, + ) + ] + ) + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + assert isinstance(llm, LLM) + mcp_servers = mcp_config['mcpServers'] + + # STDIO server should use its explicit name + assert 'my-custom-server' in mcp_servers + server_config = mcp_servers['my-custom-server'] + assert server_config['command'] == 'npx' + assert server_config['args'] == ['-y', 'my-package'] + assert server_config['env'] == {'API_KEY': 'secret'} + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_merges_system_and_custom_servers(self): + """Test _configure_llm_and_mcp merges both system and custom MCP servers.""" + # Arrange + from pydantic import SecretStr + + from openhands.core.config.mcp_config import ( + MCPConfig, + MCPSSEServerConfig, + MCPStdioServerConfig, + ) + + self.mock_user.search_api_key = SecretStr('tavily_key') + self.mock_user.mcp_config = MCPConfig( + sse_servers=[MCPSSEServerConfig(url='https://custom.com/sse')], + stdio_servers=[ + MCPStdioServerConfig( + name='custom-stdio', command='node', args=['app.js'] + ) + ], + ) + self.mock_user_context.get_mcp_api_key.return_value = 'mcp_api_key' + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + mcp_servers = mcp_config['mcpServers'] + + # Should have system servers + assert 'default' in mcp_servers + assert 'tavily' in mcp_servers + + # Should have custom SSE server with UUID name + sse_servers = [k for k in mcp_servers if k.startswith('sse_')] + assert len(sse_servers) == 1 + + # Should have custom STDIO server with explicit name + assert 'custom-stdio' in mcp_servers + + # Total: default + tavily + 1 SSE + 1 STDIO = 4 servers + assert len(mcp_servers) == 4 + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_custom_config_error_handling(self): + """Test _configure_llm_and_mcp handles errors in custom MCP config gracefully.""" + # Arrange + self.mock_user.mcp_config = Mock() + # Simulate error when accessing sse_servers + self.mock_user.mcp_config.sse_servers = property( + lambda self: (_ for _ in ()).throw(Exception('Config error')) + ) + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert - should still return valid config with system servers only + assert isinstance(llm, LLM) + mcp_servers = mcp_config['mcpServers'] + assert 'default' in mcp_servers + # Custom servers should not be added due to error + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_sdk_format_with_mcpservers_wrapper(self): + """Test _configure_llm_and_mcp returns SDK-required format with mcpServers key.""" + # Arrange + 
self.mock_user_context.get_mcp_api_key.return_value = 'mcp_key' + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert - SDK expects {'mcpServers': {...}} format + assert 'mcpServers' in mcp_config + assert isinstance(mcp_config['mcpServers'], dict) + + # Verify structure matches SDK expectations + for server_name, server_config in mcp_config['mcpServers'].items(): + assert isinstance(server_name, str) + assert isinstance(server_config, dict) + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_empty_custom_config(self): + """Test _configure_llm_and_mcp handles empty custom MCP config.""" + # Arrange + from openhands.core.config.mcp_config import MCPConfig + + self.mock_user.mcp_config = MCPConfig( + sse_servers=[], stdio_servers=[], shttp_servers=[] + ) + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + mcp_servers = mcp_config['mcpServers'] + # Should only have system default server + assert 'default' in mcp_servers + assert len(mcp_servers) == 1 + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_sse_server_without_api_key(self): + """Test _configure_llm_and_mcp handles SSE servers without API keys.""" + # Arrange + from openhands.core.config.mcp_config import MCPConfig, MCPSSEServerConfig + + self.mock_user.mcp_config = MCPConfig( + sse_servers=[MCPSSEServerConfig(url='https://public.com/sse')] + ) + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + mcp_servers = mcp_config['mcpServers'] + sse_servers = {k: v for k, v in mcp_servers.items() if k.startswith('sse_')} + + # Server should exist but without headers + assert len(sse_servers) == 1 + server_config = list(sse_servers.values())[0] + assert 'headers' not in server_config + assert server_config['url'] == 'https://public.com/sse' + assert server_config['transport'] == 'sse' + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_shttp_server_without_timeout(self): + """Test _configure_llm_and_mcp handles SHTTP servers without timeout.""" + # Arrange + from openhands.core.config.mcp_config import MCPConfig, MCPSHTTPServerConfig + + self.mock_user.mcp_config = MCPConfig( + shttp_servers=[MCPSHTTPServerConfig(url='https://example.com/mcp')] + ) + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + mcp_servers = mcp_config['mcpServers'] + shttp_servers = {k: v for k, v in mcp_servers.items() if k.startswith('shttp_')} + + assert len(shttp_servers) == 1 + server_config = list(shttp_servers.values())[0] + # Timeout should be included even if None (defaults to 60) + assert 'timeout' in server_config + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_stdio_server_without_env(self): + """Test _configure_llm_and_mcp handles STDIO servers without environment variables.""" + # Arrange + from openhands.core.config.mcp_config import MCPConfig, MCPStdioServerConfig + + self.mock_user.mcp_config = MCPConfig( + stdio_servers=[ + MCPStdioServerConfig( + name='simple-server', command='node', args=['app.js'] + ) + ] + ) + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # 
Assert + mcp_servers = mcp_config['mcpServers'] + assert 'simple-server' in mcp_servers + server_config = mcp_servers['simple-server'] + + # Should not have env key if not provided + assert 'env' not in server_config + assert server_config['command'] == 'node' + assert server_config['args'] == ['app.js'] + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_multiple_servers_same_type(self): + """Test _configure_llm_and_mcp handles multiple custom servers of the same type.""" + # Arrange + from openhands.core.config.mcp_config import MCPConfig, MCPSSEServerConfig + + self.mock_user.mcp_config = MCPConfig( + sse_servers=[ + MCPSSEServerConfig(url='https://server1.com/sse'), + MCPSSEServerConfig(url='https://server2.com/sse'), + MCPSSEServerConfig(url='https://server3.com/sse'), + ] + ) + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + mcp_servers = mcp_config['mcpServers'] + sse_servers = {k: v for k, v in mcp_servers.items() if k.startswith('sse_')} + + # All 3 servers should be present with unique UUID-based names + assert len(sse_servers) == 3 + + # Verify all have unique names + server_names = list(sse_servers.keys()) + assert len(set(server_names)) == 3 # All names are unique + + # Verify all URLs are preserved + urls = [v['url'] for v in sse_servers.values()] + assert 'https://server1.com/sse' in urls + assert 'https://server2.com/sse' in urls + assert 'https://server3.com/sse' in urls + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_mixed_server_types(self): + """Test _configure_llm_and_mcp handles all three server types together.""" + # Arrange + from openhands.core.config.mcp_config import ( + MCPConfig, + MCPSHTTPServerConfig, + MCPSSEServerConfig, + MCPStdioServerConfig, + ) + + self.mock_user.mcp_config = MCPConfig( + sse_servers=[ + MCPSSEServerConfig(url='https://sse.example.com/sse', api_key='sse_key') + ], + shttp_servers=[ + MCPSHTTPServerConfig(url='https://shttp.example.com/mcp', timeout=90) + ], + stdio_servers=[ + MCPStdioServerConfig( + name='stdio-server', + command='npx', + args=['mcp-server'], + env={'TOKEN': 'value'}, + ) + ], + ) + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + mcp_servers = mcp_config['mcpServers'] + + # Check all server types are present + sse_count = len([k for k in mcp_servers if k.startswith('sse_')]) + shttp_count = len([k for k in mcp_servers if k.startswith('shttp_')]) + stdio_count = 1 if 'stdio-server' in mcp_servers else 0 + + assert sse_count == 1 + assert shttp_count == 1 + assert stdio_count == 1 + + # Verify each type has correct configuration + sse_server = next(v for k, v in mcp_servers.items() if k.startswith('sse_')) + assert sse_server['transport'] == 'sse' + assert sse_server['headers']['Authorization'] == 'Bearer sse_key' + + shttp_server = next(v for k, v in mcp_servers.items() if k.startswith('shttp_')) + assert shttp_server['transport'] == 'streamable-http' + assert shttp_server['timeout'] == 90 + + stdio_server = mcp_servers['stdio-server'] + assert stdio_server['command'] == 'npx' + assert stdio_server['env'] == {'TOKEN': 'value'} diff --git a/tests/unit/app_server/test_remote_sandbox_service.py b/tests/unit/app_server/test_remote_sandbox_service.py index 567ecad2e30a..c70ad7d324a1 100644 --- a/tests/unit/app_server/test_remote_sandbox_service.py 
+++ b/tests/unit/app_server/test_remote_sandbox_service.py @@ -291,9 +291,7 @@ async def test_init_environment_with_web_url(self, remote_sandbox_service): ) # Verify - expected_webhook_url = ( - 'https://web.example.com/api/v1/webhooks/test-sandbox-123' - ) + expected_webhook_url = 'https://web.example.com/api/v1/webhooks' assert environment['EXISTING_VAR'] == 'existing_value' assert environment[WEBHOOK_CALLBACK_VARIABLE] == expected_webhook_url assert environment[ALLOW_CORS_ORIGINS_VARIABLE] == 'https://web.example.com' @@ -333,7 +331,7 @@ async def test_to_sandbox_info_with_running_runtime(self, remote_sandbox_service runtime_data = create_runtime_data(status='running', pod_status='ready') # Execute - sandbox_info = await remote_sandbox_service._to_sandbox_info( + sandbox_info = remote_sandbox_service._to_sandbox_info( stored_sandbox, runtime_data ) @@ -360,7 +358,7 @@ async def test_to_sandbox_info_with_starting_runtime(self, remote_sandbox_servic runtime_data = create_runtime_data(status='running', pod_status='pending') # Execute - sandbox_info = await remote_sandbox_service._to_sandbox_info( + sandbox_info = remote_sandbox_service._to_sandbox_info( stored_sandbox, runtime_data ) @@ -369,23 +367,6 @@ async def test_to_sandbox_info_with_starting_runtime(self, remote_sandbox_servic assert sandbox_info.session_api_key == 'test-session-key' assert sandbox_info.exposed_urls is None - @pytest.mark.asyncio - async def test_to_sandbox_info_without_runtime(self, remote_sandbox_service): - """Test conversion to SandboxInfo without runtime data.""" - # Setup - stored_sandbox = create_stored_sandbox() - remote_sandbox_service._get_runtime = AsyncMock( - side_effect=Exception('Runtime not found') - ) - - # Execute - sandbox_info = await remote_sandbox_service._to_sandbox_info(stored_sandbox) - - # Verify - assert sandbox_info.status == SandboxStatus.MISSING - assert sandbox_info.session_api_key is None - assert sandbox_info.exposed_urls is None - @pytest.mark.asyncio async def test_to_sandbox_info_loads_runtime_when_none_provided( self, remote_sandbox_service @@ -393,15 +374,12 @@ async def test_to_sandbox_info_loads_runtime_when_none_provided( """Test that runtime data is loaded when not provided.""" # Setup stored_sandbox = create_stored_sandbox() - runtime_data = create_runtime_data() - remote_sandbox_service._get_runtime = AsyncMock(return_value=runtime_data) # Execute - sandbox_info = await remote_sandbox_service._to_sandbox_info(stored_sandbox) + sandbox_info = remote_sandbox_service._to_sandbox_info(stored_sandbox, None) # Verify - remote_sandbox_service._get_runtime.assert_called_once_with('test-sandbox-123') - assert sandbox_info.status == SandboxStatus.RUNNING + assert sandbox_info.status == SandboxStatus.MISSING class TestSandboxLifecycle: @@ -679,15 +657,18 @@ async def test_search_sandboxes_basic(self, remote_sandbox_service): mock_result = MagicMock() mock_result.scalars.return_value = mock_scalars remote_sandbox_service.db_session.execute = AsyncMock(return_value=mock_result) - remote_sandbox_service._to_sandbox_info = AsyncMock( - side_effect=lambda stored: SandboxInfo( - id=stored.id, - created_by_user_id=stored.created_by_user_id, - sandbox_spec_id=stored.sandbox_spec_id, - status=SandboxStatus.RUNNING, - session_api_key='test-key', - created_at=stored.created_at, - ) + + # Mock the batch endpoint response + mock_batch_response = MagicMock() + mock_batch_response.raise_for_status.return_value = None + mock_batch_response.json.return_value = { + 'runtimes': [ + 
create_runtime_data('sb1'), + create_runtime_data('sb2'), + ] + } + remote_sandbox_service.httpx_client.request = AsyncMock( + return_value=mock_batch_response ) # Execute @@ -699,6 +680,14 @@ async def test_search_sandboxes_basic(self, remote_sandbox_service): assert result.items[0].id == 'sb1' assert result.items[1].id == 'sb2' + # Verify that the batch endpoint was called + remote_sandbox_service.httpx_client.request.assert_called_once_with( + 'GET', + 'https://api.example.com/sessions/batch', + headers={'X-API-Key': 'test-api-key'}, + params=[('ids', 'sb1'), ('ids', 'sb2')], + ) + @pytest.mark.asyncio async def test_search_sandboxes_with_pagination(self, remote_sandbox_service): """Test sandbox search with pagination.""" @@ -712,15 +701,15 @@ async def test_search_sandboxes_with_pagination(self, remote_sandbox_service): mock_result = MagicMock() mock_result.scalars.return_value = mock_scalars remote_sandbox_service.db_session.execute = AsyncMock(return_value=mock_result) - remote_sandbox_service._to_sandbox_info = AsyncMock( - side_effect=lambda stored: SandboxInfo( - id=stored.id, - created_by_user_id=stored.created_by_user_id, - sandbox_spec_id=stored.sandbox_spec_id, - status=SandboxStatus.RUNNING, - session_api_key='test-key', - created_at=stored.created_at, - ) + + # Mock the batch endpoint response + mock_batch_response = MagicMock() + mock_batch_response.raise_for_status.return_value = None + mock_batch_response.json.return_value = { + 'runtimes': [create_runtime_data(f'sb{i}') for i in range(6)] + } + remote_sandbox_service.httpx_client.request = AsyncMock( + return_value=mock_batch_response ) # Execute @@ -741,15 +730,15 @@ async def test_search_sandboxes_with_page_id(self, remote_sandbox_service): mock_result = MagicMock() mock_result.scalars.return_value = mock_scalars remote_sandbox_service.db_session.execute = AsyncMock(return_value=mock_result) - remote_sandbox_service._to_sandbox_info = AsyncMock( - side_effect=lambda stored: SandboxInfo( - id=stored.id, - created_by_user_id=stored.created_by_user_id, - sandbox_spec_id=stored.sandbox_spec_id, - status=SandboxStatus.RUNNING, - session_api_key='test-key', - created_at=stored.created_at, - ) + + # Mock the batch endpoint response + mock_batch_response = MagicMock() + mock_batch_response.raise_for_status.return_value = None + mock_batch_response.json.return_value = { + 'runtimes': [create_runtime_data('sb1')] + } + remote_sandbox_service.httpx_client.request = AsyncMock( + return_value=mock_batch_response ) # Execute @@ -759,6 +748,76 @@ async def test_search_sandboxes_with_page_id(self, remote_sandbox_service): # Note: We can't easily verify the exact SQL query, but we can verify the method was called remote_sandbox_service.db_session.execute.assert_called_once() + @pytest.mark.asyncio + async def test_get_runtimes_batch_success(self, remote_sandbox_service): + """Test successful batch runtime retrieval.""" + # Setup + sandbox_ids = ['sb1', 'sb2', 'sb3'] + mock_response = MagicMock() + mock_response.raise_for_status.return_value = None + mock_response.json.return_value = [ + create_runtime_data('sb1'), + create_runtime_data('sb2'), + create_runtime_data('sb3'), + ] + remote_sandbox_service.httpx_client.request = AsyncMock( + return_value=mock_response + ) + + # Execute + result = await remote_sandbox_service._get_runtimes_batch(sandbox_ids) + + # Verify + assert len(result) == 3 + assert 'sb1' in result + assert 'sb2' in result + assert 'sb3' in result + assert result['sb1']['session_id'] == 'sb1' + + # Verify the correct 
API call was made + remote_sandbox_service.httpx_client.request.assert_called_once_with( + 'GET', + 'https://api.example.com/sessions/batch', + headers={'X-API-Key': 'test-api-key'}, + params=[('ids', 'sb1'), ('ids', 'sb2'), ('ids', 'sb3')], + ) + + @pytest.mark.asyncio + async def test_get_runtimes_batch_empty_list(self, remote_sandbox_service): + """Test batch runtime retrieval with empty sandbox list.""" + # Execute + result = await remote_sandbox_service._get_runtimes_batch([]) + + # Verify + assert result == {} + # Verify no API call was made + remote_sandbox_service.httpx_client.request.assert_not_called() + + @pytest.mark.asyncio + async def test_get_runtimes_batch_partial_results(self, remote_sandbox_service): + """Test batch runtime retrieval with partial results (some sandboxes not found).""" + # Setup + sandbox_ids = ['sb1', 'sb2', 'sb3'] + mock_response = MagicMock() + mock_response.raise_for_status.return_value = None + mock_response.json.return_value = [ + create_runtime_data('sb1'), + create_runtime_data('sb3'), + # sb2 is missing from the response + ] + remote_sandbox_service.httpx_client.request = AsyncMock( + return_value=mock_response + ) + + # Execute + result = await remote_sandbox_service._get_runtimes_batch(sandbox_ids) + + # Verify + assert len(result) == 2 + assert 'sb1' in result + assert 'sb2' not in result # Missing from response + assert 'sb3' in result + @pytest.mark.asyncio async def test_get_sandbox_exists(self, remote_sandbox_service): """Test getting an existing sandbox.""" @@ -767,7 +826,7 @@ async def test_get_sandbox_exists(self, remote_sandbox_service): remote_sandbox_service._get_stored_sandbox = AsyncMock( return_value=stored_sandbox ) - remote_sandbox_service._to_sandbox_info = AsyncMock( + remote_sandbox_service._to_sandbox_info = MagicMock( return_value=SandboxInfo( id='test-sandbox-123', created_by_user_id='test-user-123', diff --git a/tests/unit/app_server/test_sandbox_service.py b/tests/unit/app_server/test_sandbox_service.py index 9a651318217e..f3eea1d2eaf6 100644 --- a/tests/unit/app_server/test_sandbox_service.py +++ b/tests/unit/app_server/test_sandbox_service.py @@ -27,6 +27,7 @@ class MockSandboxService(SandboxService): def __init__(self): self.search_sandboxes_mock = AsyncMock() self.get_sandbox_mock = AsyncMock() + self.get_sandbox_by_session_api_key_mock = AsyncMock() self.start_sandbox_mock = AsyncMock() self.resume_sandbox_mock = AsyncMock() self.pause_sandbox_mock = AsyncMock() @@ -40,6 +41,11 @@ async def search_sandboxes( async def get_sandbox(self, sandbox_id: str) -> SandboxInfo | None: return await self.get_sandbox_mock(sandbox_id) + async def get_sandbox_by_session_api_key( + self, session_api_key: str + ) -> SandboxInfo | None: + return await self.get_sandbox_by_session_api_key_mock(session_api_key) + async def start_sandbox(self, sandbox_spec_id: str | None = None) -> SandboxInfo: return await self.start_sandbox_mock(sandbox_spec_id) diff --git a/tests/unit/app_server/test_skill_loader.py b/tests/unit/app_server/test_skill_loader.py index c9e54ba5a1a5..e4daadfa1485 100644 --- a/tests/unit/app_server/test_skill_loader.py +++ b/tests/unit/app_server/test_skill_loader.py @@ -11,15 +11,27 @@ import pytest from openhands.app_server.app_conversation.skill_loader import ( + _cleanup_org_repository, + _clone_org_repository, + _determine_org_repo_path, _determine_repo_root, _find_and_load_global_skill_files, _find_and_load_skill_md_files, + _get_org_repository_url, + _is_azure_devops_repository, + _is_gitlab_repository, + 
_load_skills_from_org_directories, _load_special_files, + _merge_org_skills_with_precedence, _read_file_from_workspace, + _validate_repository_for_org_skills, load_global_skills, + load_org_skills, load_repo_skills, merge_skills, ) +from openhands.integrations.provider import ProviderType +from openhands.integrations.service_types import AuthenticationError # ===== Test Fixtures ===== @@ -667,6 +679,669 @@ def test_merge_skills_mixed_empty_and_filled(self): assert len(result) == 2 +# ===== Tests for Organization Skills Functions ===== + + +class TestIsGitlabRepository: + """Test _is_gitlab_repository helper function.""" + + @pytest.mark.asyncio + async def test_is_gitlab_repository_true(self): + """Test GitLab repository detection returns True.""" + # Arrange + mock_user_context = AsyncMock() + mock_provider_handler = AsyncMock() + mock_repository = Mock() + mock_repository.git_provider = ProviderType.GITLAB + + mock_user_context.get_provider_handler.return_value = mock_provider_handler + mock_provider_handler.verify_repo_provider.return_value = mock_repository + + # Act + result = await _is_gitlab_repository('owner/repo', mock_user_context) + + # Assert + assert result is True + mock_provider_handler.verify_repo_provider.assert_called_once_with('owner/repo') + + @pytest.mark.asyncio + async def test_is_gitlab_repository_false(self): + """Test non-GitLab repository detection returns False.""" + # Arrange + mock_user_context = AsyncMock() + mock_provider_handler = AsyncMock() + mock_repository = Mock() + mock_repository.git_provider = ProviderType.GITHUB + + mock_user_context.get_provider_handler.return_value = mock_provider_handler + mock_provider_handler.verify_repo_provider.return_value = mock_repository + + # Act + result = await _is_gitlab_repository('owner/repo', mock_user_context) + + # Assert + assert result is False + + @pytest.mark.asyncio + async def test_is_gitlab_repository_exception_handling(self): + """Test exception handling returns False.""" + # Arrange + mock_user_context = AsyncMock() + mock_user_context.get_provider_handler.side_effect = Exception('API error') + + # Act + result = await _is_gitlab_repository('owner/repo', mock_user_context) + + # Assert + assert result is False + + +class TestIsAzureDevOpsRepository: + """Test _is_azure_devops_repository helper function.""" + + @pytest.mark.asyncio + async def test_is_azure_devops_repository_true(self): + """Test Azure DevOps repository detection returns True.""" + # Arrange + mock_user_context = AsyncMock() + mock_provider_handler = AsyncMock() + mock_repository = Mock() + mock_repository.git_provider = ProviderType.AZURE_DEVOPS + + mock_user_context.get_provider_handler.return_value = mock_provider_handler + mock_provider_handler.verify_repo_provider.return_value = mock_repository + + # Act + result = await _is_azure_devops_repository( + 'org/project/repo', mock_user_context + ) + + # Assert + assert result is True + mock_provider_handler.verify_repo_provider.assert_called_once_with( + 'org/project/repo' + ) + + @pytest.mark.asyncio + async def test_is_azure_devops_repository_false(self): + """Test non-Azure DevOps repository detection returns False.""" + # Arrange + mock_user_context = AsyncMock() + mock_provider_handler = AsyncMock() + mock_repository = Mock() + mock_repository.git_provider = ProviderType.GITHUB + + mock_user_context.get_provider_handler.return_value = mock_provider_handler + mock_provider_handler.verify_repo_provider.return_value = mock_repository + + # Act + result = await 
_is_azure_devops_repository('owner/repo', mock_user_context) + + # Assert + assert result is False + + @pytest.mark.asyncio + async def test_is_azure_devops_repository_exception_handling(self): + """Test exception handling returns False.""" + # Arrange + mock_user_context = AsyncMock() + mock_user_context.get_provider_handler.side_effect = Exception('Network error') + + # Act + result = await _is_azure_devops_repository('owner/repo', mock_user_context) + + # Assert + assert result is False + + +class TestDetermineOrgRepoPath: + """Test _determine_org_repo_path helper function.""" + + @pytest.mark.asyncio + @patch('openhands.app_server.app_conversation.skill_loader._is_gitlab_repository') + @patch( + 'openhands.app_server.app_conversation.skill_loader._is_azure_devops_repository' + ) + async def test_github_repository_path(self, mock_is_azure, mock_is_gitlab): + """Test org path for GitHub repository.""" + # Arrange + mock_user_context = AsyncMock() + mock_is_gitlab.return_value = False + mock_is_azure.return_value = False + + # Act + org_repo, org_name = await _determine_org_repo_path( + 'owner/repo', mock_user_context + ) + + # Assert + assert org_repo == 'owner/.openhands' + assert org_name == 'owner' + + @pytest.mark.asyncio + @patch('openhands.app_server.app_conversation.skill_loader._is_gitlab_repository') + @patch( + 'openhands.app_server.app_conversation.skill_loader._is_azure_devops_repository' + ) + async def test_gitlab_repository_path(self, mock_is_azure, mock_is_gitlab): + """Test org path for GitLab repository.""" + # Arrange + mock_user_context = AsyncMock() + mock_is_gitlab.return_value = True + mock_is_azure.return_value = False + + # Act + org_repo, org_name = await _determine_org_repo_path( + 'owner/repo', mock_user_context + ) + + # Assert + assert org_repo == 'owner/openhands-config' + assert org_name == 'owner' + + @pytest.mark.asyncio + @patch('openhands.app_server.app_conversation.skill_loader._is_gitlab_repository') + @patch( + 'openhands.app_server.app_conversation.skill_loader._is_azure_devops_repository' + ) + async def test_azure_devops_repository_path(self, mock_is_azure, mock_is_gitlab): + """Test org path for Azure DevOps repository.""" + # Arrange + mock_user_context = AsyncMock() + mock_is_gitlab.return_value = False + mock_is_azure.return_value = True + + # Act + org_repo, org_name = await _determine_org_repo_path( + 'org/project/repo', mock_user_context + ) + + # Assert + assert org_repo == 'org/openhands-config/openhands-config' + assert org_name == 'org' + + +class TestValidateRepositoryForOrgSkills: + """Test _validate_repository_for_org_skills helper function.""" + + def test_valid_repository_two_parts(self): + """Test validation passes for repository with two parts.""" + # Act + result = _validate_repository_for_org_skills('owner/repo') + + # Assert + assert result is True + + def test_valid_repository_three_parts(self): + """Test validation passes for repository with three parts (Azure DevOps).""" + # Act + result = _validate_repository_for_org_skills('org/project/repo') + + # Assert + assert result is True + + def test_invalid_repository_one_part(self): + """Test validation fails for repository with only one part.""" + # Act + result = _validate_repository_for_org_skills('repo') + + # Assert + assert result is False + + def test_invalid_repository_empty_string(self): + """Test validation fails for empty string.""" + # Act + result = _validate_repository_for_org_skills('') + + # Assert + assert result is False + + +class TestGetOrgRepositoryUrl: + 
"""Test _get_org_repository_url helper function.""" + + @pytest.mark.asyncio + async def test_successful_url_retrieval(self): + """Test successfully retrieving authenticated URL.""" + # Arrange + mock_user_context = AsyncMock() + expected_url = 'https://token@github.com/owner/.openhands.git' + mock_user_context.get_authenticated_git_url.return_value = expected_url + + # Act + result = await _get_org_repository_url('owner/.openhands', mock_user_context) + + # Assert + assert result == expected_url + mock_user_context.get_authenticated_git_url.assert_called_once_with( + 'owner/.openhands' + ) + + @pytest.mark.asyncio + async def test_authentication_error(self): + """Test handling of authentication error returns None.""" + # Arrange + mock_user_context = AsyncMock() + mock_user_context.get_authenticated_git_url.side_effect = AuthenticationError( + 'Not found' + ) + + # Act + result = await _get_org_repository_url('owner/.openhands', mock_user_context) + + # Assert + assert result is None + + @pytest.mark.asyncio + async def test_general_exception(self): + """Test handling of general exception returns None.""" + # Arrange + mock_user_context = AsyncMock() + mock_user_context.get_authenticated_git_url.side_effect = Exception( + 'Network error' + ) + + # Act + result = await _get_org_repository_url('owner/.openhands', mock_user_context) + + # Assert + assert result is None + + +class TestCloneOrgRepository: + """Test _clone_org_repository helper function.""" + + @pytest.mark.asyncio + async def test_successful_clone(self, mock_async_remote_workspace): + """Test successful repository clone.""" + # Arrange + result_obj = Mock() + result_obj.exit_code = 0 + mock_async_remote_workspace.execute_command.return_value = result_obj + + # Act + success = await _clone_org_repository( + mock_async_remote_workspace, + 'https://github.com/owner/.openhands.git', + '/workspace/_org_openhands_owner', + '/workspace', + 'owner/.openhands', + ) + + # Assert + assert success is True + mock_async_remote_workspace.execute_command.assert_called_once() + call_args = mock_async_remote_workspace.execute_command.call_args + assert 'git clone' in call_args[0][0] + assert '--depth 1' in call_args[0][0] + + @pytest.mark.asyncio + async def test_failed_clone(self, mock_async_remote_workspace): + """Test failed repository clone.""" + # Arrange + result_obj = Mock() + result_obj.exit_code = 1 + result_obj.stderr = 'Repository not found' + mock_async_remote_workspace.execute_command.return_value = result_obj + + # Act + success = await _clone_org_repository( + mock_async_remote_workspace, + 'https://github.com/owner/.openhands.git', + '/workspace/_org_openhands_owner', + '/workspace', + 'owner/.openhands', + ) + + # Assert + assert success is False + + +class TestLoadSkillsFromOrgDirectories: + """Test _load_skills_from_org_directories helper function.""" + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.skill_loader._find_and_load_skill_md_files' + ) + async def test_load_from_both_directories( + self, mock_find_and_load, mock_async_remote_workspace, mock_skills_list + ): + """Test loading skills from both skills/ and microagents/ directories.""" + # Arrange + skills_dir_skills = [mock_skills_list[0]] + microagents_dir_skills = [mock_skills_list[1], mock_skills_list[2]] + mock_find_and_load.side_effect = [skills_dir_skills, microagents_dir_skills] + + # Act + result_skills, result_microagents = await _load_skills_from_org_directories( + mock_async_remote_workspace, '/workspace/_org_openhands_owner', 
'/workspace' + ) + + # Assert + assert result_skills == skills_dir_skills + assert result_microagents == microagents_dir_skills + assert mock_find_and_load.call_count == 2 + + # Verify correct directories were checked + first_call = mock_find_and_load.call_args_list[0] + second_call = mock_find_and_load.call_args_list[1] + assert '/skills' in first_call[0][1] + assert '/microagents' in second_call[0][1] + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.skill_loader._find_and_load_skill_md_files' + ) + async def test_load_with_empty_directories( + self, mock_find_and_load, mock_async_remote_workspace + ): + """Test loading when both directories are empty.""" + # Arrange + mock_find_and_load.side_effect = [[], []] + + # Act + result_skills, result_microagents = await _load_skills_from_org_directories( + mock_async_remote_workspace, '/workspace/_org_openhands_owner', '/workspace' + ) + + # Assert + assert result_skills == [] + assert result_microagents == [] + + +class TestMergeOrgSkillsWithPrecedence: + """Test _merge_org_skills_with_precedence helper function.""" + + def test_merge_no_duplicates(self, mock_skills_list): + """Test merging skills with no name conflicts.""" + # Arrange + skills_dir_skills = [mock_skills_list[0]] + microagents_dir_skills = [mock_skills_list[1], mock_skills_list[2]] + + # Act + result = _merge_org_skills_with_precedence( + skills_dir_skills, microagents_dir_skills + ) + + # Assert + assert len(result) == 3 + names = {s.name for s in result} + assert names == {'skill_0', 'skill_1', 'skill_2'} + + def test_merge_with_duplicate_skills_dir_wins(self): + """Test skills/ directory takes precedence over microagents/.""" + # Arrange + skill_from_microagents = Mock() + skill_from_microagents.name = 'common_skill' + skill_from_microagents.source = 'microagents' + + skill_from_skills = Mock() + skill_from_skills.name = 'common_skill' + skill_from_skills.source = 'skills' + + # Act + result = _merge_org_skills_with_precedence( + [skill_from_skills], [skill_from_microagents] + ) + + # Assert + assert len(result) == 1 + assert result[0].source == 'skills' + + def test_merge_with_empty_lists(self): + """Test merging with empty skill lists.""" + # Act + result = _merge_org_skills_with_precedence([], []) + + # Assert + assert result == [] + + def test_merge_with_only_skills_dir(self, mock_skills_list): + """Test merging with only skills/ directory populated.""" + # Act + result = _merge_org_skills_with_precedence([mock_skills_list[0]], []) + + # Assert + assert len(result) == 1 + assert result[0] == mock_skills_list[0] + + def test_merge_with_only_microagents_dir(self, mock_skills_list): + """Test merging with only microagents/ directory populated.""" + # Act + result = _merge_org_skills_with_precedence([], [mock_skills_list[0]]) + + # Assert + assert len(result) == 1 + assert result[0] == mock_skills_list[0] + + +class TestCleanupOrgRepository: + """Test _cleanup_org_repository helper function.""" + + @pytest.mark.asyncio + async def test_cleanup_successful(self, mock_async_remote_workspace): + """Test successful cleanup of org repository directory.""" + # Arrange + result_obj = Mock() + result_obj.exit_code = 0 + mock_async_remote_workspace.execute_command.return_value = result_obj + + # Act + await _cleanup_org_repository( + mock_async_remote_workspace, + '/workspace/_org_openhands_owner', + '/workspace', + ) + + # Assert + mock_async_remote_workspace.execute_command.assert_called_once() + call_args = 
mock_async_remote_workspace.execute_command.call_args + assert 'rm -rf' in call_args[0][0] + assert '/workspace/_org_openhands_owner' in call_args[0][0] + + +class TestLoadOrgSkills: + """Test load_org_skills main function.""" + + @pytest.mark.asyncio + async def test_load_org_skills_no_selected_repository( + self, mock_async_remote_workspace + ): + """Test load_org_skills returns empty list when no repository selected.""" + # Arrange + mock_user_context = AsyncMock() + + # Act + result = await load_org_skills( + mock_async_remote_workspace, None, '/workspace', mock_user_context + ) + + # Assert + assert result == [] + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.skill_loader._validate_repository_for_org_skills' + ) + async def test_load_org_skills_invalid_repository( + self, mock_validate, mock_async_remote_workspace + ): + """Test load_org_skills returns empty list for invalid repository.""" + # Arrange + mock_validate.return_value = False + mock_user_context = AsyncMock() + + # Act + result = await load_org_skills( + mock_async_remote_workspace, 'invalid', '/workspace', mock_user_context + ) + + # Assert + assert result == [] + mock_validate.assert_called_once_with('invalid') + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.skill_loader._validate_repository_for_org_skills' + ) + @patch( + 'openhands.app_server.app_conversation.skill_loader._determine_org_repo_path' + ) + @patch('openhands.app_server.app_conversation.skill_loader._get_org_repository_url') + async def test_load_org_skills_no_url_available( + self, + mock_get_url, + mock_determine_path, + mock_validate, + mock_async_remote_workspace, + ): + """Test load_org_skills returns empty list when URL cannot be retrieved.""" + # Arrange + mock_validate.return_value = True + mock_determine_path.return_value = ('owner/.openhands', 'owner') + mock_get_url.return_value = None + mock_user_context = AsyncMock() + + # Act + result = await load_org_skills( + mock_async_remote_workspace, + 'owner/repo', + '/workspace', + mock_user_context, + ) + + # Assert + assert result == [] + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.skill_loader._validate_repository_for_org_skills' + ) + @patch( + 'openhands.app_server.app_conversation.skill_loader._determine_org_repo_path' + ) + @patch('openhands.app_server.app_conversation.skill_loader._get_org_repository_url') + @patch('openhands.app_server.app_conversation.skill_loader._clone_org_repository') + async def test_load_org_skills_clone_fails( + self, + mock_clone, + mock_get_url, + mock_determine_path, + mock_validate, + mock_async_remote_workspace, + ): + """Test load_org_skills returns empty list when clone fails.""" + # Arrange + mock_validate.return_value = True + mock_determine_path.return_value = ('owner/.openhands', 'owner') + mock_get_url.return_value = 'https://github.com/owner/.openhands.git' + mock_clone.return_value = False + mock_user_context = AsyncMock() + + # Act + result = await load_org_skills( + mock_async_remote_workspace, + 'owner/repo', + '/workspace', + mock_user_context, + ) + + # Assert + assert result == [] + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.skill_loader._validate_repository_for_org_skills' + ) + @patch( + 'openhands.app_server.app_conversation.skill_loader._determine_org_repo_path' + ) + @patch('openhands.app_server.app_conversation.skill_loader._get_org_repository_url') + 
@patch('openhands.app_server.app_conversation.skill_loader._clone_org_repository') + @patch( + 'openhands.app_server.app_conversation.skill_loader._load_skills_from_org_directories' + ) + @patch('openhands.app_server.app_conversation.skill_loader._cleanup_org_repository') + async def test_load_org_skills_success( + self, + mock_cleanup, + mock_load_skills, + mock_clone, + mock_get_url, + mock_determine_path, + mock_validate, + mock_async_remote_workspace, + mock_skills_list, + ): + """Test successful org skills loading.""" + # Arrange + mock_validate.return_value = True + mock_determine_path.return_value = ('owner/.openhands', 'owner') + mock_get_url.return_value = 'https://github.com/owner/.openhands.git' + mock_clone.return_value = True + mock_load_skills.return_value = ([mock_skills_list[0]], [mock_skills_list[1]]) + mock_user_context = AsyncMock() + + # Act + result = await load_org_skills( + mock_async_remote_workspace, + 'owner/repo', + '/workspace', + mock_user_context, + ) + + # Assert + assert len(result) == 2 + mock_cleanup.assert_called_once() + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.skill_loader._validate_repository_for_org_skills' + ) + async def test_load_org_skills_handles_authentication_error( + self, mock_validate, mock_async_remote_workspace + ): + """Test load_org_skills handles AuthenticationError gracefully.""" + # Arrange + mock_validate.side_effect = AuthenticationError('Auth failed') + mock_user_context = AsyncMock() + + # Act + result = await load_org_skills( + mock_async_remote_workspace, + 'owner/repo', + '/workspace', + mock_user_context, + ) + + # Assert + assert result == [] + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.skill_loader._validate_repository_for_org_skills' + ) + async def test_load_org_skills_handles_general_exception( + self, mock_validate, mock_async_remote_workspace + ): + """Test load_org_skills handles general exceptions gracefully.""" + # Arrange + mock_validate.side_effect = Exception('Unexpected error') + mock_user_context = AsyncMock() + + # Act + result = await load_org_skills( + mock_async_remote_workspace, + 'owner/repo', + '/workspace', + mock_user_context, + ) + + # Assert + assert result == [] + + # ===== Integration Tests ===== @@ -754,3 +1429,110 @@ async def test_loading_with_override_precedence( # Should have only one skill with repo source (highest precedence) assert len(all_skills) == 1 assert all_skills[0].source == 'repo' + + @pytest.mark.asyncio + @patch('openhands.app_server.app_conversation.skill_loader.load_global_skills') + @patch('openhands.sdk.context.skills.load_user_skills') + @patch('openhands.app_server.app_conversation.skill_loader.load_org_skills') + @patch('openhands.app_server.app_conversation.skill_loader.load_repo_skills') + async def test_loading_with_org_skills_precedence( + self, + mock_load_repo, + mock_load_org, + mock_load_user, + mock_load_global, + mock_async_remote_workspace, + ): + """Test that org skills fit correctly in precedence order.""" + # Arrange + # Create skills with same name but different sources + global_skill = Mock() + global_skill.name = 'shared_skill' + global_skill.priority = 'low' + + user_skill = Mock() + user_skill.name = 'shared_skill' + user_skill.priority = 'medium' + + org_skill = Mock() + org_skill.name = 'shared_skill' + org_skill.priority = 'high' + + repo_skill = Mock() + repo_skill.name = 'shared_skill' + repo_skill.priority = 'highest' + + mock_load_global.return_value = [global_skill] + 
mock_load_user.return_value = [user_skill] + mock_load_org.return_value = [org_skill] + mock_load_repo.return_value = [repo_skill] + + mock_user_context = AsyncMock() + + # Act + global_skills = mock_load_global() + user_skills = mock_load_user() + org_skills = await mock_load_org( + mock_async_remote_workspace, 'owner/repo', '/workspace', mock_user_context + ) + repo_skills = await mock_load_repo( + mock_async_remote_workspace, 'owner/repo', '/workspace' + ) + + # Merge with correct precedence: global < user < org < repo + all_skills = merge_skills([global_skills, user_skills, org_skills, repo_skills]) + + # Assert + assert len(all_skills) == 1 + assert all_skills[0].priority == 'highest' # Repo has highest precedence + + @pytest.mark.asyncio + @patch('openhands.app_server.app_conversation.skill_loader.load_global_skills') + @patch('openhands.sdk.context.skills.load_user_skills') + @patch('openhands.app_server.app_conversation.skill_loader.load_org_skills') + @patch('openhands.app_server.app_conversation.skill_loader.load_repo_skills') + async def test_loading_org_skills_with_unique_names( + self, + mock_load_repo, + mock_load_org, + mock_load_user, + mock_load_global, + mock_async_remote_workspace, + ): + """Test loading org skills with unique names alongside other sources.""" + # Arrange + global_skill = Mock() + global_skill.name = 'global_skill' + + user_skill = Mock() + user_skill.name = 'user_skill' + + org_skill = Mock() + org_skill.name = 'org_skill' + + repo_skill = Mock() + repo_skill.name = 'repo_skill' + + mock_load_global.return_value = [global_skill] + mock_load_user.return_value = [user_skill] + mock_load_org.return_value = [org_skill] + mock_load_repo.return_value = [repo_skill] + + mock_user_context = AsyncMock() + + # Act + global_skills = mock_load_global() + user_skills = mock_load_user() + org_skills = await mock_load_org( + mock_async_remote_workspace, 'owner/repo', '/workspace', mock_user_context + ) + repo_skills = await mock_load_repo( + mock_async_remote_workspace, 'owner/repo', '/workspace' + ) + + all_skills = merge_skills([global_skills, user_skills, org_skills, repo_skills]) + + # Assert + assert len(all_skills) == 4 + names = {s.name for s in all_skills} + assert names == {'global_skill', 'user_skill', 'org_skill', 'repo_skill'} diff --git a/tests/unit/app_server/test_webhook_router_stats.py b/tests/unit/app_server/test_webhook_router_stats.py new file mode 100644 index 000000000000..ba5664a196b7 --- /dev/null +++ b/tests/unit/app_server/test_webhook_router_stats.py @@ -0,0 +1,615 @@ +"""Tests for stats event processing in webhook_router. + +This module tests the stats event processing functionality introduced for +updating conversation statistics from ConversationStateUpdateEvent events. 
+""" + +from datetime import datetime, timezone +from typing import AsyncGenerator +from unittest.mock import AsyncMock, MagicMock, patch +from uuid import uuid4 + +import pytest +from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine +from sqlalchemy.pool import StaticPool + +from openhands.app_server.app_conversation.app_conversation_models import ( + AppConversationInfo, +) +from openhands.app_server.app_conversation.sql_app_conversation_info_service import ( + SQLAppConversationInfoService, + StoredConversationMetadata, +) +from openhands.app_server.user.specifiy_user_context import SpecifyUserContext +from openhands.app_server.utils.sql_utils import Base +from openhands.sdk.conversation.conversation_stats import ConversationStats +from openhands.sdk.event import ConversationStateUpdateEvent +from openhands.sdk.llm.utils.metrics import Metrics, TokenUsage + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + + +@pytest.fixture +async def async_engine(): + """Create an async SQLite engine for testing.""" + engine = create_async_engine( + 'sqlite+aiosqlite:///:memory:', + poolclass=StaticPool, + connect_args={'check_same_thread': False}, + echo=False, + ) + + # Create all tables + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + yield engine + + await engine.dispose() + + +@pytest.fixture +async def async_session(async_engine) -> AsyncGenerator[AsyncSession, None]: + """Create an async session for testing.""" + async_session_maker = async_sessionmaker( + async_engine, class_=AsyncSession, expire_on_commit=False + ) + + async with async_session_maker() as db_session: + yield db_session + + +@pytest.fixture +def service(async_session) -> SQLAppConversationInfoService: + """Create a SQLAppConversationInfoService instance for testing.""" + return SQLAppConversationInfoService( + db_session=async_session, user_context=SpecifyUserContext(user_id=None) + ) + + +@pytest.fixture +async def v1_conversation_metadata(async_session, service): + """Create a V1 conversation metadata record for testing.""" + conversation_id = uuid4() + stored = StoredConversationMetadata( + conversation_id=str(conversation_id), + user_id='test_user_123', + sandbox_id='sandbox_123', + conversation_version='V1', + title='Test Conversation', + accumulated_cost=0.0, + prompt_tokens=0, + completion_tokens=0, + cache_read_tokens=0, + cache_write_tokens=0, + reasoning_tokens=0, + context_window=0, + per_turn_token=0, + created_at=datetime.now(timezone.utc), + last_updated_at=datetime.now(timezone.utc), + ) + async_session.add(stored) + await async_session.commit() + return conversation_id, stored + + +@pytest.fixture +def stats_event_with_dict_value(): + """Create a ConversationStateUpdateEvent with dict value.""" + event_value = { + 'usage_to_metrics': { + 'agent': { + 'accumulated_cost': 0.03411525, + 'max_budget_per_task': None, + 'accumulated_token_usage': { + 'prompt_tokens': 8770, + 'completion_tokens': 82, + 'cache_read_tokens': 0, + 'cache_write_tokens': 8767, + 'reasoning_tokens': 0, + 'context_window': 0, + 'per_turn_token': 8852, + }, + }, + 'condenser': { + 'accumulated_cost': 0.0, + 'accumulated_token_usage': { + 'prompt_tokens': 0, + 'completion_tokens': 0, + }, + }, + } + } + return ConversationStateUpdateEvent(key='stats', value=event_value) + + +@pytest.fixture +def stats_event_with_object_value(): + """Create a 
ConversationStateUpdateEvent with object value.""" + event_value = MagicMock() + event_value.usage_to_metrics = { + 'agent': { + 'accumulated_cost': 0.05, + 'accumulated_token_usage': { + 'prompt_tokens': 1000, + 'completion_tokens': 100, + }, + } + } + return ConversationStateUpdateEvent(key='stats', value=event_value) + + +@pytest.fixture +def stats_event_no_usage_to_metrics(): + """Create a ConversationStateUpdateEvent without usage_to_metrics.""" + event_value = {'some_other_key': 'value'} + return ConversationStateUpdateEvent(key='stats', value=event_value) + + +# --------------------------------------------------------------------------- +# Tests for update_conversation_statistics +# --------------------------------------------------------------------------- + + +class TestUpdateConversationStatistics: + """Test the update_conversation_statistics method.""" + + @pytest.mark.asyncio + async def test_update_statistics_success( + self, service, async_session, v1_conversation_metadata + ): + """Test successfully updating conversation statistics.""" + conversation_id, stored = v1_conversation_metadata + + agent_metrics = Metrics( + model_name='test-model', + accumulated_cost=0.03411525, + max_budget_per_task=10.0, + accumulated_token_usage=TokenUsage( + model='test-model', + prompt_tokens=8770, + completion_tokens=82, + cache_read_tokens=0, + cache_write_tokens=8767, + reasoning_tokens=0, + context_window=0, + per_turn_token=8852, + ), + ) + stats = ConversationStats(usage_to_metrics={'agent': agent_metrics}) + + await service.update_conversation_statistics(conversation_id, stats) + + # Verify the update + await async_session.refresh(stored) + assert stored.accumulated_cost == 0.03411525 + assert stored.max_budget_per_task == 10.0 + assert stored.prompt_tokens == 8770 + assert stored.completion_tokens == 82 + assert stored.cache_read_tokens == 0 + assert stored.cache_write_tokens == 8767 + assert stored.reasoning_tokens == 0 + assert stored.context_window == 0 + assert stored.per_turn_token == 8852 + assert stored.last_updated_at is not None + + @pytest.mark.asyncio + async def test_update_statistics_partial_update( + self, service, async_session, v1_conversation_metadata + ): + """Test updating only some statistics fields.""" + conversation_id, stored = v1_conversation_metadata + + # Set initial values + stored.accumulated_cost = 0.01 + stored.prompt_tokens = 100 + await async_session.commit() + + agent_metrics = Metrics( + model_name='test-model', + accumulated_cost=0.05, + accumulated_token_usage=TokenUsage( + model='test-model', + prompt_tokens=200, + completion_tokens=0, # Default value + ), + ) + stats = ConversationStats(usage_to_metrics={'agent': agent_metrics}) + + await service.update_conversation_statistics(conversation_id, stats) + + # Verify updated fields + await async_session.refresh(stored) + assert stored.accumulated_cost == 0.05 + assert stored.prompt_tokens == 200 + # completion_tokens should remain unchanged (not None in stats) + assert stored.completion_tokens == 0 + + @pytest.mark.asyncio + async def test_update_statistics_no_agent_metrics( + self, service, v1_conversation_metadata + ): + """Test that update is skipped when no agent metrics are present.""" + conversation_id, stored = v1_conversation_metadata + original_cost = stored.accumulated_cost + + condenser_metrics = Metrics( + model_name='test-model', + accumulated_cost=0.1, + ) + stats = ConversationStats(usage_to_metrics={'condenser': condenser_metrics}) + + await 
service.update_conversation_statistics(conversation_id, stats) + + # Verify no update occurred + assert stored.accumulated_cost == original_cost + + @pytest.mark.asyncio + async def test_update_statistics_conversation_not_found(self, service): + """Test that update is skipped when conversation doesn't exist.""" + nonexistent_id = uuid4() + agent_metrics = Metrics( + model_name='test-model', + accumulated_cost=0.1, + ) + stats = ConversationStats(usage_to_metrics={'agent': agent_metrics}) + + # Should not raise an exception + await service.update_conversation_statistics(nonexistent_id, stats) + + @pytest.mark.asyncio + async def test_update_statistics_v0_conversation_skipped( + self, service, async_session + ): + """Test that V0 conversations are skipped.""" + conversation_id = uuid4() + stored = StoredConversationMetadata( + conversation_id=str(conversation_id), + user_id='test_user_123', + sandbox_id='sandbox_123', + conversation_version='V0', # V0 conversation + title='V0 Conversation', + accumulated_cost=0.0, + created_at=datetime.now(timezone.utc), + last_updated_at=datetime.now(timezone.utc), + ) + async_session.add(stored) + await async_session.commit() + + original_cost = stored.accumulated_cost + + agent_metrics = Metrics( + model_name='test-model', + accumulated_cost=0.1, + ) + stats = ConversationStats(usage_to_metrics={'agent': agent_metrics}) + + await service.update_conversation_statistics(conversation_id, stats) + + # Verify no update occurred + await async_session.refresh(stored) + assert stored.accumulated_cost == original_cost + + @pytest.mark.asyncio + async def test_update_statistics_with_none_values( + self, service, async_session, v1_conversation_metadata + ): + """Test that None values in stats don't overwrite existing values.""" + conversation_id, stored = v1_conversation_metadata + + # Set initial values + stored.accumulated_cost = 0.01 + stored.max_budget_per_task = 5.0 + stored.prompt_tokens = 100 + await async_session.commit() + + agent_metrics = Metrics( + model_name='test-model', + accumulated_cost=0.05, + max_budget_per_task=None, # None value + accumulated_token_usage=TokenUsage( + model='test-model', + prompt_tokens=200, + completion_tokens=0, # Default value (None is not valid for int) + ), + ) + stats = ConversationStats(usage_to_metrics={'agent': agent_metrics}) + + await service.update_conversation_statistics(conversation_id, stats) + + # Verify updated fields and that None values didn't overwrite + await async_session.refresh(stored) + assert stored.accumulated_cost == 0.05 + assert stored.max_budget_per_task == 5.0 # Should remain unchanged + assert stored.prompt_tokens == 200 + assert ( + stored.completion_tokens == 0 + ) # Should remain unchanged (was 0, None doesn't update) + + +# --------------------------------------------------------------------------- +# Tests for process_stats_event +# --------------------------------------------------------------------------- + + +class TestProcessStatsEvent: + """Test the process_stats_event method.""" + + @pytest.mark.asyncio + async def test_process_stats_event_with_dict_value( + self, + service, + async_session, + stats_event_with_dict_value, + v1_conversation_metadata, + ): + """Test processing stats event with dict value.""" + conversation_id, stored = v1_conversation_metadata + + await service.process_stats_event(stats_event_with_dict_value, conversation_id) + + # Verify the update occurred + await async_session.refresh(stored) + assert stored.accumulated_cost == 0.03411525 + assert stored.prompt_tokens 
== 8770 + assert stored.completion_tokens == 82 + + @pytest.mark.asyncio + async def test_process_stats_event_with_object_value( + self, + service, + async_session, + stats_event_with_object_value, + v1_conversation_metadata, + ): + """Test processing stats event with object value.""" + conversation_id, stored = v1_conversation_metadata + + await service.process_stats_event( + stats_event_with_object_value, conversation_id + ) + + # Verify the update occurred + await async_session.refresh(stored) + assert stored.accumulated_cost == 0.05 + assert stored.prompt_tokens == 1000 + assert stored.completion_tokens == 100 + + @pytest.mark.asyncio + async def test_process_stats_event_no_usage_to_metrics( + self, + service, + async_session, + stats_event_no_usage_to_metrics, + v1_conversation_metadata, + ): + """Test processing stats event without usage_to_metrics.""" + conversation_id, stored = v1_conversation_metadata + original_cost = stored.accumulated_cost + + await service.process_stats_event( + stats_event_no_usage_to_metrics, conversation_id + ) + + # Verify update_conversation_statistics was NOT called + await async_session.refresh(stored) + assert stored.accumulated_cost == original_cost + + @pytest.mark.asyncio + async def test_process_stats_event_service_error_handled( + self, service, stats_event_with_dict_value + ): + """Test that errors from service are caught and logged.""" + conversation_id = uuid4() + + # Should not raise an exception + with ( + patch.object( + service, + 'update_conversation_statistics', + side_effect=Exception('Database error'), + ), + patch( + 'openhands.app_server.app_conversation.sql_app_conversation_info_service.logger' + ) as mock_logger, + ): + await service.process_stats_event( + stats_event_with_dict_value, conversation_id + ) + + # Verify error was logged + mock_logger.exception.assert_called_once() + + @pytest.mark.asyncio + async def test_process_stats_event_empty_usage_to_metrics( + self, service, async_session, v1_conversation_metadata + ): + """Test processing stats event with empty usage_to_metrics.""" + conversation_id, stored = v1_conversation_metadata + original_cost = stored.accumulated_cost + + # Create event with empty usage_to_metrics + event = ConversationStateUpdateEvent( + key='stats', value={'usage_to_metrics': {}} + ) + + await service.process_stats_event(event, conversation_id) + + # Empty dict is falsy, so update_conversation_statistics should NOT be called + await async_session.refresh(stored) + assert stored.accumulated_cost == original_cost + + +# --------------------------------------------------------------------------- +# Integration tests for on_event endpoint +# --------------------------------------------------------------------------- + + +class TestOnEventStatsProcessing: + """Test stats event processing in the on_event endpoint.""" + + @pytest.mark.asyncio + async def test_on_event_processes_stats_events(self): + """Test that on_event processes stats events.""" + from openhands.app_server.event_callback.webhook_router import on_event + from openhands.app_server.sandbox.sandbox_models import ( + SandboxInfo, + SandboxStatus, + ) + + conversation_id = uuid4() + sandbox_id = 'sandbox_123' + + # Create stats event + stats_event = ConversationStateUpdateEvent( + key='stats', + value={ + 'usage_to_metrics': { + 'agent': { + 'accumulated_cost': 0.1, + 'accumulated_token_usage': { + 'prompt_tokens': 1000, + }, + } + } + }, + ) + + # Create non-stats event + other_event = ConversationStateUpdateEvent( + key='execution_status', 
value='running' + ) + + events = [stats_event, other_event] + + # Mock dependencies + mock_sandbox = SandboxInfo( + id=sandbox_id, + status=SandboxStatus.RUNNING, + session_api_key='test_key', + created_by_user_id='user_123', + sandbox_spec_id='spec_123', + ) + + mock_app_conversation_info = AppConversationInfo( + id=conversation_id, + sandbox_id=sandbox_id, + created_by_user_id='user_123', + ) + + mock_event_service = AsyncMock() + mock_app_conversation_info_service = AsyncMock() + mock_app_conversation_info_service.get_app_conversation_info.return_value = ( + mock_app_conversation_info + ) + + # Set up process_stats_event to call update_conversation_statistics + async def process_stats_event_side_effect(event, conversation_id): + # Simulate what process_stats_event does - call update_conversation_statistics + from openhands.sdk.conversation.conversation_stats import ConversationStats + + if isinstance(event.value, dict): + stats = ConversationStats.model_validate(event.value) + if stats and stats.usage_to_metrics: + await mock_app_conversation_info_service.update_conversation_statistics( + conversation_id, stats + ) + + mock_app_conversation_info_service.process_stats_event.side_effect = ( + process_stats_event_side_effect + ) + + with ( + patch( + 'openhands.app_server.event_callback.webhook_router.valid_sandbox', + return_value=mock_sandbox, + ), + patch( + 'openhands.app_server.event_callback.webhook_router.valid_conversation', + return_value=mock_app_conversation_info, + ), + patch( + 'openhands.app_server.event_callback.webhook_router._run_callbacks_in_bg_and_close' + ) as mock_callbacks, + ): + await on_event( + events=events, + conversation_id=conversation_id, + sandbox_info=mock_sandbox, + app_conversation_info_service=mock_app_conversation_info_service, + event_service=mock_event_service, + ) + + # Verify events were saved + assert mock_event_service.save_event.call_count == 2 + + # Verify stats event was processed + mock_app_conversation_info_service.update_conversation_statistics.assert_called_once() + + # Verify callbacks were scheduled + mock_callbacks.assert_called_once() + + @pytest.mark.asyncio + async def test_on_event_skips_non_stats_events(self): + """Test that on_event skips non-stats events.""" + from openhands.app_server.event_callback.webhook_router import on_event + from openhands.app_server.sandbox.sandbox_models import ( + SandboxInfo, + SandboxStatus, + ) + from openhands.events.action.message import MessageAction + + conversation_id = uuid4() + sandbox_id = 'sandbox_123' + + # Create non-stats events + events = [ + ConversationStateUpdateEvent(key='execution_status', value='running'), + MessageAction(content='test'), + ] + + mock_sandbox = SandboxInfo( + id=sandbox_id, + status=SandboxStatus.RUNNING, + session_api_key='test_key', + created_by_user_id='user_123', + sandbox_spec_id='spec_123', + ) + + mock_app_conversation_info = AppConversationInfo( + id=conversation_id, + sandbox_id=sandbox_id, + created_by_user_id='user_123', + ) + + mock_event_service = AsyncMock() + mock_app_conversation_info_service = AsyncMock() + mock_app_conversation_info_service.get_app_conversation_info.return_value = ( + mock_app_conversation_info + ) + + with ( + patch( + 'openhands.app_server.event_callback.webhook_router.valid_sandbox', + return_value=mock_sandbox, + ), + patch( + 'openhands.app_server.event_callback.webhook_router.valid_conversation', + return_value=mock_app_conversation_info, + ), + patch( + 
'openhands.app_server.event_callback.webhook_router._run_callbacks_in_bg_and_close' + ), + ): + await on_event( + events=events, + conversation_id=conversation_id, + sandbox_info=mock_sandbox, + app_conversation_info_service=mock_app_conversation_info_service, + event_service=mock_event_service, + ) + + # Verify stats update was NOT called + mock_app_conversation_info_service.update_conversation_statistics.assert_not_called() diff --git a/tests/unit/controller/test_agent_controller.py b/tests/unit/controller/test_agent_controller.py index da12ee8f9e4e..3da87ecfdf36 100644 --- a/tests/unit/controller/test_agent_controller.py +++ b/tests/unit/controller/test_agent_controller.py @@ -24,6 +24,7 @@ from openhands.events import Event, EventSource, EventStream, EventStreamSubscriber from openhands.events.action import ChangeAgentStateAction, CmdRunAction, MessageAction from openhands.events.action.agent import CondensationAction, RecallAction +from openhands.events.action.empty import NullAction from openhands.events.action.message import SystemMessageAction from openhands.events.event import RecallType from openhands.events.observation import ( @@ -299,6 +300,64 @@ async def test_react_to_content_policy_violation( await controller.close() +@pytest.mark.asyncio +async def test_tool_call_validation_error_handling( + mock_agent_with_stats, + test_event_stream, +): + """Test that tool call validation errors from Groq are handled as recoverable errors.""" + mock_agent, conversation_stats, llm_registry = mock_agent_with_stats + + controller = AgentController( + agent=mock_agent, + event_stream=test_event_stream, + conversation_stats=conversation_stats, + iteration_delta=10, + sid='test', + confirmation_mode=False, + headless_mode=True, + ) + + controller.state.agent_state = AgentState.RUNNING + + # Track call count to only raise error on first call + # This prevents a feedback loop where ErrorObservation triggers another step + # which raises the same error again (since the mock always raises) + call_count = 0 + + def mock_step(state): + nonlocal call_count + call_count += 1 + if call_count == 1: + raise BadRequestError( + message='litellm.BadRequestError: GroqException - {"error":{"message":"tool call validation failed: parameters for tool str_replace_editor did not match schema: errors: [missing properties: \'path\']","type":"invalid_request_error","code":"tool_use_failed"}}', + model='groq/llama3-8b-8192', + llm_provider='groq', + ) + # Return NullAction on subsequent calls to break the feedback loop + return NullAction() + + mock_agent.step = mock_step + + # Call _step which should handle the tool validation error + await controller._step() + + # Verify that the agent state is still RUNNING (not ERROR) + assert controller.state.agent_state == AgentState.RUNNING + + # Verify that an ErrorObservation was added to the event stream + events = list(test_event_stream.get_events()) + error_observations = [e for e in events if isinstance(e, ErrorObservation)] + assert len(error_observations) == 1 + + error_obs = error_observations[0] + assert 'tool call validation failed' in error_obs.content + assert 'missing properties' in error_obs.content + assert 'path' in error_obs.content + + await controller.close() + + @pytest.mark.asyncio async def test_run_controller_with_fatal_error( test_event_stream, mock_memory, mock_agent_with_stats diff --git a/tests/unit/controller/test_agent_controller_posthog.py b/tests/unit/controller/test_agent_controller_posthog.py deleted file mode 100644 index 
630c18e3aa53..000000000000 --- a/tests/unit/controller/test_agent_controller_posthog.py +++ /dev/null @@ -1,243 +0,0 @@ -"""Integration tests for PostHog tracking in AgentController.""" - -import asyncio -from unittest.mock import MagicMock, patch - -import pytest - -from openhands.controller.agent import Agent -from openhands.controller.agent_controller import AgentController -from openhands.core.config import OpenHandsConfig -from openhands.core.config.agent_config import AgentConfig -from openhands.core.config.llm_config import LLMConfig -from openhands.core.schema import AgentState -from openhands.events import EventSource, EventStream -from openhands.events.action.message import SystemMessageAction -from openhands.llm.llm_registry import LLMRegistry -from openhands.server.services.conversation_stats import ConversationStats -from openhands.storage.memory import InMemoryFileStore - - -@pytest.fixture(scope='function') -def event_loop(): - """Create event loop for async tests.""" - loop = asyncio.get_event_loop_policy().new_event_loop() - yield loop - loop.close() - - -@pytest.fixture -def mock_agent_with_stats(): - """Create a mock agent with properly connected LLM registry and conversation stats.""" - import uuid - - # Create LLM registry - config = OpenHandsConfig() - llm_registry = LLMRegistry(config=config) - - # Create conversation stats - file_store = InMemoryFileStore({}) - conversation_id = f'test-conversation-{uuid.uuid4()}' - conversation_stats = ConversationStats( - file_store=file_store, conversation_id=conversation_id, user_id='test-user' - ) - - # Connect registry to stats - llm_registry.subscribe(conversation_stats.register_llm) - - # Create mock agent - agent = MagicMock(spec=Agent) - agent_config = MagicMock(spec=AgentConfig) - llm_config = LLMConfig( - model='gpt-4o', - api_key='test_key', - num_retries=2, - retry_min_wait=1, - retry_max_wait=2, - ) - agent_config.disabled_microagents = [] - agent_config.enable_mcp = True - agent_config.enable_stuck_detection = True - llm_registry.service_to_llm.clear() - mock_llm = llm_registry.get_llm('agent_llm', llm_config) - agent.llm = mock_llm - agent.name = 'test-agent' - agent.sandbox_plugins = [] - agent.config = agent_config - agent.llm_registry = llm_registry - agent.prompt_manager = MagicMock() - - # Add a proper system message mock - system_message = SystemMessageAction( - content='Test system message', tools=['test_tool'] - ) - system_message._source = EventSource.AGENT - system_message._id = -1 # Set invalid ID to avoid the ID check - agent.get_system_message.return_value = system_message - - return agent, conversation_stats, llm_registry - - -@pytest.fixture -def mock_event_stream(): - """Create a mock event stream.""" - mock = MagicMock( - spec=EventStream, - event_stream=EventStream(sid='test', file_store=InMemoryFileStore({})), - ) - mock.get_latest_event_id.return_value = 0 - return mock - - -@pytest.mark.asyncio -async def test_agent_finish_triggers_posthog_tracking( - mock_agent_with_stats, mock_event_stream -): - """Test that setting agent state to FINISHED triggers PostHog tracking.""" - mock_agent, conversation_stats, llm_registry = mock_agent_with_stats - - controller = AgentController( - agent=mock_agent, - event_stream=mock_event_stream, - conversation_stats=conversation_stats, - iteration_delta=10, - sid='test-conversation-123', - user_id='test-user-456', - confirmation_mode=False, - headless_mode=True, - ) - - with ( - patch('openhands.utils.posthog_tracker.posthog') as mock_posthog, - 
patch('os.environ.get') as mock_env_get, - ): - # Setup mocks - mock_posthog.capture = MagicMock() - mock_env_get.return_value = 'saas' - - # Initialize posthog in the tracker module - import openhands.utils.posthog_tracker as tracker - - tracker.posthog = mock_posthog - - # Set agent state to FINISHED - await controller.set_agent_state_to(AgentState.FINISHED) - - # Verify PostHog tracking was called - mock_posthog.capture.assert_called_once() - call_args = mock_posthog.capture.call_args - - assert call_args[1]['distinct_id'] == 'test-user-456' - assert call_args[1]['event'] == 'agent_task_completed' - assert 'conversation_id' in call_args[1]['properties'] - assert call_args[1]['properties']['user_id'] == 'test-user-456' - assert call_args[1]['properties']['app_mode'] == 'saas' - - await controller.close() - - -@pytest.mark.asyncio -async def test_agent_finish_without_user_id(mock_agent_with_stats, mock_event_stream): - """Test tracking when user_id is None.""" - mock_agent, conversation_stats, llm_registry = mock_agent_with_stats - - controller = AgentController( - agent=mock_agent, - event_stream=mock_event_stream, - conversation_stats=conversation_stats, - iteration_delta=10, - sid='test-conversation-789', - user_id=None, - confirmation_mode=False, - headless_mode=True, - ) - - with ( - patch('openhands.utils.posthog_tracker.posthog') as mock_posthog, - patch('os.environ.get') as mock_env_get, - ): - mock_posthog.capture = MagicMock() - mock_env_get.return_value = 'oss' - - import openhands.utils.posthog_tracker as tracker - - tracker.posthog = mock_posthog - - await controller.set_agent_state_to(AgentState.FINISHED) - - mock_posthog.capture.assert_called_once() - call_args = mock_posthog.capture.call_args - - # When user_id is None, distinct_id should be conversation_id - assert call_args[1]['distinct_id'].startswith('conversation_') - assert call_args[1]['properties']['user_id'] is None - - await controller.close() - - -@pytest.mark.asyncio -async def test_other_states_dont_trigger_tracking( - mock_agent_with_stats, mock_event_stream -): - """Test that non-FINISHED states don't trigger tracking.""" - mock_agent, conversation_stats, llm_registry = mock_agent_with_stats - - controller = AgentController( - agent=mock_agent, - event_stream=mock_event_stream, - conversation_stats=conversation_stats, - iteration_delta=10, - sid='test-conversation-999', - confirmation_mode=False, - headless_mode=True, - ) - - with patch('openhands.utils.posthog_tracker.posthog') as mock_posthog: - mock_posthog.capture = MagicMock() - - import openhands.utils.posthog_tracker as tracker - - tracker.posthog = mock_posthog - - # Try different states - await controller.set_agent_state_to(AgentState.RUNNING) - await controller.set_agent_state_to(AgentState.PAUSED) - await controller.set_agent_state_to(AgentState.STOPPED) - - # PostHog should not be called for non-FINISHED states - mock_posthog.capture.assert_not_called() - - await controller.close() - - -@pytest.mark.asyncio -async def test_tracking_error_doesnt_break_agent( - mock_agent_with_stats, mock_event_stream -): - """Test that tracking errors don't interrupt agent operation.""" - mock_agent, conversation_stats, llm_registry = mock_agent_with_stats - - controller = AgentController( - agent=mock_agent, - event_stream=mock_event_stream, - conversation_stats=conversation_stats, - iteration_delta=10, - sid='test-conversation-error', - confirmation_mode=False, - headless_mode=True, - ) - - with patch('openhands.utils.posthog_tracker.posthog') as mock_posthog: - 
mock_posthog.capture = MagicMock(side_effect=Exception('PostHog error')) - - import openhands.utils.posthog_tracker as tracker - - tracker.posthog = mock_posthog - - # Should not raise an exception - await controller.set_agent_state_to(AgentState.FINISHED) - - # Agent state should still be FINISHED despite tracking error - assert controller.state.agent_state == AgentState.FINISHED - - await controller.close() diff --git a/tests/unit/experiments/test_experiment_manager.py b/tests/unit/experiments/test_experiment_manager.py index e891b9893a36..c389423cf597 100644 --- a/tests/unit/experiments/test_experiment_manager.py +++ b/tests/unit/experiments/test_experiment_manager.py @@ -126,11 +126,8 @@ async def test_experiment_manager_called_with_correct_parameters_in_context__noo self, ): """ - Use the real LiveStatusAppConversationService to build a StartConversationRequest, - and verify ExperimentManagerImpl.run_agent_variant_tests__v1: - - is called exactly once with the (user_id, generated conversation_id, agent) - - returns the *same* agent instance (no copy/mutation) - - does not tweak agent fields (LLM, system prompt, etc.) + Test that ExperimentManagerImpl.run_agent_variant_tests__v1 is called with correct parameters + and returns the same agent instance (no copy/mutation) when building a StartConversationRequest. """ # --- Arrange: fixed UUID to assert call parameters deterministically fixed_conversation_id = UUID('00000000-0000-0000-0000-000000000001') @@ -143,6 +140,7 @@ async def test_experiment_manager_called_with_correct_parameters_in_context__noo mock_agent = Mock(spec=Agent) mock_agent.llm = mock_llm mock_agent.system_prompt_filename = 'default_system_prompt.j2' + mock_agent.model_copy = Mock(return_value=mock_agent) # Minimal, real-ish user context used by the service class DummyUserContext: @@ -154,6 +152,8 @@ async def get_user_info(self): llm_base_url=None, llm_api_key=None, confirmation_mode=False, + condenser_max_size=None, + security_analyzer=None, ) async def get_secrets(self): @@ -189,6 +189,7 @@ async def get_user_id(self): sandbox_startup_poll_frequency=1, httpx_client=httpx_client, web_url=None, + openhands_provider_base_url=None, access_token_hard_timeout=None, ) @@ -202,24 +203,56 @@ async def get_user_id(self): # Patch the pieces invoked by the service with ( - patch( - 'openhands.app_server.app_conversation.live_status_app_conversation_service.get_default_agent', + patch.object( + service, + '_setup_secrets_for_git_providers', + return_value={}, + ), + patch.object( + service, + '_configure_llm_and_mcp', + return_value=(mock_llm, {}), + ), + patch.object( + service, + '_create_agent_with_context', + return_value=mock_agent, + ), + patch.object( + service, + '_load_skills_and_update_agent', return_value=mock_agent, ), patch( 'openhands.app_server.app_conversation.live_status_app_conversation_service.uuid4', return_value=fixed_conversation_id, ), + patch( + 'openhands.app_server.app_conversation.live_status_app_conversation_service.ExperimentManagerImpl' + ) as mock_experiment_manager, ): + # Configure the experiment manager mock to return the same agent + mock_experiment_manager.run_agent_variant_tests__v1.return_value = ( + mock_agent + ) + # --- Act: build the start request start_req = await service._build_start_conversation_request_for_user( sandbox=sandbox, initial_message=None, + system_message_suffix=None, # No additional system message suffix git_provider=None, # Keep secrets path simple working_dir='/tmp/project', # Arbitrary path ) - # The agent in the 
StartConversationRequest is the *same* object we provided + # --- Assert: verify experiment manager was called with correct parameters + mock_experiment_manager.run_agent_variant_tests__v1.assert_called_once_with( + 'test_user_123', # user_id + fixed_conversation_id, # conversation_id + mock_agent, # agent (after model_copy with agent_context) + ) + + # The agent in the StartConversationRequest is the *same* object returned by experiment manager assert start_req.agent is mock_agent # No tweaks to agent fields by the experiment manager (noop) diff --git a/tests/unit/llm/test_llm.py b/tests/unit/llm/test_llm.py index dfdb4e05b475..b04425e631da 100644 --- a/tests/unit/llm/test_llm.py +++ b/tests/unit/llm/test_llm.py @@ -1255,6 +1255,25 @@ def test_opus_41_keeps_temperature_top_p(mock_completion): assert 'top_p' not in call_kwargs +@patch('openhands.llm.llm.litellm_completion') +def test_opus_45_keeps_temperature_drops_top_p(mock_completion): + mock_completion.return_value = { + 'choices': [{'message': {'content': 'ok'}}], + } + config = LLMConfig( + model='anthropic/claude-opus-4-5-20251101', + api_key='k', + temperature=0.7, + top_p=0.9, + ) + llm = LLM(config, service_id='svc') + llm.completion(messages=[{'role': 'user', 'content': 'hi'}]) + call_kwargs = mock_completion.call_args[1] + assert call_kwargs.get('temperature') == 0.7 + # Anthropic rejects both temperature and top_p together on Opus 4.5; we keep temperature and drop top_p + assert 'top_p' not in call_kwargs + + @patch('openhands.llm.llm.litellm_completion') def test_sonnet_4_keeps_temperature_drops_top_p(mock_completion): mock_completion.return_value = { diff --git a/tests/unit/llm/test_llm_fncall_converter.py b/tests/unit/llm/test_llm_fncall_converter.py index ff4b7961efe6..b4270f89b023 100644 --- a/tests/unit/llm/test_llm_fncall_converter.py +++ b/tests/unit/llm/test_llm_fncall_converter.py @@ -701,6 +701,8 @@ def test_get_example_for_tools_all_tools(): """, ), # Test case with indented code block to verify indentation is preserved + # Note: multiline parameter values should NOT have extra newlines before/after + # to prevent newline accumulation across multiple LLM response cycles ( [ { @@ -716,16 +718,12 @@ def test_get_example_for_tools_all_tools(): """ str_replace /test/file.py - -def example(): - pass - - -def example(): +def example(): + pass +def example(): # This is indented print("hello") - return True - + return True """, ), # Test case with list parameter value diff --git a/tests/unit/memory/test_conversation_memory.py b/tests/unit/memory/test_conversation_memory.py index abaa8d9a3d17..50fd48f49a35 100644 --- a/tests/unit/memory/test_conversation_memory.py +++ b/tests/unit/memory/test_conversation_memory.py @@ -158,7 +158,8 @@ def test_ensure_initial_user_message_adds_if_only_system( system_message = SystemMessageAction(content='System') system_message._source = EventSource.AGENT events = [system_message] - conversation_memory._ensure_initial_user_message(events, initial_user_action) + # Pass empty set for forgotten_event_ids (no events have been condensed) + conversation_memory._ensure_initial_user_message(events, initial_user_action, set()) assert len(events) == 2 assert events[0] == system_message assert events[1] == initial_user_action @@ -177,7 +178,8 @@ def test_ensure_initial_user_message_correct_already_present( agent_message, ] original_events = list(events) - conversation_memory._ensure_initial_user_message(events, initial_user_action) + # Pass empty set for forgotten_event_ids (no events have been 
condensed) + conversation_memory._ensure_initial_user_message(events, initial_user_action, set()) assert events == original_events @@ -189,7 +191,8 @@ def test_ensure_initial_user_message_incorrect_at_index_1( incorrect_second_message = MessageAction(content='Assistant') incorrect_second_message._source = EventSource.AGENT events = [system_message, incorrect_second_message] - conversation_memory._ensure_initial_user_message(events, initial_user_action) + # Pass empty set for forgotten_event_ids (no events have been condensed) + conversation_memory._ensure_initial_user_message(events, initial_user_action, set()) assert len(events) == 3 assert events[0] == system_message assert events[1] == initial_user_action # Correct one inserted @@ -206,7 +209,8 @@ def test_ensure_initial_user_message_correct_present_later( # Correct initial message is present, but later in the list events = [system_message, incorrect_second_message] conversation_memory._ensure_system_message(events) - conversation_memory._ensure_initial_user_message(events, initial_user_action) + # Pass empty set for forgotten_event_ids (no events have been condensed) + conversation_memory._ensure_initial_user_message(events, initial_user_action, set()) assert len(events) == 3 # Should still insert at index 1, not remove the later one assert events[0] == system_message assert events[1] == initial_user_action # Correct one inserted at index 1 @@ -222,7 +226,8 @@ def test_ensure_initial_user_message_different_user_msg_at_index_1( different_user_message = MessageAction(content='Different User Message') different_user_message._source = EventSource.USER events = [system_message, different_user_message] - conversation_memory._ensure_initial_user_message(events, initial_user_action) + # Pass empty set for forgotten_event_ids (no events have been condensed) + conversation_memory._ensure_initial_user_message(events, initial_user_action, set()) assert len(events) == 2 assert events[0] == system_message assert events[1] == different_user_message # Original second message remains @@ -1583,3 +1588,132 @@ def test_process_ipython_observation_with_vision_disabled( assert isinstance(message.content[1], ImageContent) # Check that NO explanatory text about filtered images was added when vision is disabled assert 'invalid or empty image(s) were filtered' not in message.content[0].text + + +def test_ensure_initial_user_message_not_reinserted_when_condensed( + conversation_memory, initial_user_action +): + """Test that initial user message is NOT re-inserted when it has been condensed. + + This is a critical test for bug #11910: Old instructions should not be re-executed + after conversation condensation. If the initial user message has been condensed + (its ID is in the forgotten_event_ids set), we should NOT re-insert it to prevent + the LLM from seeing old instructions as fresh commands. 
+ """ + system_message = SystemMessageAction(content='System') + system_message._source = EventSource.AGENT + + # Simulate that the initial_user_action has been condensed by adding its ID + # to the forgotten_event_ids set + initial_user_action._id = 1 # Assign an ID to the initial user action + forgotten_event_ids = {1} # The initial user action's ID is in the forgotten set + + events = [system_message] # Only system message, no user message + + # Call _ensure_initial_user_message with the condensed event ID + conversation_memory._ensure_initial_user_message( + events, initial_user_action, forgotten_event_ids + ) + + # The initial user action should NOT be inserted because it was condensed + assert len(events) == 1 + assert events[0] == system_message + # Verify the initial user action was NOT added + assert initial_user_action not in events + + +def test_ensure_initial_user_message_reinserted_when_not_condensed( + conversation_memory, initial_user_action +): + """Test that initial user message IS re-inserted when it has NOT been condensed. + + This ensures backward compatibility: when no condensation has happened, + the initial user message should still be inserted as before. + """ + system_message = SystemMessageAction(content='System') + system_message._source = EventSource.AGENT + + # The initial user action has NOT been condensed + initial_user_action._id = 1 + forgotten_event_ids = {5, 10, 15} # Different IDs, not including the initial action + + events = [system_message] + + # Call _ensure_initial_user_message with non-matching forgotten IDs + conversation_memory._ensure_initial_user_message( + events, initial_user_action, forgotten_event_ids + ) + + # The initial user action SHOULD be inserted because it was NOT condensed + assert len(events) == 2 + assert events[0] == system_message + assert events[1] == initial_user_action + + +def test_process_events_does_not_reinsert_condensed_initial_message( + conversation_memory, +): + """Test that process_events does not re-insert initial user message when condensed. + + This is an integration test for the full process_events flow, verifying that + when the initial user message has been condensed, it is not re-inserted into + the conversation sent to the LLM. + """ + # Create a system message + system_message = SystemMessageAction(content='System message') + system_message._source = EventSource.AGENT + system_message._id = 0 + + # Create the initial user message (will be marked as condensed) + initial_user_message = MessageAction(content='Do task A, B, and C') + initial_user_message._source = EventSource.USER + initial_user_message._id = 1 + + # Create a condensation summary observation + from openhands.events.observation.agent import AgentCondensationObservation + + condensation_summary = AgentCondensationObservation( + content='Summary: User requested tasks A, B, C. Task A was completed successfully.' 
+ ) + condensation_summary._id = 2 + + # Create a recent user message (not condensed) + recent_user_message = MessageAction(content='Now continue with task D') + recent_user_message._source = EventSource.USER + recent_user_message._id = 3 + + # Simulate condensed history: system + summary + recent message + # The initial user message (id=1) has been condensed/forgotten + condensed_history = [system_message, condensation_summary, recent_user_message] + + # The initial user message's ID is in the forgotten set + forgotten_event_ids = {1} + + messages = conversation_memory.process_events( + condensed_history=condensed_history, + initial_user_action=initial_user_message, + forgotten_event_ids=forgotten_event_ids, + max_message_chars=None, + vision_is_active=False, + ) + + # Verify the structure of messages + # Should have: system, condensation summary, recent user message + # Should NOT have the initial user message "Do task A, B, and C" + assert len(messages) == 3 + assert messages[0].role == 'system' + assert messages[0].content[0].text == 'System message' + + # The second message should be the condensation summary, NOT the initial user message + assert messages[1].role == 'user' + assert 'Summary: User requested tasks A, B, C' in messages[1].content[0].text + + # The third message should be the recent user message + assert messages[2].role == 'user' + assert 'Now continue with task D' in messages[2].content[0].text + + # Critically, the old instruction should NOT appear + for msg in messages: + for content in msg.content: + if hasattr(content, 'text'): + assert 'Do task A, B, and C' not in content.text diff --git a/tests/unit/server/data_models/test_conversation.py b/tests/unit/server/data_models/test_conversation.py index 98cf9b09ea98..79ff91fa7fd8 100644 --- a/tests/unit/server/data_models/test_conversation.py +++ b/tests/unit/server/data_models/test_conversation.py @@ -15,6 +15,9 @@ AppConversation, AppConversationPage, ) +from openhands.app_server.app_conversation.app_conversation_router import ( + read_conversation_file, +) from openhands.app_server.app_conversation.live_status_app_conversation_service import ( LiveStatusAppConversationService, ) @@ -27,6 +30,7 @@ SandboxInfo, SandboxStatus, ) +from openhands.app_server.sandbox.sandbox_spec_models import SandboxSpecInfo from openhands.app_server.user.user_context import UserContext from openhands.integrations.service_types import ( AuthenticationError, @@ -37,6 +41,10 @@ ) from openhands.runtime.runtime_status import RuntimeStatus from openhands.sdk.conversation.state import ConversationExecutionStatus +from openhands.sdk.workspace.models import FileOperationResult +from openhands.sdk.workspace.remote.async_remote_workspace import ( + AsyncRemoteWorkspace, +) from openhands.server.data_models.conversation_info import ConversationInfo from openhands.server.data_models.conversation_info_result_set import ( ConversationInfoResultSet, @@ -980,14 +988,6 @@ async def test_delete_conversation(): @pytest.mark.asyncio async def test_delete_v1_conversation_success(): """Test successful deletion of a V1 conversation.""" - from uuid import uuid4 - - from openhands.app_server.app_conversation.app_conversation_models import ( - AppConversation, - ) - from openhands.app_server.sandbox.sandbox_models import SandboxStatus - from openhands.sdk.conversation.state import ConversationExecutionStatus - conversation_uuid = uuid4() conversation_id = str(conversation_uuid) @@ -1060,8 +1060,6 @@ async def test_delete_v1_conversation_success(): 
@pytest.mark.asyncio async def test_delete_v1_conversation_not_found(): """Test deletion of a V1 conversation that doesn't exist.""" - from uuid import uuid4 - conversation_uuid = uuid4() conversation_id = str(conversation_uuid) @@ -1198,8 +1196,6 @@ async def test_delete_v1_conversation_invalid_uuid(): @pytest.mark.asyncio async def test_delete_v1_conversation_service_error(): """Test deletion when app conversation service raises an error.""" - from uuid import uuid4 - conversation_uuid = uuid4() conversation_id = str(conversation_uuid) @@ -1293,14 +1289,6 @@ async def test_delete_v1_conversation_service_error(): @pytest.mark.asyncio async def test_delete_v1_conversation_with_agent_server(): """Test V1 conversation deletion with agent server integration.""" - from uuid import uuid4 - - from openhands.app_server.app_conversation.app_conversation_models import ( - AppConversation, - ) - from openhands.app_server.sandbox.sandbox_models import SandboxStatus - from openhands.sdk.conversation.state import ConversationExecutionStatus - conversation_uuid = uuid4() conversation_id = str(conversation_uuid) @@ -2178,6 +2166,7 @@ async def mock_get_app_conversation(conv_id): sandbox_startup_poll_frequency=2, httpx_client=mock_httpx_client, web_url=None, + openhands_provider_base_url=None, access_token_hard_timeout=None, ) @@ -2299,6 +2288,7 @@ async def test_delete_v1_conversation_with_no_sub_conversations(): sandbox_startup_poll_frequency=2, httpx_client=mock_httpx_client, web_url=None, + openhands_provider_base_url=None, access_token_hard_timeout=None, ) @@ -2450,6 +2440,7 @@ def mock_delete_info(conv_id: uuid.UUID): sandbox_startup_poll_frequency=2, httpx_client=mock_httpx_client, web_url=None, + openhands_provider_base_url=None, access_token_hard_timeout=None, ) @@ -2475,3 +2466,919 @@ def mock_delete_info(conv_id: uuid.UUID): assert sub2_uuid in delete_calls assert parent_uuid in delete_calls assert sub1_uuid not in delete_calls # Failed before deletion + + +@pytest.mark.asyncio +async def test_read_conversation_file_success(): + """Test successfully retrieving file content from conversation workspace.""" + conversation_id = uuid4() + file_path = '/workspace/project/PLAN.md' + file_content = '# Project Plan\n\n## Phase 1\n- Task 1\n- Task 2\n' + + # Mock conversation + mock_conversation = AppConversation( + id=conversation_id, + created_by_user_id='test_user', + sandbox_id='test-sandbox-id', + title='Test Conversation', + sandbox_status=SandboxStatus.RUNNING, + execution_status=ConversationExecutionStatus.RUNNING, + session_api_key='test-api-key', + selected_repository='test/repo', + selected_branch='main', + git_provider=ProviderType.GITHUB, + trigger=ConversationTrigger.GUI, + created_at=datetime.now(timezone.utc), + updated_at=datetime.now(timezone.utc), + ) + + # Mock sandbox + mock_sandbox = SandboxInfo( + id='test-sandbox-id', + created_by_user_id='test_user', + sandbox_spec_id='test-spec-id', + status=SandboxStatus.RUNNING, + session_api_key='test-api-key', + exposed_urls=[ + ExposedUrl(name=AGENT_SERVER, url='http://agent:8000', port=8000) + ], + ) + + # Mock sandbox spec + mock_sandbox_spec = SandboxSpecInfo( + id='test-spec-id', + command=None, + working_dir='/workspace', + created_at=datetime.now(timezone.utc), + ) + + # Mock services + mock_app_conversation_service = MagicMock() + mock_app_conversation_service.get_app_conversation = AsyncMock( + return_value=mock_conversation + ) + + mock_sandbox_service = MagicMock() + mock_sandbox_service.get_sandbox = 
AsyncMock(return_value=mock_sandbox) + + mock_sandbox_spec_service = MagicMock() + mock_sandbox_spec_service.get_sandbox_spec = AsyncMock( + return_value=mock_sandbox_spec + ) + + # Mock tempfile and file operations + temp_file_path = '/tmp/test_file_12345' + mock_file_result = FileOperationResult( + success=True, + source_path=file_path, + destination_path=temp_file_path, + file_size=len(file_content.encode('utf-8')), + ) + + with patch( + 'openhands.app_server.app_conversation.app_conversation_router.AsyncRemoteWorkspace' + ) as mock_workspace_class: + mock_workspace = MagicMock(spec=AsyncRemoteWorkspace) + mock_workspace.file_download = AsyncMock(return_value=mock_file_result) + mock_workspace_class.return_value = mock_workspace + + with patch( + 'openhands.app_server.app_conversation.app_conversation_router.tempfile.NamedTemporaryFile' + ) as mock_tempfile: + mock_temp_file = MagicMock() + mock_temp_file.name = temp_file_path + mock_tempfile.return_value.__enter__ = MagicMock( + return_value=mock_temp_file + ) + mock_tempfile.return_value.__exit__ = MagicMock(return_value=None) + + with patch('builtins.open', create=True) as mock_open: + mock_file_handle = MagicMock() + mock_file_handle.read.return_value = file_content.encode('utf-8') + mock_open.return_value.__enter__ = MagicMock( + return_value=mock_file_handle + ) + mock_open.return_value.__exit__ = MagicMock(return_value=None) + + with patch( + 'openhands.app_server.app_conversation.app_conversation_router.os.unlink' + ) as mock_unlink: + # Call the endpoint + result = await read_conversation_file( + conversation_id=conversation_id, + file_path=file_path, + app_conversation_service=mock_app_conversation_service, + sandbox_service=mock_sandbox_service, + sandbox_spec_service=mock_sandbox_spec_service, + ) + + # Verify result + assert result == file_content + + # Verify services were called correctly + mock_app_conversation_service.get_app_conversation.assert_called_once_with( + conversation_id + ) + mock_sandbox_service.get_sandbox.assert_called_once_with( + 'test-sandbox-id' + ) + mock_sandbox_spec_service.get_sandbox_spec.assert_called_once_with( + 'test-spec-id' + ) + + # Verify workspace was created and file_download was called + mock_workspace_class.assert_called_once() + mock_workspace.file_download.assert_called_once_with( + source_path=file_path, + destination_path=temp_file_path, + ) + + # Verify file was read and cleaned up + mock_open.assert_called_once_with(temp_file_path, 'rb') + mock_unlink.assert_called_once_with(temp_file_path) + + +@pytest.mark.asyncio +async def test_read_conversation_file_different_path(): + """Test successfully retrieving file content from a different file path.""" + conversation_id = uuid4() + file_path = '/workspace/project/src/main.py' + file_content = 'def main():\n print("Hello, World!")\n' + + # Mock conversation + mock_conversation = AppConversation( + id=conversation_id, + created_by_user_id='test_user', + sandbox_id='test-sandbox-id', + title='Test Conversation', + sandbox_status=SandboxStatus.RUNNING, + execution_status=ConversationExecutionStatus.RUNNING, + session_api_key='test-api-key', + selected_repository='test/repo', + selected_branch='main', + git_provider=ProviderType.GITHUB, + trigger=ConversationTrigger.GUI, + created_at=datetime.now(timezone.utc), + updated_at=datetime.now(timezone.utc), + ) + + # Mock sandbox + mock_sandbox = SandboxInfo( + id='test-sandbox-id', + created_by_user_id='test_user', + sandbox_spec_id='test-spec-id', + status=SandboxStatus.RUNNING, + 
session_api_key='test-api-key', + exposed_urls=[ + ExposedUrl(name=AGENT_SERVER, url='http://agent:8000', port=8000) + ], + ) + + # Mock sandbox spec + mock_sandbox_spec = SandboxSpecInfo( + id='test-spec-id', + command=None, + working_dir='/workspace', + created_at=datetime.now(timezone.utc), + ) + + # Mock services + mock_app_conversation_service = MagicMock() + mock_app_conversation_service.get_app_conversation = AsyncMock( + return_value=mock_conversation + ) + + mock_sandbox_service = MagicMock() + mock_sandbox_service.get_sandbox = AsyncMock(return_value=mock_sandbox) + + mock_sandbox_spec_service = MagicMock() + mock_sandbox_spec_service.get_sandbox_spec = AsyncMock( + return_value=mock_sandbox_spec + ) + + # Mock tempfile and file operations + temp_file_path = '/tmp/test_file_67890' + mock_file_result = FileOperationResult( + success=True, + source_path=file_path, + destination_path=temp_file_path, + file_size=len(file_content.encode('utf-8')), + ) + + with patch( + 'openhands.app_server.app_conversation.app_conversation_router.AsyncRemoteWorkspace' + ) as mock_workspace_class: + mock_workspace = MagicMock(spec=AsyncRemoteWorkspace) + mock_workspace.file_download = AsyncMock(return_value=mock_file_result) + mock_workspace_class.return_value = mock_workspace + + with patch( + 'openhands.app_server.app_conversation.app_conversation_router.tempfile.NamedTemporaryFile' + ) as mock_tempfile: + mock_temp_file = MagicMock() + mock_temp_file.name = temp_file_path + mock_tempfile.return_value.__enter__ = MagicMock( + return_value=mock_temp_file + ) + mock_tempfile.return_value.__exit__ = MagicMock(return_value=None) + + with patch('builtins.open', create=True) as mock_open: + mock_file_handle = MagicMock() + mock_file_handle.read.return_value = file_content.encode('utf-8') + mock_open.return_value.__enter__ = MagicMock( + return_value=mock_file_handle + ) + mock_open.return_value.__exit__ = MagicMock(return_value=None) + + with patch( + 'openhands.app_server.app_conversation.app_conversation_router.os.unlink' + ) as mock_unlink: + # Call the endpoint + result = await read_conversation_file( + conversation_id=conversation_id, + file_path=file_path, + app_conversation_service=mock_app_conversation_service, + sandbox_service=mock_sandbox_service, + sandbox_spec_service=mock_sandbox_spec_service, + ) + + # Verify result + assert result == file_content + + # Verify workspace was created and file_download was called + mock_workspace_class.assert_called_once() + mock_workspace.file_download.assert_called_once_with( + source_path=file_path, + destination_path=temp_file_path, + ) + + # Verify file was read and cleaned up + mock_open.assert_called_once_with(temp_file_path, 'rb') + mock_unlink.assert_called_once_with(temp_file_path) + + +@pytest.mark.asyncio +async def test_read_conversation_file_conversation_not_found(): + """Test when conversation doesn't exist.""" + conversation_id = uuid4() + file_path = '/workspace/project/PLAN.md' + + # Mock services + mock_app_conversation_service = MagicMock() + mock_app_conversation_service.get_app_conversation = AsyncMock(return_value=None) + + mock_sandbox_service = MagicMock() + mock_sandbox_spec_service = MagicMock() + + # Call the endpoint + result = await read_conversation_file( + conversation_id=conversation_id, + file_path=file_path, + app_conversation_service=mock_app_conversation_service, + sandbox_service=mock_sandbox_service, + sandbox_spec_service=mock_sandbox_spec_service, + ) + + # Verify result + assert result == '' + + # Verify only 
conversation service was called + mock_app_conversation_service.get_app_conversation.assert_called_once_with( + conversation_id + ) + mock_sandbox_service.get_sandbox.assert_not_called() + mock_sandbox_spec_service.get_sandbox_spec.assert_not_called() + + +@pytest.mark.asyncio +async def test_read_conversation_file_sandbox_not_found(): + """Test when sandbox doesn't exist.""" + conversation_id = uuid4() + file_path = '/workspace/project/PLAN.md' + + # Mock conversation + mock_conversation = AppConversation( + id=conversation_id, + created_by_user_id='test_user', + sandbox_id='test-sandbox-id', + title='Test Conversation', + sandbox_status=SandboxStatus.RUNNING, + execution_status=ConversationExecutionStatus.RUNNING, + session_api_key='test-api-key', + selected_repository='test/repo', + selected_branch='main', + git_provider=ProviderType.GITHUB, + trigger=ConversationTrigger.GUI, + created_at=datetime.now(timezone.utc), + updated_at=datetime.now(timezone.utc), + ) + + # Mock services + mock_app_conversation_service = MagicMock() + mock_app_conversation_service.get_app_conversation = AsyncMock( + return_value=mock_conversation + ) + + mock_sandbox_service = MagicMock() + mock_sandbox_service.get_sandbox = AsyncMock(return_value=None) + + mock_sandbox_spec_service = MagicMock() + + # Call the endpoint + result = await read_conversation_file( + conversation_id=conversation_id, + file_path=file_path, + app_conversation_service=mock_app_conversation_service, + sandbox_service=mock_sandbox_service, + sandbox_spec_service=mock_sandbox_spec_service, + ) + + # Verify result + assert result == '' + + # Verify services were called + mock_app_conversation_service.get_app_conversation.assert_called_once_with( + conversation_id + ) + mock_sandbox_service.get_sandbox.assert_called_once_with('test-sandbox-id') + mock_sandbox_spec_service.get_sandbox_spec.assert_not_called() + + +@pytest.mark.asyncio +async def test_read_conversation_file_sandbox_not_running(): + """Test when sandbox is not in RUNNING status.""" + conversation_id = uuid4() + file_path = '/workspace/project/PLAN.md' + + # Mock conversation + mock_conversation = AppConversation( + id=conversation_id, + created_by_user_id='test_user', + sandbox_id='test-sandbox-id', + title='Test Conversation', + sandbox_status=SandboxStatus.PAUSED, + execution_status=None, + session_api_key=None, + selected_repository='test/repo', + selected_branch='main', + git_provider=ProviderType.GITHUB, + trigger=ConversationTrigger.GUI, + created_at=datetime.now(timezone.utc), + updated_at=datetime.now(timezone.utc), + ) + + # Mock sandbox + mock_sandbox = SandboxInfo( + id='test-sandbox-id', + created_by_user_id='test_user', + sandbox_spec_id='test-spec-id', + status=SandboxStatus.PAUSED, + session_api_key=None, + exposed_urls=None, + ) + + # Mock services + mock_app_conversation_service = MagicMock() + mock_app_conversation_service.get_app_conversation = AsyncMock( + return_value=mock_conversation + ) + + mock_sandbox_service = MagicMock() + mock_sandbox_service.get_sandbox = AsyncMock(return_value=mock_sandbox) + + mock_sandbox_spec_service = MagicMock() + + # Call the endpoint + result = await read_conversation_file( + conversation_id=conversation_id, + file_path=file_path, + app_conversation_service=mock_app_conversation_service, + sandbox_service=mock_sandbox_service, + sandbox_spec_service=mock_sandbox_spec_service, + ) + + # Verify result + assert result == '' + + # Verify services were called + 
mock_app_conversation_service.get_app_conversation.assert_called_once_with( + conversation_id + ) + mock_sandbox_service.get_sandbox.assert_called_once_with('test-sandbox-id') + mock_sandbox_spec_service.get_sandbox_spec.assert_not_called() + + +@pytest.mark.asyncio +async def test_read_conversation_file_sandbox_spec_not_found(): + """Test when sandbox spec doesn't exist.""" + conversation_id = uuid4() + file_path = '/workspace/project/PLAN.md' + + # Mock conversation + mock_conversation = AppConversation( + id=conversation_id, + created_by_user_id='test_user', + sandbox_id='test-sandbox-id', + title='Test Conversation', + sandbox_status=SandboxStatus.RUNNING, + execution_status=ConversationExecutionStatus.RUNNING, + session_api_key='test-api-key', + selected_repository='test/repo', + selected_branch='main', + git_provider=ProviderType.GITHUB, + trigger=ConversationTrigger.GUI, + created_at=datetime.now(timezone.utc), + updated_at=datetime.now(timezone.utc), + ) + + # Mock sandbox + mock_sandbox = SandboxInfo( + id='test-sandbox-id', + created_by_user_id='test_user', + sandbox_spec_id='test-spec-id', + status=SandboxStatus.RUNNING, + session_api_key='test-api-key', + exposed_urls=[ + ExposedUrl(name=AGENT_SERVER, url='http://agent:8000', port=8000) + ], + ) + + # Mock services + mock_app_conversation_service = MagicMock() + mock_app_conversation_service.get_app_conversation = AsyncMock( + return_value=mock_conversation + ) + + mock_sandbox_service = MagicMock() + mock_sandbox_service.get_sandbox = AsyncMock(return_value=mock_sandbox) + + mock_sandbox_spec_service = MagicMock() + mock_sandbox_spec_service.get_sandbox_spec = AsyncMock(return_value=None) + + # Call the endpoint + result = await read_conversation_file( + conversation_id=conversation_id, + file_path=file_path, + app_conversation_service=mock_app_conversation_service, + sandbox_service=mock_sandbox_service, + sandbox_spec_service=mock_sandbox_spec_service, + ) + + # Verify result + assert result == '' + + # Verify services were called + mock_app_conversation_service.get_app_conversation.assert_called_once_with( + conversation_id + ) + mock_sandbox_service.get_sandbox.assert_called_once_with('test-sandbox-id') + mock_sandbox_spec_service.get_sandbox_spec.assert_called_once_with('test-spec-id') + + +@pytest.mark.asyncio +async def test_read_conversation_file_no_exposed_urls(): + """Test when sandbox has no exposed URLs.""" + conversation_id = uuid4() + file_path = '/workspace/project/PLAN.md' + + # Mock conversation + mock_conversation = AppConversation( + id=conversation_id, + created_by_user_id='test_user', + sandbox_id='test-sandbox-id', + title='Test Conversation', + sandbox_status=SandboxStatus.RUNNING, + execution_status=ConversationExecutionStatus.RUNNING, + session_api_key='test-api-key', + selected_repository='test/repo', + selected_branch='main', + git_provider=ProviderType.GITHUB, + trigger=ConversationTrigger.GUI, + created_at=datetime.now(timezone.utc), + updated_at=datetime.now(timezone.utc), + ) + + # Mock sandbox with no exposed URLs + mock_sandbox = SandboxInfo( + id='test-sandbox-id', + created_by_user_id='test_user', + sandbox_spec_id='test-spec-id', + status=SandboxStatus.RUNNING, + session_api_key='test-api-key', + exposed_urls=None, + ) + + # Mock sandbox spec + mock_sandbox_spec = SandboxSpecInfo( + id='test-spec-id', + command=None, + working_dir='/workspace', + created_at=datetime.now(timezone.utc), + ) + + # Mock services + mock_app_conversation_service = MagicMock() + 
mock_app_conversation_service.get_app_conversation = AsyncMock( + return_value=mock_conversation + ) + + mock_sandbox_service = MagicMock() + mock_sandbox_service.get_sandbox = AsyncMock(return_value=mock_sandbox) + + mock_sandbox_spec_service = MagicMock() + mock_sandbox_spec_service.get_sandbox_spec = AsyncMock( + return_value=mock_sandbox_spec + ) + + # Call the endpoint + result = await read_conversation_file( + conversation_id=conversation_id, + file_path=file_path, + app_conversation_service=mock_app_conversation_service, + sandbox_service=mock_sandbox_service, + sandbox_spec_service=mock_sandbox_spec_service, + ) + + # Verify result + assert result == '' + + +@pytest.mark.asyncio +async def test_read_conversation_file_no_agent_server_url(): + """Test when sandbox has exposed URLs but no AGENT_SERVER.""" + conversation_id = uuid4() + file_path = '/workspace/project/PLAN.md' + + # Mock conversation + mock_conversation = AppConversation( + id=conversation_id, + created_by_user_id='test_user', + sandbox_id='test-sandbox-id', + title='Test Conversation', + sandbox_status=SandboxStatus.RUNNING, + execution_status=ConversationExecutionStatus.RUNNING, + session_api_key='test-api-key', + selected_repository='test/repo', + selected_branch='main', + git_provider=ProviderType.GITHUB, + trigger=ConversationTrigger.GUI, + created_at=datetime.now(timezone.utc), + updated_at=datetime.now(timezone.utc), + ) + + # Mock sandbox with exposed URLs but no AGENT_SERVER + mock_sandbox = SandboxInfo( + id='test-sandbox-id', + created_by_user_id='test_user', + sandbox_spec_id='test-spec-id', + status=SandboxStatus.RUNNING, + session_api_key='test-api-key', + exposed_urls=[ + ExposedUrl(name='OTHER_SERVICE', url='http://other:9000', port=9000) + ], + ) + + # Mock sandbox spec + mock_sandbox_spec = SandboxSpecInfo( + id='test-spec-id', + command=None, + working_dir='/workspace', + created_at=datetime.now(timezone.utc), + ) + + # Mock services + mock_app_conversation_service = MagicMock() + mock_app_conversation_service.get_app_conversation = AsyncMock( + return_value=mock_conversation + ) + + mock_sandbox_service = MagicMock() + mock_sandbox_service.get_sandbox = AsyncMock(return_value=mock_sandbox) + + mock_sandbox_spec_service = MagicMock() + mock_sandbox_spec_service.get_sandbox_spec = AsyncMock( + return_value=mock_sandbox_spec + ) + + # Call the endpoint + result = await read_conversation_file( + conversation_id=conversation_id, + file_path=file_path, + app_conversation_service=mock_app_conversation_service, + sandbox_service=mock_sandbox_service, + sandbox_spec_service=mock_sandbox_spec_service, + ) + + # Verify result + assert result == '' + + +@pytest.mark.asyncio +async def test_read_conversation_file_file_not_found(): + """Test when file doesn't exist.""" + conversation_id = uuid4() + file_path = '/workspace/project/PLAN.md' + + # Mock conversation + mock_conversation = AppConversation( + id=conversation_id, + created_by_user_id='test_user', + sandbox_id='test-sandbox-id', + title='Test Conversation', + sandbox_status=SandboxStatus.RUNNING, + execution_status=ConversationExecutionStatus.RUNNING, + session_api_key='test-api-key', + selected_repository='test/repo', + selected_branch='main', + git_provider=ProviderType.GITHUB, + trigger=ConversationTrigger.GUI, + created_at=datetime.now(timezone.utc), + updated_at=datetime.now(timezone.utc), + ) + + # Mock sandbox + mock_sandbox = SandboxInfo( + id='test-sandbox-id', + created_by_user_id='test_user', + sandbox_spec_id='test-spec-id', + 
status=SandboxStatus.RUNNING, + session_api_key='test-api-key', + exposed_urls=[ + ExposedUrl(name=AGENT_SERVER, url='http://agent:8000', port=8000) + ], + ) + + # Mock sandbox spec + mock_sandbox_spec = SandboxSpecInfo( + id='test-spec-id', + command=None, + working_dir='/workspace', + created_at=datetime.now(timezone.utc), + ) + + # Mock services + mock_app_conversation_service = MagicMock() + mock_app_conversation_service.get_app_conversation = AsyncMock( + return_value=mock_conversation + ) + + mock_sandbox_service = MagicMock() + mock_sandbox_service.get_sandbox = AsyncMock(return_value=mock_sandbox) + + mock_sandbox_spec_service = MagicMock() + mock_sandbox_spec_service.get_sandbox_spec = AsyncMock( + return_value=mock_sandbox_spec + ) + + # Mock tempfile and file operations for file not found + temp_file_path = '/tmp/test_file_not_found' + mock_file_result = FileOperationResult( + success=False, + source_path=file_path, + destination_path=temp_file_path, + error=f'File not found: {file_path}', + ) + + with patch( + 'openhands.app_server.app_conversation.app_conversation_router.AsyncRemoteWorkspace' + ) as mock_workspace_class: + mock_workspace = MagicMock(spec=AsyncRemoteWorkspace) + mock_workspace.file_download = AsyncMock(return_value=mock_file_result) + mock_workspace_class.return_value = mock_workspace + + with patch( + 'openhands.app_server.app_conversation.app_conversation_router.tempfile.NamedTemporaryFile' + ) as mock_tempfile: + mock_temp_file = MagicMock() + mock_temp_file.name = temp_file_path + mock_tempfile.return_value.__enter__ = MagicMock( + return_value=mock_temp_file + ) + mock_tempfile.return_value.__exit__ = MagicMock(return_value=None) + + with patch( + 'openhands.app_server.app_conversation.app_conversation_router.os.unlink' + ) as mock_unlink: + # Call the endpoint + result = await read_conversation_file( + conversation_id=conversation_id, + file_path=file_path, + app_conversation_service=mock_app_conversation_service, + sandbox_service=mock_sandbox_service, + sandbox_spec_service=mock_sandbox_spec_service, + ) + + # Verify result (empty string when file_download fails) + assert result == '' + + # Verify cleanup still happens + mock_unlink.assert_called_once_with(temp_file_path) + + +@pytest.mark.asyncio +async def test_read_conversation_file_empty_file(): + """Test when file exists but is empty.""" + conversation_id = uuid4() + file_path = '/workspace/project/PLAN.md' + + # Mock conversation + mock_conversation = AppConversation( + id=conversation_id, + created_by_user_id='test_user', + sandbox_id='test-sandbox-id', + title='Test Conversation', + sandbox_status=SandboxStatus.RUNNING, + execution_status=ConversationExecutionStatus.RUNNING, + session_api_key='test-api-key', + selected_repository='test/repo', + selected_branch='main', + git_provider=ProviderType.GITHUB, + trigger=ConversationTrigger.GUI, + created_at=datetime.now(timezone.utc), + updated_at=datetime.now(timezone.utc), + ) + + # Mock sandbox + mock_sandbox = SandboxInfo( + id='test-sandbox-id', + created_by_user_id='test_user', + sandbox_spec_id='test-spec-id', + status=SandboxStatus.RUNNING, + session_api_key='test-api-key', + exposed_urls=[ + ExposedUrl(name=AGENT_SERVER, url='http://agent:8000', port=8000) + ], + ) + + # Mock sandbox spec + mock_sandbox_spec = SandboxSpecInfo( + id='test-spec-id', + command=None, + working_dir='/workspace', + created_at=datetime.now(timezone.utc), + ) + + # Mock services + mock_app_conversation_service = MagicMock() + 
mock_app_conversation_service.get_app_conversation = AsyncMock( + return_value=mock_conversation + ) + + mock_sandbox_service = MagicMock() + mock_sandbox_service.get_sandbox = AsyncMock(return_value=mock_sandbox) + + mock_sandbox_spec_service = MagicMock() + mock_sandbox_spec_service.get_sandbox_spec = AsyncMock( + return_value=mock_sandbox_spec + ) + + # Mock tempfile and file operations for empty file + temp_file_path = '/tmp/test_file_empty' + empty_content = '' + mock_file_result = FileOperationResult( + success=True, + source_path=file_path, + destination_path=temp_file_path, + file_size=0, + ) + + with patch( + 'openhands.app_server.app_conversation.app_conversation_router.AsyncRemoteWorkspace' + ) as mock_workspace_class: + mock_workspace = MagicMock(spec=AsyncRemoteWorkspace) + mock_workspace.file_download = AsyncMock(return_value=mock_file_result) + mock_workspace_class.return_value = mock_workspace + + with patch( + 'openhands.app_server.app_conversation.app_conversation_router.tempfile.NamedTemporaryFile' + ) as mock_tempfile: + mock_temp_file = MagicMock() + mock_temp_file.name = temp_file_path + mock_tempfile.return_value.__enter__ = MagicMock( + return_value=mock_temp_file + ) + mock_tempfile.return_value.__exit__ = MagicMock(return_value=None) + + with patch('builtins.open', create=True) as mock_open: + mock_file_handle = MagicMock() + mock_file_handle.read.return_value = empty_content.encode('utf-8') + mock_open.return_value.__enter__ = MagicMock( + return_value=mock_file_handle + ) + mock_open.return_value.__exit__ = MagicMock(return_value=None) + + with patch( + 'openhands.app_server.app_conversation.app_conversation_router.os.unlink' + ) as mock_unlink: + # Call the endpoint + result = await read_conversation_file( + conversation_id=conversation_id, + file_path=file_path, + app_conversation_service=mock_app_conversation_service, + sandbox_service=mock_sandbox_service, + sandbox_spec_service=mock_sandbox_spec_service, + ) + + # Verify result (empty string when file is empty) + assert result == '' + + # Verify cleanup happens + mock_unlink.assert_called_once_with(temp_file_path) + + +@pytest.mark.asyncio +async def test_read_conversation_file_command_exception(): + """Test when command execution raises an exception.""" + conversation_id = uuid4() + file_path = '/workspace/project/PLAN.md' + + # Mock conversation + mock_conversation = AppConversation( + id=conversation_id, + created_by_user_id='test_user', + sandbox_id='test-sandbox-id', + title='Test Conversation', + sandbox_status=SandboxStatus.RUNNING, + execution_status=ConversationExecutionStatus.RUNNING, + session_api_key='test-api-key', + selected_repository='test/repo', + selected_branch='main', + git_provider=ProviderType.GITHUB, + trigger=ConversationTrigger.GUI, + created_at=datetime.now(timezone.utc), + updated_at=datetime.now(timezone.utc), + ) + + # Mock sandbox + mock_sandbox = SandboxInfo( + id='test-sandbox-id', + created_by_user_id='test_user', + sandbox_spec_id='test-spec-id', + status=SandboxStatus.RUNNING, + session_api_key='test-api-key', + exposed_urls=[ + ExposedUrl(name=AGENT_SERVER, url='http://agent:8000', port=8000) + ], + ) + + # Mock sandbox spec + mock_sandbox_spec = SandboxSpecInfo( + id='test-spec-id', + command=None, + working_dir='/workspace', + created_at=datetime.now(timezone.utc), + ) + + # Mock services + mock_app_conversation_service = MagicMock() + mock_app_conversation_service.get_app_conversation = AsyncMock( + return_value=mock_conversation + ) + + mock_sandbox_service = 
MagicMock() + mock_sandbox_service.get_sandbox = AsyncMock(return_value=mock_sandbox) + + mock_sandbox_spec_service = MagicMock() + mock_sandbox_spec_service.get_sandbox_spec = AsyncMock( + return_value=mock_sandbox_spec + ) + + # Mock tempfile and file operations for exception case + temp_file_path = '/tmp/test_file_exception' + + with patch( + 'openhands.app_server.app_conversation.app_conversation_router.AsyncRemoteWorkspace' + ) as mock_workspace_class: + mock_workspace = MagicMock(spec=AsyncRemoteWorkspace) + mock_workspace.file_download = AsyncMock( + side_effect=Exception('Connection timeout') + ) + mock_workspace_class.return_value = mock_workspace + + with patch( + 'openhands.app_server.app_conversation.app_conversation_router.tempfile.NamedTemporaryFile' + ) as mock_tempfile: + mock_temp_file = MagicMock() + mock_temp_file.name = temp_file_path + mock_tempfile.return_value.__enter__ = MagicMock( + return_value=mock_temp_file + ) + mock_tempfile.return_value.__exit__ = MagicMock(return_value=None) + + with patch( + 'openhands.app_server.app_conversation.app_conversation_router.os.unlink' + ) as mock_unlink: + # Call the endpoint + result = await read_conversation_file( + conversation_id=conversation_id, + file_path=file_path, + app_conversation_service=mock_app_conversation_service, + sandbox_service=mock_sandbox_service, + sandbox_spec_service=mock_sandbox_spec_service, + ) + + # Verify result (empty string on exception) + assert result == '' + + # Verify cleanup still happens even on exception + mock_unlink.assert_called_once_with(temp_file_path) diff --git a/tests/unit/server/routes/test_mcp_routes.py b/tests/unit/server/routes/test_mcp_routes.py index 1a55cc0a39e4..8677b8c85c37 100644 --- a/tests/unit/server/routes/test_mcp_routes.py +++ b/tests/unit/server/routes/test_mcp_routes.py @@ -1,3 +1,4 @@ +import warnings from unittest.mock import AsyncMock, patch import pytest @@ -7,6 +8,38 @@ from openhands.server.types import AppMode +def test_mcp_server_no_stateless_http_deprecation_warning(): + """Test that mcp_server is created without stateless_http deprecation warning. + + This test verifies the fix for the fastmcp deprecation warning: + 'Providing `stateless_http` when creating a server is deprecated. + Provide it when calling `run` or as a global setting instead.' + + The fix moves the stateless_http parameter from FastMCP() constructor + to the http_app() method call. 
+ """ + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('always') + + # Import the mcp_server which triggers FastMCP creation + from openhands.server.routes.mcp import mcp_server + + # Check that no deprecation warning about stateless_http was raised + stateless_http_warnings = [ + warning + for warning in w + if issubclass(warning.category, DeprecationWarning) + and 'stateless_http' in str(warning.message) + ] + + assert len(stateless_http_warnings) == 0, ( + f'Unexpected stateless_http deprecation warning: {stateless_http_warnings}' + ) + + # Verify mcp_server was created successfully + assert mcp_server is not None + + @pytest.mark.asyncio async def test_get_conversation_link_non_saas_mode(): """Test get_conversation_link in non-SAAS mode.""" diff --git a/tests/unit/server/routes/test_settings_api.py b/tests/unit/server/routes/test_settings_api.py index f01b1d77df3a..6ea408038810 100644 --- a/tests/unit/server/routes/test_settings_api.py +++ b/tests/unit/server/routes/test_settings_api.py @@ -46,6 +46,9 @@ async def get_secrets_store(self) -> SecretsStore | None: async def get_secrets(self) -> Secrets | None: return None + async def get_mcp_api_key(self) -> str | None: + return None + @classmethod async def get_instance(cls, request: Request) -> UserAuth: return MockUserAuth() diff --git a/tests/unit/server/routes/test_settings_store_functions.py b/tests/unit/server/routes/test_settings_store_functions.py index 6296a8e354cf..c6eb6f5628c8 100644 --- a/tests/unit/server/routes/test_settings_store_functions.py +++ b/tests/unit/server/routes/test_settings_store_functions.py @@ -2,13 +2,16 @@ from unittest.mock import AsyncMock, MagicMock, patch import pytest +from fastapi import FastAPI from fastapi.testclient import TestClient from pydantic import SecretStr from openhands.integrations.provider import ProviderToken from openhands.integrations.service_types import ProviderType from openhands.server.routes.secrets import ( - app, + app as secrets_router, +) +from openhands.server.routes.secrets import ( check_provider_tokens, ) from openhands.server.routes.settings import store_llm_settings @@ -27,7 +30,12 @@ async def get_settings_store(request): @pytest.fixture def test_client(): - # Create a test client + # Create a test client with a FastAPI app that includes the secrets router + # This is necessary because TestClient with APIRouter directly doesn't set up + # the full middleware stack in newer FastAPI versions (0.118.0+) + test_app = FastAPI() + test_app.include_router(secrets_router) + with ( patch.dict(os.environ, {'SESSION_API_KEY': ''}, clear=False), patch('openhands.server.dependencies._SESSION_API_KEY', None), @@ -36,7 +44,7 @@ def test_client(): AsyncMock(return_value=''), ), ): - client = TestClient(app) + client = TestClient(test_app) yield client diff --git a/tests/unit/server/session/test_conversation_init_data.py b/tests/unit/server/session/test_conversation_init_data.py new file mode 100644 index 000000000000..3c5d7d97f792 --- /dev/null +++ b/tests/unit/server/session/test_conversation_init_data.py @@ -0,0 +1,272 @@ +"""Unit tests for ConversationInitData - specifically testing the field validator. + +These tests verify that the immutable_validator correctly converts dict to MappingProxyType +for git_provider_tokens and custom_secrets fields, ensuring type safety. 
+""" + +from types import MappingProxyType + +import pytest +from pydantic import SecretStr + +from openhands.integrations.provider import CustomSecret, ProviderToken, ProviderType +from openhands.server.session.conversation_init_data import ConversationInitData +from openhands.storage.data_models.settings import Settings + + +@pytest.fixture +def base_settings(): + """Create a base Settings object with minimal required fields.""" + return Settings( + language='en', + agent='CodeActAgent', + max_iterations=100, + llm_model='anthropic/claude-3-5-sonnet-20241022', + llm_api_key=SecretStr('test_api_key_12345'), + llm_base_url=None, + ) + + +class TestConversationInitDataValidator: + """Test suite for ConversationInitData field validator.""" + + def test_git_provider_tokens_dict_converted_to_mappingproxy(self, base_settings): + """Test that dict passed as git_provider_tokens is converted to MappingProxyType.""" + # Create provider tokens as a regular dict + provider_tokens_dict = { + ProviderType.GITHUB: ProviderToken( + token=SecretStr('ghp_test_token_123'), user_id='test_user' + ), + ProviderType.GITLAB: ProviderToken( + token=SecretStr('glpat_test_token_456'), user_id='test_user_2' + ), + } + + # Create ConversationInitData with dict + init_data = ConversationInitData( + **base_settings.__dict__, + git_provider_tokens=provider_tokens_dict, + ) + + # Verify it's now a MappingProxyType + assert isinstance(init_data.git_provider_tokens, MappingProxyType) + assert ProviderType.GITHUB in init_data.git_provider_tokens + assert ProviderType.GITLAB in init_data.git_provider_tokens + assert ( + init_data.git_provider_tokens[ProviderType.GITHUB].token.get_secret_value() + == 'ghp_test_token_123' + ) + + def test_git_provider_tokens_mappingproxy_preserved(self, base_settings): + """Test that MappingProxyType passed as git_provider_tokens is converted to MappingProxyType.""" + # Create provider tokens as MappingProxyType + provider_token = ProviderToken( + token=SecretStr('ghp_test_token_789'), user_id='test_user_3' + ) + provider_tokens_proxy = MappingProxyType({ProviderType.GITHUB: provider_token}) + + # Create ConversationInitData with MappingProxyType + init_data = ConversationInitData( + **base_settings.__dict__, + git_provider_tokens=provider_tokens_proxy, + ) + + # Verify it's a MappingProxyType (Pydantic may create a new one, but type is preserved) + assert isinstance(init_data.git_provider_tokens, MappingProxyType) + assert ( + init_data.git_provider_tokens[ProviderType.GITHUB].token.get_secret_value() + == 'ghp_test_token_789' + ) + assert ( + init_data.git_provider_tokens[ProviderType.GITHUB].user_id == 'test_user_3' + ) + + def test_git_provider_tokens_none_preserved(self, base_settings): + """Test that None passed as git_provider_tokens is preserved.""" + # Create ConversationInitData with None + init_data = ConversationInitData( + **base_settings.__dict__, + git_provider_tokens=None, + ) + + # Verify it's still None + assert init_data.git_provider_tokens is None + + def test_custom_secrets_dict_converted_to_mappingproxy(self, base_settings): + """Test that dict passed as custom_secrets is converted to MappingProxyType.""" + # Create custom secrets as a regular dict + custom_secrets_dict = { + 'API_KEY': CustomSecret( + secret=SecretStr('api_key_123'), description='API key for service' + ), + 'DATABASE_URL': CustomSecret( + secret=SecretStr('postgres://localhost'), description='Database URL' + ), + } + + # Create ConversationInitData with dict + init_data = ConversationInitData( + 
**base_settings.__dict__, + custom_secrets=custom_secrets_dict, + ) + + # Verify it's now a MappingProxyType + assert isinstance(init_data.custom_secrets, MappingProxyType) + assert 'API_KEY' in init_data.custom_secrets + assert 'DATABASE_URL' in init_data.custom_secrets + assert ( + init_data.custom_secrets['API_KEY'].secret.get_secret_value() + == 'api_key_123' + ) + + def test_custom_secrets_mappingproxy_preserved(self, base_settings): + """Test that MappingProxyType passed as custom_secrets is converted to MappingProxyType.""" + # Create custom secrets as MappingProxyType + custom_secret = CustomSecret( + secret=SecretStr('api_key_456'), description='API key' + ) + custom_secrets_proxy = MappingProxyType({'API_KEY': custom_secret}) + + # Create ConversationInitData with MappingProxyType + init_data = ConversationInitData( + **base_settings.__dict__, + custom_secrets=custom_secrets_proxy, + ) + + # Verify it's a MappingProxyType (Pydantic may create a new one, but type is preserved) + assert isinstance(init_data.custom_secrets, MappingProxyType) + assert ( + init_data.custom_secrets['API_KEY'].secret.get_secret_value() + == 'api_key_456' + ) + assert init_data.custom_secrets['API_KEY'].description == 'API key' + + def test_custom_secrets_none_preserved(self, base_settings): + """Test that None passed as custom_secrets is preserved.""" + # Create ConversationInitData with None + init_data = ConversationInitData( + **base_settings.__dict__, + custom_secrets=None, + ) + + # Verify it's still None + assert init_data.custom_secrets is None + + def test_both_fields_dict_converted(self, base_settings): + """Test that both fields are converted when passed as dicts.""" + provider_tokens_dict = { + ProviderType.GITHUB: ProviderToken( + token=SecretStr('ghp_token'), user_id='user1' + ) + } + custom_secrets_dict = { + 'SECRET': CustomSecret( + secret=SecretStr('secret_value'), description='A secret' + ) + } + + init_data = ConversationInitData( + **base_settings.__dict__, + git_provider_tokens=provider_tokens_dict, + custom_secrets=custom_secrets_dict, + ) + + # Both should be MappingProxyType + assert isinstance(init_data.git_provider_tokens, MappingProxyType) + assert isinstance(init_data.custom_secrets, MappingProxyType) + + def test_empty_dict_converted_to_mappingproxy(self, base_settings): + """Test that empty dict is converted to empty MappingProxyType.""" + # Create ConversationInitData with empty dicts + init_data = ConversationInitData( + **base_settings.__dict__, + git_provider_tokens={}, + custom_secrets={}, + ) + + # Both should be MappingProxyType (even if empty) + assert isinstance(init_data.git_provider_tokens, MappingProxyType) + assert isinstance(init_data.custom_secrets, MappingProxyType) + assert len(init_data.git_provider_tokens) == 0 + assert len(init_data.custom_secrets) == 0 + + def test_validator_prevents_mutation(self, base_settings): + """Test that MappingProxyType prevents mutation of the underlying data.""" + provider_tokens_dict = { + ProviderType.GITHUB: ProviderToken( + token=SecretStr('ghp_token'), user_id='user1' + ) + } + + init_data = ConversationInitData( + **base_settings.__dict__, + git_provider_tokens=provider_tokens_dict, + ) + + # Verify it's a MappingProxyType (which is immutable) + assert isinstance(init_data.git_provider_tokens, MappingProxyType) + + # Verify that attempting to modify would raise (MappingProxyType is read-only) + with pytest.raises(TypeError): + # MappingProxyType doesn't support item assignment + 
init_data.git_provider_tokens[ProviderType.GITLAB] = ProviderToken( + token=SecretStr('new_token') + ) + + def test_validator_with_settings_dict_unpacking(self, base_settings): + """Test validator works when creating from unpacked settings dict. + + This simulates the real-world usage in conversation_service.py where + session_init_args is created from settings.__dict__. + """ + # Simulate the pattern used in conversation_service.py + session_init_args = {**base_settings.__dict__} + session_init_args['git_provider_tokens'] = { + ProviderType.GITHUB: ProviderToken( + token=SecretStr('ghp_from_dict'), user_id='user_from_dict' + ) + } + + # Create ConversationInitData from unpacked dict + init_data = ConversationInitData(**session_init_args) + + # Verify it's converted to MappingProxyType + assert isinstance(init_data.git_provider_tokens, MappingProxyType) + assert ( + init_data.git_provider_tokens[ProviderType.GITHUB].token.get_secret_value() + == 'ghp_from_dict' + ) + + def test_validator_with_mixed_types(self, base_settings): + """Test validator with one field as dict and one as MappingProxyType.""" + # git_provider_tokens as dict + provider_tokens_dict = { + ProviderType.GITHUB: ProviderToken( + token=SecretStr('ghp_dict_token'), user_id='user_dict' + ) + } + + # custom_secrets as MappingProxyType + custom_secret = CustomSecret( + secret=SecretStr('secret_proxy'), description='From proxy' + ) + custom_secrets_proxy = MappingProxyType({'SECRET': custom_secret}) + + init_data = ConversationInitData( + **base_settings.__dict__, + git_provider_tokens=provider_tokens_dict, + custom_secrets=custom_secrets_proxy, + ) + + # Both should be MappingProxyType + assert isinstance(init_data.git_provider_tokens, MappingProxyType) + assert isinstance(init_data.custom_secrets, MappingProxyType) + # Verify the content is preserved (Pydantic may create new MappingProxyType instances) + assert ( + init_data.git_provider_tokens[ProviderType.GITHUB].token.get_secret_value() + == 'ghp_dict_token' + ) + assert ( + init_data.custom_secrets['SECRET'].secret.get_secret_value() + == 'secret_proxy' + ) diff --git a/tests/unit/server/test_openapi_schema_generation.py b/tests/unit/server/test_openapi_schema_generation.py index 2aa798e1e650..eb967e496c68 100644 --- a/tests/unit/server/test_openapi_schema_generation.py +++ b/tests/unit/server/test_openapi_schema_generation.py @@ -46,6 +46,9 @@ async def get_secrets_store(self) -> SecretsStore | None: async def get_secrets(self) -> Secrets | None: return None + async def get_mcp_api_key(self) -> str | None: + return None + @classmethod async def get_instance(cls, request: Request) -> UserAuth: return MockUserAuth() diff --git a/tests/unit/utils/test_posthog_tracker.py b/tests/unit/utils/test_posthog_tracker.py deleted file mode 100644 index cec0eff0ccc0..000000000000 --- a/tests/unit/utils/test_posthog_tracker.py +++ /dev/null @@ -1,356 +0,0 @@ -"""Unit tests for PostHog tracking utilities.""" - -from unittest.mock import MagicMock, patch - -import pytest - -from openhands.utils.posthog_tracker import ( - alias_user_identities, - track_agent_task_completed, - track_credit_limit_reached, - track_credits_purchased, - track_user_signup_completed, -) - - -@pytest.fixture -def mock_posthog(): - """Mock the posthog module.""" - with patch('openhands.utils.posthog_tracker.posthog') as mock_ph: - mock_ph.capture = MagicMock() - yield mock_ph - - -def test_track_agent_task_completed_with_user_id(mock_posthog): - """Test tracking agent task completion with user ID.""" - # 
Initialize posthog manually in the test - import openhands.utils.posthog_tracker as tracker - - tracker.posthog = mock_posthog - - track_agent_task_completed( - conversation_id='test-conversation-123', - user_id='user-456', - app_mode='saas', - ) - - mock_posthog.capture.assert_called_once_with( - distinct_id='user-456', - event='agent_task_completed', - properties={ - 'conversation_id': 'test-conversation-123', - 'user_id': 'user-456', - 'app_mode': 'saas', - }, - ) - - -def test_track_agent_task_completed_without_user_id(mock_posthog): - """Test tracking agent task completion without user ID (anonymous).""" - import openhands.utils.posthog_tracker as tracker - - tracker.posthog = mock_posthog - - track_agent_task_completed( - conversation_id='test-conversation-789', - user_id=None, - app_mode='oss', - ) - - mock_posthog.capture.assert_called_once_with( - distinct_id='conversation_test-conversation-789', - event='agent_task_completed', - properties={ - 'conversation_id': 'test-conversation-789', - 'user_id': None, - 'app_mode': 'oss', - }, - ) - - -def test_track_agent_task_completed_default_app_mode(mock_posthog): - """Test tracking with default app_mode.""" - import openhands.utils.posthog_tracker as tracker - - tracker.posthog = mock_posthog - - track_agent_task_completed( - conversation_id='test-conversation-999', - user_id='user-111', - ) - - mock_posthog.capture.assert_called_once_with( - distinct_id='user-111', - event='agent_task_completed', - properties={ - 'conversation_id': 'test-conversation-999', - 'user_id': 'user-111', - 'app_mode': 'unknown', - }, - ) - - -def test_track_agent_task_completed_handles_errors(mock_posthog): - """Test that tracking errors are handled gracefully.""" - import openhands.utils.posthog_tracker as tracker - - tracker.posthog = mock_posthog - mock_posthog.capture.side_effect = Exception('PostHog API error') - - # Should not raise an exception - track_agent_task_completed( - conversation_id='test-conversation-error', - user_id='user-error', - app_mode='saas', - ) - - -def test_track_agent_task_completed_when_posthog_not_installed(): - """Test tracking when posthog is not installed.""" - import openhands.utils.posthog_tracker as tracker - - # Simulate posthog not being installed - tracker.posthog = None - - # Should not raise an exception - track_agent_task_completed( - conversation_id='test-conversation-no-ph', - user_id='user-no-ph', - app_mode='oss', - ) - - -def test_track_user_signup_completed(mock_posthog): - """Test tracking user signup completion.""" - import openhands.utils.posthog_tracker as tracker - - tracker.posthog = mock_posthog - - track_user_signup_completed( - user_id='test-user-123', - signup_timestamp='2025-01-15T10:30:00Z', - ) - - mock_posthog.capture.assert_called_once_with( - distinct_id='test-user-123', - event='user_signup_completed', - properties={ - 'user_id': 'test-user-123', - 'signup_timestamp': '2025-01-15T10:30:00Z', - }, - ) - - -def test_track_user_signup_completed_handles_errors(mock_posthog): - """Test that user signup tracking errors are handled gracefully.""" - import openhands.utils.posthog_tracker as tracker - - tracker.posthog = mock_posthog - mock_posthog.capture.side_effect = Exception('PostHog API error') - - # Should not raise an exception - track_user_signup_completed( - user_id='test-user-error', - signup_timestamp='2025-01-15T12:00:00Z', - ) - - -def test_track_user_signup_completed_when_posthog_not_installed(): - """Test user signup tracking when posthog is not installed.""" - import 
openhands.utils.posthog_tracker as tracker - - # Simulate posthog not being installed - tracker.posthog = None - - # Should not raise an exception - track_user_signup_completed( - user_id='test-user-no-ph', - signup_timestamp='2025-01-15T13:00:00Z', - ) - - -def test_track_credit_limit_reached_with_user_id(mock_posthog): - """Test tracking credit limit reached with user ID.""" - import openhands.utils.posthog_tracker as tracker - - tracker.posthog = mock_posthog - - track_credit_limit_reached( - conversation_id='test-conversation-456', - user_id='user-789', - current_budget=10.50, - max_budget=10.00, - ) - - mock_posthog.capture.assert_called_once_with( - distinct_id='user-789', - event='credit_limit_reached', - properties={ - 'conversation_id': 'test-conversation-456', - 'user_id': 'user-789', - 'current_budget': 10.50, - 'max_budget': 10.00, - }, - ) - - -def test_track_credit_limit_reached_without_user_id(mock_posthog): - """Test tracking credit limit reached without user ID (anonymous).""" - import openhands.utils.posthog_tracker as tracker - - tracker.posthog = mock_posthog - - track_credit_limit_reached( - conversation_id='test-conversation-999', - user_id=None, - current_budget=5.25, - max_budget=5.00, - ) - - mock_posthog.capture.assert_called_once_with( - distinct_id='conversation_test-conversation-999', - event='credit_limit_reached', - properties={ - 'conversation_id': 'test-conversation-999', - 'user_id': None, - 'current_budget': 5.25, - 'max_budget': 5.00, - }, - ) - - -def test_track_credit_limit_reached_handles_errors(mock_posthog): - """Test that credit limit tracking errors are handled gracefully.""" - import openhands.utils.posthog_tracker as tracker - - tracker.posthog = mock_posthog - mock_posthog.capture.side_effect = Exception('PostHog API error') - - # Should not raise an exception - track_credit_limit_reached( - conversation_id='test-conversation-error', - user_id='user-error', - current_budget=15.00, - max_budget=10.00, - ) - - -def test_track_credit_limit_reached_when_posthog_not_installed(): - """Test credit limit tracking when posthog is not installed.""" - import openhands.utils.posthog_tracker as tracker - - # Simulate posthog not being installed - tracker.posthog = None - - # Should not raise an exception - track_credit_limit_reached( - conversation_id='test-conversation-no-ph', - user_id='user-no-ph', - current_budget=8.00, - max_budget=5.00, - ) - - -def test_track_credits_purchased(mock_posthog): - """Test tracking credits purchased.""" - import openhands.utils.posthog_tracker as tracker - - tracker.posthog = mock_posthog - - track_credits_purchased( - user_id='test-user-999', - amount_usd=50.00, - credits_added=50.00, - stripe_session_id='cs_test_abc123', - ) - - mock_posthog.capture.assert_called_once_with( - distinct_id='test-user-999', - event='credits_purchased', - properties={ - 'user_id': 'test-user-999', - 'amount_usd': 50.00, - 'credits_added': 50.00, - 'stripe_session_id': 'cs_test_abc123', - }, - ) - - -def test_track_credits_purchased_handles_errors(mock_posthog): - """Test that credits purchased tracking errors are handled gracefully.""" - import openhands.utils.posthog_tracker as tracker - - tracker.posthog = mock_posthog - mock_posthog.capture.side_effect = Exception('PostHog API error') - - # Should not raise an exception - track_credits_purchased( - user_id='test-user-error', - amount_usd=100.00, - credits_added=100.00, - stripe_session_id='cs_test_error', - ) - - -def test_track_credits_purchased_when_posthog_not_installed(): - """Test 
credits purchased tracking when posthog is not installed.""" - import openhands.utils.posthog_tracker as tracker - - # Simulate posthog not being installed - tracker.posthog = None - - # Should not raise an exception - track_credits_purchased( - user_id='test-user-no-ph', - amount_usd=25.00, - credits_added=25.00, - stripe_session_id='cs_test_no_ph', - ) - - -def test_alias_user_identities(mock_posthog): - """Test aliasing user identities. - - Verifies that posthog.alias(previous_id, distinct_id) is called correctly - where git_login is the previous_id and keycloak_user_id is the distinct_id. - """ - import openhands.utils.posthog_tracker as tracker - - tracker.posthog = mock_posthog - mock_posthog.alias = MagicMock() - - alias_user_identities( - keycloak_user_id='keycloak-123', - git_login='git-user', - ) - - # Verify: posthog.alias(previous_id='git-user', distinct_id='keycloak-123') - mock_posthog.alias.assert_called_once_with('git-user', 'keycloak-123') - - -def test_alias_user_identities_handles_errors(mock_posthog): - """Test that aliasing errors are handled gracefully.""" - import openhands.utils.posthog_tracker as tracker - - tracker.posthog = mock_posthog - mock_posthog.alias = MagicMock(side_effect=Exception('PostHog API error')) - - # Should not raise an exception - alias_user_identities( - keycloak_user_id='keycloak-error', - git_login='git-error', - ) - - -def test_alias_user_identities_when_posthog_not_installed(): - """Test aliasing when posthog is not installed.""" - import openhands.utils.posthog_tracker as tracker - - # Simulate posthog not being installed - tracker.posthog = None - - # Should not raise an exception - alias_user_identities( - keycloak_user_id='keycloak-no-ph', - git_login='git-no-ph', - ) diff --git a/trigger_commit.txt b/trigger_commit.txt deleted file mode 100644 index 402f8bb0e55f..000000000000 --- a/trigger_commit.txt +++ /dev/null @@ -1 +0,0 @@ -# Trigger E2E test run