2 changes: 1 addition & 1 deletion pyproject.toml
@@ -9,7 +9,7 @@ dependencies = [
"requests>=2.32",
"loguru>=0.7",
"pydantic",
"revengai>=2.11.0",
"revengai>=3.0.0",
"libbs>=2.16.5",
]

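The only change in this file raises the SDK floor from revengai 2.11.0 to 3.0.0, which the import and API changes further down depend on. A minimal sketch (not part of this PR) of checking that the environment actually satisfies the new floor, assuming the distribution name matches the dependency string in pyproject.toml:

# Sketch only: verify the installed SDK version against the new ">=3.0.0" floor.
from importlib.metadata import version

installed = version("revengai")  # distribution name as declared in pyproject.toml
print(f"revengai {installed} installed; this branch expects >= 3.0.0")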
20 changes: 9 additions & 11 deletions reai_toolkit/app/coordinators/create_analysis_coordinator.py
@@ -37,20 +37,18 @@ def run_dialog(self) -> None:

def is_authed(self) -> bool:
return self.app.auth_service.is_authenticated()

def _on_complete(self, service_response: GenericApiReturn) -> None:
"""Handle completion of analysis creation."""
if service_response.success:
if service_response.success and isinstance(service_response.data, AnalysisCreateResponse):
self.safe_info(
msg="Analysis created successfully, please wait while it is processed."
)
else:
self.safe_error(message=service_response.error_message)

data: AnalysisCreateResponse = service_response.data
# Should have analysis id - refresh to update menu options
self.safe_refresh()

# Should have analysis id - refresh to update menu options
self.safe_refresh()

# Call Sync Task to poll status
self.analysis_status_coord.poll_status(analysis_id=data.analysis_id)
# Call Sync Task to poll status
self.analysis_status_coord.poll_status(analysis_id=service_response.data.analysis_id)
else:
error_message: str = service_response.error_message or "Unknown error"
self.safe_error(error_message)
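The success branch now only runs when service_response.data is really an AnalysisCreateResponse, so the later service_response.data.analysis_id access cannot hit None or a payload of the wrong shape. A minimal sketch of that narrowing pattern, using stand-in classes (Result and CreateResponse are illustrative, not the real GenericApiReturn or revengai types):

# Sketch only: Result and CreateResponse stand in for GenericApiReturn and
# AnalysisCreateResponse; they are not the real toolkit/SDK classes.
from dataclasses import dataclass
from typing import Generic, Optional, TypeVar

T = TypeVar("T")

@dataclass
class Result(Generic[T]):
    success: bool
    data: Optional[T] = None
    error_message: Optional[str] = None

@dataclass
class CreateResponse:
    analysis_id: int

def on_complete(result: Result[CreateResponse]) -> None:
    if result.success and isinstance(result.data, CreateResponse):
        # The isinstance check narrows result.data from Optional[CreateResponse]
        # to CreateResponse, so the attribute access below is safe.
        print(f"created analysis {result.data.analysis_id}")
    else:
        print(result.error_message or "Unknown error")

on_complete(Result(success=True, data=CreateResponse(analysis_id=42)))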
4 changes: 2 additions & 2 deletions reai_toolkit/app/coordinators/poll_status_coordinator.py
@@ -42,7 +42,7 @@ def is_active_worker(self) -> bool:
"""Check if the analysis sync worker is active."""
return self.analysis_status_service.is_worker_running()

def poll_status(self, analysis_id: str) -> None:
def poll_status(self, analysis_id: int) -> None:
"""Poll the status of an analysis until completion."""
self.analysis_status_service.start_polling(
analysis_id=analysis_id, thread_callback=self._on_complete
@@ -54,7 +54,7 @@ def _on_complete(self, generic_return: GenericApiReturn[int]) -> None:
Handle completion of analysis status polling.
"""
if not generic_return.success:
self.safe_error(message=generic_return.error_message)
self.safe_error(message=generic_return.error_message or "failed to poll analysis status")

self.analysis_sync_coord.sync_analysis()

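The coordinator change types analysis_id as int and adds a fallback string so safe_error never receives None, since GenericApiReturn.error_message is presumably Optional. A tiny sketch of that fallback pattern, with report_error standing in for safe_error (an assumption, not the real method):

# Sketch only: report_error is an illustrative stand-in for safe_error.
from typing import Optional

def report_error(message: str) -> None:
    print(f"ERROR: {message}")

def handle_poll_result(success: bool, error_message: Optional[str]) -> None:
    if not success:
        # `or` substitutes a generic message when the API returned no detail,
        # so report_error always gets a concrete string.
        report_error(error_message or "failed to poll analysis status")

handle_poll_result(False, None)        # -> ERROR: failed to poll analysis status
handle_poll_result(False, "HTTP 503")  # -> ERROR: HTTP 503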
@@ -20,7 +20,7 @@ def __init__(self, netstore_service: SimpleNetStore, sdk_config: Configuration):
def call_callback(self, generic_return: GenericApiReturn) -> None:
self._thread_callback(generic_return)

def start_polling(self, analysis_id: str, thread_callback: Callable[..., Any]) -> None:
def start_polling(self, analysis_id: int, thread_callback: Callable[..., Any]) -> None:
"""
Starts polling the analysis status as a background job.
"""
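start_polling now takes the analysis id as an int and runs the status check as a background job, invoking thread_callback when polling ends. A hypothetical sketch of that contract using a plain thread; poll_until_complete and fetch_status are illustrative stand-ins, not reai_toolkit or revengai APIs:

# Hypothetical polling sketch; not the real service implementation.
import threading
import time
from typing import Callable

def fetch_status(analysis_id: int) -> str:
    # Placeholder: a real implementation would query the RevEng.AI API here.
    return "Complete"

def poll_until_complete(
    analysis_id: int,
    thread_callback: Callable[[bool], None],
    interval_seconds: float = 5.0,
) -> threading.Thread:
    def worker() -> None:
        while True:
            status = fetch_status(analysis_id)
            if status in ("Complete", "Error"):
                thread_callback(status == "Complete")
                return
            time.sleep(interval_seconds)

    thread = threading.Thread(target=worker, daemon=True)
    thread.start()
    return thread

poll_until_complete(1234, lambda ok: print("analysis finished, success =", ok)).join()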
63 changes: 40 additions & 23 deletions reai_toolkit/app/services/upload/upload_service.py
@@ -1,27 +1,31 @@
from typing import Callable
import threading
from pathlib import Path
from typing import Optional, Tuple

from loguru import logger
from revengai import AnalysesCoreApi, Configuration, Symbols
from revengai.models import (
AnalysisCreateRequest,
AnalysisCreateResponse,
AnalysisScope,
UploadFileType,
)
from revengai import AnalysesCoreApi, BaseResponseConfigResponse, Configuration, Symbols
from revengai.api.config_api import ConfigApi

from revengai.models.analysis_create_request import AnalysisCreateRequest
from revengai.models.analysis_create_response import AnalysisCreateResponse
from revengai.models.analysis_scope import AnalysisScope
from revengai.models.upload_file_type import UploadFileType
from revengai.models.base_response_upload_response import BaseResponseUploadResponse

from reai_toolkit.app.core.netstore_service import SimpleNetStore
from reai_toolkit.app.core.shared_schema import GenericApiReturn
from reai_toolkit.app.core.utils import collect_symbols_from_ida, sha256_file
from reai_toolkit.app.core.utils import sha256_file
from reai_toolkit.app.interfaces.thread_service import IThreadService


class UploadService(IThreadService):
_thread_callback: Optional[callable] = None
MAX_DEFAULT_FILE_SIZE_BYTES = 10 * 1024 * 1024


class UploadService(IThreadService):
def __init__(self, netstore_service: SimpleNetStore, sdk_config: Configuration):
super().__init__(netstore_service=netstore_service, sdk_config=sdk_config)
self._thread_callback: Optional[Callable[[GenericApiReturn], None]] = None

def start_analysis(
self,
@@ -31,7 +35,7 @@ def start_analysis(
debug_file_path: str | None = None,
tags: Optional[list[str]] = None,
public: bool = True,
thread_callback: Optional[callable] = None
thread_callback: Optional[Callable[[GenericApiReturn], None]] = None
) -> None:
"""
Starts the analysis as a background job.
@@ -50,7 +54,8 @@ def start_analysis(
)

def call_callback(self, generic_return: GenericApiReturn) -> None:
self._thread_callback(generic_return)
if self._thread_callback:
self._thread_callback(generic_return)

def analyse_file(
self,
@@ -80,7 +85,7 @@ def analyse_file(
debug_sha256 = sha256_file(dp)

# First, upload the file
upload_response = self.upload_user_file(
upload_response: GenericApiReturn[BaseResponseUploadResponse] = self.upload_user_file(
file_path=file_path,
upload_file_type=UploadFileType.BINARY, # must match server UploadFileType
force_overwrite=True,
@@ -89,9 +94,6 @@ def analyse_file(
if upload_response.success:
logger.info("RevEng.AI: Uploaded binary file")
else:
logger.error(
f"RevEng.AI: Failed to upload binary file: {upload_response.error_message}"
)
self.call_callback(generic_return=upload_response)
return

@@ -120,16 +122,25 @@ def analyse_file(
)
self.call_callback(generic_return=final_response)

def _get_max_upload_size(self) -> int:
with self.yield_api_client(sdk_config=self.sdk_config) as api_client:
config_client: ConfigApi = ConfigApi(api_client)
response: BaseResponseConfigResponse = config_client.get_config()
if response.data:
return response.data.max_file_size_bytes

return MAX_DEFAULT_FILE_SIZE_BYTES

def _upload_file_req(
self,
upload_file_type: UploadFileType,
file: Tuple[str, bytes],
packed_password: Optional[str] = None,
force_overwrite: bool = False,
) -> None:
) -> BaseResponseUploadResponse:
with self.yield_api_client(sdk_config=self.sdk_config) as api_client:
analyses_client = AnalysesCoreApi(api_client)
analyses_client.upload_file(
return analyses_client.upload_file(
upload_file_type=UploadFileType(upload_file_type),
force_overwrite=force_overwrite,
packed_password=packed_password,
@@ -143,19 +154,25 @@ def upload_user_file(
upload_file_type: UploadFileType,
packed_password: Optional[str] = None,
force_overwrite: bool = False,
) -> GenericApiReturn[None]:
) -> GenericApiReturn[BaseResponseUploadResponse]:
p = Path(file_path)
if not p.is_file():
return GenericApiReturn(success=False, error_message="File does not exist.")

try:
file_bytes = p.read_bytes()
blob: bytes = p.read_bytes()
except Exception:
return GenericApiReturn(success=False, error_message="File does not exist.")

response = self.api_request_returning(

max_upload_size_bytes: int = self._get_max_upload_size()

if len(blob) > max_upload_size_bytes:
max_upload_size_mb: float = max_upload_size_bytes / (1024 * 1024)
return GenericApiReturn(success=False, error_message=f"Failed to upload binary due to it exceeding maximum size limit of {max_upload_size_mb}MiB")

response: GenericApiReturn[BaseResponseUploadResponse] = self.api_request_returning(
lambda: self._upload_file_req(
upload_file_type, (p.name, file_bytes), packed_password, force_overwrite
upload_file_type, (p.name, blob), packed_password, force_overwrite
)
)

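The upload path now asks the server for its maximum file size via ConfigApi.get_config(), falls back to a 10 MiB default when no value comes back, and rejects oversized files before attempting the upload. A standalone sketch of just that size-guard logic under stated assumptions: get_server_max_bytes is a hypothetical stand-in for the _get_max_upload_size call, not a real SDK function.

# Size-guard sketch; get_server_max_bytes mimics _get_max_upload_size and is
# not part of the revengai SDK.
from pathlib import Path
from typing import Optional, Tuple

MAX_DEFAULT_FILE_SIZE_BYTES = 10 * 1024 * 1024  # same fallback as the PR

def get_server_max_bytes() -> Optional[int]:
    # Placeholder: the real code calls ConfigApi(api_client).get_config()
    # and reads response.data.max_file_size_bytes.
    return None

def check_upload_size(file_path: str) -> Tuple[bool, str]:
    blob = Path(file_path).read_bytes()
    limit = get_server_max_bytes() or MAX_DEFAULT_FILE_SIZE_BYTES
    if len(blob) > limit:
        limit_mib = limit / (1024 * 1024)
        return False, f"file exceeds maximum size limit of {limit_mib:.0f} MiB"
    return True, "ok"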