diff --git a/README.md b/README.md index 7f8594b..29ef7a6 100644 --- a/README.md +++ b/README.md @@ -1 +1,197 @@ -# Remote Developer \ No newline at end of file +# 🚀 Remote Developer CLI 🚀 + +[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) + +Automate your remote development workflow with this powerful CLI tool! 💻✨ + +## 🌟 Features + +- **Devcontainer Automation:** Effortlessly create, build, and start devcontainers on remote hosts. 📦 +- **Interactive Shell:** Access your devcontainer with an interactive shell for seamless command execution. 🐚 +- **Automatic File Synchronization:** Keep your local and remote directories in sync with real-time file monitoring. 🔄 +- **SSH Key Management:** Securely manage SSH keys for authentication. 🔑 +- **Docker Integration:** Validate and install Docker and Docker Compose on remote servers. 🐳 +- **Configuration Management:** Easily configure your remote development environment with a JSON configuration file. ⚙️ +- **Caching:** Reuses SSH connections and container names for faster subsequent runs. ⚡ +- **Logging:** Detailed logging for debugging and monitoring. 📝 + +## 🛠️ Prerequisites + +- Python >=3.11 +- [uv](https://docs.astral.sh/uv/getting-started/installation/) python package manager + +- A remote host with SSH access +- Docker and Docker Compose installed on the remote host + +> **Note**: The CLI can install them for you! + +## 📦 Installation + +1. Clone the repository: + + ```bash + git clone github.com/mohamedashraf-eng/remote-developer + cd remote-developer + ``` + +2. Install the required Python packages: + + ```bash + uv venv + uv sync && uv sync --upgrade + ``` + +## ⚙️ Configuration + +1. 
Create a `config.json` file (or copy and modify `remote-developer-config.example.json`): + + ```json + { + "remote_host": "user@remote-host.example.com", + "docker_image": "example-image:latest", + "remote_dir": "/home/user/example-project", + "port_mappings": [ + "1234:1234", + "5678:5678" + ], + "devcontainer_template": "./templates/devcontainer-template.txt.example", + "dockerfile_template": "./templates/dockerfile-template.txt.example", + "docker_compose_template": "./templates/docker-compose-template.txt.example", + "dockerignore_template": "./templates/dockerignore-template.txt.example" + } + ``` + + - `remote_host`: SSH user and host (e.g., `user@192.168.1.100`). + - `docker_image`: Docker image to use for the devcontainer. + - `remote_dir`: Remote directory where the project will be stored. + - `port_mappings`: List of port mappings (e.g., `["8080:8080", "3000:3000"]`). + - `devcontainer_template`, `dockerfile_template`, `docker_compose_template`, `dockerignore_template`: Paths to template files. + +2. Customize the template files in the `templates/` directory as needed. Example templates are provided with the `.example` extension. + +> **Tip:** The base templates function correctly as is. + +## 🚀 Usage + +```bash +uv run remote_developer.py --config config.json --path /path/to/your/project +``` + +- `--config`: Path to the `config.json` file. +- `--path`: Path to your local project directory. + +### Commands + +#### `start` + +Starts the devcontainer. + +```bash +python remote_developer.py --config config.json --path /path/to/your/project start +``` + +Options: + +- `--auto-sync`: Automatically syncs the local and remote directories. +- `--keep-alive`: Keeps the SSH connection alive and opens an interactive shell after starting the devcontainer. 
+ +Example: + +```bash +python remote_developer.py --config config.json --path /path/to/your/project start --auto-sync --keep-alive +``` + +#### `sync` + +Syncs files from the local directory to the remote directory. + +```bash +python remote_developer.py --config config.json --path /path/to/your/project sync +``` + +Options: + +- `--auto-sync`: Starts auto-sync in the background, monitoring for file changes. + +Example: + +```bash +python remote_developer.py --config config.json --path /path/to/your/project sync --auto-sync +``` + +#### `run` + +Runs a command on the remote host inside the devcontainer. + +```bash +python remote_developer.py --config config.json --path /path/to/your/project run +``` + +Example: + +```bash +python remote_developer.py --config config.json --path /path/to/your/project run ls -l /home/user/example-project/workspace +``` + +## 💡 Examples + +1. **Start the devcontainer and keep the shell open:** + + ```bash + python remote_developer.py --config config.json --path /path/to/your/project start --keep-alive + ``` + + This will start the devcontainer and open an interactive shell. You can then execute commands directly in the devcontainer. Type `exit` to close the shell. + +2. **Start the devcontainer with automatic file synchronization:** + + ```bash + python remote_developer.py --config config.json --path /path/to/your/project start --auto-sync + ``` + + This will start the devcontainer and automatically sync files between your local and remote directories. + +3. **Run a specific command in the devcontainer:** + + ```bash + python remote_developer.py --config config.json --path /path/to/your/project run python --version + ``` + + This will execute the `python --version` command inside the devcontainer and print the output. + +## 🔒 Security + +- SSH keys are used for authentication. The CLI will guide you through generating and setting up SSH keys if needed. 
+- The CLI uses `rsync` over SSH for file synchronization, ensuring secure data transfer. +- The `CLICommandExecutor` class includes security measures to prevent command injection vulnerabilities. + +## 📝 Logging + +Detailed logs are generated to help you troubleshoot any issues. The log level can be configured using the `LOG_LEVEL` environment variable (e.g., `DEBUG`, `INFO`, `WARNING`, `ERROR`, `CRITICAL`). + +Example: + +```bash +LOG_LEVEL=DEBUG python remote_developer.py --config config.json --path /path/to/your/project start +``` + +## ⚙️ Advanced Configuration + +### Environment Variables + +- `CACHE_LOCATION`: Specifies the location of the cache file. Defaults to platform-specific locations (Windows Registry, macOS plist, Linux JSON file). +- `LOG_LEVEL`: Specifies the log level. Defaults to `INFO`. + +### Templates + +The CLI uses template files for generating the `devcontainer.json`, `Dockerfile`, and `docker-compose.yml` files. You can customize these templates to suit your specific needs. + +## 🤝 Contributing + +Contributions are welcome! Please submit a pull request with your changes. + +## 📜 License + +This project is licensed under the MIT License. See the `LICENSE` file for details. 
+ +`Copyright (c) 2025 MoWx` diff --git a/docs/building.plantuml b/docs/building.plantuml new file mode 100644 index 0000000..0fd22ae --- /dev/null +++ b/docs/building.plantuml @@ -0,0 +1,42 @@ +@startuml +' Styling +skinparam activity { + BackgroundColor White + BorderColor Black + FontName Arial + FontSize 12 +} + +skinparam condition { + BackgroundColor White + BorderColor Black + FontName Arial + FontSize 12 +} + +skinparam note { + BackgroundColor LightYellow + BorderColor Black + FontName Arial + FontSize 11 +} + +' Start +start + +:Create Devcontainer Files (docker-compose.yml, Dockerfile); +if () then ([File Creation Failed]) + :Log Error: Devcontainer File Creation Failed; + stop +else ([Files Created]) + :Build and Start Devcontainer (docker-compose up -d); + if () then ([Devcontainer Start Failed]) + :Log Error: Devcontainer Start Failed; + stop + else ([Devcontainer Started]) + endif +endif + +stop + +@enduml diff --git a/docs/connectivity.plantuml b/docs/connectivity.plantuml new file mode 100644 index 0000000..f57fbf0 --- /dev/null +++ b/docs/connectivity.plantuml @@ -0,0 +1,37 @@ +@startuml +' Styling +skinparam activity { + BackgroundColor White + BorderColor Black + FontName Arial + FontSize 12 +} + +skinparam condition { + BackgroundColor White + BorderColor Black + FontName Arial + FontSize 12 +} + +skinparam note { + BackgroundColor LightYellow + BorderColor Black + FontName Arial + FontSize 11 +} + +' Start +start + +:Execute Remote Command; +if () then ([Command Execution Failed]) + :Log Error: Command Execution Failed; + :Return Error; +else ([Command Executed]) + :Return Output; +endif + +stop + +@enduml diff --git a/docs/execution.plantuml b/docs/execution.plantuml new file mode 100644 index 0000000..60063ac --- /dev/null +++ b/docs/execution.plantuml @@ -0,0 +1,35 @@ +@startuml +' Styling +skinparam activity { + BackgroundColor White + BorderColor Black + FontName Arial + FontSize 12 +} + +skinparam condition { + BackgroundColor White + 
BorderColor Black + FontName Arial + FontSize 12 +} + +skinparam note { + BackgroundColor LightYellow + BorderColor Black + FontName Arial + FontSize 11 +} + +' Start +start + +:Run Command in Devcontainer; +if () then ([Command Execution Failed]) + :Log Error: Command Execution Failed; +else ([Command Executed]) +endif + +stop + +@enduml diff --git a/docs/remote_developer_flowchart.plantuml b/docs/remote_developer_flowchart.plantuml new file mode 100644 index 0000000..b5c94a5 --- /dev/null +++ b/docs/remote_developer_flowchart.plantuml @@ -0,0 +1,155 @@ +@startuml +' Styling +skinparam activity { + BackgroundColor White + BorderColor Black + FontName Arial + FontSize 12 +} + +skinparam condition { + BackgroundColor White + BorderColor Black + FontName Arial + FontSize 12 +} + +skinparam note { + BackgroundColor LightYellow + BorderColor Black + FontName Arial + FontSize 11 +} + +' Start +start + +' Load Configuration +:Load Configuration from config.json; +if () then ([File Not Found]) + :Log Error: Config file not found; + :Display Error to User; + stop +elseif () then ([Invalid JSON]) + :Log Error: Invalid JSON in config file; + :Display Error to User; + stop +else ([Success]) +endif + +' Establish SSH Connection +:Establish SSH Connection; +if () then ([Key Found and Paired]) + :Connect using SSH Key; +elseif () then ([No Key or Not Paired]) + :Prompt User for Password; + :Connect using Password; + if () then ([Auth Failed]) + :Log Error: Authentication Failed; + :Display Error to User; + stop + else ([Auth Success]) + endif +else ([Connection Error]) + :Log Error: SSH Connection Error; + :Display Error to User; + stop +endif + +' Validate Configuration +:Validate Configuration Values; +if () then ([Invalid Config]) + :Log Error: Configuration Validation Failed; + :Display Error to User; + :Close SSH Connection (if open); + stop +else ([Valid Config]) +endif + +' Check and Setup SSH Keys +:Check for SSH Keys; +if () then ([Keys Exist and Paired]) + :Use 
Existing SSH Keys; +elseif () then ([No Keys or Not Paired]) + :Generate SSH Key Pair; + if () then ([Key Generation Failed]) + :Log Error: SSH Key Generation Failed; + :Display Error to User; + :Close SSH Connection (if open); + stop + else ([Key Generation Success]) + :Automate Copy Public Key to Remote; + if () then ([Copy Failed]) + :Log Error: Public Key Copy Failed; + :Display Error to User; + :Close SSH Connection (if open); + stop + else ([Copy Success]) + :Update Config File (private key path, ssh_paired=True); + if () then ([Update Failed]) + :Log Error: Config File Update Failed; + :Display Error to User; + :Close SSH Connection (if open); + stop + else ([Update Success]) + endif + endif + endif +endif + +' Ensure Remote Directory +:Ensure Remote Directory Exists; +if () then ([Directory Creation Failed]) + :Log Error: Remote Directory Creation Failed; + :Display Error to User; + :Close SSH Connection (if open); + stop +else ([Directory Exists or Created]) +endif + +' Create Devcontainer Files +:Create Devcontainer Files (docker-compose.yml, Dockerfile); +if () then ([File Creation Failed]) + :Log Error: Devcontainer File Creation Failed; + :Display Error to User; + :Close SSH Connection (if open); + stop +else ([Files Created]) +endif + +' Build and Start Devcontainer +:Build and Start Devcontainer (docker-compose up -d); +if () then ([Devcontainer Start Failed]) + :Log Error: Devcontainer Start Failed; + :Display Error to User; + :Close SSH Connection (if open); + stop +else ([Devcontainer Started]) +endif + +' File Sync +:Start File Synchronization (rsync); +note right: Continuous background process +if () then ([Sync Interrupted]) + :Log Info: File Sync Interrupted; +else ([Sync Error]) + :Log Warning: File Sync Error; +endif + +' Run Command (Optional) +if () then ([Run Command Requested]) + :Run Command in Devcontainer; + if () then ([Command Execution Failed]) + :Log Error: Command Execution Failed; + :Display Error to User; + else ([Command 
Executed]) + :Display Command Output to User; + endif +endif + +' Close SSH Connection +:Close SSH Connection; + +stop + +@enduml diff --git a/docs/security.plantuml b/docs/security.plantuml new file mode 100644 index 0000000..6d9fa43 --- /dev/null +++ b/docs/security.plantuml @@ -0,0 +1,54 @@ +@startuml +' Styling +skinparam activity { + BackgroundColor White + BorderColor Black + FontName Arial + FontSize 12 +} + +skinparam condition { + BackgroundColor White + BorderColor Black + FontName Arial + FontSize 12 +} + +skinparam note { + BackgroundColor LightYellow + BorderColor Black + FontName Arial + FontSize 11 +} + +' Start +start + +:Check for Existing SSH Keys; +if () then ([Keys Exist and Paired]) + :Return Private Key Path; +else ([No Keys or Not Paired]) + :Generate SSH Key Pair; + if () then ([Key Generation Failed]) + :Log Error: SSH Key Generation Failed; + stop + else ([Key Generation Success]) + :Automate Copy Public Key to Remote; + if () then ([Copy Failed]) + :Log Error: Public Key Copy Failed; + stop + else ([Copy Success]) + :Update Config File (private key path, ssh_paired=True); + if () then ([Update Failed]) + :Log Error: Config File Update Failed; + stop + else ([Update Success]) + :Return Private Key Path; + endif + endif + endif +endif + +stop + +@enduml diff --git a/docs/syncing.plantuml b/docs/syncing.plantuml new file mode 100644 index 0000000..e5b3d90 --- /dev/null +++ b/docs/syncing.plantuml @@ -0,0 +1,37 @@ +@startuml +' Styling +skinparam activity { + BackgroundColor White + BorderColor Black + FontName Arial + FontSize 12 +} + +skinparam condition { + BackgroundColor White + BorderColor Black + FontName Arial + FontSize 12 +} + +skinparam note { + BackgroundColor LightYellow + BorderColor Black + FontName Arial + FontSize 11 +} + +' Start +start + +:Start File Synchronization (rsync); +note right: Continuous background process +if () then ([Sync Interrupted]) + :Log Info: File Sync Interrupted; +else ([Sync Error]) + :Log Warning: 
File Sync Error; +endif + +stop + +@enduml diff --git a/pyproject.toml b/pyproject.toml index cc61ca2..0c8be30 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,7 @@ [project] -name = "agenticai" +name = "remote-developer" version = "0.1.0" -description = "A multi-agent AI framework for developing and testing intelligent agents" +description = "Remote Developer Software" readme = "README.md" requires-python = ">=3.11" dependencies = [ diff --git a/remote_developer/__init__.py b/remote_developer/__init__.py new file mode 100644 index 0000000..859ef74 --- /dev/null +++ b/remote_developer/__init__.py @@ -0,0 +1 @@ +"""Remote Developer Software.""" diff --git a/remote_developer/components/__init__.py b/remote_developer/components/__init__.py new file mode 100644 index 0000000..cc84ec7 --- /dev/null +++ b/remote_developer/components/__init__.py @@ -0,0 +1 @@ +"""Component modules for remote_developer.""" diff --git a/remote_developer/components/building.py b/remote_developer/components/building.py new file mode 100644 index 0000000..969587a --- /dev/null +++ b/remote_developer/components/building.py @@ -0,0 +1,472 @@ +"""Building module.""" + +import os +import tempfile +import posixpath +import yaml +import json +import uuid +from utils.logger import Logger, get_level_from_env +from components.connectivity import execute_remote_command +from utils import regcache + +logger = Logger(__name__, level=get_level_from_env()) + +CACHE_LOCATION = os.getenv("CACHE_LOCATION") + + +async def create_devcontainer_files(config, ssh_client): + """Creates the devcontainer files (docker-compose.yml, Dockerfile) and copies the project to the remote server. + + Args: + config (dict): Configuration dictionary containing remote directory, Docker image, + port mappings, template file paths, and local directory. + ssh_client (paramiko.SSHClient): SSH client object for connecting to the remote server. 
+ + Returns: + bool: True if the files were created and uploaded successfully, False otherwise. + """ + remote_dir = config["remote_dir"] + docker_image = config["docker_image"] + port_mappings = config.get("port_mappings", []) + local_dir = config["local_dir"] + remote_host = config["remote_host"] + + # Define the new directory structure + devcontainer_dir = posixpath.join(remote_dir, ".devcontainer") + docker_dir = posixpath.join(remote_dir, "docker") + workspace_dir = posixpath.join(remote_dir, "workspace") + + # Get or generate container name + connection_info = regcache.get_cache_item(remote_host, CACHE_LOCATION) or {} + container_id = connection_info.get("container_id") + + if not container_id: + container_id = str(uuid.uuid4()) # Generate a new UUID + connection_info["container_id"] = container_id + regcache.set_cache_item(remote_host, connection_info, CACHE_LOCATION) + logger.info(f"Generated new container name for {remote_host}: {container_id}") + else: + logger.debug(f"Retrieved container name from cache for {remote_host}: {container_id}") + + config["container_id"] = container_id # Add container_id to config + + sftp = ssh_client.open_sftp() + try: + # Create the directory structure + await create_remote_directory(sftp, devcontainer_dir, ssh_client) + await create_remote_directory(sftp, docker_dir, ssh_client) + await create_remote_directory(sftp, workspace_dir, ssh_client) + + # Define the files to upload with their templates and remote paths + files_to_upload = [ + { + "template_path": config["devcontainer_template"], + "remote_path": posixpath.join(devcontainer_dir, "devcontainer.json"), + "description": "devcontainer.json", + "template_vars": { + "_project_id": uuid.uuid4(), + }, + "is_yaml": True, + }, + { + "template_path": config["dockerfile_template"], + "remote_path": posixpath.join(docker_dir, "Dockerfile"), + "description": "Dockerfile", + "template_vars": {"_from": docker_image, "_workdir": workspace_dir}, + }, + { + "template_path": 
config["docker_compose_template"], + "remote_path": posixpath.join(docker_dir, "docker-compose.yml"), + "description": "docker-compose.yml", + "template_vars": { + "_workspace_dir": workspace_dir, + "_port_mappings": "\n ".join([f"- {p}" for p in port_mappings]), + }, + }, + { + "template_path": config["dockerignore_template"], + "remote_path": posixpath.join(docker_dir, ".dockerignore"), + "description": ".dockerignore", + }, + ] + + # Upload the files + for file_info in files_to_upload: + if not await upload_templated_file(sftp, file_info, ssh_client): + return False + + # Copy the project files to the workspace directory + logger.info(f"Copying project from {local_dir} to {workspace_dir}") + await upload_directory(sftp, local_dir, workspace_dir, ssh_client) + logger.debug(f"Project copied successfully from {local_dir} to {workspace_dir}") + + # Build and start the devcontainer + if not await build_and_start_devcontainer(config, ssh_client): + return False + + except Exception as e: + logger.error(f"An error occurred: {e}") + return False + finally: + sftp.close() + return True + + +async def upload_templated_file(sftp, file_info, ssh_client): + """Uploads a file to the remote server, substituting variables in the template. + + Args: + sftp (paramiko.sftp_client.SFTPClient): SFTP client object. + file_info (dict): Dictionary containing template path, remote path, description, and template variables. + ssh_client (paramiko.SSHClient): SSH client object (not currently used, but kept for potential future use). + + Returns: + bool: True if the file was uploaded successfully, False otherwise. 
+ """ + template_path = file_info.get("template_path") + remote_path = file_info.get("remote_path") + description = file_info.get("description") + template_vars = file_info.get("template_vars", {}) + is_yaml = file_info.get("is_yaml", False) # Check if it's a YAML file + + try: + with open(template_path) as f: + template_content = f.read() + logger.debug(f"Template loaded from {template_path}") + except FileNotFoundError: + logger.error(f"Template not found at {template_path}") + return False + + try: + if is_yaml: + # Load YAML and convert to JSON + template_content = template_content.format(**template_vars) + yaml_data = yaml.safe_load(template_content) + file_contents = json.dumps(yaml_data, indent=2) + else: + # Substitute variables in the template + file_contents = template_content.format(**template_vars) + except (KeyError, yaml.YAMLError) as e: + logger.error(f"Error processing template: {e}") + return False + + try: + sftp.stat(remote_path) + logger.info(f"{description} already exists at {remote_path}. Skipping upload.") + return True + except OSError: + logger.info(f"Uploading {description} to {remote_path}") + try: + with tempfile.NamedTemporaryFile(mode="w", delete=False) as tmp: + tmp.write(file_contents) + tmp.close() + sftp.put(tmp.name, remote_path) + os.remove(tmp.name) + logger.debug(f"{description} uploaded successfully.") + return True + except Exception as e: + logger.error(f"Error uploading {description} to {remote_path}: {e}") + return False + + +async def upload_directory(sftp, local_path, remote_path, ssh_client): + """Uploads a directory and its contents to the remote server, creating non-existent directories. + + Args: + sftp (paramiko.sftp_client.SFTPClient): SFTP client object. + local_path (str): Path to the local directory. + remote_path (str): Path to the remote directory. + ssh_client (paramiko.SSHClient): SSH client object for executing remote commands. 
+ """ + ignored_files = [ + ".venv", + ".cache", + ".mypy_cache", + "__pycache__", + ".pytest_cache", + ".tox", + ".eggs", + ".egg-info", + "dist", + "build", + "buck-out", + "coverage.xml", + "nosetests.xml", + "coverage_html_report", + "htmlcov", + "nose2.html-report", + "nose2.junit-xml-report", + "nose2.junit-xml-report.xml", + ] + + for item in os.listdir(local_path): + if item in ignored_files: + continue + local_item_path = os.path.join(local_path, item) + remote_item_path = posixpath.join(remote_path, item) # Use posixpath.join + + if os.path.isfile(local_item_path): + try: + # Double-check that local_item_path is the correct file path + if not os.path.exists(local_item_path): + logger.error(f"Local file does not exist: {local_item_path}") + raise FileNotFoundError(f"Local file does not exist: {local_item_path}") + + # Check if the remote file exists + try: + sftp.stat(remote_item_path) + logger.debug(f"File already exists at {remote_item_path}. Skipping upload.") + except OSError: + logger.debug(f"Uploading file: {local_item_path} to {remote_item_path}") + sftp.put(local_item_path, remote_item_path) + logger.debug( + f"File uploaded successfully: {local_item_path} to {remote_item_path}" + ) + + except Exception as e: + logger.error(f"Failed to upload file {local_item_path} to {remote_item_path}: {e}") + raise + elif os.path.isdir(local_item_path): + # Check if the remote directory exists + try: + sftp.stat(remote_item_path) + logger.debug(f"Remote directory already exists: {remote_item_path}") + except FileNotFoundError: + # Create the remote directory if it doesn't exist + logger.debug(f"Remote directory does not exist: {remote_item_path}. 
Creating it.") + try: + sftp.mkdir(remote_item_path) + logger.debug(f"Remote directory created successfully: {remote_item_path}") + except Exception as e: + logger.error(f"Error creating remote directory: {remote_item_path}: {e}") + raise Exception(f"Failed to create remote directory: {remote_item_path}") from e + + await upload_directory(sftp, local_item_path, remote_item_path, ssh_client) + + +async def create_remote_directory(sftp, remote_path, ssh_client): + """Creates a directory on the remote server if it doesn't exist. + + Args: + sftp (paramiko.sftp_client.SFTPClient): SFTP client object. + remote_path (str): Path to the remote directory. + ssh_client (paramiko.SSHClient): SSH client object for executing remote commands. + """ + try: + sftp.stat(remote_path) + logger.debug(f"Remote directory already exists: {remote_path}") + except FileNotFoundError: + logger.debug(f"Remote directory does not exist: {remote_path}. Creating it.") + try: + sftp.mkdir(remote_path) + logger.debug(f"Remote directory created successfully: {remote_path}") + except Exception as e: + logger.error(f"Error creating remote directory: {remote_path}: {e}") + raise Exception(f"Failed to create remote directory: {remote_path}") from e + + +async def validate_and_install_docker_requirements(ssh_client): + """Validates Docker and Docker Compose requirements on the remote server and installs them if missing. + + Args: + ssh_client (paramiko.SSHClient): SSH client object for connecting to the remote server. + + Returns: + bool: True if all requirements are met or successfully installed, False otherwise. + """ + try: + # Check if Docker is installed + docker_installed, docker_version = await check_docker_installed(ssh_client) + if not docker_installed: + logger.warning("Docker is not installed. 
Attempting to install Docker.") + if not await install_docker(ssh_client): + logger.error("Failed to install Docker.") + return False + docker_installed, docker_version = await check_docker_installed(ssh_client) + if not docker_installed: + logger.error("Docker installation failed.") + return False + logger.info(f"Docker is installed (Version: {docker_version})") + + # Check if Docker Compose is installed + docker_compose_installed, docker_compose_version = await check_docker_compose_installed( + ssh_client + ) + if not docker_compose_installed: + logger.warning("Docker Compose is not installed. Attempting to install Docker Compose.") + if not await install_docker_compose(ssh_client): + logger.error("Failed to install Docker Compose.") + return False + docker_compose_installed, docker_compose_version = await check_docker_compose_installed( + ssh_client + ) + if not docker_compose_installed: + logger.error("Docker Compose installation failed.") + return False + logger.info(f"Docker Compose is installed (Version: {docker_compose_version})") + + return True + + except Exception as e: + logger.error(f"An error occurred while validating Docker requirements: {e}") + return False + + +async def check_docker_installed(ssh_client): + """Checks if Docker is installed on the remote server and returns the version.""" + try: + command = "docker --version" + output, error = await execute_remote_command(ssh_client, command) + if error: + logger.debug(f"Docker not installed: {error}") + return False, None + version = output.splitlines()[0].split("version ")[1].strip() + logger.debug(f"Docker version: {version}") + return True, version + except Exception as e: + logger.error(f"Error checking Docker version: {e}") + return False, None + + +async def check_docker_compose_installed(ssh_client): + """Checks if Docker Compose is installed on the remote server and returns the version.""" + try: + command = "docker compose version" + output, error = await 
execute_remote_command(ssh_client, command) + if error: + logger.debug(f"Docker Compose not installed: {error}") + return False, None + version = output.splitlines()[0].split(" ")[3].strip() + logger.debug(f"Docker Compose version: {version}") + return True, version + except Exception as e: + logger.error(f"Error checking Docker Compose version: {e}") + return False, None + + +async def install_docker(ssh_client): + """Installs Docker on the remote server (Ubuntu/Debian).""" + try: + # Update package index + command1 = "apt-get update" + output1, error1 = await execute_remote_command(ssh_client, command1) + if error1: + logger.error(f"Error updating package index: {error1}") + return False + + # Install Docker + command2 = "apt-get install -y docker.io" + output2, error2 = await execute_remote_command(ssh_client, command2) + if error2: + logger.error(f"Error installing Docker: {error2}") + return False + + # Start Docker service + command3 = "systemctl start docker" + output3, error3 = await execute_remote_command(ssh_client, command3) + if error3: + logger.warning(f"Error starting Docker service: {error3}") + # Non-critical error, continue anyway + + logger.info("Docker installed successfully.") + return True + except Exception as e: + logger.error(f"Error installing Docker: {e}") + return False + + +async def install_docker_compose(ssh_client): + """Installs Docker Compose on the remote server (in user's home directory).""" + try: + # Ensure ~/.local/bin exists + command0 = "mkdir -p ~/.local/bin" + output0, error0 = await execute_remote_command(ssh_client, command0) + if error0: + logger.error(f"Error creating ~/.local/bin directory: {error0}") + return False + + # Download Docker Compose + command1 = 'curl -sL "https://github.com/docker/compose/releases/latest/download/docker-compose-$(uname -s)-$(uname -m)" -o ~/.local/bin/docker-compose' + output1, error1 = await execute_remote_command(ssh_client, command1) + if error1: + logger.error(f"Error downloading 
Docker Compose: {error1}") + return False + + # Apply executable permissions + command2 = "chmod +x ~/.local/bin/docker-compose" + output2, error2 = await execute_remote_command(ssh_client, command2) + if error2: + logger.error(f"Error applying executable permissions to Docker Compose: {error2}") + return False + + # Add ~/.local/bin to PATH (if not already there) + command3 = "if ! grep -q ~/.local/bin ~/.profile; then echo 'export PATH=$PATH:~/.local/bin' >> ~/.profile; fi" + output3, error3 = await execute_remote_command(ssh_client, command3) + if error3: + logger.warning(f"Error adding ~/.local/bin to PATH: {error3}") + # Non-critical error, continue anyway + + # Source ~/.profile to update the PATH in the current session + command4 = "source ~/.profile" + output4, error4 = await execute_remote_command(ssh_client, command4) + if error4: + logger.warning(f"Error sourcing ~/.profile: {error4}") + # Non-critical error, continue anyway + + # Check if Docker Compose is installed (after updating PATH) + docker_compose_installed, _ = await check_docker_compose_installed(ssh_client) + if not docker_compose_installed: + logger.error("Docker Compose installation failed after updating PATH.") + return False + + logger.info("Docker Compose installed successfully in ~/.local/bin.") + return True + except Exception as e: + logger.error(f"Error installing Docker Compose: {e}") + return False + + +async def build_and_start_devcontainer(config, ssh_client): + """Builds the Docker image and starts the devcontainer on the remote server. + + Args: + config (dict): Configuration dictionary containing remote directory. + ssh_client (paramiko.SSHClient): SSH client object for connecting to the remote server. + + Returns: + bool: True if the devcontainer was built and started successfully, False otherwise. 
async def execute_remote_command(ssh_client, command):
    """Execute *command* on the remote server over the given SSH client.

    Args:
        ssh_client (paramiko.SSHClient): Connected SSH client.
        command (str): Shell command line to run remotely.

    Returns:
        tuple: ``(output, error)`` — decoded stdout and stderr text. On a
        local failure ``output`` is ``None`` and ``error`` carries the
        exception message.
    """
    try:
        logger.debug(f"Executing remote command: {command}")
        stdin, stdout, stderr = ssh_client.exec_command(command)
        output = stdout.read().decode("utf-8")
        error = stderr.read().decode("utf-8")
        if output:
            logger.debug(f"Command output: {output.strip()}")
        if error:
            logger.error(f"Command error: {error.strip()}")
        return output, error
    except Exception as e:
        logger.error(f"Error executing command: {e}")
        return None, str(e)


async def ensure_remote_directory(config, ssh_client):
    """Ensure the configured remote directory exists, creating it if needed.

    Args:
        config (dict): Configuration with "remote_host" and "remote_dir".
        ssh_client (paramiko.SSHClient): Connected SSH client.

    Returns:
        bool: True if the directory exists (or was created), False on error.
    """
    import shlex  # local import: only needed for path quoting here

    remote_host = config["remote_host"]
    remote_dir = config["remote_dir"]

    logger.info(f"Ensuring remote directory exists: {remote_dir} on {remote_host}")
    # FIX: quote the path so directories containing spaces or shell
    # metacharacters work and the configured value cannot break out of the
    # remote shell command. `mkdir -p` is idempotent, so the previous
    # `[ -d ... ] ||` pre-check was redundant.
    command = f"mkdir -p {shlex.quote(remote_dir)}"
    output, error = await execute_remote_command(ssh_client, command)
    if error:
        logger.error(f"Error creating remote directory: {error.strip()}")
        return False
    return True
async def run_command_in_devcontainer(config, ssh_client, command_parts):
    """Run a command inside the running devcontainer.

    Args:
        config (dict): Configuration containing "remote_host" and "remote_dir".
        ssh_client (paramiko.SSHClient): Connected SSH client.
        command_parts (list): The command and its arguments as separate strings.

    Returns:
        None. Output is logged and printed; errors are logged, not raised.
    """
    remote_host = config["remote_host"]
    remote_dir = config["remote_dir"]
    workspace_dir = posixpath.join(remote_dir, "workspace")

    # Resolve the container name from the connection cache.
    connection_info = regcache.get_cache_item(remote_host, CACHE_LOCATION) or {}
    container_id = connection_info.get("container_id")
    if not container_id:
        # FIX: previously a missing cache entry produced the docker filter
        # "name=None", which matches nothing and yields a confusing docker
        # error. Fail fast with an actionable message instead.
        logger.error(
            "No container_id cached for this host; start the devcontainer first."
        )
        return

    # Build the remote command: locate the container by name, then run the
    # user's command inside it from the workspace directory.
    command = " ".join(command_parts)
    docker_exec_command = (
        f'docker exec -t $(docker ps -q --filter "name={container_id}" | head -n 1) '
        f'bash -c "cd {workspace_dir} && {command}"'
    )

    try:
        logger.debug(f"Executing command in devcontainer: {command}")
        output, error = await execute_remote_command(ssh_client, docker_exec_command)

        if error:
            logger.error(f"Error executing command in devcontainer: {error}")
        else:
            logger.debug("Command executed successfully in devcontainer.")
            print(f"Command output:\n {output.strip()}")

    except Exception as e:
        logger.error(
            f"An unexpected error occurred while running the command in the devcontainer: {e}"
        )
async def check_and_setup_ssh_keys(config, config_path):
    """Check for existing SSH keys and guide the user through setup if needed.

    Args:
        config (dict): Configuration dictionary (expects "remote_host" as user@host).
        config_path (str): Path to the configuration file.

    Returns:
        bool: True on success, False if key generation failed.
    """
    connection_info = regcache.get_cache_item(config["remote_host"], CACHE_LOCATION) or {}
    private_key_path = connection_info.get("private_key_path", None)

    if private_key_path and os.path.exists(private_key_path):
        logger.info(f"Using existing SSH key at: {private_key_path}")
        # FIX: public_key_path was previously assigned only in the else
        # branch, so this path raised NameError at the cache update below.
        public_key_path = private_key_path + ".pub"
    else:
        username = config["remote_host"].split("@")[0]
        date_str = datetime.datetime.now().strftime("%Y%m%d")  # YYYYMMDD
        key_name = f"{username}_{date_str}_ed25519"
        # Expand ~ to the user's home directory.
        private_key_path = os.path.expanduser(os.path.join("~/.ssh", key_name))
        public_key_path = private_key_path + ".pub"

        if os.path.exists(private_key_path) and os.path.exists(public_key_path):
            logger.info(f"SSH key pair found at: {private_key_path} and {public_key_path}")
            logger.info("Skipping key generation as key already exists.")
        else:
            logger.warning("No SSH key pair found or not paired. Starting SSH key setup...")
            if not await generate_ssh_key_pair(private_key_path):
                return False  # Key generation failed
            logger.info("SSH key pair generated successfully.")

    # Update connection info in cache.
    connection_info["remote_host"] = config["remote_host"]
    connection_info["private_key_path"] = private_key_path
    connection_info["ssh_paired"] = os.path.exists(private_key_path) and os.path.exists(
        public_key_path
    )
    regcache.set_cache_item(config["remote_host"], connection_info, CACHE_LOCATION)

    return True
async def generate_ssh_key_pair(private_key_path):
    """Generate a new Ed25519 SSH key pair using ssh-keygen.

    Args:
        private_key_path (str): Destination path for the private key; the
            public key is written alongside with a ``.pub`` suffix.

    Returns:
        bool: True if the key pair was generated successfully, False otherwise.
    """
    try:
        command = [
            "ssh-keygen",
            "-t",
            "ed25519",
            "-f",
            private_key_path,
            "-N",
            "",  # empty passphrase
        ]
        logger.debug(f"Running command: {' '.join(command)}")
        result = await cli_executor.execute_command(command)

        logger.debug(f"ssh-keygen output: {result.stdout.strip()}")
        if result.stderr:
            logger.warning(f"ssh-keygen warning: {result.stderr.strip()}")
        # FIX: previously returned True unconditionally. ssh-keygen reports
        # failures (unwritable path, existing key refusal, blocked execution)
        # via its exit status, so honor it.
        if result.returncode != 0:
            logger.error(f"ssh-keygen failed with exit code {result.returncode}")
            return False
        return True
    except FileNotFoundError:
        logger.critical("ssh-keygen command not found. Please ensure openssh-client is installed.")
        return False
    except Exception as e:
        logger.error(f"Error generating SSH key pair: {e}")
        return False
class SavingKeyVerifier(paramiko.MissingHostKeyPolicy):
    """MissingHostKeyPolicy that records the offered host key for later user confirmation."""

    def __init__(self):
        """Initialize the verifier with no captured key."""
        self.key = None
        self.hostname = None
        self.keytype = None

    def missing_host_key(self, client, hostname, key):
        """Capture the unknown host key instead of rejecting the connection."""
        logger.debug(f"missing_host_key called for {hostname}")
        self.key = key
        self.hostname = hostname
        self.keytype = key.get_name()
        return


async def establish_ssh_connection(config, config_path):
    """Establish an SSH connection to the remote host, reusing cached key info.

    Args:
        config (dict): Configuration dictionary (expects "remote_host" as user@host).
        config_path (str): Path to the configuration file.

    Returns:
        paramiko.SSHClient: Connected client on success, None otherwise.
    """
    username, hostname = config["remote_host"].split("@", 1)
    remote_host = config["remote_host"]

    # FIX: bind these before the try block so the except handlers never hit
    # a NameError when a failure occurs before the client is created.
    ssh_client = None
    key_verifier = SavingKeyVerifier()
    public_key_path = None

    try:
        logger.info(f"Connecting to remote host: {remote_host}")

        # Load connection info from cache.
        connection_info = regcache.get_cache_item(remote_host, CACHE_LOCATION)
        private_key_path = connection_info.get("private_key_path") if connection_info else None
        public_key_path = private_key_path + ".pub" if private_key_path else None
        status = connection_info.get("status") if connection_info else "disconnected"
        ssh_paired = connection_info.get("ssh_paired", False) if connection_info else False

        ssh_client = create_ssh_client(private_key_path, hostname, username, config)
        ssh_client.set_missing_host_key_policy(key_verifier)

        if (
            connection_info
            and status == "connected"
            and private_key_path
            and os.path.exists(private_key_path)
        ):
            # FIX: the previous implementation returned the client here
            # without ever calling connect(), handing callers an unusable
            # (unconnected) SSHClient. Reconnect with the cached key.
            logger.info(f"Reusing cached connection for {remote_host} with key: {private_key_path}")
            ssh_client.connect(
                hostname=hostname,
                username=username,
                key_filename=private_key_path,
                look_for_keys=False,
                allow_agent=False,
            )
            return ssh_client

        logger.info(f"Establishing new SSH connection for {remote_host}")

        # Try key-based auth first, falling back to an interactive password.
        if private_key_path and os.path.exists(private_key_path):
            logger.debug(f"Attempting SSH connection with key: {private_key_path}")
            try:
                ssh_client.connect(
                    hostname=hostname,
                    username=username,
                    key_filename=private_key_path,
                    look_for_keys=False,
                    allow_agent=False,
                )
            except Exception as e:
                logger.error(f"Connection failed with key: {e}")
                logger.debug("Attempting SSH connection with password after key failure.")
                password = click.prompt("Enter password for remote host", hide_input=True)
                ssh_client.connect(
                    hostname=hostname, username=username, password=password, allow_agent=False
                )
        else:
            password = click.prompt("Enter password for remote host", hide_input=True)
            logger.debug("Attempting SSH connection with password.")
            ssh_client.connect(
                hostname=hostname, username=username, password=password, allow_agent=False
            )

        logger.info("SSH connection established.")

        # Automate public key transfer after a successful first connection.
        if not ssh_paired:
            # NOTE(review): public_key_path may be None when no key exists
            # yet (pure-password path); this mirrors the original flow —
            # confirm upstream always generates a key before pairing.
            if not await automate_copy_public_key(config, public_key_path, ssh_client):
                logger.error("Failed to automate public key transfer.")
                ssh_client.close()
                return None
            connection_info = regcache.get_cache_item(remote_host, CACHE_LOCATION) or {}
            connection_info["ssh_paired"] = True
            regcache.set_cache_item(remote_host, connection_info, CACHE_LOCATION)
            logger.info("Public key copied and ssh_paired set to True in cache.")
        else:
            logger.info("Skipping public key copy as connection is already paired.")

        # Record the healthy connection in the cache.
        connection_info = regcache.get_cache_item(remote_host, CACHE_LOCATION) or {}
        connection_info["status"] = "connected"
        connection_info["last_connected"] = datetime.datetime.now().isoformat()
        regcache.set_cache_item(remote_host, connection_info, CACHE_LOCATION)

        return ssh_client

    except paramiko.AuthenticationException:
        logger.error("Authentication failed. Check your username, password, and SSH keys.")
        click.echo("Authentication failed.")
        if ssh_client:
            ssh_client.close()
        return None
    except paramiko.ssh_exception.SSHException as e:
        logger.warning(f"SSH Key Verification Failed: {e}")

        if not key_verifier.key:
            logger.error("Host key was not captured during initial connection attempt.")
            click.echo("Host key was not captured during initial connection attempt.")
            if ssh_client:
                ssh_client.close()
            return None

        if not click.confirm(
            f"Do you trust this host ({hostname}) and want to add it to your known_hosts file?"
        ):
            click.echo("Host key verification failed. Connection rejected.")
            if ssh_client:
                ssh_client.close()
            return None

        try:
            # Persist the captured host key, then retry the connection once.
            known_hosts_path = os.path.expanduser("~/.ssh/known_hosts")
            with open(known_hosts_path, "a") as f:
                f.write(
                    f"{hostname} {key_verifier.key.get_name()} {key_verifier.key.get_base64()}\n"
                )
            logger.info(f"Added host key for {hostname} to {known_hosts_path}")
            click.echo(f"Added host key for {hostname} to {known_hosts_path}")

            if ssh_client:
                ssh_client.close()
            ssh_client = paramiko.SSHClient()
            ssh_client.load_system_host_keys()
            key_verifier = SavingKeyVerifier()
            ssh_client.set_missing_host_key_policy(key_verifier)

            if "private_key_path" in config and os.path.exists(config["private_key_path"]):
                logger.debug(f"Attempting SSH connection with key: {config['private_key_path']}")
                ssh_client.connect(
                    hostname=hostname,
                    username=username,
                    key_filename=config["private_key_path"],
                    look_for_keys=False,
                    allow_agent=False,
                )
            else:
                password = click.prompt("Enter password for remote host", hide_input=True)
                logger.debug("Attempting SSH connection with password.")
                ssh_client.connect(
                    hostname=hostname, username=username, password=password, allow_agent=False
                )

            # Pair the key on the retried connection as well.
            if not await automate_copy_public_key(config, public_key_path, ssh_client):
                logger.error("Failed to automate public key transfer.")
                ssh_client.close()
                return None

            return ssh_client

        except Exception as e:
            logger.error(f"Error adding host key to known_hosts: {e}")
            click.echo(f"Error adding host key to known_hosts: {e}")
            if ssh_client:
                ssh_client.close()
            return None
    except Exception as e:
        logger.error(f"An unexpected error occurred: {e}")
        click.echo(f"An unexpected error occurred: {e}")
        if ssh_client:
            ssh_client.close()
        return None


def create_ssh_client(private_key_path, hostname, username, config):
    """Create a paramiko SSHClient preloaded with system host keys and the
    saving host-key policy. Does not connect."""
    ssh_client = paramiko.SSHClient()
    ssh_client.load_system_host_keys()
    ssh_client.set_missing_host_key_policy(SavingKeyVerifier())
    return ssh_client
+ + Returns: + None + """ + if ssh_client: + ssh_client.close() + logger.info("SSH connection closed.") + # Update connection info in cache + connection_info = regcache.get_cache_item(remote_host, CACHE_LOCATION) or {} + connection_info["status"] = "disconnected" + regcache.set_cache_item(remote_host, connection_info, CACHE_LOCATION) diff --git a/remote_developer/components/syncing.py b/remote_developer/components/syncing.py new file mode 100644 index 0000000..f6740d1 --- /dev/null +++ b/remote_developer/components/syncing.py @@ -0,0 +1,56 @@ +"""Syncing module.""" + +import time +from utils.logger import Logger, get_level_from_env +from utils.cli_command_executor import CLICommandExecutor + +logger = Logger(__name__, level=get_level_from_env()) + +cli_executor = CLICommandExecutor() + + +async def sync_files(config): + """Syncs files from the local directory to the remote directory using rsync. + + Args: + config (dict): Configuration dictionary containing local directory, remote host, + remote directory, and private key path. + + Returns: + None: This function does not return a value; it runs indefinitely. + """ + local_dir = config["local_dir"] + remote_host = config["remote_host"] + remote_dir = config["remote_dir"] + + # Construct the rsync command + rsync_command = [ + "rsync", + "-avz", # Archive mode (preserves permissions, etc.), verbose, compress + "--delete", # Delete extraneous files from dest dirs + "-e", + f"ssh -o StrictHostKeyChecking=no -i {config.get('private_key_path', '')} -o UserKnownHostsFile=/dev/null", # -i is important for the key. 
+ f"{local_dir}/", # Source directory (note the trailing slash) + f"{remote_host}:{remote_dir}", # Destination directory + ] + # Run this forever to ensure the changes + while True: + try: + logger.debug(f"Running rsync command: {' '.join(rsync_command)}") + result = await cli_executor.execute_command(rsync_command) + + if result.stdout: + logger.debug(f"rsync output: {result.stdout.strip()}") + if result.stderr: + logger.warning(f"rsync error: {result.stderr.strip()}") + + except FileNotFoundError: + logger.critical("rsync command not found. Please install rsync.") + exit(1) + except Exception as e: + logger.error(f"Error syncing files: {e}") + except KeyboardInterrupt: + logger.info("Sync interrupted") + exit() + + time.sleep(1) diff --git a/remote_developer/pyproject.toml b/remote_developer/pyproject.toml new file mode 100644 index 0000000..972ec12 --- /dev/null +++ b/remote_developer/pyproject.toml @@ -0,0 +1,27 @@ +[project] +name = "remote-developer" +version = "0.1.0" +description = "Remote Developer Software" +authors = [ { name = "Wx"} ] +readme = "README.md" +requires-python = ">=3.11" +classifiers = [ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", +] + +dependencies = [ + "click>=8.1.8", + "paramiko>=3.5.1", + "python-dotenv>=1.0.1", + "pyyaml>=6.0.2", + "watchdog>=6.0.0", +] + +[project.urls] +"Homepage" = "https://github.com/mohamedashraf-eng/remote-developer" +"Bug Tracker" = "https://github.com/mohamedashraf-eng/remote-developer/issues" + +[tool.uv.sources] +remote-developer = { index = "remote-developer" } diff --git a/remote_developer/remote-developer-config.example.json b/remote_developer/remote-developer-config.example.json new file mode 100644 index 0000000..9ed344b --- /dev/null +++ b/remote_developer/remote-developer-config.example.json @@ -0,0 +1,13 @@ +{ + "remote_host": "user@remote-host.example.com", + "docker_image": "example-image:latest", + "remote_dir": 
"/home/user/example-project", + "port_mappings": [ + "1234:1234", + "5678:5678" + ], + "devcontainer_template": "./templates/devcontainer-template.txt.example", + "dockerfile_template": "./templates/dockerfile-template.txt.example", + "docker_compose_template": "./templates/docker-compose-template.txt.example", + "dockerignore_template": "./templates/dockerignore-template.txt.example" +} diff --git a/remote_developer/remote_developer.py b/remote_developer/remote_developer.py new file mode 100644 index 0000000..103abb5 --- /dev/null +++ b/remote_developer/remote_developer.py @@ -0,0 +1,204 @@ +"""Main entry point for the remote_developer CLI.""" + +import click +import json +import asyncio +import os + +from utils.logger import Logger, get_level_from_env +from utils import validation + +from components import security, connectivity, building, syncing, execution +from utils.wdm import start_watching + +logger = Logger(__name__, level=get_level_from_env()) + +CACHE_LOCATION = os.getenv("CACHE_LOCATION") + + +async def setup_and_connect(ctx): + """Sets up the environment and establishes an SSH connection. + + Returns: + An SSH client object if successful, None otherwise. 
+ """ + config_path = ctx.obj["config_path"] + config = ctx.obj["config"] + ssh_client = None + + try: + # Validate Configurations + if not await validation.validate_config_values(config): + raise Exception("Configuration validation failed.") + + # Check and setup SSH keys + if not await security.check_and_setup_ssh_keys(config, config_path): + raise Exception("SSH key setup failed.") + + # Reload the configuration from file + logger.info("Reloading configuration after SSH key setup.") + with open(config_path) as f: + config = json.load(f) + ctx.obj["config"] = config + + # Establish SSH connection + ssh_client = await security.establish_ssh_connection(config, config_path) + if not ssh_client: + raise Exception("Failed to establish SSH connection.") + + if not await connectivity.ensure_remote_directory(config, ssh_client): + raise Exception("Failed to ensure remote directory exists.") + if not await building.create_devcontainer_files(config, ssh_client): + raise Exception("Failed to create devcontainer files.") + if not await building.build_and_start_devcontainer(config, ssh_client): + raise Exception("Failed to build and start devcontainer.") + + return ssh_client + + except Exception as e: + logger.error(f"An error occurred during setup: {e}") + if ssh_client: + try: + await security.close_ssh_connection(ssh_client, config["remote_host"]) + except Exception as close_err: + logger.error(f"Error closing SSH connection during setup failure: {close_err}") + return None + + +async def run_rdc_command(ctx, ssh_client, command): + """Runs a command in the devcontainer.""" + try: + await execution.run_command_in_devcontainer(ctx.obj["config"], ssh_client, command) + except Exception as e: + logger.error(f"An error occurred while running the command: {e}") + + +async def interactive_shell(ctx, ssh_client): + """Opens an interactive shell to the devcontainer.""" + logger.info("Entering interactive shell. 
Type 'exit' to quit.") + try: + while True: + try: + command = input("[remote-dev]> ") + if command.lower() == "exit": + break + await run_rdc_command(ctx, ssh_client, command.split()) + except KeyboardInterrupt: + logger.debug("\nExiting interactive shell.") + break + except Exception as e: + logger.error(f"Error running command: {e}") + finally: + try: + await security.close_ssh_connection(ssh_client, ctx.obj["config"]["remote_host"]) + except Exception as close_err: + logger.error(f"Error closing SSH connection: {close_err}") + + +@click.group() +@click.pass_context +@click.option("--config", required=True, help="Path to the config.json file.") +@click.option("--path", required=True, help="Path to the project directory.") +@click.option( + "--auto-sync", is_flag=True, help="Automatically syncs the local and remote directories." +) +@click.option( + "--keep-alive", is_flag=True, help="Keep the SSH connection alive after the command completes." +) +def cli(ctx, config, path, auto_sync, keep_alive): + """Remote development automation.""" + ctx.ensure_object(dict) + ctx.obj["config_path"] = config + ctx.obj["project_path"] = path + ctx.obj["auto_sync"] = auto_sync + ctx.obj["keep_alive"] = keep_alive + + try: + logger.info(f"Loading configuration from {config}") + with open(config) as f: + ctx.obj["config"] = json.load(f) + logger.debug(f"Configuration loaded: {ctx.obj['config']}") + + # Modify the config to update the local_dir field + if not os.path.isabs(path): + path = os.path.abspath(path) + if not os.path.isdir(path): + logger.error(f"Invalid path: {path}. 
@cli.command()
@click.pass_context
def start(ctx):
    """Starts the devcontainer (optionally with auto-sync and interactive shell)."""

    async def start_devcontainer_async():
        ssh_client = await setup_and_connect(ctx)
        if ssh_client:
            if ctx.obj.get("keep_alive"):
                await interactive_shell(ctx, ssh_client)
            else:
                try:
                    await security.close_ssh_connection(
                        ssh_client, ctx.obj["config"]["remote_host"]
                    )
                except Exception as close_err:
                    logger.error(f"Error closing SSH connection: {close_err}")

        # FIX: asyncio.create_task() requires a *running* event loop; the
        # previous code called it after asyncio.run() had already returned,
        # raising "RuntimeError: no running event loop" and never starting
        # the watcher. Await the watcher inside the running loop instead.
        if ctx.obj.get("auto_sync"):
            await start_watching(ctx.obj["config"])

    asyncio.run(start_devcontainer_async())


@cli.command()
@click.pass_context
def sync(ctx):
    """Syncs files from the local directory to the remote directory."""
    asyncio.run(syncing.sync_files(ctx.obj["config"]))
connection: {close_err}") + + asyncio.run(run_remote_command_async()) + + +if __name__ == "__main__": + cli(obj={}) diff --git a/remote_developer/templates/devcontainer-template.txt b/remote_developer/templates/devcontainer-template.txt new file mode 100644 index 0000000..d517d35 --- /dev/null +++ b/remote_developer/templates/devcontainer-template.txt @@ -0,0 +1,21 @@ +name: "project-{_project_id}" +build: + dockerfile: "../docker/Dockerfile" + args: + DEBIAN_FRONTEND: "noninteractive" +workspaceFolder: "/workspace" +features: + ghcr.io/devcontainers-contrib/features/apt-packages:1: + packages: "make file lsb-release cmake git gcc-multilib wget xz-utils ruby github-cli" +customizations: + vscode: + extensions: + - "ms-vscode.cmake-tools" + - "github.vscode-github-actions" + - "dbaeumer.vscode-eslint" + - "eamodio.gitlens" + - "mhutchie.git-graph" + - "ms-vscode.cpptools" + - "ms-python.python" + - "ms-python.vscode-pylance" +postCreateCommand: "uv sync" diff --git a/remote_developer/templates/docker-compose-template.txt b/remote_developer/templates/docker-compose-template.txt new file mode 100644 index 0000000..d00739e --- /dev/null +++ b/remote_developer/templates/docker-compose-template.txt @@ -0,0 +1,11 @@ +services: + app: + build: + context: . 
+ dockerfile: Dockerfile + volumes: + - {_workspace_dir}:{_workspace_dir} + working_dir: {_workspace_dir} + ports: + {_port_mappings} + tty: true diff --git a/remote_developer/templates/dockerfile-template.txt b/remote_developer/templates/dockerfile-template.txt new file mode 100644 index 0000000..d620a68 --- /dev/null +++ b/remote_developer/templates/dockerfile-template.txt @@ -0,0 +1,18 @@ +FROM {_from} + +# Avoid stuck at console asking for input +ARG DEBIAN_FRONTEND=noninteractive + +# Update and install necessary packages +RUN apt-get update && apt-get install -y \ + python3 \ + python3-pip \ + python3-venv \ + build-essential \ + && apt-get clean + +# Set the working directory +WORKDIR {_workdir} + +# Set the default command +CMD ["/bin/bash"] diff --git a/remote_developer/templates/dockerignore-template.txt b/remote_developer/templates/dockerignore-template.txt new file mode 100644 index 0000000..c0de4ab --- /dev/null +++ b/remote_developer/templates/dockerignore-template.txt @@ -0,0 +1,8 @@ +# Python +__pycache__ +app.egg-info +*.pyc +.mypy_cache +.coverage +htmlcov +.venv diff --git a/remote_developer/utils/__init__.py b/remote_developer/utils/__init__.py new file mode 100644 index 0000000..8cf43a2 --- /dev/null +++ b/remote_developer/utils/__init__.py @@ -0,0 +1 @@ +"""Utility modules for remote_developer.""" diff --git a/remote_developer/utils/cli_command_executor.py b/remote_developer/utils/cli_command_executor.py new file mode 100644 index 0000000..18e4dd3 --- /dev/null +++ b/remote_developer/utils/cli_command_executor.py @@ -0,0 +1,103 @@ +"""Utility module for executing CLI commands.""" + +import asyncio +import subprocess +import shlex +import re +from utils.logger import Logger + +logger = Logger(__name__) + + +class CommandResult: + """Represents the result of a command execution.""" + + def __init__(self, stdout, stderr, returncode): + """Initializes a new CommandResult instance.""" + self.stdout = stdout + self.stderr = stderr + self.returncode 
= returncode + + +class CLICommandExecutor: + """Executes CLI commands using subprocess.""" + + def __init__(self, allowed_commands=None, disallowed_patterns=None): + """ + Initializes the CLICommandExecutor with optional security configurations. + + Args: + allowed_commands (list, optional): A list of explicitly allowed commands + (e.g., ['rsync', 'ping']). If None, all + commands are allowed subject to disallowed_patterns. + Defaults to None. + disallowed_patterns (list, optional): A list of regular expression patterns + that, if matched in a command, will cause + the execution to be rejected. This is a + secondary safety net. Defaults to None. + """ + self.allowed_commands = allowed_commands + self.disallowed_patterns = ( + disallowed_patterns + if disallowed_patterns is not None + # Default disallowed patterns for security. + else [ + r";", # Prevent command chaining + r"\|", # Prevent command piping + r">>", # Prevent output redirection/overwriting + r"`", # Prevent command substitution + r"\$", # Prevent variable expansion + ] + ) + logger.debug( + f"CLICommandExecutor initialized with allowed_commands: {self.allowed_commands} and disallowed_patterns: {self.disallowed_patterns}" + ) + + async def execute_command(self, command): + """Executes a command using subprocess and returns the output.""" + logger.debug(f"Attempting to execute command: {command}") + if not self._is_command_safe(command): + logger.warning(f"Command {command} is not safe to execute, blocking execution.") + return CommandResult( + "", "Command execution blocked due to security policy.", 1 + ) # Or raise an exception + + try: + process = await asyncio.create_subprocess_exec( + *command, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + logger.debug(f"Process started with PID: {process.pid}") + stdout, stderr = await process.communicate() + logger.debug(f"Process finished with return code: {process.returncode}") + result = CommandResult(stdout.decode(), stderr.decode(), 
process.returncode) + logger.debug( + f"Command execution result: stdout='{result.stdout}', stderr='{result.stderr}', returncode={result.returncode}" + ) + return result + except FileNotFoundError as e: + logger.error(f"Command not found: {e}") + return CommandResult( + "", f"Command not found: {e}", 127 + ) # Mimic shell return code for command not found + except Exception as e: + logger.exception(f"An unexpected error occurred: {e}") + return CommandResult("", f"An unexpected error occurred: {e}", 1) + + def _is_command_safe(self, command): + """Checks if a command is safe to execute based on allowed commands and disallowed patterns.""" + # Check if command is allowed (if allowed_commands is specified) + if self.allowed_commands is not None and command[0] not in self.allowed_commands: + logger.warning(f"Command {command[0]} is not in the list of allowed commands.") + return False + + # Check for disallowed patterns + command_string = " ".join(shlex.quote(arg) for arg in command) # Quote arguments for safety + logger.debug(f"Command string after quoting: {command_string}") + for pattern in self.disallowed_patterns: + if re.search(pattern, command_string): + logger.warning(f"Command blocked due to matching disallowed pattern: {pattern}") + return False + + return True diff --git a/remote_developer/utils/logger.py b/remote_developer/utils/logger.py new file mode 100644 index 0000000..e665980 --- /dev/null +++ b/remote_developer/utils/logger.py @@ -0,0 +1,204 @@ +"""Logger module.""" + +import logging +import sys +from enum import Enum + + +class LogLevel(Enum): + """ + Enumeration for log levels. + + This enum provides a type-safe way to represent different log levels. + Each member corresponds to a logging level from the Python `logging` module. + + Members: + DEBUG: Represents the DEBUG log level. + INFO: Represents the INFO log level. + WARNING: Represents the WARNING log level. + ERROR: Represents the ERROR log level. 
class LogLevel(Enum):
    """Type-safe log levels mirroring the stdlib ``logging`` constants.

    Each member's value is the corresponding ``logging`` level number, so
    ``LogLevel.X.value`` can be passed anywhere the stdlib expects a level.
    """

    DEBUG = logging.DEBUG
    INFO = logging.INFO
    WARNING = logging.WARNING
    ERROR = logging.ERROR
    CRITICAL = logging.CRITICAL


class Color:
    """ANSI escape codes for colored terminal output.

    Attributes:
        RESET: resets the terminal color to default.
        RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE: bright foreground
            colors (90-series ANSI codes).
    """

    RESET = "\033[0m"
    RED = "\033[91m"
    GREEN = "\033[92m"
    YELLOW = "\033[93m"
    BLUE = "\033[94m"
    MAGENTA = "\033[95m"
    CYAN = "\033[96m"
    WHITE = "\033[97m"


class ColoredFormatter(logging.Formatter):
    """Formatter that wraps each record's text in a level-specific color.

    Attributes:
        COLOR_MAP (dict): maps ``logging`` level numbers to ANSI color codes.
            Unknown levels fall back to white.
    """

    COLOR_MAP = {
        logging.DEBUG: Color.CYAN,
        logging.INFO: Color.GREEN,
        logging.WARNING: Color.YELLOW,
        logging.ERROR: Color.RED,
        logging.CRITICAL: Color.MAGENTA,
    }

    def format(self, record):
        """Formats the log record and wraps it in ANSI color codes.

        Args:
            record (logging.LogRecord): the record to format.

        Returns:
            str: the formatted message, colorized by level.
        """
        color = self.COLOR_MAP.get(record.levelno, Color.WHITE)
        return f"{color}{super().format(record)}{Color.RESET}"


class Logger:
    """Thin wrapper around the ``logging`` module with optional colors.

    Attributes:
        logger (logging.Logger): the underlying stdlib logger instance,
            shared process-wide per name via ``logging.getLogger``.
    """

    # Shared line format for both the plain and colorized formatter.
    _FORMAT = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"

    def __init__(self, name: str, level: LogLevel = LogLevel.DEBUG, colorize: bool = True):
        """Creates (or re-uses) the named logger.

        Args:
            name (str): the logger name.
            level (LogLevel, optional): minimum log level to record.
                Defaults to LogLevel.DEBUG.
            colorize (bool, optional): whether to colorize output.
                Defaults to True.
        """
        self.logger = logging.getLogger(name)
        self.logger.setLevel(level.value)

        # Fix: a handler was previously added unconditionally, so building a
        # Logger with the same name more than once duplicated every emitted
        # line (logging.getLogger returns one shared instance per name).
        # Attach the stdout handler only on first construction.
        if not self.logger.handlers:
            handler = logging.StreamHandler(sys.stdout)
            formatter_cls = ColoredFormatter if colorize else logging.Formatter
            handler.setFormatter(formatter_cls(self._FORMAT))
            self.logger.addHandler(handler)

    def log(self, level: LogLevel, message: str, *args, **kwargs):
        """Logs *message* at *level*; extra args go to logging.Logger.log."""
        self.logger.log(level.value, message, *args, **kwargs)

    def debug(self, message: str, *args, **kwargs):
        """Logs *message* at DEBUG level."""
        self.log(LogLevel.DEBUG, message, *args, **kwargs)

    def info(self, message: str, *args, **kwargs):
        """Logs *message* at INFO level."""
        self.log(LogLevel.INFO, message, *args, **kwargs)

    def warning(self, message: str, *args, **kwargs):
        """Logs *message* at WARNING level."""
        self.log(LogLevel.WARNING, message, *args, **kwargs)

    def error(self, message: str, *args, **kwargs):
        """Logs *message* at ERROR level."""
        self.log(LogLevel.ERROR, message, *args, **kwargs)

    def critical(self, message: str, *args, **kwargs):
        """Logs *message* at CRITICAL level."""
        self.log(LogLevel.CRITICAL, message, *args, **kwargs)


def get_level_from_env():
    """Reads the log level from the LOG_LEVEL environment variable.

    A local ``.env`` file is loaded via python-dotenv when that package is
    installed; its absence is no longer fatal (fix: the import used to be
    unconditional). Defaults to INFO when LOG_LEVEL is unset.

    Returns:
        LogLevel: the configured level.

    Raises:
        ValueError: if LOG_LEVEL names an unknown level.
    """
    import os

    try:
        import dotenv

        dotenv.load_dotenv()
    except ImportError:
        # python-dotenv is an optional convenience; plain env vars still work.
        pass

    level_name = os.getenv("LOG_LEVEL", "INFO").upper()
    if level_name not in LogLevel.__members__:
        raise ValueError(
            f"Invalid log level: {level_name}. Valid levels are: {', '.join(LogLevel.__members__.keys())}"
        )
    return LogLevel[level_name]
def save_cache(cache, cache_location=None):
    """Persists *cache* to the platform-specific store (registry/plist/JSON).

    Args:
        cache (dict): the cache contents to persist.
        cache_location: registry key path (Windows) or file path
            (macOS/Linux). Defaults to the module-level CACHE_LOCATION,
            resolved at call time rather than def time (fix) so the module
            default can be overridden after import.

    Errors are logged and swallowed: persistence is best-effort by design.
    """
    if cache_location is None:
        cache_location = CACHE_LOCATION
    logger.debug(f"Saving cache to: {cache_location} (Platform: {platform.system()})")
    try:
        if platform.system() == "Windows":
            try:
                key = winreg.CreateKey(winreg.HKEY_CURRENT_USER, cache_location)
                logger.debug(f"Successfully created Windows Registry key: {cache_location}")
            except FileNotFoundError:
                # NOTE(review): CreateKey creates missing keys, so this
                # fallback looks unreachable -- confirm on Windows before
                # removing it.
                logger.debug(
                    f"Registry key not found, attempting to open for write: {cache_location}"
                )
                key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, cache_location, 0, winreg.KEY_WRITE)
                logger.debug(f"Successfully opened Registry key for write: {cache_location}")
            # Values are stored as JSON strings so arbitrary serializable
            # values survive the REG_SZ round trip (load_cache json.loads them).
            for name, value in cache.items():
                winreg.SetValueEx(key, name, 0, winreg.REG_SZ, json.dumps(value))
            winreg.CloseKey(key)
        elif platform.system() == "Darwin":
            os.makedirs(os.path.dirname(cache_location), exist_ok=True)
            with open(cache_location, "wb") as f:
                plistlib.dump(cache, f)
        else:
            os.makedirs(os.path.dirname(cache_location), exist_ok=True)
            with open(cache_location, "w") as f:
                json.dump(cache, f, indent=2)
    except Exception as e:
        logger.error(f"Error saving cache: {e}")


def get_cache_item(key, cache_location=None):
    """Returns the cached value for *key*, or None when absent."""
    if cache_location is None:
        cache_location = CACHE_LOCATION
    logger.debug(f"Getting cache item: Key={key}, Cache Location={cache_location}")
    cache = load_cache(cache_location)
    return cache.get(key)


def set_cache_item(key, value, cache_location=None):
    """Stores *value* under *key* (read-modify-write of the whole cache).

    NOTE(review): the load/modify/save cycle is not atomic -- concurrent
    writers can lose updates. Confirm single-process usage.
    """
    if cache_location is None:
        cache_location = CACHE_LOCATION
    logger.debug(f"Setting cache item: Key={key}, Cache Location={cache_location}")
    cache = load_cache(cache_location)
    cache[key] = value
    save_cache(cache, cache_location)


def delete_cache_item(key, cache_location=None):
    """Removes *key* from the cache if present; logs and skips otherwise."""
    if cache_location is None:
        cache_location = CACHE_LOCATION
    logger.debug(f"Deleting cache item: Key={key}, Cache Location={cache_location}")
    cache = load_cache(cache_location)
    if key in cache:
        del cache[key]
        save_cache(cache, cache_location)
    else:
        logger.debug(f"Cache item not found, deletion skipped: Key={key}")


async def is_valid_ip_address(ip_address):
    """Returns True if *ip_address* is a valid IPv4 or IPv6 literal."""
    try:
        ipaddress.ip_address(ip_address)
        return True
    except ValueError:
        logger.warning(f"Invalid IP address: {ip_address}")
        return False


async def is_host_pingable(hostname):
    """Returns True if *hostname* answers a single ICMP echo request.

    Fix: the previous version ignored ping's exit status and only scanned
    stdout for "Destination host unreachable", so timeouts and resolution
    failures (non-zero exit with a different message) were misreported as
    reachable.
    """
    try:
        command = ["ping", "-c", "1", hostname]  # -c 1: single probe (POSIX ping)
        logger.debug(f"Running command: {' '.join(command)}")
        result = await cli_executor.execute_command(command)

        output = result.stdout.strip()
        logger.debug(f"Ping output: {output}")
        # Some ping builds exit 0 while still printing unreachability, so
        # keep the textual check in addition to the exit status.
        if "Destination host unreachable" in output:
            return False
        return result.returncode == 0
    except Exception as e:
        logger.error(f"Error pinging host: {hostname}. Error: {e}")
        return False


async def is_port_available(port):
    """Returns True if TCP *port* can be bound on localhost.

    Only the loopback interface is checked; a port bound on another
    interface would still appear available here.
    """
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.bind(("localhost", port))
        sock.close()
        logger.debug(f"Port {port} is available on localhost.")
        return True
    except OSError as e:
        logger.warning(f"Port {port} is not available on localhost. Error: {e}")
        return False


async def validate_config_values(config):
    """Validates the remote_host and port_mappings entries of *config*.

    Returns:
        bool: True when all checks pass; False on the first failure
        (details are logged).
    """
    # Remote host: must exist and be user@host with a pingable IP literal.
    if "remote_host" not in config:
        logger.error("Missing remote_host in config.")
        return False
    remote_host_data = config["remote_host"].split("@")
    if len(remote_host_data) != 2:
        logger.error("Remote Host, doesn't have the correct user@host format.")
        return False
    host = remote_host_data[1]
    # NOTE(review): assumes the host part is an IP literal; a DNS hostname
    # would fail is_valid_ip_address -- confirm this is intended.
    if not await is_valid_ip_address(host):
        logger.error("Invalid IP for Remote Host.")
        return False
    if not await is_host_pingable(host):
        logger.error("Cannot Ping Remote Host.")
        return False

    # Port mappings: only the local side is checked; the remote port is
    # validated implicitly when the container starts.
    if "port_mappings" in config:
        for port_mapping in config["port_mappings"]:
            try:
                local_port = int(port_mapping.split(":")[0])
                if not await is_port_available(local_port):
                    logger.error(f"Local port {local_port} is not available.")
                    return False
            except (ValueError, IndexError):
                logger.error("Invalid port mapping format")
                return False
    logger.info("All Config Validations has passed")
    return True
"""Utility module for file system monitoring using watchdog."""

import asyncio
import time

import watchdog.events
import watchdog.observers

from utils.logger import Logger, get_level_from_env

from components import syncing

logger = Logger(__name__, level=get_level_from_env())


class SyncEventHandler(watchdog.events.PatternMatchingEventHandler):
    """Handles file system events and triggers debounced synchronization.

    Watchdog delivers events on its own observer thread, so sync coroutines
    must be handed to the asyncio event loop thread-safely.
    """

    def __init__(
        self,
        config,
        patterns=None,
        ignore_patterns=None,
        ignore_directories=False,
        case_sensitive=False,
        loop=None,
    ):
        """Initializes the event handler.

        Args:
            config: project configuration passed through to syncing.sync_files.
            patterns, ignore_patterns, ignore_directories, case_sensitive:
                forwarded unchanged to PatternMatchingEventHandler.
            loop (asyncio.AbstractEventLoop, optional): event loop to schedule
                the sync coroutine on. New, defaulted parameter -- existing
                callers are unaffected.
        """
        super().__init__(patterns, ignore_patterns, ignore_directories, case_sensitive)
        self.config = config
        self.loop = loop
        self.sync_debounce_delay = 1  # seconds; minimum gap between syncs
        self.last_sync_time = 0

    def on_any_event(self, event):
        """Schedules a (debounced) sync in response to any file system event."""
        current_time = time.time()
        if current_time - self.last_sync_time > self.sync_debounce_delay:
            logger.info(f"Detected change: {event.event_type} path : {event.src_path}")
            coro = syncing.sync_files(self.config)
            if self.loop is not None:
                # Fix: this callback runs on the watchdog observer thread,
                # where asyncio.create_task() raises "no running event loop".
                # Hand the coroutine to the captured loop thread-safely.
                asyncio.run_coroutine_threadsafe(coro, self.loop)
            else:
                # Legacy fallback for callers that supplied no loop; only
                # valid when an event loop is running on this thread.
                asyncio.create_task(coro)
            self.last_sync_time = current_time
        else:
            logger.debug(f"Debouncing sync for {event.src_path}")


async def start_watching(config):
    """Watches config["local_dir"] recursively and syncs on changes.

    Runs until the task is cancelled; the observer is always stopped and
    joined on exit.
    """
    path = config["local_dir"]
    # Capture the running loop so observer-thread callbacks can schedule work.
    event_handler = SyncEventHandler(config, loop=asyncio.get_running_loop())
    observer = watchdog.observers.Observer()
    observer.schedule(event_handler, path, recursive=True)
    observer.start()
    logger.info(f"Watching for changes in {path}...")
    try:
        while True:
            # Keep the task alive; all real work happens in the handler.
            await asyncio.sleep(1)
    except asyncio.CancelledError:
        logger.info("Stopping file watching...")
    finally:
        observer.stop()
        observer.join()
        logger.info("File watching stopped.")