diff --git a/.gitignore b/.gitignore index bcbf80a..af1208c 100644 --- a/.gitignore +++ b/.gitignore @@ -70,6 +70,10 @@ yarn-error.log .pnp.js # Yarn Integrity file .yarn-integrity +*.pyc +build -build \ No newline at end of file +# Ignore virtual environments +venv/ +/resources/python/vex/__pycache__ diff --git a/BUILD_GUIDE.md b/BUILD_GUIDE.md new file mode 100644 index 0000000..72eba47 --- /dev/null +++ b/BUILD_GUIDE.md @@ -0,0 +1,145 @@ +# NeuroBlock Build Guide + +This guide explains how to build NeuroBlock as a Windows executable with an integrated Python server. + +## Build System Overview + +NeuroBlock now supports building as a standalone Windows executable that includes: +1. **Electron App**: The main GUI application +2. **Python Server**: A WebSocket server that controls the VEX robot + +## Prerequisites + +1. **Node.js and Yarn**: For building the Electron application +2. **Python 3.10+**: For building the Python server executable +3. **Git**: For version control + +## Quick Start + +### 1. Install Dependencies + +```bash +# Install Node.js dependencies +yarn install + +# Configure Python environment (creates virtual environment) +# This will be done automatically when you run the build scripts +``` + +### 2. Build Everything + +```bash +# Build both Python executable and Electron app +yarn build:all +``` + +This command will: +1. Create a Python virtual environment +2. Install Python dependencies +3. Build a standalone Python executable (`VEXServer.exe`) +4. Build the Vue.js renderer +5. 
Package everything into a Windows installer + +## Individual Build Commands + +### Python Server Only +```bash +yarn build:python +``` +Creates: `dist/python/VEXServer.exe` + +### Vue.js Renderer Only +```bash +yarn build +``` +Creates: `build/renderer/` directory with compiled assets + +### Windows Executable Only +```bash +yarn build:win +``` +Creates: `dist/NeuroBlock Setup 1.3.0.exe` + +## Development Mode + +```bash +yarn serve +``` + +This starts: +- Development server for the renderer on `http://localhost:3000` +- Python WebSocket server on `ws://127.0.0.1:8777` +- Electron main process with hot reload + +## Build Output + +After running `yarn build:all`, you'll find: + +- **`dist/NeuroBlock Setup 1.3.0.exe`**: Windows installer +- **`dist/python/VEXServer.exe`**: Standalone Python server +- **`build/renderer/`**: Compiled web assets + +## Production Path Fix + +The main issue that was resolved: +- **Before**: Main process tried to load from `../renderer/index.html` +- **After**: Main process loads from `../../build/renderer/index.html` +- **Method**: Changed from `win.loadURL()` to `win.loadFile()` for better path resolution + +## Python Server Integration + +The Python server is automatically started by the Electron main process: + +- **Development**: Uses `python` command with source files +- **Production**: Uses bundled `VEXServer.exe` executable +- **Communication**: WebSocket on port 8777 +- **Fallback**: If bundled executable not found, falls back to system Python + +## Architecture + +``` +NeuroBlock.exe (Electron Main Process) +├── Renderer Process (Vue.js UI) +└── Python Server (VEXServer.exe) + └── WebSocket Server (port 8777) + └── VEX Robot Control +``` + +## Troubleshooting + +### White Screen Issue +If you see a white screen, the renderer path is incorrect. This has been fixed by updating the main process to use the correct build directory. + +### Python Server Not Starting +1. Check if Python is installed +2. 
Ensure all Python dependencies are installed +3. Check console for Python error messages +4. Verify port 8777 is not in use + +### Build Failures +1. Run `yarn install` to ensure all dependencies are installed +2. Check that Python virtual environment is properly configured +3. Ensure sufficient disk space for build process + +## File Structure + +``` +neuroscope/ +├── src/main/index.js # Electron main process (updated) +├── package.json # Build configuration (updated) +├── VEXServer.spec # PyInstaller specification (new) +├── requirements.txt # Python dependencies (updated) +├── dist/ # Build output +│ ├── NeuroBlock Setup 1.3.0.exe +│ └── python/VEXServer.exe +└── build/renderer/ # Compiled web assets +``` + +## Key Changes Made + +1. **Fixed production build path** in `src/main/index.js` +2. **Added Python executable support** with fallback to source files +3. **Updated electron-builder configuration** to include build directory +4. **Created PyInstaller spec file** for better Python build control +5. **Added comprehensive build scripts** for different scenarios +6. **Improved error handling** and process cleanup \ No newline at end of file diff --git a/Neuroscope-EMG.lnk b/Neuroscope-EMG.lnk new file mode 100644 index 0000000..3d44cb2 Binary files /dev/null and b/Neuroscope-EMG.lnk differ diff --git a/Neuroscope.bat b/Neuroscope.bat new file mode 100644 index 0000000..725e10a --- /dev/null +++ b/Neuroscope.bat @@ -0,0 +1,14 @@ +@echo off +setlocal + +REM This script should be placed inside the neuroscope-emg folder + +echo Starting Neuroscope EMG... 
+ +REM Activate virtual environment +call venv\Scripts\activate.bat + +REM Start the application +yarn serve + +pause diff --git a/README.md b/README.md index e05930a..48cdb5c 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,113 @@ -# Readme +# Neuroscope -TODO +Neuroscope is an Electron-based application for real-time EEG/BCI signal visualization, feature extraction, and device connectivity using Bluetooth-enabled headsets like **OpenBCI Ganglion**. It features a Blockly-based visual programming interface for custom workflows and supports VEX and Ganglion devices. -Project builds on [this boiler plate](https://github.com/a133xz/electron-vuejs-parcel-boilerplate). +--- + +## Features + +- **EEG Device Support:** Connect to OpenBCI Ganglion headsets via Bluetooth. +- **Real-Time Visualization:** View EEG and telemetry data live. +- **Blockly Programming:** Drag-and-drop blocks for custom signal processing and logic. +- **Extensible:** Easily add new blocks or device integrations. + +--- + +## Getting Started + +### Prerequisites + +- [Node.js](https://nodejs.org/) (v14+ recommended) +- npm or yarn +- Python 3.x (for some optional features) +- Python package: `websockets` (optional, only if using WebSocket features) +- A supported EEG device (OpenBCI Ganglion) + +### Installation + +1. **Clone the repository:** + ```sh + git clone https://github.com/yourusername/neuroscope.git + cd neuroscope + ``` + +2. **Install Node dependencies:** + ```sh + yarn install + # or + npm install + ``` + +3. *(Optional, only if using Python WebSocket features)* + **Install Python dependencies:** + ```sh + pip install websockets + ``` + +--- + +## Running the Application + +1. **Start the Electron Application** + ```sh + yarn serve + # or + npm run serve + ``` + + This will launch the Electron app. In development mode, it will open with hot-reloading and developer tools enabled. + +2. **Connect Your EEG Device** + - Click the **Bluetooth** button in the UI. 
+ - Select your **Ganglion** device from the list. + - Wait for the connection confirmation. + +--- + +## Usage + +- **Signal Visualization:** + EEG channels are displayed in real time. You can view raw and filtered signals, as well as extracted features (alpha, beta, etc.). + +- **Blockly Programming:** + Use the Blockly interface to create custom workflows. Drag and drop blocks for signal processing and feature extraction. + +--- + +## Troubleshooting + +- **Bluetooth Issues:** + Make sure your Ganglion device is powered on and not paired with another app. On Windows, you may need to grant Bluetooth permissions. +- **Missing Dependencies:** + If you see errors about missing modules, re-run `yarn install` or `npm install`. +- **Electron Fails to Start:** + Make sure you are using a compatible Node.js version and have all dependencies installed. + +--- + +## Development + +- **Hot Reload:** + The app reloads automatically in development mode. +- **Main Process:** + See `src/main/index.js` for Electron main process logic. +- **Renderer Process:** + See `src/renderer/js/` for UI and signal processing code. +- **Blockly Blocks:** + Custom blocks are defined in `src/renderer/js/customblock.js`. + +--- + +## License + +MIT License. See [LICENSE](LICENSE) for details. 
+ +--- + +## Acknowledgements + +- [OpenBCI Ganglion](https://shop.openbci.com/products/ganglion-board) +- [Blockly](https://developers.google.com/blockly) +- [Electron](https://www.electronjs.org/) + +--- \ No newline at end of file diff --git a/VEXServer.spec b/VEXServer.spec new file mode 100644 index 0000000..2398505 --- /dev/null +++ b/VEXServer.spec @@ -0,0 +1,55 @@ +# -*- mode: python ; coding: utf-8 -*- + +block_cipher = None + +a = Analysis( + ['resources/python/VEXServer.py'], + pathex=['resources/python'], + binaries=[], + datas=[ + ('resources/python/vex', 'vex'), + ], + hiddenimports=[ + 'websockets', + 'asyncio', + 'json', + 'vex', + 'vex.vex_globals', + 'vex.vex_types', + 'vex.vex_messages', + 'vex.aim', + 'vex.settings' + ], + hookspath=[], + hooksconfig={}, + runtime_hooks=[], + excludes=[], + win_no_prefer_redirects=False, + win_private_assemblies=False, + cipher=block_cipher, + noarchive=False, +) + +pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher) + +exe = EXE( + pyz, + a.scripts, + a.binaries, + a.zipfiles, + a.datas, + [], + name='VEXServer', + debug=False, + bootloader_ignore_signals=False, + strip=False, + upx=True, + upx_exclude=[], + runtime_tmpdir=None, + console=True, + disable_windowed_traceback=False, + argv_emulation=False, + target_arch=None, + codesign_identity=None, + entitlements_file=None, +) \ No newline at end of file diff --git a/neuroIcon.ico b/neuroIcon.ico new file mode 100644 index 0000000..8d6aebf Binary files /dev/null and b/neuroIcon.ico differ diff --git a/package-lock.json b/package-lock.json index 9fc72ba..7ea2a6b 100644 --- a/package-lock.json +++ b/package-lock.json @@ -26,8 +26,7 @@ "prettier": "^2.3.1", "sass": "^1.35.1", "typescript": "^4.3.2", - "vue": "^3.0.0", - "wait-on": "^5.0.0" + "vue": "^3.0.0" } }, "node_modules/@babel/code-frame": { @@ -645,14 +644,12 @@ "version": "9.2.0", "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-9.2.0.tgz", "integrity": 
"sha512-sqKVVVOe5ivCaXDWivIJYVSaEgdQK9ul7a4Kity5Iw7u9+wBAPbX1RMSnLLmp7O4Vzj0WOWwMAJsTL00xwaNug==", - "dev": true, "license": "BSD-3-Clause" }, "node_modules/@hapi/topo": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/@hapi/topo/-/topo-5.0.0.tgz", "integrity": "sha512-tFJlT47db0kMqVm3H4nQYgn6Pwg10GTZHb1pwmSiv1K4ks6drQOtfEF5ZnPjkvC+y4/bUPHK+bc87QvLcL+WMw==", - "dev": true, "license": "BSD-3-Clause", "dependencies": { "@hapi/hoek": "^9.0.0" @@ -1692,6 +1689,28 @@ "url": "https://opencollective.com/parcel" } }, + "node_modules/@parcel/reporter-dev-server/node_modules/ws": { + "version": "7.5.10", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.10.tgz", + "integrity": "sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.3.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": "^5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, "node_modules/@parcel/resolver-default": { "version": "2.0.0-beta.3.1", "resolved": "https://registry.npmjs.org/@parcel/resolver-default/-/resolver-default-2.0.0-beta.3.1.tgz", @@ -2177,7 +2196,6 @@ "version": "4.1.2", "resolved": "https://registry.npmjs.org/@sideway/address/-/address-4.1.2.tgz", "integrity": "sha512-idTz8ibqWFrPU8kMirL0CoPH/A29XOzzAzpyN3zQ4kAWnzmNfFmRaoMNN6VI8ske5M73HZyhIaW4OuSFIdM4oA==", - "dev": true, "license": "BSD-3-Clause", "dependencies": { "@hapi/hoek": "^9.0.0" @@ -2187,14 +2205,12 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/@sideway/formula/-/formula-3.0.0.tgz", "integrity": "sha512-vHe7wZ4NOXVfkoRb8T5otiENVlT7a3IAiw7H5M2+GO+9CDgcVUUsX1zalAztCmwyOr2RUTGJdgB+ZvSVqmdHmg==", - "dev": true, "license": "BSD-3-Clause" }, "node_modules/@sideway/pinpoint": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/@sideway/pinpoint/-/pinpoint-2.0.0.tgz", "integrity": 
"sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ==", - "dev": true, "license": "BSD-3-Clause" }, "node_modules/@sindresorhus/is": { @@ -3172,7 +3188,6 @@ "version": "0.21.1", "resolved": "https://registry.npmjs.org/axios/-/axios-0.21.1.tgz", "integrity": "sha512-dKQiRHxGD9PPRIUNIWvZhPTPpl1rf/OxTYKsqKUDjBwYylTvV7SjSHJb9ratfyzM6wCdLCOYLzs73qpg5c4iGA==", - "dev": true, "license": "MIT", "dependencies": { "follow-redirects": "^1.10.0" @@ -6765,7 +6780,6 @@ "version": "1.14.1", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.1.tgz", "integrity": "sha512-HWqDgT7ZEkqRzBvc2s64vSZ/hfOceEol3ac/7tKwzuvEyWx3/4UegXh5oBOIotkGsObyk3xznnSRVADBgWSQVg==", - "dev": true, "funding": [ { "type": "individual", @@ -8376,7 +8390,6 @@ "version": "17.4.0", "resolved": "https://registry.npmjs.org/joi/-/joi-17.4.0.tgz", "integrity": "sha512-F4WiW2xaV6wc1jxete70Rw4V/VuMd6IN+a5ilZsxG4uYtUXWu2kq9W5P2dz30e7Gmw8RCbY/u/uk+dMPma9tAg==", - "dev": true, "license": "BSD-3-Clause", "dependencies": { "@hapi/hoek": "^9.0.0", @@ -8673,7 +8686,6 @@ "version": "4.17.21", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true, "license": "MIT" }, "node_modules/lodash.camelcase": { @@ -13528,7 +13540,6 @@ "version": "5.3.0", "resolved": "https://registry.npmjs.org/wait-on/-/wait-on-5.3.0.tgz", "integrity": "sha512-DwrHrnTK+/0QFaB9a8Ol5Lna3k7WvUR4jzSKmz0YaPBpuN2sACyiPVKVfj6ejnjcajAcvn3wlbTyMIn9AZouOg==", - "dev": true, "license": "MIT", "dependencies": { "axios": "^0.21.1", @@ -13548,7 +13559,6 @@ "version": "6.6.7", "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.6.7.tgz", "integrity": "sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ==", - "dev": true, "license": "Apache-2.0", "dependencies": { "tslib": "^1.9.0" @@ -13561,7 +13571,6 @@ 
"version": "1.14.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", - "dev": true, "license": "0BSD" }, "node_modules/wcwidth": { @@ -13747,17 +13756,16 @@ } }, "node_modules/ws": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.0.tgz", - "integrity": "sha512-6ezXvzOZupqKj4jUqbQ9tXuJNo+BR2gU8fFRk3XCP3e0G6WT414u5ELe6Y0vtp7kmSJ3F7YWObSNr1ESsgi4vw==", - "dev": true, + "version": "8.18.2", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.2.tgz", + "integrity": "sha512-DMricUmwGZUVr++AEAe2uiVM7UoO9MAVZMDu05UQOaUII0lp+zOzLLU4Xqh/JvTqklB1T4uELaaPBKyjE1r4fQ==", "license": "MIT", "engines": { - "node": ">=8.3.0" + "node": ">=10.0.0" }, "peerDependencies": { "bufferutil": "^4.0.1", - "utf-8-validate": "^5.0.2" + "utf-8-validate": ">=5.0.2" }, "peerDependenciesMeta": { "bufferutil": { @@ -13982,4 +13990,4 @@ } } } -} +} \ No newline at end of file diff --git a/package.json b/package.json index 47b967a..844f3d6 100644 --- a/package.json +++ b/package.json @@ -31,6 +31,10 @@ "vue:build": "cross-env NODE_ENV=production parcel build ./src/renderer/index.html --dist-dir build/renderer --public-url ./ --no-source-maps --no-scope-hoist", "electron:build": "electron-builder build -c.mac.target=dir", "build:local": "yarn build && yarn electron:build", + "build:win": "yarn build && electron-builder --win", + "build:python": "C:/dev/repos/VexEEG/neuroscope/.venv/Scripts/python.exe -m PyInstaller VEXServer.spec --distpath dist/python", + "build:all": "yarn build:python && yarn build && yarn build:win", + "prebuild:python": "echo 'PyInstaller already installed in virtual environment'", "clean": "rmdir build && rmdir .cache && rmdir dist && rmdir .parcel-cache", "patch": "npm version patch -m \"v%s\"", "postversion": "git push && git push --tags", @@ -50,8 +54,7 @@ "prettier": "^2.3.1", "sass": "^1.35.1", "typescript": 
"^4.3.2", - "vue": "^3.0.0", - "wait-on": "^5.0.0" + "vue": "^3.0.0" }, "main": "./src/main/index.js", "build": { @@ -59,11 +62,22 @@ "productName": "NeuroBlock", "compression": "maximum", "files": [ + "build/main/**/*", + "build/renderer/**/*", "src/main/**/*", - "node_modules/**/*", + "node_modules/**/*" + ], + "extraResources": [ + { + "from": "resources/python", + "to": "python" + }, { - "from": "build/renderer", - "to": "src/renderer" + "from": "dist/python/VEXServer.exe", + "to": "python/VEXServer.exe", + "filter": [ + "**/*" + ] } ], "mac": { @@ -72,7 +86,7 @@ "identity": null }, "win": { - "icon": "src/renderer/icons/icon.png" + "icon": "src/renderer/icons/icon.png" }, "dmg": { "contents": [ @@ -94,6 +108,8 @@ "blockly": "^10.4.3", "d3": "^7.9.0", "js-interpreter": "^5.1.1", - "rxjs": "^7.8.1" + "rxjs": "^7.8.1", + "ws": "^8.18.2", + "wait-on": "^5.3.0" } -} +} \ No newline at end of file diff --git a/programs/maze_program_emg.xml b/programs/maze_program_emg.xml new file mode 100644 index 0000000..eda4ac1 --- /dev/null +++ b/programs/maze_program_emg.xml @@ -0,0 +1 @@ +1000GT9033 \ No newline at end of file diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..e2daf16 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,5 @@ +opencv-python +pyaudio +websockets>=10.0 +websocket-client>=1.5.1 +pyinstaller>=5.0 \ No newline at end of file diff --git a/resources/python/VEXServer.py b/resources/python/VEXServer.py new file mode 100644 index 0000000..4cff8b4 --- /dev/null +++ b/resources/python/VEXServer.py @@ -0,0 +1,176 @@ +import asyncio +import websockets +import json + +# Try to import VEX libraries, but handle gracefully if robot not connected +try: + from vex import * + from vex.vex_globals import * + + # Try to initialize robot, but catch connection errors + try: + robot = Robot() + VEX_AVAILABLE = True + print("VEX robot initialized successfully") + except Exception as e: + print(f"Warning: Could not connect to VEX robot: {e}") 
+ print("Running in simulation mode - commands will be logged but not executed") + robot = None + VEX_AVAILABLE = False + + # Create mock constants for simulation + RED = "RED" + GREEN = "GREEN" + BLUE = "BLUE" + WHITE = "WHITE" + YELLOW = "YELLOW" + ORANGE = "ORANGE" + PURPLE = "PURPLE" + CYAN = "CYAN" + ALL_LEDS = "ALL_LEDS" + +except ImportError as e: + print(f"Warning: VEX libraries not available: {e}") + print("Running in simulation mode - commands will be logged but not executed") + robot = None + VEX_AVAILABLE = False + + # Create mock constants for simulation + RED = "RED" + GREEN = "GREEN" + BLUE = "BLUE" + WHITE = "WHITE" + YELLOW = "YELLOW" + ORANGE = "ORANGE" + PURPLE = "PURPLE" + CYAN = "CYAN" + ALL_LEDS = "ALL_LEDS" + +# color_list = [ +# RED, GREEN, BLUE, WHITE, YELLOW, ORANGE, PURPLE, CYAN +# ] + +# for color in color_list: +# robot.led.on(ALL_LEDS, color) +# wait(1, SECONDS) + +# robot.led.off(ALL_LEDS) + +# Command handler for VEX AIM +async def handle_command(websocket, path=None): + try: + async for message in websocket: + command = json.loads(message) + action = command.get("action", "") + + if action == "led_on": + color_name = command.get("color", "BLUE") + color_map = { + "RED": RED, + "GREEN": GREEN, + "BLUE": BLUE, + "WHITE": WHITE, + "YELLOW": YELLOW, + "ORANGE": ORANGE, + "PURPLE": PURPLE, + "CYAN": CYAN, + } + color = color_map.get(color_name.upper(), BLUE) + print(f"LED Command: Turning on {color_name} LED") + + if VEX_AVAILABLE and robot: + try: + robot.led.on(ALL_LEDS, color) + status = "success" + except Exception as e: + print(f"Error executing LED command: {e}") + status = "error" + else: + print("(Simulation mode - no physical robot)") + status = "success_simulation" + + await websocket.send(json.dumps({"status": status, "action": "led_on", "color": color_name})) + + elif action == "move": + distance_inches = command.get("distance", 4) + heading = command.get("heading", 0) + distance_mm = distance_inches * 25.4 + print(f"Move 
Command: Distance={distance_inches} inches ({distance_mm} mm), Heading={heading}°") + + if VEX_AVAILABLE and robot: + try: + robot.move_for(distance_mm, heading) + status = "success" + except Exception as e: + print(f"Error executing move command: {e}") + status = "error" + else: + print("(Simulation mode - no physical robot)") + status = "success_simulation" + + await websocket.send(json.dumps({ + "status": status, + "action": "move", + "distance_inches": distance_inches, + "distance_mm": distance_mm, + "heading": heading + })) + + elif action == "turn_left": + degrees = command.get("degrees", 90) + print(f"Turn Command: Left {degrees}°") + + if VEX_AVAILABLE and robot: + try: + robot.turn_for(vex.TurnType.LEFT, degrees) + status = "success" + except Exception as e: + print(f"Error executing turn left command: {e}") + status = "error" + else: + print("(Simulation mode - no physical robot)") + status = "success_simulation" + + await websocket.send(json.dumps({"status": status, "action": "turn_left", "degrees": degrees})) + + elif action == "turn_right": + degrees = command.get("degrees", 90) + print(f"Turn Command: Right {degrees}°") + + if VEX_AVAILABLE and robot: + try: + robot.turn_for(vex.TurnType.RIGHT, degrees) + status = "success" + except Exception as e: + print(f"Error executing turn right command: {e}") + status = "error" + else: + print("(Simulation mode - no physical robot)") + status = "success_simulation" + + await websocket.send(json.dumps({"status": status, "action": "turn_right", "degrees": degrees})) + + else: + print(f"Unknown command: {command}") + await websocket.send(json.dumps({"status": "error", "message": "Unknown command"})) + + except Exception as e: + print(f"Error handling command: {e}") + await websocket.send(json.dumps({"status": "error", "message": str(e)})) + +async def main(): + port = 8777 + print(f"Starting WebSocket server on ws://127.0.0.1:{port}") + print(f"VEX robot available: {VEX_AVAILABLE}") + + async with 
websockets.serve(handle_command, "127.0.0.1", port): + print("WebSocket server is ready and listening...") + await asyncio.Future() # run forever + +if __name__ == "__main__": + try: + asyncio.run(main()) + except KeyboardInterrupt: + print("\nServer shutting down...") + except Exception as e: + print(f"Server error: {e}") diff --git a/resources/python/VEXServer_dev.py b/resources/python/VEXServer_dev.py new file mode 100644 index 0000000..27abd84 --- /dev/null +++ b/resources/python/VEXServer_dev.py @@ -0,0 +1,79 @@ +import asyncio +import websockets +import json + +print("Starting VEX WebSocket Server (Development Mode)") +print("VEX libraries not loaded - running in simulation mode") + +# Mock VEX constants for simulation +VEX_AVAILABLE = False +RED = "RED" +GREEN = "GREEN" +BLUE = "BLUE" +WHITE = "WHITE" +YELLOW = "YELLOW" +ORANGE = "ORANGE" +PURPLE = "PURPLE" +CYAN = "CYAN" +ALL_LEDS = "ALL_LEDS" + +# Command handler for VEX AIM +async def handle_command(websocket, path=None): + try: + async for message in websocket: + command = json.loads(message) + action = command.get("action", "") + + if action == "led_on": + color_name = command.get("color", "BLUE") + print(f"LED Command: Turning on {color_name} LED (Simulation)") + await websocket.send(json.dumps({"status": "success_simulation", "action": "led_on", "color": color_name})) + + elif action == "move": + distance_inches = command.get("distance", 4) + heading = command.get("heading", 0) + distance_mm = distance_inches * 25.4 + print(f"Move Command: Distance={distance_inches} inches ({distance_mm} mm), Heading={heading}° (Simulation)") + + await websocket.send(json.dumps({ + "status": "success_simulation", + "action": "move", + "distance_inches": distance_inches, + "distance_mm": distance_mm, + "heading": heading + })) + + elif action == "turn_left": + degrees = command.get("degrees", 90) + print(f"Turn Command: Left {degrees}° (Simulation)") + await websocket.send(json.dumps({"status": "success_simulation", 
"action": "turn_left", "degrees": degrees})) + + elif action == "turn_right": + degrees = command.get("degrees", 90) + print(f"Turn Command: Right {degrees}° (Simulation)") + await websocket.send(json.dumps({"status": "success_simulation", "action": "turn_right", "degrees": degrees})) + + else: + print(f"Unknown command: {command}") + await websocket.send(json.dumps({"status": "error", "message": "Unknown command"})) + + except Exception as e: + print(f"Error handling command: {e}") + await websocket.send(json.dumps({"status": "error", "message": str(e)})) + +async def main(): + port = 8777 + print(f"Starting WebSocket server on ws://127.0.0.1:{port}") + print(f"VEX robot available: {VEX_AVAILABLE}") + + async with websockets.serve(handle_command, "127.0.0.1", port): + print("WebSocket server is ready and listening...") + await asyncio.Future() # run forever + +if __name__ == "__main__": + try: + asyncio.run(main()) + except KeyboardInterrupt: + print("\nServer shutting down...") + except Exception as e: + print(f"Server error: {e}") \ No newline at end of file diff --git a/resources/python/VexTest.py b/resources/python/VexTest.py new file mode 100644 index 0000000..4eba56c --- /dev/null +++ b/resources/python/VexTest.py @@ -0,0 +1,50 @@ +import asyncio +import websockets +import json + +async def send_test_commands(): + uri = "ws://localhost:8765" # WebSocket server address + try: + print(f"Connecting to WebSocket server at {uri}...") + async with websockets.connect(uri) as websocket: + print("Connected to WebSocket server!") + + # Test 1: Turn LED on with a specific color + led_command = { + "action": "led_on", + "color": "GREEN" # Change to any color: "RED", "BLUE", etc. 
+ } + print(f"Sending LED command: {led_command}") + await websocket.send(json.dumps(led_command)) + response = await websocket.recv() + print(f"Response from server: {response}") + + # Test 2: Move the robot forward + move_command = { + "action": "move", + "distance": 100, # Distance in cm + "heading": 0 # Heading in degrees + } + print(f"Sending move command: {move_command}") + await websocket.send(json.dumps(move_command)) + response = await websocket.recv() + print(f"Response from server: {response}") + + # Test 3: Turn LED off + led_off_command = { + "action": "led_on", + "color": "OFF" # Assuming "OFF" turns off the LEDs + } + print(f"Sending LED off command: {led_off_command}") + await websocket.send(json.dumps(led_off_command)) + response = await websocket.recv() + print(f"Response from server: {response}") + + except Exception as e: + print(f"Error: {e}") + +def main(): + asyncio.run(send_test_commands()) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/resources/python/vex/__init__.py b/resources/python/vex/__init__.py new file mode 100644 index 0000000..1ecd4a0 --- /dev/null +++ b/resources/python/vex/__init__.py @@ -0,0 +1,17 @@ +# ================================================================================================= +# Copyright (c) Innovation First 2025. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# ================================================================================================= +""" +AIM WebSocket API + +This package provides modules for interacting with the VEX AIM Robot using WebSocket communication. + +Modules exposed: +- `aim`: Core functionality for robot control and WebSocket communication. +- `vex_types`: Definitions for various types and enums used in the API. +- `vex_globals`: Global constants and variables to match VEXcode AIM API. 
+""" + +from .aim import * +from .vex_types import * diff --git a/resources/python/vex/__pycache__/__init__.cpython-313.pyc b/resources/python/vex/__pycache__/__init__.cpython-313.pyc new file mode 100644 index 0000000..77be36a Binary files /dev/null and b/resources/python/vex/__pycache__/__init__.cpython-313.pyc differ diff --git a/resources/python/vex/__pycache__/aim.cpython-313.pyc b/resources/python/vex/__pycache__/aim.cpython-313.pyc new file mode 100644 index 0000000..674d904 Binary files /dev/null and b/resources/python/vex/__pycache__/aim.cpython-313.pyc differ diff --git a/resources/python/vex/__pycache__/settings.cpython-313.pyc b/resources/python/vex/__pycache__/settings.cpython-313.pyc new file mode 100644 index 0000000..030b32d Binary files /dev/null and b/resources/python/vex/__pycache__/settings.cpython-313.pyc differ diff --git a/resources/python/vex/__pycache__/vex_globals.cpython-313.pyc b/resources/python/vex/__pycache__/vex_globals.cpython-313.pyc new file mode 100644 index 0000000..59c4d74 Binary files /dev/null and b/resources/python/vex/__pycache__/vex_globals.cpython-313.pyc differ diff --git a/resources/python/vex/__pycache__/vex_messages.cpython-313.pyc b/resources/python/vex/__pycache__/vex_messages.cpython-313.pyc new file mode 100644 index 0000000..9beea18 Binary files /dev/null and b/resources/python/vex/__pycache__/vex_messages.cpython-313.pyc differ diff --git a/resources/python/vex/__pycache__/vex_types.cpython-313.pyc b/resources/python/vex/__pycache__/vex_types.cpython-313.pyc new file mode 100644 index 0000000..a37d924 Binary files /dev/null and b/resources/python/vex/__pycache__/vex_types.cpython-313.pyc differ diff --git a/resources/python/vex/aim.py b/resources/python/vex/aim.py new file mode 100644 index 0000000..3349ff8 --- /dev/null +++ b/resources/python/vex/aim.py @@ -0,0 +1,2325 @@ +# ================================================================================================= +# Copyright (c) Innovation First 2025. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# ================================================================================================= +""" +AIM WebSocket Python Client + +This library is designed for use with the VEX AIM Robot. It provides a simple interface +for controlling the robot remotely over Wi-Fi and for receiving status updates. + +Communication is handled via the WebSocket protocol. The library includes a set of +classes and methods for controlling robot movement, sensors, and other features. +""" + +# pylint: disable=unnecessary-pass,unused-argument,line-too-long,too-many-lines +# pylint: disable=invalid-name,unused-import,redefined-outer-name +import io +import json +import time +import threading +import sys +from typing import Union, cast, Callable +import math +import atexit +import signal +import _thread +import pathlib +import websocket +from .settings import Settings +from . import vex_types as vex #importing from the same package +from . 
import vex_messages as commands +#module-specific "constant" globals +VERSION_MAJOR = 1 +VERSION_MINOR = 0 +VERSION_BUILD = 0 +VERSION_BETA = 0 +SYS_FLAGS_SOUND_PLAYING = 1<<0 +SYS_FLAGS_IS_SOUND_DNL = 1<<16 +SYS_FLAGS_IS_MOVE_ACTIVE = 1<<1 # if a move_at or move_for (or any "move" command) is active +SYS_FLAGS_IMU_CAL = 1<<3 +SYS_FLAGS_IS_TURN_ACTIVE = 1<<4 +SYS_FLAGS_IS_MOVING = 1<<5 # if there is any wheel movement whatsoever +SYS_FLAGS_HAS_CRASHED = 1<<6 +SYS_FLAGS_IS_SHAKE = 1<<8 +SYS_FLAGS_PWR_BUTTON = 1<<9 +SYS_FLAGS_PROG_ACTIVE = 1<<10 + +SOUND_SIZE_MAX_BYTES = 255 * 1024 +BARREL_MIN_Y = 160 +BARREL_MIN_CX = 120 +BARREL_MAX_CX = 200 + +BALL_MIN_Y = 170 +BALL_MIN_CX = 120 +BALL_MAX_CX = 200 + +AIVISION_MAX_OBJECTS = 24 +AIVISION_DEFAULT_SNAPSHOT_OBJECTS = 8 + +DRIVE_VELOCITY_MAX_MMPS = 200 # millimeters per second +TURN_VELOCITY_MAX_DPS = 180 # Degrees per second +class AimException(Exception): + """VEX AIM Exception Class""" + +class DisconnectedException(AimException): + """Exception that is thrown when robot is not connected over Websocket""" + +class NoImageException(AimException): + """No image was received""" + +class ReceiveErrorException(AimException): + """(internally used) error receiving WS frame""" + +class InvalidSoundFileException(AimException): + """Sound file extension or format is not supported""" + +class InvalidImageFileException(AimException): + """image file extension or format is not supported""" + +class WSThread(threading.Thread): + """ Base class for all Websocket threads""" + def __init__ (self, host, ws_name): + threading.Thread.__init__(self) + self.host = host + self.ws_name = ws_name + self.uri = f"ws://{self.host}/{self.ws_name}" + self.ws = self.connect_websocket(timeout=4) + self.callback = None + self.running = True + #set equal to true of connection needs to be reset (disconnect, reconnect) + self._ws_needs_reset = False + + def connect_websocket(self, timeout): + """ connect to the websocket server""" + ws = 
websocket.WebSocket() + try: + ws.connect(self.uri, timeout=timeout) + except Exception as error: + print( + f"Could not connect to {self.uri} (reason: {error}). " + f"Verify that \"{self.host}\" is the correct IP/hostname of the AIM robot " + "and that it is connected to the same network (AP mode is 192.168.4.1)" + ) + sys.exit(1) + return ws + + def ws_send(self, payload: Union[bytes, str], opcode: int = websocket.ABNF.OPCODE_TEXT): + """ send data to the robot over the websocket connection""" + try: + self.ws.send(payload, opcode) + except (ConnectionResetError, websocket.WebSocketException) as error: + self._ws_needs_reset = True + raise DisconnectedException( + f"{self.ws_name}: error sending data to robot, apparently disconnected, " + f"will try to reconnect; error: '{error}'" + ) from error + + def ws_receive(self): + """ receive data from the robot over the websocket connection""" + try: + data = self.ws.recv() + except (ConnectionResetError, websocket.WebSocketException) as error: + self._ws_needs_reset = True + raise ReceiveErrorException( + f"{self.ws_name}: error receiving data from robot, apparently disconnected, " + f"will try to reconnect; error: '{error}'" + ) from error + return data + + def ws_close(self): + """ close the websocket connection""" + try: + self.ws.close() + except Exception as error: + print(f"Failed to close WebSocket connection. It might already be closed. 
Error: {error}") + +class WSStatusThread(WSThread): + """ Websocket thread for listening to different status updates from the robot""" + def __init__(self, host): + super().__init__ (host, "ws_status") + self._empty_status = { + "controller": {"flags": "0x0000", "stick_x": 0, "stick_y": 0, "battery": 0}, + "robot": { + "flags": "0x00000000", + "battery": 0, + "touch_flags": "0x0000", + "touch_x": 0, + "touch_y": 0, + "robot_x": 0, + "robot_y": 0, + "roll": "0", + "pitch": "0", + "yaw": "0", + "heading": "0", + "acceleration": {"x": "0", "y": "0", "z": "0"}, + "gyro_rate": {"x": "0", "y": "0", "z": "0"}, + "screen": {"row": "1", "column": "1"} + }, + "aivision": { + "classnames": { + "count": 4, + "items": [ + {"index": 0, "name": "SportsBall"}, + {"index": 1, "name": "BlueBarrel"}, + {"index": 2, "name": "OrangeBarrel"}, + {"index": 3, "name": "Robot"}, + ], + }, + "objects": { + "count": 0, + "items": [ + { + "type": 0, + "id": 0, + "type_str": "0", + "originx": 0, + "originy": 0, + "width": 0, + "height": 0, + "score": 0, + "name": "0", + } + ]}, + }, + } + self.current_status = self._empty_status + self.is_move_active_flag_needs_setting = False + self.is_turn_active_flag_needs_setting = False + self.is_moving_flag_needs_setting = False + self.is_moving_flag_needs_clearing = False + self.imu_cal_flag_needs_setting = False + self.sound_playing_flag_needs_setting = False + self.sound_downloading_flag_needs_setting = False + + self._packets_lost_counter = 0 + self.heartbeat = 0 + self.program_active = False + + #need to have list of callback for screen pressed + self._screen_pressed_callbacks = [] + self._screen_released_callbacks = [] + self._inertial_crashed_callbacks = [] + self._last_screen_pressed = False + + def update_status_flags(self): + """ update the status flags based on the current status received from the robot""" + if self.is_move_active_flag_needs_setting: + self.set_is_move_active_flag() + self.is_move_active_flag_needs_setting = False + + if 
self.is_turn_active_flag_needs_setting: + self.set_is_turn_active_flag() + self.is_turn_active_flag_needs_setting = False + + if self.is_moving_flag_needs_setting: + self.set_is_moving_flag() + self.is_moving_flag_needs_setting = False + + if self.is_moving_flag_needs_clearing: + self.clear_is_moving_flag() + self.is_moving_flag_needs_clearing = False + + if self.imu_cal_flag_needs_setting: + self.set_imu_cal_flag() + self.imu_cal_flag_needs_setting = False + + if self.sound_playing_flag_needs_setting: + self.set_sound_playing_flag() + self.sound_playing_flag_needs_setting = False + + if self.sound_downloading_flag_needs_setting: + self.set_sound_downloading_flag() + self.sound_downloading_flag_needs_setting = False + + + def set_is_move_active_flag(self): + """ set the is_move_active flag in the robot status""" + robot_flags = self.current_status["robot"]["flags"] + #update is_move_active flag (convert robot_flags to int, set bit 1 to 1, then convert back to hex string) + new_robot_flags = hex(int(robot_flags, 16) | SYS_FLAGS_IS_MOVE_ACTIVE) + self.current_status["robot"]["flags"] = new_robot_flags + + def set_is_turn_active_flag(self): + """ set the is_turn_active flag in the robot status""" + robot_flags = self.current_status["robot"]["flags"] + #update is_turn_active flag (convert robot_flags to int, set bit 1 to 1, then convert back to hex string) + new_robot_flags = hex(int(robot_flags, 16) | SYS_FLAGS_IS_TURN_ACTIVE) + self.current_status["robot"]["flags"] = new_robot_flags + + def set_is_moving_flag(self): + """ set the is_moving flag in the robot status""" + robot_flags = self.current_status["robot"]["flags"] + #update is_moving flag + #convert robot_flags to int, set bit 1 to 1, then convert back to hex string + new_robot_flags = hex(int(robot_flags, 16) | SYS_FLAGS_IS_MOVING) + self.current_status["robot"]["flags"] = new_robot_flags + + def clear_is_moving_flag(self): + """ reset the is_moving flag in the robot status""" + robot_flags = 
self.current_status["robot"]["flags"] + #update is_moving flag + #convert robot_flags to int, set bit 1 to 1, then convert back to hex string + new_robot_flags = hex(int(robot_flags, 16) & ~SYS_FLAGS_IS_MOVING) + self.current_status["robot"]["flags"] = new_robot_flags + + def set_imu_cal_flag(self): + """ set the flag to say is IMU calibrating based on the robot status""" + robot_flags = self.current_status["robot"]["flags"] + #update imu_cal flag + #convert robot_flags to int, set bit 1 to 1, then convert back to hex string + new_robot_flags = hex(int(robot_flags, 16) | SYS_FLAGS_IMU_CAL) + self.current_status["robot"]["flags"] = new_robot_flags + + def set_sound_playing_flag(self): + """ set the flag to say sound is playing based on the status""" + robot_flags = self.current_status["robot"]["flags"] + #update sound_playing flag + #convert robot_flags to int, set bit 1 to 1, then convert back to hex string + new_robot_flags = hex(int(robot_flags, 16) | SYS_FLAGS_SOUND_PLAYING) + self.current_status["robot"]["flags"] = new_robot_flags + + def set_sound_downloading_flag(self): + """ set the flag to say sound is downloading based on the status""" + robot_flags = self.current_status["robot"]["flags"] + #update is_sound_downloading flag + # convert robot_flags to int, set bit 1 to 1, then convert back to hex string + new_robot_flags = hex(int(robot_flags, 16) | SYS_FLAGS_IS_SOUND_DNL) + self.current_status["robot"]["flags"] = new_robot_flags + + def check_shake_flag(self): + """ detect if the robot has been shaken""" + # previously, a shake would cause program to exit. + # This was problematic, so nothing is done currently. 
+ robot_flags = self.current_status["robot"]["flags"] + # if int(robot_flags, 16) & SYS_FLAGS_IS_SHAKE != 0: + # _thread.interrupt_main() + + def check_power_button_flag(self): + """ detect if the power button has been pressed to exit the program""" + robot_flags = self.current_status["robot"]["flags"] + if int(robot_flags, 16) & SYS_FLAGS_PWR_BUTTON != 0: + print("detected power button press, exiting program") + _thread.interrupt_main() + + def check_program_active_flag(self): + """Going from program_active == True to program_active == False exits the program""" + robot_flags = self.current_status["robot"]["flags"] + program_active_old = self.program_active + + if int(robot_flags, 16) & SYS_FLAGS_PROG_ACTIVE != 0: + self.program_active = True + else: + self.program_active = False + + if program_active_old and not self.program_active: + print("detected that program is no longer active (robot power button pressed?), exiting program") + _thread.interrupt_main() + + def check_crash_flag(self): + """ detect if the robot has crashed and fire the appropriate callbacks""" + robot_flags = self.current_status["robot"]["flags"] + if int(robot_flags, 16) & SYS_FLAGS_HAS_CRASHED != 0: + for (cb, args) in self._inertial_crashed_callbacks: + cb(*args) + + def check_screen_pressing(self): + """Check if the screen is being pressed, call pressed/released callbacks on transition.""" + is_pressed = bool(int(self.current_status["robot"]["touch_flags"], 16) & 0x0001) + + # If newly pressed + if is_pressed and not self._last_screen_pressed: + for (cb, args) in self._screen_pressed_callbacks: + cb(*args) + + # If newly released + if not is_pressed and self._last_screen_pressed: + for (cb, args) in self._screen_released_callbacks: + cb(*args) + + self._last_screen_pressed = is_pressed + + def is_current_status_empty(self): + """ check if the current status is empty""" + if self.current_status == self._empty_status: + return True + else: + return False + + def 
add_screen_pressed_callback(self, callback: Callable[..., None], args: tuple = ()): + """Adds a callback for screen press events, storing optional args.""" + # Store a tuple of (callback_function, args) + self._screen_pressed_callbacks.append((callback, args)) + + def add_screen_released_callback(self, callback: Callable[..., None], args: tuple = ()): + """Adds a callback for screen release events, storing optional args.""" + self._screen_released_callbacks.append((callback, args)) + + def add_inertial_crash_callback(self, callback: Callable[..., None], args: tuple = ()): + """Adds a callback for inertial sensor's crash detected events, storing optional args.""" + # Store a tuple of (callback_function, args) + self._inertial_crashed_callbacks.append((callback, args)) + + def run(self): + while self.running: + new_status_json = '' + new_status = self._empty_status + if self._ws_needs_reset: + self.ws_close() + self._ws_needs_reset = False + + if self.ws.connected: + status_packet_error = False + self.ws_send((1).to_bytes(1, 'little'), websocket.ABNF.OPCODE_BINARY) + try: + new_status_json = self.ws_receive() + except Exception: + status_packet_error = True + + if not status_packet_error: + try: + new_status = json.loads(new_status_json) + except Exception: + status_packet_error = True + + #If we have an error receiving packet, initially we want to keep current_status unchanged. + #After enough dropped packets, set current_status to empty values. 
+ if status_packet_error: + self._packets_lost_counter += 1 + print(f"lost a status packet, counter: {self._packets_lost_counter}") + #we have received a valid status packet, so update current_status: + else: + self._packets_lost_counter = 0 #reset this counter + self.current_status = new_status + # print("current_status: ", self.current_status) + if self.callback: + if callable(self.callback): + self.callback() + + # if a certain commands are sent, the very next status flag won't have the appropriate flags set yet, so over-ride locally. + self.update_status_flags() + + self.check_shake_flag() + self.check_crash_flag() + self.check_screen_pressing() + self.check_power_button_flag() + self.check_program_active_flag() + self.heartbeat = not self.heartbeat + + if self._packets_lost_counter > 5: + self.current_status = self._empty_status + + # print("current_status: ", self.current_status) + time.sleep(0.05) + else: + # print ("trying to reconnect to ws_status") + try: + print(f"{self.ws_name} reconnecting") + self.ws.connect(self.uri) + except: + pass # we'll keep trying to reconnect + +class WSImageThread(WSThread): + """ Websocket thread for receiving image stream from the robot""" + def __init__(self, host): + super().__init__ (host, "ws_img") + self.current_image_index = 0 + self.image_list: list[bytes] = [bytes(1), bytes(1)] + + self._next_image_index = 1 + self._streaming = False + + def run(self): + while self.running: + if self._ws_needs_reset: + self.ws_close() + self._ws_needs_reset = False + + if self.ws.connected and self._streaming: + if self.current_image_index == 1: + self._next_image_index = 1 + self.current_image_index = 0 + else: + self._next_image_index = 0 + self.current_image_index = 1 + + try: + self.image_list[self._next_image_index] = cast(bytes, self.ws_receive()) # cast to narrow str | bytes down to bytes + if self.callback: + if callable(self.callback): + self.callback() + except ReceiveErrorException: + 
self.image_list[self._next_image_index] = (0).to_bytes(1, 'little') + + elif not self.ws.connected: + self._streaming = False + try: + print(f"{self.ws_name} reconnecting") + self.ws.connect(self.uri) + except: + pass # we'll keep trying to reconnect + else: + time.sleep(0.05) + + def stop_stream(self): + """ stop the image stream""" + self._streaming = False + self.ws_send((0).to_bytes(1, 'little'), websocket.ABNF.OPCODE_BINARY) + + def start_stream(self): + """ start the image stream""" + self._streaming = True + self.ws_send((1).to_bytes(1, 'little'), websocket.ABNF.OPCODE_BINARY) + +class WSCommandThread(WSThread): + """ Websocket thread for sending commands to the robot""" + def __init__(self, host): + super().__init__ (host, "ws_cmd") + + def run(self): + while self.running: + if self._ws_needs_reset: + self.ws_close() + self._ws_needs_reset = False + + if not self.ws.connected: + try: + print(f"{self.ws_name} reconnecting") + self.ws.connect(self.uri) + except: + pass # we'll keep on trying + + time.sleep(0.2) + +class WSAudioThread(WSThread): + """ Websocket thread for sending audio to the robot""" + def __init__(self, host): + super().__init__ (host, "ws_audio") + + def run(self): + while self.running: + if self._ws_needs_reset: + self.ws_close() + self._ws_needs_reset = False + + if not self.ws.connected: + try: + print(f"{self.ws_name} reconnecting") + self.ws.connect(self.uri) + except: + pass # we'll keep on trying + + time.sleep(0.2) + +class ColorRGB: + """ RGB color class""" + def __init__(self, r: int, g: int, b: int, t: bool=False): + self.r = r + self.g = g + self.b = b + self.t = t + +class Robot(): + """ + AIM Robot class. + When initializing, provide a host (IP address, hostname, or even domain name) + or leave at empty for default if AIM is in WiFi AP mode. + """ + #region private internal methods + def __init__(self, host=""): + """ + Initialize the Robot with default settings and WebSocket connections. 
+ """ + # If host is not provided, read the host from the settings file + if host == "": + # Create an instance of the Settings class to read the host from the settings file + settings = Settings() + host = settings.host + + self.host = host + print( + f"Welcome to the AIM Websocket Python Client. " + f"Running version {VERSION_MAJOR}.{VERSION_MINOR}.{VERSION_BUILD}.{VERSION_BETA} " + f"and connecting to {self.host}" + ) + self.move_active_cmd_list = ["drive", "drive_for"] + self.turn_active_cmd_list = [ "turn", "turn_for", "turn_to"] + self.stopped_active_cmd_list = self.move_active_cmd_list + self.turn_active_cmd_list + + + self._ws_status_thread = WSStatusThread(self.host) + self._ws_status_thread.daemon = True + self._ws_status_thread.start() + + self._ws_img_thread = WSImageThread(self.host) + self._ws_img_thread.daemon = True + self._ws_img_thread.start() + + self._ws_cmd_thread = WSCommandThread(self.host) + self._ws_cmd_thread.daemon = True + self._ws_cmd_thread.start() + + self._ws_audio_thread = WSAudioThread(self.host) + self._ws_audio_thread.daemon = True + self._ws_audio_thread.start() + + atexit.register(self.exit_handler) + signal.signal(signal.SIGINT, self.kill_handler) + signal.signal(signal.SIGTERM, self.kill_handler) + + self._program_init() + + self.drive_speed = 100 # Millimeters per second + self.turn_speed = 75 # Degrees per second + # internal class instances to access through robot instance + self.timer = Timer() + self.screen = Screen(self) + self.inertial = Inertial(self) + self.kicker = Kicker(self) + self.sound = Sound(self) + self.led = Led(self) + self.vision = AiVision(self) + + # We don't want to execute certain things (like reset_heading) until we start getting status packets + while self._ws_status_thread.is_current_status_empty() is True: + # print("waiting for status") + time.sleep(0.05) + self.inertial.reset_heading() + + def exit_handler(self): + """upon system exit, either due to SIGINT/SIGTERM or uncaught exceptions""" + if 
hasattr(self, '_ws_cmd_thread'): #if connection were never established, this property wouldn't exist + pass + # print("program terminating, stopping robot") + # #not attempting to stop robot since upon program end or connection loss, robot stops itself and wouldn't respond anyways. + # try: + # self.stop_all_movement() + # except Exception as error: + # print("exceptions arose during stop_all_movement(), error:", error) + else: + print("program terminating (never connected to robot)") + + try: + self._ws_cmd_thread.running = False + except: + # print("ws_cmd_thread doesn't exist") + pass + try: + self._ws_status_thread.running = False + except: + # print("ws_status_thread doesn't exist") + pass + try: + self._ws_img_thread.running = False + except: + # print("ws_img_thread doesn't exist") + pass + try: + self._ws_img_thread.stop_stream() + except: + # print("error stopping ws_img stream") + pass + + def kill_handler(self, signum, frame): + """when kill signal is received, exit the program. Will result in exit_handler being run""" + signame = signal.Signals(signum).name + print(f'Received signal {signame} ({signum})') + sys.exit(0) + + def __getattribute__(self, name): + """ + This function gets called before any other Robot function.\n + If we are not connected to the robot (just looking at ws_cmd), then raise an exception + and terminate program unless the user handles the exception. + """ + method = object.__getattribute__(self, name) + if not method: + # raise Exception("Method %s not implemented" %name) + return + if callable(method): + if self._ws_cmd_thread.ws.connected is False: + raise DisconnectedException(f"error calling {name}: not connected to robot") + return method + + def _program_init(self): + """ + Sends a command indicating to robot that new program is starting. 
+ To be called during __init__ + """ + message = commands.ProgramInit() + self.robot_send(message.to_json()) + + def _block_on_state(self, state_method): + time_start = time.time() + blocking = True + while True: + if state_method() is False: # debounce + time.sleep(0.05) + if state_method() is False: + break + # print("blocking") + time_elapsed = time.time() - time_start + time.sleep(0.1) + #if turning/moving took too long, we want to stop moving and stop blocking. + if time_elapsed > 10: + print(f"{state_method.__name__} wait timed out, stopping") + self.stop_all_movement() + return + + @property + def status(self): + """ returns the current status of the robot """ + return self._ws_status_thread.current_status + #endregion private internal methods + + #region Generic methods to send commands to the robot + def robot_send(self, json_cmd): + """send a command to the robot through websocket connection""" + disconnected_error = False + #print(json_cmd) + json_cmd_string = json.dumps(json_cmd, separators=(',',':')) + #print("sending: ", json_cmd_string) + try: + cmd_id = json_cmd["cmd_id"] + except: + print("robot_send did not receive a cmd_id") + return + + self._ws_cmd_thread.ws_send(str.encode(json_cmd_string), websocket.ABNF.OPCODE_BINARY) + try: + response_json = self._ws_cmd_thread.ws_receive() + except ReceiveErrorException: + disconnected_error = True + raise DisconnectedException(f"robot got disconnected after sending cmd_id: {cmd_id}") from None # disable exception chaining + # not trying to resend command because that would take too long, let user decide. 
+ + try: + response = json.loads(response_json) + except Exception as error: + print(f"{cmd_id} Error: could not parse ws_cmd JSON response: '{error}'") + print("response_json", response_json) + return + + # print("response_json", response_json) + if response["cmd_id"] == "cmd_unknown": + print("robot: did not recognize command: ", cmd_id) + return + + if response["status"] == "error": + try: + error_info_string = response["error_info"] + except KeyError: + error_info_string = "no reason given" + print("robot: error processing command, reason: ", error_info_string) + return + + # trigger a local update to the robot status flags in ws_status_thread + if response["status"] in ["complete", "in_progress"]: + if response["cmd_id"] in self.move_active_cmd_list: + self._ws_status_thread.is_move_active_flag_needs_setting = True + if response["cmd_id"] in self.turn_active_cmd_list: + self._ws_status_thread.is_turn_active_flag_needs_setting = True + if response["cmd_id"] in self.stopped_active_cmd_list: + self._ws_status_thread.is_moving_flag_needs_setting = True + self._ws_status_thread.is_moving_flag_needs_clearing = False + if response["cmd_id"] == "imu_calibrate": + self._ws_status_thread.imu_cal_flag_needs_setting = True + + return + + def robot_send_audio(self, audio): + """ send audio to the robot through websocket audio thread""" + self._ws_audio_thread.ws_send(audio, websocket.ABNF.OPCODE_BINARY) + + def add_screen_pressed_callback(self, callback: Callable[..., None], arg: tuple=()): + """Adds a screen-pressed callback (delegate to _ws_status_thread).""" + self._ws_status_thread.add_screen_pressed_callback(callback, arg) + + def add_screen_released_callback(self, callback: Callable[..., None], arg: tuple=()): + """Adds a screen-released callback (delegate to _ws_status_thread).""" + self._ws_status_thread.add_screen_released_callback(callback, arg) + + def add_inertial_crash_callback(self, callback: Callable[..., None], arg: tuple=()): + """Adds a crash detected 
callback (delegate to _ws_status_thread).""" + self._ws_status_thread.add_inertial_crash_callback(callback, arg) + + #endregion Generic methods to send commands to the robot + #region Sensing Motion + def get_x_position(self): + """ returns the x position of the robot""" + origin_x = float(self.status["robot"]["robot_x"]) + origin_y = float(self.status["robot"]["robot_y"]) + # print("raw:", origin_x, origin_y) + offset_radians = -math.radians(self.inertial.heading_offset) + x = origin_x * math.cos(offset_radians) + origin_y * math.sin(offset_radians) + + return x + + def get_y_position(self): + """ returns the y position of the robot""" + origin_x = float(self.status["robot"]["robot_x"]) + origin_y = float(self.status["robot"]["robot_y"]) + offset_radians = -math.radians(self.inertial.heading_offset) + y = origin_y * math.cos(offset_radians) - origin_x * math.sin(offset_radians) + + return y + + def is_move_active(self): + """returns true if a move_at() or move_for() command is active with nonzero speed""" + if self._ws_status_thread.is_move_active_flag_needs_setting: + return True + robot_flags = self.status["robot"]["flags"] + is_move_active = bool(int(robot_flags, 16) & SYS_FLAGS_IS_MOVE_ACTIVE) + return is_move_active + + def is_turn_active(self): + """returns true if a turn(), turn_to(), or turn_for() command is active with nonzero speed""" + if self._ws_status_thread.is_turn_active_flag_needs_setting: + return True + robot_flags = self.status["robot"]["flags"] + is_turn_active = bool(int(robot_flags, 16) & SYS_FLAGS_IS_TURN_ACTIVE) + return is_turn_active + + def is_stopped(self): + """returns true if no move, turn, or spin_wheels command is active (i.e. 
no wheels should be moving)""" + if self._ws_status_thread.is_moving_flag_needs_clearing: + return True + if self._ws_status_thread.is_moving_flag_needs_setting: + return False + robot_flags = self.status["robot"]["flags"] + is_stopped = not bool(int(robot_flags, 16) & SYS_FLAGS_IS_MOVING) + return is_stopped + #endregion Sensing Motion + + #region Sensing Battery + def get_battery_capacity(self): + """Get the remaining capacity of the battery (relative state of charge) in percent.""" + battery_capacity = self.status["robot"]["battery"] + return battery_capacity + #endregion Sensing Battery + + #region Motion Commands + def set_move_velocity(self, velocity:vex.vexnumber, units:vex.DriveVelocityPercentUnits=vex.DriveVelocityUnits.PERCENT): + """ + overrides the default velocity for all subsequent movement methods in the project.\n + The default move velocity is 50% (100 millimeters per second) + """ + #if velocity is negative throw value error + if velocity < 0: + raise ValueError("velocity must be a positive number") + + if units.value == vex.DriveVelocityUnits.PERCENT.value: + #cannot exeed 100% + if velocity > 100: + velocity = 100 + #convert velocity in percentage to MMPS ex: 100% is 200 MMPS + velocity = int(velocity * 2) + elif units.value == vex.DriveVelocityUnits.MMPS.value: + #cannot exceed MAX velocity MMPS + if velocity > DRIVE_VELOCITY_MAX_MMPS: + velocity = DRIVE_VELOCITY_MAX_MMPS + velocity = int(velocity) + + self.drive_speed = velocity + + def set_turn_velocity(self, velocity:vex.vexnumber, units:vex.TurnVelocityPercentUnits=vex.TurnVelocityUnits.PERCENT): + """ + overrides the default velocity for all subsequent turn methods in the project.\n + The default turn velocity is 50% (75 degrees per second) + """ + #if velocity is negative throw value error + if velocity < 0: + raise ValueError("velocity must be a positive number") + + if units.value == vex.TurnVelocityUnits.PERCENT.value: + #cannot exeed 100% + if velocity > 100: + velocity = 100 + #if 
velocity is PERCENT convert to DPS ex: 100% is 180 MMPS + velocity = int(velocity * 1.8) + elif units.value == vex.TurnVelocityUnits.DPS.value: + #cannot exceed MAX velocity MMPS + if velocity > TURN_VELOCITY_MAX_DPS: + velocity = TURN_VELOCITY_MAX_DPS + velocity = int(velocity) + + self.turn_speed = velocity + + def move_at(self, angle:vex.vexnumber, velocity=None, + units:vex.DriveVelocityPercentUnits=vex.DriveVelocityUnits.PERCENT): + """ + move indefinitely at angle (-360 to 360 degrees) at velocity (0-100) \n + if velocity is not provided, use the default set by set_move_velocity command \n + The velocity unit is PERCENT (default) or millimeters per second MMPS + """ + if velocity is None: + velocity = self.drive_speed + else: + if units.value == vex.DriveVelocityUnits.PERCENT.value: + #cannot exeed 100% + if velocity > 100: + velocity = 100 + #if velocity is PERCENT convert to MMPS ex: 100% is 200 MMPS + velocity = int(velocity * 2) + elif units.value == vex.DriveVelocityUnits.MMPS.value: + #cannot exceed MAX velocity MMPS + if velocity > DRIVE_VELOCITY_MAX_MMPS: + velocity = DRIVE_VELOCITY_MAX_MMPS + velocity = int(velocity) + # passing negaitive velocity will flip the direction on the firmware side. No need to handle it here + stacking_type = vex.StackingType.STACKING_OFF + message = commands.MoveAt(angle, velocity,stacking_type.value) + self.robot_send(message.to_json()) + + def move_for(self, distance:vex.vexnumber, angle:vex.vexnumber, velocity=None, units:vex.DriveVelocityPercentUnits=vex.DriveVelocityUnits.PERCENT, wait=True): + """move for a distance (mm) at angle (-360 to 360 degrees) at velocity (PERCENT/MMPS). 
+ if velocity or turn_speed is not provided, use the default set by set_move_velocity and set_turn_velocity commands""" + if velocity is None: + velocity = self.drive_speed + else: + if units.value == vex.DriveVelocityUnits.PERCENT.value: + #cannot exeed 100% + if velocity > 100: + velocity = 100 + #if velocity is PERCENT convert to MMPS ex: 100% is 200 MMPS + velocity = int(velocity * 2) + elif units.value == vex.DriveVelocityUnits.MMPS.value: + #cannot exceed MAX velocity MMPS + if velocity > DRIVE_VELOCITY_MAX_MMPS: + velocity = DRIVE_VELOCITY_MAX_MMPS + velocity = int(velocity) + + #if velocity is negative flip the direction + if velocity < 0: + velocity = -velocity + distance = -distance + + turn_speed = self.turn_speed + # if not final_heading: + # final_heading = self.get_heading_raw() + heading = 0 + stacking_type = vex.StackingType.STACKING_OFF + message = commands.MoveFor(distance, angle, velocity, turn_speed, heading, stacking_type.value) + self.robot_send(message.to_json()) + if wait: + self._block_on_state(self.is_move_active) + + def move_with_vectors(self, x, y, r): + """ + moves the robot using vector-based motion, combining horizontal (X-axis) and + vertical (Y-axis) movement and having the robot to rotate at the same time + + x: X-axis velocity (in %). negative values move left and positive values move right\n + y: Y-axis velocity (in %). negative values move backward and positive values move forward\n + r: rotation velocity (in %). 
        negative values move counter-clockwise and positive values move clockwise\n
        """
        # clip to +/- 100
        if x > 100:
            x = 100
        if x < -100:
            x = -100
        if y > 100:
            y = 100
        if y < -100:
            y = -100
        if r > 100:
            r = 100
        if r < -100:
            r = -100

        # scale
        # NOTE(review): x/y scaled by 2.0, r by 1.8 — presumably percent to robot
        # native velocity units; confirm against firmware documentation
        x = x * 2.0
        y = y * 2.0
        r = r * 1.8

        # calculate wheel velocities
        # mix translation (x, y) and rotation (r) onto the three omni wheels;
        # 0.5 / 0.866 are cos(60deg) / sin(60deg) for the 120-degree wheel layout
        w1 = (0.5 * x) + ((0.866) * y) + r
        w2 = (0.5 * x) - ((0.866) * y) + r
        w3 = r - x
        self.spin_wheels(int(w1), int(w2), int(w3))

    def turn(self, direction: vex.TurnType, velocity=None, units:vex.TurnVelocityPercentUnits=vex.TurnVelocityUnits.PERCENT):
        """turn indefinitely at velocity (DPS) in the direction specified by turn_direction (TurnType.LEFT or TurnType.RIGHT).
        if velocity is not provided, use the default set by set_turn_velocity command"""
        if velocity is None:
            velocity = self.turn_speed
        else:
            if units.value == vex.TurnVelocityUnits.PERCENT.value:
                # cannot exceed 100%
                if velocity > 100:
                    velocity = 100
                # if velocity is PERCENT convert to DPS, e.g. 100% is 180 DPS
                velocity = int(velocity * 1.8)
            elif units.value == vex.TurnVelocityUnits.DPS.value:
                # cannot exceed MAX velocity (DPS)
                if velocity > TURN_VELOCITY_MAX_DPS:
                    velocity = TURN_VELOCITY_MAX_DPS
                velocity = int(velocity)
        # handle direction flip: LEFT turns are sent as negative velocity
        if direction == vex.TurnType.LEFT:
            velocity = -velocity
        stacking_type = vex.StackingType.STACKING_OFF
        message = commands.Turn(velocity, stacking_type.value)
        self.robot_send(message.to_json())

    def turn_for(self, direction: vex.TurnType, angle, velocity=None, units:vex.TurnVelocityPercentUnits=vex.TurnVelocityUnits.PERCENT, wait=True):
        """turn for a 'angle' number of degrees at turn_rate (deg/sec)"""
        if velocity is None:
            velocity = self.turn_speed
        else:
            if units.value == vex.TurnVelocityUnits.PERCENT.value:
                # cannot exceed 100%
                if velocity > 100:
                    velocity = 100
                # if velocity is PERCENT convert to DPS, e.g. 100% is 180 DPS
                velocity = int(velocity * 1.8)
            elif units.value == vex.TurnVelocityUnits.DPS.value:
                # cannot exceed MAX velocity (DPS)
                if velocity > TURN_VELOCITY_MAX_DPS:
                    velocity = TURN_VELOCITY_MAX_DPS
                velocity = int(velocity)
        # handle direction flip: LEFT turns are sent as a negative angle
        if direction == vex.TurnType.LEFT:
            angle = -angle
        stacking_type = vex.StackingType.STACKING_OFF
        message = commands.TurnFor(angle, velocity, stacking_type.value)
        self.robot_send(message.to_json())
        if wait:
            # block until the robot reports the turn has completed
            self._block_on_state(self.is_turn_active)

    def turn_to(self, heading:vex.vexnumber, velocity=None, units:vex.TurnVelocityPercentUnits=vex.TurnVelocityUnits.PERCENT, wait=True):
        """turn to a heading (degrees) at velocity (deg/sec)\n
        heading can be -360 to 360"""
        # NOTE(review): the check below excludes exactly +/-360 even though the
        # docstring says "-360 to 360" — confirm whether the bounds are inclusive
        if not (-360 < heading < 360):
            raise ValueError("heading must be between -360 and 360")
        if velocity is None:
            velocity = self.turn_speed
        else:
            if units.value == vex.TurnVelocityUnits.PERCENT.value:
                # cannot exceed 100%
                if velocity > 100:
                    velocity = 100
                # if velocity is PERCENT convert to DPS, e.g. 100% is 180 DPS
                velocity = int(velocity * 1.8)
            elif units.value == vex.TurnVelocityUnits.DPS.value:
                # cannot exceed MAX velocity (DPS)
                if velocity > TURN_VELOCITY_MAX_DPS:
                    velocity = TURN_VELOCITY_MAX_DPS
                velocity = int(velocity)
        # handle negative velocity: a turn-to target only needs speed magnitude
        if velocity < 0:
            velocity = -velocity

        # apply the user's heading offset (set via inertial.set_heading) before sending
        heading_offset = self.inertial.heading_offset
        heading = math.fmod(heading_offset + heading, 360)
        stacking_type = vex.StackingType.STACKING_OFF
        message = commands.TurnTo(heading, velocity, stacking_type.value)
        self.robot_send(message.to_json())
        if wait:
            self._block_on_state(self.is_turn_active)

    def stop_all_movement(self):
        """stops all movements of the robot"""
        self.move_at(0,0)
        self.turn(vex.TurnType.RIGHT, 0)
        # clear the is_moving_flag now (used by robot.is_stopped())
        self._ws_status_thread.clear_is_moving_flag()
        # trigger this flag to be cleared again next time a status msg is received, in case the robot hasn't updated the state yet:
        self._ws_status_thread.is_moving_flag_needs_clearing = True
        self._ws_status_thread.is_moving_flag_needs_setting = False

    def spin_wheels(self, velocity1: int, velocity2: int, velocity3: int):
        """spin all three wheels of the robot at the specified velocities"""
        message = commands.SpinWheels(velocity1, velocity2, velocity3)
        self.robot_instance = None if False else self  # no-op guard removed; see note below
        self.robot_send(message.to_json())

    def set_xy_position(self, x, y):
        """sets the robot’s current position to the specified values"""
        # rotate the requested (x, y) by the negated heading offset so the pose
        # sent to the robot is in the robot's un-offset frame
        offset_radians = -math.radians(self.inertial.heading_offset)
        origin_x = x * math.cos(offset_radians) - y * math.sin(offset_radians)
        origin_y = y * math.cos(offset_radians) + x * math.sin(offset_radians)

        message = commands.SetPose(origin_x, origin_y)
        self.robot_send(message.to_json())

        # wait for two fresh status packets so subsequent position reads reflect the new pose
        for status_counter in range(2):
            heartbeat_state = self._ws_status_thread.heartbeat
            # wait till we get a new status packet
            while heartbeat_state == self._ws_status_thread.heartbeat:
                # print("status_counter %d" %(status_counter))
                time.sleep(0.050)

    #endregion Motion Commands

    #region Vision Commands
    def has_any_barrel(self):
        """returns true if a barrel is held by the kicker"""
        ai_objects = list(self.vision.get_data(AiVision.ALL_AIOBJS))
        has_barrel = False
        # NOTE(review): 'object' shadows the builtin; kept byte-identical here
        for object in range(len(ai_objects)):
            # horizontal center of the detection box
            cx = ai_objects[object].originX + ai_objects[object].width/2
            if ai_objects[object].classname in ["BlueBarrel", "OrangeBarrel"] and \
               BARREL_MIN_CX < cx < BARREL_MAX_CX and \
               ai_objects[object].originY > BARREL_MIN_Y:
                has_barrel = True

        return has_barrel

    def has_blue_barrel(self):
        """returns true if a barrel is held by the kicker"""
        ai_objects = list(self.vision.get_data(AiVision.ALL_AIOBJS))
        has_barrel = False
        for object in range(len(ai_objects)):
            cx = ai_objects[object].originX + ai_objects[object].width/2
            if ai_objects[object].classname in ["BlueBarrel"] and \
               BARREL_MIN_CX < cx < BARREL_MAX_CX and \
               ai_objects[object].originY > BARREL_MIN_Y:
                has_barrel = True
True + return has_barrel + + def has_orange_barrel(self): + """returns true if a barrel is held by the kicker""" + ai_objects = list(self.vision.get_data(AiVision.ALL_AIOBJS)) + has_barrel = False + for object in range(len(ai_objects)): + cx = ai_objects[object].originX + ai_objects[object].width/2 + if ai_objects[object].classname in ["OrangeBarrel"] and \ + BARREL_MIN_CX < cx < BARREL_MAX_CX and \ + ai_objects[object].originY > BARREL_MIN_Y : + has_barrel = True + return has_barrel + + def has_sports_ball(self): + """returns true if a ball is held by the kicker""" + ai_objects = list(self.vision.get_data(AiVision.ALL_AIOBJS)) + has_ball = False + for object in range(len(ai_objects)): + cx = ai_objects[object].originX + ai_objects[object].width/2 + if ai_objects[object].classname in ["SportsBall"] and \ + BALL_MIN_CX < cx < BALL_MAX_CX and \ + ai_objects[object].originY > BALL_MIN_Y : + has_ball = True + return has_ball + #endregion Vision Commands +class Inertial(): + """ AIM Inertial class. Provides methods to interact with the robot's inertial sensor.""" + def __init__(self, robot_instance: Robot): + """Initialize the Gyro with default settings.""" + self.robot_instance = robot_instance + self.heading_offset = 0 + self.rotation_offset = 0 + #region Inertial - Action + def calibrate(self): + """calibrate the IMU. 
Can't check if calibration is done, so probably do not call for now""" + message = commands.InterialCalibrate() + self.robot_instance.robot_send(message.to_json()) + #endregion Inertial - Action + #region Inertial - Mutators + def set_heading(self, heading): + """sets the robot’s heading to a specified value""" + raw_heading = self.get_heading_raw() + # print("reset heading to %f, get_heading_raw(): %f" %(heading, raw_heading)) + self.heading_offset = raw_heading - heading + + def reset_heading(self): + """robot heading will be set to 0""" + self.set_heading(0) + + def set_rotation(self, rotation): + """sets the robot’s rotation to a specified value""" + raw_rotation = self.get_rotation_raw() + self.rotation_offset = raw_rotation - rotation + + def reset_rotation(self): + """resets the robot’s rotation to 0""" + self.set_rotation(0) + + def set_crash_sensitivity(self, sensitivity=vex.SensitivityType.LOW): + """set the sensitivity of the crash sensor""" + message = commands.InterialSetCrashSensitivity(sensitivity.value) + self.robot_instance.robot_send(message.to_json()) + #endregion Inertial - Mutators + #region Inertial - Getters + def get_heading(self): + """reports the robot’s heading angle. This returns a float in the range 0 to 359.99 degrees""" + raw_heading = self.get_heading_raw() + heading = math.fmod(raw_heading - self.heading_offset, 360) + #round to 2 decimal places + heading = round(heading, 2) + if heading < 0: + heading += 360 + return heading + + def get_heading_raw(self): + """reports raw heading value from IMU""" + raw_heading = self.robot_instance.status["robot"]["heading"] + if type(raw_heading) == str: + raw_heading = float(raw_heading) + return raw_heading + + def get_rotation(self): + """returns the robot’s total rotation in degrees as a float. 
+ This measures how much the robot has rotated relative to its last reset point""" + raw_rotation = self.get_rotation_raw() + rotation = raw_rotation - self.rotation_offset + #round to 2 decimal places + rotation = round(rotation, 2) + return rotation + + def get_rotation_raw(self): + """reports raw rotation value from IMU""" + raw_rotation = self.robot_instance.status["robot"]["rotation"] + if type(raw_rotation) == str: + raw_rotation = float(raw_rotation) + return raw_rotation + + def get_acceleration(self, axis: Union[vex.AxisType, vex.AccelerationType]): + """ returns the robot’s acceleration for a given axis.""" + if axis in [vex.AxisType.X_AXIS, vex.AccelerationType.FORWARD]: + value = self.robot_instance._ws_status_thread.current_status["robot"]["acceleration"]["x"] + elif axis in [vex.AxisType.Y_AXIS, vex.AccelerationType.RIGHTWARD]: + value = self.robot_instance._ws_status_thread.current_status["robot"]["acceleration"]["y"] + elif axis in [vex.AxisType.X_AXIS, vex.AccelerationType.DOWNWARD]: + value = self.robot_instance._ws_status_thread.current_status["robot"]["acceleration"]["z"] + if type(value) == str: + value = float(value) + return value + + def get_turn_rate(self, axis: Union[vex.AxisType, vex.OrientationType]): + """returns the robot’s gyro rate for a given axis in degrees per second (DPS). 
It returns a float from –1000.00 to 1000.00 degrees per second""" + if axis in [vex.AxisType.X_AXIS, vex.OrientationType.ROLL]: + value = self.robot_instance._ws_status_thread.current_status["robot"]["gyro_rate"]["x"] + elif axis in [vex.AxisType.Y_AXIS, vex.OrientationType.PITCH]: + value = self.robot_instance._ws_status_thread.current_status["robot"]["gyro_rate"]["y"] + elif axis in [vex.AxisType.Z_AXIS, vex.OrientationType.YAW]: + value = self.robot_instance._ws_status_thread.current_status["robot"]["gyro_rate"]["z"] + if type(value) == str: + value = float(value) + return value + + def get_roll(self): + """returns the robot’s roll angle in the range –180.00 to 180.00 degrees as a float""" + value = self.robot_instance.status["robot"]["roll"] + if type(value) == str: + value = float(value) + #round to 2 decimal places + value = round(value, 2) + return value + + def get_pitch(self): + """returns the robot’s pitch angle in the range –90.00 to 90.00 degrees as a float""" + value = self.robot_instance.status["robot"]["pitch"] + if type(value) == str: + value = float(value) + #round to 2 decimal places + value = round(value, 2) + return value + + def get_yaw(self): + """returns the robot’s yaw angle in the range –180.00 to 180.00 degrees as a float""" + value = self.robot_instance.status["robot"]["yaw"] + if type(value) == str: + value = float(value) + #round to 2 decimal places + value = round(value, 2) + return value + + def is_calibrating(self): + """reports whether the gyro is calibrating.""" + if self.robot_instance._ws_status_thread.imu_cal_flag_needs_setting == True: + return True + robot_flags = self.robot_instance._ws_status_thread.current_status["robot"]["flags"] + calibrating = bool(int(robot_flags, 16) & SYS_FLAGS_IMU_CAL) + return calibrating + #endregion Inertial - Getters + #region Inertial - Callbacks + def crashed(self, callback: Callable[...,None], arg: tuple=()): + """calls the specified function when the robot crashes""" + 
        self.robot_instance.add_inertial_crash_callback(callback, arg)
    #endregion Inertial - Callbacks

class Screen():
    """
    Screen class for accessing the robot's screen features
    like drawing shapes,text,and showing emojis
    """
    def __init__(self, robot_instance: Robot):
        self.robot_instance = robot_instance
        # store r,g,b values of default fill color (blue) used when a draw call
        # does not supply its own color
        fill_r,fill_g,fill_b = self._return_rgb(vex.Color.BLUE)
        self.default_fill_color = ColorRGB(fill_r,fill_g,fill_b)
        pen_r,pen_g,pen_b = self._return_rgb(vex.Color.BLUE)
        self.default_pen_color = ColorRGB(pen_r,pen_g,pen_b)

    def _return_rgb(self, color):
        # split a vex.Color or a packed 0xRRGGBB int into (r, g, b) components
        if isinstance(color, (vex.Color, vex.Color.DefinedColor)):
            r = (color.value >> 16) & 0xFF
            g = (color.value >> 8) & 0xFF
            b = color.value & 0xFF
        elif isinstance(color, int):
            r = (color >> 16) & 0xFF
            g = (color >> 8) & 0xFF
            b = color & 0xFF
        else:
            raise AimException("parameter must be a vex.Color instance or int rgb value")
        return r,g,b

    def _return_transparency(self, color):
        # only vex.Color instances carry a transparency flag; ints are opaque
        if isinstance(color, (vex.Color, vex.Color.DefinedColor)):
            return color.transparent
        return False

    #region Screen - Cursor Print

    def print(self, *args, **kwargs):
        """displays text on the robot’s screen at the current cursor position and font"""
        try:
            # render the arguments exactly like builtin print() would, into a buffer
            out=io.StringIO()
            if 'end' not in kwargs:
                kwargs['end'] = ""
            # this resolves to the builtin print, not this method
            print(*args,**kwargs, file=out)
            text = out.getvalue()
            #print("text: ", text)
            out.close()
            message = commands.ScreenPrint(text)
            self.robot_instance.robot_send(message.to_json())
        except Exception as e:
            print(f"Error displaying text on screen: {e}")

    def set_cursor(self, row, column):
        """sets the cursor’s (row, column) screen position"""
        message = commands.ScreenSetCursor(row, column)
        self.robot_instance.robot_send(message.to_json())

    def next_row(self):
        """moves the cursor to the next row"""
        message = commands.ScreenNextRow()
        self.robot_instance.robot_send(message.to_json())

    def clear_row(self, row: int, color=vex.Color.BLUE):
        """clears a row of text."""
        r,g,b = self._return_rgb(color)
        message = commands.ScreenClearRow(row, r, g, b)
        self.robot_instance.robot_send(message.to_json())

    def get_row(self):
        """returns the current row of the cursor"""
        value = self.robot_instance.status["robot"]["screen"]["row"]
        # status packets may deliver numbers as strings
        if type(value) == str:
            value = int(value)
        return value

    def get_column(self):
        """returns the current column of the cursor"""
        value = self.robot_instance.status["robot"]["screen"]["column"]
        if type(value) == str:
            value = int(value)
        return value

    #endregion Screen - Cursor Print
    #region Screen - XY Print
    def print_at(self, *args, x=0, y=0, **kwargs):
        """displays text on the robot’s screen at a specified (x, y) screen coordinate. This method disregards the current cursor position"""
        out=io.StringIO()
        if 'end' not in kwargs:
            kwargs['end'] = ""
        print(*args,**kwargs, file=out)
        text = out.getvalue()
        #print("text: ", text)
        out.close()
        message = commands.ScreenPrintAt(text, x, y, True)
        self.robot_instance.robot_send(message.to_json())

    def set_origin(self, x, y):
        """sets the origin of the screen to the specified (x, y) screen coordinate"""
        message = commands.ScreenSetOrigin(x, y)
        self.robot_instance.robot_send(message.to_json())
    #endregion Screen - XY Print
    #region Screen - Mutators

    def clear_screen(self, color=vex.Color.BLUE):
        """clears the robot’s screen of all drawings and text"""
        r,g,b = self._return_rgb(color)
        message = commands.ScreenClear(r, g, b)
        self.robot_instance.robot_send(message.to_json())

    def set_font(self, fontname: vex.FontType):
        """sets the font used for displaying text on the robot’s screen"""
        # NOTE(review): .lower() assumes vex.FontType values behave like strings — confirm
        message = commands.ScreenSetFont(fontname.lower())
        self.robot_instance.robot_send(message.to_json())

    def set_pen_width(self, width: int):
        """sets the pen width used for drawing lines and shapes"""
        message = commands.ScreenSetPenWidth(width)
        self.robot_instance.robot_send(message.to_json())

    def set_pen_color(self, color):
        """sets the pen color used for drawing lines, shapes, and text"""
        r,g,b = self._return_rgb(color)
        # remember the choice so later draw calls can reuse it
        self.default_pen_color = ColorRGB(r,g,b)
        message = commands.ScreenSetPenColor(r, g, b)
        self.robot_instance.robot_send(message.to_json())

    def set_fill_color(self, color):
        """sets the fill color used when shapes are drawn"""
        r,g,b = self._return_rgb(color)
        t = self._return_transparency(color)
        self.default_fill_color = ColorRGB(r,g,b,t)
        message = commands.ScreenSetFillColor(r, g, b, t)
        self.robot_instance.robot_send(message.to_json())
    #endregion Screen - Mutators
    #region Screen - Draw
    def draw_pixel(self, x: int, y: int):
        """draws a pixel at the specified (x, y) screen coordinate in the current pen color"""
        message = commands.ScreenDrawPixel(x, y)
        self.robot_instance.robot_send(message.to_json())

    def draw_line(self, x1: int, y1: int, x2: int, y2: int):
        """draws a line from the first specified screen coordinate (x1, y1)
        to the second specified screen coordinate (x2, y2).
        It uses the current the pen width set by set_pen_width and pen color set by set_pen_color"""
        message = commands.ScreenDrawLine(x1, y1, x2, y2)
        self.robot_instance.robot_send(message.to_json())

    def draw_rectangle(self, x: int, y: int, width: int, height: int, color=None):
        """draws a rectangle. It uses the current pen width set by set_pen_width and the pen color
        set by set_pen_color for the outline.
        The fill color, set by set_fill_color, determines the interior color"""
        # if color is not provided, use the default fill color
        if color:
            r,g,b = self._return_rgb(color)
            t = self._return_transparency(color)
        else:
            r = self.default_fill_color.r
            g = self.default_fill_color.g
            b = self.default_fill_color.b
            t = self.default_fill_color.t
        message = commands.ScreenDrawRectangle(x, y, width, height, r, g, b, t)
        self.robot_instance.robot_send(message.to_json())

    def draw_circle(self, x: int, y: int, radius: int, color=None):
        """draws a circle. It uses the current pen width set by set_pen_width and the pen color set by set_pen_color for the outline.
        The fill color, set by set_fill_color, determines the interior color"""
        # if color is not provided, use the default fill color
        if color:
            r,g,b = self._return_rgb(color)
            t = self._return_transparency(color)
        else:
            r = self.default_fill_color.r
            g = self.default_fill_color.g
            b = self.default_fill_color.b
            t = self.default_fill_color.t
        message = commands.ScreenDrawCircle(x, y, radius, r, g, b, t)
        self.robot_instance.robot_send(message.to_json())

    def show_file(self, filename: str, x: int, y: int):
        """draws a custom user-uploaded image on the robot’s screen at the specified (x, y) screen coordinate"""
        #TODO: allowed extensions are correct?
        if filename[-3:] not in ("bmp", "png"):
            raise InvalidImageFileException(f"extension is {filename[-3:]}; expected extension to be bmp or png")

        message = commands.ScreenDrawImageFromFile(filename, x, y)
        self.robot_instance.robot_send(message.to_json())

    def set_clip_region(self, x: int, y: int, width: int, height: int):
        """
        defines a rectangular area on the screen where all drawings and text will be confined.
        Any content outside this region will not be displayed
        """
        message = commands.ScreenSetClipRegion(x, y, width, height)
        self.robot_instance.robot_send(message.to_json())
    #endregion Screen - Draw
    #region Screen - Emoji
    def show_emoji(self, emoji: vex.EmojiType.EmojiType, look: vex.EmojiLookType.EmojiLookType = vex.EmojiLookType.LOOK_FORWARD):
        """Show an emoji from a list of preset emojis"""
        message = commands.ScreenShowEmoji(emoji.value, look.value)
        self.robot_instance.robot_send(message.to_json())

    def hide_emoji(self):
        """hide emoji from being displayed, so that any underlying graphics can be viewed"""
        message = commands.ScreenHideEmoji()
        self.robot_instance.robot_send(message.to_json())
    #endregion Screen - Emoji
    #region Screen - Callbacks
    def pressed(self, callback: Callable[...,None], arg: tuple=()):
        """Calls the specified function when the screen is pressed, with optional args."""
        self.robot_instance.add_screen_pressed_callback(callback, arg)

    def released(self, callback: Callable[...,None], arg: tuple=()):
        """Calls the specified function when the screen is released, with optional args."""
        self.robot_instance.add_screen_released_callback(callback, arg)

    #endregion Screen - Callbacks
    #region Screen - Touch
    def pressing(self):
        """returns true if the screen is being pressed"""
        # touch_flags is a hex string; bit 0 is the "pressed" flag
        is_pressing = bool(int(self.robot_instance.status["robot"]["touch_flags"], 16) & 0x0001)
        return is_pressing

    def x_position(self):
        """returns the x position of the screen press"""
        touch_x = float(self.robot_instance.status["robot"]["touch_x"])
        return touch_x

    def y_position(self):
        """returns the y position of the screen press"""
        touch_y = float(self.robot_instance.status["robot"]["touch_y"])
        return touch_y
    #endregion Screen - Touch
    #region Screen - Vision
    def show_aivision(self):
        """show the aivision output on the screen"""
        message = commands.ScreenShowAivision()
self.robot_instance.robot_send(message.to_json()) + + def hide_aivision(self): + """hide the aivision output""" + message = commands.ScreenHideAivision() + self.robot_instance.robot_send(message.to_json()) + #endregion Screen - Vision + +class Kicker(): + """ Kicker class for accessing the robot's kicker features like kicking and pushing""" + def __init__(self, robot_instance: Robot): + self.robot_instance = robot_instance + pass + + def kick(self, kick_type: vex.KickType): + """activates the Kicker to kick an object with specified levels of force""" + message = commands.KickerKick(kick_type) + self.robot_instance.robot_send(message.to_json()) + + def place(self): + """activates the Kicker in order to place an object gently in front of the robot""" + # json = {"cmd_id": "push"} + self.kick(vex.KickType.SOFT) + +class Sound(): + """ + Sound class for accessing the robot's sound features like playing built-in and uploaded sounds + """ + def __init__(self, robot_instance: Robot): + self.robot_instance = robot_instance + def __note_to_midi(self, note_str): + """ + Converts a musical note string (e.g., "C#5") into a MIDI note number (0-11) and octave. 
+ """ + # Check at least one character is present + if len(note_str) < 1: + raise TypeError("invalid note string") + + # Map first character to note + c = note_str[0].lower() + if c == 'c': + note = 0 + elif c == 'd': + note = 2 + elif c == 'e': + note = 4 + elif c == 'f': + note = 5 + elif c == 'g': + note = 7 + elif c == 'a': + note = 9 + elif c == 'b': + note = 11 + else: + raise TypeError("invalid note string") + + octave = 0 + + # If length=2, second char should be the octave 5–8 + if len(note_str) == 2: + if note_str[1] < '5' or note_str[1] > '8': + raise TypeError("invalid note string") + octave = int(note_str[1]) - 5 + if octave < 0: + octave = 0 + + # If length=3, middle char should be '#' or 'b', last char is octave 5–8 + elif len(note_str) == 3: + if (note_str[2] < '5' or note_str[2] > '8' or (note_str[1] not in ('#', 'b'))): + raise TypeError("invalid note string") + accidental = note_str[1] + if accidental == '#' and note < 11: + note += 1 + elif accidental == 'b' and note > 0: + note -= 1 + + octave = int(note_str[2]) - 5 + if octave < 0: + octave = 0 + else: + raise TypeError("invalid note string") + + return note, octave + + def __set_sound_active(self): + self.robot_instance._ws_status_thread.set_sound_playing_flag() + self.robot_instance._ws_status_thread.set_sound_downloading_flag() + self.robot_instance._ws_status_thread.sound_downloading_flag_needs_setting = True # have it be set again after next status message + self.robot_instance._ws_status_thread.sound_playing_flag_needs_setting = True # have it be set again after next status message + + def play(self, sound: vex.SoundType, volume = 50): + """plays one of the robot’s built-in sounds at a specified volume percentage. 
+ Since this is a non-waiting method, the robot plays the built-in sound and moves to + the next command without waiting for it to finish""" + message = commands.SoundPlay(sound.lower(), volume) + self.robot_instance.robot_send(message.to_json()) + + def play_file(self, name: str, volume = 50): + """plays a custom sound loaded by the user at a specified volume percentage. \n + Current uploading sounds to AIM is supported onnly the VEXcode AIM app. + Since this is a non-waiting method, the robot plays the built-in sound and moves to + the next command without waiting for it to finish""" + message = commands.SoundPlayFile(name, volume) + self.robot_instance.robot_send(message.to_json()) + + def play_local_file(self, filepath: str, volume = 100): + """play a WAV or MP3 file stored on the client side; file will be transmitted to robot\n + Maximum filesize is 255 KB""" + file = pathlib.Path(filepath) + size = file.stat().st_size + if size > SOUND_SIZE_MAX_BYTES: + raise InvalidSoundFileException(f"file size of {size} bytes is too big; max size allowed is {SOUND_SIZE_MAX_BYTES} bytes ({SOUND_SIZE_MAX_BYTES/1024:.1f} kB)") + + extension = file.suffix + filename = file.name + audio = bytearray(64) + if not (extension == ".wav" or extension == ".mp3"): + raise InvalidSoundFileException(f"extension is {extension}; expected extension to be wav or mp3") + try: + f = open(filepath, 'rb') + except FileNotFoundError: + print ("File", filepath, "was not found") + else: + with f: + data = f.read() + + # do some sanity checks to make sure it's really a wave: + if extension == ".wav": + if not (data[0:4] == b'RIFF' and data[8:12] == b'WAVE'): + raise InvalidSoundFileException("file extension was .wav but does not appear to actually be a WAVE file") + channels = int.from_bytes(data[22:24], "little") + if channels > 2: + raise InvalidSoundFileException(f"only mono or stereo is supported, detected {channels} channels.") + if channels == 2: + print("%s is stereo; mono is recommended") + 
# first 64 bytes of audio is header + audio[0:1] = (0).to_bytes(1, 'little') + + # assuming the mp3 is valid: + elif extension == ".mp3": + audio[0:1] = (1).to_bytes(1, 'little') + + # set volume + audio[1:2] = (volume).to_bytes(1, 'little') + + audio[4:8] = (len(data)).to_bytes(4, 'little') # length of data + audio[8:12] = (0).to_bytes(4, 'little') # file chunk number + audio[32:32+len(filename)] = map(ord, filename[:32]) # filename + audio.extend(data) # append the data + self.robot_instance.robot_send_audio(audio) + self.__set_sound_active() + + def play_note(self, note_string: str, duration=750, volume=50): + """ + plays a specific note for a specific duration. . Since this is a non-waiting method, the robot plays + the specific note and moves to the next command without waiting for it to finish. + + ### Example: + robot.sound.play_note("C5", 2000) + robot.sound.play_note("F#6", 2000, 100) + """ + #get the note number and octave + note, octave = self.__note_to_midi(note_string) + if duration > 4000: + duration = 4000 + if volume > 100: + volume = 100 + if volume < 0: + volume = 0 + message = commands.SoundPlayNote(note, octave, duration, volume) + self.robot_instance.robot_send(message.to_json()) + self.__set_sound_active() + + def is_active(self): + """returns true if sound is currently playing or if it is being transmitted for playing""" + robot_flags = self.robot_instance.status["robot"]["flags"] + sound_active = bool(int(robot_flags, 16) & SYS_FLAGS_SOUND_PLAYING) or bool(int(robot_flags, 16) & SYS_FLAGS_IS_SOUND_DNL) + return sound_active + + def stop(self): + """ + stops a sound that is currently playing. + It will take some time for the sound to actually stop playing. 
+ """ + message = commands.SoundStop() + self.robot_instance.robot_send(message.to_json()) + +class Led(): + """ Led class for accessing the robot's LED features like setting the color of the LEDs""" + def __init__(self, robot_instance: Robot): + self.robot_instance = robot_instance + pass + def __set_led_rgb(self, led: str, r: int, g: int, b: int): + """Turns on the specified LED with the specified RGB values""" + message = commands.LedSet(led, r, g, b) + self.robot_instance.robot_send(message.to_json()) + def on(self, *args): + """ + Sets the color of any one of six LEDs, with RGB values \n + ### Example: + robot.led.on(vex.LightType.ALL_LEDS, vex.Color.BLUE)\n + robot.led.on(1, vex.Color.BLUE) + """ + light_index = "all" + r, g, b = 0, 0, 0 + if len(args) not in [2, 4]: + raise TypeError("must have two or four arguments") + + if isinstance(args[0], int): + if args[0] in range(0,6): + light_index = f"light{args[0]+1}" + else: + light_index = "all" + elif isinstance(args[0], vex.LightType): + light_index = args[0] + + else: + raise TypeError("first argument must be of type int or vex.LightType") + + if len(args) == 2: + if isinstance(args[1], (vex.Color, vex.Color.DefinedColor)): + r = (args[1].value >> 16) & 0xFF + g = (args[1].value >> 8) & 0xFF + b = args[1].value & 0xFF + elif isinstance(args[1], (bool)): # turn white if True, off if False + r = 128 if args[1] else 0 + g = 128 if args[1] else 0 + b = 128 if args[1] else 0 + elif args[1] is None: + r = 0 + g = 0 + b = 0 + else: + raise TypeError("second argument must be of type vex.Color, vex.Color.DefinedColor, bool, or None") + + elif len(args) == 4: + r = args[1] + g = args[2] + b = args[3] + + else: + raise TypeError(f"bad parameters, n_args: {len(args)}") + + self.__set_led_rgb(light_index, r, g, b) + + def off(self, led: vex.LightType): + """turns off the specified LED""" + message = commands.LedSet(led, 0, 0, 0) + self.robot_instance.robot_send(message.to_json()) + +class Colordesc: + '''### Colordesc 
class - a class for holding an AI vision sensor color definition + + #### Arguments: + index : The color description index (1 to 7) + red : the red color value + green : the green color value + blue : the blue color value + hangle : the range of allowable hue + hdsat : the range of allowable saturation + + #### Returns: + An instance of the Colordesc class + + #### Examples: + COL1 = Colordesc(1, 13, 114, 227, 10.00, 0.20)\\ + COL2 = Colordesc(2, 237, 61, 74, 10.00, 0.20)\\ + ''' + def __init__(self, index, red, green, blue, hangle, hdsat): + self.id = index + self.red = red + self.green = green + self.blue = blue + self.hangle = hangle + self.hdsat = hdsat + pass + +class Codedesc: + '''### Codedesc class - a class for holding AI vision sensor codes + + A code description is a collection of up to five AI vision color descriptions. + #### Arguments: + index : The code description index (1 to 5) + c1 : An AI vision Colordesc + c1 : An AI vision Colordesc + c3 (optional) : An AI vision Colordesc + c4 (optional) : An AI vision Colordesc + c5 (optional) : An AI vision Colordesc + + #### Returns: + An instance of the Codedesc class + + #### Examples: + COL1 = Colordesc(1, 13, 114, 227, 10.00, 0.20)\\ + COL2 = Colordesc(2, 237, 61, 74, 10.00, 0.20)\\ + C1 = Codedesc( 1, COL1, COL2 ) + ''' + def __init__(self, index, c1:Colordesc, c2:Colordesc, *args): + self.id = index + self.cols = [c1, c2] + for arg in args: + if isinstance(arg, Colordesc): + self.cols.append(arg) + +class Tagdesc: + '''### Tagdesc class - a class for holding AI vision sensor tag id + + A tag description holds an apriltag id + #### Arguments: + id : The apriltag id (positive integer, not 0) + + #### Returns: + An instance of the Tagdesc class + + #### Examples: + T1 = Tagdesc( 23 ) + ''' + def __init__(self, index): + self.id = index + pass + + def __int__(self): + return self.id + + def __eq__(self, other): + if isinstance(other, Tagdesc): + return self.id == other.id + elif isinstance(other, int): + 
return self.id == other + return False + +class AiObjdesc: + '''### AiObjdesc class - a class for holding AI vision sensor AI object id + + A tag description holds an apriltag id + #### Arguments: + id : The AI Object (model) id (positive integer, not 0) + + #### Returns: + An instance of the AiObjdesc class + + #### Examples: + A1 = AiObjdesc( 2 ) + ''' + def __init__(self, index): + self.id = index + pass + + def __int__(self): + return self.id + + def __eq__(self, other): + if isinstance(other, AiObjdesc): + return self.id == other.id + elif isinstance(other, int): + return self.id == other + return False + +class ObjDesc: + """ + ObjDesc class - to represent any object type + """ + def __init__(self, index): + self.id = index + +class _ObjectTypeMask: + unkownObject = 0 + colorObject = (1 << 0) + codeObject = (1 << 1) + modelObject = (1 << 2) + tagObject = (1 << 3) + allObject = (0x3F) + +MATCH_ALL_ID = 0xFFFF +class AiVision(): + """ + AiVision class for accessing the robot's AI Vision Sensor features + """ + ALL_TAGS = Tagdesc(MATCH_ALL_ID) + '''A tag description for get_data indicating all tag objects to be returned''' + ALL_COLORS = Colordesc(MATCH_ALL_ID, 0, 0, 0, 0, 0) + '''A tag description for get_data indicating all color objects to be returned''' + ALL_CODES = Codedesc(MATCH_ALL_ID, ALL_COLORS, ALL_COLORS) + '''A tag description for get_data indicating all code objects to be returned''' + ALL_AIOBJS = AiObjdesc(MATCH_ALL_ID) + '''A tag description for get_data indicating all AI model objects to be returned''' + ALL_OBJECTS = ObjDesc(MATCH_ALL_ID) + '''A tag description for get_data indicating all objects to be returned''' + ALL_AIOBJECTS = AiObjdesc(MATCH_ALL_ID) + '''A description for get_data indicating all AI model objects to be returned''' + + def __init__(self, robot_instance: Robot): + self.robot_instance = robot_instance + self._object_count_val = 0 + self._largest_object = None + + def get_data(self, type, count=8): + '''### filters the data 
#### look for and return a maximum of 4 objects matching COL1
+ + if count > AIVISION_MAX_OBJECTS: + count = AIVISION_MAX_OBJECTS + + objects = self.robot_instance.status["aivision"]["objects"] + item_count = objects["count"] + ai_object_list = [AiVisionObject() for item in range(item_count)] + # first just extract everything we got from ws_status + for item in range(item_count): + ai_object_list[item].type = objects["items"][item]["type"] + ai_object_list[item].id = objects["items"][item]["id"] + ai_object_list[item].originX = objects["items"][item]["originx"] + ai_object_list[item].originY = objects["items"][item]["originy"] + ai_object_list[item].width = objects["items"][item]["width"] + ai_object_list[item].height = objects["items"][item]["height"] + ai_object_list[item].centerX = int(ai_object_list[item].originX + (ai_object_list[item].width/2)) + ai_object_list[item].centerY = int(ai_object_list[item].originY + (ai_object_list[item].height/2)) + + if ai_object_list[item].type == _ObjectTypeMask.colorObject: + ai_object_list[item].angle = objects["items"][item]["angle"] * 0.01 + + if ai_object_list[item].type == _ObjectTypeMask.codeObject: + ai_object_list[item].angle = objects["items"][item]["angle"] * 0.01 + + if ai_object_list[item].type == _ObjectTypeMask.modelObject: #AI model objects can have a classname + ai_object_list[item].classname = self.robot_instance.status["aivision"]["classnames"]["items"][ai_object_list[item].id]["name"] + ai_object_list[item].score = objects["items"][item]["score"] + + if ai_object_list[item].type == _ObjectTypeMask.tagObject: + ai_object_list[item].tag.x = (objects["items"][item]["x0"],objects["items"][item]["x1"],objects["items"][item]["x2"],objects["items"][item]["x3"]) + ai_object_list[item].tag.y = (objects["items"][item]["y0"],objects["items"][item]["y1"],objects["items"][item]["y2"],objects["items"][item]["y3"]) + + ai_object_list[item].rotation = ai_object_list[item].angle + ai_object_list[item].area = ai_object_list[item].width * ai_object_list[item].height + cx = 
ai_object_list[item].centerX + cy = ai_object_list[item].centerY + ai_object_list[item].bearing = -34.656 + (cx * 0.22539) + (cy * 0.011526) + (cx * cx * -0.000042011) + (cx * cy * 0.000010433) + (cy * cy * -0.00007073) + + # print("diagnostic: ai_object_list: ", ai_object_list) + num_matches = 0 + sublist = [] + for item in range(item_count): + match_found = False + if match_tuple: + # check all tuple members for a match + for obj in match_tuple: + if isinstance(obj, Colordesc): + if ai_object_list[item].type == _ObjectTypeMask.colorObject and (ai_object_list[item].id == obj.id or MATCH_ALL_ID == obj.id): + match_found = True + elif isinstance(obj, Codedesc): + if ai_object_list[item].type == _ObjectTypeMask.codeObject and (ai_object_list[item].id == obj.id or MATCH_ALL_ID == obj.id): + match_found = True + elif isinstance(obj, AiObjdesc): + if ai_object_list[item].type == _ObjectTypeMask.modelObject and (ai_object_list[item].id == obj.id or MATCH_ALL_ID == obj.id): + match_found = True + elif isinstance(obj, Tagdesc): + if ai_object_list[item].type == _ObjectTypeMask.tagObject and (ai_object_list[item].id == obj.id or MATCH_ALL_ID == obj.id): + match_found = True + elif isinstance(obj, ObjDesc): + if ai_object_list[item].id == obj.id or MATCH_ALL_ID == obj.id: + match_found = True + else: + # assume obj is an int + if ai_object_list[item].id == int(obj) or MATCH_ALL_ID == int(obj): + match_found = True + else: + if ai_object_list[item].id == id or MATCH_ALL_ID == id: + match_found = True + + if ai_object_list[item].type & type_mask: + if match_found: + num_matches += 1 + #sort objects by object area in descending order + current_object_area = ai_object_list[item].height * ai_object_list[item].width + current_object_smallest = True + for i in range(len(sublist)): + if current_object_area >= (sublist[i].width * sublist[i].height): + sublist.insert(i, ai_object_list[item]) # insert item at position i of sublist. 
+ current_object_smallest = False + break + if current_object_smallest: + sublist.append(ai_object_list[item]) #add to the end + + if num_matches > count: + num_matches = count + + self._object_count_val = num_matches + if sublist: + self._largest_object = sublist[0] + else: + self._largest_object = None + return sublist[:num_matches] + + def largest_object(self): + '''### Request the largest object from the last get_data(...) call + + #### Arguments: + None + + #### Returns: + An AiVisionObject object or None if it does not exist + ''' + return self._largest_object + + def object_count(self): + '''### Request the number of objects found in the last get_data call + + #### Arguments: + None + + #### Returns: + The number of objects found in the last get_data call + ''' + return self._object_count_val + + + def tag_detection(self, enable: bool): + '''### Enable or disable apriltag processing + + #### Arguments: + enable : True or False + + #### Returns: + None + ''' + message = commands.VisionTagDetection(enable) + self.robot_instance.robot_send(message.to_json()) + + def color_detection(self, enable: bool, merge: bool = False): + '''### Enable or disable color and code object processing + + #### Arguments: + enable : True or False + merge (optional) : True to enable merging of adjacent color detections + + #### Returns: + None + ''' + message = commands.VisionColorDetection(enable, merge) + self.robot_instance.robot_send(message.to_json()) + + def model_detection(self, enable: bool): + '''### Enable or disable AI model object processing + + #### Arguments: + enable : True or False + + #### Returns: + None + ''' + message = commands.VisionModelDetection(enable) + self.robot_instance.robot_send(message.to_json()) + + def color_description(self, desc: Colordesc): + '''### set a new color description + + #### Arguments: + desc: a color description + + #### Returns: + None + ''' + message = commands.VisionColorDescription(desc.id, desc.red, desc.green, desc.blue, 
desc.hangle, desc.hdsat) + self.robot_instance.robot_send(message.to_json()) + + def code_description(self, desc: Codedesc ): + '''### set a new code description + + #### Arguments: + desc: a code description + + #### Returns: + None + ''' + message = commands.VisionCodeDescription(desc.id, *desc.cols) + self.robot_instance.robot_send(message.to_json()) + + + def get_camera_image(self): + """ + returns a camera image; starts stream when first called; first image will take about 0.3 seconds to return.\n + Subsequently, images will continually stream from robot and therefore will be immediately available. + """ + if self.robot_instance._ws_img_thread._streaming == False: + # print("starting the stream") + start_time = time.time() + time_elapsed = 0 + self.robot_instance._ws_img_thread.start_stream() + while (self.robot_instance._ws_img_thread.image_list[self.robot_instance._ws_img_thread.current_image_index] == bytes(1) and time_elapsed < 0.5): + time.sleep(0.01) + time_elapsed = time.time() - start_time + image = self.robot_instance._ws_img_thread.image_list[self.robot_instance._ws_img_thread.current_image_index] + if image == bytes(1): + raise NoImageException("no image was received") + return image + +class VisionObject: + '''### VisionObject class - a class with some predefined objects for get_data''' + SPORTS_BALL = AiObjdesc(0) + '''A description for get_data indicating the AI ball objects to be returned''' + BLUE_BARREL = AiObjdesc(1) + '''A description for get_data indicating the AI blue barrel objects to be returned''' + ORANGE_BARREL = AiObjdesc(2) + '''A description for get_data indicating the AI orange barrel objects to be returned''' + AIM_ROBOT = AiObjdesc(3) + '''A description for get_data indicating the AI robot objects to be returned''' + TAG0 = Tagdesc(0) + '''A description for get_data indicating apriltags with id 0 to be returned''' + TAG1 = Tagdesc(1) + '''A description for get_data indicating apriltags with id 1 to be returned''' + TAG2 = 
Tagdesc(2) + '''A description for get_data indicating apriltags with id 2 to be returned''' + TAG3 = Tagdesc(3) + '''A description for get_data indicating apriltags with id 3 to be returned''' + TAG4 = Tagdesc(4) + '''A description for get_data indicating apriltags with id 4 to be returned''' + TAG5 = Tagdesc(5) + '''A description for get_data indicating apriltags with id 5 to be returned''' + TAG6 = Tagdesc(6) + '''A description for get_data indicating apriltags with id 6 to be returned''' + TAG7 = Tagdesc(7) + '''A description for get_data indicating apriltags with id 7 to be returned''' + TAG8 = Tagdesc(8) + '''A description for get_data indicating apriltags with id 8 to be returned''' + TAG9 = Tagdesc(9) + '''A description for get_data indicating apriltags with id 9 to be returned''' + TAG10 = Tagdesc(10) + '''A description for get_data indicating apriltags with id 10 to be returned''' + TAG11 = Tagdesc(11) + '''A description for get_data indicating apriltags with id 11 to be returned''' + TAG12 = Tagdesc(12) + '''A description for get_data indicating apriltags with id 12 to be returned''' + TAG13 = Tagdesc(13) + '''A description for get_data indicating apriltags with id 13 to be returned''' + TAG14 = Tagdesc(14) + '''A description for get_data indicating apriltags with id 14 to be returned''' + TAG15 = Tagdesc(15) + '''A description for get_data indicating apriltags with id 15 to be returned''' + TAG16 = Tagdesc(16) + '''A description for get_data indicating apriltags with id 16 to be returned''' + TAG17 = Tagdesc(17) + '''A description for get_data indicating apriltags with id 17 to be returned''' + TAG18 = Tagdesc(18) + '''A description for get_data indicating apriltags with id 18 to be returned''' + TAG19 = Tagdesc(19) + '''A description for get_data indicating apriltags with id 19 to be returned''' + TAG20 = Tagdesc(20) + '''A description for get_data indicating apriltags with id 20 to be returned''' + TAG21 = Tagdesc(21) + '''A description for get_data 
indicating apriltags with id 21 to be returned''' + TAG22 = Tagdesc(22) + '''A description for get_data indicating apriltags with id 22 to be returned''' + TAG23 = Tagdesc(23) + '''A description for get_data indicating apriltags with id 23 to be returned''' + TAG24 = Tagdesc(24) + '''A description for get_data indicating apriltags with id 24 to be returned''' + TAG25 = Tagdesc(25) + '''A description for get_data indicating apriltags with id 25 to be returned''' + TAG26 = Tagdesc(26) + '''A description for get_data indicating apriltags with id 26 to be returned''' + TAG27 = Tagdesc(27) + '''A description for get_data indicating apriltags with id 27 to be returned''' + TAG28 = Tagdesc(28) + '''A description for get_data indicating apriltags with id 28 to be returned''' + TAG29 = Tagdesc(29) + '''A description for get_data indicating apriltags with id 29 to be returned''' + TAG30 = Tagdesc(30) + '''A description for get_data indicating apriltags with id 30 to be returned''' + TAG31 = Tagdesc(31) + '''A description for get_data indicating apriltags with id 31 to be returned''' + TAG32 = Tagdesc(32) + '''A description for get_data indicating apriltags with id 32 to be returned''' + TAG33 = Tagdesc(33) + '''A description for get_data indicating apriltags with id 33 to be returned''' + TAG34 = Tagdesc(34) + '''A description for get_data indicating apriltags with id 34 to be returned''' + TAG35 = Tagdesc(35) + '''A description for get_data indicating apriltags with id 35 to be returned''' + TAG36 = Tagdesc(36) + '''A description for get_data indicating apriltags with id 36 to be returned''' + TAG37 = Tagdesc(37) + '''A description for get_data indicating apriltags with id 37 to be returned''' + ALL_TAGS = Tagdesc(MATCH_ALL_ID) + '''A description for get_data indicating any apriltag to be returned''' + ALL_VISION = ObjDesc(MATCH_ALL_ID) + '''A description for get_data indicating any object to be returned''' + ALL_COLORS = (AiVision.ALL_COLORS, AiVision.ALL_CODES) + '''A 
The current time in the specified units.
# milliseconds as int - no decimal
Innovation First 2025. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# ================================================================================================= +""" +AIM WebSocket API - Settings + +Settings class to read and manage the JSON configuration file. +""" +import json +import os + +class Settings: + """ + Settings class to read the JSON configuration file and provide access to the properties. + """ + + def __init__(self): + """ + Initialize the Settings class and load the settings from the JSON configuration file. + """ + self.file_path = os.path.join(os.path.dirname(__file__), 'settings.json') + self.config = self._load_settings() + + def _load_settings(self): + """ + Load the settings from the JSON configuration file. + + Returns: + dict: Dictionary containing the configuration settings. + """ + if not os.path.exists(self.file_path): + raise FileNotFoundError(f"Configuration file not found: {self.file_path}") + + with open(self.file_path, 'r') as file: + config = json.load(file) + + return config + + @property + def host(self): + """ + Get the host property from the configuration settings. + + Returns: + str: Host property value. + """ + return self.config.get('connection', {}).get('host', 'localhost') \ No newline at end of file diff --git a/resources/python/vex/vex_globals.py b/resources/python/vex/vex_globals.py new file mode 100644 index 0000000..65c128f --- /dev/null +++ b/resources/python/vex/vex_globals.py @@ -0,0 +1,295 @@ +# ================================================================================================= +# Copyright (c) Innovation First 2025. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# ================================================================================================= +""" +AIM WebSocket API - Globals + +This module defines global constants and enums used in the AIM WebSocket API. These globals +are provided to improve code readability and ease of use, especially for beginners and users +familiar with the VEXcode API. + +Globals in this module are designed to match the VEXcode API documentation. +If these conflict with existing types or variables in your project, +you may choose not to import this module. +""" + +# ---------------------------------------------------------- +# pylint: disable=unnecessary-pass,unused-argument,line-too-long,too-many-lines +# pylint: disable=missing-module-docstring,missing-function-docstring,missing-class-docstring +# pylint: disable=invalid-name,unused-import,redefined-outer-name + +from vex import KickType +from vex import AxisType +from vex import OrientationType +from vex import AccelerationType +from vex import LightType +from vex import Color +from vex import FontType +from vex import EmojiType +from vex import EmojiLookType +from vex import SoundType +from vex import VisionObject + +SOFT = KickType.SOFT +'''A kick type of soft''' +MEDIUM = KickType.MEDIUM +'''A kick type of medium''' +HARD = KickType.HARD +'''A kick type of hard''' + +X_AXIS = AxisType.X_AXIS +'''The X axis of the Inertial sensor.''' +Y_AXIS = AxisType.Y_AXIS +'''The Y axis of the Inertial sensor.''' +Z_AXIS = AxisType.Z_AXIS +'''The Z axis of the Inertial sensor.''' + +ROLL = OrientationType.ROLL +'''roll, orientation around the X axis of the Inertial sensor.''' +PITCH = OrientationType.PITCH +'''pitch, orientation around the Y axis of the Inertial sensor.''' +YAW = OrientationType.YAW +'''yaw, orientation around the Z axis of the Inertial sensor.''' + +FORWARD = AccelerationType.FORWARD +'''The X acceleration axis of the Inertial sensor.''' +RIGHTWARD = AccelerationType.RIGHTWARD +'''The Y acceleration axis of the Inertial 
sensor.''' +DOWNWARD = AccelerationType.DOWNWARD +'''The Z acceleration axis of the Inertial sensor.''' + +LED1 = LightType.LED1 +'''The LED at the 315 degree position on AIM''' +LED2 = LightType.LED2 +'''The LED at the 265 degree position on AIM''' +LED3 = LightType.LED3 +'''The LED at the 210 degree position on AIM''' +LED4 = LightType.LED4 +'''The LED at the 155 degree position on AIM''' +LED5 = LightType.LED5 +'''The LED at the 100 degree position on AIM''' +LED6 = LightType.LED6 +'''The LED at the 45 degree position on AIM''' +ALL_LEDS = LightType.ALL_LEDS +'''All LEDs on AIM''' + +BLACK = Color.BLACK +'''predefined Color black''' +WHITE = Color.WHITE +'''predefined Color white''' +RED = Color.RED +'''predefined Color red''' +GREEN = Color.GREEN +'''predefined Color green''' +BLUE = Color.BLUE +'''predefined Color blue''' +YELLOW = Color.YELLOW +'''predefined Color yellow''' +ORANGE = Color.ORANGE +'''predefined Color orange''' +PURPLE = Color.PURPLE +'''predefined Color purple''' +CYAN = Color.CYAN +'''predefined Color cyan''' +TRANSPARENT = Color.TRANSPARENT +'''predefined Color transparent''' + +MONO12 = FontType.MONO12 +'''monotype font of size 12''' +MONO15 = FontType.MONO15 +'''monotype font of size 15''' +MONO20 = FontType.MONO20 +'''monotype font of size 20''' +MONO24 = FontType.MONO24 +'''monotype font of size 24''' +MONO30 = FontType.MONO30 +'''monotype font of size 30''' +MONO36 = FontType.MONO36 +'''monotype font of size 36''' +MONO40 = FontType.MONO40 +'''monotype font of size 40''' +MONO60 = FontType.MONO60 +'''monotype font of size 60''' +PROP20 = FontType.PROP20 +'''proportional font of size 20''' +PROP24 = FontType.PROP24 +'''proportional font of size 24''' +PROP30 = FontType.PROP30 +'''proportional font of size 30''' +PROP36 = FontType.PROP36 +'''proportional font of size 36''' +PROP40 = FontType.PROP40 +'''proportional font of size 40''' +PROP60 = FontType.PROP60 +'''proportional font of size 60''' + +EXCITED = EmojiType.EXCITED +CONFIDENT = 
EmojiType.CONFIDENT +SILLY = EmojiType.SILLY +AMAZED = EmojiType.AMAZED +STRONG = EmojiType.STRONG +THRILLED = EmojiType.THRILLED +HAPPY = EmojiType.HAPPY +PROUD = EmojiType.PROUD +LAUGHING = EmojiType.LAUGHING +OPTIMISTIC = EmojiType.OPTIMISTIC +DETERMINED = EmojiType.DETERMINED +AFFECTIONATE = EmojiType.AFFECTIONATE +CALM = EmojiType.CALM +QUIET = EmojiType.QUIET +SHY = EmojiType.SHY +CHEERFUL = EmojiType.CHEERFUL +LOVED = EmojiType.LOVED +SURPRISED = EmojiType.SURPRISED +THINKING = EmojiType.THINKING +TIRED = EmojiType.TIRED +CONFUSED = EmojiType.CONFUSED +BORED = EmojiType.BORED +EMBARRASSED = EmojiType.EMBARRASSED +WORRIED = EmojiType.WORRIED +SAD = EmojiType.SAD +SICK = EmojiType.SICK +DISAPPOINTED = EmojiType.DISAPPOINTED +NERVOUS = EmojiType.NERVOUS +ANNOYED = EmojiType.ANNOYED +STRESSED = EmojiType.STRESSED +ANGRY = EmojiType.ANGRY +FRUSTRATED = EmojiType.FRUSTRATED +JEALOUS = EmojiType.JEALOUS +SHOCKED = EmojiType.SHOCKED +FEAR = EmojiType.FEAR +DISGUST = EmojiType.DISGUST + +LOOK_FORWARD = EmojiLookType.LOOK_FORWARD +'''The emoji will look forwards.''' +LOOK_RIGHT = EmojiLookType.LOOK_RIGHT +'''The emoji will look to the right.''' +LOOK_LEFT = EmojiLookType.LOOK_LEFT +'''The emoji will look to the left.''' + +DOORBELL = SoundType.DOORBELL +TADA = SoundType.TADA +FAIL = SoundType.FAIL +SPARKLE = SoundType.SPARKLE +FLOURISH = SoundType.FLOURISH +MOVE_FORWARD = SoundType.FORWARD +MOVE_REVERSE = SoundType.REVERSE +TURN_RIGHT = SoundType.RIGHT +TURN_LEFT = SoundType.LEFT +BLINKER = SoundType.BLINKER +CRASH = SoundType.CRASH +BRAKES = SoundType.BRAKES +HUAH = SoundType.HUAH +PICKUP = SoundType.PICKUP +CHEER = SoundType.CHEER +SENSING = SoundType.SENSING +DETECTED = SoundType.DETECTED +OBSTACLE = SoundType.OBSTACLE +LOOPING = SoundType.LOOPING +COMPLETE = SoundType.COMPLETE +PAUSE = SoundType.PAUSE +RESUME = SoundType.RESUME +SEND = SoundType.SEND +RECEIVE = SoundType.RECEIVE + + +ACT_HAPPY = SoundType.ACT_HAPPY +ACT_SAD = SoundType.ACT_SAD +ACT_EXCITED = 
SoundType.ACT_EXCITED +ACT_ANGRY = SoundType.ACT_ANGRY +ACT_SILLY = SoundType.ACT_SILLY + +SPORTS_BALL = VisionObject.SPORTS_BALL +'''A description for get_data indicating the AI ball objects to be returned''' +BLUE_BARREL = VisionObject.BLUE_BARREL +'''A description for get_data indicating the AI blue barrel objects to be returned''' +ORANGE_BARREL = VisionObject.ORANGE_BARREL +'''A description for get_data indicating the AI orange barrel objects to be returned''' +AIM_ROBOT = VisionObject.AIM_ROBOT +'''A description for get_data indicating the AI robot objects to be returned''' +TAG0 = VisionObject.TAG0 +'''A description for get_data indicating apriltags with id 0 to be returned''' +TAG1 = VisionObject.TAG1 +'''A description for get_data indicating apriltags with id 1 to be returned''' +TAG2 = VisionObject.TAG2 +'''A description for get_data indicating apriltags with id 2 to be returned''' +TAG3 = VisionObject.TAG3 +'''A description for get_data indicating apriltags with id 3 to be returned''' +TAG4 = VisionObject.TAG4 +'''A description for get_data indicating apriltags with id 4 to be returned''' +TAG5 = VisionObject.TAG5 +'''A description for get_data indicating apriltags with id 5 to be returned''' +TAG6 = VisionObject.TAG6 +'''A description for get_data indicating apriltags with id 6 to be returned''' +TAG7 = VisionObject.TAG7 +'''A description for get_data indicating apriltags with id 7 to be returned''' +TAG8 = VisionObject.TAG8 +'''A description for get_data indicating apriltags with id 8 to be returned''' +TAG9 = VisionObject.TAG9 +'''A description for get_data indicating apriltags with id 9 to be returned''' +TAG10 = VisionObject.TAG10 +'''A description for get_data indicating apriltags with id 10 to be returned''' +TAG11 = VisionObject.TAG11 +'''A description for get_data indicating apriltags with id 11 to be returned''' +TAG12 = VisionObject.TAG12 +'''A description for get_data indicating apriltags with id 12 to be returned''' +TAG13 = 
VisionObject.TAG13 +'''A description for get_data indicating apriltags with id 13 to be returned''' +TAG14 = VisionObject.TAG14 +'''A description for get_data indicating apriltags with id 14 to be returned''' +TAG15 = VisionObject.TAG15 +'''A description for get_data indicating apriltags with id 15 to be returned''' +TAG16 = VisionObject.TAG16 +'''A description for get_data indicating apriltags with id 16 to be returned''' +TAG17 = VisionObject.TAG17 +'''A description for get_data indicating apriltags with id 17 to be returned''' +TAG18 = VisionObject.TAG18 +'''A description for get_data indicating apriltags with id 18 to be returned''' +TAG19 = VisionObject.TAG19 +'''A description for get_data indicating apriltags with id 19 to be returned''' +TAG20 = VisionObject.TAG20 +'''A description for get_data indicating apriltags with id 20 to be returned''' +TAG21 = VisionObject.TAG21 +'''A description for get_data indicating apriltags with id 21 to be returned''' +TAG22 = VisionObject.TAG22 +'''A description for get_data indicating apriltags with id 22 to be returned''' +TAG23 = VisionObject.TAG23 +'''A description for get_data indicating apriltags with id 23 to be returned''' +TAG24 = VisionObject.TAG24 +'''A description for get_data indicating apriltags with id 24 to be returned''' +TAG25 = VisionObject.TAG25 +'''A description for get_data indicating apriltags with id 25 to be returned''' +TAG26 = VisionObject.TAG26 +'''A description for get_data indicating apriltags with id 26 to be returned''' +TAG27 = VisionObject.TAG27 +'''A description for get_data indicating apriltags with id 27 to be returned''' +TAG28 = VisionObject.TAG28 +'''A description for get_data indicating apriltags with id 28 to be returned''' +TAG29 = VisionObject.TAG29 +'''A description for get_data indicating apriltags with id 29 to be returned''' +TAG30 = VisionObject.TAG30 +'''A description for get_data indicating apriltags with id 30 to be returned''' +TAG31 = VisionObject.TAG31 +'''A description 
for get_data indicating apriltags with id 31 to be returned''' +TAG32 = VisionObject.TAG32 +'''A description for get_data indicating apriltags with id 32 to be returned''' +TAG33 = VisionObject.TAG33 +'''A description for get_data indicating apriltags with id 33 to be returned''' +TAG34 = VisionObject.TAG34 +'''A description for get_data indicating apriltags with id 34 to be returned''' +TAG35 = VisionObject.TAG35 +'''A description for get_data indicating apriltags with id 35 to be returned''' +TAG36 = VisionObject.TAG36 +'''A description for get_data indicating apriltags with id 36 to be returned''' +TAG37 = VisionObject.TAG37 +'''A description for get_data indicating apriltags with id 37 to be returned''' + +ALL_TAGS = VisionObject.ALL_TAGS +'''A description for get_data indicating any apriltag to be returned''' +ALL_VISION = VisionObject.ALL_VISION +'''A description for get_data indicating any object to be returned''' +ALL_COLORS = VisionObject.ALL_COLORS +'''A description for get_data indicating any color or code to be returned''' +ALL_CARGO = VisionObject.ALL_CARGO +'''A description for get_data indicating AI ball or barrel to be returned''' diff --git a/resources/python/vex/vex_messages.py b/resources/python/vex/vex_messages.py new file mode 100644 index 0000000..6bd9653 --- /dev/null +++ b/resources/python/vex/vex_messages.py @@ -0,0 +1,656 @@ +# ================================================================================================= +# Copyright (c) Innovation First 2025. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# ================================================================================================= +""" +AIM WebSocket - Messages + +This module contains definitions for the Websocket messages. 
+""" +class VexWebSocketCommand: + def __init__(self, cmd_id: str): + self.cmd_id = cmd_id + + def to_json(self) -> dict: + return { + "cmd_id": self.cmd_id + } + +#region General Commands +class ProgramInit(VexWebSocketCommand): + def __init__(self): + super().__init__("program_init") + + +#endregion General Commands + +#region Movement Commands +class MoveAt(VexWebSocketCommand): + def __init__(self, angle=0.0, speed=0.0, stacking_type=0): + super().__init__("drive") + self.angle = angle + self.speed = speed + self.stacking_type = stacking_type + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "angle": self.angle, + "speed": self.speed, + "stacking_type": self.stacking_type + }) + return base_data + +class MoveFor(VexWebSocketCommand): + def __init__(self, distance =0.0, angle=0.0, drive_speed=0.0, turn_speed=0.0, final_heading=0,stacking_type=0): + super().__init__("drive_for") + self.distance = distance + self.angle = angle + self.drive_speed = drive_speed + self.turn_speed = turn_speed + self.final_heading = final_heading + self.stacking_type = stacking_type + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "distance": self.distance, + "angle": self.angle, + "final_heading" : self.final_heading, + "drive_speed": self.drive_speed, + "turn_speed": self.turn_speed, + "stacking_type": self.stacking_type + }) + return base_data + +class MoveWithVector(VexWebSocketCommand): + def __init__(self, x=0, t=0, r=0): + super().__init__("drive_with_vector") + self.x = x + self.t = t + self.r = r + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "x": self.x, + "t": self.t, + "r": self.r + }) + return base_data + +class Turn(VexWebSocketCommand): + def __init__(self, turn_rate=0.0, stacking_type=0): + super().__init__("turn") + self.turn_rate = turn_rate + self.stacking_type = stacking_type + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "turn_rate": 
self.turn_rate, + "stacking_type": self.stacking_type + }) + return base_data + +class TurnTo(VexWebSocketCommand): + def __init__(self, heading=0.0, turn_rate=0.0, stacking_type=0): + super().__init__("turn_to") + self.heading = heading + self.turn_rate = turn_rate + self.stacking_type = stacking_type + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "heading": self.heading, + "turn_rate": self.turn_rate, + "stacking_type": self.stacking_type + }) + return base_data + +class TurnFor(VexWebSocketCommand): + def __init__(self, angle=0, turn_rate=0.0, stacking_type=0): + super().__init__("turn_for") + self.angle = angle + self.turn_rate = turn_rate + self.stacking_type = stacking_type + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "angle": self.angle, + "turn_rate": self.turn_rate, + "stacking_type": self.stacking_type + }) + return base_data + +class SpinWheels(VexWebSocketCommand): + def __init__(self, vel1=0, vel2=0, vel3=0): + super().__init__("spin_wheels") + self.vel1 = vel1 + self.vel2 = vel2 + self.vel3 = vel3 + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "vel1": self.vel1, + "vel2": self.vel2, + "vel3": self.vel3 + }) + return base_data + +class SetPose(VexWebSocketCommand): + def __init__(self, x=0, y=0): + super().__init__("set_pose") + self.x = x + self.y = y + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "x": self.x, + "y": self.y + }) + return base_data +#endregion Movement Commands + +#region Screen Commands +class ScreenPrint(VexWebSocketCommand): + def __init__(self, string=""): + super().__init__("lcd_print") + self.string = string + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "string": self.string + }) + return base_data + +class ScreenPrintAt(VexWebSocketCommand): + def __init__(self, string="", x=0, y=0, b_opaque=True): + super().__init__("lcd_print_at") + self.string = string + self.x = x + 
self.y = y + self.b_opaque = b_opaque + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "x": self.x, + "y": self.y, + "string": self.string, + "b_opaque": self.b_opaque + }) + return base_data + +class ScreenSetCursor(VexWebSocketCommand): + def __init__(self, row=0, col=0): + super().__init__("lcd_set_cursor") + self.row = row + self.col = col + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "row": self.row, + "col": self.col + }) + return base_data + +class ScreenSetOrigin(VexWebSocketCommand): + def __init__(self, x=0, y=0): + super().__init__("lcd_set_origin") + self.x = x + self.y = y + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "x": self.x, + "y": self.y + }) + return base_data + +class ScreenNextRow(VexWebSocketCommand): + def __init__(self): + super().__init__("lcd_next_row") + + def to_json(self): + return super().to_json() + +class ScreenClearRow(VexWebSocketCommand): + def __init__(self, row=0, r=0,g=0,b=0): + super().__init__("lcd_clear_row") + self.row = row + self.r = r + self.g = g + self.b = b + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "number": self.row, + "r": self.r, + "g": self.g, + "b": self.b + }) + return base_data +class ScreenClear(VexWebSocketCommand): + def __init__(self, r=0, g=0, b=0): + super().__init__("lcd_clear_screen") + self.r = r + self.g = g + self.b = b + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "r": self.r, + "g": self.g, + "b": self.b + }) + return base_data + +class ScreenSetFont(VexWebSocketCommand): + def __init__(self, fontname): + super().__init__("lcd_set_font") + self.fontname = fontname + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "fontname": self.fontname + }) + return base_data + +class ScreenSetPenWidth(VexWebSocketCommand): + def __init__(self, width): + super().__init__("lcd_set_pen_width") + self.width = width + + def 
to_json(self): + base_data = super().to_json() + base_data.update({ + "width": self.width + }) + return base_data + +class ScreenSetPenColor(VexWebSocketCommand): + def __init__(self, r=0, g=0, b=0): + super().__init__("lcd_set_pen_color") + self.r = r + self.g = g + self.b = b + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "r": self.r, + "g": self.g, + "b": self.b + }) + return base_data +class ScreenSetFillColor(VexWebSocketCommand): + def __init__(self, r=0, g=0, b=0, transparent=False): + super().__init__("lcd_set_fill_color") + self.r = r + self.g = g + self.b = b + self.b_transparency = transparent + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "r": self.r, + "g": self.g, + "b": self.b, + "b_transparency":self.b_transparency + }) + return base_data + +class ScreenDrawLine(VexWebSocketCommand): + def __init__(self, x1=0, y1=0, x2=0, y2=0): + super().__init__("lcd_draw_line") + self.x1 = x1 + self.y1 = y1 + self.x2 = x2 + self.y2 = y2 + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "x1": self.x1, + "y1": self.y1, + "x2": self.x2, + "y2": self.y2 + }) + return base_data + +class ScreenDrawRectangle(VexWebSocketCommand): + def __init__(self, x=0, y=0, width=0, height=0, r=0, g=0, b=0, transparent=False): + super().__init__("lcd_draw_rectangle") + self.x = x + self.y = y + self.width = width + self.height = height + self.r = r + self.g = g + self.b = b + self.b_transparency = transparent + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "x": self.x, + "y": self.y, + "width": self.width, + "height": self.height, + "r": self.r, + "g": self.g, + "b": self.b, + "b_transparency":self.b_transparency + }) + return base_data + +class ScreenDrawCircle(VexWebSocketCommand): + def __init__(self, x=0, y=0, radius=0, r=0, g=0, b=0, transparent=False): + super().__init__("lcd_draw_circle") + self.x = x + self.y = y + self.radius = radius + self.r = r + self.g 
= g + self.b = b + self.b_transparency = transparent + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "x": self.x, + "y": self.y, + "radius": self.radius, + "r": self.r, + "g": self.g, + "b": self.b, + "b_transparency":self.b_transparency + }) + return base_data + +class ScreenDrawPixel(VexWebSocketCommand): + def __init__(self, x=0, y=0): + super().__init__("lcd_draw_pixel") + self.x = x + self.y = y + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "x": self.x, + "y": self.y + }) + return base_data + +class ScreenDrawImageFromFile(VexWebSocketCommand): + def __init__(self, filename="", x=0, y=0): + super().__init__("lcd_draw_image_from_file") + self.filename = filename + self.x = x + self.y = y + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "filename": self.filename, + "x": self.x, + "y": self.y + }) + return base_data + +class ScreenSetClipRegion(VexWebSocketCommand): + def __init__(self, x=0, y=0, width=0, height=0): + super().__init__("lcd_set_clip_region") + self.x = x + self.y = y + self.width = width + self.height = height + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "x": self.x, + "y": self.y, + "width": self.width, + "height": self.height + }) + return base_data + +class ScreenShowEmoji(VexWebSocketCommand): + def __init__(self, name=0, look=0): + super().__init__("show_emoji") + self.name = name + self.look = look + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "name": self.name, + "look": self.look + }) + return base_data + +class ScreenHideEmoji(VexWebSocketCommand): + def __init__(self): + super().__init__("hide_emoji") + def to_json(self): + return super().to_json() + +class ScreenShowAivision(VexWebSocketCommand): + def __init__(self, name=0, look=0): + super().__init__("show_aivision") + def to_json(self): + return super().to_json() + +class ScreenHideAivision(VexWebSocketCommand): + def 
__init__(self, name=0, look=0): + super().__init__("hide_aivision") + def to_json(self): + return super().to_json() +#endregion Screen Commands + +#region Interial Commands +class InterialCalibrate(VexWebSocketCommand): + def __init__(self): + super().__init__("imu_calibrate") + + def to_json(self): + return super().to_json() + +class InterialSetCrashSensitivity(VexWebSocketCommand): + def __init__(self, sensitivity=0): + super().__init__("imu_set_crash_threshold") + self.sensitivity = sensitivity + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "sensitivity": self.sensitivity + }) + return base_data +#endregion Interial Commands + +#region Kicker Commands +class KickerKick(VexWebSocketCommand): + def __init__(self, kick_type=""): + super().__init__(kick_type) + + def to_json(self): + return super().to_json() +#endregion Kicker Commands + +#region Sound Commands +class SoundPlay(VexWebSocketCommand): + def __init__(self, name="", volume=0): + super().__init__("play_sound") + self.name = name + self.volume = volume + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "name": self.name, + "volume": self.volume + }) + return base_data +class SoundPlayFile(VexWebSocketCommand): + def __init__(self, name="", volume=0): + super().__init__("play_file") + self.name = name + self.volume = volume + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "name": self.name, + "volume": self.volume + }) + return base_data +class SoundPlayNote(VexWebSocketCommand): + def __init__(self, note=0, octave=0, duration=500, volume=0): + super().__init__("play_note") + self.note = note + self.octave = octave + self.duration = duration + self.volume = volume + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "note": self.note, + "octave": self.octave, + "duration": self.duration, + "volume": self.volume + }) + return base_data + +class SoundStop(VexWebSocketCommand): + def __init__(self): 
+ super().__init__("stop_sound") + + def to_json(self): + return super().to_json() + +#endregion Sound Commands + +#region LED Commands +class LedSet(VexWebSocketCommand): + def __init__(self, led="", r=0, g=0, b=0): + super().__init__("light_set") + self.led = led + self.r = r + self.g = g + self.b = b + + def to_json(self): + base_data = super().to_json() + base_data.update({ + self.led: { + "r": self.r, + "g": self.g, + "b": self.b + } + }) + return base_data + +#endregion LED Commands + +#region AiVision Commands +class VisionColorDescription(VexWebSocketCommand): + def __init__(self, id, r, g, b, hangle, hdsat ): + super().__init__("color_description") + self.id = id + self.r = r + self.g = g + self.b = b + self.hdsat = hdsat + self.hangle = hangle + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "id": self.id, + "red": self.r, + "green": self.g, + "blue": self.b, + "hangle": self.hangle, + "hdsat": self.hdsat + }) + return base_data + +class VisionCodeDescription(VexWebSocketCommand): + def __init__(self, id, c1, c2, *args): + super().__init__("code_description") + self.id = id + self.c1 = c1.id + self.c2 = c2.id + self.c3 = -1 + self.c4 = -1 + self.c5 = -1 + if( len(args) > 0 ): + self.c3 = args[0].id + if( len(args) > 1 ): + self.c4 = args[1].id + if( len(args) > 2 ): + self.c5 = args[2].id + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "id": self.id, + "c1": self.c1, + "c2": self.c2, + "c3": self.c3, + "c4": self.c4, + "c5": self.c5 + }) + return base_data +class VisionTagDetection(VexWebSocketCommand): + def __init__(self, enable=True): + super().__init__("tag_detection") + self.b_enable = enable + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "b_enable": self.b_enable + }) + return base_data + +class VisionColorDetection(VexWebSocketCommand): + def __init__(self, enable=True, merge=True): + super().__init__("color_detection") + self.b_enable = enable + self.b_merge 
= merge + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "b_enable": self.b_enable, + "b_merge": self.b_merge + }) + return base_data +class VisionModelDetection(VexWebSocketCommand): + def __init__(self, enable=True): + super().__init__("model_detection") + self.b_enable = enable + + def to_json(self): + base_data = super().to_json() + base_data.update({ + "b_enable": self.b_enable + }) + return base_data +#endregion AiVision Commands diff --git a/resources/python/vex/vex_types.py b/resources/python/vex/vex_types.py new file mode 100644 index 0000000..99e131d --- /dev/null +++ b/resources/python/vex/vex_types.py @@ -0,0 +1,389 @@ +# ================================================================================================= +# Copyright (c) Innovation First 2025. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# ================================================================================================= +""" +AIM WebSocket API - Types + +This module defines various types and enums used in the AIM WebSocket API. 
+""" +from enum import Enum +from typing import Union +import time +class vexEnum: + '''Base class for all enumerated types''' + value = 0 + name = "" + + def __init__(self, value, name): + self.value = value + self.name = name + + def __str__(self): + return self.name + + def __repr__(self): + return self.name + + def __hash__(self): + return self.value + +class SoundType(str, Enum): + DOORBELL = "DOORBELL" + TADA = "TADA" + FAIL = "FAIL" + SPARKLE = "SPARKLE" + FLOURISH = "FLOURISH" + FORWARD = "FORWARD" + REVERSE = "REVERSE" + RIGHT = "RIGHT" + LEFT = "LEFT" + BLINKER = "BLINKER" + CRASH = "CRASH" + BRAKES = "BRAKES" + HUAH = "HUAH" + PICKUP = "PICKUP" + #PLACE = "PLACE" + #KICK = "KICK" + CHEER = "CHEER" + SENSING = "SENSING" + DETECTED = "DETECTED" + OBSTACLE = "OBSTACLE" + LOOPING = "LOOPING" + COMPLETE = "COMPLETE" + PAUSE = "PAUSE" + RESUME = "RESUME" + SEND = "SEND" + RECEIVE = "RECEIVE" + #CHIRP = "CHIRP" + + ACT_HAPPY = "ACT_HAPPY" + ACT_SAD = "ACT_SAD" + ACT_EXCITED = "ACT_EXCITED" + ACT_ANGRY = "ACT_ANGRY" + ACT_SILLY = "ACT_SILLY" + +class FontType(str, Enum): + MONO20 = "MONO20" + MONO24 = "MONO24" + MONO30 = "MONO30" + MONO36 = "MONO36" + MONO40 = "MONO40" + MONO60 = "MONO60" + PROP20 = "PROP20" + PROP24 = "PROP24" + PROP30 = "PROP30" + PROP36 = "PROP36" + PROP40 = "PROP40" + PROP60 = "PROP60" + MONO15 = "MONO15" + MONO12 = "MONO12" + +class KickType(str, Enum): + SOFT = "kick_soft" + MEDIUM = "kick_medium" + HARD = "kick_hard" + +class AxisType(Enum): + """The defined units for inertial sensor axis.""" + X_AXIS = 0 + Y_AXIS = 1 + Z_AXIS = 2 + +class TurnType(Enum): + LEFT = 0 + RIGHT = 1 +class OrientationType: + '''The defined units for inertial sensor orientation.''' + ROLL = 0 + PITCH = 1 + YAW = 2 +class AccelerationType: + '''The defined units for inertial sensor acceleration.''' + FORWARD = 0 + RIGHTWARD = 1 + DOWNWARD = 2 + +class PercentUnits: + '''The measurement units for percentage values.''' + class PercentUnits(vexEnum): + pass + 
PERCENT = PercentUnits(0, "PERCENT") + '''A percentage unit that represents a value from 0% to 100%''' +class RotationUnits: + '''The measurement units for rotation values.''' + class RotationUnits(vexEnum): + pass + DEG = RotationUnits(0, "DEG") + '''A rotation unit that is measured in degrees.''' + REV = RotationUnits(1, "REV") + '''A rotation unit that is measured in revolutions.''' + RAW = RotationUnits(99, "RAW") + '''A rotation unit that is measured in raw data form.''' +class DriveVelocityUnits: + '''The measurement units for drive velocity values.''' + class DriveVelocityUnits(vexEnum): + pass + PERCENT = DriveVelocityUnits(0, "PCT") + '''A velocity unit that is measured in percentage.''' + MMPS = DriveVelocityUnits(1, "MMPS") + '''A velocity unit that is measured in mm per second.''' +class TurnVelocityUnits: + '''The measurement units for turn velocity values.''' + class TurnVelocityUnits(vexEnum): + pass + PERCENT = TurnVelocityUnits(0, "PCT") + '''A velocity unit that is measured in percentage.''' + DPS = TurnVelocityUnits(1, "DPS") + '''A velocity unit that is measured in degrees per second.''' + +class TimeUnits: + '''The measurement units for time values.''' + class TimeUnits(vexEnum): + pass + SECONDS = TimeUnits(0, "SECONDS") + '''A time unit that is measured in seconds.''' + MSEC = TimeUnits(1, "MSEC") + '''A time unit that is measured in milliseconds.''' +class DistanceUnits: + '''The measurement units for distance values.''' + class DistanceUnits(vexEnum): + pass + MM = DistanceUnits(0, "MM") + '''A distance unit that is measured in millimeters.''' + IN = DistanceUnits(1, "IN") + '''A distance unit that is measured in inches.''' + CM = DistanceUnits(2, "CM") + '''A distance unit that is measured in centimeters.''' +class VoltageUnits: + '''The measurement units for voltage values.''' + class VoltageUnits(vexEnum): + pass + VOLT = VoltageUnits(0, "VOLT") + '''A voltage unit that is measured in volts.''' + MV = VoltageUnits(0, "mV") + '''A voltage 
unit that is measured in millivolts.''' + +# ---------------------------------------------------------- +# globals +# ---------------------------------------------------------- +PERCENT = PercentUnits.PERCENT +'''A percentage unit that represents a value from 0% to 100%''' +LEFT = TurnType.LEFT +'''A turn unit that is defined as left turning.''' +RIGHT = TurnType.RIGHT +'''A turn unit that is defined as right turning.''' +DEGREES = RotationUnits.DEG +'''A rotation unit that is measured in degrees.''' +TURNS = RotationUnits.REV +'''A rotation unit that is measured in revolutions.''' +SECONDS = TimeUnits.SECONDS +'''A time unit that is measured in seconds.''' +MSEC = TimeUnits.MSEC +'''A time unit that is measured in milliseconds.''' +INCHES = DistanceUnits.IN +'''A distance unit that is measured in inches.''' +MM = DistanceUnits.MM +'''A distance unit that is measured in millimeters.''' +VOLT = VoltageUnits.VOLT +'''A voltage unit that is measured in volts.''' +MV = VoltageUnits.MV +'''A voltage unit that is measured in millivolts.''' +MMPS = DriveVelocityUnits.MMPS +'''units of mm per second''' +DPS = TurnVelocityUnits.DPS +'''units of degrees per second''' +OFF = False +'''used to turn off an LED''' + +vexnumber = Union[int, float] +# drivetrain move functions take either DriveVelocity or percentage units +DriveVelocityPercentUnits = Union[DriveVelocityUnits.DriveVelocityUnits, PercentUnits.PercentUnits] +# drivetrain turn functions take either TurnVelocity or percentage units +TurnVelocityPercentUnits = Union[TurnVelocityUnits.TurnVelocityUnits, PercentUnits.PercentUnits] + +class LightType(str, Enum): + LED1 = "light1" + LED2 = "light2" + LED3 = "light3" + LED4 = "light4" + LED5 = "light5" + LED6 = "light6" + ALL_LEDS = "all" + +class Color: + '''### Color class - create a new color + + This class is used to create instances of color objects + + #### Arguments: + value : The color value, can be specified in various ways, see examples. 
+ + #### Returns: + An instance of the Color class + + #### Examples: + # create blue using hex value\\ + c = Color(0x0000ff)\n + # create blue using r, g, b values\\ + c = Color(0, 0, 255)\n + # create blue using web string\\ + c = Color("#00F")\n + # create blue using web string (alternate)\\ + c = Color("#0000FF")\n + # create red using an existing object\\ + c = Color(Color.RED) + ''' + class DefinedColor: + def __init__(self, value, transparent=False): + self.value = value + self.transparent = transparent + + BLACK = DefinedColor(0x000000) + '''predefined Color black''' + WHITE = DefinedColor(0xFFFFFF) + '''predefined Color white''' + RED = DefinedColor(0xFF0000) + '''predefined Color red''' + GREEN = DefinedColor(0x00FF00) + '''predefined Color green''' + BLUE = DefinedColor(0x001871) + '''predefined Color blue''' + YELLOW = DefinedColor(0xFFFF00) + '''predefined Color yellow''' + ORANGE = DefinedColor(0xFF8500) + '''predefined Color orange''' + PURPLE = DefinedColor(0xFF00FF) + '''predefined Color purple''' + CYAN = DefinedColor(0x00FFFF) + '''predefined Color cyan''' + TRANSPARENT = DefinedColor(0x000000, True) + '''predefined Color transparent''' + + def __init__(self, *args): + self.transparent = False + if len(args) == 1 and isinstance(args[0], int): + self.value: int = args[0] + elif len(args) == 3 and all(isinstance(arg, int) for arg in args): + self.value = ((args[0] & 0xFF) << 16) + ((args[1] & 0xFF) << 8) + (args[2] & 0xFF) + else: + raise TypeError("bad parameters") + + def set_rgb(self, *args): + '''### change existing Color instance to new rgb value + + #### Arguments: + value : The color value, can be specified in various ways, see examples. 
+ + #### Returns: + integer value representing the color + + #### Examples: + # create a color that is red + c = Color(0xFF0000) + # change color to blue using single value + c.rgb(0x0000FF) + # change color to green using three values + c.rgb(0, 255, 0) + ''' + if len(args) == 1 and isinstance(args[0], int): + self.value = args[0] + if len(args) == 3 and all(isinstance(arg, int) for arg in args): + self.value = ((args[0] & 0xFF) << 16) + ((args[1] & 0xFF) << 8) + (args[2] & 0xFF) + + # ---------------------------------------------------------- + +def sleep(duration: vexnumber, units=TimeUnits.MSEC): + '''### delay the current thread for the provided number of seconds or milliseconds. + + #### Arguments: + duration: The number of seconds or milliseconds to sleep for + units: The units of duration, optional, default is milliseconds + + #### Returns: + None + ''' + if units == TimeUnits.MSEC: + time.sleep(duration / 1000) + else: + time.sleep(duration) + +def wait(duration: vexnumber, units=TimeUnits.MSEC): + '''### delay the current thread for the provided number of seconds or milliseconds. 
+ + #### Arguments: + duration: The number of seconds or milliseconds to sleep for + units: The units of duration, optional, default is milliseconds + + #### Returns: + None + ''' + if units == TimeUnits.MSEC: + time.sleep(duration / 1000) + else: + time.sleep(duration) + +class EmojiType: + class EmojiType(vexEnum): + pass + EXCITED = EmojiType( 0, "EXCITED") + CONFIDENT = EmojiType( 1, "CONFIDENT") + SILLY = EmojiType( 2, "SILLY") + AMAZED = EmojiType( 3, "AMAZED") + STRONG = EmojiType( 4, "STRONG") + THRILLED = EmojiType( 5, "THRILLED") + HAPPY = EmojiType( 6, "HAPPY") + PROUD = EmojiType( 7, "PROUD") + LAUGHING = EmojiType( 8, "LAUGHING") + OPTIMISTIC = EmojiType(9, "OPTIMISTIC") + DETERMINED = EmojiType(10, "DETERMINED") + AFFECTIONATE = EmojiType(11, "AFFECTIONATE") + CALM = EmojiType(12, "CALM") + QUIET = EmojiType(13, "QUIET") + SHY = EmojiType(14, "SHY") + CHEERFUL = EmojiType(15, "CHEERFUL") + LOVED = EmojiType(16, "LOVED") + SURPRISED = EmojiType(17, "SURPRISED") + THINKING = EmojiType(18, "THINKING") + TIRED = EmojiType(19, "TIRED") + CONFUSED = EmojiType(20, "CONFUSED") + BORED = EmojiType(21, "BORED") + EMBARRASSED = EmojiType(22, "EMBARRASSED") + WORRIED = EmojiType(23, "WORRIED") + SAD = EmojiType(24, "SAD") + SICK = EmojiType(25, "SICK") + DISAPPOINTED = EmojiType(26, "DISAPPOINTED") + NERVOUS = EmojiType(27, "NERVOUS") + ANNOYED = EmojiType(30, "ANNOYED") + STRESSED = EmojiType(31, "STRESSED") + ANGRY = EmojiType(32, "ANGRY") + FRUSTRATED = EmojiType(33, "FRUSTRATED") + JEALOUS = EmojiType(34, "JEALOUS") + SHOCKED = EmojiType(35, "SHOCKED") + FEAR = EmojiType(36, "FEAR") + DISGUST = EmojiType(37, "DISGUST") + +Emoji = EmojiType + +# ---------------------------------------------------------- + +class EmojiLookType: + class EmojiLookType(vexEnum): + pass + LOOK_FORWARD = EmojiLookType( 0, "LOOK_FORWARD") + LOOK_RIGHT = EmojiLookType( 1, "LOOK_RIGHT") + LOOK_LEFT = EmojiLookType( 2, "LOOK_LEFT") + +EmojiLook = EmojiLookType + +class 
StackingType(Enum): + STACKING_OFF = 0 + STACKING_MOVE_RELATIVE = 1 + STACKING_MOVE_GLOBAL = 2 + +class SensitivityType: + class SensitivityType(vexEnum): + pass + LOW = SensitivityType( 0, "LOW") + MEDIUM = SensitivityType( 1, "MEDIUM") + HIGH = SensitivityType( 2, "HIGH") diff --git a/setup-venv.sh b/setup-venv.sh new file mode 100644 index 0000000..1e2b9fd --- /dev/null +++ b/setup-venv.sh @@ -0,0 +1,38 @@ +#!/bin/bash + +echo "Creating and configuring Python virtual environment..." + +# Check if Python is installed +if ! command -v python &> /dev/null && ! command -v python3 &> /dev/null; then + echo "Python is not installed. Please run the system setup script first." + exit 1 +fi + +# Use python or python3 +PYTHON_CMD=$(command -v python || command -v python3) + +# Create venv if it doesn't exist +if [ ! -d "venv" ]; then + echo "Creating venv..." + $PYTHON_CMD -m venv venv +else + echo "venv already exists." +fi + +# Activate the venv +echo "Activating virtual environment..." +source venv/Scripts/activate + +# Install dependencies +if [ -f "requirements.txt" ]; then + echo "Installing from requirements.txt..." + pip install --upgrade pip + pip install -r requirements.txt +else + echo "No requirements.txt found. Installing known dependencies..." + pip install --upgrade pip + pip install websocket-client +fi + +echo "Setup complete. 
To activate later, run:" +echo "source venv/Scripts/activate" diff --git a/src/main/index copy.js b/src/main/index copy.js index 4fc91a8..d7ba9c1 100644 --- a/src/main/index copy.js +++ b/src/main/index copy.js @@ -52,7 +52,7 @@ async function createWindow() { // Reload try { require("electron-reloader")(module); - } catch (_) {} + } catch (_) { } // Errors are thrown if the dev tools are opened // before the DOM is ready win.webContents.once("dom-ready", async () => { diff --git a/src/main/index.js b/src/main/index.js index c2a3429..789a195 100644 --- a/src/main/index.js +++ b/src/main/index.js @@ -1,6 +1,9 @@ const { app, BrowserWindow, ipcMain, dialog } = require("electron"); +const { spawn } = require('child_process'); const path = require("path"); const tello = require("./tello.js"); +const WebSocket = require('ws'); +const waitOn = require('wait-on'); const isProduction = process.env.NODE_ENV === "production" || !process || !process.env || !process.env.NODE_ENV; @@ -18,7 +21,73 @@ let bleCallback = null; // be closed automatically when the JavaScript object is garbage collected. 
let win; +let pythonProcess; + async function createWindow() { + // ---- Start Python VEXServer ---- + let pythonExe, script; + + if (isProduction) { + // Check if we have a bundled Python executable + const bundledPythonPath = path.join(process.resourcesPath, 'python', 'VEXServer.exe'); + const fs = require('fs'); + + if (fs.existsSync(bundledPythonPath)) { + // Use bundled Python executable + pythonExe = bundledPythonPath; + script = null; + } else { + // Fallback to system Python with bundled script + pythonExe = 'python'; + script = path.join(process.resourcesPath, 'python', 'VEXServer.py'); + } + } else { + // Development mode - use virtual environment Python + const venvPython = path.join(__dirname, '..', '..', '.venv', 'Scripts', 'python.exe'); + const fs = require('fs'); + + if (fs.existsSync(venvPython)) { + pythonExe = venvPython; + console.log('Using virtual environment Python:', venvPython); + } else { + pythonExe = 'python'; + console.log('Virtual environment not found, using system Python'); + } + // Use development version that doesn't require physical robot + script = path.join(__dirname, '..', '..', 'resources', 'python', 'VEXServer_dev.py'); + } + + try { + pythonProcess = script ? 
spawn(pythonExe, [script]) : spawn(pythonExe); + + pythonProcess.stdout.on('data', (data) => { + console.log(`PYTHON: ${data}`); + }); + + pythonProcess.stderr.on('data', (data) => { + console.error(`PYTHON ERROR: ${data}`); + }); + + pythonProcess.on('close', (code) => { + console.log(`Python process exited with code ${code}`); + }); + + pythonProcess.on('error', (error) => { + console.error(`Failed to start Python process: ${error.message}`); + }); + } catch (error) { + console.error(`Error starting Python server: ${error.message}`); + } + // ---- End Python VEXServer ---- + + // Wait for the Python WebSocket server to be ready (up to 10 seconds) + try { + await waitOn({ resources: ['tcp:127.0.0.1:8777'], timeout: 10000 }); + console.log('Python WebSocket server is ready'); + } catch (error) { + console.error('Failed to connect to Python WebSocket server:', error.message); + } + // If you'd like to set up auto-updating for your app, // I'd recommend looking at https://github.com/iffy/electron-updater-example // to use the method most suitable for you. 
@@ -42,13 +111,18 @@ async function createWindow() { } }); + // Add the event listener here, AFTER creating the window + win.webContents.on('did-fail-load', (event, errorCode, errorDescription) => { + console.error('Window failed to load:', errorDescription); + }); + // Load the url of the dev server if in development mode // Load the index.html when not in development if (isDevelopment) { win.loadURL(selfHost); } else { - //win.loadURL(`${Protocol.scheme}://rse/index.html`); - win.loadURL(`file://${path.join(__dirname, "../renderer/index.html")}`); + // Fix the path for production builds + win.loadFile(path.join(__dirname, "../../build/renderer/index.html")); } // Only do these things when in development @@ -56,7 +130,7 @@ async function createWindow() { // Reload try { require("electron-reloader")(module); - } catch (_) {} + } catch (_) { } // Errors are thrown if the dev tools are opened // before the DOM is ready win.webContents.once("dom-ready", async () => { @@ -125,33 +199,55 @@ async function createWindow() { } }); + const ws = new WebSocket('ws://127.0.0.1:8777'); + + ws.on('open', function open() { + console.log('WebSocket connection opened'); + }); + + ws.on('error', function error(err) { + console.error('WebSocket error:', err); + }); + + function sendCommand(command) { + ws.send(JSON.stringify(command)); + console.log("Command sent:", command); + // Remove all the timing logic + } + ipcMain.on("drone-up", (event, response) => { let recent_val = parseInt(response); - let upVal = recent_val > maxSpeed ? maxSpeed : recent_val < minSpeed ? minSpeed : recent_val; - console.log("drone up", upVal, "sent", response); - tello.up(upVal); + let rightVal = recent_val > maxSpeed ? maxSpeed : recent_val < minSpeed ? 
minSpeed : recent_val; + console.log("Sphero right", rightVal, "sent", response); + const moveCommand = { action: "move", distance: response, heading: 90 };//For Vex + sendCommand(moveCommand); }); ipcMain.on("drone-down", (event, response) => { let recent_val = parseInt(response); let downVal = recent_val > maxSpeed ? maxSpeed : recent_val < minSpeed ? minSpeed : recent_val; - console.log("drone down", downVal, "sent", response); - tello.down(downVal); + console.log("Sphero Left", downVal, "sent", response); + const moveCommand = { action: "move", distance: response, heading: 270 };//For Vex + sendCommand(moveCommand); }); ipcMain.on("drone-forward", (event, response) => { let recent_val = parseInt(response); - _maxSpeed = 80; - let val = recent_val > _maxSpeed ? _maxSpeed : recent_val < minSpeed ? minSpeed : recent_val; - console.log("drone forward", val, "sent", response); - tello.forward(val); + let forwardVal = recent_val > maxSpeed ? maxSpeed : recent_val < minSpeed ? minSpeed : recent_val; + console.log("drone forward", forwardVal, "sent", response); + const moveCommand = { action: "move", distance: response, heading: 0 };//For Vex + sendCommand(moveCommand); }); ipcMain.on("drone-back", (event, response) => { let recent_val = parseInt(response); - let val = recent_val > maxSpeed ? maxSpeed : recent_val < minSpeed ? minSpeed : recent_val; - console.log("drone back", val, "sent", response); - tello.back(val); + let backVal = recent_val > maxSpeed ? maxSpeed : recent_val < minSpeed ? minSpeed : recent_val; + console.log("drone back", backVal, "sent", response); + // let val = recent_val > maxSpeed ? maxSpeed : recent_val < minSpeed ? 
minSpeed : recent_val; + // console.log("drone back", val, "sent", response); + const moveCommand = { action: "move", distance: response, heading: 180 };//For Vex + sendCommand(moveCommand); + // tello.back(val); }); ipcMain.on("cw", (event, response) => { @@ -168,30 +264,67 @@ async function createWindow() { tello.ccw(recent_val); }); - let isUp = false; + //Vex commands + ipcMain.on("vex-turn-left", (event, degrees) => { + console.log(`[VEX] Turn left ${degrees}°`); + const turnCommand = { action: "turn_left", degrees: degrees }; + sendCommand(turnCommand); + }); - ipcMain.on("manual-control", (event, response) => { - //console.log("index", response); - switch (response) { - case "takeoff": - isUp = true; - tello.takeoff(); - break; - case "land": - isUp = true; - tello.land(); - break; - case "up": - tello.up(20); - break; - case "down": - tello.down(20); - break; - default: - break; - } + ipcMain.on("vex-turn-right", (event, degrees) => { + console.log(`[VEX] Turn right ${degrees}°`); + const turnCommand = { action: "turn_right", degrees: degrees }; + sendCommand(turnCommand); + }); + + ipcMain.on("vex-forward", (event, distance) => { + console.log(`[VEX] Move forward ${distance} inches`); + const moveCommand = { action: "move", distance: distance, heading: 0 }; + sendCommand(moveCommand); + }); + + ipcMain.on("vex-back", (event, distance) => { + console.log(`[VEX] Move back ${distance} inches`); + const moveCommand = { action: "move", distance: distance, heading: 180 }; + sendCommand(moveCommand); + }); + + ipcMain.on("vex-left", (event, distance) => { + console.log(`[VEX] Move left ${distance} inches`); + const moveCommand = { action: "move", distance: distance, heading: 270 }; + sendCommand(moveCommand); }); + ipcMain.on("vex-right", (event, distance) => { + console.log(`[VEX] Move right ${distance} inches`); + const moveCommand = { action: "move", distance: distance, heading: 90 }; + sendCommand(moveCommand); + }); + + let isUp = false; + + // 
ipcMain.on("manual-control", (event, response) => { + // //console.log("index", response); + // switch (response) { + // case "takeoff": + // isUp = true; + // tello.takeoff(); + // break; + // case "land": + // isUp = true; + // tello.land(); + // break; + // case "up": + // tello.up(20); + // break; + // case "down": + // tello.down(20); + // break; + // default: + // break; + // } + // }); + ipcMain.on("control-signal", (event, response) => { /* @@ -209,6 +342,11 @@ async function createWindow() { } */ }); + + ipcMain.on("send-command", (event, command) => { + console.log("Received command from renderer:", command); + sendCommand(command); // Use the existing sendCommand function + }); } // This method will be called when Electron has finished @@ -218,6 +356,11 @@ app.on("ready", createWindow); // Quit when all windows are closed. app.on("window-all-closed", () => { + // Clean up Python process + if (pythonProcess && !pythonProcess.killed) { + pythonProcess.kill(); + } + // On macOS it is common for applications and their menu bar // to stay active until the user quits explicitly with Cmd + Q if (process.platform !== "darwin") { @@ -225,6 +368,13 @@ app.on("window-all-closed", () => { } }); +app.on("before-quit", () => { + // Clean up Python process before quitting + if (pythonProcess && !pythonProcess.killed) { + pythonProcess.kill(); + } +}); + app.on("activate", () => { // On macOS it's common to re-create a window in the app when the // dock icon is clicked and there are no other windows open. 
diff --git a/src/main/preload.js b/src/main/preload.js index 24737c1..4a58353 100644 --- a/src/main/preload.js +++ b/src/main/preload.js @@ -25,6 +25,13 @@ process.once("loaded", () => { droneDown: (response) => ipcRenderer.send("drone-down", response), droneForward: (response) => ipcRenderer.send("drone-forward", response), droneBack: (response) => ipcRenderer.send("drone-back", response), + //Vex commands + vexTurnLeft: (degrees) => ipcRenderer.send("vex-turn-left", degrees), + vexTurnRight: (degrees) => ipcRenderer.send("vex-turn-right", degrees), + vexForward: (distance) => ipcRenderer.send("vex-forward", distance), + vexBack: (distance) => ipcRenderer.send("vex-back", distance), + vexLeft: (distance) => ipcRenderer.send("vex-left", distance), + vexRight: (distance) => ipcRenderer.send("vex-right", distance), cw: (response) => ipcRenderer.send("cw", response), ccw: (response) => ipcRenderer.send("ccw", response), getBLEList: (callback) => ipcRenderer.on("device_list", callback), @@ -32,6 +39,8 @@ process.once("loaded", () => { selectBluetoothDevice: (deviceID) => ipcRenderer.send("select-ble-device", deviceID), cancelBluetoothRequest: (callback) => ipcRenderer.send("cancel-bluetooth-request", callback), bluetoothPairingRequest: (callback) => ipcRenderer.on("bluetooth-pairing-request", callback), - bluetoothPairingResponse: (response) => ipcRenderer.send("bluetooth-pairing-response", response) + bluetoothPairingResponse: (response) => ipcRenderer.send("bluetooth-pairing-response", response), + // Expose sendCommand to the renderer process + sendCommand: (command) => ipcRenderer.send("send-command", command), }); }); diff --git a/src/renderer/js/blockly-main.js b/src/renderer/js/blockly-main.js index 90f4ddf..0794e1a 100644 --- a/src/renderer/js/blockly-main.js +++ b/src/renderer/js/blockly-main.js @@ -9,19 +9,34 @@ import Interpreter from "js-interpreter"; import { InterpreterAPI } from "./interpreter-api.js"; Blockly.setLocale(locale); -let { cat_logic, 
cat_loops, cat_math, cat_sep, cat_data, cat_drone } = Categories; +let { cat_logic, cat_loops, cat_math, cat_sep, cat_data, cat_drone, cat_vex } = Categories; export const BlocklyMain = class { constructor() { createCustomBlocks(); this.interpreter = null; - this.runner = null; // may need to use window here + this.runner = null; this.latestCode = ""; - let _toolbox = new Toolbox([cat_logic, cat_loops, cat_math, cat_sep, cat_data, cat_drone]); + let cat_robot = { + name: "Robot Controls", + colour: 160, + modules: ["move", "led_control"] + }; + + let _toolbox = new Toolbox([ + cat_logic, + cat_loops, + cat_math, + cat_sep, + cat_data, + cat_drone, + cat_vex, // Add VEX category here + cat_robot + ]); this.workspace = Blockly.inject("blocklyDiv", { - toolbox: _toolbox.toString() + toolbox: _toolbox.toString(), }); this.registerCustomToolbox(); @@ -114,7 +129,10 @@ export const BlocklyMain = class { registerCustomToolbox = () => { // Triggers everytime category opens this.workspace.registerToolboxCategoryCallback("DATA", (ws) => { - return this.createCustomToolBox(["filter_signal"]); + return this.createCustomToolBox([ + "filter_signal", + "muscle_energy", + ]); }); }; diff --git a/src/renderer/js/categories.js b/src/renderer/js/categories.js index c0c7017..f6ce1cc 100644 --- a/src/renderer/js/categories.js +++ b/src/renderer/js/categories.js @@ -24,12 +24,12 @@ export const Categories = { cat_data: { name: "Data", colour: 330, - modules: ["delta", "theta", "alpha", "beta", "gamma", "print"] + modules: ["delta", "theta", "alpha", "beta", "gamma", "print", "muscle_energy"] }, cat_drone: { name: "Drone", - colour: 70, + colour: 230, modules: [ "drone_up", "drone_down", @@ -42,6 +42,20 @@ export const Categories = { ] }, + // Add the new VEX category + cat_vex: { + name: "VEX", + colour: 70, + modules: [ + "vex_forward", + "vex_back", + "vex_left", + "vex_right", + "vex_turn_left", + "vex_turn_right" + ] + }, + cat_vars: { name: "Variables", colour: 100, diff --git 
a/src/renderer/js/customblock.js b/src/renderer/js/customblock.js index ab26ff1..7e1ae07 100644 --- a/src/renderer/js/customblock.js +++ b/src/renderer/js/customblock.js @@ -175,6 +175,32 @@ export const createCustomBlocks = function () { var code = `blockly_print(${text});\n`; return code; }; + + // 1. Define the block’s JSON + const muscleEnergyJson = { + type: "muscle_energy", + message0: "muscle energy", + output: "Number", + colour: 230, + tooltip: "Current EMG muscle‐energy value", + helpUrl: "" + }; + + // 2. Tell Blockly about the block + Blockly.Blocks["muscle_energy"] = { + init: function () { + this.jsonInit(muscleEnergyJson); + } + }; + + // 3. Generate JS for the block: read from window.filteredSample + javascriptGenerator.forBlock["muscle_energy"] = function (block) { + // Call the host-registered function: + return ["getMuscleEnergy()", Order.NONE]; + }; + + + }; /// @@ -212,11 +238,11 @@ javascriptGenerator.forBlock["wait_seconds"] = function (block) { /* droneUp() */ var droneUp = { type: "drone_up", - message0: "up %1 cm", + message0: "Right %1 cm", args0: [{ type: "input_value", name: "value", check: "Number" }], previousStatement: null, nextStatement: null, - colour: drone_blocks_color + colour: 230 }; Blockly.Blocks["drone_up"] = { @@ -236,11 +262,11 @@ javascriptGenerator.forBlock["drone_up"] = function (block, generator) { /* droneDown() */ var droneDown = { type: "drone_down", - message0: "down %1 cm", + message0: "Left %1 cm", args0: [{ type: "input_value", name: "value", check: "Number" }], previousStatement: null, nextStatement: null, - colour: drone_blocks_color + colour: 230 }; Blockly.Blocks["drone_down"] = { @@ -263,7 +289,7 @@ var droneForward = { args0: [{ type: "input_value", name: "value", check: "Number" }], previousStatement: null, nextStatement: null, - colour: drone_blocks_color + colour: 230 }; Blockly.Blocks["drone_forward"] = { @@ -286,7 +312,7 @@ var droneBack = { args0: [{ type: "input_value", name: "value", check: 
"Number" }], previousStatement: null, nextStatement: null, - colour: drone_blocks_color + colour: 230 }; Blockly.Blocks["drone_back"] = { @@ -309,7 +335,7 @@ var ccw = { args0: [{ type: "input_value", name: "value", check: "Number" }], previousStatement: null, nextStatement: null, - colour: drone_blocks_color + colour: 230 }; Blockly.Blocks["ccw"] = { @@ -332,7 +358,7 @@ var cw = { args0: [{ type: "input_value", name: "value", check: "Number" }], previousStatement: null, nextStatement: null, - colour: drone_blocks_color + colour: 230 }; Blockly.Blocks["cw"] = { @@ -355,7 +381,7 @@ var takeoff = { args0: [], previousStatement: null, nextStatement: null, - colour: drone_blocks_color + colour: 230 }; Blockly.Blocks["takeoff"] = { @@ -376,7 +402,7 @@ var land = { args0: [], previousStatement: null, nextStatement: null, - colour: drone_blocks_color + colour: 100 }; Blockly.Blocks["land"] = { @@ -389,3 +415,217 @@ javascriptGenerator.forBlock["land"] = function (block, generator) { var code = `land();\n`; return code; }; + +var moveBlock = { + type: "move", + message0: "move %1 cm at heading %2°", + args0: [ + { type: "field_number", name: "DISTANCE", value: 100, min: 0 }, + { type: "field_number", name: "HEADING", value: 0, min: 0, max: 360 } + ], + previousStatement: null, + nextStatement: null, + colour: 160 +}; + +Blockly.Blocks["move"] = { + init: function () { + this.jsonInit(moveBlock); + } +}; + +javascriptGenerator.forBlock["move"] = function (block) { + var distance = block.getFieldValue("DISTANCE"); + var heading = block.getFieldValue("HEADING"); + var code = `electronAPI.sendCommand({ action: "move", distance: ${distance}, heading: ${heading} });\n`; + console.log("Generated code for move block:", code); + return code; +}; + +// LED Control Block +var ledControl = { + type: "led_control", + message0: "turn LED %1", + args0: [ + { + type: "field_dropdown", + name: "COLOR", + options: [ + ["Red", "RED"], + ["Green", "GREEN"], + ["Blue", "BLUE"], + ["White", 
"WHITE"], + ["Yellow", "YELLOW"], + ["Orange", "ORANGE"], + ["Purple", "PURPLE"], + ["Cyan", "CYAN"] + ] + } + ], + previousStatement: null, + nextStatement: null, + colour: 160 +}; + +Blockly.Blocks["led_control"] = { + init: function () { + this.jsonInit(ledControl); + } +}; + +javascriptGenerator.forBlock["led_control"] = function (block) { + var color = block.getFieldValue("COLOR"); + var code = `electronAPI.sendCommand({ action: "led_on", color: "${color}" });\n`; + console.log("Generated code for LED block:", code); + return code; +}; + +// VEX Turn Left Block (simplified - no degrees input) +var vexTurnLeft = { + type: "vex_turn_left", + message0: "turn left", + args0: [], // Remove the degrees input + previousStatement: null, + nextStatement: null, + colour: 70 +}; + +Blockly.Blocks["vex_turn_left"] = { + init: function () { + this.jsonInit(vexTurnLeft); + } +}; + +javascriptGenerator.forBlock["vex_turn_left"] = function (block, generator) { + // Use a default value of 90 degrees since there's no input + return `vex_turn_left(90);\n`; +}; + +// VEX Turn Right Block (simplified - no degrees input) +var vexTurnRight = { + type: "vex_turn_right", + message0: "turn right", + args0: [], // Remove the degrees input + previousStatement: null, + nextStatement: null, + colour: 70 +}; + +Blockly.Blocks["vex_turn_right"] = { + init: function () { + this.jsonInit(vexTurnRight); + } +}; + +javascriptGenerator.forBlock["vex_turn_right"] = function (block, generator) { + // Use a default value of 90 degrees since there's no input + return `vex_turn_right(90);\n`; +}; + +// VEX Forward Block +var vexForward = { + type: "vex_forward", + message0: "forward %1 inches", + args0: [ + { + type: "input_value", + name: "distance", + check: "Number" + } + ], + previousStatement: null, + nextStatement: null, + colour: 70 +}; + +Blockly.Blocks["vex_forward"] = { + init: function () { + this.jsonInit(vexForward); + } +}; + +javascriptGenerator.forBlock["vex_forward"] = function 
(block, generator) { + var distance = generator.valueToCode(block, "distance", Order.ATOMIC) || "4"; + return `vex_forward(${distance});\n`; +}; + +// VEX Back Block +var vexBack = { + type: "vex_back", + message0: "back %1 inches", + args0: [ + { + type: "input_value", + name: "distance", + check: "Number" + } + ], + previousStatement: null, + nextStatement: null, + colour: 70 +}; + +Blockly.Blocks["vex_back"] = { + init: function () { + this.jsonInit(vexBack); + } +}; + +javascriptGenerator.forBlock["vex_back"] = function (block, generator) { + var distance = generator.valueToCode(block, "distance", Order.ATOMIC) || "4"; + return `vex_back(${distance});\n`; +}; + +// VEX Left Block +var vexLeft = { + type: "vex_left", + message0: "left %1 inches", + args0: [ + { + type: "input_value", + name: "distance", + check: "Number" + } + ], + previousStatement: null, + nextStatement: null, + colour: 70 +}; + +Blockly.Blocks["vex_left"] = { + init: function () { + this.jsonInit(vexLeft); + } +}; + +javascriptGenerator.forBlock["vex_left"] = function (block, generator) { + var distance = generator.valueToCode(block, "distance", Order.ATOMIC) || "4"; + return `vex_left(${distance});\n`; +}; + +// VEX Right Block +var vexRight = { + type: "vex_right", + message0: "right %1 inches", + args0: [ + { + type: "input_value", + name: "distance", + check: "Number" + } + ], + previousStatement: null, + nextStatement: null, + colour: 70 +}; + +Blockly.Blocks["vex_right"] = { + init: function () { + this.jsonInit(vexRight); + } +}; + +javascriptGenerator.forBlock["vex_right"] = function (block, generator) { + var distance = generator.valueToCode(block, "distance", Order.ATOMIC) || "4"; + return `vex_right(${distance});\n`; +}; diff --git a/src/renderer/js/interpreter-api.js b/src/renderer/js/interpreter-api.js index 7c3238d..35e6d8a 100644 --- a/src/renderer/js/interpreter-api.js +++ b/src/renderer/js/interpreter-api.js @@ -3,6 +3,8 @@ import { WrapperFunctions } from 
"./wrapper-functions"; export const InterpreterAPI = class { constructor(workspace) { this.wrapperFunctions = new WrapperFunctions(workspace); + + // Expose all the functions you need inside Blockly’s interpreter this.nativeFunctions = { highlightBlock: this.wrapperFunctions.highlightWrapper, getDelta: this.wrapperFunctions.getDelta, @@ -18,8 +20,19 @@ export const InterpreterAPI = class { ccw: this.wrapperFunctions.ccw, cw: this.wrapperFunctions.cw, takeoff: this.wrapperFunctions.takeoff, - land: this.wrapperFunctions.land + land: this.wrapperFunctions.land, + // VEX commands + vex_turn_left: this.wrapperFunctions.vex_turn_left, + vex_turn_right: this.wrapperFunctions.vex_turn_right, + vex_forward: this.wrapperFunctions.vex_forward, + vex_back: this.wrapperFunctions.vex_back, + vex_left: this.wrapperFunctions.vex_left, + vex_right: this.wrapperFunctions.vex_right, + + // ← NEW: expose muscle energy from window.filteredSample + getMuscleEnergy: () => window.filteredSample }; + this.asyncFunctions = { filterSignal: this.wrapperFunctions.filterSignalWrapper, wait_seconds: this.wrapperFunctions.wait_seconds @@ -27,19 +40,21 @@ export const InterpreterAPI = class { } init(interpreter, globalObject) { - for (let [key, value] of Object.entries(this.nativeFunctions)) { + // Bind native (synchronous) functions + for (const [name, fn] of Object.entries(this.nativeFunctions)) { interpreter.setProperty( globalObject, - key, - interpreter.createNativeFunction(value.bind(this.wrapperFunctions)) + name, + interpreter.createNativeFunction(fn.bind(this.wrapperFunctions)) ); } - for (let [key, value] of Object.entries(this.asyncFunctions)) { + // Bind async functions (returning promises) + for (const [name, fn] of Object.entries(this.asyncFunctions)) { interpreter.setProperty( globalObject, - key, - interpreter.createAsyncFunction(value.bind(this.wrapperFunctions)) + name, + interpreter.createAsyncFunction(fn.bind(this.wrapperFunctions)) ); } } diff --git a/src/renderer/js/main.js 
b/src/renderer/js/main.js index 65d5848..beaf71b 100644 --- a/src/renderer/js/main.js +++ b/src/renderer/js/main.js @@ -15,6 +15,44 @@ import { ChannelVis } from "./channel_vis.js"; import { BlocklyMain } from "./blockly-main.js"; import { BandPowerVis } from "./band-power-vis.js"; +let ws; +let wsReconnectAttempts = 0; +const maxReconnectAttempts = 5; + +// function connectWebSocket() { +// ws = new WebSocket("ws://127.0.0.1:8777"); + +// ws.onopen = () => { +// console.log("WebSocket connection established"); +// wsReconnectAttempts = 0; +// }; + +// ws.onerror = (error) => { +// console.log("WebSocket connection attempt failed, retrying..."); +// }; + +// ws.onclose = () => { +// if (wsReconnectAttempts < maxReconnectAttempts) { +// wsReconnectAttempts++; +// console.log(`WebSocket reconnecting... attempt ${wsReconnectAttempts}`); +// setTimeout(connectWebSocket, 2000); // Wait 2 seconds before retry +// } +// }; + +// ws.onmessage = (event) => { +// console.log("Message from server:", event.data); +// }; +// } + +// Wait a bit before connecting to give Python server time to start +// setTimeout(connectWebSocket, 3000); + +function sendCommand(command) { + window.electronAPI.sendCommand(command); +} + +window.sendCommand = sendCommand; + export const NeuroScope = class { constructor() { this.blocklyMain = new BlocklyMain(); diff --git a/src/renderer/js/wrapper-functions.js b/src/renderer/js/wrapper-functions.js index c310b23..8e73430 100644 --- a/src/renderer/js/wrapper-functions.js +++ b/src/renderer/js/wrapper-functions.js @@ -68,6 +68,30 @@ export const WrapperFunctions = class { window.electronAPI.droneBack(value); } + vex_turn_left(degrees) { + window.electronAPI.vexTurnLeft(degrees); + } + + vex_turn_right(degrees) { + window.electronAPI.vexTurnRight(degrees); + } + + vex_forward(distance) { + window.electronAPI.vexForward(distance); + } + + vex_back(distance) { + window.electronAPI.vexBack(distance); + } + + vex_left(distance) { + 
window.electronAPI.vexLeft(distance); + } + + vex_right(distance) { + window.electronAPI.vexRight(distance); + } + ccw(value) { console.log("ccw"); window.electronAPI.ccw(value); diff --git a/test_websocket.py b/test_websocket.py new file mode 100644 index 0000000..c0049a4 --- /dev/null +++ b/test_websocket.py @@ -0,0 +1,29 @@ +import asyncio +import websockets +import json + +async def test_websocket(): + uri = "ws://localhost:8777" + try: + print(f"Connecting to {uri}...") + async with websockets.connect(uri) as websocket: + print("✅ Connected to VEX WebSocket server!") + + # Test a simple command + test_command = { + "action": "move", + "distance": 5, + "heading": 0 + } + + print(f"Sending test command: {test_command}") + await websocket.send(json.dumps(test_command)) + + response = await websocket.recv() + print(f"✅ Server response: {response}") + + except Exception as e: + print(f"❌ Connection failed: {e}") + +if __name__ == "__main__": + asyncio.run(test_websocket()) \ No newline at end of file diff --git a/yarn.lock b/yarn.lock index 4ed2ea9..e261fd3 100644 --- a/yarn.lock +++ b/yarn.lock @@ -7441,7 +7441,7 @@ w3c-xmlserializer@^4.0.0: dependencies: xml-name-validator "^4.0.0" -wait-on@^5.0.0: +wait-on@^5.3.0: version "5.3.0" resolved "https://registry.npmjs.org/wait-on/-/wait-on-5.3.0.tgz" integrity sha512-DwrHrnTK+/0QFaB9a8Ol5Lna3k7WvUR4jzSKmz0YaPBpuN2sACyiPVKVfj6ejnjcajAcvn3wlbTyMIn9AZouOg== @@ -7606,15 +7606,20 @@ ws@^6.1.2: async-limiter "~1.0.0" ws@^7.0.0: - version "7.5.0" - resolved "https://registry.npmjs.org/ws/-/ws-7.5.0.tgz" - integrity sha512-6ezXvzOZupqKj4jUqbQ9tXuJNo+BR2gU8fFRk3XCP3e0G6WT414u5ELe6Y0vtp7kmSJ3F7YWObSNr1ESsgi4vw== + version "7.5.10" + resolved "https://registry.npmjs.org/ws/-/ws-7.5.10.tgz" + integrity sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ== ws@^8.13.0: version "8.16.0" resolved "https://registry.yarnpkg.com/ws/-/ws-8.16.0.tgz#d1cd774f36fbc07165066a60e40323eab6446fd4" 
integrity sha512-HS0c//TP7Ina87TfiPUz1rQzMhHrl/SG2guqRcTOIUYD2q8uhUdNHZYJUaQ8aTGPzCh+c6oawMKW35nFl1dxyQ== +ws@^8.18.2: + version "8.18.3" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.18.3.tgz#b56b88abffde62791c639170400c93dcb0c95472" + integrity sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg== + xdg-basedir@^4.0.0: version "4.0.0" resolved "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-4.0.0.tgz"