diff --git a/.github/workflows/test-mqtt-broker.yml b/.github/workflows/test-mqtt-broker.yml new file mode 100644 index 000000000..64a9110ce --- /dev/null +++ b/.github/workflows/test-mqtt-broker.yml @@ -0,0 +1,55 @@ +name: Test BELY MQTT Message Broker + +on: + pull_request: + paths: + - 'tools/developer_tools/bely-mqtt-message-broker/**' + - '.github/workflows/test-mqtt-broker.yml' + workflow_dispatch: + +jobs: + test: + name: Run MQTT Broker Tests + runs-on: ubuntu-latest + + defaults: + run: + working-directory: tools/developer_tools/bely-mqtt-message-broker + + strategy: + matrix: + python-version: ['3.11', '3.12'] + # python-version: ['3.11'] + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + make install-dev + + - name: Run code quality linter + run: | + make lint + + - name: Run code quality type-check + run: | + make type-check + + - name: Run tests with coverage + run: | + make test-cov + + # - name: Upload coverage reports + # if: matrix.python-version == '3.11' + # uses: actions/upload-artifact@v4 + # with: + # name: coverage-report + # path: tools/developer_tools/bely-mqtt-message-broker/htmlcov/ \ No newline at end of file diff --git a/.gitignore b/.gitignore index cb6708344..90726d60b 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,7 @@ .vscode .DS_Store src/python/.loglogin +.loglogin # Compiled files *.pyc @@ -56,3 +57,5 @@ docs/python/_build/ # Temporary testing csv files tools/developer_tools/python-client/cdbCli/service/cli/Spreadsheets/ .env +tools/developer_tools/bely-mqtt-message-broker/conda-bld +tools/developer_tools/bely-mqtt-message-broker/dev-config diff --git a/etc/bely-mqtt-template b/etc/bely-mqtt-template new file mode 100644 index 000000000..06b37c5ea --- /dev/null +++ b/etc/bely-mqtt-template @@ 
-0,0 +1,157 @@ +keys: + - name: topic + type: string + color: + foreground: crimson + background: black + max-width: 25 + - name: description + type: string + color: + foreground: white + background: black + max-width: 25 + - name: entityId + type: string + color: + foreground: white + background: black + max-width: 25 + - name: entityName + type: string + color: + foreground: white + background: black + max-width: 25 + - name: eventTimestamp + type: string + color: + foreground: limegreen + background: black + max-width: 25 + - name: eventTriggedByUsername + type: string + color: + foreground: white + background: black + max-width: 25 + - name: logInfo/enteredByUsername + type: string + color: + foreground: white + background: black + max-width: 25 + - name: logInfo/enteredOnDateTime + type: string + color: + foreground: limegreen + background: black + max-width: 25 + - name: logInfo/id + type: string + color: + foreground: white + background: black + max-width: 25 + - name: logInfo/lastModifiedByUsername + type: string + color: + foreground: white + background: black + max-width: 25 + - name: logInfo/lastModifiedOnDateTime + type: string + color: + foreground: limegreen + background: black + max-width: 25 + - name: logbookList + type: string + color: + foreground: white + background: black + max-width: 25 + - name: parentLogDocumentInfo/createdByUsername + type: string + color: + foreground: white + background: black + max-width: 25 + - name: parentLogDocumentInfo/enteredOnDateTime + type: string + color: + foreground: limegreen + background: black + max-width: 25 + - name: parentLogDocumentInfo/id + type: string + color: + foreground: white + background: black + max-width: 25 + - name: parentLogDocumentInfo/lastModifiedByUsername + type: string + color: + foreground: white + background: black + max-width: 25 + - name: parentLogDocumentInfo/lastModifiedOnDateTime + type: string + color: + foreground: limegreen + background: black + max-width: 25 + - name: 
parentLogDocumentInfo/name + type: string + color: + foreground: white + background: black + max-width: 25 + - name: parentLogDocumentInfo/ownerUserGroupName + type: string + color: + foreground: white + background: black + max-width: 25 + - name: parentLogDocumentInfo/ownerUsername + type: string + color: + foreground: white + background: black + max-width: 25 + - name: parentlogInfo/enteredByUsername + type: string + color: + foreground: white + background: black + max-width: 25 + - name: parentlogInfo/enteredOnDateTime + type: string + color: + foreground: limegreen + background: black + max-width: 25 + - name: parentlogInfo/id + type: string + color: + foreground: white + background: black + max-width: 25 + - name: parentlogInfo/lastModifiedByUsername + type: string + color: + foreground: white + background: black + max-width: 25 + - name: parentlogInfo/lastModifiedOnDateTime + type: string + color: + foreground: limegreen + background: black + max-width: 25 + - name: textDiff + type: string + color: + foreground: white + background: black + max-width: 25 diff --git a/sbin/bely_configure_mqtt_service.sh b/sbin/bely_configure_mqtt_service.sh new file mode 100755 index 000000000..727d275d4 --- /dev/null +++ b/sbin/bely_configure_mqtt_service.sh @@ -0,0 +1,165 @@ +#!/bin/bash + +# Copyright (c) UChicago Argonne, LLC. All rights reserved. +# See LICENSE file. 
+ + +# +# Script used for configuring MQTT connector for Bely webapp +# Deploys MQTT Resource Adapter and creates connection pool/resource +# +# Usage: +# +# $0 [mqtt_config_file] +# +# If no config file is specified, defaults to $LOGR_INSTALL_DIR/etc/mqtt.conf +# +# Sample MQTT configuration file contents (mqtt.conf): +# MQTT_HOST=localhost # MQTT broker hostname (default: localhost) +# MQTT_PORT=1883 # MQTT broker port (default: 1883) +# MQTT_USERNAME=admin # MQTT username (optional) +# MQTT_PASSWORD=admin # MQTT password (optional) +# MQTT_CLEAN_SESSION=true # Clean session flag (optional) +# MQTT_QOS=1 # Quality of Service level (optional) +# MQTT_KEEP_ALIVE_INTERVAL=60 # Keep alive interval in seconds (optional) +# MQTT_CONNECTION_TIMEOUT=30 # Connection timeout in seconds (optional) +# MQTT_MAX_INFLIGHT=10 # Maximum number of messages in flight (optional) +# MQTT_AUTOMATIC_RECONNECT=true # Automatic reconnection on disconnect (optional) +# MQTT_FILE_PERSISTANCE=false # Enable file-based message persistence (optional) +# MQTT_PERSISTENCE_DIRECTORY=. # Directory for persistent message storage (optional) +# MQTT_TOPIC_FILTER # MQTT topic filter for subscriptions (optional) + +MY_DIR=`dirname $0` && cd $MY_DIR && MY_DIR=`pwd` +if [ -z "${BELY_ROOT_DIR}" ]; then + BELY_ROOT_DIR=$MY_DIR/.. +fi +BELY_ENV_FILE=${BELY_ROOT_DIR}/setup.sh +if [ ! -f ${BELY_ENV_FILE} ]; then + echo "Environment file ${BELY_ENV_FILE} does not exist." + exit 2 +fi +. ${BELY_ENV_FILE} > /dev/null + +# Constants +MQTT_POOL_NAME="bely/MQTT/pool" +MQTT_RESOURCE_NAME="bely/MQTT/resource" +MQTT_RAR_NAME="mqtt-rar-0.8.0" +MQTT_CONNECTOR_RAR_DEPLOYMENT_NAME="mqtt-rar-deployment" +MQTT_RAR_PATH=$BELY_ROOT_DIR/src/lib/${MQTT_RAR_NAME}.rar + +# Look for MQTT configuration file +if [ ! -z "$1" ]; then + mqttConfigFile=$1 +else + mqttConfigFile=$LOGR_INSTALL_DIR/etc/mqtt.conf +fi + +if [ -f $mqttConfigFile ]; then + echo "Using MQTT config file: $mqttConfigFile" + . 
$mqttConfigFile +else + echo "Error: MQTT config file $mqttConfigFile not found." + echo "You can create one using bely_create_mqtt_configuration.sh" + exit 1 +fi + +BELY_HOST_ARCH=$(uname -sm | tr -s '[:upper:][:blank:]' '[:lower:][\-]') +GLASSFISH_DIR=$LOGR_SUPPORT_DIR/payara/$BELY_HOST_ARCH + +ASADMIN_CMD=$GLASSFISH_DIR/bin/asadmin + +# MQTT Configuration defaults +MQTT_HOST=${MQTT_HOST:=localhost} +MQTT_PORT=${MQTT_PORT:=1883} + +# Build properties string for connection pool +PROPERTIES="serverURIs=tcp\\://${MQTT_HOST}\\:${MQTT_PORT}" + +if [ ! -z "$MQTT_USERNAME" ]; then + PROPERTIES="${PROPERTIES}:userName=${MQTT_USERNAME}" +fi + +if [ ! -z "$MQTT_PASSWORD" ]; then + PROPERTIES="${PROPERTIES}:password=${MQTT_PASSWORD}" +fi + +if [ ! -z "$MQTT_CLEAN_SESSION" ]; then + PROPERTIES="${PROPERTIES}:cleanSession=${MQTT_CLEAN_SESSION}" +fi + +if [ ! -z "$MQTT_KEEP_ALIVE_INTERVAL" ]; then + PROPERTIES="${PROPERTIES}:keepAliveInterval=${MQTT_KEEP_ALIVE_INTERVAL}" +fi + +if [ ! -z "$MQTT_CONNECTION_TIMEOUT" ]; then + PROPERTIES="${PROPERTIES}:connectionTimeout=${MQTT_CONNECTION_TIMEOUT}" +fi + +if [ ! -z "$MQTT_MAX_INFLIGHT" ]; then + PROPERTIES="${PROPERTIES}:maxInflight=${MQTT_MAX_INFLIGHT}" +fi + +if [ ! -z "$MQTT_AUTOMATIC_RECONNECT" ]; then + PROPERTIES="${PROPERTIES}:automaticReconnect=${MQTT_AUTOMATIC_RECONNECT}" +fi + +if [ ! -z "$MQTT_FILE_PERSISTANCE" ]; then + PROPERTIES="${PROPERTIES}:filePersistance=${MQTT_FILE_PERSISTANCE}" +fi + +if [ ! -z "$MQTT_PERSISTENCE_DIRECTORY" ]; then + PROPERTIES="${PROPERTIES}:persistenceDirectory=${MQTT_PERSISTENCE_DIRECTORY}" +fi + +if [ ! -z "$MQTT_QOS" ]; then + PROPERTIES="${PROPERTIES}:qos=${MQTT_QOS}" +fi + +if [ ! 
-z "$MQTT_TOPIC_FILTER" ]; then + PROPERTIES="${PROPERTIES}:topicFilter=${MQTT_TOPIC_FILTER}" +fi + +# Deploy MQTT RAR +echo "Deploying MQTT RAR" +if [ -f "$MQTT_RAR_PATH" ]; then + # Check if already deployed and undeploy if needed + $ASADMIN_CMD list-applications | grep -q ${MQTT_CONNECTOR_RAR_DEPLOYMENT_NAME} && { + echo "Undeploying existing MQTT RAR" + # Check if resource exists and delete it + $ASADMIN_CMD list-connector-resources | grep -q ${MQTT_RESOURCE_NAME} && { + echo "Deleting existing MQTT resource" + $ASADMIN_CMD delete-connector-resource ${MQTT_RESOURCE_NAME} || exit 1 + } + + # Check if connection pool exists and delete it + $ASADMIN_CMD list-connector-connection-pools | grep -q ${MQTT_POOL_NAME} && { + echo "Deleting existing MQTT connection pool" + $ASADMIN_CMD delete-connector-connection-pool ${MQTT_POOL_NAME} || exit 1 + } + + echo "Undeploying existing MQTT RAR" + $ASADMIN_CMD undeploy ${MQTT_CONNECTOR_RAR_DEPLOYMENT_NAME} || exit 1 + } + $ASADMIN_CMD deploy --name ${MQTT_CONNECTOR_RAR_DEPLOYMENT_NAME} $MQTT_RAR_PATH || exit 1 +else + echo "Warning: MQTT RAR file not found at $MQTT_RAR_PATH" + exit 1 +fi + +# Create MQTT connection pool +echo "Creating MQTT connection pool ${MQTT_POOL_NAME}" +$ASADMIN_CMD create-connector-connection-pool \ + --raname ${MQTT_CONNECTOR_RAR_DEPLOYMENT_NAME} \ + --connectiondefinition fish.payara.cloud.connectors.mqtt.api.MQTTConnectionFactory \ + --property "${PROPERTIES}" \ + ${MQTT_POOL_NAME} || exit 1 +# Create MQTT resource +echo "Creating MQTT resource ${MQTT_RESOURCE_NAME}" +$ASADMIN_CMD create-connector-resource \ + --poolname ${MQTT_POOL_NAME} \ + ${MQTT_RESOURCE_NAME} || exit 1 +# Test MQTT connection pool +echo "Testing MQTT connection pool" +$ASADMIN_CMD ping-connection-pool ${MQTT_POOL_NAME} || { echo "Warning: MQTT connection pool ping failed"; exit 1; } + +echo "Restart or redeploy BELY." 
\ No newline at end of file diff --git a/sbin/bely_create_mqtt_configuration.sh b/sbin/bely_create_mqtt_configuration.sh new file mode 100755 index 000000000..bd9ffde1d --- /dev/null +++ b/sbin/bely_create_mqtt_configuration.sh @@ -0,0 +1,133 @@ +#!/bin/bash + +# Script to create MQTT configuration file for Bely service + +MY_DIR=`dirname $0` && cd $MY_DIR && MY_DIR=`pwd` +if [ -z "${BELY_ROOT_DIR}" ]; then + BELY_ROOT_DIR=$MY_DIR/.. +fi +BELY_ENV_FILE=${BELY_ROOT_DIR}/setup.sh +if [ ! -f ${BELY_ENV_FILE} ]; then + echo "Environment file ${BELY_ENV_FILE} does not exist." + exit 2 +fi +. ${BELY_ENV_FILE} > /dev/null + +# Default configuration file location +MQTT_CONFIG_FILE=${LOGR_INSTALL_DIR}/etc/mqtt.conf + +echo "===================================" +echo "MQTT Configuration Setup for Bely" +echo "===================================" +echo "" +echo "This script will help you create an MQTT configuration file." +echo "Configuration will be saved to: $MQTT_CONFIG_FILE" +echo "" + +# Check if config file already exists +if [ -f "$MQTT_CONFIG_FILE" ]; then + read -p "Configuration file already exists. Overwrite? (y/n): " -n 1 -r + echo + if [[ ! $REPLY =~ ^[Yy]$ ]]; then + echo "Exiting without changes." + exit 0 + fi +fi + +echo "" +echo "Configuration Details:" +echo "- cleanSession: Whether client and server should remember state across reconnects" +echo "- automaticReconnect: Whether client will automatically reconnect if connection is lost" +echo "- filePersistance: Whether to use file persistence for un-acknowledged messages" +echo "- persistenceDirectory: Directory to use for file persistence" +echo "- connectionTimeout: Connection timeout value in seconds" +echo "- maxInflight: Maximum messages that can be sent without acknowledgements" +echo "- keepAliveInterval: Keep alive interval in seconds" +echo "- userName/password: Authentication credentials" +# Disable MDB only variables. 
+# echo "- topicFilter: Topic Filter (For MDBs only)" +# echo "- qos: Quality of Service for the subscription (For MDBs only)" +echo "" + +# Ensure directory exists +mkdir -p $(dirname "$MQTT_CONFIG_FILE") + +# Prompt for configuration values +read -p "MQTT Host [localhost]: " MQTT_HOST +MQTT_HOST=${MQTT_HOST:-localhost} + +read -p "MQTT Port [1883]: " MQTT_PORT +MQTT_PORT=${MQTT_PORT:-1883} + +read -p "MQTT Username (leave empty for no auth): " MQTT_USERNAME + +if [ ! -z "$MQTT_USERNAME" ]; then + read -s -p "MQTT Password: " MQTT_PASSWORD + echo +fi + +read -p "Clean Session (true/false) [false]: " CLEAN_SESSION +CLEAN_SESSION=${CLEAN_SESSION:-false} + +read -p "Automatic Reconnect (true/false) [true]: " AUTOMATIC_RECONNECT +AUTOMATIC_RECONNECT=${AUTOMATIC_RECONNECT:-true} + +read -p "File Persistance (true/false) [false]: " FILE_PERSISTANCE +FILE_PERSISTANCE=${FILE_PERSISTANCE:-false} + +read -p "Persistence Directory [.]: " PERSISTENCE_DIRECTORY +PERSISTENCE_DIRECTORY=${PERSISTENCE_DIRECTORY:-.} + +read -p "Connection Timeout (seconds) [30]: " CONNECTION_TIMEOUT +CONNECTION_TIMEOUT=${CONNECTION_TIMEOUT:-30} + +read -p "Max Inflight [10]: " MAX_INFLIGHT +MAX_INFLIGHT=${MAX_INFLIGHT:-10} + +read -p "Keep Alive Interval (seconds) [60]: " KEEP_ALIVE_INTERVAL +KEEP_ALIVE_INTERVAL=${KEEP_ALIVE_INTERVAL:-60} + +# MDB only variables +# read -p "Topic Filter (leave empty if not using MDB): " TOPIC_FILTER +# read -p "QoS (0/1/2) [0]: " QOS +# QOS=${QOS:-0} + +# Write configuration file +cat > "$MQTT_CONFIG_FILE" << EOF +# MQTT Configuration for Bely Service +# Generated on $(date) + +MQTT_HOST=$MQTT_HOST +MQTT_PORT=$MQTT_PORT +EOF + +if [ ! -z "$MQTT_USERNAME" ]; then + echo "MQTT_USERNAME=$MQTT_USERNAME" >> "$MQTT_CONFIG_FILE" +fi + +if [ ! 
-z "$MQTT_PASSWORD" ]; then + echo "MQTT_PASSWORD=$MQTT_PASSWORD" >> "$MQTT_CONFIG_FILE" +fi + +cat >> "$MQTT_CONFIG_FILE" << EOF +MQTT_CLEAN_SESSION=$CLEAN_SESSION +MQTT_AUTOMATIC_RECONNECT=$AUTOMATIC_RECONNECT +MQTT_FILE_PERSISTANCE=$FILE_PERSISTANCE +MQTT_PERSISTENCE_DIRECTORY=$PERSISTENCE_DIRECTORY +MQTT_CONNECTION_TIMEOUT=$CONNECTION_TIMEOUT +MQTT_MAX_INFLIGHT=$MAX_INFLIGHT +MQTT_KEEP_ALIVE_INTERVAL=$KEEP_ALIVE_INTERVAL +EOF + +if [ ! -z "$TOPIC_FILTER" ]; then + echo "MQTT_TOPIC_FILTER=$TOPIC_FILTER" >> "$MQTT_CONFIG_FILE" +fi + +if [ ! -z "$QOS" ]; then + echo "MQTT_QOS=$QOS" >> "$MQTT_CONFIG_FILE" +fi + +echo "" +echo "Configuration file created successfully at: $MQTT_CONFIG_FILE" +echo "" +echo "You can now run bely_configure_mqtt_service.sh to apply this configuration." \ No newline at end of file diff --git a/sbin/bely_update_mqtt_connector_build.sh b/sbin/bely_update_mqtt_connector_build.sh new file mode 100755 index 000000000..ff5756de4 --- /dev/null +++ b/sbin/bely_update_mqtt_connector_build.sh @@ -0,0 +1,64 @@ +#!/bin/bash + +# Copyright (c) UChicago Argonne, LLC. All rights reserved. +# See LICENSE file. + + +# +# Script downloads builds cloud connectors. Updates the one included with the repo. +# +# Usage: +# +# $0 +# + +MY_DIR=`dirname $0` && cd $MY_DIR && MY_DIR=`pwd` +if [ -z "${BELY_ROOT_DIR}" ]; then + BELY_ROOT_DIR=$MY_DIR/.. +fi +BELY_ENV_FILE=${BELY_ROOT_DIR}/setup.sh +if [ ! -f ${BELY_ENV_FILE} ]; then + echo "Environment file ${BELY_ENV_FILE} does not exist." + exit 2 +fi +. 
${BELY_ENV_FILE} > /dev/null + +# Constants +CONNECTOR_URL=https://github.com/payara/Cloud-Connectors/archive/refs/tags/0.8.0.tar.gz +TARGET_FILE_NAME=mqtt-rar-0.8.0.rar +TARGET_FILE_PATH=MQTT/MQTTRAR/target +DEST_DIR="${BELY_ROOT_DIR}/src/lib" + +# Create temporary directory +TMP_DIR=$(mktemp -d) +cd $TMP_DIR + +# Download and extract connector +curl -L --progress-bar $CONNECTOR_URL -o connector.tar.gz +tar -xzf connector.tar.gz + +# Find extracted directory and build +EXTRACTED_DIR=$(find . -maxdepth 1 -type d -name "Cloud-Connectors-*" | head -1) +cd $EXTRACTED_DIR + +# Run maven build +mvn clean install -DskipTests + +# Find and print the newly built target +TARGET_FILE="$TARGET_FILE_PATH/$TARGET_FILE_NAME" +if [ -f "$TARGET_FILE" ]; then + echo "Found target: $TARGET_FILE" + ls -la "$TARGET_FILE" + + # Copy to root/src/lib + cp "$TARGET_FILE" "$DEST_DIR/" + echo "Copied $TARGET_FILE_NAME to $DEST_DIR" +else + echo "Target file not found: $TMP_DIR/$TARGET_FILE" + exit 1 +fi + +# Clean up temporary directory +cd $MY_DIR +rm -rf $TMP_DIR +echo "Cleaned up temporary directory: $TMP_DIR" diff --git a/src/java/LogrPortal/lib/flexmark-ext-tables-0.64.8.jar b/src/java/LogrPortal/lib/flexmark-ext-tables-0.64.8.jar new file mode 100644 index 000000000..9201e31bf Binary files /dev/null and b/src/java/LogrPortal/lib/flexmark-ext-tables-0.64.8.jar differ diff --git a/src/java/LogrPortal/lib/jakarta.resource-api-2.1.0.jar b/src/java/LogrPortal/lib/jakarta.resource-api-2.1.0.jar new file mode 100644 index 000000000..5a5d90897 Binary files /dev/null and b/src/java/LogrPortal/lib/jakarta.resource-api-2.1.0.jar differ diff --git a/src/java/LogrPortal/lib/mqtt-jca-api-1.0.0.jar b/src/java/LogrPortal/lib/mqtt-jca-api-1.0.0.jar new file mode 100644 index 000000000..fa423699f Binary files /dev/null and b/src/java/LogrPortal/lib/mqtt-jca-api-1.0.0.jar differ diff --git a/src/java/LogrPortal/nbproject/build-impl.xml b/src/java/LogrPortal/nbproject/build-impl.xml index 
f7b8a2414..97f3d75ad 100644 --- a/src/java/LogrPortal/nbproject/build-impl.xml +++ b/src/java/LogrPortal/nbproject/build-impl.xml @@ -17,7 +17,7 @@ - cleanup --> - + @@ -470,7 +470,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -618,7 +618,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -1041,6 +1041,9 @@ exists or setup the property manually. For example like this: + + + @@ -1104,6 +1107,9 @@ exists or setup the property manually. For example like this: + + + diff --git a/src/java/LogrPortal/nbproject/build-impl.xml~ b/src/java/LogrPortal/nbproject/build-impl.xml~ index f144c2662..f7b8a2414 100644 --- a/src/java/LogrPortal/nbproject/build-impl.xml~ +++ b/src/java/LogrPortal/nbproject/build-impl.xml~ @@ -17,7 +17,7 @@ - cleanup --> - + @@ -49,36 +49,6 @@ - - - - - - - - - - - - - - - - - - - Must set platform.home - Must set platform.bootcp - Must set platform.java - Must set platform.javac - - The J2SE Platform is not correctly set up. - Your active platform is: ${platform.active}, but the corresponding property "platforms.${platform.active}.home" is not found in the project's properties files. - Either open the project in the IDE and setup the Platform with the same name or add it manually. 
- For example like this: - ant -Duser.properties.file=<path_to_property_file> jar (where you put the property "platforms.${platform.active}.home" in a .properties file) - or ant -Dplatforms.${platform.active}.home=<path_to_JDK_home> jar (where no properties file is used) - @@ -220,6 +190,15 @@ + + + + + + + + + @@ -314,7 +293,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -351,7 +330,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -428,7 +407,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -451,7 +430,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -491,7 +470,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -569,7 +548,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -593,7 +572,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -639,7 +618,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -716,7 +695,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -748,9 +727,6 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - - - @@ -785,7 +761,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -1293,9 +1269,6 @@ exists or setup the property manually. For example like this: - - - @@ -1418,7 +1391,7 @@ exists or setup the property manually. 
For example like this: --> - + diff --git a/src/java/LogrPortal/nbproject/genfiles.properties b/src/java/LogrPortal/nbproject/genfiles.properties index 1a378891c..1394c7ab3 100644 --- a/src/java/LogrPortal/nbproject/genfiles.properties +++ b/src/java/LogrPortal/nbproject/genfiles.properties @@ -3,9 +3,9 @@ build.xml.script.CRC32=67492cbd build.xml.stylesheet.CRC32=1707db4f@1.94.0.1 # This file is used by a NetBeans-based IDE to track changes in generated files such as build-impl.xml. # Do not edit this file. You may delete it but then the IDE will never regenerate such files for you. -nbproject/build-impl.xml.data.CRC32=69e278e8 -nbproject/build-impl.xml.script.CRC32=709f92a3 -nbproject/build-impl.xml.stylesheet.CRC32=334708a0@1.91.0.1 +nbproject/build-impl.xml.data.CRC32=ad0819a2 +nbproject/build-impl.xml.script.CRC32=9e43ea77 +nbproject/build-impl.xml.stylesheet.CRC32=334708a0@1.94.0.1 nbproject/rest-build.xml.data.CRC32=83e13ee9 nbproject/rest-build.xml.script.CRC32=0d1fb1b4 nbproject/rest-build.xml.stylesheet.CRC32=0cfeebcc@1.31.1 diff --git a/src/java/LogrPortal/nbproject/project.properties b/src/java/LogrPortal/nbproject/project.properties index 853ccb196..94f4a2639 100644 --- a/src/java/LogrPortal/nbproject/project.properties +++ b/src/java/LogrPortal/nbproject/project.properties @@ -48,6 +48,7 @@ file.reference.dm-api-3.3.1.jar=lib/dm-api-3.3.1.jar file.reference.dm-base-3.3.1.jar=lib/dm-base-3.3.1.jar file.reference.ejb-api-3.0.jar=lib/ejb-api-3.0.jar file.reference.flexmark-0.64.8.jar=lib/flexmark-0.64.8.jar +file.reference.flexmark-ext-tables-0.64.8.jar=lib/flexmark-ext-tables-0.64.8.jar file.reference.flexmark-util-ast-0.64.8.jar=lib/flexmark-util-ast-0.64.8.jar file.reference.flexmark-util-builder-0.64.8.jar=lib/flexmark-util-builder-0.64.8.jar file.reference.flexmark-util-collection-0.64.8.jar=lib/flexmark-util-collection-0.64.8.jar @@ -69,6 +70,7 @@ file.reference.jackson-dataformat-yaml-2.11.2.jar=lib/jackson-dataformat-yaml-2. 
file.reference.jackson-datatype-jsr310-2.11.3.jar=lib/jackson-datatype-jsr310-2.11.3.jar file.reference.jackson-jaxrs-base-2.11.2.jar=lib/jackson-jaxrs-base-2.11.2.jar file.reference.jackson-jaxrs-json-provider-2.11.2.jar=lib/jackson-jaxrs-json-provider-2.11.2.jar +file.reference.jakarta.resource-api-2.1.0.jar=lib/jakarta.resource-api-2.1.0.jar file.reference.javaee-web-api-8.0.1.jar=lib/javaee-web-api-8.0.1.jar file.reference.javax.faces-api-2.1.jar=lib/javax.faces-api-2.1.jar file.reference.javax.inject-1.jar=lib/javax.inject-1.jar @@ -79,6 +81,7 @@ file.reference.libphonenumber-8.12.3.jar=lib/libphonenumber-8.12.3.jar file.reference.log4j-api-2.17.0.jar=lib/log4j-api-2.17.0.jar file.reference.log4j-core-2.17.0.jar=lib/log4j-core-2.17.0.jar file.reference.metadata-extractor-2.17.0.jar=lib/metadata-extractor-2.17.0.jar +file.reference.mqtt-jca-api-1.0.0.jar=lib/mqtt-jca-api-1.0.0.jar file.reference.omnifaces-3.10.1.jar-1=lib/omnifaces-3.10.1.jar file.reference.pdfbox-2.0.24.jar=lib/pdfbox-2.0.24.jar file.reference.poi-4.0.1.jar=lib/poi-4.0.1.jar @@ -169,7 +172,10 @@ javac.classpath=\ ${file.reference.flexmark-util-collection-0.64.8.jar}:\ ${file.reference.flexmark-util-format-0.64.8.jar}:\ ${file.reference.flexmark-util-visitor-0.64.8.jar}:\ - ${file.reference.flexmark-util-html-0.64.8.jar} + ${file.reference.flexmark-util-html-0.64.8.jar}:\ + ${file.reference.flexmark-ext-tables-0.64.8.jar}:\ + ${file.reference.jakarta.resource-api-2.1.0.jar}:\ + ${file.reference.mqtt-jca-api-1.0.0.jar} # Space-separated list of extra javac options javac.compilerargs= javac.debug=true diff --git a/src/java/LogrPortal/nbproject/project.xml b/src/java/LogrPortal/nbproject/project.xml index b5acfa92b..51296cb8a 100644 --- a/src/java/LogrPortal/nbproject/project.xml +++ b/src/java/LogrPortal/nbproject/project.xml @@ -5,7 +5,6 @@ LogrPortal 1.6.5 - ${file.reference.pdfbox-2.0.24.jar} @@ -243,6 +242,18 @@ ${file.reference.flexmark-util-html-0.64.8.jar} WEB-INF/lib + + 
${file.reference.flexmark-ext-tables-0.64.8.jar} + WEB-INF/lib + + + ${file.reference.jakarta.resource-api-2.1.0.jar} + WEB-INF/lib + + + ${file.reference.mqtt-jca-api-1.0.0.jar} + WEB-INF/lib + diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/constants/ChangeType.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/constants/ChangeType.java new file mode 100644 index 000000000..1224a1d77 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/constants/ChangeType.java @@ -0,0 +1,27 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. + */ +package gov.anl.aps.logr.common.mqtt.constants; + +/** + * + * @author djarosz + */ +public enum ChangeType { + + ADD(1), + UPDATE(2), + DELETE(3); + + private final int value; + + ChangeType(int value) { + this.value = value; + } + + public int getValue() { + return value; + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/constants/MqttTopic.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/constants/MqttTopic.java new file mode 100644 index 000000000..04a1db824 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/constants/MqttTopic.java @@ -0,0 +1,38 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. 
+ */ +package gov.anl.aps.logr.common.mqtt.constants; + +/** + * + * @author djarosz + */ +public enum MqttTopic { + UPDATE("bely/update"), + ADD("bely/add"), + DELETE("bely/delete"), + LOGENTRYADD("bely/logEntry/Add"), + LOGENTRYUPDATE("bely/logEntry/Update"), + LOGENTRYDELETE("bely/logEntry/Delete"), + LOGENTRYREPLYADD("bely/logEntryReply/Add"), + LOGENTRYREPLYUPDATE("bely/logEntryReply/Update"), + LOGENTRYREPLYDELETE("bely/logEntryReply/Delete"), + LOGREACTIONADD("bely/logReaction/Add"), + LOGREACTIONDELETE("bely/logReaction/Delete"); + + private final String value; + + MqttTopic(String value) { + this.value = value; + } + + public String getValue() { + return value; + } + + @Override + public String toString() { + return super.toString() + "(" + value + ")"; + } +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/AddEvent.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/AddEvent.java new file mode 100644 index 000000000..73a733ea1 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/AddEvent.java @@ -0,0 +1,26 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. 
+ */ +package gov.anl.aps.logr.common.mqtt.model; + +import gov.anl.aps.logr.common.mqtt.constants.MqttTopic; +import gov.anl.aps.logr.portal.model.db.entities.CdbEntity; +import gov.anl.aps.logr.portal.model.db.entities.UserInfo; + +/** + * + * @author djarosz + */ +public class AddEvent extends MqttEvent { + + public AddEvent(CdbEntity entity, UserInfo eventTriggedByUser, String description) { + super(entity, eventTriggedByUser, description); + } + + @Override + public MqttTopic getTopic() { + return MqttTopic.ADD; + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/DeleteEvent.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/DeleteEvent.java new file mode 100644 index 000000000..80a3dbe04 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/DeleteEvent.java @@ -0,0 +1,26 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. + */ +package gov.anl.aps.logr.common.mqtt.model; + +import gov.anl.aps.logr.common.mqtt.constants.MqttTopic; +import gov.anl.aps.logr.portal.model.db.entities.CdbEntity; +import gov.anl.aps.logr.portal.model.db.entities.UserInfo; + +/** + * + * @author djarosz + */ +public class DeleteEvent extends MqttEvent { + + public DeleteEvent(CdbEntity entity, UserInfo eventTriggedByUser, String description) { + super(entity, eventTriggedByUser, description); + } + + @Override + public MqttTopic getTopic() { + return MqttTopic.DELETE; + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/LogEntryEvent.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/LogEntryEvent.java new file mode 100644 index 000000000..0a3d9f00c --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/LogEntryEvent.java @@ -0,0 +1,87 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. 
+ */ +package gov.anl.aps.logr.common.mqtt.model; + +import gov.anl.aps.logr.common.mqtt.constants.ChangeType; +import gov.anl.aps.logr.common.mqtt.constants.MqttTopic; +import gov.anl.aps.logr.common.mqtt.model.entities.LogInfo; +import gov.anl.aps.logr.common.mqtt.model.entities.LogbookDocumentInfo; +import gov.anl.aps.logr.common.mqtt.model.entities.LogbookInfo; +import gov.anl.aps.logr.portal.model.db.entities.EntityType; +import gov.anl.aps.logr.portal.model.db.entities.ItemDomainLogbook; +import gov.anl.aps.logr.portal.model.db.entities.Log; +import gov.anl.aps.logr.portal.model.db.entities.UserInfo; +import java.util.ArrayList; +import java.util.List; + +/** + * + * @author djarosz + */ +public class LogEntryEvent extends MqttEvent { + + LogbookDocumentInfo parentLogDocumentInfo; + LogInfo logInfo; + List logbookList; + String textDiff; + protected ChangeType changeType; + + public LogEntryEvent(ItemDomainLogbook parentLogbook, Log entity, UserInfo eventTriggedByUser, String description, String textDiff, ChangeType changeType) { + super(entity, eventTriggedByUser, description); + this.logInfo = new LogInfo(entity); + this.textDiff = textDiff; + this.changeType = changeType; + + if (parentLogbook != null) { + parentLogDocumentInfo = new LogbookDocumentInfo(parentLogbook); + } + + List entityTypeList = parentLogbook.getEntityTypeList(); + logbookList = new ArrayList<>(); + for (EntityType entityType : entityTypeList) { + LogbookInfo info = new LogbookInfo(entityType); + logbookList.add(info); + } + } + + @Override + public final MqttTopic getTopic() { + if (changeType == ChangeType.ADD) { + return getAddEventTopic(); + } else if (changeType == ChangeType.DELETE) { + return getDeletedEventTopic(); + } + return getUpdateEventTopic(); + } + + protected MqttTopic getAddEventTopic() { + return MqttTopic.LOGENTRYADD; + } + + protected MqttTopic getUpdateEventTopic() { + return MqttTopic.LOGENTRYUPDATE; + } + + protected MqttTopic getDeletedEventTopic() { + return 
MqttTopic.LOGENTRYDELETE; + } + + public LogInfo getLogInfo() { + return logInfo; + } + + public LogbookDocumentInfo getParentLogDocumentInfo() { + return parentLogDocumentInfo; + } + + public List getLogbookList() { + return logbookList; + } + + public String getTextDiff() { + return textDiff; + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/LogReactionEvent.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/LogReactionEvent.java new file mode 100644 index 000000000..5162ac888 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/LogReactionEvent.java @@ -0,0 +1,63 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. + */ +package gov.anl.aps.logr.common.mqtt.model; + +import gov.anl.aps.logr.common.mqtt.constants.MqttTopic; +import gov.anl.aps.logr.common.mqtt.model.entities.LogInfo; +import gov.anl.aps.logr.common.mqtt.model.entities.LogbookDocumentInfo; +import gov.anl.aps.logr.portal.model.db.entities.ItemDomainLogbook; +import gov.anl.aps.logr.portal.model.db.entities.Log; +import gov.anl.aps.logr.portal.model.db.entities.LogReaction; +import gov.anl.aps.logr.portal.model.db.entities.UserInfo; + +/** + * + * @author djarosz + */ +public class LogReactionEvent extends MqttEvent { + + boolean isDelete; + + LogInfo parentLogInfo; + LogbookDocumentInfo parentLogDocumentInfo; + + public LogReactionEvent(LogReaction entity, + Log parentLogEntry, + ItemDomainLogbook parentLogDocument, + UserInfo eventTriggedByUser, + String description, boolean isDelete) { + super(entity, eventTriggedByUser, description); + + this.isDelete = isDelete; + + if (parentLogEntry != null) { + parentLogInfo = new LogInfo(parentLogEntry); + } + if (parentLogDocument != null) { + parentLogDocumentInfo = new LogbookDocumentInfo(parentLogDocument); + } + } + + @Override + public MqttTopic getTopic() { + if (isDelete) { + return MqttTopic.LOGREACTIONDELETE; + } + return 
MqttTopic.LOGREACTIONADD; + } + + public LogReaction getLogReaction() { + return entity; + } + + public LogInfo getParentLogInfo() { + return parentLogInfo; + } + + public LogbookDocumentInfo getParentLogDocumentInfo() { + return parentLogDocumentInfo; + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/MqttEvent.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/MqttEvent.java new file mode 100644 index 000000000..c01ef0c5d --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/MqttEvent.java @@ -0,0 +1,73 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. + */ +package gov.anl.aps.logr.common.mqtt.model; + +import com.fasterxml.jackson.annotation.JsonFormat; +import java.util.Date; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; + +import gov.anl.aps.logr.common.mqtt.constants.MqttTopic; +import gov.anl.aps.logr.portal.model.db.entities.CdbEntity; +import gov.anl.aps.logr.portal.model.db.entities.UserInfo; + +/** + * + * @author djarosz + */ +public abstract class MqttEvent { + + Entity entity; + String description; + Date eventTimestamp; + UserInfo eventTriggedByUser; + + public MqttEvent(Entity entity, UserInfo eventTriggedByUser, String description) { + this.entity = entity; + this.description = description; + this.eventTimestamp = new Date(); + this.eventTriggedByUser = eventTriggedByUser; + } + + @JsonIgnore + public abstract MqttTopic getTopic(); + + @JsonIgnore + public CdbEntity getEntity() { + return entity; + } + + public Object getEntityId() { + return entity.getId(); + } + + public String getEntityName() { + return entity.getClass().getSimpleName(); + } + + public String getDescription() { + return description; + } + + @JsonFormat(shape = JsonFormat.Shape.STRING) + public Date getEventTimestamp() { + return 
eventTimestamp; + } + + public String getEventTriggedByUsername() { + if (eventTriggedByUser == null) { + return null; + } + + return eventTriggedByUser.getUsername(); + } + + public String toJson() throws JsonProcessingException { + ObjectMapper mapper = new ObjectMapper(); + return mapper.writeValueAsString(this); + } +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/ReplyLogEntryEvent.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/ReplyLogEntryEvent.java new file mode 100644 index 000000000..7d1f0e16c --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/ReplyLogEntryEvent.java @@ -0,0 +1,60 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. + */ +package gov.anl.aps.logr.common.mqtt.model; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import gov.anl.aps.logr.common.mqtt.constants.ChangeType; +import gov.anl.aps.logr.common.mqtt.constants.MqttTopic; +import gov.anl.aps.logr.common.mqtt.model.entities.LogInfo; +import gov.anl.aps.logr.portal.model.db.entities.ItemDomainLogbook; +import gov.anl.aps.logr.portal.model.db.entities.Log; +import gov.anl.aps.logr.portal.model.db.entities.UserInfo; + +/** + * + * @author djarosz + */ +public class ReplyLogEntryEvent extends LogEntryEvent { + + LogInfo parentLogInfo; + + public ReplyLogEntryEvent(ItemDomainLogbook parentLogbook, Log entity, UserInfo eventTriggedByUser, String description, String textDiff, ChangeType changeType) { + super(parentLogbook, entity, eventTriggedByUser, description, textDiff, changeType); + + Log parentLogObject = getParentLogObject(); + + if (parentLogObject != null) { + parentLogInfo = new LogInfo(parentLogObject); + } + } + + @Override + protected MqttTopic getAddEventTopic() { + return MqttTopic.LOGENTRYREPLYADD; + } + + @Override + protected MqttTopic getUpdateEventTopic() { + return MqttTopic.LOGENTRYREPLYUPDATE; + } + + @Override + protected MqttTopic 
getDeletedEventTopic() { + return MqttTopic.LOGENTRYREPLYDELETE; + } + + @JsonIgnore + public final Log getParentLogObject() { + if (entity == null) { + return null; + } + return entity.getParentLog(); + } + + public LogInfo getParentLogInfo() { + return parentLogInfo; + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/UpdateEvent.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/UpdateEvent.java new file mode 100644 index 000000000..51797335c --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/UpdateEvent.java @@ -0,0 +1,26 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. + */ +package gov.anl.aps.logr.common.mqtt.model; + +import gov.anl.aps.logr.common.mqtt.constants.MqttTopic; +import gov.anl.aps.logr.portal.model.db.entities.CdbEntity; +import gov.anl.aps.logr.portal.model.db.entities.UserInfo; + +/** + * + * @author djarosz + */ +public class UpdateEvent extends MqttEvent { + + public UpdateEvent(CdbEntity entity, UserInfo eventTriggedByUser, String description) { + super(entity, eventTriggedByUser, description); + } + + @Override + public MqttTopic getTopic() { + return MqttTopic.UPDATE; + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/entities/LogInfo.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/entities/LogInfo.java new file mode 100644 index 000000000..5982730d8 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/entities/LogInfo.java @@ -0,0 +1,44 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. 
+ */ +package gov.anl.aps.logr.common.mqtt.model.entities; + +import com.fasterxml.jackson.annotation.JsonFormat; +import gov.anl.aps.logr.portal.model.db.entities.Log; +import java.util.Date; + +/** + * + * @author djarosz + */ +public class LogInfo { + + Log log; + + public LogInfo(Log log) { + this.log = log; + } + + public Integer getId() { + return log.getId(); + } + + public String getEnteredByUsername() { + return log.getEnteredByUsername(); + } + + public String getLastModifiedByUsername() { + return log.getLastModifiedByUsername(); + } + + @JsonFormat(shape = JsonFormat.Shape.STRING) + public Date getEnteredOnDateTime() { + return log.getEnteredOnDateTime(); + } + + @JsonFormat(shape = JsonFormat.Shape.STRING) + public Date getLastModifiedOnDateTime() { + return log.getLastModifiedOnDateTime(); + } +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/entities/LogbookDocumentInfo.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/entities/LogbookDocumentInfo.java new file mode 100644 index 000000000..d96b88920 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/entities/LogbookDocumentInfo.java @@ -0,0 +1,63 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. 
+ */ +package gov.anl.aps.logr.common.mqtt.model.entities; + +import com.fasterxml.jackson.annotation.JsonFormat; +import com.fasterxml.jackson.annotation.JsonIgnore; +import gov.anl.aps.logr.portal.model.db.entities.EntityInfo; +import gov.anl.aps.logr.portal.model.db.entities.ItemDomainLogbook; +import java.util.Date; + +/** + * + * @author djarosz + */ +public class LogbookDocumentInfo { + + ItemDomainLogbook parentLogbook; + + public LogbookDocumentInfo(ItemDomainLogbook parentLogbook) { + this.parentLogbook = parentLogbook; + } + + public Integer getId() { + return parentLogbook.getId(); + } + + public String getName() { + return parentLogbook.getName(); + } + + @JsonIgnore + public EntityInfo getEntityInfo() { + return parentLogbook.getEntityInfo(); + } + + public String getCreatedByUsername() { + return getEntityInfo().getCreatedByUsername(); + } + + public String getLastModifiedByUsername() { + return getEntityInfo().getLastModifiedByUsername(); + } + + public String getOwnerUsername() { + return getEntityInfo().getOwnerUsername(); + } + + public String getOwnerUserGroupName() { + return getEntityInfo().getOwnerUserGroupName(); + } + + @JsonFormat(shape = JsonFormat.Shape.STRING) + public Date getEnteredOnDateTime() { + return getEntityInfo().getCreatedOnDateTime(); + } + + @JsonFormat(shape = JsonFormat.Shape.STRING) + public Date getLastModifiedOnDateTime() { + return getEntityInfo().getLastModifiedOnDateTime(); + } +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/entities/LogbookInfo.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/entities/LogbookInfo.java new file mode 100644 index 000000000..35fa5ea4d --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/entities/LogbookInfo.java @@ -0,0 +1,33 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. 
+ */ +package gov.anl.aps.logr.common.mqtt.model.entities; + +import gov.anl.aps.logr.portal.model.db.entities.EntityType; + +/** + * + * @author djarosz + */ +public class LogbookInfo { + + EntityType logbook; + + public LogbookInfo(EntityType logbook) { + this.logbook = logbook; + } + + public Integer getId() { + return this.logbook.getId(); + } + + public String getName() { + return this.logbook.getName(); + } + + public String getDisplayName() { + return this.logbook.getDisplayName(); + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/ItemDomainLogbookController.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/ItemDomainLogbookController.java index a02591f07..838b82ac7 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/ItemDomainLogbookController.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/ItemDomainLogbookController.java @@ -16,6 +16,7 @@ import gov.anl.aps.logr.portal.controllers.utilities.EntityInfoControllerUtility; import gov.anl.aps.logr.portal.controllers.utilities.EntityTypeControllerUtility; import gov.anl.aps.logr.portal.controllers.utilities.ItemDomainLogbookControllerUtility; +import gov.anl.aps.logr.portal.controllers.utilities.LogReactionControllerUtility; import gov.anl.aps.logr.portal.controllers.utilities.PropertyTypeControllerUtility; import gov.anl.aps.logr.portal.controllers.utilities.SettingTypeControllerUtility; import gov.anl.aps.logr.portal.model.ItemDomainLogbookLazyDataModel; @@ -56,6 +57,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.logging.Level; import java.util.regex.Pattern; import javax.ejb.EJB; import javax.enterprise.context.SessionScoped; @@ -121,7 +123,7 @@ public class ItemDomainLogbookController extends ItemController private EntityInfoControllerUtility entityInfoControllerUtility; + private LogReactionControllerUtility logReactionControllerUtility; private 
static final String OPS_ENTITY_TYPE_NAME = "ops"; @@ -302,6 +305,13 @@ private EntityInfoControllerUtility getEntityInfoControllerUtility() { return entityInfoControllerUtility; } + public LogReactionControllerUtility getLogReactionControllerUtility() { + if (logReactionControllerUtility == null) { + logReactionControllerUtility = new LogReactionControllerUtility(); + } + return logReactionControllerUtility; + } + @Override public String getDefaultDomainDerivedFromDomainName() { throw new UnsupportedOperationException("Not supported yet."); // Generated from nbfs://nbhost/SystemFileSystem/Templates/Classes/Code/GeneratedMethodBody @@ -598,11 +608,16 @@ private void updateModifiedDateForCurrent() { public void destroyLogEntry(Log entry) { if (isSaveLogLockoutsForCurrent(entry)) { - LogController instance = LogController.getInstance(); - instance.destroy(entry); + ItemDomainLogbookControllerUtility utility = getControllerUtility(); + UserInfo user = SessionUtility.getUser(); + try { + utility.destroyLogEntry(entry, user); + } catch (CdbException ex) { + logger.error(ex); + SessionUtility.addErrorMessage("Error", ex.getErrorMessage()); + } + updateModifiedDateForCurrent(); } - - updateModifiedDateForCurrent(); } public String getAddedReactionsString(Log entry) { @@ -646,32 +661,14 @@ public List getGroupedReactions(Log entry) { } public void toggleReaction(Log entry, Reaction reaction) { - // Fetch the latest version - entry = logFacade.find(entry.getId()); + LogReactionControllerUtility utility = getLogReactionControllerUtility(); UserInfo user = SessionUtility.getUser(); - List logReactionList = entry.getLogReactionList(); - boolean add = true; - - // Check if need to remove log reaction. 
- for (LogReaction lr : logReactionList) { - UserInfo userId = lr.getUserInfo(); - - if (user.equals(userId)) { - Reaction existingReaction = lr.getReaction(); - - if (existingReaction.equals(reaction)) { - add = false; - logReactionList.remove(lr); - logReactionFacade.remove(lr); - break; - } - } - } - - if (add) { - LogReaction lr = new LogReaction(entry.getId(), reaction.getId(), user.getId()); - logReactionFacade.create(lr); + try { + utility.toggleReaction(entry, reaction, user); + } catch (CdbException ex) { + logger.error(ex); + SessionUtility.addErrorMessage("Error", ex.getMessage()); } // No need to scroll to any log entry. Ajax event. @@ -682,9 +679,10 @@ public void toggleReaction(Log entry, Reaction reaction) { @Override public String saveLogList() { Log newLogEdit = getNewLogEdit(); + Log savedLogEntry = null; if (newLogEdit.getId() != null) { // Perform validation - Log savedLogEntry = logFacade.find(newLogEdit.getId()); + savedLogEntry = logFacade.find(newLogEdit.getId()); if (savedLogEntry == null) { handleDeletedLogEntryDuringSync(newLogEdit); @@ -704,8 +702,19 @@ public String saveLogList() { } } - LogController logController = LogController.getInstance(); - logController.saveLogEntry(newLogEdit); + UserInfo userInfo = SessionUtility.getUser(); + + try { + controllerUtility.saveLog(newLogEdit, userInfo, savedLogEntry); + } catch (CdbException ex) { + String persitanceErrorMessage = newLogEdit.getPersitanceErrorMessage(); + SessionUtility.addErrorMessage("Error", persitanceErrorMessage); + return null; + } catch (RuntimeException ex) { + String persitanceErrorMessage = newLogEdit.getPersitanceErrorMessage(); + SessionUtility.addErrorMessage("Error", persitanceErrorMessage); + return null; + } lastLog = newLogEdit; if (newLogEdit.getId() == null) { @@ -1131,7 +1140,7 @@ public void performEntitySearch(String searchString, boolean caseInsensitive) { logResults.add(searchResult); } - } + } public String getSearchOpts() { if (searchOpts == null) { @@ 
-1165,24 +1174,24 @@ public String getSearchOpts() { return searchOpts; } - public void processSearchRequestParams() { + public void processSearchRequestParams() { String entityTypeIdList = SessionUtility.getRequestParameterValue(SEARCH_ETL_IDS); String itemTypeIdList = SessionUtility.getRequestParameterValue(SEARCH_ITL_IDS); - String userIdList = SessionUtility.getRequestParameterValue(SEARCH_USR_IDS); + String userIdList = SessionUtility.getRequestParameterValue(SEARCH_USR_IDS); String createStart = SessionUtility.getRequestParameterValue(SEARCH_CREATE_START_DATE); String createEnd = SessionUtility.getRequestParameterValue(SEARCH_CREATE_END_DATE); String modifyStart = SessionUtility.getRequestParameterValue(SEARCH_MOD_START_DATE); String modifyEnd = SessionUtility.getRequestParameterValue(SEARCH_MOD_END_DATE); // If any variables have been passed in, reset all search options. - if (entityTypeIdList != null + if (entityTypeIdList != null || itemTypeIdList != null || userIdList != null || createStart != null || createEnd != null || modifyStart != null || modifyEnd != null) { - + SearchController searchCtrl = SearchController.getInstance(); SearchSettings searchSettings = searchCtrl.getSearchSettings(); searchSettings.setAdvancedSearch(true); @@ -1194,29 +1203,29 @@ public void processSearchRequestParams() { searchCreatedEndDate = null; searchModifiedStartDate = null; searchModifiedEndDate = null; - + if (entityTypeIdList != null) { String[] ids = entityTypeIdList.split(","); List selection = new ArrayList<>(); - + for (String id : ids) { selection.add(entityTypeFacade.find(Integer.valueOf(id))); } setSearchLogbookTypeList(selection); - } + } if (itemTypeIdList != null) { String[] ids = itemTypeIdList.split(","); List selection = new ArrayList<>(); - + for (String id : ids) { selection.add(itemTypeFacade.find(Integer.valueOf(id))); } - setSearchSystemList(selection); + setSearchSystemList(selection); } if (userIdList != null) { String[] ids = userIdList.split(","); 
List selection = new ArrayList<>(); - + for (String id : ids) { selection.add(userInfoFacade.find(Integer.valueOf(id))); } @@ -1232,12 +1241,12 @@ public void processSearchRequestParams() { } if (modifyStart != null) { long unixTimestamp = Long.parseLong(modifyStart); - setSearchModifiedStartDate(new Date(unixTimestamp)); + setSearchModifiedStartDate(new Date(unixTimestamp)); } if (modifyEnd != null) { long unixTimestamp = Long.parseLong(modifyEnd); setSearchModifiedEndDate(new Date(unixTimestamp)); - } + } } } diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/LogController.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/LogController.java index bebae3c62..57be8efd6 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/LogController.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/LogController.java @@ -100,25 +100,6 @@ protected LogControllerUtility createControllerUtilityInstance() { return new LogControllerUtility(); } - public void saveLogEntry(Log log) { - LogControllerUtility controllerUtility1 = getControllerUtility(); - UserInfo userInfo = SessionUtility.getUser(); - - try { - if (log.getId() != null) { - controllerUtility1.update(log, userInfo); - } else { - controllerUtility1.create(log, userInfo); - } - } catch (CdbException ex) { - String persitanceErrorMessage = log.getPersitanceErrorMessage(); - SessionUtility.addErrorMessage("Error", persitanceErrorMessage); - } catch (RuntimeException ex) { - String persitanceErrorMessage = log.getPersitanceErrorMessage(); - SessionUtility.addErrorMessage("Error", persitanceErrorMessage); - } - } - /** * Converter class for log objects. 
*/ diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/CdbEntityControllerUtility.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/CdbEntityControllerUtility.java index 14bd21d11..8f8250e71 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/CdbEntityControllerUtility.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/CdbEntityControllerUtility.java @@ -4,7 +4,14 @@ */ package gov.anl.aps.logr.portal.controllers.utilities; +import com.fasterxml.jackson.core.JsonProcessingException; +import fish.payara.cloud.connectors.mqtt.api.MQTTConnection; +import fish.payara.cloud.connectors.mqtt.api.MQTTConnectionFactory; import gov.anl.aps.logr.common.exceptions.CdbException; +import gov.anl.aps.logr.common.mqtt.model.AddEvent; +import gov.anl.aps.logr.common.mqtt.model.DeleteEvent; +import gov.anl.aps.logr.common.mqtt.model.UpdateEvent; +import gov.anl.aps.logr.common.mqtt.model.MqttEvent; import gov.anl.aps.logr.common.utilities.StringUtility; import gov.anl.aps.logr.portal.constants.SystemLogLevel; import gov.anl.aps.logr.portal.model.db.beans.CdbEntityFacade; @@ -13,6 +20,7 @@ import gov.anl.aps.logr.portal.model.db.entities.PropertyValue; import gov.anl.aps.logr.portal.model.db.entities.UserInfo; import gov.anl.aps.logr.portal.utilities.SearchResult; +import gov.anl.aps.logr.portal.utilities.SessionUtility; import java.util.Date; import java.util.LinkedList; import java.util.List; @@ -23,43 +31,87 @@ import org.apache.logging.log4j.Logger; /** - * Controller utility provides unified functionality for managing entities - * to be used from view controllers as well as API endpoints. + * Controller utility provides unified functionality for managing entities to be + * used from view controllers as well as API endpoints. * * @author darek - * @param Database mapped class of the entity. 
- * @param Database facade provides communication to database. + * @param Database mapped class of the entity. + * @param Database facade provides communication to database. */ public abstract class CdbEntityControllerUtility> { - + private static final Logger logger = LogManager.getLogger(CdbEntityControllerUtility.class.getName()); - - LogControllerUtility logControllerUtility; - + + protected void publishMqttEvent(MqttEvent event) { + MQTTConnectionFactory mqttFactory = SessionUtility.fetchMQTTConnectionFactory(); + String jsonMessage; + + try { + jsonMessage = event.toJson(); + } catch (JsonProcessingException ex) { + logger.error(ex); + return; + } + + if (mqttFactory == null) { + logger.warn("MQTT not configured. Skipping event: " + jsonMessage); + return; + } + MQTTConnection connection = mqttFactory.getConnection(); + try { + connection.publish(event.getTopic().getValue(), jsonMessage.getBytes(), 0, false); + CdbEntity entity = event.getEntity(); + + List actionEvents = entity.getActionEvents(); + _publishActionEvents(connection, actionEvents); + + connection.close(); + } catch (Exception ex) { + logger.error(ex); + } + } + + private void _publishActionEvents(MQTTConnection activeConnection, List events) { + if (events == null) { + return; + } + + for (MqttEvent event : events) { + try { + String jsonMessage = event.toJson(); + activeConnection.publish(event.getTopic().getValue(), jsonMessage.getBytes(), 0, false); + } catch (Exception ex) { + logger.error(ex); + } + + } + } + /** * Abstract method for returning entity DB facade. * * @return entity DB facade */ protected abstract FacadeType getEntityDbFacade(); - + /** * Abstract method for creating new entity instance. 
* * @return created entity instance */ - public abstract EntityType createEntityInstance(UserInfo sessionUser); - + public abstract EntityType createEntityInstance(UserInfo sessionUser); + public EntityType create(EntityType entity, UserInfo createdByUserInfo) throws CdbException, RuntimeException { - try { + try { prepareEntityInsert(entity, createdByUserInfo); getEntityDbFacade().create(entity); - + addCreatedSystemLog(entity, createdByUserInfo); entity.setPersitanceErrorMessage(null); - + publishMqttEvent(new AddEvent(entity, createdByUserInfo, "Add action completed")); + clearCaches(); - return entity; + return entity; } catch (CdbException ex) { logger.error("Could not create " + getDisplayEntityTypeName() + ": " + ex.getMessage()); addCreatedWarningSystemLog(ex, entity, createdByUserInfo); @@ -73,15 +125,18 @@ public EntityType create(EntityType entity, UserInfo createdByUserInfo) throws C throw ex; } } - + public void createList(List entities, UserInfo createdByUserInfo) throws CdbException, RuntimeException { try { for (EntityType entity : entities) { prepareEntityInsert(entity, createdByUserInfo); } - getEntityDbFacade().create(entities); - - addCdbEntitySystemLog(SystemLogLevel.entityInfo, "Created " + entities.size() + " entities.", createdByUserInfo); + getEntityDbFacade().create(entities); + + addCdbEntitySystemLog(SystemLogLevel.entityInfo, "Created " + entities.size() + " entities.", createdByUserInfo); + for (EntityType entity : entities) { + publishMqttEvent(new AddEvent(entity, createdByUserInfo, "Add action completed")); + } setPersistenceErrorMessageForList(entities, null); clearCaches(); } catch (CdbException ex) { @@ -95,59 +150,62 @@ public void createList(List entities, UserInfo createdByUserInfo) th setPersistenceErrorMessageForList(entities, ex.getMessage()); addCdbEntityWarningSystemLog("Failed to create list of entities: " + getDisplayEntityTypeName(), ex, null, createdByUserInfo); throw ex; - } + } } - + public EntityType 
update(EntityType entity, UserInfo updatedByUserInfo) throws CdbException, RuntimeException { - try { + try { logger.debug("Updating " + getDisplayEntityTypeName() + " " + getEntityInstanceName(entity)); prepareEntityUpdate(entity, updatedByUserInfo); EntityType updatedEntity = getEntityDbFacade().edit(entity); addCdbEntitySystemLog(SystemLogLevel.entityInfo, "Updated: " + entity.getSystemLogString(), updatedByUserInfo); + publishMqttEvent(new UpdateEvent(entity, updatedByUserInfo, "Update action completed")); entity.setPersitanceErrorMessage(null); - + clearCaches(); - return updatedEntity; + return updatedEntity; } catch (CdbException ex) { entity.setPersitanceErrorMessage(ex.getMessage()); addCdbEntityWarningSystemLog("Failed to update", ex, entity, updatedByUserInfo); logger.error("Could not update " + getDisplayEntityTypeName() + " " - + getEntityInstanceName(entity)+ ": " + ex.getMessage()); + + getEntityInstanceName(entity) + ": " + ex.getMessage()); throw ex; } catch (RuntimeException ex) { - Throwable t = ExceptionUtils.getRootCause(ex); + Throwable t = ExceptionUtils.getRootCause(ex); logger.error("Could not update " + getDisplayEntityTypeName() + " " - + getEntityInstanceName(entity)+ ": " + t.getMessage()); + + getEntityInstanceName(entity) + ": " + t.getMessage()); addCdbEntityWarningSystemLog("Failed to update", ex, entity, updatedByUserInfo); entity.setPersitanceErrorMessage(t.getMessage()); throw ex; - } + } } - + public EntityType updateOnRemoval(EntityType entity, UserInfo updatedByUserInfo) throws CdbException, RuntimeException { try { logger.debug("Updating " + getDisplayEntityTypeName() + " " + getEntityInstanceName(entity)); prepareEntityUpdateOnRemoval(entity); EntityType updatedEntity = getEntityDbFacade().edit(entity); clearCaches(); - - return updatedEntity; + + publishMqttEvent(new UpdateEvent(entity, updatedByUserInfo, "Update on removal action completed")); + + return updatedEntity; } catch (CdbException ex) { 
entity.setPersitanceErrorMessage(ex.getMessage()); addCdbEntityWarningSystemLog("Failed to update", ex, entity, updatedByUserInfo); logger.error("Could not update " + getDisplayEntityTypeName() + " " - + getEntityInstanceName(entity)+ ": " + ex.getMessage()); + + getEntityInstanceName(entity) + ": " + ex.getMessage()); throw ex; } catch (RuntimeException ex) { - Throwable t = ExceptionUtils.getRootCause(ex); + Throwable t = ExceptionUtils.getRootCause(ex); logger.error("Could not update " + getDisplayEntityTypeName() + " " - + getEntityInstanceName(entity)+ ": " + t.getMessage()); + + getEntityInstanceName(entity) + ": " + t.getMessage()); addCdbEntityWarningSystemLog("Failed to update", ex, entity, updatedByUserInfo); entity.setPersitanceErrorMessage(t.getMessage()); throw ex; - } + } } - + public void updateList(List entities, UserInfo updatedByUserInfo) throws CdbException, RuntimeException { try { for (EntityType entity : entities) { @@ -155,9 +213,10 @@ public void updateList(List entities, UserInfo updatedByUserInfo) th prepareEntityUpdate(entity, updatedByUserInfo); } getEntityDbFacade().edit(entities); - for (EntityType entity : entities) { + for (EntityType entity : entities) { entity.setPersitanceErrorMessage(null); addCdbEntitySystemLog(SystemLogLevel.entityInfo, "Updated: " + entity.getSystemLogString(), updatedByUserInfo); + publishMqttEvent(new UpdateEvent(entity, updatedByUserInfo, "Update action completed")); } clearCaches(); } catch (CdbException ex) { @@ -168,35 +227,37 @@ public void updateList(List entities, UserInfo updatedByUserInfo) th } catch (RuntimeException ex) { Throwable t = ExceptionUtils.getRootCause(ex); logger.error("Could not update list of " + getDisplayEntityTypeName() + ": " + t.getMessage()); - addCdbEntityWarningSystemLog("Failed to update list of " + getDisplayEntityTypeName(), ex, null, updatedByUserInfo); + addCdbEntityWarningSystemLog("Failed to update list of " + getDisplayEntityTypeName(), ex, null, updatedByUserInfo); 
setPersistenceErrorMessageForList(entities, t.getMessage()); throw ex; - } + } } - + public void destroy(EntityType entity, UserInfo destroyedByUserInfo) throws CdbException, RuntimeException { try { prepareEntityDestroy(entity, destroyedByUserInfo); getEntityDbFacade().remove(entity); - - addCdbEntitySystemLog(SystemLogLevel.entityInfo, "Deleted: " + entity.getSystemLogString(), destroyedByUserInfo); + + addCdbEntitySystemLog(SystemLogLevel.entityInfo, "Deleted: " + entity.getSystemLogString(), destroyedByUserInfo); + publishMqttEvent(new DeleteEvent(entity, destroyedByUserInfo, "Delete action completed")); + clearCaches(); } catch (CdbException ex) { entity.setPersitanceErrorMessage(ex.getMessage()); addCdbEntityWarningSystemLog("Failed to destroy", ex, entity, destroyedByUserInfo); logger.error("Could not destroy " + getDisplayEntityTypeName() + " " - + getEntityInstanceName(entity)+ ": " + ex.getMessage()); + + getEntityInstanceName(entity) + ": " + ex.getMessage()); throw ex; } catch (RuntimeException ex) { - Throwable t = ExceptionUtils.getRootCause(ex); + Throwable t = ExceptionUtils.getRootCause(ex); logger.error("Could not destroy " + getDisplayEntityTypeName() + " " - + getEntityInstanceName(entity)+ ": " + t.getMessage()); + + getEntityInstanceName(entity) + ": " + t.getMessage()); addCdbEntityWarningSystemLog("Failed to destroy", ex, entity, destroyedByUserInfo); entity.setPersitanceErrorMessage(t.getMessage()); throw ex; - } + } } - + public void destroyList( List entities, EntityType updateEntity, UserInfo destroyedByUserInfo) @@ -216,6 +277,9 @@ public void destroyList( getEntityDbFacade().remove(entities, updateEntity); addCdbEntitySystemLog(SystemLogLevel.entityInfo, "Deleted: " + entities.size() + " entities.", destroyedByUserInfo); + for (EntityType entity : entities) { + publishMqttEvent(new DeleteEvent(entity, destroyedByUserInfo, "Delete action completed")); + } setPersistenceErrorMessageForList(entities, null); clearCaches(); } catch 
(CdbException ex) { @@ -228,15 +292,15 @@ public void destroyList( setPersistenceErrorMessageForList(entities, ex.getMessage()); addCdbEntityWarningSystemLog("Failed to delete list of " + getDisplayEntityTypeName(), ex, updateEntity, destroyedByUserInfo); throw ex; - } + } } - + /** - * On database operation clear cache of related cached entity when needed. + * On database operation clear cache of related cached entity when needed. */ - protected void clearCaches() { + protected void clearCaches() { } - + /** * Find entity instance by id. * @@ -246,46 +310,46 @@ protected void clearCaches() { public EntityType findById(Integer id) { return getEntityDbFacade().find(id); } - + /** - * Used by import framework. Looks up entity by path. Default implementation - * raises exception. Subclasses should override to provide support for lookup - * by path. + * Used by import framework. Looks up entity by path. Default implementation + * raises exception. Subclasses should override to provide support for + * lookup by path. 
*/ public EntityType findByPath(String path) throws CdbException { throw new CdbException("controller utility does not support lookup by path"); } - + public String getEntityInstanceName(EntityType entity) { if (entity != null) { - return entity.toString(); + return entity.toString(); } - return ""; - } - - public abstract String getEntityTypeName(); - + return ""; + } + + public abstract String getEntityTypeName(); + public String getDisplayEntityTypeName() { String entityTypeName = getEntityTypeName(); - - entityTypeName = entityTypeName.substring(0, 1).toUpperCase() + entityTypeName.substring(1); - - String displayEntityTypeName = ""; - - int prevEnd = 0; + + entityTypeName = entityTypeName.substring(0, 1).toUpperCase() + entityTypeName.substring(1); + + String displayEntityTypeName = ""; + + int prevEnd = 0; for (int i = 1; i < entityTypeName.length(); i++) { - Character c = entityTypeName.charAt(i); + Character c = entityTypeName.charAt(i); if (Character.isUpperCase(c)) { - displayEntityTypeName += entityTypeName.substring(prevEnd, i) + " "; - prevEnd = i; + displayEntityTypeName += entityTypeName.substring(prevEnd, i) + " "; + prevEnd = i; } } - - displayEntityTypeName += entityTypeName.substring(prevEnd); - - return displayEntityTypeName; + + displayEntityTypeName += entityTypeName.substring(prevEnd); + + return displayEntityTypeName; } - + /** * Prepare entity insert. * @@ -294,9 +358,9 @@ public String getDisplayEntityTypeName() { * @param entity entity instance * @throws CdbException in case of any errors */ - protected void prepareEntityInsert(EntityType entity, UserInfo userInfo) throws CdbException { + protected void prepareEntityInsert(EntityType entity, UserInfo userInfo) throws CdbException { } - + /** * Prepare entity update. 
* @@ -305,24 +369,24 @@ protected void prepareEntityInsert(EntityType entity, UserInfo userInfo) throws * @param entity entity instance * @throws CdbException in case of any errors */ - protected void prepareEntityUpdate(EntityType entity, UserInfo updatedByUser) throws CdbException { + protected void prepareEntityUpdate(EntityType entity, UserInfo updatedByUser) throws CdbException { } - + protected void prepareEntityUpdateOnRemoval(EntityType entity) throws CdbException { } - - protected void prepareEntityDestroy(EntityType entity, UserInfo userInfo) throws CdbException { + + protected void prepareEntityDestroy(EntityType entity, UserInfo userInfo) throws CdbException { } - + protected void addCreatedSystemLog(EntityType entity, UserInfo createdByUserInfo) throws CdbException { - String message = "Created: " + entity.getSystemLogString(); - addCdbEntitySystemLog(SystemLogLevel.entityInfo, message, createdByUserInfo); + String message = "Created: " + entity.getSystemLogString(); + addCdbEntitySystemLog(SystemLogLevel.entityInfo, message, createdByUserInfo); } - + protected void addCreatedWarningSystemLog(Exception exception, EntityType entity, UserInfo createdByUserInfo) throws CdbException { addCdbEntityWarningSystemLog("Failed to create", exception, entity, createdByUserInfo); } - + /** * Allows the controller to quickly add a warning log entry while * automatically appending appropriate info. @@ -342,8 +406,8 @@ protected void addCdbEntityWarningSystemLog(String warningMessage, Exception exc addCdbEntitySystemLog(SystemLogLevel.entityWarning, warningMessage, sessionUser); } - - /** + + /** * Allows the controller to quickly add a log entry to system logs with * current session user stamp. 
* @@ -351,7 +415,7 @@ protected void addCdbEntityWarningSystemLog(String warningMessage, Exception exc * @param message * @param sessionUser */ - protected void addCdbEntitySystemLog(SystemLogLevel logLevel, String message, UserInfo sessionUser) throws CdbException { + protected void addCdbEntitySystemLog(SystemLogLevel logLevel, String message, UserInfo sessionUser) throws CdbException { if (sessionUser != null) { String username = sessionUser.getUsername(); message = "User: " + username + " | " + message; @@ -359,36 +423,36 @@ protected void addCdbEntitySystemLog(SystemLogLevel logLevel, String message, Us LogControllerUtility logControllerUtility = LogControllerUtility.getSystemLogInstance(); logControllerUtility.addSystemLog(logLevel, message); } - + protected void setPersistenceErrorMessageForList(List entities, String msg) { for (EntityType entity : entities) { entity.setPersitanceErrorMessage(msg); } - } + } public List getAllEntities() { return getEntityDbFacade().findAll(); } - + public List searchEntities(String searchString, Map searchOpts) { - return searchEntities(searchString); + return searchEntities(searchString); } - + public List searchEntities(String searchString) { - return getEntityDbFacade().searchEntities(searchString); + return getEntityDbFacade().searchEntities(searchString); } - + public String generatePatternString(String searchString) { - String patternString; - if (searchString.contains("?") || searchString.contains("*")) { - patternString = searchString.replace("*", ".*"); + String patternString; + if (searchString.contains("?") || searchString.contains("*")) { + patternString = searchString.replace("*", ".*"); patternString = patternString.replace("?", "."); } else { - patternString = Pattern.quote(searchString); + patternString = Pattern.quote(searchString); } - return patternString; + return patternString; } - + public Pattern getSearchPattern(String patternString, boolean caseInsensitive) { Pattern searchPattern; if 
(caseInsensitive) { @@ -396,56 +460,55 @@ public Pattern getSearchPattern(String patternString, boolean caseInsensitive) { } else { searchPattern = Pattern.compile(patternString); } - - return searchPattern; + + return searchPattern; } - + public LinkedList performEntitySearch(String searchString, boolean caseInsensitive) { - return performEntitySearch(searchString, null, caseInsensitive); + return performEntitySearch(searchString, null, caseInsensitive); } - + /** * Search all entities for a given string. * * @param searchString search string * @param caseInsensitive use case insensitive search - * @return + * @return */ public LinkedList performEntitySearch(String searchString, Map searchOpts, boolean caseInsensitive) { - LinkedList searchResultList = new LinkedList<>(); - if (searchString == null || searchString.isEmpty()) { + LinkedList searchResultList = new LinkedList<>(); + if (searchString == null || searchString.isEmpty()) { return searchResultList; } - + // Start new search String patternString = generatePatternString(searchString); - Pattern searchPattern = getSearchPattern(patternString, caseInsensitive); - - + Pattern searchPattern = getSearchPattern(patternString, caseInsensitive); + List allObjectList = searchEntities(searchString, searchOpts); for (EntityType entity : allObjectList) { try { SearchResult searchResult = entity.createSearchResultInfo(searchPattern); - if (!searchResult.isEmpty()) { - searchResultList.add(searchResult); + if (!searchResult.isEmpty()) { + searchResultList.add(searchResult); } } catch (RuntimeException ex) { logger.warn("Could not search entity " + entity.toString() + " (Error: " + ex.toString() + ")"); } } - - return searchResultList; + + return searchResultList; } - + public PropertyValue preparePropertyTypeValueAdd(EntityType cdbDomainEntity, PropertyType propertyType) { return preparePropertyTypeValueAdd(cdbDomainEntity, propertyType, propertyType.getDefaultValue(), null); } public PropertyValue 
preparePropertyTypeValueAdd(EntityType cdbDomainEntity, - PropertyType propertyType, String propertyValueString, String tag) { + PropertyType propertyType, String propertyValueString, String tag) { // Implement in controller with entity info. - return null; + return null; } public PropertyValue preparePropertyTypeValueAdd(EntityType cdbEntity, @@ -474,7 +537,7 @@ public PropertyValue preparePropertyTypeValueAdd(EntityType cdbEntity, } public PropertyType prepareCableEndDesignationPropertyType() { - + PropertyTypeControllerUtility propertyTypeControllerUtility = new PropertyTypeControllerUtility(); PropertyType propertyType = propertyTypeControllerUtility.createEntityInstance(null); @@ -488,7 +551,7 @@ public PropertyType prepareCableEndDesignationPropertyType() { logger.error(ex.getMessage()); return null; } - + return propertyType; } @@ -510,5 +573,5 @@ public String getDisplayItemConnectorsLabel() { String labelString = StringUtility.capitalize(getDisplayItemConnectorName()); return labelString + "s"; } - + } diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/ItemDomainLogbookControllerUtility.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/ItemDomainLogbookControllerUtility.java index 7dd0cd904..cfe7f12d3 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/ItemDomainLogbookControllerUtility.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/ItemDomainLogbookControllerUtility.java @@ -6,6 +6,9 @@ import gov.anl.aps.logr.common.exceptions.CdbException; import gov.anl.aps.logr.common.exceptions.InvalidObjectState; +import gov.anl.aps.logr.common.mqtt.constants.ChangeType; +import gov.anl.aps.logr.common.mqtt.model.LogEntryEvent; +import gov.anl.aps.logr.common.mqtt.model.ReplyLogEntryEvent; import gov.anl.aps.logr.common.utilities.CollectionUtility; import gov.anl.aps.logr.portal.constants.ItemDomainName; import 
gov.anl.aps.logr.portal.constants.LogDocumentSettings; @@ -337,9 +340,9 @@ public LinkedList performEntitySearch(String searchString, Map sea // Add search opts to match description. for (SearchResult result : searchResultList) { - addCommonLogEntryDocumentMatches(result, searchEntityTypeList, searchItemTypeList); - - ItemDomainLogbook resultItem = (ItemDomainLogbook) result.getCdbEntity(); + addCommonLogEntryDocumentMatches(result, searchEntityTypeList, searchItemTypeList); + + ItemDomainLogbook resultItem = (ItemDomainLogbook) result.getCdbEntity(); EntityInfo entityInfo = resultItem.getEntityInfo(); if (searchUserList != null && !searchUserList.isEmpty()) { @@ -422,4 +425,111 @@ public static void copyLogs(ItemDomainLogbook oldLogDoc, ItemDomainLogbook newLo } } + public static ItemDomainLogbook getParentLogDocument(Log logEntry) { + Log parentLog = logEntry.getParentLog(); + + if (parentLog != null) { + // Parent log has association to the log document. + logEntry = parentLog; + } + List itemElementList = logEntry.getItemElementList(); + + if (itemElementList != null && itemElementList.size() == 1) { + // This should always happen. + // No exception however since this is required for notification framework not core functionality. 
+ ItemElement parentElement = itemElementList.get(0); + Item parentItem = parentElement.getParentItem(); + if (parentItem instanceof ItemDomainLogbook) { + return (ItemDomainLogbook) parentItem; + } + } + return null; + + } + + private String getLogDiffString(Log originalLog, Log updatedLog) { + String originalText; + String updatedText = updatedLog.getText(); + + if (originalLog != null) { + originalText = originalLog.getText(); + } else { + return updatedText; + } + + StringBuilder diffOutput = new StringBuilder(); + String[] originalLines = originalText.split("\n"); + String[] updatedLines = updatedText.split("\n"); + + int maxLines = Math.max(originalLines.length, updatedLines.length); + + for (int i = 0; i < maxLines; i++) { + String originalLine = i < originalLines.length ? originalLines[i] : ""; + String updatedLine = i < updatedLines.length ? updatedLines[i] : ""; + + if (!originalLine.equals(updatedLine)) { + if (i < originalLines.length) { + diffOutput.append("- ").append(originalLine).append("\n"); + } + if (i < updatedLines.length) { + diffOutput.append("+ ").append(updatedLine).append("\n"); + } + } else if (i < originalLines.length) { + diffOutput.append(" ").append(originalLine).append("\n"); + } + } + return diffOutput.toString(); + + } + + public void destroyLogEntry(Log logEntity, UserInfo user) throws CdbException { + addLogEntryMqttEvent(logEntity, null, user, true); + + LogControllerUtility utility = new LogControllerUtility(); + utility.destroy(logEntity, user); + } + + private void addLogEntryMqttEvent(Log logEntity, Log originalLog, UserInfo user, boolean isDestroy) { + String logDiffString = getLogDiffString(originalLog, logEntity); + + // Avoid duplicates + logEntity.clearActionEvents(); + + ItemDomainLogbook parentLogbook = getParentLogDocument(logEntity); + + Log parentLog = logEntity.getParentLog(); + Integer id = logEntity.getId(); + ChangeType changeType; + + String description = ""; + if (isDestroy) { + description += "log entry was 
deleted"; + changeType = ChangeType.DELETE; + } else if (id == null) { + description += "log entry was added"; + changeType = ChangeType.ADD; + } else { + description += "log entry id [" + id + "] was modified"; + changeType = ChangeType.UPDATE; + } + + if (parentLog != null) { + description = "reply " + description; + + // Reply + logEntity.addActionEvent(new ReplyLogEntryEvent(parentLogbook, logEntity, user, description, logDiffString, changeType)); + } else { + logEntity.addActionEvent(new LogEntryEvent(parentLogbook, logEntity, user, description, logDiffString, changeType)); + } + } + + public Log saveLog(Log logEntity, UserInfo user, Log originalLog) throws CdbException { + LogControllerUtility utility = new LogControllerUtility(); + + addLogEntryMqttEvent(logEntity, originalLog, user, false); + + // Add a generic log entry. + return utility.saveLogEntry(logEntity, user); + } + } diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/LogControllerUtility.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/LogControllerUtility.java index f6a7b5ad0..9484889e4 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/LogControllerUtility.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/LogControllerUtility.java @@ -5,10 +5,12 @@ package gov.anl.aps.logr.portal.controllers.utilities; import gov.anl.aps.logr.common.exceptions.CdbException; +import gov.anl.aps.logr.common.mqtt.model.MqttEvent; import gov.anl.aps.logr.portal.constants.SystemLogLevel; import gov.anl.aps.logr.portal.model.db.beans.LogFacade; import gov.anl.aps.logr.portal.model.db.beans.LogLevelFacade; import gov.anl.aps.logr.portal.model.db.beans.UserInfoFacade; +import gov.anl.aps.logr.portal.model.db.entities.CdbEntity; import gov.anl.aps.logr.portal.model.db.entities.Log; import gov.anl.aps.logr.portal.model.db.entities.LogLevel; import 
gov.anl.aps.logr.portal.model.db.entities.UserInfo; @@ -19,47 +21,47 @@ * * @author darek */ -public class LogControllerUtility extends CdbEntityControllerUtility{ - +public class LogControllerUtility extends CdbEntityControllerUtility { + @EJB - private LogFacade logFacade; - + private LogFacade logFacade; + @EJB private LogLevelFacade logLevelFacade; @EJB private UserInfoFacade userInfoFacade; - + private final String DEFAULT_SYSTEM_ADMIN_USERNAME = "logr"; - + private static LogControllerUtility systemLogInstance; public LogControllerUtility() { if (logFacade == null) { - logFacade = LogFacade.getInstance(); + logFacade = LogFacade.getInstance(); } if (logLevelFacade == null) { logLevelFacade = LogLevelFacade.getInstance(); } if (userInfoFacade == null) { - userInfoFacade = UserInfoFacade.getInstance(); + userInfoFacade = UserInfoFacade.getInstance(); } } @Override - protected LogFacade getEntityDbFacade() { - return logFacade; + protected LogFacade getEntityDbFacade() { + return logFacade; } - + public static synchronized LogControllerUtility getSystemLogInstance() { if (systemLogInstance == null) { systemLogInstance = new LogControllerUtility(); } return systemLogInstance; } - + public void addSystemLog(SystemLogLevel systemlogLevel, String logMessage) throws CdbException { - String logLevelName = systemlogLevel.toString(); + String logLevelName = systemlogLevel.toString(); UserInfo enteredByUser = userInfoFacade.findByUsername(DEFAULT_SYSTEM_ADMIN_USERNAME); if (enteredByUser == null) { throw new CdbException("User '" + DEFAULT_SYSTEM_ADMIN_USERNAME + "' needs to be in the system. 
Please notify system administrator."); @@ -79,28 +81,29 @@ public void addSystemLog(SystemLogLevel systemlogLevel, String logMessage) throw newSystemLog.setText(logMessage); newSystemLog.setEnteredOnDateTime(enteredOnDateTime); newSystemLog.setEnteredByUser(enteredByUser); - + newSystemLog.markAsSystemLog(); + create(newSystemLog, enteredByUser); } @Override protected void prepareEntityUpdate(Log entity, UserInfo updatedByUser) throws CdbException { super.prepareEntityUpdate(entity, updatedByUser); - + Date lastModifiedOnDate = new Date(); entity.setLastModifiedOnDateTime(lastModifiedOnDate); - entity.setLastModifiedByUser(updatedByUser); + entity.setLastModifiedByUser(updatedByUser); } @Override protected void prepareEntityInsert(Log entity, UserInfo userInfo) throws CdbException { super.prepareEntityInsert(entity, userInfo); - + Date enteredOnDateTime = entity.getEnteredOnDateTime(); entity.setLastModifiedOnDateTime(enteredOnDateTime); entity.setLastModifiedByUser(userInfo); } - + protected Log createEntityInstance() { return new Log(); } @@ -110,7 +113,7 @@ protected void addCdbEntitySystemLog(SystemLogLevel logLevel, String message, Us // No need to create system logs. return; } - + @Override protected void addCreatedSystemLog(Log entity, UserInfo createdByUserInfo) { // No need to create a system log when creating a log. @@ -122,7 +125,7 @@ protected void addCreatedWarningSystemLog(Exception exception, Log entity, UserI // No need to create a system log when creating a log. 
return; } - + @Override public String getEntityTypeName() { return "log"; @@ -130,8 +133,26 @@ public String getEntityTypeName() { @Override public Log createEntityInstance(UserInfo sessionUser) { - return new Log(); + return new Log(); + } + + @Override + protected void publishMqttEvent(MqttEvent event) { + CdbEntity entity = event.getEntity(); + if (entity instanceof Log) { + if (((Log) entity).isSystemLog()) { + return; + } + } + super.publishMqttEvent(event); + } + + public Log saveLogEntry(Log log, UserInfo userInfo) throws CdbException { + if (log.getId() != null) { + return update(log, userInfo); + } else { + return create(log, userInfo); + } } - } diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/LogReactionControllerUtility.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/LogReactionControllerUtility.java new file mode 100644 index 000000000..c4351ffba --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/LogReactionControllerUtility.java @@ -0,0 +1,96 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. 
+ */ +package gov.anl.aps.logr.portal.controllers.utilities; + +import gov.anl.aps.logr.common.exceptions.CdbException; +import gov.anl.aps.logr.common.mqtt.model.LogReactionEvent; +import gov.anl.aps.logr.portal.model.db.beans.LogFacade; +import gov.anl.aps.logr.portal.model.db.beans.LogReactionFacade; +import gov.anl.aps.logr.portal.model.db.entities.ItemDomainLogbook; +import gov.anl.aps.logr.portal.model.db.entities.Log; +import gov.anl.aps.logr.portal.model.db.entities.LogReaction; +import gov.anl.aps.logr.portal.model.db.entities.Reaction; +import gov.anl.aps.logr.portal.model.db.entities.UserInfo; +import java.util.List; + +/** + * + * @author djarosz + */ +public class LogReactionControllerUtility extends CdbEntityControllerUtility { + + LogReactionFacade logReactionFacade; + LogFacade logFacade; + + public LogReactionControllerUtility() { + logReactionFacade = LogReactionFacade.getInstance(); + logFacade = LogFacade.getInstance(); + } + + @Override + protected LogReactionFacade getEntityDbFacade() { + return logReactionFacade; + } + + @Override + public LogReaction createEntityInstance(UserInfo sessionUser) { + LogReaction lr = new LogReaction(); + lr.setUserInfo(sessionUser); + return lr; + } + + @Override + public String getEntityTypeName() { + return "Log Reaction"; + } + + public void toggleReaction(Log entry, Reaction reaction, UserInfo user) throws CdbException { + // Fetch the latest version + entry = logFacade.find(entry.getId()); + + List logReactionList = entry.getLogReactionList(); + LogReaction dbReaction = null; + + // Check if need to remove log reaction. 
+ for (LogReaction lr : logReactionList) { + UserInfo userId = lr.getUserInfo(); + + if (user.equals(userId)) { + Reaction existingReaction = lr.getReaction(); + + if (existingReaction.equals(reaction)) { + dbReaction = lr; + break; + } + } + } + + ItemDomainLogbook parentLogDoc = ItemDomainLogbookControllerUtility.getParentLogDocument(entry); + + if (dbReaction != null) { + dbReaction.addActionEvent(new LogReactionEvent(dbReaction, + entry, + parentLogDoc, + user, + "User removed a reaction", + true)); + logReactionList.remove(dbReaction); + destroy(dbReaction, user); + } else { + LogReaction lr = createEntityInstance(user); + lr.setLog(entry); + lr.setReaction(reaction); + lr.addActionEvent(new LogReactionEvent(lr, + entry, + parentLogDoc, + user, + "User added a reaction", + false)); + create(lr, user); + } + + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/beans/CdbEntityFacade.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/beans/CdbEntityFacade.java index dca9f86a1..449d4950c 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/beans/CdbEntityFacade.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/beans/CdbEntityFacade.java @@ -42,7 +42,8 @@ public T edit(T entity) { // delete list of connectors, if any if (entity instanceof CdbEntity) { CdbEntity cdbEntity = (CdbEntity) entity; - for (ItemConnector connector : cdbEntity.getDeletedConnectorList()) { + List deletedConnectorList = cdbEntity.getDeletedConnectorList(); + for (ItemConnector connector : deletedConnectorList) { ItemConnectorFacade.getInstance().remove(connector); } cdbEntity.clearDeletedConnectorList(); diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/CdbEntity.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/CdbEntity.java index 628a63aaa..b23818cd3 100644 --- 
a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/CdbEntity.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/CdbEntity.java @@ -6,6 +6,7 @@ import com.fasterxml.jackson.annotation.JsonAnySetter; import com.fasterxml.jackson.annotation.JsonIgnore; +import gov.anl.aps.logr.common.mqtt.model.MqttEvent; import gov.anl.aps.logr.portal.controllers.utilities.CdbEntityControllerUtility; import gov.anl.aps.logr.portal.import_export.import_.objects.ValidInfo; import gov.anl.aps.logr.portal.model.db.beans.PropertyTypeFacade; @@ -23,8 +24,9 @@ /** * Base class for all CDB entities. + * @param describes an optional additional specific mqtt event that can be specified for the entity. */ -public class CdbEntity implements Serializable, Cloneable { +public class CdbEntity implements Serializable, Cloneable { private static final Logger LOGGER = LogManager.getLogger(CdbEntity.class.getName()); @@ -44,6 +46,8 @@ public class CdbEntity implements Serializable, Cloneable { // persistence management for associated ItemConnectors, deleted on call to edit this item in facade private transient List deletedConnectorList = null; + private transient List actionEvents; + // import wizard variables private transient boolean isValidImport = true; private transient String validStringImport; @@ -311,4 +315,20 @@ public static boolean isValidCableEndDesignation(String designation) { return list.contains(designation); } + @JsonIgnore + public List getActionEvents() { + return actionEvents; + } + + public void addActionEvent(ActionEvent event) { + if (actionEvents == null) { + actionEvents = new ArrayList<>(); + } + actionEvents.add(event); + } + + public void clearActionEvents() { + actionEvents = null; + } + } diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/Log.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/Log.java index 684f35dcc..07be06a9b 100644 --- 
a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/Log.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/Log.java @@ -6,6 +6,7 @@ import com.fasterxml.jackson.annotation.JsonFormat; import com.fasterxml.jackson.annotation.JsonIgnore; +import gov.anl.aps.logr.common.mqtt.model.LogEntryEvent; import gov.anl.aps.logr.portal.utilities.MarkdownParser; import gov.anl.aps.logr.portal.view.objects.GroupedReaction; import java.io.Serializable; @@ -50,7 +51,7 @@ @NamedQuery(name = "Log.findByEnteredOnDateTime", query = "SELECT l FROM Log l WHERE l.enteredOnDateTime = :enteredOnDateTime"), @NamedQuery(name = "Log.findByEffectiveFromDateTime", query = "SELECT l FROM Log l WHERE l.effectiveFromDateTime = :effectiveFromDateTime"), @NamedQuery(name = "Log.findByEffectiveToDateTime", query = "SELECT l FROM Log l WHERE l.effectiveToDateTime = :effectiveToDateTime")}) -public class Log extends CdbEntity implements Serializable { +public class Log extends CdbEntity implements Serializable { private static final long serialVersionUID = 1L; @Id @@ -121,6 +122,8 @@ public class Log extends CdbEntity implements Serializable { private transient String addedReactionsString; private transient List childLogListReversed = null; + + private transient boolean isSystemLog = false; public Log() { } @@ -186,6 +189,10 @@ public void setLastModifiedOnDateTime(Date lastModifiedOnDateTime) { public UserInfo getLastModifiedByUser() { return lastModifiedByUser; } + + public String getLastModifiedByUsername() { + return lastModifiedByUser.getUsername(); + } public void setLastModifiedByUser(UserInfo lastModifiedByUser) { this.lastModifiedByUser = lastModifiedByUser; @@ -383,6 +390,15 @@ public void setAddedReactionsString(String addedReactionsString) { this.addedReactionsString = addedReactionsString; } + @JsonIgnore + public boolean isSystemLog() { + return isSystemLog; + } + + public void markAsSystemLog() { + isSystemLog = true; + } + @Override 
public boolean equals(Object object) { // TODO: Warning - this method won't work in the case the id fields are not set @@ -398,7 +414,7 @@ public boolean equals(Object object) { @Override public String toString() { - return "gov.anl.aps.cdb.portal.model.db.entities.Log[ id=" + id + " ]"; + return "gov.anl.aps.cdb.portal.model.db.entities.Log[ id=" + id; } } diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/LogReaction.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/LogReaction.java index 608a32320..adc8444b2 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/LogReaction.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/LogReaction.java @@ -5,6 +5,7 @@ package gov.anl.aps.logr.portal.model.db.entities; import com.fasterxml.jackson.annotation.JsonIgnore; +import gov.anl.aps.logr.common.mqtt.model.LogReactionEvent; import java.io.Serializable; import javax.persistence.EmbeddedId; import javax.persistence.Entity; @@ -28,7 +29,7 @@ @NamedQuery(name = "LogReaction.findByLogId", query = "SELECT l FROM LogReaction l WHERE l.logReactionPK.logId = :logId"), @NamedQuery(name = "LogReaction.findByReactionId", query = "SELECT l FROM LogReaction l WHERE l.logReactionPK.reactionId = :reactionId"), @NamedQuery(name = "LogReaction.findByUserId", query = "SELECT l FROM LogReaction l WHERE l.logReactionPK.userId = :userId")}) -public class LogReaction implements Serializable { +public class LogReaction extends CdbEntity implements Serializable { private static final long serialVersionUID = 1L; @EmbeddedId @@ -44,6 +45,7 @@ public class LogReaction implements Serializable { private UserInfo userInfo; public LogReaction() { + this.logReactionPK = new LogReactionPK(); } public LogReaction(LogReactionPK logReactionPK) { @@ -63,6 +65,11 @@ public void setLogReactionPK(LogReactionPK logReactionPK) { this.logReactionPK = logReactionPK; } + @Override + public 
Object getId() { + return logReactionPK; + } + @JsonIgnore public Log getLog() { return log; @@ -70,6 +77,7 @@ public Log getLog() { public void setLog(Log log) { this.log = log; + updatePK(); } public Reaction getReaction() { @@ -78,6 +86,7 @@ public Reaction getReaction() { public void setReaction(Reaction reaction) { this.reaction = reaction; + updatePK(); } public String getUsername() { @@ -91,6 +100,22 @@ public UserInfo getUserInfo() { public void setUserInfo(UserInfo userInfo) { this.userInfo = userInfo; + updatePK(); + } + + private void updatePK() { + if (this.log != null) { + this.logReactionPK.setLogId(this.log.getId()); + } + + if (this.reaction != null) { + this.logReactionPK.setReactionId(this.reaction.getId()); + } + + if (this.userInfo != null) { + this.logReactionPK.setUserId(this.userInfo.getId()); + } + } @Override diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/Reaction.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/Reaction.java index d1ff308d6..92cc9ec41 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/Reaction.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/Reaction.java @@ -4,6 +4,7 @@ */ package gov.anl.aps.logr.portal.model.db.entities; +import com.fasterxml.jackson.annotation.JsonIgnore; import java.io.Serializable; import java.util.Collection; import javax.persistence.Basic; @@ -119,6 +120,7 @@ public void setDescription(String description) { } @XmlTransient + @JsonIgnore public Collection getLogReactionCollection() { return logReactionCollection; } diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/utilities/MarkdownParser.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/utilities/MarkdownParser.java index 5e8a81081..ff4d9526d 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/utilities/MarkdownParser.java +++ 
b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/utilities/MarkdownParser.java @@ -21,6 +21,7 @@ import com.vladsch.flexmark.util.misc.Extension; import com.vladsch.flexmark.util.sequence.BasedSequence; import com.vladsch.flexmark.util.sequence.Escaping; +import com.vladsch.flexmark.ext.tables.TablesExtension; import gov.anl.aps.logr.common.constants.CdbPropertyValue; import java.util.HashSet; @@ -79,7 +80,8 @@ public class MarkdownParser { private static MutableDataHolder options = new MutableDataSet() .set(Parser.EXTENSIONS, Arrays.asList( new Extension[]{ - LogrFlexmarkExtension.create() + LogrFlexmarkExtension.create(), + TablesExtension.create() } )); diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/utilities/SessionUtility.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/utilities/SessionUtility.java index 0a6256779..3347c6956 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/utilities/SessionUtility.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/utilities/SessionUtility.java @@ -4,16 +4,13 @@ */ package gov.anl.aps.logr.portal.utilities; +import fish.payara.cloud.connectors.mqtt.api.MQTTConnectionFactory; import gov.anl.aps.logr.portal.model.db.entities.UserInfo; import java.io.IOException; -import java.io.UnsupportedEncodingException; -import java.net.URLDecoder; -import java.net.URLEncoder; -import java.net.UnknownHostException; import java.util.HashMap; import java.util.Map; import java.util.Stack; -import java.util.logging.Level; +import javax.annotation.Resource; import javax.faces.application.FacesMessage; import javax.faces.application.NavigationHandler; import javax.faces.application.ViewHandler; @@ -47,11 +44,16 @@ public class SessionUtility { private static final String MODULE_NAME_LOOKUP = "java:module/ModuleName"; private static final String JAVA_LOOKUP_START = "java:global/"; private static String FACADE_LOOKUP_STRING_START = null; + private static final String BELY_MQTT_NAME = 
"bely/MQTT/resource"; private static final String USER_SESSION_COOKIE_KEY = "USERSESSIONID"; private static final Logger logger = LogManager.getLogger(SessionUtility.class.getName()); + // Instructs the framework of the class to resolve for the mqtt connection factory. + @Resource(lookup = BELY_MQTT_NAME) + MQTTConnectionFactory factory; + public SessionUtility() { } @@ -133,7 +135,7 @@ public static UserInfo getUser() { public static String getRemoteAddress() { HttpServletRequest request = (HttpServletRequest) FacesContext.getCurrentInstance().getExternalContext().getRequest(); - return request.getRemoteAddr(); + return request.getRemoteAddr(); } public static String getSessionCookie() { @@ -141,10 +143,10 @@ public static String getSessionCookie() { Cookie cookie = (Cookie) cookieMap.get(USER_SESSION_COOKIE_KEY); - if (cookie != null) { + if (cookie != null) { String value = cookie.getValue(); - - return value; + + return value; } return null; @@ -323,6 +325,21 @@ public static Object findBean(String beanName) { return (Object) context.getApplication().evaluateExpressionGet(context, "#{" + beanName + "}", Object.class); } + public static MQTTConnectionFactory fetchMQTTConnectionFactory() { + try { + InitialContext context = new InitialContext(); + MQTTConnectionFactory result = (MQTTConnectionFactory) context.lookup(BELY_MQTT_NAME); + + return result; + } catch (NamingException ex) { + logger.error(ex); + } catch (NoClassDefFoundError ex) { + logger.error(ex); + } + return null; + + } + public static Object findFacade(String facadeName) { try { InitialContext context = new InitialContext(); diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/rest/routes/LogbookRoute.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/rest/routes/LogbookRoute.java index 982b776e3..7dbec4057 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/rest/routes/LogbookRoute.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/rest/routes/LogbookRoute.java @@ -15,6 +15,7 
@@ import gov.anl.aps.logr.portal.controllers.utilities.LogControllerUtility; import gov.anl.aps.logr.portal.model.db.beans.DomainFacade; import gov.anl.aps.logr.portal.model.db.beans.ItemDomainLogbookFacade; +import gov.anl.aps.logr.portal.model.db.beans.LogFacade; import gov.anl.aps.logr.portal.model.db.entities.Domain; import gov.anl.aps.logr.portal.model.db.entities.EntityInfo; import gov.anl.aps.logr.portal.model.db.entities.EntityType; @@ -65,6 +66,9 @@ public class LogbookRoute extends ItemBaseRoute { @EJB ItemDomainLogbookFacade itemDomainLogbookFacade; + @EJB + LogFacade logFacade; + private Domain getLogbookDomain() { return domainFacade.find(ItemDomainName.LOGBOOK_ID); } @@ -234,8 +238,13 @@ public LogEntry addUpdateLogEntry(@RequestBody(required = true) LogEntry logEntr utility.verifySaveLogLockoutsForItem(logDocument, logEntity, user); } + Log originalLogEntry = null; + if (logId != null) { + originalLogEntry = logFacade.find(logId); + } + logEntry.updateLogPerLogEntryObject(logEntity); - logEntity = saveLog(logEntity, user); + logEntity = utility.saveLog(logEntity, user, originalLogEntry); // Update modified date. updateModifiedDateForLogDocument(logDocument, user); @@ -401,18 +410,6 @@ private void updateModifiedDateForLogDocument(ItemDomainLogbook logDocument, Use eicu.update(entityInfo, user); } - private Log saveLog(Log log, UserInfo userInfo) throws CdbException { - LogControllerUtility utility = new LogControllerUtility(); - - if (log.getId() != null) { - log = utility.update(log, userInfo); - } else { - log = utility.create(log, userInfo); - } - - return log; - } - @Override protected void verifyUserPermissionForItem(UserInfo user, Item item) throws AuthorizationError { // Permission verification should be done at the top level document only. 
diff --git a/src/java/LogrPortal/web/resources/css/logbook.css b/src/java/LogrPortal/web/resources/css/logbook.css index 4cd29136d..68638bb77 100644 --- a/src/java/LogrPortal/web/resources/css/logbook.css +++ b/src/java/LogrPortal/web/resources/css/logbook.css @@ -133,7 +133,30 @@ pre:has([class^="language-"]) { } .logEntry img { - max-width: 450px; + max-width: 450px; +} + +/* Markdown table styles */ +.logEntry table { + border-collapse: collapse; + width: 100%; + margin: 1em 0; +} + +.logEntry th, +.logEntry td { + border: 1px solid #ddd; + padding: 8px; + text-align: left; +} + +.logEntry th { + background-color: #f2f2f2; + font-weight: bold; +} + +.logEntry tr:nth-child(even) { + background-color: #f9f9f9; } .activeLogEntry { diff --git a/src/lib/mqtt-rar-0.8.0.rar b/src/lib/mqtt-rar-0.8.0.rar new file mode 100644 index 000000000..5f21fdcae Binary files /dev/null and b/src/lib/mqtt-rar-0.8.0.rar differ diff --git a/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/async-test-configuration.md b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/async-test-configuration.md new file mode 100644 index 000000000..0db9362e0 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/async-test-configuration.md @@ -0,0 +1,6 @@ +--- +globs: '["**/pytest.ini", "**/pyproject.toml", "**/test_*.py"]' +alwaysApply: false +--- + +Always include pytest-asyncio in dev dependencies and configure it properly in pytest.ini with asyncio_mode = auto and asyncio_default_fixture_loop_scope = function. Mark async test functions with @pytest.mark.asyncio decorator. 
\ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/conda-python-environment.md b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/conda-python-environment.md new file mode 100644 index 000000000..c229515b3 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/conda-python-environment.md @@ -0,0 +1,12 @@ +--- +alwaysApply: true +--- + +Always use the .conda/bin Python environment for terminal executions. When running Python-related commands: +1. ALWAYS execute commands from the project root directory to ensure relative conda paths work correctly +2. Use full paths: '.conda/bin/python' instead of 'python', '.conda/bin/pip' instead of 'pip', '.conda/bin/pytest' instead of 'pytest' +3. For make commands and build tools that invoke Python, either: + - Set PATH explicitly: 'PATH=.conda/bin:$PATH make ...' + - Or use absolute paths by prefixing with workspace path +4. Never use system Python or assume 'python' is in PATH +5. Always verify you're in the project root before executing commands \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/default-topic-pattern.md b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/default-topic-pattern.md new file mode 100644 index 000000000..e11a589ee --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/default-topic-pattern.md @@ -0,0 +1,7 @@ +--- +globs: "**/*.py" +regex: class.*MQTTHandler +alwaysApply: false +--- + +When creating MQTTHandler subclasses, do not override the topic_pattern property unless there's a specific need to limit the topics. The default pattern "bely/#" allows the handler to receive all BELY events, and the framework automatically routes events to the appropriate handler methods. Only override topic_pattern for performance optimization or when you explicitly want to limit the handler to specific topics. 
\ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/documentation-standards.md b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/documentation-standards.md new file mode 100644 index 000000000..12961e94d --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/documentation-standards.md @@ -0,0 +1,6 @@ +--- +globs: '["docs/*.md", "*.md"]' +alwaysApply: false +--- + +Keep documentation simple and practical. Use clear examples, avoid jargon, and focus on what users need to know. Each doc file should have a clear purpose: getting started, API reference, troubleshooting, etc. Include code examples that can be copy-pasted. Link between related docs. \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/email-threading-implementation.md b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/email-threading-implementation.md new file mode 100644 index 000000000..1c9fd7c5c --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/email-threading-implementation.md @@ -0,0 +1,14 @@ +--- +globs: examples/handlers/apprise_smart_notification/**/*.py +description: Guidelines for maintaining email threading functionality in the + Apprise Smart Notification Handler +alwaysApply: false +--- + +When modifying the Apprise Smart Notification Handler: +1. Email notifications must use threading headers (In-Reply-To, References, Thread-Topic) to group related messages +2. Subject lines for email notifications should follow the pattern "Re: Log: [Document Name]" with optional suffixes like "[Entry Updated]" or "[by username]" +3. Non-email notifications should use descriptive subjects with emojis +4. The EmailThreadingStrategy class should detect email URLs and only apply threading to email notifications +5. Test assertions must account for different subject formats between email and non-email notifications +6. 
Mock notification tracking functions in tests must accept an optional headers parameter for email threading support \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/pydantic-v2-standards.md b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/pydantic-v2-standards.md new file mode 100644 index 000000000..42d209369 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/pydantic-v2-standards.md @@ -0,0 +1,7 @@ +--- +globs: '["**/models.py", "**/*.py"]' +regex: BaseModel +alwaysApply: false +--- + +Always use Pydantic v2 ConfigDict instead of the deprecated Config class. Use model_config = ConfigDict(...) at the class level. For field aliases, use Field(alias="...") and set populate_by_name=True in ConfigDict to allow both field name and alias during parsing. \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/python-import-resolution-for-tests.md b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/python-import-resolution-for-tests.md new file mode 100644 index 000000000..7a30a7d3a --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/python-import-resolution-for-tests.md @@ -0,0 +1,22 @@ +--- +globs: "**/*.py" +description: Apply when fixing Python import errors in test files that are in + subdirectories trying to import from parent directories +alwaysApply: false +--- + +When Python test files need to import modules from parent directories, use try/except blocks in the module being imported to handle both relative imports (when used as a package) and absolute imports (when imported directly from tests). 
Example: +```python +try: + # Try relative imports first (when used as a package) + from .submodule import Component +except ImportError: + # Fall back to absolute imports (when imported directly from tests) + from submodule import Component +``` +Also ensure test files add the parent directory to sys.path before importing: +```python +handler_path = Path(__file__).parent.parent +if handler_path.exists(): + sys.path.insert(0, str(handler_path)) +``` \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/use-specific-handler-methods.md b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/use-specific-handler-methods.md new file mode 100644 index 000000000..6b71da31c --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/use-specific-handler-methods.md @@ -0,0 +1,7 @@ +--- +globs: '["**/*.md", "**/handlers/*.py", "**/examples/*.py"]' +regex: MQTTHandler|handle.*event|handler.*example +alwaysApply: false +--- + +Always encourage users to implement specific handler methods like handle_log_entry_add, handle_log_entry_update, etc. instead of the generic handle method. Show HybridEventHandler for multi-event handlers. The framework automatically routes events to the correct typed method based on the event type. 
\ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/.gitignore b/tools/developer_tools/bely-mqtt-message-broker/.gitignore new file mode 100644 index 000000000..dd5118ff2 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/.gitignore @@ -0,0 +1,140 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +Pipfile.lock + +# PEP 582 +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# IDE +.vscode/ +.idea/ +*.swp +*.swo +*~ +.DS_Store + +# Project specific +.env.local +.env.*.local +*.pid + +# Conda build artifacts +conda-bld/ +.conda/ diff --git a/tools/developer_tools/bely-mqtt-message-broker/CHANGELOG.md b/tools/developer_tools/bely-mqtt-message-broker/CHANGELOG.md 
new file mode 100644 index 000000000..eb7361468 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/CHANGELOG.md @@ -0,0 +1,28 @@ +# Changelog + +All notable changes to the BELY MQTT Framework will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [0.1.0] - 2024-01-01 + +### Added +- Initial release +- Pluggable handler system for MQTT events +- Pydantic models for all BELY event types +- MQTT topic pattern matching with wildcards +- BELY API client integration +- CLI interface with `start` and `version` commands +- Async/await support +- Comprehensive logging +- Configuration via command-line options +- Handler hot-reloading support +- Systemd service example +- Documentation and examples + +### Security +- Secure MQTT authentication support +- API key handling for BELY API + +[0.1.0]: https://github.com/bely-org/bely-mqtt-framework/releases/tag/v0.1.0 \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/LICENSE b/tools/developer_tools/bely-mqtt-message-broker/LICENSE new file mode 100644 index 000000000..194aa8022 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/LICENSE @@ -0,0 +1,23 @@ +Copyright © 2025, UChicago Argonne, LLC +All Rights Reserved +Software Name: Best Electronic Logbook Yet (BELY) +By: UChicago Argonne, LLC +OPEN SOURCE LICENSE (MIT) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + + * The above copyright notice and this permission notice 
shall be included in + all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/tools/developer_tools/bely-mqtt-message-broker/Makefile b/tools/developer_tools/bely-mqtt-message-broker/Makefile new file mode 100644 index 000000000..8d9fc01c0 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/Makefile @@ -0,0 +1,128 @@ +.PHONY: help install install-dev test test-handlers test-apprise test-apprise-integration test-all lint format fix type-check clean docs quality quality-fix + +help: + @echo "BELY MQTT Framework - Development Commands" + @echo "" + @echo "Installation:" + @echo " make install Install the package" + @echo " make install-dev Install with development dependencies" + @echo "" + @echo "Testing:" + @echo " make test Run core framework tests" + @echo " make test-handlers Run all handler tests" + @echo " make test-apprise Run Apprise handler unit tests" + @echo " make test-apprise-integration Run Apprise integration tests" + @echo " make test-all Run all tests (framework + handlers)" + @echo " make test-cov Run all tests with coverage" + @echo "" + @echo "Code Quality:" + @echo " make lint Run linting checks" + @echo " make format Format code with black" + @echo " make fix Auto-fix formatting and linting issues" + @echo " make type-check Run type checking with mypy" + @echo " make type-check-handlers Type check example handlers" + @echo " make quality Run all quality checks" + @echo " make quality-fix Fix issues then run quality checks" + @echo "" + @echo "Utilities:" + 
@echo " make clean Clean build artifacts" + @echo " make docs Build documentation" + @echo " make run-example Run example handler" + +install: + pip install -e . + +install-dev: + pip install -e ".[dev]" + +test: + pytest tests/ + +test-handlers: + @echo "Running Apprise Smart Notification Handler tests..." + pytest examples/handlers/apprise_smart_notification/test/ \ + --asyncio-mode=auto -v + +test-apprise: + @echo "Running Apprise handler unit tests..." + pytest examples/handlers/apprise_smart_notification/test/test_handler.py --asyncio-mode=auto -v + +test-apprise-integration: + @echo "Running Apprise handler integration tests..." + pytest examples/handlers/apprise_smart_notification/test/test_integration.py --asyncio-mode=auto -v + +test-all: test test-handlers + +test-cov: + pytest tests/ \ + examples/handlers/apprise_smart_notification/test/ \ + --cov=src/bely_mqtt \ + --cov=examples/handlers/apprise_smart_notification \ + --cov-report=html \ + --cov-report=term-missing \ + --asyncio-mode=auto + +lint: + ruff check src/ tests/ examples/handlers/apprise_smart_notification/ + black --check src/ tests/ examples/handlers/apprise_smart_notification/ + +format: + black src/ tests/ examples/handlers/apprise_smart_notification/ + ruff check --fix src/ tests/ examples/handlers/apprise_smart_notification/ + +type-check: + mypy src/bely_mqtt + @echo "Type checking apprise_smart_notification handler..." + mypy examples/handlers/apprise_smart_notification --ignore-missing-imports + +type-check-handlers: + @echo "Type checking all example handlers..." + mypy examples/handlers/apprise_smart_notification --ignore-missing-imports + # Add other handlers here as they are converted to packages + +quality: lint type-check test-all + +fix: + @echo "Auto-fixing code formatting and linting issues..." + @echo "Running black formatter..." + @black src/ tests/ examples/handlers/apprise_smart_notification/ + @echo "Running ruff auto-fixes..." 
+ @-ruff check --fix src/ tests/ examples/handlers/apprise_smart_notification/ + @echo "" + @echo "Auto-fix complete. Run 'make lint' to see remaining issues." + +quality-fix: fix + @echo "Running quality checks after fixes..." + @echo "=======================================" + @$(MAKE) quality || (echo ""; echo "Some issues could not be automatically fixed. Please review the errors above."; exit 1) + +clean: + find . -type d -name __pycache__ -exec rm -rf {} + 2>/dev/null || true + find . -type f -name "*.pyc" -delete + find . -type d -name "*.egg-info" -exec rm -rf {} + 2>/dev/null || true + find . -type d -name ".pytest_cache" -exec rm -rf {} + 2>/dev/null || true + find . -type d -name ".mypy_cache" -exec rm -rf {} + 2>/dev/null || true + find . -type d -name ".ruff_cache" -exec rm -rf {} + 2>/dev/null || true + find . -type d -name "htmlcov" -exec rm -rf {} + 2>/dev/null || true + find . -type f -name ".coverage" -delete + rm -rf build/ dist/ + +docs: + @echo "Documentation is in docs/ directory" + @echo "Main files:" + @echo " - README.md: Overview and API reference" + @echo " - docs/QUICKSTART.md: Quick start guide" + @echo " - docs/HANDLER_DEVELOPMENT.md: Handler development guide" + @echo " - docs/ARCHITECTURE.md: Architecture documentation" + +run-example: + @echo "Starting BELY MQTT Framework with example handlers..." + bely-mqtt start \ + --broker-host localhost \ + --broker-port 1883 \ + --handlers-dir ./examples/handlers \ + --topic "bely/#" \ + --log-level INFO + +list-handlers: + bely-mqtt list-handlers --handlers-dir ./examples/handlers diff --git a/tools/developer_tools/bely-mqtt-message-broker/README.md b/tools/developer_tools/bely-mqtt-message-broker/README.md new file mode 100644 index 000000000..e9d4e62a8 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/README.md @@ -0,0 +1,16 @@ +# BELY MQTT Framework + +A pluggable Python framework for handling MQTT events from BELY (Best Electronic Logbook Yet). 
+ +## Features + +- 🔌 **Pluggable Handlers** - Create custom handlers for specific MQTT topics +- 📦 **Type-Safe Models** - Pydantic models for all BELY event types +- 🎯 **Topic Matching** - Support for MQTT wildcards (`+`, `#`) +- 🔗 **API Integration** - Optional BELY API client for additional data +- 🚀 **Async Support** - Built on async/await for high performance +- 📢 **Notifications** - Send alerts via email, Slack, Discord, and more + +## Documentation + +For more details, see the [docs](./docs) directory. \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/README.md b/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/README.md new file mode 100644 index 000000000..5af3c8b03 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/README.md @@ -0,0 +1,134 @@ +# Conda Recipe for BELY MQTT Framework + +This directory contains the conda recipe for building the BELY MQTT Framework package. + +## Prerequisites + +- Conda or Miniconda installed +- conda-build package: `conda install conda-build` +- anaconda-client (for uploading): `conda install anaconda-client` + +## Building the Package + +### Quick Build + +```bash +# Run the build script +./conda-recipe/build_conda_package.sh +``` + +### Manual Build + +```bash +# Set build output directory (important if running from conda env in project) +export CONDA_BLD_PATH="${HOME}/conda-bld" + +# Build for current platform +conda build conda-recipe/ --output-folder "${CONDA_BLD_PATH}" + +# Build with specific Python version +conda build conda-recipe/ --python 3.11 --output-folder "${CONDA_BLD_PATH}" + +# Build for all Python versions defined in conda_build_config.yaml +conda build conda-recipe/ --variants --output-folder "${CONDA_BLD_PATH}" +``` + +## Testing the Package + +The package includes automated tests that run during the build process: +- Import tests for all modules +- CLI command tests +- Basic functionality tests + +## Uploading to 
Private Repository + +### To Anaconda Cloud (Private Channel) + +```bash +# Login to Anaconda Cloud +anaconda login + +# Upload the package +anaconda upload --user YOUR_ORG --channel YOUR_CHANNEL /path/to/package.tar.bz2 + +# Upload all built packages +anaconda upload --user YOUR_ORG --channel YOUR_CHANNEL ~/conda-bld/**/*.tar.bz2 +``` + +### To Private Conda Server + +```bash +# Example for Artifactory +curl -u username:password -T /path/to/package.tar.bz2 \ + "https://your-artifactory.com/artifactory/conda-local/linux-64/package.tar.bz2" +``` + +## Installing from Private Repository + +### From Anaconda Cloud + +```bash +# Add your private channel +conda config --add channels https://conda.anaconda.org/YOUR_ORG/YOUR_CHANNEL + +# Install the package +conda install bely-mqtt-framework +``` + +### From Private Server + +```bash +# Add your private repository +conda config --add channels https://your-server.com/conda/channel + +# Install the package +conda install bely-mqtt-framework +``` + +## Package Variants + +The recipe builds packages for multiple Python versions: +- Python 3.9 +- Python 3.10 +- Python 3.11 +- Python 3.12 + +All packages are noarch (platform-independent). + +## Optional Dependencies + +To include optional dependencies (like apprise for notifications): + +```bash +# Install with apprise support +conda install bely-mqtt-framework apprise +``` + +## Troubleshooting + +### Build Failures + +1. Check conda-build is up to date: `conda update conda-build` +2. Clear conda cache: `conda clean --all` +3. Check build logs in `~/conda-bld/work/` + +### "Can't merge/copy source into subdirectory of itself" Error + +This occurs when trying to build conda packages with the build directory inside the source tree. The build script automatically handles this by: + +1. Using a temporary directory for the build process +2. Copying the final artifacts to `./conda-bld` in your project +3. 
Cleaning up the temporary directory + +This allows you to keep build artifacts locally while avoiding the circular reference issue. + +### Import Errors + +1. Ensure all dependencies are available in your conda channels +2. Check for conflicting packages: `conda list | grep pydantic` + +### Upload Issues + +1. Verify anaconda-client is installed: `conda install anaconda-client` +2. Check authentication: `anaconda whoami` +3. Verify channel permissions \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/build_conda_package.sh b/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/build_conda_package.sh new file mode 100755 index 000000000..96ee31437 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/build_conda_package.sh @@ -0,0 +1,91 @@ +#!/bin/bash +# Build script for creating conda packages + +set -e + +# Colors for output +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +RED='\033[0;31m' +NC='\033[0m' # No Color + +echo -e "${GREEN}Building BELY MQTT Framework conda package...${NC}" + +# Check if conda-build is installed +if ! command -v conda-build &> /dev/null; then + echo -e "${RED}conda-build is not installed. Installing...${NC}" + conda install -y conda-build +fi + +# Clean previous builds +echo -e "${YELLOW}Cleaning previous builds...${NC}" +rm -rf build/ dist/ *.egg-info/ +conda build purge + +# Set up local directory for final artifacts +SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +PROJECT_ROOT="$( cd "${SCRIPT_DIR}/.." 
&& pwd )" +LOCAL_CONDA_BLD="${PROJECT_ROOT}/conda-bld" + +# Remove existing conda-bld directory to ensure clean build +if [ -d "${LOCAL_CONDA_BLD}" ]; then + echo -e "${YELLOW}Removing existing conda-bld directory...${NC}" + rm -rf "${LOCAL_CONDA_BLD}" +fi +mkdir -p "${LOCAL_CONDA_BLD}" + +# Use a temporary build directory outside the project +TEMP_BUILD_DIR=$(mktemp -d "${TMPDIR:-/tmp}/conda-build.XXXXXX") +export CONDA_BLD_PATH="${TEMP_BUILD_DIR}" + +echo -e "${YELLOW}Using temporary build directory: ${CONDA_BLD_PATH}${NC}" +echo -e "${YELLOW}Final packages will be copied to: ${LOCAL_CONDA_BLD}${NC}" + +# Build the package +echo -e "${YELLOW}Building conda package...${NC}" +conda build conda-recipe/ --output-folder "${CONDA_BLD_PATH}" + +# Get the package path +PACKAGE_PATH=$(conda build conda-recipe/ --output-folder "${CONDA_BLD_PATH}" --output) + +# Copy build artifacts to local directory +echo -e "${YELLOW}Copying build artifacts to project directory...${NC}" +cp -r "${CONDA_BLD_PATH}"/noarch "${LOCAL_CONDA_BLD}/" 2>/dev/null || true +cp -r "${CONDA_BLD_PATH}"/osx-arm64 "${LOCAL_CONDA_BLD}/" 2>/dev/null || true +cp -r "${CONDA_BLD_PATH}"/linux-64 "${LOCAL_CONDA_BLD}/" 2>/dev/null || true +cp -r "${CONDA_BLD_PATH}"/win-64 "${LOCAL_CONDA_BLD}/" 2>/dev/null || true + +# Update package path to local directory +PACKAGE_NAME=$(basename "${PACKAGE_PATH}") +PACKAGE_PATH="${LOCAL_CONDA_BLD}/noarch/${PACKAGE_NAME}" + +# Clean up temporary directory +rm -rf "${TEMP_BUILD_DIR}" + +echo -e "${GREEN}Package built successfully!${NC}" +echo -e "${GREEN}Package location: ${PACKAGE_PATH}${NC}" + +# Optional: Convert to other platforms (skip for noarch packages) +if [[ ! 
${PACKAGE_PATH} == *"noarch"* ]]; then + echo -e "${YELLOW}Converting package for other platforms...${NC}" + # Use temp directory for conversion, then copy results + TEMP_CONVERT_DIR=$(mktemp -d "${TMPDIR:-/tmp}/conda-convert.XXXXXX") + conda convert -p all ${PACKAGE_PATH} -o "${TEMP_CONVERT_DIR}" + cp -r "${TEMP_CONVERT_DIR}"/* "${LOCAL_CONDA_BLD}/" 2>/dev/null || true + rm -rf "${TEMP_CONVERT_DIR}" +else + echo -e "${YELLOW}Package is noarch - no platform conversion needed${NC}" +fi + +echo -e "${GREEN}Build complete!${NC}" +echo "" +echo "To upload to your private conda channel:" +echo " anaconda upload ${PACKAGE_PATH}" +echo "" +echo "To install locally:" +echo " conda install -c local bely-mqtt-framework" +echo "" +echo "Or install from the local build directory:" +echo " conda install -c file://${LOCAL_CONDA_BLD} bely-mqtt-framework" +echo "" +echo "Package files are located in: ${LOCAL_CONDA_BLD}" \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/conda_build_config.yaml b/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/conda_build_config.yaml new file mode 100644 index 000000000..d54e73aad --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/conda_build_config.yaml @@ -0,0 +1,14 @@ +# Conda build configuration +# This file defines build variants and pinnings + +python: + - 3.9 + - 3.10 + - 3.11 + - 3.12 + +# Pin run dependencies to be compatible with the build dependencies +pin_run_as_build: + python: + min_pin: x.x + max_pin: x.x \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/meta.yaml b/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/meta.yaml new file mode 100644 index 000000000..d98e74294 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/meta.yaml @@ -0,0 +1,65 @@ +{% set name = "bely-mqtt-framework" %} +{% set version = "0.1.0" %} + +package: + name: {{ name|lower }} + 
version: {{ version }} + +source: + path: .. + +build: + number: 0 + noarch: python + script: {{ PYTHON }} -m pip install . -vv + entry_points: + - bely-mqtt = bely_mqtt.cli:cli + +requirements: + host: + - python >=3.9 + - pip + - setuptools >=65.0 + - wheel + run: + - python >=3.9 + - click >=8.1.0 + - paho-mqtt >=2.0.0 + - pydantic >=2.0.0 + - python-dotenv >=1.0.0 + - pluggy >=1.3.0 + +test: + imports: + - bely_mqtt + - bely_mqtt.models + - bely_mqtt.events + - bely_mqtt.plugin + - bely_mqtt.mqtt_client + commands: + - bely-mqtt --help + - bely-mqtt --version + requires: + - pytest >=7.0.0 + - pytest-asyncio >=0.21.0 + +about: + home: https://github.com/bely-org/bely-mqtt-framework + license: MIT + license_family: MIT + license_file: LICENSE + summary: Pluggable Python framework for handling BELY MQTT events + description: | + BELY MQTT Framework is a pluggable Python framework for handling MQTT events + from BELY (Best Electronic Logbook Yet). It provides: + - Pluggable handler system for MQTT topics + - Type-safe models for BELY events + - Integration with BELY API for additional data + - CLI for easy configuration and management + - Async support for high performance + doc_url: https://github.com/bely-org/bely-mqtt-framework/tree/main/docs + dev_url: https://github.com/bely-org/bely-mqtt-framework + +extra: + recipe-maintainers: + - your-github-username \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/post-link.sh b/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/post-link.sh new file mode 100644 index 000000000..36f02e49d --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/post-link.sh @@ -0,0 +1,15 @@ +#!/bin/bash +# Post-link script that runs after package installation + +echo "" +echo "BELY MQTT Framework has been successfully installed!" +echo "" +echo "To get started:" +echo " 1. Create a handlers directory: mkdir -p handlers" +echo " 2. 
"""Test script for conda package.

Run after the conda package is installed to verify that the package
imports cleanly and that its CLI entry point responds to --help and
--version. Exits 0 on success, 1 on any failure.
"""

import subprocess
import sys


def test_imports():
    """Return True if all public bely_mqtt modules import successfully."""
    print("Testing imports...")

    try:
        import bely_mqtt
        print(f"✓ bely_mqtt version: {bely_mqtt.__version__}")

        # Imported only to prove the names resolve; intentionally unused.
        from bely_mqtt import MQTTHandler, BelyMQTTClient, PluginManager
        print("✓ Core classes imported")

        from bely_mqtt.models import (
            LogEntryAddEvent,
            LogEntryUpdateEvent,
            MQTTMessage,
        )
        print("✓ Event models imported")

        from bely_mqtt.events import EventType
        print("✓ Event types imported")
    except ImportError as e:
        print(f"✗ Import failed: {e}")
        return False

    return True


def _run_cli(args):
    """Run ``python -m bely_mqtt.cli`` with *args*; return the CompletedProcess.

    Uses sys.executable so the test exercises the same interpreter the
    package was installed into.
    """
    return subprocess.run(
        [sys.executable, "-m", "bely_mqtt.cli", *args],
        capture_output=True,
        text=True,
    )


def test_cli():
    """Return True if the CLI ``--help`` and ``--version`` commands succeed."""
    print("\nTesting CLI...")

    # Test help command
    result = _run_cli(["--help"])
    if result.returncode != 0:
        print(f"✗ CLI help failed: {result.stderr}")
        return False
    print("✓ CLI help works")

    # Test version command
    result = _run_cli(["--version"])
    if result.returncode != 0:
        print(f"✗ CLI version failed: {result.stderr}")
        return False
    print(f"✓ CLI version: {result.stdout.strip()}")

    return True


def main():
    """Run all tests; return 0 if every test passed, 1 otherwise."""
    print("Running conda package tests...\n")

    tests_passed = test_imports()

    # Run the CLI tests even if imports failed so every failure is reported.
    if not test_cli():
        tests_passed = False

    if tests_passed:
        print("\n✓ All tests passed!")
        return 0
    print("\n✗ Some tests failed!")
    return 1


if __name__ == "__main__":
    sys.exit(main())
+1,25 @@ +# BELY MQTT Framework Configuration +# This file configures global settings and handler-specific parameters + +# Global configuration shared across all handlers +global: + # BELY API URL for querying additional information + bely_url: https://bely.gov/bely + +# Handler-specific configurations +handlers: + # Advanced logging handler with custom directory + AdvancedLoggingHandler: + logging_dir: /var/log/bely + + # Basic logging handler (no configuration needed) + LoggingHandler: {} + + # Notification handler with webhook + NotificationHandler: + webhook_url: https://example.com/webhook + + # Apprise smart notification handler + AppriseSmartNotificationHandler: + config: + config_path: /path/to/apprise_notification_config.yaml \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/docs/README.md b/tools/developer_tools/bely-mqtt-message-broker/docs/README.md new file mode 100644 index 000000000..5926f0f3f --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/docs/README.md @@ -0,0 +1,33 @@ +# BELY MQTT Framework Documentation + +This directory contains the complete documentation for the BELY MQTT Framework. + +## Documentation Structure + +### Getting Started +- **[index.md](index.md)** - Documentation home page with overview +- **[getting-started.md](getting-started.md)** - Quick start guide for new users +- **[examples.md](examples.md)** - Working examples of different handler types + +### Reference +- **[api-reference.md](api-reference.md)** - Complete API documentation +- **[configuration.md](configuration.md)** - All configuration options + +### Guides +- **[faq.md](faq.md)** - Frequently asked questions +- **[troubleshooting.md](troubleshooting.md)** - Common issues and solutions +- **[migration-guide.md](migration-guide.md)** - Migrating from raw MQTT clients + +## Key Concepts + +1. **Handlers** - Python classes that process specific MQTT events +2. 
**Event Models** - Type-safe Pydantic models for all BELY events +3. **Topic Patterns** - MQTT topic matching with wildcard support +4. **API Integration** - Optional BELY API client for enriched data + +## Quick Links + +- [Create your first handler](getting-started.md) +- [View example handlers](examples.md) +- [Configure the framework](configuration.md) +- [Troubleshoot issues](troubleshooting.md) \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/docs/api-reference.md b/tools/developer_tools/bely-mqtt-message-broker/docs/api-reference.md new file mode 100644 index 000000000..f5d7c4019 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/docs/api-reference.md @@ -0,0 +1,167 @@ +# API Reference + +## Core Classes + +### MQTTHandler + +Base class for all MQTT event handlers. + +```python +from bely_mqtt import MQTTHandler, LogEntryAddEvent + +class MyHandler(MQTTHandler): + # By default, subscribes to all BELY topics (bely/#) + # Override topic_pattern to subscribe to specific topics only + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + """Handle new log entry event.""" + pass +``` + +**Properties:** +- `topic_pattern` - MQTT topic pattern to match (default: `"bely/#"`) +- `logger` - Pre-configured logger instance +- `api_client` - Optional BELY API client (if configured) + +**Overriding Topic Pattern:** + +```python +class SpecificHandler(MQTTHandler): + @property + def topic_pattern(self) -> str: + """Subscribe to specific topics only.""" + return "bely/logEntry/Add" +``` + +**Event Handler Methods:** +- `handle_log_entry_add(event: LogEntryAddEvent)` - New log entries +- `handle_log_entry_update(event: LogEntryUpdateEvent)` - Updated entries +- `handle_log_entry_reply_add(event: LogEntryReplyAddEvent)` - New replies +- `handle_log_entry_reply_update(event: LogEntryReplyUpdateEvent)` - Updated replies +- `handle_log_reaction_add(event: LogReactionAddEvent)` - New reactions +- 
`handle_log_reaction_delete(event: LogReactionDeleteEvent)` - Deleted reactions + +**Utility Methods:** +- `topic_matches(topic: str) -> bool` - Check if topic matches pattern + +### HybridEventHandler + +Alias for MQTTHandler that emphasizes handling multiple event types. With the default topic pattern `"bely/#"`, all handlers can process multiple event types by implementing the appropriate handler methods. + +```python +from bely_mqtt import MQTTHandler, LogEntryAddEvent, LogEntryUpdateEvent + +class MultiHandler(MQTTHandler): + # Uses default topic_pattern "bely/#" + # Implements multiple event handler methods + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + self.logger.info(f"New entry: {event.description}") + + async def handle_log_entry_update(self, event: LogEntryUpdateEvent) -> None: + self.logger.info(f"Updated entry: {event.log_info.id}") +``` + +### MQTTMessage + +Represents an MQTT message. + +```python +from bely_mqtt import MQTTMessage + +message = MQTTMessage( + topic="bely/logEntry/Add", + payload={"description": "Entry added"}, + raw_payload='{"description": "Entry added"}' +) +``` + +**Attributes:** +- `topic: str` - MQTT topic +- `payload: Dict[str, Any]` - Parsed JSON payload +- `raw_payload: str` - Original payload string + +## Event Models + +### LogEntryAddEvent + +```python +from bely_mqtt import LogEntryAddEvent + +# Event is automatically parsed from message payload +async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + print(f"Entry ID: {event.log_info.id}") + print(f"Document: {event.parent_log_document_info.name}") + print(f"Added by: {event.event_triggered_by_username}") + print(f"Description: {event.description}") +``` + +### LogEntryUpdateEvent + +```python +from bely_mqtt import LogEntryUpdateEvent + +async def handle_log_entry_update(self, event: LogEntryUpdateEvent) -> None: + print(f"Updated entry: {event.log_info.id}") + print(f"Text diff: {event.text_diff}") +``` + +### 
LogEntryReplyAddEvent + +```python +from bely_mqtt import LogEntryReplyAddEvent + +async def handle_log_entry_reply_add(self, event: LogEntryReplyAddEvent) -> None: + print(f"Reply to entry: {event.parent_log_info.id}") + print(f"Reply ID: {event.log_info.id}") +``` + +## Topic Patterns + +By default, handlers subscribe to `bely/#` (all BELY events). The framework automatically routes events to the appropriate handler methods based on the event type. + +MQTT topic patterns support wildcards: + +- `+` - Single level wildcard +- `#` - Multi-level wildcard + +Examples: +- `bely/#` - Matches all BELY events (default) +- `bely/logEntry/Add` - Exact match +- `bely/logEntry/+` - Matches Add, Update, Delete +- `bely/+/Add` - Matches any entity Add events + +**When to Override the Default:** +- Performance optimization - reduce unnecessary message processing +- Clarity - make handler's purpose explicit +- Testing - isolate specific event types + +## CLI Commands + +### start + +Start the MQTT framework: + +```bash +bely-mqtt start [OPTIONS] +``` + +**Options:** +- `--handlers-dir PATH` - Directory containing handlers +- `--mqtt-host TEXT` - MQTT broker host +- `--mqtt-port INTEGER` - MQTT broker port +- `--mqtt-username TEXT` - MQTT username +- `--mqtt-password TEXT` - MQTT password +- `--log-level TEXT` - Logging level +- `--api-url TEXT` - BELY API URL +- `--api-key TEXT` - BELY API key + +### version + +Show version: + +```bash +bely-mqtt version +``` + + diff --git a/tools/developer_tools/bely-mqtt-message-broker/docs/configuration.md b/tools/developer_tools/bely-mqtt-message-broker/docs/configuration.md new file mode 100644 index 000000000..5d9b0a545 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/docs/configuration.md @@ -0,0 +1,165 @@ +# Configuration + +The BELY MQTT Framework can be configured through command-line options, environment variables, or configuration files. 
+ +## Command-Line Options + +```bash +bely-mqtt start [OPTIONS] +``` + +### MQTT Options + +- `--broker-host TEXT` - MQTT broker hostname (default: localhost) +- `--broker-port INTEGER` - MQTT broker port (default: 1883) +- `--username TEXT` - MQTT username for authentication +- `--password TEXT` - MQTT password for authentication +- `--topic TEXT` - MQTT topic pattern to subscribe (default: bely/#) + +### Handler Options + +- `--handlers-dir PATH` - Directory containing handler files (default: ./handlers) +- `--config PATH` - YAML configuration file for handlers + +### API Options + +- `--api-url TEXT` - BELY API base URL +- `--api-key TEXT` - BELY API authentication key + +### Logging Options + +- `--log-level TEXT` - Logging level: DEBUG, INFO, WARNING, ERROR (default: INFO) + +## Environment Variables + +All command-line options can be set via environment variables: + +```bash +export MQTT_BROKER_HOST=broker.example.com +export MQTT_BROKER_PORT=1883 +export MQTT_CLIENT_ID=bely-mqtt-client +export MQTT_USERNAME=myuser +export MQTT_PASSWORD=mypass +export BELY_API_URL=https://api.bely.dev +export BELY_API_KEY=your-api-key +export BELY_HANDLERS_DIR=./handlers +export BELY_CONFIG=./config.yaml +export LOG_LEVEL=DEBUG +``` + +## Configuration Files + +### Environment File (.env) + +Create a `.env` file in your project root for environment variables: + +```bash +# MQTT Configuration +MQTT_BROKER_HOST=localhost +MQTT_BROKER_PORT=1883 +MQTT_CLIENT_ID=bely-mqtt-client +MQTT_USERNAME= +MQTT_PASSWORD= + +# BELY API Configuration +BELY_API_URL=https://api.bely.dev +BELY_API_KEY=your-api-key-here + +# Logging +LOG_LEVEL=INFO + +# Handler Configuration +BELY_HANDLERS_DIR=./handlers +BELY_CONFIG=./config.yaml +``` + +### Handler Configuration (YAML) + +Handlers can be configured via a YAML file to provide both global and handler-specific settings: + +```yaml +# Global configuration shared across all handlers +global: + # BELY API URL for querying additional information + 
bely_url: https://bely.example.com/bely + # Add any other global parameters here + shared_param: value + +# Handler-specific configurations +handlers: + # Configure the AdvancedLoggingHandler + AdvancedLoggingHandler: + logging_dir: /var/log/bely + log_level: DEBUG + rotate_logs: true + max_size_mb: 100 + + # Configure the NotificationHandler + NotificationHandler: + webhook_url: https://hooks.slack.com/services/YOUR/WEBHOOK + enabled: true + timeout: 30 + + # Configure the AppriseSmartNotificationHandler + AppriseSmartNotificationHandler: + config_path: /path/to/apprise_notification_config.yaml +``` + +Use with: `--config config.yaml` + +## SSL/TLS Configuration + +For secure MQTT connections: + +```bash +bely-mqtt start \ + --mqtt-host broker.example.com \ + --mqtt-port 8883 \ + --mqtt-tls \ + --mqtt-ca-cert /path/to/ca.crt \ + --mqtt-client-cert /path/to/client.crt \ + --mqtt-client-key /path/to/client.key +``` + +## Logging Configuration + +### Log Levels + +- `DEBUG` - Detailed information for debugging +- `INFO` - General informational messages +- `WARNING` - Warning messages for potentially harmful situations +- `ERROR` - Error messages for serious problems + +### Log Format + +The default log format includes: +- Timestamp +- Log level +- Handler name +- Message + +Example: +``` +2024-01-01 12:00:00 INFO [LogHandler] New log entry added: ID=123 +``` + +## Production Configuration + +For production deployments: + +1. Use environment variables for sensitive data +2. Enable appropriate log level (INFO or WARNING) +3. Configure log rotation +4. Use SSL/TLS for MQTT connections +5. 
Set up monitoring and alerting + +Example production command: + +```bash +bely-mqtt start \ + --handlers-dir /opt/bely-handlers \ + --log-level INFO \ + --log-file /var/log/bely-mqtt/app.log \ + --mqtt-tls \ + --mqtt-ca-cert /etc/ssl/mqtt/ca.crt +``` \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/docs/examples.md b/tools/developer_tools/bely-mqtt-message-broker/docs/examples.md new file mode 100644 index 000000000..94798e1bd --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/docs/examples.md @@ -0,0 +1,217 @@ +# Examples + +## Basic Examples + +### Simple Log Entry Handler + +```python +from bely_mqtt import MQTTHandler, LogEntryAddEvent + +class SimpleHandler(MQTTHandler): + """Log new entries to console.""" + + # Uses default topic_pattern "bely/#" - receives all BELY events + # The framework automatically routes log entry add events to handle_log_entry_add + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + self.logger.info(f"New entry: {event.description}") +``` + +### Multi-Event Handler + +```python +from bely_mqtt import MQTTHandler, LogEntryAddEvent, LogEntryUpdateEvent + +class LogMonitor(MQTTHandler): + """Monitor all log entry changes.""" + + # Uses default topic_pattern "bely/#" - receives all BELY events + # Simply implement the handler methods for the events you care about + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + self.logger.info(f"NEW: {event.description[:50]}...") + + async def handle_log_entry_update(self, event: LogEntryUpdateEvent) -> None: + self.logger.info(f"UPDATED: Entry {event.log_info.id}") +``` + +### Specific Topic Handler + +```python +from bely_mqtt import MQTTHandler, LogEntryAddEvent, LogEntryUpdateEvent + +class LogEntryOnlyHandler(MQTTHandler): + """Monitor only log entry events (not replies, reactions, etc).""" + + @property + def topic_pattern(self) -> str: + return "bely/logEntry/+" # Override default to match 
only log entry events + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + self.logger.info(f"NEW: {event.description[:50]}...") + + async def handle_log_entry_update(self, event: LogEntryUpdateEvent) -> None: + self.logger.info(f"UPDATED: Entry {event.log_info.id}") +``` + +## Advanced Examples + +### API Integration + +```python +from bely_mqtt import MQTTHandler, LogEntryUpdateEvent + +class EnrichedHandler(MQTTHandler): + """Enrich events with API data.""" + + @property + def topic_pattern(self) -> str: + # Override default to only process update events (performance optimization) + return "bely/logEntry/Update" + + async def handle_log_entry_update(self, event: LogEntryUpdateEvent) -> None: + if self.api_client: + # Get full entry details + entry = await self.api_client.get_log_entry(event.log_info.id) + self.logger.info(f"Full entry data: {entry}") +``` + +### Notification Handler + +```python +from bely_mqtt import MQTTHandler, LogEntryReplyAddEvent +import aiohttp + +class NotificationHandler(MQTTHandler): + """Send notifications for replies.""" + + @property + def topic_pattern(self) -> str: + # Override to only process reply events (avoid unnecessary processing) + return "bely/logEntryReply/Add" + + async def handle_log_entry_reply_add(self, event: LogEntryReplyAddEvent) -> None: + # Don't notify about self-replies + if event.event_triggered_by_username == event.parent_log_info.entered_by_username: + return + + message = f"New reply from {event.event_triggered_by_username}" + await self.send_slack_notification(message) + + async def send_slack_notification(self, message: str) -> None: + webhook_url = "https://hooks.slack.com/services/YOUR/WEBHOOK" + async with aiohttp.ClientSession() as session: + await session.post(webhook_url, json={"text": message}) +``` + +### Reaction Tracker + +```python +from bely_mqtt import MQTTHandler, LogReactionAddEvent, LogReactionDeleteEvent +from collections import defaultdict + +class 
ReactionTracker(MQTTHandler): + """Track reactions on log entries.""" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.reaction_counts = defaultdict(lambda: defaultdict(int)) + + @property + def topic_pattern(self) -> str: + # Override to only process reaction events + return "bely/logReaction/+" + + async def handle_log_reaction_add(self, event: LogReactionAddEvent) -> None: + log_id = event.parent_log_info.id + emoji = event.log_reaction.reaction.emoji + self.reaction_counts[log_id][emoji] += 1 + + self.logger.info(f"Reaction {emoji} added to entry {log_id}") + + async def handle_log_reaction_delete(self, event: LogReactionDeleteEvent) -> None: + log_id = event.parent_log_info.id + emoji = event.log_reaction.reaction.emoji + if self.reaction_counts[log_id][emoji] > 0: + self.reaction_counts[log_id][emoji] -= 1 +``` + +### Error Handling Example + +```python +from bely_mqtt import MQTTHandler, LogEntryAddEvent +import asyncio + +class RobustHandler(MQTTHandler): + """Handler with comprehensive error handling.""" + + # Uses default topic_pattern "bely/#" + # Implements only the specific handler method needed + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + try: + # Process with timeout + await asyncio.wait_for( + self.process_entry(event), + timeout=30.0 + ) + except asyncio.TimeoutError: + self.logger.error(f"Timeout processing entry {event.log_info.id}") + except Exception as e: + self.logger.error(f"Error: {e}", exc_info=True) + + async def process_entry(self, event: LogEntryAddEvent) -> None: + # Your processing logic here + await asyncio.sleep(1) + self.logger.info(f"Processed entry {event.log_info.id}") +``` + +## Running the Examples + +1. Create a `handlers` directory +2. Copy example code to Python files in the directory +3. 
Run the framework: + +```bash +# Basic +bely-mqtt start --handlers-dir ./handlers + +# With API integration +bely-mqtt start \ + --handlers-dir ./handlers \ + --api-url https://api.bely.dev \ + --api-key your-api-key + +# With debug logging +bely-mqtt start \ + --handlers-dir ./handlers \ + --log-level DEBUG +``` + +## Testing Examples + +```python +import pytest +from bely_mqtt import LogEntryAddEvent +from handlers.simple_handler import SimpleHandler + +@pytest.mark.asyncio +async def test_simple_handler(caplog): + handler = SimpleHandler() + + event = LogEntryAddEvent( + description="Test entry", + event_timestamp="2024-01-01T00:00:00Z", + entity_name="Log", + entity_id=1, + event_triggered_by_username="testuser", + parent_log_document_info={"name": "Test Doc", "id": 1}, + log_info={"id": 1}, + logbook_list=[], + text_diff="+ Test entry" + ) + + await handler.handle_log_entry_add(event) + + assert "New entry: Test entry" in caplog.text +``` \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/docs/faq.md b/tools/developer_tools/bely-mqtt-message-broker/docs/faq.md new file mode 100644 index 000000000..0213daae9 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/docs/faq.md @@ -0,0 +1,112 @@ +# Frequently Asked Questions + +## General Questions + +### What is BELY MQTT Framework? + +It's a Python framework that makes it easy to handle MQTT events from BELY (Best Electronic Logbook Yet). You write handlers that react to specific events like log entries being added or updated. + +### What Python versions are supported? + +Python 3.9, 3.10, 3.11, and 3.12 are supported. + +### Do I need to know MQTT? + +Basic understanding helps, but the framework handles most MQTT details for you. You just need to know about topics (like `bely/logEntry/Add`). + +## Handler Development + +### How do I handle multiple event types? 
+ +Use wildcards in your topic pattern and implement specific handler methods: + +```python +from bely_mqtt import HybridEventHandler, LogEntryAddEvent, LogEntryUpdateEvent + +class MultiHandler(HybridEventHandler): + @property + def topic_pattern(self) -> str: + return "bely/logEntry/+" # Handles Add, Update, Delete + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + # Handle new entries + pass + + async def handle_log_entry_update(self, event: LogEntryUpdateEvent) -> None: + # Handle updates + pass +``` + +### Can I use the BELY API in handlers? + +Yes! Configure the API when starting: + +```bash +bely-mqtt start --api-url https://api.bely.dev --api-key YOUR_KEY +``` + +Then use `self.api_client` in your handler. + +### How do I test my handlers? + +```python +import pytest +from bely_mqtt import LogEntryAddEvent +from my_handler import MyHandler + +@pytest.mark.asyncio +async def test_handler(): + handler = MyHandler() + event = LogEntryAddEvent( + description="Test entry", + event_timestamp="2024-01-01T00:00:00Z", + entity_name="Log", + entity_id=1, + event_triggered_by_username="testuser", + parent_log_document_info={"name": "Test Doc", "id": 1}, + log_info={"id": 1}, + logbook_list=[], + text_diff="+ Test entry" + ) + await handler.handle_log_entry_add(event) +``` + +## Troubleshooting + +### Handler not being called + +1. Check the topic pattern matches the MQTT topic +2. Verify the handler file is in the handlers directory +3. Check logs for errors: `--log-level DEBUG` + +### Connection refused + +1. Check MQTT broker is running +2. Verify host and port are correct +3. Check username/password if required + +### Import errors + +Make sure the framework is installed: + +```bash +pip install bely-mqtt-framework +``` + +## Performance + +### How many handlers can I run? + +The framework can handle hundreds of handlers. Each handler runs asynchronously, so they don't block each other. + +### Can I run multiple instances? 
+ +Yes, you can run multiple framework instances. Each will receive all MQTT messages independently. + +### Is it production ready? + +Yes! The framework includes: +- Error handling and recovery +- Logging +- Systemd service support +- Async processing \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/docs/getting-started.md b/tools/developer_tools/bely-mqtt-message-broker/docs/getting-started.md new file mode 100644 index 000000000..d7a906657 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/docs/getting-started.md @@ -0,0 +1,78 @@ +# Getting Started + +This guide will help you create your first MQTT handler for BELY events. + +## Installation + +```bash +pip install bely-mqtt-framework +``` + +## Your First Handler + +Create a file `handlers/my_first_handler.py`: + +```python +from bely_mqtt import MQTTHandler, LogEntryAddEvent + +class MyFirstHandler(MQTTHandler): + """Logs when new entries are added.""" + + # By default, handlers subscribe to all BELY topics (bely/#) + # The framework automatically routes events to the appropriate handler methods + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + self.logger.info(f"New log entry: {event.description}") + self.logger.info(f"Added by: {event.event_triggered_by_username}") +``` + +### Subscribing to Specific Topics + +If you want to limit your handler to specific topics only (for performance or clarity), override the `topic_pattern` property: + +```python +class SpecificTopicHandler(MQTTHandler): + """Only handles log entry events.""" + + @property + def topic_pattern(self) -> str: + return "bely/logEntry/#" # Only log entry events + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + # Handle new entries + pass + + async def handle_log_entry_update(self, event: LogEntryUpdateEvent) -> None: + # Handle updates + pass +``` + +## Running the Framework + +```bash +# Start with your handler +bely-mqtt start 
--handlers-dir ./handlers + +# With custom MQTT broker +bely-mqtt start \ + --handlers-dir ./handlers \ + --mqtt-host broker.example.com \ + --mqtt-port 1883 +``` + +## Handler Methods + +The framework automatically calls the appropriate method based on the event type: + +- `handle_log_entry_add(event: LogEntryAddEvent)` - New log entries +- `handle_log_entry_update(event: LogEntryUpdateEvent)` - Updated entries +- `handle_log_entry_reply_add(event: LogEntryReplyAddEvent)` - New replies +- `handle_log_reaction_add(event: LogReactionAddEvent)` - New reactions + +## What's Next? + +- [API Reference](api-reference.md) - Complete API documentation +- [Examples](examples.md) - More handler examples +- [FAQ](faq.md) - Common questions and answers +- [Troubleshooting](troubleshooting.md) - Solve common issues + diff --git a/tools/developer_tools/bely-mqtt-message-broker/docs/index.md b/tools/developer_tools/bely-mqtt-message-broker/docs/index.md new file mode 100644 index 000000000..ae8e10231 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/docs/index.md @@ -0,0 +1,57 @@ +# BELY MQTT Framework Documentation + +Welcome to the BELY MQTT Framework documentation. This framework helps you build event-driven integrations with BELY (Best Electronic Logbook Yet). + +## Quick Links + +- [Getting Started](getting-started.md) - Create your first handler in 5 minutes +- [API Reference](api-reference.md) - Complete API documentation +- [Examples](examples.md) - Real-world handler examples +- [Configuration](configuration.md) - Configure the framework +- [FAQ](faq.md) - Frequently asked questions +- [Troubleshooting](troubleshooting.md) - Common issues and solutions + +## What is BELY MQTT Framework? + +The BELY MQTT Framework is a pluggable Python framework for handling MQTT events from BELY. 
It provides: + +- **Easy Handler Development** - Simple Python classes for event handling +- **Flexible Topic Matching** - Support for MQTT wildcards +- **Built-in Models** - Pydantic models for all BELY events +- **API Integration** - Optional BELY API client +- **Production Ready** - Logging, error handling, and systemd support + +## Installation + +```bash +pip install bely-mqtt-framework +``` + +## Basic Example + +```python +from bely_mqtt import MQTTHandler, LogEntryAddEvent + +class LogHandler(MQTTHandler): + @property + def topic_pattern(self) -> str: + return "bely/logEntry/Add" + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + self.logger.info(f"New entry: {event.description}") +``` + +## Architecture + +The framework consists of: + +1. **Core Framework** - MQTT client and plugin manager +2. **Handler System** - Pluggable handlers for different events +3. **Data Models** - Pydantic models for type safety +4. **CLI Interface** - Command-line tools for running the framework + +## Support + +- GitHub Issues: [Report bugs or request features](https://github.com/bely-org/bely-mqtt-framework/issues) +- Documentation: [Full documentation](https://github.com/bely-org/bely-mqtt-framework/tree/main/docs) +- Examples: [Example handlers](https://github.com/bely-org/bely-mqtt-framework/tree/main/examples) \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/docs/migration-guide.md b/tools/developer_tools/bely-mqtt-message-broker/docs/migration-guide.md new file mode 100644 index 000000000..a49327e4e --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/docs/migration-guide.md @@ -0,0 +1,141 @@ +# Migration Guide + +## Migrating from Raw MQTT Client + +If you're currently using a raw MQTT client (like `paho-mqtt`), here's how to migrate: + +### Before (Raw MQTT) + +```python +import paho.mqtt.client as mqtt +import json + +def on_message(client, userdata, msg): + if msg.topic == 
"bely/logEntry/Add": + payload = json.loads(msg.payload) + print(f"New entry: {payload['description']}") + +client = mqtt.Client() +client.on_message = on_message +client.connect("localhost", 1883) +client.subscribe("bely/#") +client.loop_forever() +``` + +### After (BELY MQTT Framework) + +```python +from bely_mqtt import MQTTHandler, LogEntryAddEvent + +class LogHandler(MQTTHandler): + # By default, subscribes to all BELY topics (bely/#) + # The framework automatically routes events to specific handlers + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + print(f"New entry: {event.description}") +``` + +Or if you want to subscribe to specific topics only: + +```python +class SpecificLogHandler(MQTTHandler): + @property + def topic_pattern(self) -> str: + return "bely/logEntry/Add" # Override to subscribe to specific topic + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + print(f"New entry: {event.description}") +``` + +## Benefits of Migration + +1. **Type Safety** - Pydantic models validate event data +2. **Better Organization** - Separate handlers for different events +3. **Error Handling** - Built-in error handling and logging +4. **Async Support** - Better performance with async/await +5. **API Integration** - Easy access to BELY API +6. **Testing** - Easier to unit test handlers + +## Migration Steps + +1. **Install the framework** + ```bash + pip install bely-mqtt-framework + ``` + +2. **Convert callbacks to handlers** + - Create one handler per event type + - Move callback logic to specific handler methods (e.g., `handle_log_entry_add`) + +3. **Use provided models** + ```python + from bely_mqtt import LogEntryAddEvent + + event = LogEntryAddEvent(**message.payload) + # Now you have type-safe access to all fields + ``` + +4. **Update configuration** + - Replace connection code with CLI arguments + - Use environment variables for secrets + +5. 
**Test your handlers** + ```python + @pytest.mark.asyncio + async def test_handler(): + handler = MyHandler() + message = MQTTMessage(...) + await handler.handle(message) + ``` + +## Common Patterns + +### Multiple Topics + +**Before:** +```python +def on_message(client, userdata, msg): + if msg.topic == "bely/logEntry/Add": + handle_add(msg) + elif msg.topic == "bely/logEntry/Update": + handle_update(msg) +``` + +**After:** +```python +from bely_mqtt import HybridEventHandler, LogEntryAddEvent, LogEntryUpdateEvent + +class MultiHandler(HybridEventHandler): + @property + def topic_pattern(self) -> str: + return "bely/logEntry/+" + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + # Handle add events + pass + + async def handle_log_entry_update(self, event: LogEntryUpdateEvent) -> None: + # Handle update events + pass +``` + +### Error Handling + +**Before:** +```python +def on_message(client, userdata, msg): + try: + payload = json.loads(msg.payload) + process_message(payload) + except Exception as e: + print(f"Error: {e}") +``` + +**After:** +```python +async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + try: + await self.process_event(event) + except Exception as e: + self.logger.error(f"Failed to process: {e}", exc_info=True) +``` \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/docs/troubleshooting.md b/tools/developer_tools/bely-mqtt-message-broker/docs/troubleshooting.md new file mode 100644 index 000000000..051677262 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/docs/troubleshooting.md @@ -0,0 +1,124 @@ +# Troubleshooting Guide + +## Common Issues + +### MQTT Connection Issues + +**Problem:** Cannot connect to MQTT broker + +**Solutions:** +1. Check broker is running: `mosquitto_sub -t '#' -v` +2. Verify connection details: + ```bash + bely-mqtt start --mqtt-host localhost --mqtt-port 1883 + ``` +3. Check firewall rules +4. 
Try without authentication first + +### Handler Not Loading + +**Problem:** Handler file exists but not being loaded + +**Solutions:** +1. Check file naming: Must be `*.py` in handlers directory +2. Verify class inherits from `MQTTHandler` +3. Check for syntax errors: `python -m py_compile handlers/my_handler.py` +4. Enable debug logging: `--log-level DEBUG` + +### Handler Not Receiving Messages + +**Problem:** Handler loads but doesn't receive messages + +**Solutions:** +1. Verify topic pattern matches: + ```python + # Check exact topic + @property + def topic_pattern(self) -> str: + return "bely/logEntry/Add" # Must match exactly + ``` +2. Implement the correct handler method: + ```python + # Use specific method for the event type + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + # Not just handle(self, message) + pass + ``` +3. Test with wildcards: + ```python + return "bely/#" # Receives all BELY messages + ``` +4. Check MQTT subscription: Look for "Subscribed to topic" in logs + +### Import Errors + +**Problem:** `ModuleNotFoundError: No module named 'bely_mqtt'` + +**Solutions:** +1. Install the framework: + ```bash + pip install bely-mqtt-framework + ``` +2. Check virtual environment is activated +3. Verify installation: + ```bash + pip show bely-mqtt-framework + ``` + +### Async Errors + +**Problem:** `RuntimeWarning: coroutine 'handle' was never awaited` + +**Solutions:** +1. Ensure handle method is async: + ```python + async def handle(self, message: MQTTMessage) -> None: + # Your code here + ``` +2. Use `await` for async calls: + ```python + await some_async_function() + ``` + +## Debug Mode + +Enable debug logging to see detailed information: + +```bash +bely-mqtt start --handlers-dir ./handlers --log-level DEBUG +``` + +This shows: +- Handler loading process +- MQTT connection details +- Message routing +- Error stack traces + +## Getting Help + +1. Check the [FAQ](faq.md) +2. Review [examples](examples.md) +3. 
Enable debug logging +4. Check GitHub issues +5. Ask in discussions + +## Log Messages Explained + +### INFO Messages + +- `"Loading handlers from ..."` - Handler discovery started +- `"Loaded handler: ..."` - Handler successfully loaded +- `"Connected to MQTT broker"` - MQTT connection established +- `"Subscribed to topic: ..."` - Topic subscription successful + +### WARNING Messages + +- `"No handlers found"` - Check handlers directory +- `"Failed to load handler"` - Syntax error in handler file +- `"No handlers for topic"` - No matching topic patterns + +### ERROR Messages + +- `"Connection refused"` - MQTT broker not accessible +- `"Authentication failed"` - Check username/password +- `"Handler error"` - Exception in handle() method \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/.env.example b/tools/developer_tools/bely-mqtt-message-broker/examples/.env.example new file mode 100644 index 000000000..e670509c4 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/.env.example @@ -0,0 +1,20 @@ +# MQTT Broker Configuration +MQTT_BROKER_HOST=localhost +MQTT_BROKER_PORT=1883 +MQTT_CLIENT_ID=bely-mqtt-client +MQTT_USERNAME= +MQTT_PASSWORD= + +# BELY API Configuration +BELY_API_URL=https://api.bely.dev +BELY_API_KEY= + +# Handler Configuration +BELY_HANDLERS_DIR=./handlers + +# Logging Configuration +LOG_LEVEL=INFO + +# Notification Configuration (if using notification handlers) +# APPRISE_NOTIFICATION_URL=mailto://user:password@gmail.com +# APPRISE_NOTIFICATION_URL=discord://webhook_id/webhook_token diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/config/apprise_notification_config.yaml b/tools/developer_tools/bely-mqtt-message-broker/examples/config/apprise_notification_config.yaml new file mode 100644 index 000000000..cdc499950 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/config/apprise_notification_config.yaml @@ -0,0 +1,101 @@ +# Apprise Smart 
Notification Handler Configuration +# +# This configuration file defines how the ApprisSmartNotificationHandler +# sends notifications for BELY events. + +# Global configuration (optional) +global: + # Email server configuration for mailto:// URLs + mail_server: "smtp.gmail.com" + # mail_port: 587 + # mail_username: "your-email@gmail.com" + # mail_password: "your-app-password" + mail_from: "your-email@gmail.com" + mail_from_name: "BELY Notifications" + +# User-specific notification configuration +users: + # Example user with email and Discord notifications + john_doe: + # Apprise URLs for notification endpoints + # See https://github.com/caronc/apprise/wiki for all supported services + apprise_urls: + # Email notification + - "mailto://john.doe@example.com" + # Discord webhook + - "discord://webhook-id/webhook-token" + + # Which types of notifications to send + notifications: + # Notify when someone else updates any log entry (typically for document owners) + entry_updates: true + # Notify when someone else edits YOUR log entries specifically + own_entry_edits: true + # Notify when someone else replies to a log entry + entry_replies: true + # Notify when someone else creates an entry in a document + new_entries: true + # Notify when someone replies to any entry in your document + document_replies: true + # Notify when someone reacts to your log entry + reactions: true + + # Example user with Slack notifications only + jane_smith: + apprise_urls: + # Slack webhook + - "slack://token-a/token-b/token-c" + + notifications: + entry_updates: true + # Only get notified about general updates, not specifically own entries + own_entry_edits: false + entry_replies: false # Don't notify about replies + new_entries: true + + # Example user with multiple notification channels + bob_johnson: + apprise_urls: + # Email + - "mailto://bob@example.com" + # Telegram + - "tgram://bot-token/chat-id" + # Pushbullet + - "pbul://access-token" + + notifications: + entry_updates: true + 
entry_replies: true + new_entries: false # Don't notify about new entries + + # Example user with Teams notifications + alice_williams: + apprise_urls: + # Microsoft Teams + - "msteams://token-a/token-b/token-c" + + notifications: + entry_updates: true + entry_replies: true + new_entries: true + +# Supported Apprise Services: +# +# Email: +# - mailto://user:password@domain.com +# - mailgun://user:password@domain.com +# - sendgrid://api-key@domain.com +# +# Chat: +# - discord://webhook-id/webhook-token +# - slack://token-a/token-b/token-c +# - msteams://token-a/token-b/token-c +# - telegram://bot-token/chat-id +# +# Notifications: +# - pushbullet://access-token +# - pushover://user-key/api-token +# - gotify://hostname/token +# +# See https://github.com/caronc/apprise/wiki for complete list + diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/config/example.env b/tools/developer_tools/bely-mqtt-message-broker/examples/config/example.env new file mode 100644 index 000000000..a6f8ac309 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/config/example.env @@ -0,0 +1,19 @@ +# BELY MQTT Framework Configuration Example +# Copy this file to .env and update with your values + +# MQTT Configuration +MQTT_HOST=localhost +MQTT_PORT=1883 +MQTT_USERNAME= +MQTT_PASSWORD= +MQTT_TOPIC=bely/# + +# BELY API Configuration (optional) +BELY_API_URL=https://api.bely.dev +BELY_API_KEY=your-api-key-here + +# Logging +LOG_LEVEL=INFO + +# Handler Configuration +HANDLERS_DIR=./handlers \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_notification_handler.py b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_notification_handler.py new file mode 100644 index 000000000..b80128749 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_notification_handler.py @@ -0,0 +1,300 @@ +""" +Advanced notification handler using Apprise. 
"""
Advanced notification handler using Apprise.

This handler demonstrates how to send notifications for BELY events
using the Apprise library, which supports multiple notification services.

Installation:
    pip install bely-mqtt-framework[apprise]

Configuration:
    Set APPRISE_URLS environment variable with notification endpoints:

    APPRISE_URLS="mailto://user:password@gmail.com discord://webhook_id/webhook_token"

    Or configure in handler initialization.
"""

import logging
import os
from typing import List, Optional

from bely_mqtt.models import (
    LogEntryAddEvent,
    LogEntryReplyAddEvent,
    LogEntryReplyUpdateEvent,
    LogEntryUpdateEvent,
    MQTTMessage,
)
from bely_mqtt.plugin import MQTTHandler

logger = logging.getLogger(__name__)

try:
    import apprise

    APPRISE_AVAILABLE = True
except ImportError:
    APPRISE_AVAILABLE = False
    logger.warning(
        "Apprise not installed. Install with: pip install bely-mqtt-framework[apprise]"
    )


class ApprisNotificationHandler(MQTTHandler):
    """
    Send notifications for BELY events using Apprise.

    Supports multiple notification services:
    - Email (SMTP)
    - Discord
    - Slack
    - Telegram
    - Pushbullet
    - And many more...

    Configure notification URLs via:
    1. APPRISE_URLS environment variable
    2. Handler initialization
    3. Configuration file
    """

    def __init__(self, *args, notification_urls: Optional[List[str]] = None, **kwargs):
        """
        Initialize the notification handler.

        Args:
            notification_urls: List of Apprise notification URLs.
                If not provided, will read from APPRISE_URLS env var.
        """
        super().__init__(*args, **kwargs)
        self.apprise_instance: Optional["apprise.Apprise"] = None
        self.notification_urls: List[str] = []

        if not APPRISE_AVAILABLE:
            self.logger.warning(
                "Apprise not available. Install with: pip install bely-mqtt-framework[apprise]"
            )
            return

        # Get notification URLs from parameter or environment.
        # NOTE(review): APPRISE_URLS is split on whitespace, so individual
        # URLs must not contain spaces — confirm this matches deployment docs.
        if notification_urls:
            self.notification_urls = notification_urls
        else:
            env_urls = os.getenv("APPRISE_URLS", "")
            if env_urls:
                self.notification_urls = env_urls.split()

        if self.notification_urls:
            self.apprise_instance = apprise.Apprise()
            for url in self.notification_urls:
                # Apprise.add() returns False for malformed/unsupported URLs.
                if self.apprise_instance.add(url):
                    self.logger.info(f"Added notification endpoint: {url}")
                else:
                    self.logger.warning(f"Failed to add notification endpoint: {url}")
        else:
            self.logger.warning(
                "No notification URLs configured. Set APPRISE_URLS environment variable."
            )

    @property
    def topic_pattern(self) -> str:
        """Subscribe to all log entry events."""
        return "bely/logEntry/#"

    async def handle(self, message: MQTTMessage) -> None:
        """Handle log entry events and send notifications.

        Routes on topic substrings: "Reply" distinguishes reply events from
        entry events; "Add"/"Update" pick the specific action.
        """
        if not APPRISE_AVAILABLE or not self.apprise_instance:
            self.logger.debug("Apprise not available, skipping notification")
            return

        try:
            if "Reply" in message.topic:
                if "Add" in message.topic:
                    await self._handle_reply_added(message)
                elif "Update" in message.topic:
                    await self._handle_reply_updated(message)
            elif "Add" in message.topic:
                await self._handle_entry_added(message)
            elif "Update" in message.topic:
                await self._handle_entry_updated(message)
        except Exception as e:
            self.logger.error(f"Failed to handle notification event: {e}", exc_info=True)

    async def _handle_entry_added(self, message: MQTTMessage) -> None:
        """Handle log entry add event."""
        event = LogEntryAddEvent(**message.payload)

        title = f"📝 New Log Entry in {event.parent_log_document_info.name}"
        body = self._format_entry_added_body(event)

        await self._send_notification(title, body)

    async def _handle_entry_updated(self, message: MQTTMessage) -> None:
        """Handle log entry update event."""
        event = LogEntryUpdateEvent(**message.payload)

        title = f"✏️ Log Entry Updated in {event.parent_log_document_info.name}"
        body = self._format_entry_updated_body(event)

        await self._send_notification(title, body)

    async def _handle_reply_added(self, message: MQTTMessage) -> None:
        """Handle reply add event."""
        event = LogEntryReplyAddEvent(**message.payload)

        title = f"💬 Reply Added to Entry in {event.parent_log_document_info.name}"
        body = self._format_reply_added_body(event)

        await self._send_notification(title, body)

    async def _handle_reply_updated(self, message: MQTTMessage) -> None:
        """Handle reply update event."""
        event = LogEntryReplyUpdateEvent(**message.payload)

        title = f"✏️ Reply Updated in {event.parent_log_document_info.name}"
        body = self._format_reply_updated_body(event)

        await self._send_notification(title, body)

    def _format_entry_added_body(self, event: LogEntryAddEvent) -> str:
        """Format notification body for entry added event."""
        logbooks = ", ".join(lb.display_name or lb.name for lb in event.logbook_list)
        return (
            f"User: {event.event_triggered_by_username}\n"
            f"Logbooks: {logbooks}\n"
            f"Entry ID: {event.log_info.id}\n"
            f"Time: {event.event_timestamp.isoformat()}\n\n"
            f"Content:\n{event.text_diff[:200]}"
        )

    def _format_entry_updated_body(self, event: LogEntryUpdateEvent) -> str:
        """Format notification body for entry updated event."""
        logbooks = ", ".join(lb.display_name or lb.name for lb in event.logbook_list)
        return (
            f"User: {event.event_triggered_by_username}\n"
            f"Logbooks: {logbooks}\n"
            f"Entry ID: {event.log_info.id}\n"
            f"Time: {event.event_timestamp.isoformat()}\n\n"
            f"Changes:\n{event.text_diff[:200]}"
        )

    def _format_reply_added_body(self, event: LogEntryReplyAddEvent) -> str:
        """Format notification body for reply added event."""
        logbooks = ", ".join(lb.display_name or lb.name for lb in event.logbook_list)
        return (
            f"User: {event.event_triggered_by_username}\n"
            f"Logbooks: {logbooks}\n"
            f"Reply to Entry: {event.parent_log_info.id}\n"
            f"Reply ID: {event.log_info.id}\n"
            f"Time: {event.event_timestamp.isoformat()}\n\n"
            f"Content:\n{event.text_diff[:200]}"
        )

    def _format_reply_updated_body(self, event: LogEntryReplyUpdateEvent) -> str:
        """Format notification body for reply updated event."""
        logbooks = ", ".join(lb.display_name or lb.name for lb in event.logbook_list)
        return (
            f"User: {event.event_triggered_by_username}\n"
            f"Logbooks: {logbooks}\n"
            f"Reply to Entry: {event.parent_log_info.id}\n"
            f"Reply ID: {event.log_info.id}\n"
            f"Time: {event.event_timestamp.isoformat()}\n\n"
            f"Changes:\n{event.text_diff[:200]}"
        )

    async def _send_notification(self, title: str, body: str) -> None:
        """Send a notification via Apprise.

        Fix: Apprise.notify() returns False when delivery to one or more
        endpoints fails; the previous implementation discarded that result,
        so failures were silent. We now log a warning on failure.
        """
        if not self.apprise_instance:
            self.logger.warning("Apprise instance not initialized")
            return

        self.logger.info(f"Sending notification: {title}")

        # NOTE(review): Apprise.notify() is a blocking (synchronous) call
        # inside an async method; for high-volume brokers consider
        # offloading to an executor.
        if not self.apprise_instance.notify(body=body, title=title):
            self.logger.warning(f"Failed to deliver notification: {title}")


class SelectiveNotificationHandler(MQTTHandler):
    """
    Send notifications only for specific conditions.

    This handler demonstrates how to filter events and only send
    notifications for specific logbooks or users.
    """

    def __init__(
        self,
        *args,
        notification_urls: Optional[List[str]] = None,
        target_logbooks: Optional[List[str]] = None,
        exclude_users: Optional[List[str]] = None,
        **kwargs,
    ):
        """
        Initialize the selective notification handler.

        Args:
            notification_urls: List of Apprise notification URLs.
            target_logbooks: Only notify for these logbooks (None = all).
            exclude_users: Don't notify for these users.
        """
        super().__init__(*args, **kwargs)
        self.target_logbooks = target_logbooks
        self.exclude_users = exclude_users or []
        self.apprise_instance: Optional["apprise.Apprise"] = None

        if not APPRISE_AVAILABLE:
            return

        if notification_urls:
            self.apprise_instance = apprise.Apprise()
            for url in notification_urls:
                self.apprise_instance.add(url)

    @property
    def topic_pattern(self) -> str:
        """Subscribe to log entry add events."""
        return "bely/logEntry/Add"

    async def handle(self, message: MQTTMessage) -> None:
        """Handle log entry add event with filtering.

        Skips excluded users and (when configured) events whose logbooks
        don't intersect ``target_logbooks``.
        """
        if not APPRISE_AVAILABLE or not self.apprise_instance:
            return

        try:
            event = LogEntryAddEvent(**message.payload)

            # Filter by user
            if event.event_triggered_by_username in self.exclude_users:
                self.logger.debug(
                    f"Skipping notification for excluded user: "
                    f"{event.event_triggered_by_username}"
                )
                return

            # Filter by logbook
            if self.target_logbooks:
                logbook_names = [lb.name for lb in event.logbook_list]
                if not any(lb in self.target_logbooks for lb in logbook_names):
                    self.logger.debug(
                        f"Skipping notification for non-target logbooks: {logbook_names}"
                    )
                    return

            # Send notification
            title = f"📝 New Entry in {event.parent_log_document_info.name}"
            body = (
                f"User: {event.event_triggered_by_username}\n"
                f"Entry ID: {event.log_info.id}\n\n"
                f"{event.text_diff[:200]}"
            )

            # Fix: check the delivery result instead of unconditionally
            # logging success (notify() returns False on failure).
            if self.apprise_instance.notify(title=title, body=body):
                self.logger.info(f"Notification sent for entry {event.log_info.id}")
            else:
                self.logger.warning(
                    f"Failed to deliver notification for entry {event.log_info.id}"
                )

        except Exception as e:
            self.logger.error(f"Failed to handle notification: {e}", exc_info=True)
b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/README.md @@ -0,0 +1,78 @@ +# Apprise Smart Notification Handler + +A modular BELY MQTT handler for sending smart notifications via Apprise with enhanced email threading support. + +## Structure + +This handler is organized as a Python package with the following modules: + +``` +apprise_smart_notification/ +├── __init__.py # Package initialization and exports +├── handler.py # Main handler class implementation +├── config_loader.py # YAML configuration loading and processing +├── notification_processor.py # Notification routing and sending logic +├── formatters.py # Message formatting utilities +├── email_threading.py # Email threading header generation +├── apprise_email_wrapper.py # Custom wrapper for email headers support +└── README.md # This file +``` + +## Module Responsibilities + +### handler.py +- Main `AppriseSmartNotificationHandler` class +- Event handling methods (handle_log_entry_add, etc.) 
+- High-level event routing logic +- Coordination between other modules + +### config_loader.py +- `ConfigLoader` class for YAML file processing +- Global configuration management +- URL processing for mail server settings +- Configuration validation + +### notification_processor.py +- `NotificationProcessor` class for notification management +- User notification settings management +- Apprise instance management +- Notification sending logic + +### formatters.py +- `NotificationFormatter` class for message formatting +- HTML message generation +- Permalink generation +- Trigger description generation + +## Usage + +The handler can be used exactly the same way as before: + +```python +from apprise_smart_notification import AppriseSmartNotificationHandler + +handler = AppriseSmartNotificationHandler( + config_path="/path/to/config.yaml", + global_config=global_config +) +``` + +## Configuration + +See the main docstring in `__init__.py` for detailed configuration documentation. + +## Benefits of Modular Structure + +1. **Maintainability**: Each module has a single, clear responsibility +2. **Testability**: Individual components can be tested in isolation +3. **Reusability**: Components can be reused in other handlers +4. **Readability**: Smaller, focused files are easier to understand +5. 
**Extensibility**: New features can be added without modifying existing code + +## Development + +When adding new features: +- Event handling logic goes in `handler.py` +- Configuration features go in `config_loader.py` +- Notification logic goes in `notification_processor.py` +- Message formatting goes in `formatters.py` \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/__init__.py b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/__init__.py new file mode 100644 index 000000000..7e208c9d5 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/__init__.py @@ -0,0 +1,57 @@ +""" +Smart Apprise Notification Handler for BELY MQTT Events. + +This handler sends notifications for BELY events using Apprise, with configuration +from a YAML file. It supports: + +1. Log Entry Updates - Notify when someone else updates a log entry +2. Log Entry Replies - Notify when someone else replies to a log entry +3. New Log Entries - Notify when someone else creates an entry in a document +4. Log Reactions - Notify when someone reacts to a log entry +5. Document Replies - Notify document owners when someone replies to any entry in their document +6. Own Entry Edits - Notify when someone else edits YOUR log entry (different from entry_updates) + +Configuration is loaded from a YAML file with the following structure: + + global: + # Global mail server settings - automatically applied to simple mailto:// URLs + + # Example 1: Authenticated mail server (Gmail, Office365, etc.) 
+ mail_server: "smtp.gmail.com" + mail_port: 587 + mail_username: "your-email@gmail.com" # Optional - only for authenticated servers + mail_password: "your-app-password" # Optional - only for authenticated servers + mail_from: "your-email@gmail.com" + mail_from_name: "BELY Notifications" + + # Example 2: Non-authenticated mail server (internal/relay servers) + # mail_server: "mail.com" + # mail_port: 25 # Often port 25 for non-authenticated + # mail_from: "bely@aps.anl.gov" + # mail_from_name: "BELY Notifications" + # No username/password needed for non-authenticated servers + + users: + john_doe: + apprise_urls: + # Simple mailto URL - will use global mail settings if available + - "mailto://john@example.com" + # Other notification services + - "discord://webhook-id/webhook-token" + notifications: + entry_updates: true # Notify when any log entry is updated (typically for document owners) + own_entry_edits: true # Notify when YOUR log entries are edited by others + entry_replies: true + new_entries: true + reactions: true + document_replies: true # Notify when anyone replies in owned documents + +Example usage: + handler = AppriseSmartNotificationHandler( + config_path="/path/to/config.yaml" + ) +""" + +from .handler import AppriseSmartNotificationHandler + +__all__ = ["AppriseSmartNotificationHandler"] diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/apprise_email_wrapper.py b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/apprise_email_wrapper.py new file mode 100644 index 000000000..44ae7dab4 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/apprise_email_wrapper.py @@ -0,0 +1,192 @@ +""" +Custom Apprise wrapper to support email headers for threading. + +This module provides a custom implementation that allows passing email headers +to email notifications without modifying the Apprise library. 
+""" + +from typing import Dict, Optional +from urllib.parse import urlparse + +import apprise + + +class EmailNotificationWrapper: + """ + Wrapper for email notifications that supports custom headers. + + This class provides a way to send email notifications with custom headers + (like Message-ID and References for threading) without modifying Apprise. + """ + + def __init__(self, apprise_url: str): + """ + Initialize the email wrapper with an Apprise URL. + + Args: + apprise_url: The Apprise email URL (mailto:// or mailtos://) + """ + self.apprise_url = apprise_url + self._parse_email_config() + + def _parse_email_config(self) -> None: + """Parse the email configuration from the Apprise URL.""" + # Parse the URL to extract components + parsed = urlparse(self.apprise_url) + + # Check if this is an email notification + if parsed.scheme not in ("mailto", "mailtos"): + raise ValueError(f"Not an email URL: {self.apprise_url}") + + # Store the parsed components for later use + self.scheme = parsed.scheme + self.netloc = parsed.netloc + self.path = parsed.path + self.query = parsed.query + self.parsed_url = parsed + + def send_with_headers( + self, title: str, body: str, headers: Optional[Dict[str, str]] = None + ) -> bool: + """ + Send an email notification with custom headers. 
+ + Args: + title: Email subject + body: Email body + headers: Optional dictionary of email headers + + Returns: + True if notification was sent successfully, False otherwise + """ + try: + # Create a NotifyEmail instance directly + email_instance = apprise.Apprise.instantiate(self.apprise_url) + + # Check if this is an email notification instance + # We check for the class name since we can't import NotifyEmail directly + if not (email_instance and email_instance.__class__.__name__ == "NotifyEmail"): + # Fall back to regular Apprise if not an email notification + apobj = apprise.Apprise() + apobj.add(self.apprise_url) + result = apobj.notify(body=body, title=title) + return bool(result) + + # If we have headers, inject them into the email instance + if headers: + # Ensure the email instance has a headers attribute + if not hasattr(email_instance, "headers"): + setattr(email_instance, "headers", {}) # type: ignore[attr-defined] + # Clear any existing headers and set our custom ones + getattr(email_instance, "headers").clear() # type: ignore[attr-defined] + getattr(email_instance, "headers").update(headers) # type: ignore[attr-defined] + else: + # Ensure headers is at least an empty dict + if not hasattr(email_instance, "headers"): + setattr(email_instance, "headers", {}) # type: ignore[attr-defined] + + # Send the notification using the email instance + result = email_instance.send(body=body, title=title) + return bool(result) + + except Exception as e: + print(f"Error sending email with headers: {e}") + return False + + +class AppriseWithEmailHeaders: + """ + Extended Apprise wrapper that supports email headers for threading. + + This class wraps around Apprise to provide email header support while + maintaining compatibility with other notification types. 
+ """ + + def __init__(self): + """Initialize the wrapper.""" + self.apprise = apprise.Apprise() + self.email_wrappers = {} + self.non_email_urls = [] + + def add(self, url: str) -> bool: + """ + Add a notification URL. + + Args: + url: The Apprise notification URL + + Returns: + True if URL was added successfully + """ + # Check if this is an email URL + if self._is_email_url(url): + # Create an email wrapper for this URL + try: + wrapper = EmailNotificationWrapper(url) + # Use URL as key for now (could be improved) + self.email_wrappers[url] = wrapper + return True + except Exception: + # If wrapper creation fails, fall back to regular Apprise + result = self.apprise.add(url) + return bool(result) + else: + # For non-email URLs, use regular Apprise + self.non_email_urls.append(url) + result = self.apprise.add(url) + return bool(result) + + def _is_email_url(self, url: str) -> bool: + """Check if a URL is an email notification URL.""" + try: + parsed = urlparse(url) + return parsed.scheme in ("mailto", "mailtos") + except Exception: + return False + + def notify(self, body: str, title: str = "", headers: Optional[Dict[str, str]] = None) -> bool: + """ + Send notifications with optional email headers support. 
"""
Configuration loader for Apprise Smart Notification Handler.
"""

import logging
from logging import Logger
from pathlib import Path
from typing import Any, Dict
from urllib.parse import quote

try:
    import yaml

    YAML_AVAILABLE = True
except ImportError:
    YAML_AVAILABLE = False


class ConfigLoader:
    """Handles loading and processing of YAML configuration files."""

    def __init__(self, logger: Logger):
        """
        Initialize the config loader.

        Args:
            logger: Logger instance for output

        Raises:
            ImportError: If PyYAML is not installed.
        """
        self.logger = logger

        if not YAML_AVAILABLE:
            self.logger.warning("PyYAML not installed. Install with: pip install pyyaml")
            raise ImportError("PyYAML is required for this handler")

    def load_config(self, config_path: str) -> Dict[str, Any]:
        """
        Load configuration from YAML file.

        Args:
            config_path: Path to YAML configuration file

        Returns:
            Dictionary containing the configuration

        Raises:
            FileNotFoundError: If config file not found
            ValueError: If config is invalid
        """
        if not YAML_AVAILABLE:
            raise ImportError("PyYAML is required for configuration loading")

        config_file = Path(config_path)

        if not config_file.exists():
            raise FileNotFoundError(f"Config file not found: {config_path}")

        try:
            with open(config_file, "r") as f:
                # An empty YAML file parses to None; normalize to {}.
                config = yaml.safe_load(f) or {}

            self.logger.info(f"Loaded configuration from {config_path}")

            # Validate config structure
            if "users" not in config:
                self.logger.warning("No users configured in config file")
                config["users"] = {}

            # Log global configuration status
            self._log_global_config_status(config)

            return config

        except yaml.YAMLError as e:
            raise ValueError(f"Invalid YAML in config file: {e}")
        except Exception as e:
            raise ValueError(f"Error loading config file: {e}")

    def _log_global_config_status(self, config: Dict[str, Any]) -> None:
        """
        Log the status of global configuration.

        Args:
            config: The loaded configuration dictionary
        """
        if "global" in config:
            global_config = config["global"]
            if "mail_server" in global_config:
                # Check if this is an authenticated or non-authenticated server
                has_auth = "mail_username" in global_config and "mail_password" in global_config
                auth_type = "authenticated" if has_auth else "non-authenticated"
                port = global_config.get("mail_port", 25 if not has_auth else 587)

                self.logger.info(
                    f"Global mail server configured ({auth_type}): "
                    f"{global_config.get('mail_server')}:{port}"
                )

                # Warn if partial authentication (only username or only password)
                if ("mail_username" in global_config) != ("mail_password" in global_config):
                    self.logger.warning(
                        "Partial authentication detected. Both mail_username and mail_password "
                        "are required for authenticated servers."
                    )
            else:
                self.logger.info(
                    "No global mail server configured. Simple mailto:// URLs will need full configuration."
                )
        else:
            self.logger.info(
                "No global configuration found. Simple mailto:// URLs will need full configuration."
            )

    def process_apprise_url(self, url: str, global_config: Dict[str, Any]) -> str:
        """
        Process Apprise URL to incorporate global settings.

        For mailto:// URLs, this will use global mail server settings if available.
        Supports both authenticated and non-authenticated mail servers.

        Fix: credentials and the display name are now percent-encoded before
        being interpolated into the URL — previously a password containing
        '@', ':' or '/' (or a display name with spaces) produced a malformed
        Apprise URL.

        Args:
            url: Original Apprise URL
            global_config: Global configuration dictionary

        Returns:
            Processed URL with global settings applied
        """
        # Check if this is a simple mailto URL that needs global settings
        if url.startswith("mailto://") and global_config and "mail_server" in global_config:
            # Extract the email address from the simple mailto URL
            email_part = url.replace("mailto://", "")

            # Get mail server settings
            mail_server = global_config.get("mail_server")
            mail_from = global_config.get("mail_from", "noreply@localhost")
            mail_from_name = global_config.get("mail_from_name", "BELY Notifications")

            # Check if this is an authenticated or non-authenticated server
            has_auth = "mail_username" in global_config and "mail_password" in global_config

            if has_auth:
                # Authenticated mail server (Gmail, Office365, etc.)
                mail_port = global_config.get("mail_port", 587)
                mail_username = global_config.get("mail_username")
                mail_password = global_config.get("mail_password")

                # If mail_from not specified, use username
                if "mail_from" not in global_config:
                    mail_from = mail_username

                # Construct the full Apprise mailto URL with authentication.
                # Format: mailto://username:password@server:port?to=recipient&from=sender&name=sender_name
                # quote(..., safe="") encodes every reserved character so the
                # userinfo section cannot be corrupted by the password.
                processed_url = (
                    f"mailto://{quote(str(mail_username), safe='')}:"
                    f"{quote(str(mail_password), safe='')}@{mail_server}:{mail_port}"
                    f"?to={email_part}&from={mail_from}&name={quote(mail_from_name, safe='')}"
                )

                self.logger.debug(f"Processed mailto URL with authentication for: {email_part}")
            else:
                # Non-authenticated mail server (internal relay servers)
                mail_port = global_config.get("mail_port", 25)  # Default to port 25 for non-auth

                # Construct the Apprise mailto URL without authentication
                # Format: mailto://server:port?to=recipient&from=sender&name=sender_name
                processed_url = (
                    f"mailto://{mail_server}:{mail_port}"
                    f"?to={email_part}&from={mail_from}&name={quote(mail_from_name, safe='')}"
                )

                self.logger.debug(f"Processed mailto URL without authentication for: {email_part}")

            return processed_url

        # Return original URL if not a mailto or no global config
        return url
reaction event""" + return self in {self.REACTION_ADD, self.REACTION_DELETE} + + +class EmailThreadingStrategy: + """ + Email threading strategy that works with Apprise's custom headers. + Only applies to email-type notifications. + """ + + # Email notification schemes in Apprise + EMAIL_SCHEMES = { + "mailto", + "mailtos", # Generic email + } + + def __init__(self, domain: str = "notifications.bely.app"): + """ + Initialize the email threading strategy. + + Args: + domain: Domain to use in message IDs + """ + self.domain = domain + + @staticmethod + def is_email_notification(apprise_url: str) -> bool: + """ + Check if an Apprise URL is for email notifications. + + Args: + apprise_url: The Apprise notification URL + + Returns: + True if this is an email notification URL + """ + try: + # Handle special case for mailto URLs that might not have :// + if apprise_url.startswith("mailto:"): + return True + + parsed = urlparse(apprise_url) + scheme = parsed.scheme.lower() + + # Check if it's an email scheme + return scheme in EmailThreadingStrategy.EMAIL_SCHEMES + except Exception: + return False + + def _generate_stable_id(self, identifier: str) -> str: + """ + Generate a stable ID for threading purposes. + Uses a hash to ensure consistency across notifications. + + Args: + identifier: The identifier to hash + + Returns: + A stable message ID + """ + # Create a stable hash from the identifier + hash_obj = hashlib.sha256(identifier.encode()) + short_hash = hash_obj.hexdigest()[:12] + return f"<{identifier}.{short_hash}@{self.domain}>" + + def generate_document_thread_id(self, document_id: str) -> str: + """ + Generate the root thread ID for a document. + + Args: + document_id: The document ID + + Returns: + Thread ID for the document + """ + # Ensure document_id is a string + document_id = str(document_id) + return self._generate_stable_id(f"doc.{document_id}") + + def generate_entry_thread_id(self, entry_id: str) -> str: + """ + Generate a stable thread ID for an entry. 
+ + Args: + entry_id: The entry ID + + Returns: + Thread ID for the entry + """ + # Ensure entry_id is a string + entry_id = str(entry_id) + return self._generate_stable_id(f"entry.{entry_id}") + + def get_email_headers( + self, + event_type: NotificationEventType, + document_id: str, + document_name: str, + entry_id: Optional[str] = None, + parent_entry_id: Optional[str] = None, + ) -> Dict[str, str]: + """ + Generate email-specific threading headers. + + Args: + event_type: Type of notification event + document_id: The document ID + document_name: The document name (for Thread-Topic) + entry_id: The log entry ID (if applicable) + parent_entry_id: The parent entry ID (for replies) + + Returns: + Dictionary of email threading headers + """ + headers = {} + + # Ensure all IDs are strings + document_id = str(document_id) + if entry_id is not None: + entry_id = str(entry_id) + if parent_entry_id is not None: + parent_entry_id = str(parent_entry_id) + + # Thread-Topic helps some email clients group messages + # Use document name for better readability + safe_name = document_name.replace("\n", " ").replace("\r", " ")[:100] + headers["Thread-Topic"] = f"Log: {safe_name}" + + # X-Thread-Id is a custom header for additional threading hint + headers["X-Thread-Id"] = self.generate_document_thread_id(document_id) + headers["X-Document-Id"] = document_id + + if event_type.is_document_event: + # Document events start the thread + # We'll use References to establish the thread root + headers["References"] = self.generate_document_thread_id(document_id) + + elif event_type.is_entry_event: + # Entry events reference the document + if not entry_id: + raise ValueError(f"entry_id required for {event_type.value}") + + headers["In-Reply-To"] = self.generate_document_thread_id(document_id) + headers["References"] = self.generate_document_thread_id(document_id) + headers["X-Entry-Id"] = entry_id + + elif event_type.is_reply_event: + # Reply events reference both document and parent entry 
+ if not entry_id: + raise ValueError(f"entry_id required for {event_type.value}") + if not parent_entry_id: + raise ValueError(f"parent_entry_id required for {event_type.value}") + + parent_ref = self.generate_entry_thread_id(parent_entry_id) + doc_ref = self.generate_document_thread_id(document_id) + + headers["In-Reply-To"] = parent_ref + headers["References"] = f"{doc_ref} {parent_ref}" + headers["X-Entry-Id"] = entry_id + headers["X-Parent-Entry-Id"] = parent_entry_id + + elif event_type.is_reaction_event: + # Reaction events reference the entry they're reacting to + if not entry_id: + raise ValueError(f"entry_id required for {event_type.value}") + + entry_ref = self.generate_entry_thread_id(entry_id) + doc_ref = self.generate_document_thread_id(document_id) + + headers["In-Reply-To"] = entry_ref + headers["References"] = f"{doc_ref} {entry_ref}" + headers["X-Entry-Id"] = entry_id + + # Add metadata for debugging + headers["X-Notification-Time"] = datetime.now(timezone.utc).isoformat() + headers["X-Event-Type"] = event_type.value + + return headers + + def generate_subject( + self, + event_type: NotificationEventType, + document_title: str, + action_description: Optional[str] = None, + is_email: bool = True, + ) -> str: + """ + Generate subject lines - consistent for emails, descriptive for others. 
+ + Args: + event_type: Type of notification event + document_title: Title of the document + action_description: Optional description of the action + is_email: Whether this is for an email notification + + Returns: + Subject line for the notification + """ + # Sanitize document title + safe_title = document_title.replace("\n", " ").replace("\r", " ") + + if is_email: + # Email subjects need consistency for threading + base_subject = f"Log: {safe_title}" + + # Document creation starts a new thread without "Re:" + if event_type == NotificationEventType.DOCUMENT_CREATE: + return f"New {base_subject}" + + # All other events use "Re:" for threading + prefix = "Re: " + + # Add event-specific indicators + if event_type == NotificationEventType.DOCUMENT_UPDATE: + suffix = " [Document Updated]" + elif event_type == NotificationEventType.DOCUMENT_DELETE: + suffix = " [Document Deleted]" + elif event_type == NotificationEventType.ENTRY_UPDATE: + suffix = " [Entry Updated]" + elif event_type == NotificationEventType.ENTRY_DELETE: + suffix = " [Entry Deleted]" + elif event_type == NotificationEventType.REPLY_UPDATE: + suffix = " [Reply Updated]" + elif event_type == NotificationEventType.REPLY_DELETE: + suffix = " [Reply Deleted]" + elif event_type == NotificationEventType.REACTION_ADD: + if action_description: + suffix = f" [{action_description}]" + else: + suffix = " [Reaction Added]" + elif event_type == NotificationEventType.REACTION_DELETE: + if action_description: + suffix = f" [{action_description}]" + else: + suffix = " [Reaction Removed]" + elif action_description: + suffix = f" - {action_description}" + else: + suffix = "" + + return f"{prefix}{base_subject}{suffix}" + + else: + # Non-email notifications can have more descriptive subjects + if event_type == NotificationEventType.DOCUMENT_CREATE: + return f"📄 New Document: {safe_title}" + elif event_type == NotificationEventType.DOCUMENT_UPDATE: + return f"📝 Document Updated: {safe_title}" + elif event_type == 
NotificationEventType.DOCUMENT_DELETE: + return f"🗑️ Document Deleted: {safe_title}" + elif event_type == NotificationEventType.ENTRY_ADD: + desc = f" - {action_description}" if action_description else "" + return f"➕ New Entry in {safe_title}{desc}" + elif event_type == NotificationEventType.ENTRY_UPDATE: + desc = f" - {action_description}" if action_description else "" + return f"✏️ Entry Updated in {safe_title}{desc}" + elif event_type == NotificationEventType.ENTRY_DELETE: + desc = f" - {action_description}" if action_description else "" + return f"🗑️ Entry Deleted in {safe_title}{desc}" + elif event_type == NotificationEventType.ENTRY_REPLY: + desc = f" - {action_description}" if action_description else "" + return f"💬 Reply in {safe_title}{desc}" + elif event_type == NotificationEventType.REPLY_UPDATE: + desc = f" - {action_description}" if action_description else "" + return f"✏️ Reply Updated in {safe_title}{desc}" + elif event_type == NotificationEventType.REPLY_DELETE: + desc = f" - {action_description}" if action_description else "" + return f"🗑️ Reply Deleted in {safe_title}{desc}" + elif event_type == NotificationEventType.REACTION_ADD: + desc = f" - {action_description}" if action_description else "" + return f"👍 Reaction in {safe_title}{desc}" + elif event_type == NotificationEventType.REACTION_DELETE: + desc = f" - {action_description}" if action_description else "" + return f"👎 Reaction Removed in {safe_title}{desc}" + + return safe_title + + +def detect_event_type( + event: Any, + is_reply: bool = False, + is_update: bool = False, + is_delete: bool = False, + is_reaction: bool = False, + is_reaction_delete: bool = False, +) -> NotificationEventType: + """ + Detect the notification event type from the event and context. 
+ + Args: + event: The event object + is_reply: Whether this is a reply event + is_update: Whether this is an update event + is_delete: Whether this is a delete event + is_reaction: Whether this is a reaction event + is_reaction_delete: Whether this is a reaction delete event + + Returns: + The appropriate NotificationEventType + """ + if is_reaction: + if is_reaction_delete: + return NotificationEventType.REACTION_DELETE + return NotificationEventType.REACTION_ADD + + if is_reply: + if is_delete: + return NotificationEventType.REPLY_DELETE + elif is_update: + return NotificationEventType.REPLY_UPDATE + else: + return NotificationEventType.ENTRY_REPLY + + # Regular entry events + if is_delete: + return NotificationEventType.ENTRY_DELETE + elif is_update: + return NotificationEventType.ENTRY_UPDATE + else: + return NotificationEventType.ENTRY_ADD diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/formatters.py b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/formatters.py new file mode 100644 index 000000000..8be1a8dbc --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/formatters.py @@ -0,0 +1,409 @@ +""" +Notification formatters for Apprise Smart Notification Handler. 
+""" + +import logging +from datetime import datetime, tzinfo +from typing import Optional, Union +from zoneinfo import ZoneInfo + +from bely_mqtt import ( + LogEntryAddEvent, + LogEntryUpdateEvent, + LogEntryDeleteEvent, + LogEntryReplyAddEvent, + LogEntryReplyUpdateEvent, + LogEntryReplyDeleteEvent, + LogReactionAddEvent, + LogReactionDeleteEvent, + LogEntryEventBase, + LogReactionEventBase, +) +from bely_mqtt.models import CoreEvent + + +class NotificationFormatter: + """Handles formatting of notification messages.""" + + def __init__( + self, bely_url: Optional[str], logger: logging.Logger, timezone: Optional[str] = None + ): + """ + Initialize the formatter. + + Args: + bely_url: Base URL for BELY instance + logger: Logger instance for output + timezone: Timezone string (e.g., 'America/New_York'). If None, uses system local timezone. + """ + self.bely_url = bely_url + self.logger = logger + self.timezone: tzinfo # Declare the type + + # Set timezone - use provided timezone, or detect local timezone + if timezone: + try: + self.timezone = ZoneInfo(timezone) + except Exception as e: + self.logger.warning(f"Invalid timezone '{timezone}': {e}. Using UTC.") + self.timezone = ZoneInfo("UTC") + else: + # Detect local timezone using datetime + try: + # Get the local timezone from the system + local_tz = datetime.now().astimezone().tzinfo + if local_tz is not None: + self.timezone = local_tz + else: + self.timezone = ZoneInfo("UTC") + except Exception as e: + self.logger.debug(f"Could not detect local timezone: {e}. Using UTC.") + self.timezone = ZoneInfo("UTC") + + def _format_timestamp(self, timestamp: datetime) -> str: + """ + Format a timestamp for display in notifications. 
+ + Args: + timestamp: The datetime object to format + + Returns: + Formatted timestamp string in local timezone + """ + # Ensure timestamp is timezone-aware + if timestamp.tzinfo is None: + # Assume UTC if no timezone info + timestamp = timestamp.replace(tzinfo=ZoneInfo("UTC")) + + # Convert to local timezone + local_timestamp = timestamp.astimezone(self.timezone) + + # Format as readable string with timezone + return local_timestamp.strftime("%Y-%m-%d %H:%M:%S %Z") + + def format_entry_added(self, event: LogEntryAddEvent) -> str: + """Format notification body for new log entry.""" + body = ( + f"New entry added to {event.parent_log_document_info.name}
" + f"By: {event.event_triggered_by_username}
" + f"Time: {self._format_timestamp(event.event_timestamp)}
" + f"Description: {event.description}
" + f"
Entry markdown: {self._format_text_diff_pre(event.text_diff)}" + ) + return self._append_permalink_and_trigger(body, event) + + def format_entry_updated(self, event: LogEntryUpdateEvent) -> str: + """Format notification body for updated log entry (document owner notification).""" + body = ( + f"Entry updated in {event.parent_log_document_info.name}
" + f"Updated by: {event.event_triggered_by_username}
" + f"Original author: {event.log_info.entered_by_username}
" + f"Time: {self._format_timestamp(event.event_timestamp)}
" + f"Description: {event.description}
" + f"
Entry markdown changes: {self._format_text_diff_pre(event.text_diff)}" + ) + return self._append_permalink_and_trigger(body, event) + + def format_own_entry_edited(self, event: LogEntryUpdateEvent) -> str: + """Format notification body for when user's own entry is edited by someone else.""" + body = ( + f"Entry edited in {event.parent_log_document_info.name}
" + f"Original author: {event.log_info.entered_by_username}
" + f"Edited by: {event.event_triggered_by_username}
" + f"Time: {self._format_timestamp(event.event_timestamp)}
" + f"Description: {event.description}
" + f"
Entry markdown changes: {self._format_text_diff_pre(event.text_diff)}" + ) + return self._append_permalink_and_trigger(body, event, "own_entry_edit") + + def format_reply_added(self, event: LogEntryReplyAddEvent) -> str: + """Format notification body for new reply.""" + body = ( + f"New reply to entry in {event.parent_log_document_info.name}
" + f"Entry by: {event.parent_log_info.entered_by_username}
" + f"Reply by: {event.event_triggered_by_username}
" + f"Time: {self._format_timestamp(event.event_timestamp)}
" + f"
Reply markdown: {self._format_text_diff_pre(event.text_diff)}" + ) + return self._append_permalink_and_trigger(body, event) + + def format_reply_updated(self, event: LogEntryReplyUpdateEvent) -> str: + """Format notification body for updated reply (document owner notification).""" + body = ( + f"Reply updated in {event.parent_log_document_info.name}
" + f"Updated by: {event.event_triggered_by_username}
" + f"On entry by: {event.parent_log_info.entered_by_username}
" + f"Time: {self._format_timestamp(event.event_timestamp)}
" + f"
Reply markdown changes: {self._format_text_diff_pre(event.text_diff)}" + ) + return self._append_permalink_and_trigger(body, event) + + def format_own_reply_updated(self, event: LogEntryReplyUpdateEvent) -> str: + """Format notification body for when a reply on user's own entry is updated by someone else.""" + body = ( + f"Reply updated on entry in {event.parent_log_document_info.name}
" + f"Entry by: {event.parent_log_info.entered_by_username}
" + f"Updated by: {event.event_triggered_by_username}
" + f"Time: {self._format_timestamp(event.event_timestamp)}
" + f"
Reply markdown changes: {self._format_text_diff_pre(event.text_diff)}" + ) + return self._append_permalink_and_trigger(body, event, "own_reply_update") + + def format_document_reply(self, event: LogEntryReplyAddEvent) -> str: + """Format notification body for document owner about new reply.""" + body = ( + f"New reply added in document {event.parent_log_document_info.name}
" + f"Reply by: {event.event_triggered_by_username}
" + f"To entry by: {event.parent_log_info.entered_by_username}
" + f"Time: {self._format_timestamp(event.event_timestamp)}
" + f"
Reply markdown: {self._format_text_diff_pre(event.text_diff)}" + ) + return self._append_permalink_and_trigger(body, event, "document_owner") + + def format_reaction_added(self, event: LogReactionAddEvent) -> str: + """Format notification body for added reaction.""" + reaction_info = event.log_reaction.reaction + body = ( + f"New reaction added to entry in {event.parent_log_document_info.name}
" + f"By: {event.event_triggered_by_username}
" + f"Time: {self._format_timestamp(event.event_timestamp)}
" + f"Reaction: {reaction_info.emoji} {reaction_info.name}
" + f"Description: {event.description}" + ) + return self._append_permalink_and_trigger(body, event) + + def format_reaction_deleted(self, event: LogReactionDeleteEvent) -> str: + """Format notification body for deleted reaction.""" + reaction_info = event.log_reaction.reaction + body = ( + f"Reaction removed from entry in {event.parent_log_document_info.name}
" + f"By: {event.event_triggered_by_username}
" + f"Time: {self._format_timestamp(event.event_timestamp)}
" + f"Reaction: {reaction_info.emoji} {reaction_info.name}
" + f"Description: {event.description}" + ) + return self._append_permalink_and_trigger(body, event) + + def format_entry_deleted(self, event: LogEntryDeleteEvent) -> str: + """Format notification body for deleted log entry (document owner notification).""" + body = ( + f"Entry deleted from {event.parent_log_document_info.name}
" + f"Deleted by: {event.event_triggered_by_username}
" + f"Original author: {event.log_info.entered_by_username}
" + f"Time: {self._format_timestamp(event.event_timestamp)}
" + f"Description: {event.description}
" + f"
Deleted entry content: {self._format_text_diff_pre(event.text_diff)}" + ) + return self._append_permalink_and_trigger(body, event, "entry_delete") + + def format_own_entry_deleted(self, event: LogEntryDeleteEvent) -> str: + """Format notification body for when user's own entry is deleted by someone else.""" + body = ( + f"Entry was deleted from {event.parent_log_document_info.name}
" + f"Deleted by: {event.event_triggered_by_username}
" + f"Time: {self._format_timestamp(event.event_timestamp)}
" + f"Description: {event.description}
" + f"
Deleted entry content: {self._format_text_diff_pre(event.text_diff)}" + ) + return self._append_permalink_and_trigger(body, event, "own_entry_delete") + + def format_reply_deleted(self, event: LogEntryReplyDeleteEvent) -> str: + """Format notification body for deleted reply (entry creator notification).""" + body = ( + f"Reply deleted from entry in {event.parent_log_document_info.name}
" + f"Deleted by: {event.event_triggered_by_username}
" + f"Time: {self._format_timestamp(event.event_timestamp)}
" + f"
Deleted reply content: {self._format_text_diff_pre(event.text_diff)}" + ) + return self._append_permalink_and_trigger(body, event, "reply_delete") + + def format_document_reply_deleted(self, event: LogEntryReplyDeleteEvent) -> str: + """Format notification body for document owner about deleted reply.""" + body = ( + f"Reply deleted from document {event.parent_log_document_info.name}
" + f"Deleted by: {event.event_triggered_by_username}
" + f"On entry by: {event.parent_log_info.entered_by_username}
" + f"Time: {self._format_timestamp(event.event_timestamp)}
" + f"
Deleted reply content: {self._format_text_diff_pre(event.text_diff)}" + ) + return self._append_permalink_and_trigger(body, event, "document_owner") + + def _format_text_diff_pre(self, text_diff: str, max_height: str = "200px") -> str: + """ + Format text diff in a styled pre box. + + Args: + text_diff: The text difference to display + max_height: Maximum height of the pre box (default: "200px") + + Returns: + HTML formatted pre box with the text diff + """ + return ( + f"
"
+            f"{text_diff}
" + ) + + def _generate_log_entry_link(self, document_id: int, log_id: int) -> str: + """ + Generate a direct link to a log entry in BELY. + + Args: + document_id: The document ID + log_id: The log entry ID + + Returns: + URL string to the log entry + """ + if not self.bely_url: + return "" + + # Remove trailing slash if present + base_url = self.bely_url.rstrip("/") + + return f"{base_url}/views/item/view?id={document_id}&logId={log_id}" + + def _append_permalink_and_trigger( + self, + body: str, + event: Union[LogEntryEventBase, LogReactionEventBase], + notification_context: Optional[str] = None, + ) -> str: + """ + Append permalink and trigger description to notification body. + + Args: + body: The notification body + event: The event that triggered the notification + notification_context: Optional context about the notification type + + Returns: + Body with permalink and trigger description appended + """ + # Generate permalink if bely_url is available + if self.bely_url: + # Handle both LogEntryEventBase and LogReaction events + if isinstance(event, LogEntryEventBase): + log_id = event.log_info.id + document_id = event.parent_log_document_info.id + elif isinstance(event, LogReactionEventBase): + log_id = event.parent_log_info.id + document_id = event.parent_log_document_info.id + else: + log_id = None + document_id = None + + if log_id and document_id: + link = self._generate_log_entry_link(document_id, log_id) + body += f"

View entry: {link}" + + # Add trigger description + trigger_description = self._get_trigger_description(event, notification_context) + body += f"


{trigger_description}" + + return body + + def _get_trigger_description( + self, event: CoreEvent, notification_context: Optional[str] = None + ) -> str: + """ + Get a description of why this notification was triggered. + + Args: + event: The event that triggered the notification + notification_context: Optional context about the notification type + + Returns: + A human-readable description of the trigger + """ + if isinstance(event, LogEntryAddEvent): + return ( + f"This notification was sent because {event.event_triggered_by_username} " + f"added a new log entry to the document '{event.parent_log_document_info.name}' " + f"which you own. You have 'new_entries' notifications enabled." + ) + elif isinstance(event, LogEntryUpdateEvent): + # Check the notification context to determine the type + if notification_context == "own_entry_edit": + return ( + f"This notification was sent because {event.event_triggered_by_username} " + f"edited a log entry that you originally created in the document " + f"'{event.parent_log_document_info.name}'. You have 'own_entry_edits' notifications enabled." + ) + else: + return ( + f"This notification was sent because {event.event_triggered_by_username} " + f"updated a log entry in the document '{event.parent_log_document_info.name}' " + f"which you own. You have 'entry_updates' notifications enabled." + ) + elif isinstance(event, LogEntryReplyAddEvent): + # Check the notification context to determine the type + if notification_context == "document_owner": + return ( + f"This notification was sent because {event.event_triggered_by_username} " + f"added a reply to an entry in your document '{event.parent_log_document_info.name}'. " + f"You have 'document_replies' notifications enabled." + ) + else: + return ( + f"This notification was sent because {event.event_triggered_by_username} " + f"replied to your log entry in the document '{event.parent_log_document_info.name}'. " + f"You have 'entry_replies' notifications enabled." 
+ ) + elif isinstance(event, LogEntryReplyUpdateEvent): + # Check the notification context to determine the type + if notification_context == "own_reply_update": + return ( + f"This notification was sent because {event.event_triggered_by_username} " + f"updated a reply on your log entry in the document " + f"'{event.parent_log_document_info.name}'. You have 'own_entry_edits' notifications enabled." + ) + else: + return ( + f"This notification was sent because {event.event_triggered_by_username} " + f"updated a reply in the document '{event.parent_log_document_info.name}' " + f"which you own. You have 'entry_replies' notifications enabled." + ) + elif isinstance(event, LogReactionAddEvent): + return ( + f"This notification was sent because {event.event_triggered_by_username} " + f"added a reaction to your log entry in the document " + f"'{event.parent_log_document_info.name}'. You have 'reactions' notifications enabled." + ) + elif isinstance(event, LogReactionDeleteEvent): + return ( + f"This notification was sent because {event.event_triggered_by_username} " + f"removed a reaction from your log entry in the document " + f"'{event.parent_log_document_info.name}'. You have 'reactions' notifications enabled." + ) + elif isinstance(event, LogEntryDeleteEvent): + # Check the notification context to determine the type + if notification_context == "own_entry_delete": + return ( + f"This notification was sent because {event.event_triggered_by_username} " + f"deleted a log entry that you originally created in the document " + f"'{event.parent_log_document_info.name}'. You have 'own_entry_edits' notifications enabled." + ) + else: + return ( + f"This notification was sent because {event.event_triggered_by_username} " + f"deleted a log entry in the document '{event.parent_log_document_info.name}' " + f"which you own. You have 'entry_updates' notifications enabled." 
+ ) + elif isinstance(event, LogEntryReplyDeleteEvent): + # Check the notification context to determine the type + if notification_context == "reply_delete": + return ( + f"This notification was sent because {event.event_triggered_by_username} " + f"deleted a reply from your log entry in the document " + f"'{event.parent_log_document_info.name}'. You have 'entry_replies' notifications enabled." + ) + else: + return ( + f"This notification was sent because {event.event_triggered_by_username} " + f"deleted a reply in the document '{event.parent_log_document_info.name}' " + f"which you own. You have 'document_replies' notifications enabled." + ) + else: + return "This notification was sent due to activity on your BELY content." diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/handler.py b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/handler.py new file mode 100644 index 000000000..d7740591c --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/handler.py @@ -0,0 +1,488 @@ +""" +Main handler implementation for Apprise Smart Notifications. 
"""
Main handler implementation for Apprise Smart Notifications.
"""

from typing import Any, Optional, Union

from bely_mqtt import (
    MQTTHandler,
    LogEntryAddEvent,
    LogEntryUpdateEvent,
    LogEntryDeleteEvent,
    LogEntryReplyAddEvent,
    LogEntryReplyUpdateEvent,
    LogEntryReplyDeleteEvent,
    LogReactionAddEvent,
    LogReactionDeleteEvent,
)
from bely_mqtt.config import GlobalConfig

try:
    # Try relative imports first (when used as a package)
    from .config_loader import ConfigLoader
    from .notification_processor import NotificationProcessor
    from .formatters import NotificationFormatter
    from .email_threading import NotificationEventType
except ImportError:
    # Fall back to absolute imports (when imported directly from tests)
    from config_loader import ConfigLoader  # type: ignore[no-redef]
    from notification_processor import NotificationProcessor  # type: ignore[no-redef]
    from formatters import NotificationFormatter  # type: ignore[no-redef]
    from email_threading import NotificationEventType  # type: ignore[no-redef]


class AppriseSmartNotificationHandler(MQTTHandler):
    """
    Smart notification handler using Apprise with YAML configuration.

    Sends notifications for:
    - Log entry updates by other users
    - Log entry replies by other users
    - New log entries in documents by other users
    - Reactions to log entries by other users
    """

    def __init__(
        self,
        config_path: Optional[str] = None,
        api_client: Optional[Any] = None,
        global_config: Optional[GlobalConfig] = None,
    ):
        """
        Initialize the handler.

        Args:
            config_path: Path to YAML configuration file
            api_client: Optional BELY API client
            global_config: Optional global configuration containing bely_url and other settings

        Raises:
            ImportError: If apprise or yaml not installed
            FileNotFoundError: If config file not found
            ValueError: If config is invalid
        """
        super().__init__(api_client=api_client)

        self.bely_url = global_config.bely_url if global_config else None

        # Initialize components
        self.config_loader = ConfigLoader(self.logger)

        # Initialize formatter with timezone from config if available
        # NOTE(review): self.timezone is set to None here and the formatter is
        # never visibly re-created after config load in this file — the
        # "Will be updated after config load" comment appears aspirational; confirm.
        self.timezone = None
        self.formatter = NotificationFormatter(
            self.bely_url, self.logger
        )  # Will be updated after config load
        self.processor = NotificationProcessor(self.logger)

        # Load configuration
        if config_path:
            config = self.config_loader.load_config(config_path)
            self.processor.initialize_from_config(config, self.config_loader)
        else:
            self.logger.warning("No config path provided. Handler will not send notifications.")

    async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None:
        """
        Handle new log entry events.

        Notify document owner/creators if entry is created by someone else.

        Args:
            event: The log entry add event
        """
        try:
            await self._handle_add_event(event, is_reply=False)
        except Exception as e:
            self.logger.error(f"Error processing log entry add: {e}", exc_info=True)

    async def handle_log_entry_update(self, event: LogEntryUpdateEvent) -> None:
        """
        Handle log entry update events.

        Notify:
        1. The original creator if their entry is updated by someone else (own_entry_edits)
        2. Document owners about any updates

        Args:
            event: The log entry update event
        """
        try:
            await self._handle_update_event(event, is_reply=False)
        except Exception as e:
            self.logger.error(f"Error processing log entry update: {e}", exc_info=True)

    async def handle_log_entry_reply_add(self, event: LogEntryReplyAddEvent) -> None:
        """
        Handle log entry reply events.

        Notify:
        1. The original entry creator if someone else replies
        2. The document owner if someone replies to any entry in their document

        Args:
            event: The log entry reply add event
        """
        try:
            await self._handle_add_event(event, is_reply=True)
        except Exception as e:
            self.logger.error(f"Error processing log entry reply add: {e}", exc_info=True)

    async def handle_log_entry_reply_update(self, event: LogEntryReplyUpdateEvent) -> None:
        """
        Handle log entry reply update events.

        Notify:
        1. The original entry creator if someone else updates a reply on their entry (own_entry_edits)
        2. Document owners about any reply updates

        Args:
            event: The log entry reply update event
        """
        try:
            await self._handle_update_event(event, is_reply=True)
        except Exception as e:
            self.logger.error(f"Error processing log entry reply update: {e}", exc_info=True)

    async def handle_log_reaction_add(self, event: LogReactionAddEvent) -> None:
        """
        Handle log reaction add events.

        Notify the original entry creator if someone else reacts to their entry.

        Args:
            event: The log reaction add event
        """
        try:
            await self._handle_reaction_event(event, is_add=True)
        except Exception as e:
            self.logger.error(f"Error processing log reaction add: {e}", exc_info=True)

    async def handle_log_reaction_delete(self, event: LogReactionDeleteEvent) -> None:
        """
        Handle log reaction delete events.

        Notify the original entry creator if someone removes a reaction from their entry.

        Args:
            event: The log reaction delete event
        """
        try:
            await self._handle_reaction_event(event, is_add=False)
        except Exception as e:
            self.logger.error(f"Error processing log reaction delete: {e}", exc_info=True)

    async def handle_log_entry_delete(self, event: LogEntryDeleteEvent) -> None:
        """
        Handle log entry delete events.

        Notify:
        1. The original creator if their entry is deleted by someone else
        2. Document owners about any deletions

        Args:
            event: The log entry delete event
        """
        try:
            await self._handle_delete_event(event, is_reply=False)
        except Exception as e:
            self.logger.error(f"Error processing log entry delete: {e}", exc_info=True)

    async def handle_log_entry_reply_delete(self, event: LogEntryReplyDeleteEvent) -> None:
        """
        Handle log entry reply delete events.

        Notify:
        1. The original entry creator if someone deletes a reply on their entry
        2. Document owners about any reply deletions

        Args:
            event: The log entry reply delete event
        """
        try:
            await self._handle_delete_event(event, is_reply=True)
        except Exception as e:
            self.logger.error(f"Error processing log entry reply delete: {e}", exc_info=True)

    async def _handle_add_event(
        self, event: Union[LogEntryAddEvent, LogEntryReplyAddEvent], is_reply: bool = False
    ) -> None:
        """
        Unified handler for add events (entry or reply adds).

        Builds a list of notification-config dicts (one per intended recipient)
        and delegates filtering/sending to the processor.

        Args:
            event: The add event
            is_reply: Whether this is a reply add
        """
        notification_configs = []

        # Determine event type for threading
        event_type = (
            NotificationEventType.ENTRY_REPLY if is_reply else NotificationEventType.ENTRY_ADD
        )

        # Extract IDs for threading
        entry_id = event.log_info.id if hasattr(event, "log_info") else None
        parent_entry_id = (
            event.parent_log_info.id if is_reply and hasattr(event, "parent_log_info") else None
        )

        if is_reply:
            # Notify the original entry creator
            creator_username = event.parent_log_info.entered_by_username
            notification_configs.append(
                {
                    "username": creator_username,
                    "notification_type": "entry_replies",
                    "title": f"Reply to Your Log Entry in {event.parent_log_document_info.name}",
                    "body": self.formatter.format_reply_added(event),
                    "context": None,
                    "event_type": event_type,
                    "entry_id": entry_id,
                    "parent_entry_id": parent_entry_id,
                }
            )

            # Notify the document owner
            owner_username = event.parent_log_document_info.owner_username
            notification_configs.append(
                {
                    "username": owner_username,
                    "notification_type": "document_replies",
                    "title": f"New Reply in Your Document: {event.parent_log_document_info.name}",
                    "body": self.formatter.format_document_reply(event),
                    "context": "document_owner",
                    "event_type": event_type,
                    "entry_id": entry_id,
                    "parent_entry_id": parent_entry_id,
                }
            )
        else:
            # Don't notify if the creator is also the document creator
            # NOTE(review): this guard compares against created_by_username but the
            # notification below targets owner_username; if creator and owner can
            # differ, the owner is silently skipped here — confirm intent.
            if (
                event.event_triggered_by_username
                == event.parent_log_document_info.created_by_username
            ):
                return

            # Notify document owner about new entry
            owner_username = event.parent_log_document_info.owner_username
            notification_configs.append(
                {
                    "username": owner_username,
                    "notification_type": "new_entries",
                    "title": f"New Log Entry in {event.parent_log_document_info.name}",
                    "body": self.formatter.format_entry_added(event),
                    "context": None,
                    "event_type": event_type,
                    "entry_id": entry_id,
                }
            )

        await self.processor.process_notifications(event, notification_configs)

    async def _handle_update_event(
        self, event: Union[LogEntryUpdateEvent, LogEntryReplyUpdateEvent], is_reply: bool = False
    ) -> None:
        """
        Unified handler for update events (entry or reply updates).

        Args:
            event: The update event
            is_reply: Whether this is a reply update
        """
        notification_configs = []

        # Determine event type for threading
        event_type = (
            NotificationEventType.REPLY_UPDATE if is_reply else NotificationEventType.ENTRY_UPDATE
        )

        # Extract IDs for threading
        entry_id = event.log_info.id if hasattr(event, "log_info") else None
        parent_entry_id = (
            event.parent_log_info.id if is_reply and hasattr(event, "parent_log_info") else None
        )

        # Choose titles/bodies/settings per event flavor before building configs.
        if is_reply:
            creator_username = event.parent_log_info.entered_by_username
            own_edit_title = (
                f"Reply Updated on Your Log Entry in {event.parent_log_document_info.name}"
            )
            own_edit_body = self.formatter.format_own_reply_updated(event)
            owner_title = f"Reply Updated in {event.parent_log_document_info.name}"
            owner_body = self.formatter.format_reply_updated(event)
            owner_notification_type = "entry_replies"
            own_context = "own_reply_update"
        else:
            creator_username = event.log_info.entered_by_username
            own_edit_title = f"Your Log Entry Was Edited: {event.parent_log_document_info.name}"
            own_edit_body = self.formatter.format_own_entry_edited(event)
            owner_title = f"Log Entry Updated: {event.parent_log_document_info.name}"
            owner_body = self.formatter.format_entry_updated(event)
            owner_notification_type = "entry_updates"
            own_context = "own_entry_edit"

        # Config for notifying the original creator
        notification_configs.append(
            {
                "username": creator_username,
                "notification_type": "own_entry_edits",
                "title": own_edit_title,
                "body": own_edit_body,
                "context": own_context,
                "event_type": event_type,
                "entry_id": entry_id,
                "parent_entry_id": parent_entry_id,
            }
        )

        # Config for notifying the document owner
        owner_username = event.parent_log_document_info.owner_username
        notification_configs.append(
            {
                "username": owner_username,
                "notification_type": owner_notification_type,
                "title": owner_title,
                "body": owner_body,
                "context": None,
                "event_type": event_type,
                "entry_id": entry_id,
                "parent_entry_id": parent_entry_id,
            }
        )

        await self.processor.process_notifications(event, notification_configs)

    async def _handle_reaction_event(
        self, event: Union[LogReactionAddEvent, LogReactionDeleteEvent], is_add: bool = True
    ) -> None:
        """
        Handle log reaction events (both add and delete).

        Notify the original entry creator if someone else reacts to or removes a reaction from their entry.

        Args:
            event: The log reaction event
            is_add: Whether this is an add event (True) or delete event (False)
        """
        # Don't notify the reactor (person who added/removed the reaction)
        if event.event_triggered_by_username == event.parent_log_info.entered_by_username:
            return

        # Check if we should notify about reactions
        creator_username = event.parent_log_info.entered_by_username
        if not self.processor.should_notify(creator_username, "reactions"):
            return

        # Determine event type for threading
        event_type = (
            NotificationEventType.REACTION_ADD if is_add else NotificationEventType.REACTION_DELETE
        )

        # Extract entry ID for threading
        entry_id = event.parent_log_info.id if hasattr(event, "parent_log_info") else None

        # Build notification
        if is_add:
            title = f"Reaction to Your Log Entry in {event.parent_log_document_info.name}"
            body = self.formatter.format_reaction_added(event)
        else:
            title = f"Reaction Removed from Your Log Entry in {event.parent_log_document_info.name}"
            body = self.formatter.format_reaction_deleted(event)

        # Send notification with threading support
        await self.processor.send_notification_with_threading(
            username=creator_username,
            title=title,
            body=body,
            event_type=event_type,
            document_id=event.parent_log_document_info.id,
            document_name=event.parent_log_document_info.name,
            entry_id=entry_id,
            action_by=event.event_triggered_by_username,
        )

    async def _handle_delete_event(
        self, event: Union[LogEntryDeleteEvent, LogEntryReplyDeleteEvent], is_reply: bool = False
    ) -> None:
        """
        Unified handler for delete events (entry or reply deletes).

        Args:
            event: The delete event
            is_reply: Whether this is a reply delete
        """
        notification_configs = []

        # Determine event type for threading
        event_type = (
            NotificationEventType.REPLY_DELETE if is_reply else NotificationEventType.ENTRY_DELETE
        )

        # Extract IDs for threading
        entry_id = event.log_info.id if hasattr(event, "log_info") else None
        parent_entry_id = (
            event.parent_log_info.id if is_reply and hasattr(event, "parent_log_info") else None
        )

        if is_reply:
            # Notify the original entry creator about reply deletion
            creator_username = event.parent_log_info.entered_by_username
            notification_configs.append(
                {
                    "username": creator_username,
                    "notification_type": "entry_replies",
                    "title": f"Reply Deleted from Your Log Entry in {event.parent_log_document_info.name}",
                    "body": self.formatter.format_reply_deleted(event),
                    "context": "reply_delete",
                    "event_type": event_type,
                    "entry_id": entry_id,
                    "parent_entry_id": parent_entry_id,
                }
            )

            # Notify the document owner about reply deletion
            owner_username = event.parent_log_document_info.owner_username
            notification_configs.append(
                {
                    "username": owner_username,
                    "notification_type": "document_replies",
                    "title": f"Reply Deleted in Your Document: {event.parent_log_document_info.name}",
                    "body": self.formatter.format_document_reply_deleted(event),
                    "context": "document_owner",
                    "event_type": event_type,
                    "entry_id": entry_id,
                    "parent_entry_id": parent_entry_id,
                }
            )
        else:
            # Notify the original entry creator about their entry being deleted.
            # NOTE(review): deletion notices to the creator are gated by the
            # "own_entry_edits" setting (there is no separate delete setting).
            creator_username = event.log_info.entered_by_username
            notification_configs.append(
                {
                    "username": creator_username,
                    "notification_type": "own_entry_edits",
                    "title": f"Your Log Entry Was Deleted: {event.parent_log_document_info.name}",
                    "body": self.formatter.format_own_entry_deleted(event),
                    "context": "own_entry_delete",
                    "event_type": event_type,
                    "entry_id": entry_id,
                }
            )

            # Notify the document owner about entry deletion
            owner_username = event.parent_log_document_info.owner_username
            notification_configs.append(
                {
                    "username": owner_username,
                    "notification_type": "entry_updates",
                    "title": f"Log Entry Deleted: {event.parent_log_document_info.name}",
                    "body": self.formatter.format_entry_deleted(event),
                    "context": "entry_delete",
                    "event_type": event_type,
                    "entry_id": entry_id,
                }
            )

        await self.processor.process_notifications(event, notification_configs)
"""
Notification processor for Apprise Smart Notification Handler.
"""

from logging import Logger
from typing import Any, Dict, List, Optional

try:
    # Try relative imports first (when used as a package)
    from .email_threading import EmailThreadingStrategy, NotificationEventType
    from .apprise_email_wrapper import AppriseWithEmailHeaders, is_email_notification
except ImportError:
    # Fall back to absolute imports (when imported directly from tests)
    from email_threading import EmailThreadingStrategy, NotificationEventType  # type: ignore[no-redef]
    from apprise_email_wrapper import AppriseWithEmailHeaders, is_email_notification  # type: ignore[no-redef]


class NotificationProcessor:
    """Handles processing and sending of notifications."""

    def __init__(self, logger: Logger, domain: str = "notifications.bely.app"):
        """
        Initialize the notification processor.

        Args:
            logger: Logger instance for output
            domain: Domain for email threading IDs (default: notifications.bely.app)
        """

        self.logger = logger

        # Per-user Apprise instances, notification-type toggles, and whether
        # any of the user's endpoints is an email endpoint.
        self.user_apprise_instances: Dict[str, AppriseWithEmailHeaders] = {}
        self.user_notification_settings: Dict[str, Dict[str, bool]] = {}
        self.user_has_email: Dict[str, bool] = {}  # Track which users have email notifications

        # Initialize email threading strategy
        self.email_threading = EmailThreadingStrategy(domain=domain)

    def initialize_from_config(self, config: Dict[str, Any], config_loader: Any) -> None:
        """
        Initialize Apprise instances from configuration.

        Args:
            config: Configuration dictionary
            config_loader: ConfigLoader instance for URL processing
        """
        global_config = config.get("global", {})
        users_config = config.get("users", {})

        for username, user_config in users_config.items():
            try:
                # Create AppriseWithEmailHeaders instance for better email header support
                apobj = AppriseWithEmailHeaders()

                # Add URLs from config
                apprise_urls = user_config.get("apprise_urls", [])
                if isinstance(apprise_urls, str):
                    apprise_urls = [apprise_urls]

                has_email = False
                for url in apprise_urls:
                    # Process mailto URLs to use global mail server settings if available
                    processed_url = config_loader.process_apprise_url(url, global_config)
                    if not apobj.add(processed_url):
                        self.logger.warning(f"Failed to add Apprise URL for user {username}: {url}")
                    else:
                        # Check if this is an email notification
                        if is_email_notification(processed_url):
                            has_email = True

                # NOTE(review): truthiness presumably reflects whether any server
                # URLs were successfully added (Apprise __bool__ semantics) — confirm.
                if apobj:
                    self.user_apprise_instances[username] = apobj
                    self.user_has_email[username] = has_email
                    self.logger.debug(
                        f"Initialized Apprise for user: {username} (has_email: {has_email})"
                    )
                else:
                    self.logger.warning(f"No valid Apprise URLs for user: {username}")

                # Store notification settings (each type defaults to enabled)
                notifications = user_config.get("notifications", {})
                self.user_notification_settings[username] = {
                    "entry_updates": notifications.get("entry_updates", True),
                    "own_entry_edits": notifications.get("own_entry_edits", True),
                    "entry_replies": notifications.get("entry_replies", True),
                    "new_entries": notifications.get("new_entries", True),
                    "reactions": notifications.get("reactions", True),
                    "document_replies": notifications.get("document_replies", True),
                }

            except Exception as e:
                self.logger.error(f"Error initializing Apprise for user {username}: {e}")

    def should_notify(self, username: Optional[str], notification_type: str) -> bool:
        """
        Check if user should be notified for this type of event.

        Args:
            username: Username to check
            notification_type: Type of notification (entry_updates, own_entry_edits,
                               entry_replies, new_entries, reactions, document_replies)

        Returns:
            True if user should be notified, False otherwise
        """
        if not username:
            return False

        # A user with no configured endpoints can never be notified.
        if username not in self.user_apprise_instances:
            return False

        settings = self.user_notification_settings.get(username, {})
        return settings.get(notification_type, True)

    async def send_notification(
        self,
        username: Optional[str],
        title: str,
        body: str,
        headers: Optional[Dict[str, str]] = None,
    ) -> None:
        """
        Send notification to user via Apprise.

        Args:
            username: Username to notify
            title: Notification title
            body: Notification body
            headers: Optional headers (used for email threading)
        """
        if not username or username not in self.user_apprise_instances:
            self.logger.debug(f"No notification endpoints for user: {username}")
            return

        try:
            apobj = self.user_apprise_instances[username]

            # The AppriseWithEmailHeaders wrapper handles headers automatically
            # It will only apply headers to email notifications and ignore them for others
            if headers and self.user_has_email.get(username, False):
                # Send notification with headers for email threading
                result = apobj.notify(
                    body=body,
                    title=title,
                    headers=headers,
                )
                self.logger.debug(f"Sent notification with email threading headers to {username}")
            else:
                # Send notification without headers
                result = apobj.notify(
                    body=body,
                    title=title,
                )

            if result:
                self.logger.info(f"Notification sent to {username}: {title}")
            else:
                self.logger.warning(f"Failed to send notification to {username}")

        except Exception as e:
            self.logger.error(f"Error sending notification to {username}: {e}", exc_info=True)

    async def send_notification_with_threading(
        self,
        username: Optional[str],
        title: str,
        body: str,
        event_type: NotificationEventType,
        document_id: str,
        document_name: str,
        entry_id: Optional[str] = None,
        parent_entry_id: Optional[str] = None,
        action_by: Optional[str] = None,
    ) -> None:
        """
        Send notification with email threading support.

        Args:
            username: Username to notify
            title: Notification title (will be overridden for threading)
            body: Notification body
            event_type: Type of notification event
            document_id: The document ID
            document_name: The document name
            entry_id: The log entry ID (if applicable)
            parent_entry_id: The parent entry ID (for replies)
            action_by: User who performed the action
        """
        if not username or username not in self.user_apprise_instances:
            self.logger.debug(f"No notification endpoints for user: {username}")
            return

        # Check if user has email notifications
        has_email = self.user_has_email.get(username, False)

        # Generate appropriate subject based on notification type
        action_desc = f"by {action_by}" if action_by else None
        threaded_subject = self.email_threading.generate_subject(
            event_type=event_type,
            document_title=document_name,
            action_description=action_desc,
            is_email=has_email,
        )

        # Generate email headers if user has email notifications
        headers = None
        if has_email:
            try:
                headers = self.email_threading.get_email_headers(
                    event_type=event_type,
                    document_id=document_id,
                    document_name=document_name,
                    entry_id=entry_id,
                    parent_entry_id=parent_entry_id,
                )
                self.logger.debug(f"Generated email threading headers for {username}: {headers}")
            except ValueError as e:
                self.logger.warning(f"Failed to generate email headers: {e}")

        # Send notification with threading support
        await self.send_notification(username, threaded_subject, body, headers)

    async def process_notifications(
        self, event: Any, notification_configs: List[Dict[str, Any]]
    ) -> None:
        """
        Process and send notifications based on configuration.

        Args:
            event: The event to process
            notification_configs: List of dicts with:
                - username: User to notify
                - notification_type: Type of notification setting to check
                - title: Notification title
                - body: Notification body
                - context: Optional context for trigger description
                - event_type: Optional NotificationEventType for threading
                - entry_id: Optional entry ID for threading
                - parent_entry_id: Optional parent entry ID for threading
        """
        # Tracks recipients already handled so a user who appears in several
        # configs for the same event (e.g. creator AND owner) is notified once.
        notifications_sent = []

        for config in notification_configs:
            username = config["username"]
            notification_type = config["notification_type"]

            # Skip if already notified or shouldn't notify
            if username in notifications_sent:
                continue
            if not self.should_notify(username, notification_type):
                continue
            # Never notify the user who triggered the event.
            if event.event_triggered_by_username == username:
                continue

            title = config["title"]
            body = config["body"]

            # Check if we have threading information
            if "event_type" in config and hasattr(event, "parent_log_document_info"):
                # Use threading-aware notification
                await self.send_notification_with_threading(
                    username=username,
                    title=title,
                    body=body,
                    event_type=config["event_type"],
                    document_id=event.parent_log_document_info.id,
                    document_name=event.parent_log_document_info.name,
                    entry_id=config.get("entry_id"),
                    parent_entry_id=config.get("parent_entry_id"),
                    action_by=event.event_triggered_by_username,
                )
            else:
                # Fall back to simple notification
                await self.send_notification(username, title, body)

            notifications_sent.append(username)
#!/usr/bin/env python3
"""
Test runner for apprise_smart_notification handler.

Usage:
    python run_tests.py                  # Run all tests
    python run_tests.py -v               # Verbose output
    python run_tests.py --cov            # With coverage report
    python run_tests.py -k test_entry_add  # Run specific test
"""

import os
import subprocess
import sys
from pathlib import Path


def main():
    """Build the pytest command line, run the suite, and exit with its status."""
    script_dir = Path(__file__).parent

    # Expose the broker's src/ tree to the spawned pytest process so that
    # `import bely_mqtt` resolves without installing the package.
    env = os.environ.copy()
    src_dir = script_dir.parent.parent.parent / "src"
    if src_dir.exists():
        existing = env.get("PYTHONPATH")
        env["PYTHONPATH"] = f"{src_dir}{os.pathsep}{existing}" if existing else str(src_dir)

    pytest_cmd = [
        sys.executable,
        "-m",
        "pytest",
        "test/",
        "--asyncio-mode=auto",  # Handle async tests
        "--tb=short",  # Shorter traceback format
    ]

    # Optional coverage run: translate our --cov flag into pytest-cov options
    # and drop it from argv so it is not forwarded verbatim.
    if "--cov" in sys.argv:
        pytest_cmd += [
            "--cov=apprise_smart_notification",
            "--cov-report=term-missing",
            "--cov-report=html:htmlcov",
        ]
        sys.argv.remove("--cov")

    # Forward any remaining CLI arguments straight to pytest.
    pytest_cmd += sys.argv[1:]

    print(f"Running: {' '.join(pytest_cmd)}")
    print("-" * 60)

    sys.exit(subprocess.run(pytest_cmd, cwd=script_dir, env=env).returncode)


if __name__ == "__main__":
    main()
"""
Test suite for email threading functionality in Apprise Smart Notification Handler.

This module tests the email threading capabilities that ensure email notifications
are properly threaded together in email clients like Gmail, Outlook, and Thunderbird.
"""

import sys
from pathlib import Path
from datetime import datetime, timedelta
from unittest.mock import MagicMock, patch, AsyncMock
import pytest
import yaml

# Add src directory to path to import bely_mqtt
src_path = Path(__file__).parent.parent.parent.parent / "src"
if src_path.exists():
    sys.path.insert(0, str(src_path))

# Mock apprise before importing handler — the real package may not be
# installed, and the handler imports it transitively at import time.
sys.modules["apprise"] = MagicMock()

from bely_mqtt import (  # noqa: E402
    LogEntryAddEvent,
    LogEntryUpdateEvent,
    LogEntryDeleteEvent,
    LogEntryReplyAddEvent,
    LogReactionAddEvent,
)
from bely_mqtt.models import (  # noqa: E402
    LogInfo,
    LogDocumentInfo,
    LogReactionInfo,
    ReactionInfo,
    LogbookInfo,
)
from bely_mqtt.config import GlobalConfig  # noqa: E402

# Add parent directory to path for imports
handler_path = Path(__file__).parent.parent
if handler_path.exists():
    sys.path.insert(0, str(handler_path))

from handler import AppriseSmartNotificationHandler  # noqa: E402
from email_threading import (  # noqa: E402
    EmailThreadingStrategy,
    NotificationEventType,
    detect_event_type,
)


class TestEmailThreadingStrategy:
    """Test the EmailThreadingStrategy class."""

    @pytest.fixture
    def strategy(self):
        """Create an EmailThreadingStrategy instance."""
        return EmailThreadingStrategy(domain="test.example.com")

    def test_email_url_detection(self, strategy):
        """Test detection of email notification URLs."""
        # Email URLs that should be detected
        email_urls = [
            "mailto://user:pass@gmail.com",
            "mailtos://user:pass@mail.example.com:587",
        ]

        # Non-email URLs that should not be detected
        non_email_urls = [
            "slack://TokenA/TokenB/TokenC",
            "discord://webhook_id/webhook_token",
            "telegram://bot_token/chat_id",
            "pushover://user_key@app_token",
            "teams://webhook_url",
        ]

        for url in email_urls:
            assert EmailThreadingStrategy.is_email_notification(
                url
            ), f"Should detect {url} as email"

        for url in non_email_urls:
            assert not EmailThreadingStrategy.is_email_notification(
                url
            ), f"Should not detect {url} as email"

    def test_thread_id_generation(self, strategy):
        """Test generation of consistent thread IDs."""
        document_id = "doc123"
        entry_id = "entry456"

        # Thread IDs should be consistent for the same input
        doc_thread_1 = strategy.generate_document_thread_id(document_id)
        doc_thread_2 = strategy.generate_document_thread_id(document_id)
        assert doc_thread_1 == doc_thread_2
        assert "@test.example.com" in doc_thread_1

        entry_thread_1 = strategy.generate_entry_thread_id(entry_id)
        entry_thread_2 = strategy.generate_entry_thread_id(entry_id)
        assert entry_thread_1 == entry_thread_2
        assert "@test.example.com" in entry_thread_1

        # Different IDs should produce different thread IDs
        assert doc_thread_1 != entry_thread_1

    def test_email_headers_for_document_creation(self, strategy):
        """Test email headers for document creation (starts thread)."""
        headers = strategy.get_email_headers(
            event_type=NotificationEventType.DOCUMENT_CREATE,
            document_id="doc100",
            document_name="Sprint Planning",
        )

        assert "Thread-Topic" in headers
        assert "Sprint Planning" in headers["Thread-Topic"]
        assert "References" in headers
        assert "In-Reply-To" not in headers  # Document creation starts the thread

    def test_email_headers_for_entry_addition(self, strategy):
        """Test email headers for entry addition (replies to document)."""
        headers = strategy.get_email_headers(
            event_type=NotificationEventType.ENTRY_ADD,
            document_id="doc100",
            document_name="Sprint Planning",
            entry_id="entry200",
        )

        assert "Thread-Topic" in headers
        assert "References" in headers
        assert "In-Reply-To" in headers
        # A new entry threads directly under the document's root message.
        assert headers["In-Reply-To"] == strategy.generate_document_thread_id("doc100")
        assert "X-Entry-Id" in headers
        assert headers["X-Entry-Id"] == "entry200"

    def test_email_headers_for_nested_reply(self, strategy):
        """Test email headers for reply to entry (nested threading)."""
        headers = strategy.get_email_headers(
            event_type=NotificationEventType.ENTRY_REPLY,
            document_id="doc100",
            document_name="Sprint Planning",
            entry_id="reply300",
            parent_entry_id="entry200",
        )

        assert "Thread-Topic" in headers
        assert "References" in headers
        assert "In-Reply-To" in headers
        # A reply threads under its parent entry, not the document root.
        assert headers["In-Reply-To"] == strategy.generate_entry_thread_id("entry200")
        assert "X-Entry-Id" in headers
        assert headers["X-Entry-Id"] == "reply300"
        assert "X-Parent-Entry-Id" in headers
        assert headers["X-Parent-Entry-Id"] == "entry200"

        # References should include both document and parent entry
        references = headers["References"].split()
        assert strategy.generate_document_thread_id("doc100") in references
        assert strategy.generate_entry_thread_id("entry200") in references

    def test_subject_generation_for_email(self, strategy):
        """Test subject line generation for email notifications."""
        document_title = "Q4 Planning Document"

        # Email subjects should use Re: for threading
        subjects = [
            (NotificationEventType.DOCUMENT_CREATE, None, "New Log: Q4 Planning Document"),
            (NotificationEventType.ENTRY_ADD, "by Alice", "Re: Log: Q4 Planning Document"),
            (NotificationEventType.ENTRY_UPDATE, None, "Re: Log: Q4 Planning Document"),
            (NotificationEventType.ENTRY_REPLY, "by Bob", "Re: Log: Q4 Planning Document"),
            (NotificationEventType.REACTION_ADD, "by Charlie", "Re: Log: Q4 Planning Document"),
        ]

        for event_type, action, expected_prefix in subjects:
            subject = strategy.generate_subject(
                event_type=event_type,
                document_title=document_title,
                action_description=action,
                is_email=True,
            )
            assert subject.startswith(expected_prefix)
            if action:
                assert action in subject

    def test_subject_generation_for_non_email(self, strategy):
        """Test subject line generation for non-email notifications."""
        document_title = "Q4 Planning Document"

        # Non-email subjects should include emojis
        emoji_map = {
            NotificationEventType.DOCUMENT_CREATE: "📄",
            NotificationEventType.ENTRY_ADD: "➕",
            NotificationEventType.ENTRY_UPDATE: "✏️",
            NotificationEventType.ENTRY_DELETE: "🗑️",
            NotificationEventType.ENTRY_REPLY: "💬",
            NotificationEventType.REACTION_ADD: "👍",
        }

        for event_type, emoji in emoji_map.items():
            subject = strategy.generate_subject(
                event_type=event_type, document_title=document_title, is_email=False
            )
            assert emoji in subject
            assert document_title in subject

    def test_event_type_detection(self):
        """Test detection of event types from context."""
        mock_event = MagicMock()

        test_cases = [
            # (is_reply, is_update, is_delete, is_reaction, is_reaction_delete, expected)
            (False, False, False, False, False, NotificationEventType.ENTRY_ADD),
            (False, True, False, False, False, NotificationEventType.ENTRY_UPDATE),
            (False, False, True, False, False, NotificationEventType.ENTRY_DELETE),
            (True, False, False, False, False, NotificationEventType.ENTRY_REPLY),
            (True, True, False, False, False, NotificationEventType.REPLY_UPDATE),
            (True, False, True, False, False, NotificationEventType.REPLY_DELETE),
            (False, False, False, True, False, NotificationEventType.REACTION_ADD),
            (False, False, False, True, True, NotificationEventType.REACTION_DELETE),
        ]

        for is_reply, is_update, is_delete, is_reaction, is_reaction_delete, expected in test_cases:
            result = detect_event_type(
                mock_event,
                is_reply=is_reply,
                is_update=is_update,
                is_delete=is_delete,
                is_reaction=is_reaction,
                is_reaction_delete=is_reaction_delete,
            )
            assert result == expected, f"Expected {expected}, got {result}"
configuration with email notifications.""" + config = { + "global": { + "mail_server": { + "host": "smtp.company.com", + "port": 587, + "username": "notifications@company.com", + "password": "secure_password", + "use_tls": True, + } + }, + "users": { + "alice": { + "apprise_urls": [ + "mailto://alice@company.com", + "slack://TokenA/TokenB/TokenC/#general", + ], + "notifications": { + "entry_updates": True, + "own_entry_edits": True, + "entry_replies": True, + "new_entries": True, + "reactions": True, + "document_replies": True, + }, + }, + "bob": { + "apprise_urls": [ + "mailto://bob@company.com", + "discord://webhook_id/webhook_token", + ], + "notifications": { + "entry_updates": True, + "own_entry_edits": True, + "entry_replies": True, + "new_entries": True, + "reactions": True, + "document_replies": True, + }, + }, + "charlie": { + "apprise_urls": [ + "teams://webhook_url", # No email for Charlie + ], + "notifications": { + "entry_updates": True, + "own_entry_edits": True, + "entry_replies": True, + "new_entries": True, + "reactions": True, + "document_replies": True, + }, + }, + }, + } + + config_path = tmp_path / "threading_config.yaml" + with open(config_path, "w") as f: + yaml.dump(config, f) + + return config_path + + @pytest.fixture + def handler(self, test_config): + """Create handler with test configuration.""" + global_config = GlobalConfig({"bely_url": "https://bely.company.com"}) + + # Mock AppriseWithEmailHeaders class + mock_apprise_wrapper = MagicMock() + mock_apprise_wrapper.return_value.notify = MagicMock(return_value=True) + mock_apprise_wrapper.return_value.add = MagicMock(return_value=True) + mock_apprise_wrapper.return_value.__bool__ = MagicMock(return_value=True) + + with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_wrapper): + handler = AppriseSmartNotificationHandler( + config_path=str(test_config), global_config=global_config + ) + + # Mock Apprise notify for all users + for username, apobj in 
handler.processor.user_apprise_instances.items(): + apobj.notify = MagicMock(return_value=True) + + return handler + + @pytest.mark.asyncio + async def test_email_thread_conversation(self, handler): + """Test a complete email thread conversation.""" + base_time = datetime.now() + + # Track all notifications sent + notifications = [] + + async def track_notification(username, title, body, attach=None): + notifications.append( + { + "username": username, + "title": title, + "body": body, + "attach": attach, + "timestamp": datetime.now().isoformat(), + } + ) + return True + + handler.processor.send_notification = AsyncMock(side_effect=track_notification) + + # Create a document + doc = LogDocumentInfo( + id=1000, + name="Team Retrospective", + ownerUsername="alice", + createdByUsername="alice", + lastModifiedByUsername="alice", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ) + + # 1. Bob adds an entry to Alice's document + event1 = LogEntryAddEvent( + eventTimestamp=(base_time + timedelta(minutes=5)).isoformat(), + eventTriggedByUsername="bob", + entityName="LogEntry", + entityId=1001, + parentLogDocumentInfo=doc, + logInfo=LogInfo( + id=1001, + enteredByUsername="bob", + lastModifiedByUsername="bob", + enteredOnDateTime=(base_time + timedelta(minutes=5)).isoformat(), + lastModifiedOnDateTime=(base_time + timedelta(minutes=5)).isoformat(), + ), + description="Added retrospective feedback", + textDiff="+ What went well: Sprint velocity improved\n+ To improve: Better testing coverage", + logbookList=[LogbookInfo(id=1, name="Retrospectives", displayName="Retrospectives")], + ) + + await handler.handle_log_entry_add(event1) + + # Alice should get notified (document owner) + assert len(notifications) == 1 + assert notifications[0]["username"] == "alice" + + # Check if email threading headers would be applied + # (In real implementation, these would be in the attach parameter) + # For email notifications, the subject should use 
"Re:" for threading + + # 2. Alice replies to Bob's entry + event2 = LogEntryReplyAddEvent( + eventTimestamp=(base_time + timedelta(minutes=10)).isoformat(), + eventTriggedByUsername="alice", + entityName="LogEntryReply", + entityId=1002, + parentLogDocumentInfo=doc, + parentLogInfo=LogInfo( + id=1001, + enteredByUsername="bob", + lastModifiedByUsername="bob", + enteredOnDateTime=(base_time + timedelta(minutes=5)).isoformat(), + lastModifiedOnDateTime=(base_time + timedelta(minutes=5)).isoformat(), + ), + logInfo=LogInfo( + id=1002, + enteredByUsername="alice", + lastModifiedByUsername="alice", + enteredOnDateTime=(base_time + timedelta(minutes=10)).isoformat(), + lastModifiedOnDateTime=(base_time + timedelta(minutes=10)).isoformat(), + ), + textDiff="Great points! Let's schedule a meeting to discuss the testing strategy.", + logbookList=[LogbookInfo(id=1, name="Retrospectives", displayName="Retrospectives")], + description="Response to feedback", + ) + + await handler.handle_log_entry_reply_add(event2) + + # Bob should get notified (entry creator) + assert len(notifications) == 2 + assert notifications[1]["username"] == "bob" + + # 3. 
Bob reacts to Alice's reply + from bely_mqtt.models import ReactionId + + event3 = LogReactionAddEvent( + eventTimestamp=(base_time + timedelta(minutes=15)).isoformat(), + eventTriggedByUsername="bob", + entityName="LogReaction", + entityId=ReactionId(logId=1002, reactionId=1, userId=2), + parentLogDocumentInfo=doc, + parentLogInfo=LogInfo( + id=1002, + enteredByUsername="alice", + lastModifiedByUsername="alice", + enteredOnDateTime=(base_time + timedelta(minutes=10)).isoformat(), + lastModifiedOnDateTime=(base_time + timedelta(minutes=10)).isoformat(), + ), + logReaction=LogReactionInfo( + id=ReactionId(logId=1002, reactionId=1, userId=2), + reaction=ReactionInfo( + id=1, emoji="👍", name="thumbsup", emojiCode=128077, description="Agreed" + ), + username="bob", + ), + description="Agreed with meeting proposal", + ) + + await handler.handle_log_reaction_add(event3) + + # Alice should get notified about the reaction + assert len(notifications) == 3 + assert notifications[2]["username"] == "alice" + + # Verify all notifications are part of the same conversation thread + # In a real email client, these would all be grouped together + + @pytest.mark.asyncio + async def test_mixed_notification_types(self, handler): + """Test that email and non-email notifications are handled differently.""" + base_time = datetime.now() + + # Track notifications with their types + notifications = [] + + async def track_notification(username, title, body, attach=None): + # Determine if this is an email notification based on user config + is_email = False + if username in ["alice", "bob"]: # These users have email configured + is_email = True + + notifications.append( + { + "username": username, + "title": title, + "body": body, + "is_email": is_email, + } + ) + return True + + handler.processor.send_notification = AsyncMock(side_effect=track_notification) + + # Create a document owned by Charlie (no email) + doc = LogDocumentInfo( + id=2000, + name="Technical Specs", + 
ownerUsername="charlie", + createdByUsername="charlie", + lastModifiedByUsername="charlie", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ) + + # Alice (has email) adds an entry + event = LogEntryAddEvent( + eventTimestamp=base_time.isoformat(), + eventTriggedByUsername="alice", + entityName="LogEntry", + entityId=2001, + parentLogDocumentInfo=doc, + logInfo=LogInfo( + id=2001, + enteredByUsername="alice", + lastModifiedByUsername="alice", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ), + description="Added API specifications", + textDiff="+ API endpoint: /api/v2/users", + logbookList=[LogbookInfo(id=2, name="Tech Specs", displayName="Tech Specs")], + ) + + await handler.handle_log_entry_add(event) + + # Charlie should get notified (document owner) + assert len(notifications) == 1 + assert notifications[0]["username"] == "charlie" + assert not notifications[0]["is_email"] # Charlie doesn't have email + + @pytest.mark.asyncio + async def test_threading_with_updates_and_deletes(self, handler): + """Test that updates and deletes maintain thread continuity.""" + base_time = datetime.now() + + notifications = [] + + async def track_notification(username, title, body, attach=None): + notifications.append( + { + "username": username, + "title": title, + "body": body, + } + ) + return True + + handler.processor.send_notification = AsyncMock(side_effect=track_notification) + + # Alice's document + doc = LogDocumentInfo( + id=3000, + name="Project Roadmap", + ownerUsername="alice", + createdByUsername="alice", + lastModifiedByUsername="alice", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ) + + # Bob's entry + bob_entry = LogInfo( + id=3001, + enteredByUsername="bob", + lastModifiedByUsername="bob", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ) + + # 1. 
Bob updates his own entry + event1 = LogEntryUpdateEvent( + eventTimestamp=(base_time + timedelta(minutes=5)).isoformat(), + eventTriggedByUsername="bob", + entityName="LogEntry", + entityId=3001, + parentLogDocumentInfo=doc, + logInfo=bob_entry, + description="Updated timeline", + textDiff="- Q3 delivery\n+ Q4 delivery", + logbookList=[LogbookInfo(id=3, name="Roadmap", displayName="Roadmap")], + ) + + await handler.handle_log_entry_update(event1) + + # Alice should be notified (document owner) + # Bob shouldn't be notified (he's the one updating) + assert len(notifications) == 1 + assert notifications[0]["username"] == "alice" + + # 2. Alice deletes Bob's entry + event2 = LogEntryDeleteEvent( + eventTimestamp=(base_time + timedelta(minutes=10)).isoformat(), + eventTriggedByUsername="alice", + entityName="LogEntry", + entityId=3001, + parentLogDocumentInfo=doc, + logInfo=bob_entry, + description="Removed outdated entry", + textDiff="- Deleted: Q4 delivery timeline", + logbookList=[LogbookInfo(id=3, name="Roadmap", displayName="Roadmap")], + ) + + await handler.handle_log_entry_delete(event2) + + # Bob should be notified (his entry was deleted) + assert len(notifications) == 2 + assert notifications[1]["username"] == "bob" + + # All these notifications should be part of the same email thread + + +class TestEmailThreadingEdgeCases: + """Test edge cases and error handling for email threading.""" + + @pytest.fixture + def strategy(self): + """Create an EmailThreadingStrategy instance.""" + return EmailThreadingStrategy() + + def test_missing_domain(self, strategy): + """Test thread ID generation without a domain.""" + # Should use default domain + thread_id = strategy.generate_document_thread_id("doc123") + assert "@" in thread_id + assert thread_id.endswith(">") + + def test_empty_document_name(self, strategy): + """Test handling of empty document names.""" + headers = strategy.get_email_headers( + event_type=NotificationEventType.ENTRY_ADD, + document_id="doc123", + 
document_name="", # Empty name + entry_id="entry456", + ) + + assert "Thread-Topic" in headers + # Should handle empty name gracefully + + def test_special_characters_in_ids(self, strategy): + """Test handling of special characters in IDs.""" + # IDs with special characters + doc_id = "doc-123_test@special" + entry_id = "entry/456\\test" + + doc_thread = strategy.generate_document_thread_id(doc_id) + entry_thread = strategy.generate_entry_thread_id(entry_id) + + # Should generate valid message IDs + assert doc_thread.startswith("<") + assert doc_thread.endswith(">") + assert entry_thread.startswith("<") + assert entry_thread.endswith(">") + + def test_very_long_document_name(self, strategy): + """Test handling of very long document names.""" + long_name = "A" * 500 # Very long document name + + subject = strategy.generate_subject( + event_type=NotificationEventType.ENTRY_ADD, document_title=long_name, is_email=True + ) + + # Should truncate or handle gracefully + assert len(subject) < 1000 # Reasonable subject length + + def test_unicode_in_document_name(self, strategy): + """Test handling of Unicode characters in document names.""" + unicode_name = "Project 项目 🚀 Проект" + + headers = strategy.get_email_headers( + event_type=NotificationEventType.ENTRY_ADD, + document_id="doc123", + document_name=unicode_name, + entry_id="entry456", + ) + + subject = strategy.generate_subject( + event_type=NotificationEventType.ENTRY_ADD, document_title=unicode_name, is_email=True + ) + + assert "Thread-Topic" in headers + assert unicode_name in headers["Thread-Topic"] + assert unicode_name in subject + + +if __name__ == "__main__": + # Run tests with pytest + pytest.main([__file__, "-v", "--asyncio-mode=auto"]) diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_formatters.py b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_formatters.py new file mode 100644 index 
000000000..8dec415d8 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_formatters.py @@ -0,0 +1,424 @@ +""" +Tests for notification formatters. +""" + +import logging +from datetime import datetime, timezone +from pathlib import Path +import sys +from unittest.mock import MagicMock +from zoneinfo import ZoneInfo + +import pytest + +# Add parent directory to path for imports +handler_path = Path(__file__).parent.parent +if handler_path.exists(): + sys.path.insert(0, str(handler_path)) + +from formatters import NotificationFormatter # noqa: E402 +from bely_mqtt import ( # noqa: E402 + LogEntryAddEvent, + LogEntryUpdateEvent, + LogEntryDeleteEvent, + LogEntryReplyAddEvent, + LogReactionAddEvent, +) +from bely_mqtt.models import ( # noqa: E402 + LogDocumentInfo, + LogInfo, + LogbookInfo, + ReactionInfo, + ReactionId, + LogReactionInfo, +) + + +@pytest.fixture +def logger(): + """Create a mock logger.""" + return MagicMock(spec=logging.Logger) + + +@pytest.fixture +def formatter_utc(logger): + """Create a formatter with UTC timezone.""" + return NotificationFormatter(bely_url="https://bely.example.com", logger=logger, timezone="UTC") + + +@pytest.fixture +def formatter_eastern(logger): + """Create a formatter with Eastern timezone.""" + return NotificationFormatter( + bely_url="https://bely.example.com", logger=logger, timezone="America/New_York" + ) + + +@pytest.fixture +def formatter_pacific(logger): + """Create a formatter with Pacific timezone.""" + return NotificationFormatter( + bely_url="https://bely.example.com", logger=logger, timezone="America/Los_Angeles" + ) + + +@pytest.fixture +def sample_log_document_info(): + """Create sample log document info.""" + return LogDocumentInfo( + name="Test Document", id=123, ownerUsername="doc_owner", createdByUsername="doc_creator" + ) + + +@pytest.fixture +def sample_log_info(): + """Create sample log info.""" + return LogInfo(id=456, 
enteredByUsername="entry_author", lastModifiedByUsername="modifier") + + +@pytest.fixture +def sample_parent_log_info(): + """Create sample parent log info for replies.""" + return LogInfo( + id=789, enteredByUsername="parent_author", lastModifiedByUsername="parent_modifier" + ) + + +@pytest.fixture +def sample_logbook_list(): + """Create sample logbook list.""" + return [ + LogbookInfo(name="Logbook1", id=1, displayName="Display 1"), + LogbookInfo(name="Logbook2", id=2, displayName="Display 2"), + ] + + +@pytest.fixture +def sample_reaction_info(): + """Create sample reaction info.""" + return ReactionInfo( + id=1, name="thumbs_up", emojiCode=128077, emoji="👍", description="Thumbs up" + ) + + +@pytest.fixture +def sample_log_reaction_info(sample_reaction_info): + """Create sample log reaction info.""" + return LogReactionInfo( + reaction=sample_reaction_info, + id=ReactionId(logId=456, reactionId=1, userId=999), + username="reactor_user", + ) + + +class TestTimezoneFormatting: + """Test timezone formatting functionality.""" + + def test_utc_timestamp_formatting(self, formatter_utc): + """Test that UTC timestamps are formatted correctly.""" + # Create a UTC timestamp + utc_time = datetime(2024, 1, 15, 14, 30, 45, tzinfo=timezone.utc) + + formatted = formatter_utc._format_timestamp(utc_time) + + # Should show UTC time + assert "2024-01-15 14:30:45" in formatted + assert "UTC" in formatted + + def test_eastern_timezone_formatting(self, formatter_eastern): + """Test that timestamps are converted to Eastern timezone.""" + # Create a UTC timestamp (2:30 PM UTC) + utc_time = datetime(2024, 1, 15, 14, 30, 45, tzinfo=timezone.utc) + + formatted = formatter_eastern._format_timestamp(utc_time) + + # Should show Eastern time (9:30 AM EST, UTC-5) + assert "2024-01-15 09:30:45" in formatted + assert "EST" in formatted or "EDT" in formatted + + def test_pacific_timezone_formatting(self, formatter_pacific): + """Test that timestamps are converted to Pacific timezone.""" + # 
Create a UTC timestamp (2:30 PM UTC) + utc_time = datetime(2024, 1, 15, 14, 30, 45, tzinfo=timezone.utc) + + formatted = formatter_pacific._format_timestamp(utc_time) + + # Should show Pacific time (6:30 AM PST, UTC-8) + assert "2024-01-15 06:30:45" in formatted + assert "PST" in formatted or "PDT" in formatted + + def test_naive_timestamp_assumes_utc(self, formatter_eastern): + """Test that naive timestamps are assumed to be UTC.""" + # Create a naive timestamp (no timezone info) + naive_time = datetime(2024, 1, 15, 14, 30, 45) + + formatted = formatter_eastern._format_timestamp(naive_time) + + # Should treat as UTC and convert to Eastern (9:30 AM EST) + assert "2024-01-15 09:30:45" in formatted + + def test_daylight_saving_time(self, formatter_eastern): + """Test that daylight saving time is handled correctly.""" + # Summer date (EDT - Eastern Daylight Time, UTC-4) + summer_time = datetime(2024, 7, 15, 14, 30, 45, tzinfo=timezone.utc) + formatted_summer = formatter_eastern._format_timestamp(summer_time) + assert "2024-07-15 10:30:45" in formatted_summer # UTC-4 + + # Winter date (EST - Eastern Standard Time, UTC-5) + winter_time = datetime(2024, 1, 15, 14, 30, 45, tzinfo=timezone.utc) + formatted_winter = formatter_eastern._format_timestamp(winter_time) + assert "2024-01-15 09:30:45" in formatted_winter # UTC-5 + + def test_invalid_timezone_falls_back_to_utc(self, logger): + """Test that invalid timezone falls back to UTC.""" + formatter = NotificationFormatter( + bely_url="https://bely.example.com", logger=logger, timezone="Invalid/Timezone" + ) + + # Should have logged a warning + logger.warning.assert_called() + + # Should use UTC + utc_time = datetime(2024, 1, 15, 14, 30, 45, tzinfo=timezone.utc) + formatted = formatter._format_timestamp(utc_time) + assert "2024-01-15 14:30:45 UTC" in formatted + + def test_no_timezone_specified(self, logger): + """Test formatter with no timezone specified (uses local or UTC).""" + formatter = 
NotificationFormatter(bely_url="https://bely.example.com", logger=logger) + + # Should have a timezone set (either local or UTC) + assert formatter.timezone is not None + + # Should be able to format timestamps + utc_time = datetime(2024, 1, 15, 14, 30, 45, tzinfo=timezone.utc) + formatted = formatter._format_timestamp(utc_time) + assert "2024-01-15" in formatted + assert ":" in formatted # Has time component + + +class TestEventFormatting: + """Test event formatting with timezone support.""" + + def test_entry_added_with_timezone( + self, formatter_eastern, sample_log_document_info, sample_log_info, sample_logbook_list + ): + """Test that LogEntryAddEvent uses timezone formatting.""" + event = LogEntryAddEvent( + description="Test entry added", + eventTimestamp=datetime(2024, 1, 15, 14, 30, 45, tzinfo=timezone.utc), + entityName="Log", + entityId=123, + eventTriggedByUsername="test_user", + parentLogDocumentInfo=sample_log_document_info, + logInfo=sample_log_info, + logbookList=sample_logbook_list, + textDiff="This is the entry content", + ) + + result = formatter_eastern.format_entry_added(event) + + # Check that Eastern time is shown (9:30 AM EST) + assert "2024-01-15 09:30:45" in result + assert "test_user" in result + assert "Test Document" in result + + def test_entry_updated_with_timezone( + self, formatter_pacific, sample_log_document_info, sample_log_info, sample_logbook_list + ): + """Test that LogEntryUpdateEvent uses timezone formatting.""" + event = LogEntryUpdateEvent( + description="Test entry updated", + eventTimestamp=datetime(2024, 7, 15, 18, 45, 30, tzinfo=timezone.utc), + entityName="Log", + entityId=123, + eventTriggedByUsername="updater", + parentLogDocumentInfo=sample_log_document_info, + logInfo=sample_log_info, + logbookList=sample_logbook_list, + textDiff="Updated content", + ) + + result = formatter_pacific.format_entry_updated(event) + + # Check that Pacific time is shown (11:45 AM PDT, UTC-7 in summer) + assert "2024-07-15 11:45:30" in 
result + assert "updater" in result + + def test_reply_added_with_timezone( + self, + formatter_eastern, + sample_log_document_info, + sample_log_info, + sample_parent_log_info, + sample_logbook_list, + ): + """Test that LogEntryReplyAddEvent uses timezone formatting.""" + event = LogEntryReplyAddEvent( + description="Reply added", + eventTimestamp=datetime(2024, 3, 10, 22, 15, 0, tzinfo=timezone.utc), + entityName="LogReply", + entityId=999, + eventTriggedByUsername="replier", + parentLogDocumentInfo=sample_log_document_info, + logInfo=sample_log_info, + parentLogInfo=sample_parent_log_info, + logbookList=sample_logbook_list, + textDiff="Reply content", + ) + + result = formatter_eastern.format_reply_added(event) + + # Check that Eastern time is shown (6:15 PM EDT, UTC-4 in March after DST) + assert "2024-03-10 18:15:00" in result + assert "replier" in result + + def test_reaction_added_with_timezone( + self, + formatter_utc, + sample_log_document_info, + sample_parent_log_info, + sample_log_reaction_info, + ): + """Test that LogReactionAddEvent uses timezone formatting.""" + event = LogReactionAddEvent( + description="Reaction added", + eventTimestamp=datetime(2024, 12, 25, 10, 0, 0, tzinfo=timezone.utc), + entityName="LogReaction", + entityId={"logId": 456, "reactionId": 1, "userId": 999}, + eventTriggedByUsername="reactor", + parentLogInfo=sample_parent_log_info, + parentLogDocumentInfo=sample_log_document_info, + logReaction=sample_log_reaction_info, + ) + + result = formatter_utc.format_reaction_added(event) + + # Check that UTC time is shown + assert "2024-12-25 10:00:00 UTC" in result + assert "reactor" in result + assert "👍" in result + + def test_entry_deleted_with_timezone( + self, formatter_eastern, sample_log_document_info, sample_log_info, sample_logbook_list + ): + """Test that LogEntryDeleteEvent uses timezone formatting.""" + event = LogEntryDeleteEvent( + description="Entry deleted", + eventTimestamp=datetime(2024, 6, 1, 3, 30, 15, 
tzinfo=timezone.utc), + entityName="Log", + entityId=123, + eventTriggedByUsername="deleter", + parentLogDocumentInfo=sample_log_document_info, + logInfo=sample_log_info, + logbookList=sample_logbook_list, + textDiff="Deleted content", + ) + + result = formatter_eastern.format_entry_deleted(event) + + # Check that Eastern time is shown (11:30 PM EDT previous day, UTC-4 in June) + assert "2024-05-31 23:30:15" in result + assert "deleter" in result + + def test_multiple_events_same_formatter( + self, formatter_eastern, sample_log_document_info, sample_log_info, sample_logbook_list + ): + """Test that the same formatter consistently formats times in the same timezone.""" + # Create multiple events with different UTC times + event1 = LogEntryAddEvent( + description="Morning entry", + eventTimestamp=datetime(2024, 1, 15, 8, 0, 0, tzinfo=timezone.utc), + entityName="Log", + entityId=1, + eventTriggedByUsername="user1", + parentLogDocumentInfo=sample_log_document_info, + logInfo=sample_log_info, + logbookList=sample_logbook_list, + textDiff="Morning content", + ) + + event2 = LogEntryAddEvent( + description="Afternoon entry", + eventTimestamp=datetime(2024, 1, 15, 15, 0, 0, tzinfo=timezone.utc), + entityName="Log", + entityId=2, + eventTriggedByUsername="user2", + parentLogDocumentInfo=sample_log_document_info, + logInfo=sample_log_info, + logbookList=sample_logbook_list, + textDiff="Afternoon content", + ) + + event3 = LogEntryAddEvent( + description="Evening entry", + eventTimestamp=datetime(2024, 1, 15, 23, 0, 0, tzinfo=timezone.utc), + entityName="Log", + entityId=3, + eventTriggedByUsername="user3", + parentLogDocumentInfo=sample_log_document_info, + logInfo=sample_log_info, + logbookList=sample_logbook_list, + textDiff="Evening content", + ) + + result1 = formatter_eastern.format_entry_added(event1) + result2 = formatter_eastern.format_entry_added(event2) + result3 = formatter_eastern.format_entry_added(event3) + + # All should be in Eastern time + assert "03:00:00" 
in result1 # 8 AM UTC = 3 AM EST + assert "10:00:00" in result2 # 3 PM UTC = 10 AM EST + assert "18:00:00" in result3 # 11 PM UTC = 6 PM EST + + # All should show EST + assert "EST" in result1 or "EDT" in result1 + assert "EST" in result2 or "EDT" in result2 + assert "EST" in result3 or "EDT" in result3 + + +class TestFormatterInitialization: + """Test formatter initialization with different timezone configurations.""" + + def test_formatter_with_valid_timezone(self, logger): + """Test creating formatter with valid timezone.""" + formatter = NotificationFormatter( + bely_url="https://bely.example.com", logger=logger, timezone="Europe/London" + ) + + assert formatter.timezone == ZoneInfo("Europe/London") + + # Test formatting + utc_time = datetime(2024, 1, 15, 12, 0, 0, tzinfo=timezone.utc) + formatted = formatter._format_timestamp(utc_time) + assert "12:00:00" in formatted # Same as UTC in January (no DST) + + def test_formatter_with_asia_timezone(self, logger): + """Test creating formatter with Asian timezone.""" + formatter = NotificationFormatter( + bely_url="https://bely.example.com", logger=logger, timezone="Asia/Tokyo" + ) + + assert formatter.timezone == ZoneInfo("Asia/Tokyo") + + # Test formatting + utc_time = datetime(2024, 1, 15, 12, 0, 0, tzinfo=timezone.utc) + formatted = formatter._format_timestamp(utc_time) + assert "21:00:00" in formatted # UTC+9 for Tokyo + + def test_formatter_with_australia_timezone(self, logger): + """Test creating formatter with Australian timezone.""" + formatter = NotificationFormatter( + bely_url="https://bely.example.com", logger=logger, timezone="Australia/Sydney" + ) + + assert formatter.timezone == ZoneInfo("Australia/Sydney") + + # Test formatting + utc_time = datetime(2024, 1, 15, 12, 0, 0, tzinfo=timezone.utc) + formatted = formatter._format_timestamp(utc_time) + assert "23:00:00" in formatted # UTC+11 for Sydney in January (summer) + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) diff --git 
a/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_handler.py b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_handler.py new file mode 100644 index 000000000..0f1b82f37 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_handler.py @@ -0,0 +1,833 @@ +""" +Comprehensive test suite for Apprise Smart Notification Handler. + +Tests all event types and notification scenarios with mock data. +""" + +import logging +from datetime import datetime +from pathlib import Path +from unittest.mock import MagicMock, patch, AsyncMock +import pytest +import yaml +import sys + +# Add src directory to path to import bely_mqtt +src_path = Path(__file__).parent.parent.parent.parent / "src" +if src_path.exists(): + sys.path.insert(0, str(src_path)) + +# Mock apprise before importing handler +mock_apprise = MagicMock() +sys.modules["apprise"] = mock_apprise + +from bely_mqtt import ( # noqa: E402 + LogEntryAddEvent, + LogEntryUpdateEvent, + LogEntryDeleteEvent, + LogEntryReplyAddEvent, + LogEntryReplyUpdateEvent, + LogEntryReplyDeleteEvent, + LogReactionAddEvent, + LogReactionDeleteEvent, +) +from bely_mqtt.models import ( # noqa: E402 + LogInfo, + LogDocumentInfo, + LogReactionInfo, + ReactionInfo, +) +from bely_mqtt.config import GlobalConfig # noqa: E402 + +# Add parent directory to path to import handler module +handler_path = Path(__file__).parent.parent +if handler_path.exists(): + sys.path.insert(0, str(handler_path)) + +from handler import AppriseSmartNotificationHandler # noqa: E402 + + +class MockEventFactory: + """Factory for creating mock events with realistic data.""" + + def __init__(self): + self.timestamp = datetime.now().isoformat() + + # Define three test users for proper notification testing + self.alice = "alice" # Document owner + self.bob = "bob" # Entry creator (different from document 
owner) + self.charlie = "charlie" # Third party who updates/replies + + # Create mock document owned by Alice + self.document = self._create_document() + + # Create mock log entries + self.alice_entry = self._create_log_entry(self.alice, 1) + self.bob_entry = self._create_log_entry(self.bob, 2) # Bob's entry in Alice's document + self.charlie_entry = self._create_log_entry(self.charlie, 3) + + def _create_document(self) -> LogDocumentInfo: + """Create a mock document owned by Alice.""" + return LogDocumentInfo( + id=100, + name="Project Alpha Documentation", + ownerUsername=self.alice, + createdByUsername=self.alice, + lastModifiedByUsername=self.alice, + enteredOnDateTime=self.timestamp, + lastModifiedOnDateTime=self.timestamp, + ) + + def _create_log_entry(self, username: str, entry_id: int) -> LogInfo: + """Create a mock log entry.""" + return LogInfo( + id=entry_id, + enteredByUsername=username, + lastModifiedByUsername=username, + enteredOnDateTime=self.timestamp, + lastModifiedOnDateTime=self.timestamp, + ) + + def _create_reaction(self, emoji: str = "👍", name: str = "thumbsup") -> LogReactionInfo: + """Create a mock reaction.""" + from bely_mqtt.models import ReactionId + + return LogReactionInfo( + id=ReactionId(logId=1, reactionId=1, userId=2), + reaction=ReactionInfo( + id=1, + emoji=emoji, + name=name, + emojiCode=128077, # Unicode code point for thumbs up + description=f"Reaction {name}", + ), + username=self.bob, + ) + + def create_entry_add_by_bob(self) -> LogEntryAddEvent: + """Bob adds a new entry to Alice's document.""" + from bely_mqtt.models import LogbookInfo + + return LogEntryAddEvent( + eventTimestamp=self.timestamp, + eventTriggedByUsername=self.bob, + entityName="LogEntry", + entityId=2, + parentLogDocumentInfo=self.document, + logInfo=self.bob_entry, + description="New findings from testing", + textDiff="+ Added new test results\n+ Performance improved by 20%", + logbookList=[LogbookInfo(id=1, name="Project Alpha", displayName="Project 
Alpha")], + ) + + def create_entry_update_by_bob_on_alice(self) -> LogEntryUpdateEvent: + """Bob updates Alice's entry.""" + from bely_mqtt.models import LogbookInfo + + return LogEntryUpdateEvent( + eventTimestamp=self.timestamp, + eventTriggedByUsername=self.bob, + entityName="LogEntry", + entityId=1, + parentLogDocumentInfo=self.document, + logInfo=self.alice_entry, + description="Corrected typo and added details", + textDiff="- Old text with typo\n+ New text with correction", + logbookList=[LogbookInfo(id=1, name="Project Alpha", displayName="Project Alpha")], + ) + + def create_entry_update_by_charlie_on_bob(self) -> LogEntryUpdateEvent: + """Charlie updates Bob's entry in Alice's document.""" + from bely_mqtt.models import LogbookInfo + + return LogEntryUpdateEvent( + eventTimestamp=self.timestamp, + eventTriggedByUsername=self.charlie, # Charlie is the third party + entityName="LogEntry", + entityId=2, + parentLogDocumentInfo=self.document, + logInfo=self.bob_entry, + description="Added clarification", + textDiff="+ Added clarification note", + logbookList=[LogbookInfo(id=1, name="Project Alpha", displayName="Project Alpha")], + ) + + def create_reply_add_by_charlie_to_bob(self) -> LogEntryReplyAddEvent: + """Charlie replies to Bob's entry in Alice's document.""" + from bely_mqtt.models import LogbookInfo + + return LogEntryReplyAddEvent( + eventTimestamp=self.timestamp, + eventTriggedByUsername=self.charlie, # Charlie is the third party + entityName="LogEntryReply", + entityId=3, + parentLogDocumentInfo=self.document, + parentLogInfo=self.bob_entry, # Replying to Bob's entry + logInfo=LogInfo( + id=3, + enteredByUsername=self.charlie, + lastModifiedByUsername=self.charlie, + enteredOnDateTime=self.timestamp, + lastModifiedOnDateTime=self.timestamp, + ), + textDiff="Great point! 
I agree with this approach.", + logbookList=[LogbookInfo(id=1, name="Project Alpha", displayName="Project Alpha")], + description="Reply to Bob's entry", + ) + + def create_reply_add_by_alice_to_bob(self) -> LogEntryReplyAddEvent: + """Alice replies to Bob's entry in her document.""" + from bely_mqtt.models import LogbookInfo + + return LogEntryReplyAddEvent( + eventTimestamp=self.timestamp, + eventTriggedByUsername=self.alice, + entityName="LogEntryReply", + entityId=4, + parentLogDocumentInfo=self.document, + parentLogInfo=self.bob_entry, + logInfo=LogInfo( + id=4, + enteredByUsername=self.alice, + lastModifiedByUsername=self.alice, + enteredOnDateTime=self.timestamp, + lastModifiedOnDateTime=self.timestamp, + ), + textDiff="Thanks for the update. Please also check the API docs.", + logbookList=[LogbookInfo(id=1, name="Project Alpha", displayName="Project Alpha")], + description="Reply to Bob's entry", + ) + + def create_reply_update_by_charlie_on_bob_entry(self) -> LogEntryReplyUpdateEvent: + """Charlie updates a reply on Bob's entry in Alice's document.""" + from bely_mqtt.models import LogbookInfo + + return LogEntryReplyUpdateEvent( + eventTimestamp=self.timestamp, + eventTriggedByUsername=self.charlie, # Charlie is updating + entityName="LogEntryReply", + entityId=3, + parentLogDocumentInfo=self.document, + parentLogInfo=self.bob_entry, # Bob's entry + logInfo=LogInfo( + id=3, + enteredByUsername=self.charlie, # Charlie originally created this reply + lastModifiedByUsername=self.charlie, + enteredOnDateTime=self.timestamp, + lastModifiedOnDateTime=self.timestamp, + ), + textDiff="- Old reply text\n+ Updated reply with more details", + logbookList=[LogbookInfo(id=1, name="Project Alpha", displayName="Project Alpha")], + description="Updated reply", + ) + + def create_reaction_add_by_bob_to_alice(self) -> LogReactionAddEvent: + """Bob adds a reaction to Alice's entry.""" + from bely_mqtt.models import ReactionId + + return LogReactionAddEvent( + 
eventTimestamp=self.timestamp, + eventTriggedByUsername=self.bob, + entityName="LogReaction", + entityId=ReactionId(logId=1, reactionId=1, userId=2), + parentLogDocumentInfo=self.document, + parentLogInfo=self.alice_entry, + logReaction=self._create_reaction("🎉", "tada"), + description="Celebrating the achievement", + ) + + def create_reaction_delete_by_bob_from_alice(self) -> LogReactionDeleteEvent: + """Bob removes a reaction from Alice's entry.""" + from bely_mqtt.models import ReactionId + + return LogReactionDeleteEvent( + eventTimestamp=self.timestamp, + eventTriggedByUsername=self.bob, + entityName="LogReaction", + entityId=ReactionId(logId=1, reactionId=1, userId=2), + parentLogDocumentInfo=self.document, + parentLogInfo=self.alice_entry, + logReaction=self._create_reaction("👍", "thumbsup"), + description="Removing thumbsup", + ) + + def create_self_reaction_by_alice(self) -> LogReactionAddEvent: + """Alice adds a reaction to her own entry (should not notify).""" + from bely_mqtt.models import ReactionId + + return LogReactionAddEvent( + eventTimestamp=self.timestamp, + eventTriggedByUsername=self.alice, + entityName="LogReaction", + entityId=ReactionId(logId=1, reactionId=2, userId=1), + parentLogDocumentInfo=self.document, + parentLogInfo=self.alice_entry, + logReaction=self._create_reaction("✅", "check"), + description="Marking as complete", + ) + + def create_entry_delete_by_bob_on_alice(self) -> LogEntryDeleteEvent: + """Bob deletes Alice's entry.""" + from bely_mqtt.models import LogbookInfo + + return LogEntryDeleteEvent( + eventTimestamp=self.timestamp, + eventTriggedByUsername=self.bob, + entityName="LogEntry", + entityId=1, + parentLogDocumentInfo=self.document, + logInfo=self.alice_entry, + description="Deleted outdated entry", + textDiff="- Deleted content: This was the original entry text", + logbookList=[LogbookInfo(id=1, name="Project Alpha", displayName="Project Alpha")], + ) + + def create_entry_delete_by_charlie_on_bob(self) -> 
LogEntryDeleteEvent: + """Charlie deletes Bob's entry in Alice's document.""" + from bely_mqtt.models import LogbookInfo + + return LogEntryDeleteEvent( + eventTimestamp=self.timestamp, + eventTriggedByUsername=self.charlie, + entityName="LogEntry", + entityId=2, + parentLogDocumentInfo=self.document, + logInfo=self.bob_entry, + description="Removed duplicate entry", + textDiff="- Deleted content: Bob's test results", + logbookList=[LogbookInfo(id=1, name="Project Alpha", displayName="Project Alpha")], + ) + + def create_reply_delete_by_charlie_on_bob_entry(self) -> LogEntryReplyDeleteEvent: + """Charlie deletes a reply on Bob's entry in Alice's document.""" + from bely_mqtt.models import LogbookInfo + + return LogEntryReplyDeleteEvent( + eventTimestamp=self.timestamp, + eventTriggedByUsername=self.charlie, + entityName="LogEntryReply", + entityId=3, + parentLogDocumentInfo=self.document, + parentLogInfo=self.bob_entry, + logInfo=LogInfo( + id=3, + enteredByUsername=self.charlie, + lastModifiedByUsername=self.charlie, + enteredOnDateTime=self.timestamp, + lastModifiedOnDateTime=self.timestamp, + ), + textDiff="- Deleted reply: This comment is no longer relevant", + logbookList=[LogbookInfo(id=1, name="Project Alpha", displayName="Project Alpha")], + description="Deleted outdated reply", + ) + + +class TestAppriseSmartNotificationHandler: + """Test suite for AppriseSmartNotificationHandler.""" + + @pytest.fixture + def mock_factory(self): + """Provide a mock event factory.""" + return MockEventFactory() + + @pytest.fixture + def config_file(self, tmp_path): + """Create a temporary config file for testing.""" + config = { + "global": { + "mail_server": { + "host": "smtp.example.com", + "port": 587, + "username": "notifications@example.com", + "password": "secret123", + "use_tls": True, + } + }, + "users": { + "alice": { + "apprise_urls": [ + "mailto://alice@example.com", + "discord://webhook_id/webhook_token", + ], + "notifications": { + "entry_updates": True, + 
"own_entry_edits": True, + "entry_replies": True, + "new_entries": True, + "reactions": True, + "document_replies": True, + }, + }, + "bob": { + "apprise_urls": [ + "mailto://bob@example.com", + "slack://TokenA/TokenB/TokenC/", + ], + "notifications": { + "entry_updates": True, + "own_entry_edits": True, + "entry_replies": True, + "new_entries": False, # Bob doesn't want new entry notifications + "reactions": False, # Bob doesn't want reaction notifications + "document_replies": True, + }, + }, + "charlie": { + "apprise_urls": [ + "mailto://charlie@example.com", + "teams://webhook_url", + ], + "notifications": { + "entry_updates": True, + "own_entry_edits": True, + "entry_replies": True, + "new_entries": True, + "reactions": True, + "document_replies": True, + }, + }, + }, + } + + config_path = tmp_path / "test_config.yaml" + with open(config_path, "w") as f: + yaml.dump(config, f) + + return config_path + + @pytest.fixture + def handler(self, config_file): + """Create a handler instance with mocked Apprise.""" + global_config = GlobalConfig({"bely_url": "https://bely.example.com"}) + + # Mock AppriseWithEmailHeaders class + mock_apprise_wrapper = MagicMock() + mock_apprise_wrapper.return_value.notify = MagicMock(return_value=True) + mock_apprise_wrapper.return_value.add = MagicMock(return_value=True) + mock_apprise_wrapper.return_value.__bool__ = MagicMock(return_value=True) + + with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_wrapper): + handler = AppriseSmartNotificationHandler( + config_path=str(config_file), global_config=global_config + ) + + # Mock the notify method for all user instances + for username, apobj in handler.processor.user_apprise_instances.items(): + apobj.notify = MagicMock(return_value=True) + + return handler + + @pytest.mark.asyncio + async def test_entry_add_by_collaborator(self, handler, mock_factory): + """Test: Bob adds entry to Alice's document -> Alice gets notified.""" + event = 
mock_factory.create_entry_add_by_bob() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_entry_add(event) + + # Alice should be notified about new entry in her document + mock_send.assert_called_once() + call_args = mock_send.call_args + assert call_args[0][0] == "alice" + # Alice has email configured, so should get email-style subject + assert "Re: Log: Project Alpha Documentation" in call_args[0][1] + assert "bob" in call_args[0][1] or "bob" in call_args[0][2] + assert "Performance improved by 20%" in call_args[0][2] + + @pytest.mark.asyncio + async def test_entry_update_by_collaborator(self, handler, mock_factory): + """Test: Bob updates Alice's entry -> Alice gets notified (deduped as she's both owner & creator).""" + event = mock_factory.create_entry_update_by_bob_on_alice() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_entry_update(event) + + # Should send 1 notification (deduplicated since Alice is both owner and creator) + assert mock_send.call_count == 1 + + # Check notification + call_args = mock_send.call_args + assert call_args[0][0] == "alice" # As both owner and original creator + + # Verify notification content + assert "bob" in call_args[0][2] + assert "typo" in call_args[0][2] + + @pytest.mark.asyncio + async def test_entry_update_by_owner(self, handler, mock_factory): + """Test: Charlie updates Bob's entry in Alice's document -> Both Alice and Bob get notified.""" + event = mock_factory.create_entry_update_by_charlie_on_bob() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_entry_update(event) + + # Bob should be notified his entry was edited + # Alice should also be notified as document owner + assert mock_send.call_count == 2 + + calls = mock_send.call_args_list + usernames = [call[0][0] for call 
in calls] + assert "bob" in usernames + assert "alice" in usernames + + @pytest.mark.asyncio + async def test_reply_add_by_collaborator(self, handler, mock_factory): + """Test: Charlie replies to Bob's entry in Alice's document -> Both Alice and Bob get notified.""" + event = mock_factory.create_reply_add_by_charlie_to_bob() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_entry_reply_add(event) + + # Bob gets notified as entry creator + # Alice gets notified as document owner + assert mock_send.call_count == 2 + + calls = mock_send.call_args_list + usernames = [call[0][0] for call in calls] + assert "bob" in usernames # Entry creator + assert "alice" in usernames # Document owner + + # Check notification content + for call in calls: + assert "charlie" in call[0][2].lower() + + @pytest.mark.asyncio + async def test_reply_add_by_owner(self, handler, mock_factory): + """Test: Alice replies to Bob's entry in her own document -> Bob gets notified.""" + event = mock_factory.create_reply_add_by_alice_to_bob() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_entry_reply_add(event) + + # Only Bob should be notified as entry creator + # Alice doesn't get notified (she's the one replying to her own document) + assert mock_send.call_count == 1 + + call_args = mock_send.call_args + assert call_args[0][0] == "bob" + assert "alice" in call_args[0][2].lower() + + @pytest.mark.asyncio + async def test_reply_update(self, handler, mock_factory): + """Test: Charlie updates reply on Bob's entry in Alice's document -> Both Alice and Bob get notified.""" + event = mock_factory.create_reply_update_by_charlie_on_bob_entry() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_entry_reply_update(event) + + # Bob should be notified as entry 
creator + # Alice should be notified as document owner + assert mock_send.call_count == 2 + + calls = mock_send.call_args_list + usernames = [call[0][0] for call in calls] + assert "bob" in usernames # Entry creator + assert "alice" in usernames # Document owner + + # Check that charlie is mentioned in notifications + for call in calls: + assert "charlie" in call[0][2].lower() + + @pytest.mark.asyncio + async def test_reaction_add(self, handler, mock_factory): + """Test: Bob adds reaction to Alice's entry -> Alice gets notified.""" + event = mock_factory.create_reaction_add_by_bob_to_alice() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_reaction_add(event) + + # Alice should be notified about the reaction + mock_send.assert_called_once() + call_args = mock_send.call_args + assert call_args[0][0] == "alice" + # Alice has email configured, so should get email-style subject with action + assert "Re: Log: Project Alpha Documentation" in call_args[0][1] + assert "bob" in call_args[0][1] # Action by bob should be in subject + assert "🎉" in call_args[0][2] + assert "tada" in call_args[0][2] + + @pytest.mark.asyncio + async def test_reaction_delete(self, handler, mock_factory): + """Test: Bob removes reaction from Alice's entry -> Alice gets notified.""" + event = mock_factory.create_reaction_delete_by_bob_from_alice() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_reaction_delete(event) + + # Alice should be notified about the reaction removal + mock_send.assert_called_once() + call_args = mock_send.call_args + assert call_args[0][0] == "alice" + # Alice has email configured, so should get email-style subject with action + assert "Re: Log: Project Alpha Documentation" in call_args[0][1] + assert "bob" in call_args[0][1] # Action by bob should be in subject + assert "👍" in call_args[0][2] + + 
@pytest.mark.asyncio + async def test_self_reaction_no_notification(self, handler, mock_factory): + """Test: Alice reacts to her own entry -> No notification sent.""" + event = mock_factory.create_self_reaction_by_alice() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_reaction_add(event) + + # No notification should be sent for self-reactions + mock_send.assert_not_called() + + @pytest.mark.asyncio + async def test_notification_settings_respected(self, handler, mock_factory): + """Test: Notification settings are respected (Bob has reactions disabled).""" + from bely_mqtt.models import ReactionId, LogReactionInfo, ReactionInfo + + # Create a reaction event where Alice reacts to Bob's entry + event = LogReactionAddEvent( + eventTimestamp=mock_factory.timestamp, + eventTriggedByUsername="alice", + entityName="LogReaction", + entityId=ReactionId(logId=2, reactionId=3, userId=1), + parentLogDocumentInfo=mock_factory.document, + parentLogInfo=mock_factory.bob_entry, # Bob's entry + logReaction=LogReactionInfo( + id=ReactionId(logId=2, reactionId=3, userId=1), + reaction=ReactionInfo( + id=3, emoji="💯", name="100", emojiCode=128175, description="Perfect" + ), + username="alice", + ), + description="Perfect solution!", + ) + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_reaction_add(event) + + # Bob has reactions disabled, so no notification + mock_send.assert_not_called() + + @pytest.mark.asyncio + async def test_permalink_generation(self, handler, mock_factory): + """Test: Permalinks are correctly generated in notifications.""" + event = mock_factory.create_entry_add_by_bob() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_entry_add(event) + + call_args = mock_send.call_args + body = call_args[0][2] + + # Check for 
permalink + assert "https://bely.example.com/views/item/view?id=100&logId=2" in body + assert "View entry:" in body + + @pytest.mark.asyncio + async def test_trigger_description(self, handler, mock_factory): + """Test: Trigger descriptions are included in notifications.""" + event = mock_factory.create_entry_add_by_bob() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_entry_add(event) + + call_args = mock_send.call_args + body = call_args[0][2] + + # Check for trigger description + assert "This notification was sent because" in body + assert "bob" in body + assert "new_entries" in body + + @pytest.mark.asyncio + async def test_error_handling(self, handler, mock_factory): + """Test: Errors are handled gracefully without crashing.""" + event = mock_factory.create_entry_add_by_bob() + + # Mock the logger + with patch.object(handler, "logger") as mock_logger: + # Simulate an error in send_notification + with patch.object( + handler.processor, "send_notification", side_effect=Exception("Network error") + ): + # Should not raise an exception + await handler.handle_log_entry_add(event) + + # Check that error was logged + assert mock_logger.error.called + + @pytest.mark.asyncio + async def test_entry_delete_by_collaborator(self, handler, mock_factory): + """Test: Bob deletes Alice's entry -> Alice gets notified.""" + event = mock_factory.create_entry_delete_by_bob_on_alice() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_entry_delete(event) + + # Alice should be notified as both owner and original creator (deduplicated) + assert mock_send.call_count == 1 + + call_args = mock_send.call_args + assert call_args[0][0] == "alice" + # Alice has email configured, so should get email-style subject + assert "Re: Log: Project Alpha Documentation" in call_args[0][1] + assert "[Entry Deleted]" in call_args[0][1] + 
assert "bob" in call_args[0][2] + assert "Deleted content" in call_args[0][2] + + @pytest.mark.asyncio + async def test_entry_delete_by_third_party(self, handler, mock_factory): + """Test: Charlie deletes Bob's entry in Alice's document -> Both Alice and Bob get notified.""" + event = mock_factory.create_entry_delete_by_charlie_on_bob() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_entry_delete(event) + + # Bob should be notified his entry was deleted + # Alice should also be notified as document owner + assert mock_send.call_count == 2 + + calls = mock_send.call_args_list + usernames = [call[0][0] for call in calls] + assert "bob" in usernames + assert "alice" in usernames + + # Check notification content + for call in calls: + assert "charlie" in call[0][2].lower() + assert "deleted" in call[0][2].lower() + + @pytest.mark.asyncio + async def test_reply_delete(self, handler, mock_factory): + """Test: Charlie deletes reply on Bob's entry in Alice's document -> Both Alice and Bob get notified.""" + event = mock_factory.create_reply_delete_by_charlie_on_bob_entry() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_entry_reply_delete(event) + + # Bob should be notified as entry creator + # Alice should be notified as document owner + assert mock_send.call_count == 2 + + calls = mock_send.call_args_list + usernames = [call[0][0] for call in calls] + assert "bob" in usernames # Entry creator + assert "alice" in usernames # Document owner + + # Check that charlie is mentioned in notifications + for call in calls: + assert "charlie" in call[0][2].lower() + assert "deleted" in call[0][2].lower() + + @pytest.mark.asyncio + async def test_no_config_handler(self): + """Test: Handler works without config (no notifications sent).""" + # Mock AppriseWithEmailHeaders class + mock_apprise_wrapper = MagicMock() + 
mock_apprise_wrapper.return_value.notify = MagicMock(return_value=True) + mock_apprise_wrapper.return_value.add = MagicMock(return_value=True) + mock_apprise_wrapper.return_value.__bool__ = MagicMock(return_value=True) + + with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_wrapper): + # Create handler without config - should not raise an error + handler = AppriseSmartNotificationHandler() + + # Verify processor has no user configurations + assert handler.processor.user_apprise_instances == {} + + factory = MockEventFactory() + event = factory.create_entry_add_by_bob() + + # Should handle event without error even without config + await handler.handle_log_entry_add(event) + + # Try other event types too - all should work without errors + await handler.handle_log_entry_update(factory.create_entry_update_by_bob_on_alice()) + await handler.handle_log_entry_reply_add(factory.create_reply_add_by_charlie_to_bob()) + await handler.handle_log_reaction_add(factory.create_reaction_add_by_bob_to_alice()) + await handler.handle_log_entry_delete(factory.create_entry_delete_by_bob_on_alice()) + await handler.handle_log_entry_reply_delete( + factory.create_reply_delete_by_charlie_on_bob_entry() + ) + + # Verify handler can process events without config + # (it just won't send notifications) + + +class TestNotificationContent: + """Test the content and formatting of notifications.""" + + @pytest.fixture + def formatter(self): + """Create a formatter instance.""" + from formatters import NotificationFormatter + + return NotificationFormatter( + bely_url="https://bely.example.com", logger=logging.getLogger("test") + ) + + def test_entry_add_formatting(self, formatter): + """Test formatting of entry add notifications.""" + factory = MockEventFactory() + event = factory.create_entry_add_by_bob() + + body = formatter.format_entry_added(event) + + assert "New entry added to Project Alpha Documentation" in body + assert "By: bob" in body + assert "Performance 
improved by 20%" in body + assert "alert('xss')" in body + + +if __name__ == "__main__": + # Run tests with pytest + pytest.main([__file__, "-v", "--asyncio-mode=auto"]) diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_headers_wrapper.py b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_headers_wrapper.py new file mode 100644 index 000000000..38259474a --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_headers_wrapper.py @@ -0,0 +1,602 @@ +#!/usr/bin/env python3 +""" +Comprehensive test suite for email headers wrapper functionality. + +This module tests that email headers (Message-ID, References, In-Reply-To) +are properly passed through the wrapper to enable email threading. +""" + +import sys +from pathlib import Path +from unittest.mock import MagicMock, patch +import pytest + +# Add parent directory to path for imports +handler_path = Path(__file__).parent.parent +if handler_path.exists(): + sys.path.insert(0, str(handler_path)) + +# Mock apprise at module level before importing apprise_email_wrapper +sys.modules["apprise"] = MagicMock() +sys.modules["apprise.plugins.email.base"] = MagicMock() + +import apprise_email_wrapper # noqa: E402 + + +class TestEmailNotificationWrapper: + """Test suite for EmailNotificationWrapper class.""" + + @pytest.fixture + def mock_apprise(self): + """Create mock apprise module.""" + mock_apprise = MagicMock() + mock_apprise.Apprise = MagicMock() + + # Create a mock NotifyEmail instance + mock_email_instance = MagicMock() + mock_email_instance.__class__.__name__ = "NotifyEmail" + mock_email_instance.send = MagicMock(return_value=True) + mock_email_instance.headers = {} + + mock_apprise.Apprise.instantiate = MagicMock(return_value=mock_email_instance) + + return mock_apprise, mock_email_instance + + def test_wrapper_initialization(self): + 
        """Test EmailNotificationWrapper initialization with various URLs."""
        # Test valid email URLs
        wrapper = apprise_email_wrapper.EmailNotificationWrapper("mailto://user:pass@gmail.com")
        assert wrapper.scheme == "mailto"

        wrapper_secure = apprise_email_wrapper.EmailNotificationWrapper(
            "mailtos://user:pass@gmail.com"
        )
        assert wrapper_secure.scheme == "mailtos"

        # Test invalid URL
        with pytest.raises(ValueError, match="Not an email URL"):
            apprise_email_wrapper.EmailNotificationWrapper("slack://token")

    def test_send_with_headers(self, mock_apprise):
        """Test sending email with custom headers."""
        mock_apprise_module, mock_email_instance = mock_apprise

        # Patch the apprise module that's already imported
        with patch.object(apprise_email_wrapper, "apprise", mock_apprise_module):
            wrapper = apprise_email_wrapper.EmailNotificationWrapper("mailto://test@example.com")

            # Define custom headers for email threading
            # NOTE(review): the empty-string values below look like the
            # original angle-bracketed ids (e.g. "<id@host>") were stripped
            # by text extraction -- confirm against the repository copy.
            headers = {
                "Message-ID": "",
                "References": "",
                "In-Reply-To": "",
                "X-Thread-Topic": "Test Document",
            }

            # Send notification with headers
            result = wrapper.send_with_headers(
                title="Test Subject", body="Test Body", headers=headers
            )

            # Verify the email instance was created
            mock_apprise_module.Apprise.instantiate.assert_called_once_with(
                "mailto://test@example.com"
            )

            # Verify headers were set on the email instance
            assert mock_email_instance.headers == headers

            # Verify send was called with correct parameters
            mock_email_instance.send.assert_called_once_with(body="Test Body", title="Test Subject")

            assert result is True

    def test_send_without_headers(self, mock_apprise):
        """Test sending email without custom headers."""
        mock_apprise_module, mock_email_instance = mock_apprise

        with patch.object(apprise_email_wrapper, "apprise", mock_apprise_module):
            wrapper = apprise_email_wrapper.EmailNotificationWrapper("mailto://test@example.com")

            # Send notification without headers
            result =
wrapper.send_with_headers(title="Test Subject", body="Test Body", headers=None) + + # Verify headers attribute exists but is empty + assert hasattr(mock_email_instance, "headers") + assert mock_email_instance.headers == {} + + # Verify send was called + mock_email_instance.send.assert_called_once() + assert result is True + + def test_headers_override_existing(self, mock_apprise): + """Test that new headers override any existing headers.""" + mock_apprise_module, mock_email_instance = mock_apprise + + # Pre-populate headers with existing values + mock_email_instance.headers = {"Old-Header": "old-value"} + + with patch.object(apprise_email_wrapper, "apprise", mock_apprise_module): + wrapper = apprise_email_wrapper.EmailNotificationWrapper("mailto://test@example.com") + + # Send with new headers + new_headers = {"Message-ID": "", "New-Header": "new-value"} + + wrapper.send_with_headers(title="Test", body="Test", headers=new_headers) + + # Verify old headers were cleared and new ones set + assert mock_email_instance.headers == new_headers + assert "Old-Header" not in mock_email_instance.headers + + def test_fallback_for_non_email(self, mock_apprise): + """Test fallback to regular Apprise for non-email notifications.""" + mock_apprise_module, _ = mock_apprise + + # Make instantiate return None to simulate non-email URL + mock_apprise_module.Apprise.instantiate.return_value = None + + # Create a mock Apprise instance for fallback + mock_apprise_instance = MagicMock() + mock_apprise_instance.add = MagicMock(return_value=True) + mock_apprise_instance.notify = MagicMock(return_value=True) + mock_apprise_module.Apprise.return_value = mock_apprise_instance + + with patch.object(apprise_email_wrapper, "apprise", mock_apprise_module): + wrapper = apprise_email_wrapper.EmailNotificationWrapper("mailto://test@example.com") + + # Send notification (should fall back to regular Apprise) + result = wrapper.send_with_headers( + title="Test", body="Test Body", headers={"Message-ID": 
                ""}
            )

            # Verify fallback was used
            mock_apprise_instance.add.assert_called_once_with("mailto://test@example.com")
            mock_apprise_instance.notify.assert_called_once_with(body="Test Body", title="Test")
            assert result is True

    def test_error_handling(self, mock_apprise):
        """Test error handling in send_with_headers."""
        mock_apprise_module, mock_email_instance = mock_apprise

        # Make send raise an exception
        mock_email_instance.send.side_effect = Exception("Network error")

        with patch.object(apprise_email_wrapper, "apprise", mock_apprise_module):
            wrapper = apprise_email_wrapper.EmailNotificationWrapper("mailto://test@example.com")

            # Should return False on error
            # NOTE(review): the empty Message-ID value below likely lost an
            # angle-bracketed id ("<...>") to text extraction -- confirm.
            with patch("builtins.print") as mock_print:
                result = wrapper.send_with_headers(
                    title="Test", body="Test", headers={"Message-ID": ""}
                )

                assert result is False
                mock_print.assert_called_once()
                assert "Error sending email with headers" in str(mock_print.call_args)


class TestAppriseWithEmailHeaders:
    """Test suite for AppriseWithEmailHeaders class."""

    @pytest.fixture
    def mock_setup(self):
        """Setup mocks for testing."""
        mock_apprise = MagicMock()
        mock_apprise.Apprise = MagicMock()

        mock_apprise_instance = MagicMock()
        mock_apprise_instance.add = MagicMock(return_value=True)
        mock_apprise_instance.notify = MagicMock(return_value=True)
        mock_apprise.Apprise.return_value = mock_apprise_instance

        return mock_apprise, mock_apprise_instance

    def test_add_email_url(self, mock_setup):
        """Test adding email URLs creates wrapper instances."""
        mock_apprise, mock_apprise_instance = mock_setup

        with patch.object(apprise_email_wrapper, "apprise", mock_apprise):
            wrapper = apprise_email_wrapper.AppriseWithEmailHeaders()

            # Mock EmailNotificationWrapper
            with patch.object(
                apprise_email_wrapper, "EmailNotificationWrapper"
            ) as mock_wrapper_class:
                mock_email_wrapper = MagicMock()
                mock_wrapper_class.return_value = mock_email_wrapper

                # Add
email URLs + assert wrapper.add("mailto://user1@example.com") + assert wrapper.add("mailtos://user2@example.com") + + # Verify wrappers were created + assert mock_wrapper_class.call_count == 2 + mock_wrapper_class.assert_any_call("mailto://user1@example.com") + mock_wrapper_class.assert_any_call("mailtos://user2@example.com") + + # Verify wrappers are stored + assert len(wrapper.email_wrappers) == 2 + + def test_add_non_email_url(self, mock_setup): + """Test adding non-email URLs uses regular Apprise.""" + mock_apprise, mock_apprise_instance = mock_setup + + with patch.object(apprise_email_wrapper, "apprise", mock_apprise): + wrapper = apprise_email_wrapper.AppriseWithEmailHeaders() + wrapper.apprise = mock_apprise_instance + + # Add non-email URLs + assert wrapper.add("slack://token") + assert wrapper.add("discord://webhook") + + # Verify regular Apprise was used + assert mock_apprise_instance.add.call_count == 2 + mock_apprise_instance.add.assert_any_call("slack://token") + mock_apprise_instance.add.assert_any_call("discord://webhook") + + # Verify URLs are tracked + assert len(wrapper.non_email_urls) == 2 + + def test_notify_with_mixed_urls(self, mock_setup): + """Test notify with both email and non-email URLs.""" + mock_apprise, mock_apprise_instance = mock_setup + + with patch.object(apprise_email_wrapper, "apprise", mock_apprise): + wrapper = apprise_email_wrapper.AppriseWithEmailHeaders() + wrapper.apprise = mock_apprise_instance + + # Create mock email wrappers + mock_email_wrapper1 = MagicMock() + mock_email_wrapper1.send_with_headers = MagicMock(return_value=True) + + mock_email_wrapper2 = MagicMock() + mock_email_wrapper2.send_with_headers = MagicMock(return_value=True) + + # Manually add to simulate successful adds + wrapper.email_wrappers = { + "mailto://user1@example.com": mock_email_wrapper1, + "mailto://user2@example.com": mock_email_wrapper2, + } + wrapper.non_email_urls = ["slack://token"] + + # Define headers for email threading + headers = { + 
"Message-ID": "", + "References": "", + } + + # Send notification + result = wrapper.notify(body="Test notification", title="Test Title", headers=headers) + + # Verify email wrappers were called with headers + mock_email_wrapper1.send_with_headers.assert_called_once_with( + "Test Title", "Test notification", headers + ) + mock_email_wrapper2.send_with_headers.assert_called_once_with( + "Test Title", "Test notification", headers + ) + + # Verify regular Apprise was called for non-email + mock_apprise_instance.notify.assert_called_once_with( + body="Test notification", title="Test Title" + ) + + assert result is True + + def test_notify_without_headers(self, mock_setup): + """Test notify without headers still works.""" + mock_apprise, mock_apprise_instance = mock_setup + + with patch.object(apprise_email_wrapper, "apprise", mock_apprise): + wrapper = apprise_email_wrapper.AppriseWithEmailHeaders() + + # Create mock email wrapper + mock_email_wrapper = MagicMock() + mock_email_wrapper.send_with_headers = MagicMock(return_value=True) + + wrapper.email_wrappers = {"mailto://user@example.com": mock_email_wrapper} + + # Send notification without headers + result = wrapper.notify(body="Test", title="Title") + + # Verify wrapper was called with None for headers + mock_email_wrapper.send_with_headers.assert_called_once_with("Title", "Test", None) + + assert result is True + + def test_bool_operator(self, mock_setup): + """Test __bool__ operator returns correct values.""" + mock_apprise, _ = mock_setup + + with patch.object(apprise_email_wrapper, "apprise", mock_apprise): + wrapper = apprise_email_wrapper.AppriseWithEmailHeaders() + + # Empty wrapper should be False + assert not wrapper + + # Add email wrapper + wrapper.email_wrappers["mailto://test@example.com"] = MagicMock() + assert wrapper + + # Clear and add non-email URL + wrapper.email_wrappers.clear() + wrapper.non_email_urls.append("slack://token") + assert wrapper + + def test_partial_failure_handling(self, 
mock_setup): + """Test handling when some notifications fail.""" + mock_apprise, mock_apprise_instance = mock_setup + + with patch.object(apprise_email_wrapper, "apprise", mock_apprise): + wrapper = apprise_email_wrapper.AppriseWithEmailHeaders() + + # Create mock email wrappers with different results + mock_email_wrapper1 = MagicMock() + mock_email_wrapper1.send_with_headers = MagicMock(return_value=True) + + mock_email_wrapper2 = MagicMock() + mock_email_wrapper2.send_with_headers = MagicMock(return_value=False) # This one fails + + wrapper.email_wrappers = { + "mailto://user1@example.com": mock_email_wrapper1, + "mailto://user2@example.com": mock_email_wrapper2, + } + + # Send notification + result = wrapper.notify( + body="Test", title="Title", headers={"Message-ID": ""} + ) + + # Should return False if any notification fails + assert result is False + + +class TestUtilityFunctions: + """Test utility functions in the module.""" + + def test_is_email_notification(self): + """Test is_email_notification function.""" + # Test email URLs + assert apprise_email_wrapper.is_email_notification("mailto://user@example.com") + assert apprise_email_wrapper.is_email_notification("mailtos://secure@example.com") + assert apprise_email_wrapper.is_email_notification("mailto://user:pass@smtp.gmail.com:587") + + # Test non-email URLs + assert not apprise_email_wrapper.is_email_notification("slack://token") + assert not apprise_email_wrapper.is_email_notification("discord://webhook/token") + assert not apprise_email_wrapper.is_email_notification("https://example.com") + assert not apprise_email_wrapper.is_email_notification("telegram://bot_token/chat_id") + + # Test invalid URLs + assert not apprise_email_wrapper.is_email_notification("") + assert not apprise_email_wrapper.is_email_notification("not a url") + + +class TestIntegrationScenarios: + """Test realistic integration scenarios.""" + + def test_email_threading_scenario(self): + """Test a complete email threading scenario.""" 
+ mock_apprise = MagicMock() + + # Create a mock email instance that tracks headers + class MockEmailInstance: + def __init__(self): + self.headers = {} + self.__class__.__name__ = "NotifyEmail" + self.sent_messages = [] + + def send(self, body, title): + self.sent_messages.append( + {"title": title, "body": body, "headers": self.headers.copy()} + ) + return True + + mock_email = MockEmailInstance() + mock_apprise.Apprise.instantiate = MagicMock(return_value=mock_email) + + with patch.object(apprise_email_wrapper, "apprise", mock_apprise): + # Create wrapper for email notification + wrapper = apprise_email_wrapper.EmailNotificationWrapper( + "mailto://notifications@example.com" + ) + + # Simulate a thread of messages + + # 1. Initial message + initial_headers = { + "Message-ID": "", + "X-Thread-Topic": "Project Status Report", + } + + wrapper.send_with_headers( + title="Project Status Report", + body="Initial project status", + headers=initial_headers, + ) + + # 2. First reply + reply1_headers = { + "Message-ID": "", + "References": "", + "In-Reply-To": "", + "X-Thread-Topic": "Project Status Report", + } + + wrapper.send_with_headers( + title="Re: Project Status Report", + body="Update on task completion", + headers=reply1_headers, + ) + + # 3. 
Second reply in thread + reply2_headers = { + "Message-ID": "", + "References": " ", + "In-Reply-To": "", + "X-Thread-Topic": "Project Status Report", + } + + wrapper.send_with_headers( + title="Re: Project Status Report", + body="Additional comments", + headers=reply2_headers, + ) + + # Verify all messages were sent with correct headers + assert len(mock_email.sent_messages) == 3 + + # Check first message + assert ( + mock_email.sent_messages[0]["headers"]["Message-ID"] + == "" + ) + assert "References" not in mock_email.sent_messages[0]["headers"] + + # Check first reply + assert ( + mock_email.sent_messages[1]["headers"]["Message-ID"] + == "" + ) + assert ( + mock_email.sent_messages[1]["headers"]["In-Reply-To"] + == "" + ) + + # Check second reply + assert ( + mock_email.sent_messages[2]["headers"]["Message-ID"] + == "" + ) + assert ( + mock_email.sent_messages[2]["headers"]["In-Reply-To"] + == "" + ) + assert ( + "" + in mock_email.sent_messages[2]["headers"]["References"] + ) + + def test_multiple_recipients_with_headers(self): + """Test sending to multiple email recipients with same headers.""" + mock_apprise = MagicMock() + mock_apprise.Apprise = MagicMock() + + # Track all email instances created + email_instances = [] + + def create_email_instance(url): + instance = MagicMock() + instance.__class__.__name__ = "NotifyEmail" + instance.headers = {} + instance.send = MagicMock(return_value=True) + instance.url = url # Track which URL this is for + email_instances.append(instance) + return instance + + mock_apprise.Apprise.instantiate = create_email_instance + + with patch.object(apprise_email_wrapper, "apprise", mock_apprise): + # Create wrapper with multiple email endpoints + wrapper = apprise_email_wrapper.AppriseWithEmailHeaders() + + # Mock EmailNotificationWrapper to track calls + with patch.object( + apprise_email_wrapper, "EmailNotificationWrapper" + ) as mock_wrapper_class: + # Create individual mock wrappers + mock_wrappers = [] + for i in range(3): 
+ mock_wrapper = MagicMock() + mock_wrapper.send_with_headers = MagicMock(return_value=True) + mock_wrappers.append(mock_wrapper) + + # Configure mock to return different wrapper for each call + mock_wrapper_class.side_effect = mock_wrappers + + # Add multiple email recipients + wrapper.add("mailto://alice@example.com") + wrapper.add("mailto://bob@example.com") + wrapper.add("mailto://charlie@example.com") + + # Send notification with threading headers + headers = { + "Message-ID": "", + "X-Priority": "High", + "X-Thread-Topic": "System Alert", + } + + result = wrapper.notify( + title="System Alert", body="Critical system update required", headers=headers + ) + + # Verify all wrappers were called with the same headers + for mock_wrapper in mock_wrappers: + mock_wrapper.send_with_headers.assert_called_once_with( + "System Alert", "Critical system update required", headers + ) + + assert result is True + + +class TestEdgeCases: + """Test edge cases and boundary conditions.""" + + def test_special_characters_in_headers(self): + """Test headers with special characters.""" + mock_apprise = MagicMock() + mock_email = MagicMock() + mock_email.__class__.__name__ = "NotifyEmail" + mock_email.headers = {} + mock_email.send = MagicMock(return_value=True) + mock_apprise.Apprise.instantiate = MagicMock(return_value=mock_email) + + with patch.object(apprise_email_wrapper, "apprise", mock_apprise): + wrapper = apprise_email_wrapper.EmailNotificationWrapper("mailto://test@example.com") + + # Headers with special characters + headers = { + "Message-ID": "", + "X-Custom-Header": "Value with spaces and 特殊文字", + "References": " ", + } + + result = wrapper.send_with_headers(title="Test", body="Test", headers=headers) + + # Headers should be set exactly as provided + assert mock_email.headers == headers + assert result is True + + def test_very_long_header_values(self): + """Test headers with very long values.""" + mock_apprise = MagicMock() + mock_email = MagicMock() + 
mock_email.__class__.__name__ = "NotifyEmail" + mock_email.headers = {} + mock_email.send = MagicMock(return_value=True) + mock_apprise.Apprise.instantiate = MagicMock(return_value=mock_email) + + with patch.object(apprise_email_wrapper, "apprise", mock_apprise): + wrapper = apprise_email_wrapper.EmailNotificationWrapper("mailto://test@example.com") + + # Create a very long References header (common in long email threads) + long_references = " ".join([f"" for i in range(50)]) + + headers = { + "Message-ID": "", + "References": long_references, + "X-Long-Value": "A" * 1000, # Very long header value + } + + result = wrapper.send_with_headers(title="Test", body="Test", headers=headers) + + # Headers should be set regardless of length + assert mock_email.headers == headers + assert len(mock_email.headers["References"]) > 500 + assert len(mock_email.headers["X-Long-Value"]) == 1000 + assert result is True + + +if __name__ == "__main__": + # Run tests with pytest + pytest.main([__file__, "-v", "--tb=short"]) diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_integration.py b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_integration.py new file mode 100644 index 000000000..f72e9f6f1 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_integration.py @@ -0,0 +1,813 @@ +""" +Integration tests for Apprise Smart Notification Handler. + +These tests demonstrate the handler processing realistic MQTT event sequences. 
+""" + +import asyncio +from datetime import datetime, timedelta +from pathlib import Path +from unittest.mock import MagicMock, patch, AsyncMock +import pytest +import yaml +import sys + +# Add src directory to path to import bely_mqtt +src_path = Path(__file__).parent.parent.parent.parent / "src" +if src_path.exists(): + sys.path.insert(0, str(src_path)) + +# Mock apprise before importing handler +sys.modules["apprise"] = MagicMock() + +from bely_mqtt import ( # noqa: E402 + LogEntryAddEvent, + LogEntryUpdateEvent, + LogEntryDeleteEvent, + LogEntryReplyAddEvent, + LogEntryReplyDeleteEvent, + LogReactionAddEvent, +) +from bely_mqtt.models import ( # noqa: E402 + LogInfo, + LogDocumentInfo, + LogReactionInfo, + ReactionInfo, + LogbookInfo, +) +from bely_mqtt.config import GlobalConfig # noqa: E402 + +# Add parent directory to path to import handler module +handler_path = Path(__file__).parent.parent +if handler_path.exists(): + sys.path.insert(0, str(handler_path)) + +from handler import AppriseSmartNotificationHandler # noqa: E402 + + +class TestScenarios: + """Integration test scenarios simulating real-world usage.""" + + @pytest.fixture + def test_config(self, tmp_path): + """Create a test configuration with multiple users.""" + config = { + "global": { + "mail_server": { + "host": "smtp.company.com", + "port": 587, + "username": "notifications@company.com", + "password": "secure_password", + "use_tls": True, + } + }, + "users": { + # Team lead - wants all notifications + "sarah": { + "apprise_urls": [ + "mailto://sarah@company.com", + "slack://TokenA/TokenB/TokenC/#general", + ], + "notifications": { + "entry_updates": True, + "own_entry_edits": True, + "entry_replies": True, + "new_entries": True, + "reactions": True, + "document_replies": True, + }, + }, + # Developer - selective notifications + "john": { + "apprise_urls": [ + "mailto://john@company.com", + "discord://webhook_id/webhook_token", + ], + "notifications": { + "entry_updates": False, # Too noisy + 
"own_entry_edits": True, # Important + "entry_replies": True, # Important + "new_entries": False, # Too noisy + "reactions": True, # Fun + "document_replies": False, # Not a document owner + }, + }, + # QA Engineer - minimal notifications + "emma": { + "apprise_urls": [ + "mailto://emma@company.com", + ], + "notifications": { + "entry_updates": False, + "own_entry_edits": True, # Only when someone edits her entries + "entry_replies": True, # Only direct replies + "new_entries": False, + "reactions": False, + "document_replies": False, + }, + }, + }, + } + + config_path = tmp_path / "integration_config.yaml" + with open(config_path, "w") as f: + yaml.dump(config, f) + + return config_path + + @pytest.fixture + def handler(self, test_config): + """Create handler with test configuration.""" + global_config = GlobalConfig({"bely_url": "https://bely.company.com"}) + + # Mock AppriseWithEmailHeaders class + mock_apprise_wrapper = MagicMock() + mock_apprise_wrapper.return_value.notify = MagicMock(return_value=True) + mock_apprise_wrapper.return_value.add = MagicMock(return_value=True) + mock_apprise_wrapper.return_value.__bool__ = MagicMock(return_value=True) + + with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_wrapper): + handler = AppriseSmartNotificationHandler( + config_path=str(test_config), global_config=global_config + ) + + # Mock Apprise notify for all users + for username, apobj in handler.processor.user_apprise_instances.items(): + apobj.notify = MagicMock(return_value=True) + + return handler + + @pytest.fixture + def notification_tracker(self, handler): + """Track all notifications sent during tests.""" + notifications = [] + + async def track_notification(username, title, body, headers=None): + notifications.append( + { + "username": username, + "title": title, + "body": body, + "headers": headers, + "timestamp": datetime.now().isoformat(), + } + ) + # Call the original mock + return True + + handler.processor.send_notification = 
AsyncMock(side_effect=track_notification) + return notifications + + @pytest.mark.asyncio + async def test_scenario_sprint_planning_document(self, handler, notification_tracker): + """ + Scenario: Sprint Planning Document + + Sarah (team lead) creates a sprint planning document. + John and Emma collaborate by adding entries and comments. + """ + base_time = datetime.now() + + # Sarah's sprint planning document + sprint_doc = LogDocumentInfo( + id=200, + name="Sprint 23 Planning", + ownerUsername="sarah", + createdByUsername="sarah", + lastModifiedByUsername="sarah", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ) + + # 1. John adds a task entry + from bely_mqtt.models import LogbookInfo + + john_entry = LogInfo( + id=201, + enteredByUsername="john", + lastModifiedByUsername="john", + enteredOnDateTime=(base_time + timedelta(minutes=5)).isoformat(), + lastModifiedOnDateTime=(base_time + timedelta(minutes=5)).isoformat(), + ) + + event1 = LogEntryAddEvent( + eventTimestamp=(base_time + timedelta(minutes=5)).isoformat(), + eventTriggedByUsername="john", + entityName="LogEntry", + entityId=201, + parentLogDocumentInfo=sprint_doc, + logInfo=john_entry, + description="Added API refactoring task", + textDiff="+ Task: Refactor /api/v1/users endpoint\n+ Estimate: 5 story points\n+ Dependencies: Database migration", + logbookList=[LogbookInfo(id=1, name="Sprint Planning", displayName="Sprint Planning")], + ) + + await handler.handle_log_entry_add(event1) + + # Sarah should be notified (document owner, new_entries enabled) + assert len(notification_tracker) == 1 + assert notification_tracker[0]["username"] == "sarah" + # Sarah has email configured, so should get email-style subject + assert "Re: Log: Sprint 23 Planning" in notification_tracker[0]["title"] + + # 2. 
Emma adds a QA consideration as a reply + event2 = LogEntryReplyAddEvent( + eventTimestamp=(base_time + timedelta(minutes=10)).isoformat(), + eventTriggedByUsername="emma", + entityName="LogEntryReply", + entityId=202, + parentLogDocumentInfo=sprint_doc, + parentLogInfo=john_entry, + logInfo=LogInfo( + id=202, + enteredByUsername="emma", + lastModifiedByUsername="emma", + enteredOnDateTime=(base_time + timedelta(minutes=10)).isoformat(), + lastModifiedOnDateTime=(base_time + timedelta(minutes=10)).isoformat(), + ), + textDiff="Need to update integration tests for the new endpoint structure. Also check backwards compatibility.", + logbookList=[LogbookInfo(id=1, name="Sprint Planning", displayName="Sprint Planning")], + description="QA consideration", + ) + + await handler.handle_log_entry_reply_add(event2) + + # John should be notified (entry creator, entry_replies enabled) + # Sarah should be notified (document owner, document_replies enabled) + assert len(notification_tracker) == 3 # 1 previous + 2 new + new_notifications = notification_tracker[-2:] + usernames = {n["username"] for n in new_notifications} + assert usernames == {"john", "sarah"} + + # 3. 
Sarah updates John's entry with priority + event3 = LogEntryUpdateEvent( + eventTimestamp=(base_time + timedelta(minutes=15)).isoformat(), + eventTriggedByUsername="sarah", + entityName="LogEntry", + entityId=201, + parentLogDocumentInfo=sprint_doc, + logInfo=john_entry, + description="Added priority", + textDiff="+ Priority: HIGH - Blocking customer feature", + logbookList=[LogbookInfo(id=1, name="Sprint Planning", displayName="Sprint Planning")], + ) + + await handler.handle_log_entry_update(event3) + + # John should be notified (own_entry_edits enabled) + # Sarah is the one making the update, so she won't be notified about her own action + assert len(notification_tracker) == 4 # 3 previous + 1 new + new_notification = notification_tracker[-1] + + # Check John's notification + assert new_notification["username"] == "john" + # John has email configured, so should get email-style subject + assert "Re: Log: Sprint 23 Planning" in new_notification["title"] + assert "[Entry Updated]" in new_notification["title"] + assert "sarah" in new_notification["body"].lower() + + # 4. 
John reacts to Emma's QA comment with thumbs up + from bely_mqtt.models import ReactionId + + event4 = LogReactionAddEvent( + eventTimestamp=(base_time + timedelta(minutes=20)).isoformat(), + eventTriggedByUsername="john", + entityName="LogReaction", + entityId=ReactionId(logId=202, reactionId=1, userId=2), + parentLogDocumentInfo=sprint_doc, + parentLogInfo=LogInfo( + id=202, # Emma's reply + enteredByUsername="emma", + lastModifiedByUsername="emma", + enteredOnDateTime=(base_time + timedelta(minutes=10)).isoformat(), + lastModifiedOnDateTime=(base_time + timedelta(minutes=10)).isoformat(), + ), + logReaction=LogReactionInfo( + id=ReactionId(logId=202, reactionId=1, userId=2), + reaction=ReactionInfo( + id=1, emoji="👍", name="thumbsup", emojiCode=128077, description="Acknowledged" + ), + username="john", + ), + description="Agreed with QA considerations", + ) + + await handler.handle_log_reaction_add(event4) + + # Emma has reactions disabled, so no notification + assert len(notification_tracker) == 4 # No new notifications + + @pytest.mark.asyncio + async def test_scenario_bug_report_collaboration(self, handler, notification_tracker): + """ + Scenario: Bug Report Document + + Emma creates a bug report document. + John investigates and updates. + Sarah reviews and adds comments. + """ + base_time = datetime.now() + + # Emma's bug report document + bug_doc = LogDocumentInfo( + id=300, + name="BUG-1234: API Response Timeout", + ownerUsername="emma", + createdByUsername="emma", + lastModifiedByUsername="emma", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ) + + # Emma's initial bug report entry (not used in this test scenario) + # emma_entry = LogInfo( + # id=301, + # enteredByUsername="emma", + # lastModifiedByUsername="emma", + # enteredOnDateTime=base_time.isoformat(), + # lastModifiedOnDateTime=base_time.isoformat(), + # ) + + # 1. 
John adds investigation findings + event1 = LogEntryAddEvent( + eventTimestamp=(base_time + timedelta(hours=1)).isoformat(), + eventTriggedByUsername="john", + entityName="LogEntry", + entityId=302, + parentLogDocumentInfo=bug_doc, + logInfo=LogInfo( + id=302, + enteredByUsername="john", + lastModifiedByUsername="john", + enteredOnDateTime=(base_time + timedelta(hours=1)).isoformat(), + lastModifiedOnDateTime=(base_time + timedelta(hours=1)).isoformat(), + ), + description="Root cause analysis", + textDiff="+ Found N+1 query issue in user permissions check\n+ Each API call triggers 50+ database queries\n+ Solution: Implement eager loading", + logbookList=[LogbookInfo(id=2, name="Bug Reports", displayName="Bug Reports")], + ) + + await handler.handle_log_entry_add(event1) + + # Emma should be notified (document owner, but new_entries disabled) + # So no notification + assert len(notification_tracker) == 0 + + # 2. Sarah adds a high-priority comment on John's findings + event2 = LogEntryReplyAddEvent( + eventTimestamp=(base_time + timedelta(hours=2)).isoformat(), + eventTriggedByUsername="sarah", + entityName="LogEntryReply", + entityId=303, + parentLogDocumentInfo=bug_doc, + parentLogInfo=LogInfo( + id=302, # John's investigation entry + enteredByUsername="john", + lastModifiedByUsername="john", + enteredOnDateTime=(base_time + timedelta(hours=1)).isoformat(), + lastModifiedOnDateTime=(base_time + timedelta(hours=1)).isoformat(), + ), + logInfo=LogInfo( + id=303, + enteredByUsername="sarah", + lastModifiedByUsername="sarah", + enteredOnDateTime=(base_time + timedelta(hours=2)).isoformat(), + lastModifiedOnDateTime=(base_time + timedelta(hours=2)).isoformat(), + ), + textDiff="This is affecting our major client. 
Please prioritize the fix for today's deployment.", + logbookList=[LogbookInfo(id=2, name="Bug Reports", displayName="Bug Reports")], + description="Priority escalation", + ) + + await handler.handle_log_entry_reply_add(event2) + + # John should be notified (entry_replies enabled) + # Emma should be notified (document owner, but document_replies disabled) + assert len(notification_tracker) == 1 + assert notification_tracker[0]["username"] == "john" + # John has email configured, so should get email-style subject + assert "Re: Log: BUG-1234: API Response Timeout" in notification_tracker[0]["title"] + + # 3. John updates his own entry with fix status + event3 = LogEntryUpdateEvent( + eventTimestamp=(base_time + timedelta(hours=3)).isoformat(), + eventTriggedByUsername="john", + entityName="LogEntry", + entityId=302, + parentLogDocumentInfo=bug_doc, + logInfo=LogInfo( + id=302, # John's own entry + enteredByUsername="john", + lastModifiedByUsername="john", + enteredOnDateTime=(base_time + timedelta(hours=1)).isoformat(), + lastModifiedOnDateTime=(base_time + timedelta(hours=3)).isoformat(), + ), + description="Fix deployed", + textDiff="+ STATUS: Fixed and deployed to production\n+ Deployment time: 15:30 UTC", + logbookList=[LogbookInfo(id=2, name="Bug Reports", displayName="Bug Reports")], + ) + + await handler.handle_log_entry_update(event3) + + # John is updating his own entry, so no notification to him + # Emma gets notified as document owner (but entry_updates disabled) + assert len(notification_tracker) == 1 # No new notifications + + # 4. 
Sarah reacts with celebration + from bely_mqtt.models import ReactionId + + event4 = LogReactionAddEvent( + eventTimestamp=(base_time + timedelta(hours=4)).isoformat(), + eventTriggedByUsername="sarah", + entityName="LogReaction", + entityId=ReactionId(logId=302, reactionId=2, userId=1), + parentLogDocumentInfo=bug_doc, + parentLogInfo=LogInfo( + id=302, # John's entry + enteredByUsername="john", + lastModifiedByUsername="john", + enteredOnDateTime=(base_time + timedelta(hours=1)).isoformat(), + lastModifiedOnDateTime=(base_time + timedelta(hours=3)).isoformat(), + ), + logReaction=LogReactionInfo( + id=ReactionId(logId=302, reactionId=2, userId=1), + reaction=ReactionInfo( + id=2, emoji="🎉", name="tada", emojiCode=127881, description="Celebration" + ), + username="sarah", + ), + description="Great work on the quick fix!", + ) + + await handler.handle_log_reaction_add(event4) + + # John should be notified (reactions enabled) + assert len(notification_tracker) == 2 + assert notification_tracker[-1]["username"] == "john" + assert "🎉" in notification_tracker[-1]["body"] + + @pytest.mark.asyncio + async def test_notification_deduplication(self, handler, notification_tracker): + """Test that users don't get duplicate notifications for the same event.""" + base_time = datetime.now() + + # Sarah's document + doc = LogDocumentInfo( + id=400, + name="Test Document", + ownerUsername="sarah", + createdByUsername="sarah", + lastModifiedByUsername="sarah", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ) + + # Sarah's entry (she is both owner and creator) + sarah_entry = LogInfo( + id=401, + enteredByUsername="sarah", + lastModifiedByUsername="sarah", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ) + + # John updates Sarah's entry + event = LogEntryUpdateEvent( + eventTimestamp=base_time.isoformat(), + eventTriggedByUsername="john", + entityName="LogEntry", + entityId=401, + 
parentLogDocumentInfo=doc, + logInfo=sarah_entry, + description="Update", + textDiff="+ Added content", + logbookList=[LogbookInfo(id=3, name="Test", displayName="Test")], + ) + + await handler.handle_log_entry_update(event) + + # Sarah should only get ONE notification even though she's both owner and creator + sarah_notifications = [n for n in notification_tracker if n["username"] == "sarah"] + assert len(sarah_notifications) == 1 + + @pytest.mark.asyncio + async def test_self_action_no_notification(self, handler, notification_tracker): + """Test that users don't get notified about their own actions.""" + base_time = datetime.now() + + # John's document + doc = LogDocumentInfo( + id=500, + name="John's Notes", + ownerUsername="john", + createdByUsername="john", + lastModifiedByUsername="john", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ) + + # John adds an entry to his own document + event = LogEntryAddEvent( + eventTimestamp=base_time.isoformat(), + eventTriggedByUsername="john", + entityName="LogEntry", + entityId=501, + parentLogDocumentInfo=doc, + logInfo=LogInfo( + id=501, + enteredByUsername="john", + lastModifiedByUsername="john", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ), + description="Personal note", + textDiff="+ Remember to review PR #123", + logbookList=[LogbookInfo(id=4, name="Personal", displayName="Personal")], + ) + + await handler.handle_log_entry_add(event) + + # No notification should be sent (John is both creator and document owner) + assert len(notification_tracker) == 0 + + @pytest.mark.asyncio + async def test_scenario_content_deletion(self, handler, notification_tracker): + """ + Scenario: Content Deletion + + Test notifications for entry and reply deletions. 
+ """ + base_time = datetime.now() + + # Sarah's document + doc = LogDocumentInfo( + id=600, + name="Team Retrospective", + ownerUsername="sarah", + createdByUsername="sarah", + lastModifiedByUsername="sarah", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ) + + # John's entry in Sarah's document + john_entry = LogInfo( + id=601, + enteredByUsername="john", + lastModifiedByUsername="john", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ) + + # 1. Emma deletes John's entry + event1 = LogEntryDeleteEvent( + eventTimestamp=(base_time + timedelta(minutes=5)).isoformat(), + eventTriggedByUsername="emma", + entityName="LogEntry", + entityId=601, + parentLogDocumentInfo=doc, + logInfo=john_entry, + description="Removed duplicate entry", + textDiff="- Deleted content: This was a duplicate of entry #599", + logbookList=[LogbookInfo(id=5, name="Retrospective", displayName="Retrospective")], + ) + + await handler.handle_log_entry_delete(event1) + + # John should be notified (his entry was deleted) + # Sarah should be notified (document owner) + assert len(notification_tracker) == 2 + usernames = {n["username"] for n in notification_tracker} + assert usernames == {"john", "sarah"} + + # Check John's notification + john_notification = [n for n in notification_tracker if n["username"] == "john"][0] + # John has email configured, so should get email-style subject + assert "Re: Log: Team Retrospective" in john_notification["title"] + assert "[Entry Deleted]" in john_notification["title"] + assert "emma" in john_notification["body"].lower() + + # Check Sarah's notification + sarah_notification = [n for n in notification_tracker if n["username"] == "sarah"][0] + # Sarah has email configured, so should get email-style subject + assert "Re: Log: Team Retrospective" in sarah_notification["title"] + assert "[Entry Deleted]" in sarah_notification["title"] + + # 2. 
Sarah deletes a reply on John's entry + event2 = LogEntryReplyDeleteEvent( + eventTimestamp=(base_time + timedelta(minutes=10)).isoformat(), + eventTriggedByUsername="sarah", + entityName="LogEntryReply", + entityId=602, + parentLogDocumentInfo=doc, + parentLogInfo=john_entry, + logInfo=LogInfo( + id=602, + enteredByUsername="emma", + lastModifiedByUsername="emma", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ), + textDiff="- Deleted reply: This comment was off-topic", + logbookList=[LogbookInfo(id=5, name="Retrospective", displayName="Retrospective")], + description="Removed off-topic comment", + ) + + await handler.handle_log_entry_reply_delete(event2) + + # John should be notified (reply on his entry was deleted) + # Sarah is the one deleting and is the document owner, so she gets one notification + # (deduplicated since she's both the actor and the owner) + assert len(notification_tracker) == 3 # 2 previous + 1 new + new_notification = notification_tracker[-1] + assert new_notification["username"] == "john" + # John has email configured, so should get email-style subject + assert "Re: Log: Team Retrospective" in new_notification["title"] + assert "[Reply Deleted]" in new_notification["title"] + assert "sarah" in new_notification["body"].lower() + + +class TestErrorHandling: + """Test error handling and edge cases.""" + + @pytest.mark.asyncio + async def test_missing_user_config(self, tmp_path): + """Test handling of events for users not in config.""" + config = { + "users": { + "alice": { + "apprise_urls": ["mailto://alice@example.com"], + "notifications": {"entry_replies": True}, + } + } + } + + config_path = tmp_path / "limited_config.yaml" + with open(config_path, "w") as f: + yaml.dump(config, f) + + handler = AppriseSmartNotificationHandler(config_path=str(config_path)) + + # Event from unconfigured user "bob" + from bely_mqtt.models import LogbookInfo + + event = LogEntryAddEvent( + 
eventTimestamp=datetime.now().isoformat(), + eventTriggedByUsername="bob", + entityName="LogEntry", + entityId=1, + parentLogDocumentInfo=LogDocumentInfo( + id=1, + name="Test Doc", + ownerUsername="charlie", # Also unconfigured + createdByUsername="charlie", + lastModifiedByUsername="charlie", + enteredOnDateTime=datetime.now().isoformat(), + lastModifiedOnDateTime=datetime.now().isoformat(), + ), + logInfo=LogInfo( + id=1, + enteredByUsername="bob", + lastModifiedByUsername="bob", + enteredOnDateTime=datetime.now().isoformat(), + lastModifiedOnDateTime=datetime.now().isoformat(), + ), + description="Test", + textDiff="Test content", + logbookList=[LogbookInfo(id=1, name="Test", displayName="Test")], + ) + + # Should handle gracefully without errors + await handler.handle_log_entry_add(event) + + @pytest.mark.asyncio + async def test_malformed_event_data(self, tmp_path): + """Test handling of events with empty or unusual data.""" + config = { + "users": { + "test": { + "apprise_urls": ["mailto://test@example.com"], + "notifications": { + "new_entries": True, + "entry_updates": True, + }, + } + } + } + + config_path = tmp_path / "test_config.yaml" + with open(config_path, "w") as f: + yaml.dump(config, f) + + handler = AppriseSmartNotificationHandler(config_path=str(config_path)) + + # Mock the Apprise notify method + for username, apobj in handler.processor.user_apprise_instances.items(): + apobj.notify = MagicMock(return_value=True) + + # Create event with empty strings and edge case data + from bely_mqtt.models import LogbookInfo + + # Test 1: Empty username (should not crash, but won't notify anyone) + event = LogEntryAddEvent( + eventTimestamp=datetime.now().isoformat(), + eventTriggedByUsername="", # Empty username + entityName="LogEntry", + entityId=1, + parentLogDocumentInfo=LogDocumentInfo( + id=1, + name="Test Document", + ownerUsername="test", + createdByUsername="test", + lastModifiedByUsername="test", + enteredOnDateTime=datetime.now().isoformat(), + 
lastModifiedOnDateTime=datetime.now().isoformat(), + ), + logInfo=LogInfo( + id=1, + enteredByUsername="test", + lastModifiedByUsername="test", + enteredOnDateTime=datetime.now().isoformat(), + lastModifiedOnDateTime=datetime.now().isoformat(), + ), + description="", # Empty description + textDiff="", # Empty text diff + logbookList=[LogbookInfo(id=1, name="Test", displayName="Test")], + ) + + # Should handle gracefully without crashing + try: + await asyncio.wait_for(handler.handle_log_entry_add(event), timeout=2.0) + except asyncio.TimeoutError: + pytest.fail("Handler timed out processing event with empty username") + + # Test 2: Very long strings (should truncate or handle gracefully) + event2 = LogEntryAddEvent( + eventTimestamp=datetime.now().isoformat(), + eventTriggedByUsername="long_user", + entityName="LogEntry", + entityId=2, + parentLogDocumentInfo=LogDocumentInfo( + id=2, + name="Document with very long name " + "y" * 500, # Reduced from 5000 + ownerUsername="test", + createdByUsername="test", + lastModifiedByUsername="test", + enteredOnDateTime=datetime.now().isoformat(), + lastModifiedOnDateTime=datetime.now().isoformat(), + ), + logInfo=LogInfo( + id=2, + enteredByUsername="test", + lastModifiedByUsername="test", + enteredOnDateTime=datetime.now().isoformat(), + lastModifiedOnDateTime=datetime.now().isoformat(), + ), + description="Very long description " + "z" * 1000, # Reduced from 10000 + textDiff="+ " + "content" * 200, # Reduced from 2000 + logbookList=[LogbookInfo(id=1, name="Test", displayName="Test")], + ) + + # Should handle gracefully without crashing + try: + await asyncio.wait_for(handler.handle_log_entry_add(event2), timeout=2.0) + except asyncio.TimeoutError: + pytest.fail("Handler timed out processing event with long strings") + + # Test 3: Special characters in strings + event3 = LogEntryAddEvent( + eventTimestamp=datetime.now().isoformat(), + eventTriggedByUsername="user", + entityName="LogEntry", + entityId=3, + 
parentLogDocumentInfo=LogDocumentInfo( + id=3, + name="Test HTML & Special Chars", + ownerUsername="test", + createdByUsername="test", + lastModifiedByUsername="test", + enteredOnDateTime=datetime.now().isoformat(), + lastModifiedOnDateTime=datetime.now().isoformat(), + ), + logInfo=LogInfo( + id=3, + enteredByUsername="test", + lastModifiedByUsername="test", + enteredOnDateTime=datetime.now().isoformat(), + lastModifiedOnDateTime=datetime.now().isoformat(), + ), + description="Description with ", + textDiff="+ Content with special chars: & < > \" '", + logbookList=[LogbookInfo(id=1, name="Test", displayName="Test")], + ) + + # Should handle gracefully without crashing + try: + await asyncio.wait_for(handler.handle_log_entry_add(event3), timeout=2.0) + except asyncio.TimeoutError: + pytest.fail("Handler timed out processing event with special characters") + + +if __name__ == "__main__": + # Run integration tests + pytest.main([__file__, "-v", "--asyncio-mode=auto"]) diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/hybrid_event_handler.py b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/hybrid_event_handler.py new file mode 100644 index 000000000..1c31dce77 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/hybrid_event_handler.py @@ -0,0 +1,222 @@ +""" +Example handler that uses both specific and generic event handler methods with typed events. + +This handler demonstrates how to implement both specific event handlers +(for specific event types) and a generic fallback handler for other events. +This is the most flexible approach. + +The framework automatically parses MQTT messages into properly typed event objects +and passes them to the appropriate handler method. 
class HybridEventHandler(MQTTHandler):
    """Handler mixing typed per-event methods with generic fallbacks.

    Specific log-entry, reply, and reaction events are delivered to their
    own typed handler methods; other add/update/delete events reach the
    generic handlers, and anything else falls through to ``handle_generic``.
    All typed methods receive already-parsed event objects, never raw
    MQTT messages.
    """

    @property
    def topic_pattern(self) -> str:
        """Match every BELY event topic."""
        return "bely/#"

    # -- typed log entry events ------------------------------------------

    async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None:
        """Log and process a new log entry.

        Args:
            event: Parsed log entry add event.
        """
        try:
            self.logger.info(f"Log entry added: {event.log_info.id}")
            self.logger.debug(f"Document: {event.parent_log_document_info.name}")
            await self.process_log_entry_add(event)
        except Exception as err:
            self.logger.error(f"Error processing log entry add: {err}", exc_info=True)

    async def handle_log_entry_update(self, event: LogEntryUpdateEvent) -> None:
        """Log and process a modified log entry.

        Args:
            event: Parsed log entry update event.
        """
        try:
            self.logger.info(f"Log entry updated: {event.log_info.id}")
            self.logger.debug(f"Document: {event.parent_log_document_info.name}")
            await self.process_log_entry_update(event)
        except Exception as err:
            self.logger.error(f"Error processing log entry update: {err}", exc_info=True)

    # -- typed reply events ----------------------------------------------

    async def handle_log_entry_reply_add(self, event: LogEntryReplyAddEvent) -> None:
        """Log and process a new reply.

        Args:
            event: Parsed reply add event.
        """
        try:
            self.logger.info(f"Reply added to log entry: {event.parent_log_info.id}")
            await self.process_reply_add(event)
        except Exception as err:
            self.logger.error(f"Error processing reply add: {err}", exc_info=True)

    async def handle_log_entry_reply_update(self, event: LogEntryReplyUpdateEvent) -> None:
        """Log and process a modified reply.

        Args:
            event: Parsed reply update event.
        """
        try:
            self.logger.info(f"Reply updated on log entry: {event.parent_log_info.id}")
            await self.process_reply_update(event)
        except Exception as err:
            self.logger.error(f"Error processing reply update: {err}", exc_info=True)

    # -- typed reaction events -------------------------------------------

    async def handle_log_reaction_add(self, event: LogReactionAddEvent) -> None:
        """Log and process a newly added reaction.

        Args:
            event: Parsed reaction add event.
        """
        try:
            self.logger.info(
                f"Reaction added to log entry {event.parent_log_info.id}: "
                f"{event.log_reaction.reaction.emoji} by {event.log_reaction.username}"
            )
            await self.process_reaction_add(event)
        except Exception as err:
            self.logger.error(f"Error processing reaction add: {err}", exc_info=True)

    async def handle_log_reaction_delete(self, event: LogReactionDeleteEvent) -> None:
        """Log and process a removed reaction.

        Args:
            event: Parsed reaction delete event.
        """
        try:
            self.logger.info(
                f"Reaction deleted from log entry {event.parent_log_info.id}: "
                f"{event.log_reaction.reaction.emoji} by {event.log_reaction.username}"
            )
            await self.process_reaction_delete(event)
        except Exception as err:
            self.logger.error(f"Error processing reaction delete: {err}", exc_info=True)

    # -- generic CRUD events ---------------------------------------------

    async def handle_generic_add(self, event: CoreEvent) -> None:
        """Log and process a generic add event.

        Args:
            event: Parsed core event.
        """
        try:
            self.logger.info(f"Generic add: {event.entity_name} (ID: {event.entity_id})")
            await self.process_generic_add(event)
        except Exception as err:
            self.logger.error(f"Error processing generic add: {err}", exc_info=True)

    async def handle_generic_update(self, event: CoreEvent) -> None:
        """Log and process a generic update event.

        Args:
            event: Parsed core event.
        """
        try:
            self.logger.info(f"Generic update: {event.entity_name} (ID: {event.entity_id})")
            await self.process_generic_update(event)
        except Exception as err:
            self.logger.error(f"Error processing generic update: {err}", exc_info=True)

    async def handle_generic_delete(self, event: CoreEvent) -> None:
        """Log and process a generic delete event.

        Args:
            event: Parsed core event.
        """
        try:
            self.logger.info(f"Generic delete: {event.entity_name} (ID: {event.entity_id})")
            await self.process_generic_delete(event)
        except Exception as err:
            self.logger.error(f"Error processing generic delete: {err}", exc_info=True)

    # -- catch-all fallback ----------------------------------------------

    async def handle_generic(self, message: MQTTMessage) -> None:
        """Last-resort handler for topics matched by no method above.

        Args:
            message: The raw, unparsed MQTT message.
        """
        self.logger.debug(f"Received unknown event on topic: {message.topic}")

    # -- processing hooks (override in subclasses) -----------------------

    async def process_log_entry_add(self, event: LogEntryAddEvent) -> None:
        """Hook for log entry add events."""
        self.logger.debug(f"Processing log entry add: {event.log_info.id}")

    async def process_log_entry_update(self, event: LogEntryUpdateEvent) -> None:
        """Hook for log entry update events."""
        self.logger.debug(f"Processing log entry update: {event.log_info.id}")

    async def process_reply_add(self, event: LogEntryReplyAddEvent) -> None:
        """Hook for reply add events."""
        self.logger.debug(f"Processing reply add to: {event.parent_log_info.id}")

    async def process_reply_update(self, event: LogEntryReplyUpdateEvent) -> None:
        """Hook for reply update events."""
        self.logger.debug(f"Processing reply update to: {event.parent_log_info.id}")

    async def process_reaction_add(self, event: LogReactionAddEvent) -> None:
        """Hook for reaction add events."""
        self.logger.debug(
            f"Processing reaction add: {event.log_reaction.reaction.emoji} "
            f"on log {event.parent_log_info.id}"
        )

    async def process_reaction_delete(self, event: LogReactionDeleteEvent) -> None:
        """Hook for reaction delete events."""
        self.logger.debug(
            f"Processing reaction delete: {event.log_reaction.reaction.emoji} "
            f"on log {event.parent_log_info.id}"
        )

    async def process_generic_add(self, event: CoreEvent) -> None:
        """Hook for generic add events."""
        self.logger.debug(f"Processing generic add: {event.entity_name}")

    async def process_generic_update(self, event: CoreEvent) -> None:
        """Hook for generic update events."""
        self.logger.debug(f"Processing generic update: {event.entity_name}")

    async def process_generic_delete(self, event: CoreEvent) -> None:
        """Hook for generic delete events."""
        self.logger.debug(f"Processing generic delete: {event.entity_name}")
class LogEntryAddHandler(MQTTHandler):
    """Reacts to newly created log entries.

    Parses the raw payload into a typed event, logs a summary of the new
    entry, and — when an API client is available — optionally fetches the
    full entry for additional context.
    """

    @property
    def topic_pattern(self) -> str:
        """Only log-entry add events."""
        return "bely/logEntry/Add"

    async def handle(self, message: MQTTMessage) -> None:
        """Parse, log, and optionally enrich a log-entry add event."""
        try:
            event = LogEntryAddEvent(**message.payload)
            self.logger.info(
                f"Log entry added: ID={event.log_info.id}, "
                f"Document={event.parent_log_document_info.name}, "
                f"By={event.event_triggered_by_username}"
            )
            self.logger.debug(f"Text: {event.text_diff}")

            # Optionally pull full entry details from the BELY API.
            if self.api_client:
                try:
                    details = self.api_client.get_log_entry(event.log_info.id)
                    self.logger.debug(f"Retrieved log entry data: {details}")
                except NotImplementedError:
                    # API method not provided by this client — best effort only.
                    pass

        except Exception as err:
            self.logger.error(f"Failed to parse log entry add event: {err}")
class LogEntryUpdateHandler(MQTTHandler):
    """Processes modifications to existing log entries.

    Demonstrates overriding the default topic pattern so that only
    update events are delivered to this handler.
    """

    @property
    def topic_pattern(self) -> str:
        """Only log-entry update events (overrides the default)."""
        return "bely/logEntry/Update"

    async def handle(self, message: MQTTMessage) -> None:
        """Parse and log a log-entry update event."""
        try:
            event = LogEntryUpdateEvent(**message.payload)
            self.logger.info(
                f"Log entry updated: ID={event.log_info.id}, "
                f"Document={event.parent_log_document_info.name}, "
                f"By={event.event_triggered_by_username}"
            )
            self.logger.debug(f"Changes:\n{event.text_diff}")

        except Exception as err:
            self.logger.error(f"Failed to parse log entry update event: {err}")
class LoggingHandler(MQTTHandler):
    """Minimal handler mirroring every BELY event into the standard log.

    Subscribes to all BELY topics; the topic is logged at INFO and the
    full JSON payload at DEBUG.
    """

    @property
    def topic_pattern(self) -> str:
        """All BELY topics."""
        return "bely/#"

    async def handle(self, message: MQTTMessage) -> None:
        """Write the topic and pretty-printed payload to the logger."""
        self.logger.info(f"Event received on topic: {message.topic}")
        self.logger.debug(f"Payload: {json.dumps(message.payload, indent=2)}")
+ + This handler subscribes to all BELY topics and logs each event to a + separate file based on the topic. Files are automatically created in + the specified logging directory. + + Example: + handler = AdvancedLoggingHandler(logging_dir="/var/log/bely") + + Messages on "bely/logEntry/Add" are logged to: + /var/log/bely/logEntry/Add.log + + Messages on "bely/logEntryReply/Update" are logged to: + /var/log/bely/logEntryReply/Update.log + """ + + def __init__( + self, + logging_dir: Optional[str] = None, + api_client: Optional[object] = None, + ): + """ + Initialize the advanced logging handler. + + Args: + logging_dir: Directory to store log files. If None, uses BELY_LOG_DIR + environment variable or current directory. + Directory is created if it doesn't exist. + api_client: Optional BELY API client (for compatibility with handler system). + + Examples: + # Use environment variable + BELY_LOG_DIR=/var/log/bely bely-mqtt start --handlers-dir ./handlers + + # Use direct instantiation + handler = AdvancedLoggingHandler(logging_dir="/var/log/bely") + + # Use configuration file + # config.json: + # { + # "handlers": { + # "AdvancedLoggingHandler": { + # "logging_dir": "/var/log/bely" + # } + # } + # } + # bely-mqtt start --handlers-dir ./handlers --config config.json + + # Use default (current directory) + handler = AdvancedLoggingHandler() + """ + super().__init__(api_client=api_client) + + # Set up logging directory + # Priority: parameter > environment variable > current directory + if logging_dir is None: + logging_dir = os.getenv("BELY_LOG_DIR", ".") + + self.logging_dir = Path(logging_dir) + self.logging_dir.mkdir(parents=True, exist_ok=True) + + # Dictionary to store loggers for each topic + self._topic_loggers: Dict[str, logging.Logger] = {} + + self.logger.info(f"AdvancedLoggingHandler initialized with directory: {self.logging_dir}") + + # Uses default topic_pattern "bely/#" - no need to override + # This handler will receive all BELY events + + def 
_get_logger_for_topic(self, topic: str) -> logging.Logger: + """ + Get or create a logger for the given topic. + + Creates a new logger if one doesn't exist for this topic. + The logger writes to a file named after the topic path. + + Args: + topic: The MQTT topic (e.g., "bely/logEntry/Add") + + Returns: + A configured logger for the topic. + """ + # Return cached logger if it exists + if topic in self._topic_loggers: + return self._topic_loggers[topic] + + # Create logger name from topic + logger_name = f"bely_mqtt.topic.{topic.replace('/', '.')}" + topic_logger = logging.getLogger(logger_name) + + # Set up file handler for this topic + try: + # Create subdirectories based on topic structure + # e.g., "bely/logEntry/Add" -> "bely/logEntry/Add.log" + topic_parts = topic.split("/") + log_dir = self.logging_dir / Path(*topic_parts[:-1]) + log_dir.mkdir(parents=True, exist_ok=True) + + log_file = log_dir / f"{topic_parts[-1]}.log" + + # Create file handler + file_handler = logging.FileHandler(log_file) + file_handler.setLevel(logging.DEBUG) + + # Create formatter + formatter = logging.Formatter( + "%(asctime)s - %(name)s - %(levelname)s - %(message)s" + ) + file_handler.setFormatter(formatter) + + # Add handler to logger + topic_logger.addHandler(file_handler) + topic_logger.setLevel(logging.DEBUG) + + # Prevent propagation to root logger + topic_logger.propagate = False + + self.logger.debug(f"Created logger for topic: {topic} -> {log_file}") + + except Exception as e: + self.logger.error(f"Failed to create logger for topic {topic}: {e}") + raise + + # Cache the logger + self._topic_loggers[topic] = topic_logger + return topic_logger + + async def handle(self, message: MQTTMessage) -> None: + """ + Log the incoming message to a file based on its topic. + + Args: + message: The MQTT message to log. 
+ """ + try: + # Get logger for this topic + topic_logger = self._get_logger_for_topic(message.topic) + + # Log the event + topic_logger.info(f"Event received on topic: {message.topic}") + topic_logger.debug(f"Payload: {json.dumps(message.payload, indent=2)}") + + # Also log to main logger + self.logger.debug(f"Logged event from {message.topic}") + + except Exception as e: + self.logger.error(f"Error logging message from {message.topic}: {e}", exc_info=True) + + def get_log_file_for_topic(self, topic: str) -> Path: + """ + Get the log file path for a given topic. + + Args: + topic: The MQTT topic. + + Returns: + Path to the log file for this topic. + """ + topic_parts = topic.split("/") + log_dir = self.logging_dir / Path(*topic_parts[:-1]) + return log_dir / f"{topic_parts[-1]}.log" + + def get_all_log_files(self) -> list[Path]: + """ + Get all log files created by this handler. + + Returns: + List of all log file paths. + """ + return list(self.logging_dir.rglob("*.log")) diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/notification_handler.py b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/notification_handler.py new file mode 100644 index 000000000..d22dc0fba --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/notification_handler.py @@ -0,0 +1,106 @@ +""" +Example notification handler using Apprise. + +This handler demonstrates how to send notifications for specific events. +It requires the 'apprise' extra to be installed: + pip install bely-mqtt-framework[apprise] +""" + +import logging +from typing import Optional + +from bely_mqtt.models import ( + LogEntryAddEvent, + LogEntryReplyAddEvent, + LogEntryUpdateEvent, + MQTTMessage, +) +from bely_mqtt.plugin import MQTTHandler + +logger = logging.getLogger(__name__) + +try: + import apprise + + APPRISE_AVAILABLE = True +except ImportError: + APPRISE_AVAILABLE = False + logger.warning( + "Apprise not installed. 
class NotificationHandler(MQTTHandler):
    """Sends notifications for BELY log-entry events through Apprise.

    Example of integrating with external notification services. Endpoints
    are expected to be configured via environment variables or config
    files; the default implementation only logs what it would send.
    """

    def __init__(self, *args, **kwargs):
        """Set up an Apprise instance when the library is importable."""
        super().__init__(*args, **kwargs)
        self.apprise_instance: Optional[apprise.Apprise] = None
        if APPRISE_AVAILABLE:
            self.apprise_instance = apprise.Apprise()

    @property
    def topic_pattern(self) -> str:
        """All log-entry events."""
        return "bely/logEntry/#"

    async def handle(self, message: MQTTMessage) -> None:
        """Dispatch add/update log-entry events to notification builders."""
        if not APPRISE_AVAILABLE:
            self.logger.warning("Apprise not available, skipping notification")
            return

        try:
            # Route by the action segment embedded in the topic.
            if "Add" in message.topic:
                await self._handle_add_event(message)
            elif "Update" in message.topic:
                await self._handle_update_event(message)
        except Exception as err:
            self.logger.error(f"Failed to handle notification event: {err}")

    async def _handle_add_event(self, message: MQTTMessage) -> None:
        """Build and send a notification for a new log entry."""
        event = LogEntryAddEvent(**message.payload)
        title = f"New Log Entry in {event.parent_log_document_info.name}"
        body = (
            f"User {event.event_triggered_by_username} added a new log entry.\n"
            f"Logbooks: {', '.join(lb.display_name or lb.name for lb in event.logbook_list)}"
        )
        await self._send_notification(title, body)

    async def _handle_update_event(self, message: MQTTMessage) -> None:
        """Build and send a notification for a modified log entry."""
        event = LogEntryUpdateEvent(**message.payload)
        title = f"Log Entry Updated in {event.parent_log_document_info.name}"
        body = (
            f"User {event.event_triggered_by_username} updated a log entry.\n"
            f"Changes:\n{event.text_diff[:200]}..."
        )
        await self._send_notification(title, body)

    async def _send_notification(self, title: str, body: str) -> None:
        """Deliver (or, by default, just log) a notification via Apprise."""
        if not self.apprise_instance:
            self.logger.warning("Apprise instance not initialized")
            return

        # Endpoints would be registered here, e.g.:
        #   self.apprise_instance.add('mailto://user:password@gmail.com')
        #   self.apprise_instance.add('discord://webhook_id/webhook_token')

        # Default behavior: log the notification instead of sending it.
        self.logger.info(f"Notification: {title}")
        self.logger.info(f"  {body}")

        # Uncomment to actually deliver:
        # self.apprise_instance.notify(
        #     body=body,
        #     title=title,
        # )
+ self.logger.info(f"Description: {description}") + + # You can access all event fields with type safety + self.logger.debug(f"Timestamp: {event.event_timestamp}") + self.logger.debug(f"Logbooks: {[lb.name for lb in event.logbook_list]}") + + except Exception as e: + self.logger.error(f"Failed to process log entry: {e}", exc_info=True) \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/systemd/bely-mqtt.env b/tools/developer_tools/bely-mqtt-message-broker/examples/systemd/bely-mqtt.env new file mode 100644 index 000000000..abf4c58a1 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/systemd/bely-mqtt.env @@ -0,0 +1,39 @@ +# BELY MQTT Framework Environment Configuration +# +# This file contains environment variables for the BELY MQTT Framework +# systemd service. Copy this to ~/.config/bely-mqtt/bely-mqtt.env and +# customize for your environment. + + +# Service Configuration +# Path to the service executable +# %h expands to user's home directory in systemd +SERVICE_PATH=%h/.local/bin/bely-mqtt + +# MQTT Broker Configuration +MQTT_BROKER_HOST=localhost +MQTT_BROKER_PORT=1883 +MQTT_CLIENT_ID= +MQTT_USERNAME= +MQTT_PASSWORD= +MQTT_TOPIC= + +# Logging Configuration +LOG_LEVEL=INFO +# Options: DEBUG, INFO, WARNING, ERROR, CRITICAL + +# Application Configuration +# Path to handler configuration JSON file +# Default: ~/.config/bely-mqtt/config.json +CONFIG_FILE=~/.config/bely-mqtt/config.json + +# Path to handlers directory +# Default: ~/.config/bely-mqtt/handlers +HANDLERS_DIR=~/.config/bely-mqtt/handlers + +# BELY API Configuration +BELY_API_URL= +BELY_API_KEY= + +# Additional environment file (optional) +ENV_FILE= diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/systemd/bely-mqtt.service b/tools/developer_tools/bely-mqtt-message-broker/examples/systemd/bely-mqtt.service new file mode 100644 index 000000000..7f267c804 --- /dev/null +++ 
b/tools/developer_tools/bely-mqtt-message-broker/examples/systemd/bely-mqtt.service @@ -0,0 +1,28 @@ +[Unit] +Description=BELY MQTT Message Broker Framework +After=network-online.target +Wants=network-online.target + +[Service] +Type=simple +EnvironmentFile=%h/.config/bely-mqtt/bely-mqtt.env +Restart=on-failure +RestartSec=5 + +# Service execution +ExecStart=/bin/sh -c '${SERVICE_PATH} start \ + --handlers-dir ${HANDLERS_DIR} \ + --config ${CONFIG_FILE} \ + ${MQTT_BROKER_HOST:+--broker-host=${MQTT_BROKER_HOST}} \ + ${MQTT_BROKER_PORT:+--broker-port=${MQTT_BROKER_PORT}} \ + ${MQTT_CLIENT_ID:+--client-id=${MQTT_CLIENT_ID}} \ + ${MQTT_USERNAME:+--username=${MQTT_USERNAME}} \ + ${MQTT_PASSWORD:+--password=${MQTT_PASSWORD}} \ + ${MQTT_TOPIC:+--topic=${MQTT_TOPIC}} \ + ${BELY_API_URL:+--api-url=${BELY_API_URL}} \ + ${BELY_API_KEY:+--api-key=${BELY_API_KEY}} \ + ${LOG_LEVEL:+--log-level=${LOG_LEVEL}} \ + ${ENV_FILE:+--env-file=${ENV_FILE}}' + +[Install] +WantedBy=default.target \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/pyproject.toml b/tools/developer_tools/bely-mqtt-message-broker/pyproject.toml new file mode 100644 index 000000000..2b0d798b7 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/pyproject.toml @@ -0,0 +1,76 @@ +[build-system] +requires = ["setuptools>=65.0", "wheel"] +build-backend = "setuptools.build_meta" + +[project] +name = "bely-mqtt-framework" +version = "0.1.0" +description = "Pluggable Python framework for handling BELY MQTT events" +readme = "README.md" +requires-python = ">=3.10" +license = {text = "MIT"} +authors = [ + {name = "BELY Team", email = "team@bely.dev"} +] +keywords = ["mqtt", "bely", "logbook", "plugins", "framework", "events", "async"] +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming 
Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Topic :: Software Development :: Libraries :: Python Modules", + "Topic :: System :: Monitoring", + "Framework :: AsyncIO", +] + +dependencies = [ + "click>=8.1.0", + "paho-mqtt>=1.6.1", + "pydantic>=2.0.0", + "python-dotenv>=1.0.0", + "pluggy>=1.3.0", + "PyYAML>=6.0.0", + "apprise>=1.4.0" +] + +[project.urls] +Homepage = "https://github.com/bely-org/bely-mqtt-framework" +Documentation = "https://github.com/bely-org/bely-mqtt-framework/tree/main/docs" +Repository = "https://github.com/bely-org/bely-mqtt-framework" +Issues = "https://github.com/bely-org/bely-mqtt-framework/issues" +Changelog = "https://github.com/bely-org/bely-mqtt-framework/blob/main/CHANGELOG.md" + +[project.optional-dependencies] +dev = [ + "pytest>=7.0.0", + "pytest-cov>=4.0.0", + "pytest-asyncio>=0.21.0", + "black>=23.0.0", + "ruff>=0.1.0", + "mypy>=1.0.0", + "types-paho-mqtt>=1.6.0", + "types-PyYAML>=6.0.0", +] +apprise = [ + "apprise>=1.4.0", +] + +[project.scripts] +bely-mqtt = "bely_mqtt.cli:main" + +[tool.black] +line-length = 100 +target-version = ['py310'] + +[tool.ruff] +line-length = 100 +target-version = "py310" + +[tool.mypy] +python_version = "3.10" +warn_return_any = true +warn_unused_configs = true +disallow_untyped_defs = false diff --git a/tools/developer_tools/bely-mqtt-message-broker/pytest.ini b/tools/developer_tools/bely-mqtt-message-broker/pytest.ini new file mode 100644 index 000000000..1a2e83f7a --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/pytest.ini @@ -0,0 +1,17 @@ +[pytest] +minversion = 7.0 +testpaths = tests +python_files = test_*.py +python_classes = Test* +python_functions = test_* +asyncio_mode = auto +asyncio_default_fixture_loop_scope = function +addopts = + -v + --strict-markers + --tb=short + +markers = + asyncio: marks tests as async (deselect with '-m "not asyncio"') + integration: marks tests as integration tests + slow: marks tests as slow diff --git 
a/tools/developer_tools/bely-mqtt-message-broker/sample-mosquitto_sub_output b/tools/developer_tools/bely-mqtt-message-broker/sample-mosquitto_sub_output new file mode 100644 index 000000000..d9ce21caf --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/sample-mosquitto_sub_output @@ -0,0 +1,23 @@ +# Sample mqtt shell output + +mosquitto_sub -v -t "bely/#" + +bely/add {"description":"Add action completed","eventTimestamp":"2025-11-21T16:37:52.537+00:00","entityName":"ItemDomainLogbook","entityId":105,"eventTriggedByUsername":"logr"} +bely/add {"description":"Add action completed","eventTimestamp":"2025-11-21T16:38:06.160+00:00","entityName":"Log","entityId":267,"eventTriggedByUsername":"logr"} +bely/logEntry/Add {"description":"log entry was added","eventTimestamp":"2025-11-21T16:38:06.157+00:00","parentLogDocumentInfo":{"name":"[2025/11/21/2] New Document","id":105,"lastModifiedOnDateTime":"2025-11-21T16:37:42.000+00:00","createdByUsername":"logr","lastModifiedByUsername":"logr","enteredOnDateTime":"2025-11-21T16:37:42.000+00:00","ownerUsername":"logr","ownerUserGroupName":"LOGR_ADMIN"},"logInfo":{"id":267,"lastModifiedOnDateTime":"2025-11-21T16:38:00.392+00:00","lastModifiedByUsername":"logr","enteredByUsername":"logr","enteredOnDateTime":"2025-11-21T16:38:00.392+00:00"},"logbookList":[{"name":"studies-sr","id":7,"displayName":"SR"}],"textDiff":"New Log Entry Added","entityName":"Log","entityId":267,"eventTriggedByUsername":"logr"} +bely/update {"description":"Update action completed","eventTimestamp":"2025-11-21T16:38:06.164+00:00","entityName":"EntityInfo","entityId":115,"eventTriggedByUsername":"logr"} +bely/update {"description":"Update action completed","eventTimestamp":"2025-11-21T16:38:14.821+00:00","entityName":"Log","entityId":267,"eventTriggedByUsername":"logr"} +bely/logEntry/Update {"description":"log entry id [267] was modified","eventTimestamp":"2025-11-21T16:38:14.819+00:00","parentLogDocumentInfo":{"name":"[2025/11/21/2] New 
Document","id":105,"lastModifiedOnDateTime":"2025-11-21T16:38:06.000+00:00","createdByUsername":"logr","lastModifiedByUsername":"logr","enteredOnDateTime":"2025-11-21T16:37:42.000+00:00","ownerUsername":"logr","ownerUserGroupName":"LOGR_ADMIN"},"logInfo":{"id":267,"lastModifiedOnDateTime":"2025-11-21T16:38:14.819+00:00","lastModifiedByUsername":"logr","enteredByUsername":"logr","enteredOnDateTime":"2025-11-21T16:38:00.000+00:00"},"logbookList":[{"name":"studies-sr","id":7,"displayName":"SR"}],"textDiff":"- New Log Entry Added\n+ New Log Entry Added\r\n+ \r\n+ Same Entry Updated\r\n","entityName":"Log","entityId":267,"eventTriggedByUsername":"logr"} +bely/update {"description":"Update action completed","eventTimestamp":"2025-11-21T16:38:14.827+00:00","entityName":"EntityInfo","entityId":115,"eventTriggedByUsername":"logr"} +bely/add {"description":"Add action completed","eventTimestamp":"2025-11-21T16:38:20.221+00:00","entityName":"Log","entityId":268,"eventTriggedByUsername":"logr"} +bely/logEntryReply/Add {"description":"reply log entry was added","eventTimestamp":"2025-11-21T16:38:20.219+00:00","parentLogDocumentInfo":{"name":"[2025/11/21/2] New Document","id":105,"lastModifiedOnDateTime":"2025-11-21T16:38:14.000+00:00","createdByUsername":"logr","lastModifiedByUsername":"logr","enteredOnDateTime":"2025-11-21T16:37:42.000+00:00","ownerUsername":"logr","ownerUserGroupName":"LOGR_ADMIN"},"logInfo":{"id":268,"lastModifiedOnDateTime":"2025-11-21T16:38:16.169+00:00","lastModifiedByUsername":"logr","enteredByUsername":"logr","enteredOnDateTime":"2025-11-21T16:38:16.169+00:00"},"logbookList":[{"name":"studies-sr","id":7,"displayName":"SR"}],"textDiff":"Reply Added","parentlogInfo":{"id":267,"lastModifiedOnDateTime":"2025-11-21T16:38:14.000+00:00","lastModifiedByUsername":"logr","enteredByUsername":"logr","enteredOnDateTime":"2025-11-21T16:38:00.000+00:00"},"entityName":"Log","entityId":268,"eventTriggedByUsername":"logr"} +bely/update {"description":"Update action 
completed","eventTimestamp":"2025-11-21T16:38:20.226+00:00","entityName":"EntityInfo","entityId":115,"eventTriggedByUsername":"logr"} +bely/update {"description":"Update action completed","eventTimestamp":"2025-11-21T16:38:31.075+00:00","entityName":"Log","entityId":268,"eventTriggedByUsername":"logr"} +bely/logEntryReply/Update {"description":"reply log entry id [268] was modified","eventTimestamp":"2025-11-21T16:38:31.073+00:00","parentLogDocumentInfo":{"name":"[2025/11/21/2] New Document","id":105,"lastModifiedOnDateTime":"2025-11-21T16:38:20.000+00:00","createdByUsername":"logr","lastModifiedByUsername":"logr","enteredOnDateTime":"2025-11-21T16:37:42.000+00:00","ownerUsername":"logr","ownerUserGroupName":"LOGR_ADMIN"},"logInfo":{"id":268,"lastModifiedOnDateTime":"2025-11-21T16:38:31.073+00:00","lastModifiedByUsername":"logr","enteredByUsername":"logr","enteredOnDateTime":"2025-11-21T16:38:16.000+00:00"},"logbookList":[{"name":"studies-sr","id":7,"displayName":"SR"}],"textDiff":"- Reply Added\n+ Reply Added\r\n+ \r\n+ Reply Modified\n","parentlogInfo":{"id":267,"lastModifiedOnDateTime":"2025-11-21T16:38:14.000+00:00","lastModifiedByUsername":"logr","enteredByUsername":"logr","enteredOnDateTime":"2025-11-21T16:38:00.000+00:00"},"entityName":"Log","entityId":268,"eventTriggedByUsername":"logr"} +bely/update {"description":"Update action completed","eventTimestamp":"2025-11-21T16:38:31.080+00:00","entityName":"EntityInfo","entityId":115,"eventTriggedByUsername":"logr"} +bely/delete {"description":"Delete action completed","eventTimestamp":"2025-12-17T21:25:44.039+00:00","entityName":"Log","eventTriggedByUsername":"logr","entityId":214} +bely/logEntryReply/Delete {"description":"reply log entry was deleted","eventTimestamp":"2025-12-17T21:25:44.033+00:00","parentLogDocumentInfo":{"name":"[2025/11/17/2] 
dfsg","id":104,"lastModifiedOnDateTime":"2025-12-17T21:21:06.000+00:00","createdByUsername":"logr","lastModifiedByUsername":"logr","ownerUsername":"logr","ownerUserGroupName":"LOGR_ADMIN","enteredOnDateTime":"2025-11-17T20:45:34.000+00:00"},"logInfo":{"id":214,"lastModifiedOnDateTime":"2025-11-20T18:35:33.000+00:00","lastModifiedByUsername":"logr","enteredByUsername":"logr","enteredOnDateTime":"2025-11-18T21:05:32.000+00:00"},"logbookList":[{"name":"studies-sr","id":7,"displayName":"SR"}],"textDiff":"TEST\r\n\r\nsadf","parentLogInfo":{"id":212,"lastModifiedOnDateTime":"2025-11-24T18:35:55.000+00:00","lastModifiedByUsername":"logr","enteredByUsername":"logr","enteredOnDateTime":"2025-11-18T14:19:36.000+00:00"},"entityName":"Log","eventTriggedByUsername":"logr","entityId":214} +bely/update {"description":"Update action completed","eventTimestamp":"2025-12-17T21:25:44.048+00:00","entityName":"EntityInfo","eventTriggedByUsername":"logr","entityId":114} +bely/delete {"description":"Delete action completed","eventTimestamp":"2025-12-17T21:25:51.958+00:00","entityName":"Log","eventTriggedByUsername":"logr","entityId":208} +bely/logEntry/Delete {"description":"log entry was deleted","eventTimestamp":"2025-12-17T21:25:51.954+00:00","parentLogDocumentInfo":{"name":"[2025/11/17/2] dfsg","id":104,"lastModifiedOnDateTime":"2025-12-17T21:25:44.000+00:00","createdByUsername":"logr","lastModifiedByUsername":"logr","ownerUsername":"logr","ownerUserGroupName":"LOGR_ADMIN","enteredOnDateTime":"2025-11-17T20:45:34.000+00:00"},"logInfo":{"id":208,"lastModifiedOnDateTime":"2025-11-18T13:31:38.000+00:00","lastModifiedByUsername":"logr","enteredByUsername":"logr","enteredOnDateTime":"2025-11-18T13:31:38.000+00:00"},"logbookList":[{"name":"studies-sr","id":7,"displayName":"SR"}],"textDiff":"hello World","entityName":"Log","eventTriggedByUsername":"logr","entityId":208} +bely/update {"description":"Update action 
completed","eventTimestamp":"2025-12-17T21:25:51.963+00:00","entityName":"EntityInfo","eventTriggedByUsername":"logr","entityId":114} \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/setup.py b/tools/developer_tools/bely-mqtt-message-broker/setup.py new file mode 100644 index 000000000..d5f755f30 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/setup.py @@ -0,0 +1,58 @@ +"""Setup configuration for BELY MQTT Framework.""" + +from setuptools import find_packages, setup + +setup( + name="bely-mqtt-framework", + version="0.1.0", + description="Pluggable Python framework for handling BELY MQTT events", + long_description=open("README.md").read(), + long_description_content_type="text/markdown", + author="BELY Team", + author_email="team@bely.dev", + url="https://github.com/bely/mqtt-framework", + license="MIT", + packages=find_packages(where="src"), + package_dir={"": "src"}, + python_requires=">=3.9", + install_requires=[ + "click>=8.1.0", + "paho-mqtt>=1.6.1", + "pydantic>=2.0.0", + "python-dotenv>=1.0.0", + "pluggy>=1.3.0", + ], + extras_require={ + "dev": [ + "pytest>=7.0.0", + "pytest-cov>=4.0.0", + "pytest-asyncio>=0.21.0", + "black>=23.0.0", + "ruff>=0.1.0", + "mypy>=1.0.0", + "types-paho-mqtt>=1.6.0", + ], + "apprise": [ + "apprise>=1.4.0", + ], + }, + entry_points={ + "console_scripts": [ + "bely-mqtt=bely_mqtt.cli:main", + ], + }, + classifiers=[ + "Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Topic :: Software Development :: Libraries :: Python Modules", + "Topic :: Communications :: Email", + "Topic :: System :: Monitoring", + ], + keywords="mqtt bely logbook plugins framework", +) diff --git 
"""
BELY MQTT Framework - Pluggable Python framework for handling BELY MQTT events.

This framework provides:
- Pluggable handler system for MQTT topics
- Type-safe models for BELY events (CoreEvent, LogEntryAddEvent, etc.)
- Integration with BELY API for additional data
- CLI for easy configuration and management
- Specific event handlers for different event types
"""

# Topic <-> event-type mapping.
from bely_mqtt.events import EventType

# Pydantic payload models for every supported broker message.
from bely_mqtt.models import (
    BaseEvent,
    CoreEvent,
    LogbookInfo,
    LogDocumentInfo,
    LogEntryEventBase,
    LogEntryAddEvent,
    LogEntryUpdateEvent,
    LogEntryDeleteEvent,
    LogEntryReplyAddEvent,
    LogEntryReplyUpdateEvent,
    LogEntryReplyDeleteEvent,
    LogInfo,
    LogReactionEventBase,
    LogReactionAddEvent,
    LogReactionDeleteEvent,
    LogReactionInfo,
    MQTTMessage,
    ReactionId,
    ReactionInfo,
)

# Broker client and plugin/handler infrastructure.
from bely_mqtt.mqtt_client import BelyMQTTClient
from bely_mqtt.plugin import BelyAPIClient, MQTTHandler, PluginManager

__version__ = "0.1.0"

# Public API re-exported at package level (what `from bely_mqtt import *` gives).
__all__ = [
    "BelyMQTTClient",
    "BelyAPIClient",
    "MQTTHandler",
    "PluginManager",
    "EventType",
    "BaseEvent",
    "CoreEvent",
    "LogEntryEventBase",
    "LogEntryAddEvent",
    "LogEntryUpdateEvent",
    "LogEntryDeleteEvent",
    "LogEntryReplyAddEvent",
    "LogEntryReplyUpdateEvent",
    "LogEntryReplyDeleteEvent",
    "LogReactionEventBase",
    "LogReactionAddEvent",
    "LogReactionDeleteEvent",
    "LogDocumentInfo",
    "LogInfo",
    "LogbookInfo",
    "LogReactionInfo",
    "ReactionInfo",
    "ReactionId",
    "MQTTMessage",
]
"""
Command-line interface for BELY MQTT framework.
"""

import logging
import os
import sys
from pathlib import Path
from typing import Optional

import click
from dotenv import load_dotenv

from bely_mqtt import __version__
from bely_mqtt.config import ConfigManager
from bely_mqtt.mqtt_client import BelyMQTTClient
from bely_mqtt.plugin import BelyAPIClient, PluginManager

# Configure logging
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
)
logger = logging.getLogger(__name__)


@click.group()
@click.version_option(version=__version__, prog_name="bely-mqtt")
def cli() -> None:
    """BELY MQTT Framework - Pluggable MQTT handler for BELY events."""
    pass


@cli.command()
@click.option(
    "--broker-host",
    default="localhost",
    envvar="MQTT_BROKER_HOST",
    help="MQTT broker hostname or IP address.",
)
@click.option(
    "--broker-port",
    default=1883,
    type=int,
    envvar="MQTT_BROKER_PORT",
    help="MQTT broker port.",
)
@click.option(
    "--client-id",
    default="bely-mqtt-client",
    envvar="MQTT_CLIENT_ID",
    help="MQTT client ID.",
)
@click.option(
    "--username",
    default=None,
    envvar="MQTT_USERNAME",
    help="MQTT broker username.",
)
@click.option(
    "--password",
    default=None,
    envvar="MQTT_PASSWORD",
    help="MQTT broker password.",
)
@click.option(
    "--topic",
    "-t",
    multiple=True,
    default=["bely/#"],
    help="MQTT topic(s) to subscribe to. Can be specified multiple times.",
)
@click.option(
    "--handlers-dir",
    type=click.Path(exists=False, file_okay=False, dir_okay=True, path_type=Path),
    default=None,
    envvar="BELY_HANDLERS_DIR",
    help="Directory containing handler plugins.",
)
@click.option(
    "--api-url",
    default=None,
    envvar="BELY_API_URL",
    help="BELY API base URL for querying additional information.",
)
@click.option(
    "--api-key",
    default=None,
    envvar="BELY_API_KEY",
    help="BELY API key for authentication.",
)
@click.option(
    "--log-level",
    type=click.Choice(["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]),
    default="INFO",
    envvar="LOG_LEVEL",
    help="Logging level.",
)
@click.option(
    "--env-file",
    type=click.Path(exists=True, file_okay=True, dir_okay=False, path_type=Path),
    default=None,
    help="Path to .env file for environment variables.",
)
@click.option(
    "--config",
    type=click.Path(exists=True, file_okay=True, dir_okay=False, path_type=Path),
    default=None,
    envvar="BELY_CONFIG",
    help="Path to configuration file for handlers (YAML format).",
)
def start(
    broker_host: str,
    broker_port: int,
    client_id: str,
    username: Optional[str],
    password: Optional[str],
    topic: tuple[str, ...],
    handlers_dir: Optional[Path],
    api_url: Optional[str],
    api_key: Optional[str],
    log_level: str,
    env_file: Optional[Path],
    config: Optional[Path],
) -> None:
    """Start the BELY MQTT client with registered handlers."""
    # Load environment variables from file if provided.
    if env_file:
        load_dotenv(env_file)
        # BUG FIX: click resolves `envvar=`-backed options *before* this
        # callback runs, so values that only exist in the .env file were
        # invisible to the options above.  Backfill the None-default options
        # from the now-populated environment; explicit CLI values still win.
        username = username or os.environ.get("MQTT_USERNAME")
        password = password or os.environ.get("MQTT_PASSWORD")
        api_url = api_url or os.environ.get("BELY_API_URL")
        api_key = api_key or os.environ.get("BELY_API_KEY")
        if handlers_dir is None and os.environ.get("BELY_HANDLERS_DIR"):
            handlers_dir = Path(os.environ["BELY_HANDLERS_DIR"])

    # Set logging level (applies to the root logger, hence all module loggers).
    logging.getLogger().setLevel(log_level)

    logger.info("Starting BELY MQTT Framework")
    logger.info(f"Broker: {broker_host}:{broker_port}")
    logger.info(f"Topics: {', '.join(topic)}")

    # Initialize API client if URL is provided
    api_client = None
    if api_url:
        api_client = BelyAPIClient(base_url=api_url, api_key=api_key)
        logger.info(f"BELY API client initialized: {api_url}")

    # Initialize configuration manager
    config_manager = None
    if config:
        config_manager = ConfigManager()
        try:
            config_manager.load_from_file(config)
            logger.info(f"Loaded handler configuration from: {config}")
        except Exception as e:
            logger.error(f"Failed to load configuration file: {e}")
            sys.exit(1)

    # Initialize plugin manager
    plugin_manager = PluginManager(api_client=api_client, config_manager=config_manager)

    # Load handlers from directory if provided
    if handlers_dir:
        logger.info(f"Loading handlers from: {handlers_dir}")
        plugin_manager.load_handlers_from_directory(handlers_dir)

    if not plugin_manager.handlers:
        logger.warning("No handlers registered. Messages will be received but not processed.")

    # Initialize MQTT client
    mqtt_client = BelyMQTTClient(
        broker_host=broker_host,
        broker_port=broker_port,
        client_id=client_id,
        username=username,
        password=password,
        plugin_manager=plugin_manager,
    )

    # Subscribe to topics (queued until connect if not yet connected).
    for t in topic:
        mqtt_client.subscribe(t)

    # Start the client (blocking until interrupted or a fatal error).
    try:
        mqtt_client.start()
    except KeyboardInterrupt:
        logger.info("Shutting down...")
        sys.exit(0)
    except Exception as e:
        logger.error(f"Fatal error: {e}", exc_info=True)
        sys.exit(1)
"""
Configuration system for BELY MQTT handlers.

This module provides a configuration system that allows handlers to receive
configuration parameters when they are instantiated.
"""

import logging
from pathlib import Path
from typing import Any, Dict, Optional

import yaml

logger = logging.getLogger(__name__)


class GlobalConfig:
    """
    Global configuration shared across all handlers.

    Stores configuration parameters that will be available to all handlers.
    """

    def __init__(self, config: Optional[Dict[str, Any]] = None):
        """
        Initialize global configuration.

        Args:
            config: Dictionary of global configuration parameters.
        """
        self.config = config or {}

    def get(self, key: str, default: Any = None) -> Any:
        """
        Get a configuration value.

        Args:
            key: Configuration key.
            default: Default value if key not found.

        Returns:
            Configuration value or default.
        """
        return self.config.get(key, default)

    @property
    def bely_url(self) -> Optional[str]:
        """
        Get the BELY URL from configuration.

        Returns:
            BELY URL if configured, None otherwise.
        """
        return self.config.get("bely_url")

    def __repr__(self) -> str:
        """Return string representation."""
        return f"GlobalConfig({self.config})"


class HandlerConfig:
    """
    Configuration for a handler.

    Stores configuration parameters that will be passed to handler constructors.
    """

    def __init__(self, handler_name: str, config: Optional[Dict[str, Any]] = None):
        """
        Initialize handler configuration.

        Args:
            handler_name: Name of the handler class.
            config: Dictionary of configuration parameters.
        """
        self.handler_name = handler_name
        self.config = config or {}

    def get(self, key: str, default: Any = None) -> Any:
        """
        Get a configuration value.

        Args:
            key: Configuration key.
            default: Default value if key not found.

        Returns:
            Configuration value or default.
        """
        return self.config.get(key, default)

    def __repr__(self) -> str:
        """Return string representation."""
        return f"HandlerConfig({self.handler_name}, {self.config})"


class ConfigManager:
    """
    Manages configuration for handlers.

    Loads configuration from files or dictionaries and provides it to handlers.
    Supports both global configuration (shared across all handlers) and
    handler-specific configuration.
    """

    def __init__(self):
        """Initialize the configuration manager."""
        self.global_config: Optional[GlobalConfig] = None
        self.configs: Dict[str, HandlerConfig] = {}
        self.logger = logging.getLogger(__name__)

    def load_from_file(self, config_file: Path) -> None:
        """
        Load configuration from a YAML file.

        File format:
            global:
              shared_param: value
              another_param: value
              bely_url: https://bely.example.com

            handlers:
              AdvancedLoggingHandler:
                logging_dir: /var/log/bely
              MyHandler:
                param1: value1
                param2: value2
              AppriseSmartNotificationHandler:
                config_path: /path/to/apprise_config.yaml

        Args:
            config_file: Path to the YAML configuration file.

        Raises:
            FileNotFoundError: If the configuration file doesn't exist.
            yaml.YAMLError: If the file is not valid YAML.
            ValueError: If the YAML root is not a mapping.
        """
        config_file = Path(config_file)
        if not config_file.exists():
            raise FileNotFoundError(f"Configuration file not found: {config_file}")

        try:
            with open(config_file, encoding="utf-8") as f:
                # BUG FIX: safe_load returns None for an empty document, which
                # previously made `"global" in data` raise TypeError.
                data = yaml.safe_load(f) or {}

            if not isinstance(data, dict):
                raise ValueError(
                    f"Configuration root must be a mapping, got {type(data).__name__}"
                )

            # Load global configuration if present
            if "global" in data:
                self.set_global_config(data["global"])
                self.logger.info(f"Loaded global configuration: {data['global']}")

            # Load handler-specific configurations
            handlers_config = data.get("handlers", {})
            for handler_name, config in handlers_config.items():
                self.set_config(handler_name, config)

            self.logger.info(f"Loaded configuration from {config_file}")
        except yaml.YAMLError as e:
            self.logger.error(f"Invalid YAML in configuration file: {e}")
            raise

    def load_from_dict(self, config_dict: Dict[str, Any]) -> None:
        """
        Load configuration from a dictionary.

        Args:
            config_dict: Dictionary with optional "global" and "handlers" keys.
        """
        # Load global configuration if present
        if "global" in config_dict:
            self.set_global_config(config_dict["global"])

        # Load handler-specific configurations
        handlers_config = config_dict.get("handlers", {})
        for handler_name, config in handlers_config.items():
            self.set_config(handler_name, config)

    def set_global_config(self, config: Dict[str, Any]) -> None:
        """
        Set global configuration.

        Args:
            config: Global configuration dictionary.
        """
        self.global_config = GlobalConfig(config)
        self.logger.debug(f"Set global configuration: {config}")

    def get_global_config(self) -> Optional[GlobalConfig]:
        """
        Get global configuration.

        Returns:
            GlobalConfig if set, None otherwise.
        """
        return self.global_config

    def set_config(self, handler_name: str, config: Dict[str, Any]) -> None:
        """
        Set configuration for a handler.

        Args:
            handler_name: Name of the handler class.
            config: Configuration dictionary.
        """
        self.configs[handler_name] = HandlerConfig(handler_name, config)
        self.logger.debug(f"Set configuration for {handler_name}: {config}")

    def get_config(self, handler_name: str) -> Optional[HandlerConfig]:
        """
        Get configuration for a handler.

        Args:
            handler_name: Name of the handler class.

        Returns:
            HandlerConfig if found, None otherwise.
        """
        return self.configs.get(handler_name)

    def has_config(self, handler_name: str) -> bool:
        """
        Check if configuration exists for a handler.

        Args:
            handler_name: Name of the handler class.

        Returns:
            True if configuration exists.
        """
        return handler_name in self.configs

    def __repr__(self) -> str:
        """Return string representation."""
        return f"ConfigManager(global={self.global_config}, handlers={self.configs})"
+ + Generic Events: + GENERIC_ADD - Generic add event (bely/add) + GENERIC_UPDATE - Generic update event (bely/update) + GENERIC_DELETE - Generic delete event (bely/delete) + + Log Entry Events: + LOG_ENTRY_ADD - Log entry added (bely/logEntry/Add) + LOG_ENTRY_UPDATE - Log entry updated (bely/logEntry/Update) + LOG_ENTRY_DELETE - Log entry deleted (bely/logEntry/Delete) + + Log Entry Reply Events: + LOG_ENTRY_REPLY_ADD - Reply added (bely/logEntryReply/Add) + LOG_ENTRY_REPLY_UPDATE - Reply updated (bely/logEntryReply/Update) + LOG_ENTRY_REPLY_DELETE - Reply deleted (bely/logEntryReply/Delete) + + Log Reaction Events: + LOG_REACTION_ADD - Reaction added (bely/logReaction/Add) + LOG_REACTION_DELETE - Reaction deleted (bely/logReaction/Delete) + """ + + # Generic events + GENERIC_ADD = "bely/add" + GENERIC_UPDATE = "bely/update" + GENERIC_DELETE = "bely/delete" + + # Log entry events + LOG_ENTRY_ADD = "bely/logEntry/Add" + LOG_ENTRY_UPDATE = "bely/logEntry/Update" + LOG_ENTRY_DELETE = "bely/logEntry/Delete" + + # Log entry reply events + LOG_ENTRY_REPLY_ADD = "bely/logEntryReply/Add" + LOG_ENTRY_REPLY_UPDATE = "bely/logEntryReply/Update" + LOG_ENTRY_REPLY_DELETE = "bely/logEntryReply/Delete" + + # Log reaction events + LOG_REACTION_ADD = "bely/logReaction/Add" + LOG_REACTION_DELETE = "bely/logReaction/Delete" + + def __str__(self) -> str: + """Return the topic pattern for this event type.""" + return self.value + + @classmethod + def from_topic(cls, topic: str) -> Optional["EventType"]: + """ + Get EventType from MQTT topic. + + Args: + topic: MQTT topic string (e.g., "bely/logEntry/Add") + + Returns: + EventType if topic matches, None otherwise. + + Examples: + >>> EventType.from_topic("bely/logEntry/Add") + + + >>> EventType.from_topic("bely/unknown") + None + """ + for event_type in cls: + if event_type.value == topic: + return event_type + return None + + @property + def handler_method_name(self) -> str: + """ + Get the handler method name for this event type. 
+ + Returns: + Method name like "handle_log_entry_add" + + Examples: + >>> EventType.LOG_ENTRY_ADD.handler_method_name + 'handle_log_entry_add' + + >>> EventType.GENERIC_UPDATE.handler_method_name + 'handle_generic_update' + """ + # Convert enum name to snake_case method name + # e.g., LOG_ENTRY_ADD -> handle_log_entry_add + return f"handle_{self.name.lower()}" diff --git a/tools/developer_tools/bely-mqtt-message-broker/src/bely_mqtt/models.py b/tools/developer_tools/bely-mqtt-message-broker/src/bely_mqtt/models.py new file mode 100644 index 000000000..c4f9a79ef --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/src/bely_mqtt/models.py @@ -0,0 +1,200 @@ +""" +Data models for BELY MQTT events. + +These models are designed to be mappable to MQTT messages from BELY. +""" + +from datetime import datetime +from typing import Any, Dict, List, Optional + +from pydantic import BaseModel, ConfigDict, Field + + +class LogbookInfo(BaseModel): + """Information about a logbook.""" + + model_config = ConfigDict(populate_by_name=True) + + name: str + id: int + display_name: Optional[str] = Field(None, alias="displayName") + + +class UserInfo(BaseModel): + """Basic user information.""" + + username: str + + +class LogDocumentInfo(BaseModel): + """Information about a log document.""" + + model_config = ConfigDict(populate_by_name=True) + + name: str + id: int + last_modified_on_date_time: Optional[datetime] = Field(None, alias="lastModifiedOnDateTime") + created_by_username: Optional[str] = Field(None, alias="createdByUsername") + last_modified_by_username: Optional[str] = Field(None, alias="lastModifiedByUsername") + entered_on_date_time: Optional[datetime] = Field(None, alias="enteredOnDateTime") + owner_username: Optional[str] = Field(None, alias="ownerUsername") + owner_user_group_name: Optional[str] = Field(None, alias="ownerUserGroupName") + + +class LogInfo(BaseModel): + """Information about a log entry.""" + + model_config = ConfigDict(populate_by_name=True) 
# Imported here so this fix is self-contained; models.py's top import line
# only brings in BaseModel/ConfigDict/Field.
from pydantic import AliasChoices


class LogEntryReplyEventBase(LogEntryEventBase):
    """Base class for log entry reply events.

    Extends LogEntryEventBase with parent log information for replies.
    """

    # BUG FIX: observed broker payloads are inconsistent about this key's
    # casing — logEntryReply/Add and /Update publish "parentlogInfo"
    # (lower-case "l") while logEntryReply/Delete publishes "parentLogInfo"
    # (see the sample mosquitto_sub output).  Accepting only "parentLogInfo"
    # made Add/Update reply events fail validation with a missing field, so
    # accept both spellings on input and keep the camelCase form on output.
    parent_log_info: LogInfo = Field(
        validation_alias=AliasChoices("parentLogInfo", "parentlogInfo"),
        serialization_alias="parentLogInfo",
    )
"""Event triggered when a reaction is added to a log entry.""" + + pass + + +class LogReactionDeleteEvent(LogReactionEventBase): + """Event triggered when a reaction is deleted from a log entry.""" + + pass + + +class MQTTMessage(BaseModel): + """Wrapper for an MQTT message with topic and payload.""" + + model_config = ConfigDict(arbitrary_types_allowed=True) + + topic: str + payload: Dict[str, Any] + raw_payload: str = "" diff --git a/tools/developer_tools/bely-mqtt-message-broker/src/bely_mqtt/mqtt_client.py b/tools/developer_tools/bely-mqtt-message-broker/src/bely_mqtt/mqtt_client.py new file mode 100644 index 000000000..b20e6f907 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/src/bely_mqtt/mqtt_client.py @@ -0,0 +1,218 @@ +""" +MQTT client for connecting to BELY message broker. +""" + +import asyncio +import json +import logging +from typing import Any, Optional + +import paho.mqtt.client as mqtt +from paho.mqtt.client import ReasonCodes +from paho.mqtt.client import Properties + +from bely_mqtt.models import MQTTMessage +from bely_mqtt.plugin import PluginManager + +logger = logging.getLogger(__name__) + + +class BelyMQTTClient: + """ + MQTT client for BELY events. + + Connects to an MQTT broker and routes messages to registered handlers. + """ + + def __init__( + self, + broker_host: str, + broker_port: int = 1883, + client_id: str = "bely-mqtt-client", + username: Optional[str] = None, + password: Optional[str] = None, + plugin_manager: Optional[PluginManager] = None, + ): + """ + Initialize the MQTT client. + + Args: + broker_host: MQTT broker hostname or IP. + broker_port: MQTT broker port (default: 1883). + client_id: MQTT client ID. + username: Optional MQTT username. + password: Optional MQTT password. + plugin_manager: PluginManager instance for handling messages. 
+ """ + self.broker_host = broker_host + self.broker_port = broker_port + self.client_id = client_id + self.username = username + self.password = password + self.plugin_manager = plugin_manager or PluginManager() + self.logger = logging.getLogger(__name__) + + self.client = mqtt.Client(mqtt.CallbackAPIVersion.VERSION2, client_id) # type: ignore[attr-defined, arg-type] + self.client.on_connect = self._on_connect + self.client.on_message = self._on_message + self.client.on_disconnect = self._on_disconnect # type: ignore[assignment] + self.client.on_subscribe = self._on_subscribe + + if username and password: + self.client.username_pw_set(username, password) + + self._subscribed_topics: set[str] = set() + self._running = False + self._loop: Optional[asyncio.AbstractEventLoop] = None + + def subscribe(self, topic: str) -> None: + """ + Subscribe to an MQTT topic. + + Args: + topic: The topic pattern to subscribe to. + """ + self._subscribed_topics.add(topic) + if self.client.is_connected(): + self.client.subscribe(topic) + self.logger.info(f"Subscribed to topic: {topic}") + + # CallbackOnConnect_v2 = Callable[["Client", Any, ConnectFlags, ReasonCode, Union[Properties, None]], None] + # Callable[[Client, Any, Optional[ReasonCodes], Optional[Properties]], object], None]") + def _on_connect( + self, + client: mqtt.Client, + userdata: Any, + connect_flags: dict[str, int], + reason_code: ReasonCodes, + properties: Optional[Properties], + ) -> None: + """Handle MQTT connection.""" + if reason_code == 0: + self.logger.info(f"Connected to MQTT broker at {self.broker_host}:{self.broker_port}") + # Resubscribe to all topics + for topic in self._subscribed_topics: + client.subscribe(topic) + self.logger.info(f"Subscribed to topic: {topic}") + else: + self.logger.error(f"Failed to connect to MQTT broker: {reason_code}") + + def _on_message( + self, + client: mqtt.Client, + userdata: object, + msg: mqtt.MQTTMessage, + ) -> None: + """Handle incoming MQTT message.""" + try: + 
payload_str = msg.payload.decode("utf-8") + payload_dict = json.loads(payload_str) + + message = MQTTMessage( + topic=msg.topic, + payload=payload_dict, + raw_payload=payload_str, + ) + + # Schedule the async handler on the event loop + if self._loop and self._loop.is_running(): + asyncio.run_coroutine_threadsafe( + self.plugin_manager.handle_message(message), + self._loop, + ) + else: + self.logger.warning("Event loop not running, cannot process message") + + except json.JSONDecodeError as e: + self.logger.error(f"Failed to parse JSON payload from {msg.topic}: {e}") + except Exception as e: + self.logger.error(f"Error processing message from {msg.topic}: {e}") + + def _on_disconnect( + self, + client: mqtt.Client, + userdata: Any, + disconnect_flags: dict[str, int], + reason_code: Optional[ReasonCodes], + properties: Optional[Properties], + ) -> None: + """Handle MQTT disconnection.""" + if reason_code == 0: + self.logger.info("Disconnected from MQTT broker") + else: + self.logger.warning(f"Unexpected disconnection from MQTT broker: {reason_code}") + + def _on_subscribe( + self, + client: mqtt.Client, + userdata: object, + mid: int, + reason_code_list: list[mqtt.ReasonCodes], + properties: mqtt.Properties, + ) -> None: + """Handle subscription confirmation.""" + for reason_code in reason_code_list: + if reason_code == 0: + self.logger.debug("Subscription successful") + else: + self.logger.warning(f"Subscription failed: {reason_code}") + + def connect(self) -> None: + """Connect to the MQTT broker.""" + try: + self.client.connect(self.broker_host, self.broker_port, keepalive=60) + self.logger.info(f"Connecting to MQTT broker at {self.broker_host}:{self.broker_port}") + except Exception as e: + self.logger.error(f"Failed to connect to MQTT broker: {e}") + raise + + def disconnect(self) -> None: + """Disconnect from the MQTT broker.""" + self.client.disconnect() + + async def _run_mqtt_loop(self) -> None: + """Run the MQTT loop in a separate thread.""" + loop = 
asyncio.get_event_loop() + await loop.run_in_executor(None, self.client.loop_forever) + + def start(self) -> None: + """Start the MQTT client (blocking).""" + self._running = True + # Create and set the event loop for async handler execution + try: + self._loop = asyncio.new_event_loop() + asyncio.set_event_loop(self._loop) + except RuntimeError: + # Event loop already exists, use the current one + self._loop = asyncio.get_event_loop() + + try: + self.connect() + # Run MQTT loop in executor to avoid blocking the event loop + self._loop.run_until_complete(self._run_mqtt_loop()) + except KeyboardInterrupt: + self.logger.info("Interrupted by user") + finally: + self.disconnect() + self._running = False + if self._loop and not self._loop.is_closed(): + self._loop.close() + + async def start_async(self) -> None: + """Start the MQTT client (async).""" + self._running = True + self._loop = asyncio.get_event_loop() + + try: + self.connect() + # Run the MQTT loop in a separate thread + await self._run_mqtt_loop() + except KeyboardInterrupt: + self.logger.info("Interrupted by user") + finally: + self.disconnect() + self._running = False + + def is_connected(self) -> bool: + """Check if the client is connected to the broker.""" + return self.client.is_connected() diff --git a/tools/developer_tools/bely-mqtt-message-broker/src/bely_mqtt/plugin.py b/tools/developer_tools/bely-mqtt-message-broker/src/bely_mqtt/plugin.py new file mode 100644 index 000000000..ade2bb931 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/src/bely_mqtt/plugin.py @@ -0,0 +1,532 @@ +""" +Plugin system for BELY MQTT handlers. + +This module provides the base classes and interfaces for creating +pluggable handlers for MQTT topics. 
+""" + +import importlib +import importlib.util +import inspect +import logging +from abc import ABC +from pathlib import Path +from typing import Any, Dict, List, Optional, Type + +from bely_mqtt.config import GlobalConfig +from bely_mqtt.events import EventType +from bely_mqtt.models import MQTTMessage + +logger = logging.getLogger(__name__) + + +class BelyAPIClient: + """ + Interface for BELY API client. + + This is a placeholder interface. The actual implementation should be + provided by the bely-api library. + """ + + def __init__(self, base_url: str, api_key: Optional[str] = None): + """Initialize the BELY API client.""" + self.base_url = base_url + self.api_key = api_key + + def get_log_entry(self, log_id: int) -> Dict[str, Any]: + """Get a log entry by ID.""" + raise NotImplementedError("Implement in actual BELY API library") + + def get_log_document(self, doc_id: int) -> Dict[str, Any]: + """Get a log document by ID.""" + raise NotImplementedError("Implement in actual BELY API library") + + def get_user(self, username: str) -> Dict[str, Any]: + """Get user information.""" + raise NotImplementedError("Implement in actual BELY API library") + + +class MQTTHandler(ABC): + """ + Base class for MQTT message handlers. + + Subclass this to create handlers for specific MQTT topics or event types. + By default, handlers subscribe to all BELY topics (bely/#). Override the + topic_pattern property to subscribe to specific topics only. + + Handlers can implement either: + 1. The generic `handle()` method for all messages + 2. Specific event handler methods like `handle_log_entry_add()`, `handle_generic_update()`, etc. + 3. 
Both (specific handlers are called first, then generic handle if not overridden) + + Supported specific event handlers: + - handle_generic_add(event: CoreEvent) - Generic add events (bely/add) + - handle_generic_update(event: CoreEvent) - Generic update events (bely/update) + - handle_generic_delete(event: CoreEvent) - Generic delete events (bely/delete) + - handle_log_entry_add(event: LogEntryAddEvent) - Log entry added (bely/logEntry/Add) + - handle_log_entry_update(event: LogEntryUpdateEvent) - Log entry updated (bely/logEntry/Update) + - handle_log_entry_delete(event: LogEntryDeleteEvent) - Log entry deleted (bely/logEntry/Delete) + - handle_log_entry_reply_add(event: LogEntryReplyAddEvent) - Reply added (bely/logEntryReply/Add) + - handle_log_entry_reply_update(event: LogEntryReplyUpdateEvent) - Reply updated (bely/logEntryReply/Update) + - handle_log_entry_reply_delete(event: LogEntryReplyDeleteEvent) - Reply deleted (bely/logEntryReply/Delete) + - handle_log_reaction_add(event: LogReactionAddEvent) - Reaction added (bely/logReaction/Add) + - handle_log_reaction_delete(event: LogReactionDeleteEvent) - Reaction deleted (bely/logReaction/Delete) + + Example: + class MyHandler(MQTTHandler): + # Uses default topic_pattern "bely/#" - receives all BELY events + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + # Only called for bely/logEntry/Add events + # event is already parsed and typed + self.logger.info(f"New entry: {event.log_info.id}") + + async def handle_generic_update(self, event: CoreEvent) -> None: + # Only called for bely/update events + self.logger.info(f"Updated: {event.entity_name}") + + class SpecificHandler(MQTTHandler): + @property + def topic_pattern(self) -> str: + # Override to subscribe to specific topics only + return "bely/logEntry/#" + + async def handle(self, message: MQTTMessage) -> None: + # Handle only log entry related events + pass + """ + + def __init__( + self, + api_client: Optional[BelyAPIClient] = None, 
+ global_config: Optional[GlobalConfig] = None, + ): + """ + Initialize the handler. + + Args: + api_client: Optional BELY API client for querying additional information. + global_config: Optional global configuration shared across all handlers. + """ + self.api_client = api_client + self.global_config = global_config + self.logger = logging.getLogger(self.__class__.__name__) + + @property + def topic_pattern(self) -> str: + """ + Return the MQTT topic pattern this handler subscribes to. + + By default, handlers subscribe to all BELY topics (bely/#). + Override this property to subscribe to specific topics only. + + Examples: + - "bely/#" - matches all BELY topics (default) + - "bely/add" - matches exact topic + - "bely/logEntry/#" - matches all log entry subtopics + - "bely/+/Add" - matches single level wildcard + + Returns: + MQTT topic pattern string. + """ + return "bely/#" + + async def handle(self, message: MQTTMessage) -> None: + """ + Handle an MQTT message. + + This method is called for all messages matching the topic pattern. + Override this method to handle all messages, or implement specific + event handler methods (handle_log_entry_add, etc.) for specific events. + + Args: + message: The MQTT message to handle. + + Raises: + Exception: Any exception raised will be logged but not propagated. 
+ """ + # Try to route to specific event handler + event_type = EventType.from_topic(message.topic) + if event_type: + handler_method_name = event_type.handler_method_name + if hasattr(self, handler_method_name): + handler_method = getattr(self, handler_method_name) + if callable(handler_method): + # Parse the message payload into the appropriate event type + event = self._parse_event(event_type, message) + await handler_method(event) + return + + # If no specific handler found, call the generic handler + await self.handle_generic(message) + + async def handle_generic(self, message: MQTTMessage) -> None: + """ + Generic handler for messages that don't match specific event types. + + Override this method to handle messages that don't match any specific + event type, or implement specific event handler methods. + + Args: + message: The MQTT message to handle. + """ + # Default implementation does nothing + # Subclasses can override to provide custom behavior + pass + + @staticmethod + def _parse_event(event_type: EventType, message: MQTTMessage) -> Any: + """ + Parse an MQTT message into the appropriate event type. + + Args: + event_type: The EventType to parse into. + message: The MQTT message to parse. + + Returns: + Parsed event object of the appropriate type. + + Raises: + ValueError: If the event type is not recognized. 
+ """ + # Import here to avoid circular imports + from bely_mqtt.models import ( + CoreEvent, + LogEntryAddEvent, + LogEntryUpdateEvent, + LogEntryDeleteEvent, + LogEntryReplyAddEvent, + LogEntryReplyUpdateEvent, + LogEntryReplyDeleteEvent, + LogReactionAddEvent, + LogReactionDeleteEvent, + ) + + event_type_map = { + EventType.GENERIC_ADD: CoreEvent, + EventType.GENERIC_UPDATE: CoreEvent, + EventType.GENERIC_DELETE: CoreEvent, + EventType.LOG_ENTRY_ADD: LogEntryAddEvent, + EventType.LOG_ENTRY_UPDATE: LogEntryUpdateEvent, + EventType.LOG_ENTRY_DELETE: LogEntryDeleteEvent, + EventType.LOG_ENTRY_REPLY_ADD: LogEntryReplyAddEvent, + EventType.LOG_ENTRY_REPLY_UPDATE: LogEntryReplyUpdateEvent, + EventType.LOG_ENTRY_REPLY_DELETE: LogEntryReplyDeleteEvent, + EventType.LOG_REACTION_ADD: LogReactionAddEvent, + EventType.LOG_REACTION_DELETE: LogReactionDeleteEvent, + } + + event_class = event_type_map.get(event_type) + if not event_class: + raise ValueError(f"Unknown event type: {event_type}") + + return event_class(**message.payload) + + def topic_matches(self, topic: str) -> bool: + """ + Check if a topic matches this handler's pattern. + + Args: + topic: The topic to check. + + Returns: + True if the topic matches the pattern, False otherwise. + """ + return self._match_mqtt_pattern(self.topic_pattern, topic) + + @staticmethod + def _match_mqtt_pattern(pattern: str, topic: str) -> bool: + """ + Match an MQTT topic against a pattern. + + Supports: + - # (multi-level wildcard, must be at end) + - + (single-level wildcard) + - exact matches + + Args: + pattern: The MQTT pattern. + topic: The topic to match. + + Returns: + True if the topic matches the pattern. 
+ """ + pattern_parts = pattern.split("/") + topic_parts = topic.split("/") + + for i, pattern_part in enumerate(pattern_parts): + if pattern_part == "#": + # Multi-level wildcard matches everything remaining + return True + elif pattern_part == "+": + # Single-level wildcard matches one level + if i >= len(topic_parts): + return False + else: + # Exact match required + if i >= len(topic_parts) or topic_parts[i] != pattern_part: + return False + + # Check if we've consumed all topic parts + return len(topic_parts) == len(pattern_parts) + + +class PluginManager: + """ + Manages loading and executing MQTT handlers. + + Supports passing both global and handler-specific configuration to handlers + during instantiation. + """ + + def __init__( + self, + api_client: Optional[BelyAPIClient] = None, + config_manager: Optional[Any] = None, + ): + """ + Initialize the plugin manager. + + Args: + api_client: Optional BELY API client to pass to handlers. + config_manager: Optional ConfigManager for handler configuration. + """ + self.api_client = api_client + self.config_manager = config_manager + self.handlers: List[MQTTHandler] = [] + self.logger = logging.getLogger(__name__) + + def register_handler(self, handler: MQTTHandler) -> None: + """ + Register a handler. + + Args: + handler: The handler instance to register. + """ + self.handlers.append(handler) + self.logger.info( + f"Registered handler {handler.__class__.__name__} " + f"for topic pattern: {handler.topic_pattern}" + ) + + def register_handler_class(self, handler_class: Type[MQTTHandler]) -> None: + """ + Register a handler by class. + + Attempts to pass both global configuration and handler-specific + configuration to the handler if available. + + Args: + handler_class: The handler class to instantiate and register. 
+ """ + handler_name = handler_class.__name__ + + # Get global configuration if available + global_config = None + if self.config_manager: + global_config = self.config_manager.get_global_config() + + # Try to get handler-specific configuration + handler_config = None + if self.config_manager: + handler_config = self.config_manager.get_config(handler_name) + + # Prepare constructor arguments + constructor_args = { + "api_client": self.api_client, + } + + # Add global_config if the handler accepts it + if global_config: + constructor_args["global_config"] = global_config + + # Add handler-specific configuration if available + if handler_config and handler_config.config: + constructor_args.update(handler_config.config) + + # Instantiate handler with configuration + try: + # First try with all arguments + handler = handler_class(**constructor_args) # type: ignore[arg-type] + if global_config: + self.logger.debug(f"Instantiated {handler_name} with global configuration") + if handler_config: + self.logger.debug( + f"Instantiated {handler_name} with handler configuration: {handler_config.config}" + ) + except TypeError as e: + # Handler might not accept all parameters, try with just what it accepts + self.logger.debug( + f"Handler {handler_name} does not accept all configuration parameters: {e}" + ) + + # Get the handler's __init__ signature + import inspect + + sig = inspect.signature(handler_class.__init__) + accepted_params = set(sig.parameters.keys()) - {"self"} + + # Filter constructor args to only what the handler accepts + filtered_args = {k: v for k, v in constructor_args.items() if k in accepted_params} + + try: + handler = handler_class(**filtered_args) # type: ignore[arg-type] + self.logger.debug( + f"Instantiated {handler_name} with filtered parameters: {filtered_args.keys()}" + ) + except Exception as e2: + # Fall back to minimal instantiation + self.logger.warning( + f"Failed to instantiate {handler_name} with configuration, " + f"falling back to minimal 
instantiation: {e2}" + ) + handler = handler_class() + + self.register_handler(handler) + + def load_handlers_from_directory(self, directory: Path) -> None: + """ + Dynamically load handlers from a directory. + + Looks for Python files and packages in the directory and imports any classes + that inherit from MQTTHandler. + + Supports: + - Single Python files (*.py) + - Python packages (directories with __init__.py) + + Args: + directory: Path to the directory containing handler modules. + """ + directory = Path(directory) + if not directory.exists(): + self.logger.warning(f"Handler directory does not exist: {directory}") + return + + # Load handlers from Python files + for py_file in directory.glob("*.py"): + if py_file.name.startswith("_"): + continue + + module_name = py_file.stem + try: + spec = importlib.util.spec_from_file_location(module_name, py_file) + if spec is None or spec.loader is None: + continue + + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + + # Find all MQTTHandler subclasses in the module + for name, obj in inspect.getmembers(module): + if ( + inspect.isclass(obj) + and issubclass(obj, MQTTHandler) + and obj is not MQTTHandler + ): + self.register_handler_class(obj) + self.logger.info(f"Loaded handler from {py_file}: {name}") + + except Exception as e: + self.logger.error(f"Failed to load handlers from {py_file}: {e}") + + # Load handlers from Python packages (directories with __init__.py) + for item in directory.iterdir(): + if not item.is_dir() or item.name.startswith("_") or item.name.startswith("."): + continue + + init_file = item / "__init__.py" + if not init_file.exists(): + continue + + module_name = item.name + try: + # Add parent directory to sys.path temporarily + import sys + + old_path = sys.path.copy() + sys.path.insert(0, str(directory)) + + try: + # Import the package + module = importlib.import_module(module_name) + + # Find all MQTTHandler subclasses in the package + for name, obj in 
inspect.getmembers(module): + if ( + inspect.isclass(obj) + and issubclass(obj, MQTTHandler) + and obj is not MQTTHandler + ): + self.register_handler_class(obj) + self.logger.info(f"Loaded handler from package {item}: {name}") + + finally: + # Restore original sys.path + sys.path = old_path + + except Exception as e: + self.logger.error(f"Failed to load handlers from package {item}: {e}") + + async def handle_message(self, message: MQTTMessage) -> None: + """ + Route a message to all matching handlers. + + For each matching handler, attempts to route to a specific event handler + method (e.g., handle_log_entry_add) if available, otherwise calls the + generic handle() method. + + Specific event handlers receive parsed event objects (e.g., LogEntryAddEvent) + instead of raw MQTT messages. + + Args: + message: The MQTT message to handle. + """ + matching_handlers = [h for h in self.handlers if h.topic_matches(message.topic)] + + if not matching_handlers: + self.logger.debug(f"No handlers found for topic: {message.topic}") + return + + # Get event type for this topic + event_type = EventType.from_topic(message.topic) + + for handler in matching_handlers: + try: + # Try to route to specific event handler + if event_type: + handler_method_name = event_type.handler_method_name + if hasattr(handler, handler_method_name): + handler_method = getattr(handler, handler_method_name) + if callable(handler_method): + self.logger.debug( + f"Routing {message.topic} to " + f"{handler.__class__.__name__}.{handler_method_name}" + ) + # Parse message into typed event object + event = MQTTHandler._parse_event(event_type, message) + await handler_method(event) + continue + + # Fall back to generic handle method + self.logger.debug( + f"Routing {message.topic} to " f"{handler.__class__.__name__}.handle" + ) + await handler.handle(message) + except Exception as e: + self.logger.error( + f"Error in handler {handler.__class__.__name__}: {e}", + exc_info=True, + ) + + def 
    def get_handlers_for_topic(self, topic: str) -> List[MQTTHandler]:
        """
        Get all handlers that match a topic.

        Args:
            topic: The MQTT topic.

        Returns:
            List of matching handlers.
        """
        return [h for h in self.handlers if h.topic_matches(topic)]


# --- new file in this diff: tests/__init__.py ---
"""Tests for BELY MQTT Framework."""


# --- new file in this diff: tests/test_models.py ---
"""Tests for BELY event models."""

from datetime import datetime

import pytest

from bely_mqtt.models import (
    CoreEvent,
    LogEntryAddEvent,
    LogEntryDeleteEvent,
    LogEntryReplyDeleteEvent,
    LogbookInfo,
)


# NOTE: the wire payloads below use the key "eventTriggedByUsername" (sic);
# the models expose it as event_triggered_by_username (see
# test_alias_field_mapping), so the spelling here must match the server alias.
@pytest.fixture
def sample_log_entry_add_payload():
    """Sample log entry add event payload."""
    return {
        "description": "log entry was added",
        "eventTimestamp": "2025-11-21T16:38:06.157+00:00",
        "parentLogDocumentInfo": {
            "name": "[2025/11/21/2] New Document",
            "id": 105,
            "lastModifiedOnDateTime": "2025-11-21T16:37:42.000+00:00",
            "createdByUsername": "logr",
            "lastModifiedByUsername": "logr",
            "enteredOnDateTime": "2025-11-21T16:37:42.000+00:00",
            "ownerUsername": "logr",
            "ownerUserGroupName": "LOGR_ADMIN",
        },
        "logInfo": {
            "id": 267,
            "lastModifiedOnDateTime": "2025-11-21T16:38:00.392+00:00",
            "lastModifiedByUsername": "logr",
            "enteredByUsername": "logr",
            "enteredOnDateTime": "2025-11-21T16:38:00.392+00:00",
        },
        "logbookList": [{"name": "studies-sr", "id": 7, "displayName": "SR"}],
        "textDiff": "New Log Entry Added",
        "entityName": "Log",
        "entityId": 267,
        "eventTriggedByUsername": "logr",
    }


@pytest.fixture
def sample_simple_event_payload():
    """Sample simple event payload."""
    return {
        "description": "Add action completed",
        "eventTimestamp": "2025-11-21T16:37:52.537+00:00",
        "entityName": "ItemDomainLogbook",
        "entityId": 105,
        "eventTriggedByUsername": "logr",
    }


def test_core_event_parsing(sample_simple_event_payload):
    """Test parsing of core event."""
    event = CoreEvent(**sample_simple_event_payload)

    assert event.description == "Add action completed"
    assert event.entity_name == "ItemDomainLogbook"
    assert event.entity_id == 105
    assert event.event_triggered_by_username == "logr"
    assert isinstance(event.event_timestamp, datetime)


def test_log_entry_add_event_parsing(sample_log_entry_add_payload):
    """Test parsing of log entry add event."""
    event = LogEntryAddEvent(**sample_log_entry_add_payload)

    assert event.description == "log entry was added"
    assert event.log_info.id == 267
    assert event.parent_log_document_info.id == 105
    assert event.parent_log_document_info.name == "[2025/11/21/2] New Document"
    assert len(event.logbook_list) == 1
    assert event.logbook_list[0].display_name == "SR"
    assert event.text_diff == "New Log Entry Added"
    assert event.event_triggered_by_username == "logr"


def test_logbook_info_parsing():
    """Test parsing of logbook info."""
    data = {"name": "studies-sr", "id": 7, "displayName": "SR"}
    logbook = LogbookInfo(**data)

    assert logbook.name == "studies-sr"
    assert logbook.id == 7
    assert logbook.display_name == "SR"


def test_event_timestamp_parsing(sample_simple_event_payload):
    """Test that event timestamp is properly parsed (ISO 8601 with offset)."""
    event = CoreEvent(**sample_simple_event_payload)

    assert isinstance(event.event_timestamp, datetime)
    assert event.event_timestamp.year == 2025
    assert event.event_timestamp.month == 11
    assert event.event_timestamp.day == 21


def test_alias_field_mapping(sample_simple_event_payload):
    """Test that aliased (camelCase) fields are properly mapped to snake_case."""
    event = CoreEvent(**sample_simple_event_payload)

    # Check that snake_case attributes work
    assert event.event_triggered_by_username == "logr"
    assert event.entity_name == "ItemDomainLogbook"
    assert event.entity_id == 105


@pytest.fixture
def sample_log_entry_delete_payload():
    """Sample log entry delete event payload."""
    return {
        "description": "log entry was deleted",
        "eventTimestamp": "2025-11-21T16:40:00.000+00:00",
        "parentLogDocumentInfo": {
            "name": "[2025/11/21/2] New Document",
            "id": 105,
            "lastModifiedOnDateTime": "2025-11-21T16:38:20.000+00:00",
            "createdByUsername": "logr",
            "lastModifiedByUsername": "logr",
            "enteredOnDateTime": "2025-11-21T16:37:42.000+00:00",
            "ownerUsername": "logr",
            "ownerUserGroupName": "LOGR_ADMIN",
        },
        "logInfo": {
            "id": 267,
            "lastModifiedOnDateTime": "2025-11-21T16:40:00.000+00:00",
            "lastModifiedByUsername": "logr",
            "enteredByUsername": "logr",
            "enteredOnDateTime": "2025-11-21T16:38:00.000+00:00",
        },
        "logbookList": [{"name": "studies-sr", "id": 7, "displayName": "SR"}],
        "textDiff": "Log Entry Deleted",
        "entityName": "Log",
        "entityId": 267,
        "eventTriggedByUsername": "logr",
    }


@pytest.fixture
def sample_log_entry_reply_delete_payload():
    """Sample log entry reply delete event payload.

    Includes parentLogInfo: the reply (id 268) points back to its parent
    entry (id 267).
    """
    return {
        "description": "reply log entry was deleted",
        "eventTimestamp": "2025-11-21T16:41:00.000+00:00",
        "parentLogDocumentInfo": {
            "name": "[2025/11/21/2] New Document",
            "id": 105,
            "lastModifiedOnDateTime": "2025-11-21T16:40:00.000+00:00",
            "createdByUsername": "logr",
            "lastModifiedByUsername": "logr",
            "enteredOnDateTime": "2025-11-21T16:37:42.000+00:00",
            "ownerUsername": "logr",
            "ownerUserGroupName": "LOGR_ADMIN",
        },
        "logInfo": {
            "id": 268,
            "lastModifiedOnDateTime": "2025-11-21T16:41:00.000+00:00",
            "lastModifiedByUsername": "logr",
            "enteredByUsername": "logr",
            "enteredOnDateTime": "2025-11-21T16:38:16.000+00:00",
        },
        "logbookList": [{"name": "studies-sr", "id": 7, "displayName": "SR"}],
        "textDiff": "Reply Deleted",
        "parentLogInfo": {
            "id": 267,
            "lastModifiedOnDateTime": "2025-11-21T16:38:14.000+00:00",
            "lastModifiedByUsername": "logr",
            "enteredByUsername": "logr",
            "enteredOnDateTime": "2025-11-21T16:38:00.000+00:00",
        },
        "entityName": "Log",
        "entityId": 268,
        "eventTriggedByUsername": "logr",
    }


def test_log_entry_delete_event_parsing(sample_log_entry_delete_payload):
    """Test parsing of log entry delete event."""
    event = LogEntryDeleteEvent(**sample_log_entry_delete_payload)

    assert event.description == "log entry was deleted"
    assert event.log_info.id == 267
    assert event.parent_log_document_info.id == 105
    assert event.parent_log_document_info.name == "[2025/11/21/2] New Document"
    assert len(event.logbook_list) == 1
    assert event.logbook_list[0].display_name == "SR"
    assert event.text_diff == "Log Entry Deleted"
    assert event.entity_id == 267
    assert event.event_triggered_by_username == "logr"
    assert isinstance(event.event_timestamp, datetime)


def test_log_entry_reply_delete_event_parsing(sample_log_entry_reply_delete_payload):
    """Test parsing of log entry reply delete event."""
    event = LogEntryReplyDeleteEvent(**sample_log_entry_reply_delete_payload)

    assert event.description == "reply log entry was deleted"
    assert event.log_info.id == 268
    assert event.parent_log_info.id == 267
    assert event.parent_log_document_info.id == 105
    assert event.parent_log_document_info.name == "[2025/11/21/2] New Document"
    assert len(event.logbook_list) == 1
    assert event.logbook_list[0].display_name == "SR"
    assert event.text_diff == "Reply Deleted"
    assert event.entity_id == 268
    assert event.event_triggered_by_username == "logr"
    assert isinstance(event.event_timestamp, datetime)
# --- new file in this diff: tests/test_plugin.py ---
"""Tests for plugin system (topic matching and PluginManager routing)."""

import pytest

from bely_mqtt.models import MQTTMessage
from bely_mqtt.plugin import MQTTHandler, PluginManager


class MockHandler(MQTTHandler):
    """Test handler matching one exact topic; records every message it handles."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.handled_messages = []

    @property
    def topic_pattern(self) -> str:
        return "test/topic"

    async def handle(self, message: MQTTMessage) -> None:
        self.handled_messages.append(message)


class WildcardHandler(MQTTHandler):
    """Handler with single-level wildcard ('+') pattern."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.handled_messages = []

    @property
    def topic_pattern(self) -> str:
        return "test/+"

    async def handle(self, message: MQTTMessage) -> None:
        self.handled_messages.append(message)


class MultiLevelHandler(MQTTHandler):
    """Handler with multi-level wildcard ('#') pattern."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.handled_messages = []

    @property
    def topic_pattern(self) -> str:
        return "test/#"

    async def handle(self, message: MQTTMessage) -> None:
        self.handled_messages.append(message)


def test_topic_matching():
    """Exact-topic pattern matches only the exact topic."""
    handler = MockHandler()

    assert handler.topic_matches("test/topic")
    assert not handler.topic_matches("test/other")
    assert not handler.topic_matches("other/topic")


def test_single_level_wildcard():
    """'+' matches exactly one topic level, never more."""
    handler = WildcardHandler()

    assert handler.topic_matches("test/topic")
    assert handler.topic_matches("test/other")
    assert not handler.topic_matches("test/topic/sub")
    assert not handler.topic_matches("other/topic")


def test_multi_level_wildcard():
    """'#' matches any number of remaining levels."""
    handler = MultiLevelHandler()

    assert handler.topic_matches("test/topic")
    assert handler.topic_matches("test/topic/sub")
    assert handler.topic_matches("test/topic/sub/deep")
    assert not handler.topic_matches("other/topic")


# FIX: this test performs no awaits, so the async def + asyncio marker were
# unnecessary; it is now a plain synchronous test.
def test_plugin_manager_registration():
    """register_handler stores the instance in manager.handlers."""
    manager = PluginManager()
    handler = MockHandler()

    manager.register_handler(handler)

    assert len(manager.handlers) == 1
    assert manager.handlers[0] is handler


@pytest.mark.asyncio
async def test_plugin_manager_message_routing():
    """handle_message delivers a matching message to the handler."""
    manager = PluginManager()
    handler = MockHandler()
    manager.register_handler(handler)

    message = MQTTMessage(topic="test/topic", payload={"test": "data"})
    await manager.handle_message(message)

    assert len(handler.handled_messages) == 1
    assert handler.handled_messages[0] is message


@pytest.mark.asyncio
async def test_plugin_manager_no_matching_handlers():
    """A message whose topic matches nothing is dropped without error."""
    manager = PluginManager()
    handler = MockHandler()
    manager.register_handler(handler)

    message = MQTTMessage(topic="other/topic", payload={"test": "data"})
    # Should not raise
    await manager.handle_message(message)

    assert len(handler.handled_messages) == 0


@pytest.mark.asyncio
async def test_plugin_manager_multiple_handlers():
    """Every matching handler receives the message, not just the first."""
    manager = PluginManager()
    handler1 = MockHandler()
    handler2 = MockHandler()
    manager.register_handler(handler1)
    manager.register_handler(handler2)

    message = MQTTMessage(topic="test/topic", payload={"test": "data"})
    await manager.handle_message(message)

    assert len(handler1.handled_messages) == 1
    assert len(handler2.handled_messages) == 1


def test_get_handlers_for_topic():
    """get_handlers_for_topic returns exactly the handlers whose pattern matches."""
    manager = PluginManager()
    handler1 = MockHandler()
    handler2 = WildcardHandler()
    handler3 = MultiLevelHandler()

    manager.register_handler(handler1)
    manager.register_handler(handler2)
    manager.register_handler(handler3)

    handlers = manager.get_handlers_for_topic("test/topic")
    assert len(handlers) == 3

    handlers = manager.get_handlers_for_topic("test/other")
    assert len(handlers) == 2  # WildcardHandler and MultiLevelHandler

    handlers = manager.get_handlers_for_topic("other/topic")
    assert len(handlers) == 0