diff --git a/.github/workflows/test-mqtt-broker.yml b/.github/workflows/test-mqtt-broker.yml new file mode 100644 index 000000000..64a9110ce --- /dev/null +++ b/.github/workflows/test-mqtt-broker.yml @@ -0,0 +1,55 @@ +name: Test BELY MQTT Message Broker + +on: + pull_request: + paths: + - 'tools/developer_tools/bely-mqtt-message-broker/**' + - '.github/workflows/test-mqtt-broker.yml' + workflow_dispatch: + +jobs: + test: + name: Run MQTT Broker Tests + runs-on: ubuntu-latest + + defaults: + run: + working-directory: tools/developer_tools/bely-mqtt-message-broker + + strategy: + matrix: + python-version: ['3.11', '3.12'] + # python-version: ['3.11'] + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + make install-dev + + - name: Run code quality linter + run: | + make lint + + - name: Run code quality type-check + run: | + make type-check + + - name: Run tests with coverage + run: | + make test-cov + + # - name: Upload coverage reports + # if: matrix.python-version == '3.11' + # uses: actions/upload-artifact@v4 + # with: + # name: coverage-report + # path: tools/developer_tools/bely-mqtt-message-broker/htmlcov/ \ No newline at end of file diff --git a/.gitignore b/.gitignore index cb6708344..90726d60b 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,7 @@ .vscode .DS_Store src/python/.loglogin +.loglogin # Compiled files *.pyc @@ -56,3 +57,5 @@ docs/python/_build/ # Temporary testing csv files tools/developer_tools/python-client/cdbCli/service/cli/Spreadsheets/ .env +tools/developer_tools/bely-mqtt-message-broker/conda-bld +tools/developer_tools/bely-mqtt-message-broker/dev-config diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 000000000..f35430680 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,237 @@ +# CLAUDE.md + +This 
file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. + +## Project Overview + +BELY (Best Electronic Logbook Yet) is a Java EE web application for electronic logbook management. It is deployed on Payara Server (GlassFish fork) and uses MySQL/MariaDB as its database. The project includes: + +- **LogrPortal**: Main Java EE web application (JSF/PrimeFaces frontend + REST API) +- **Python web service**: Python-based web service components +- **MQTT integration**: Python framework for handling MQTT events and notifications +- **CLI utilities**: Command-line tools for searching and querying the system + +## Build and Development Commands + +### Environment Setup + +Always source the environment setup script before running any commands: +```bash +source setup.sh +``` + +This sets up critical environment variables: +- `LOGR_ROOT_DIR`: Root directory of the repository +- `LOGR_SUPPORT_DIR`: Support software directory (Payara, Java, MySQL, etc.) +- `LOGR_INSTALL_DIR`: Installation directory +- `LOGR_DATA_DIR`: Data directory +- `PYTHONPATH`: Includes `src/python` and Python client paths + +### Initial Setup + +```bash +# Install support software (Java, Payara, etc.) 
+make support + +# Install MySQL (if needed) +make support-mysql + +# Install NetBeans IDE +make support-netbeans + +# Create development configuration +make dev-config +``` + +### Database Management + +```bash +# Create clean database with schema +make clean-db + +# Create test database with test data +make test-db + +# Backup database +make backup + +# Development database variants +make clean-db-dev +make backup-dev +``` + +Database SQL files are located in `db/sql/`: +- `clean/`: Clean database schema initialization scripts +- `test/`: Test data initialization scripts +- `static/`: Static lookup data (e.g., notification providers) +- `updates/`: Database migration scripts (e.g., `updateTo2025.3.sql`) +- `create_logr_tables.sql`: Main table creation script +- `create_stored_procedures.sql`: Stored procedures +- `create_views.sql`: Database views + +### Building and Deployment + +```bash +# Build the web portal +cd src/java/LogrPortal +ant clean +ant dist + +# Deploy to Payara (from project root) +make deploy-web-portal + +# Deploy web service +make deploy-web-service + +# Undeploy +make undeploy-web-portal +make undeploy-web-service + +# Development deployment variants +make deploy-web-portal-dev +make deploy-web-service-dev +``` + +### Running Tests + +```bash +# Run full test suite (backs up DB, deploys test DB, runs tests, restores DB) +make test + +# Manual API tests +cd tools/developer_tools/python-client/ +pytest test/api_test.py + +# Test requirements +pip install -r tools/developer_tools/python-client/test/requirements.txt +``` + +### Python Web Service Development + +```bash +# Start the Python web service +./sbin/cdbWebService.sh +``` + +Python code is in `src/python/cdb/`. 
+ +## Architecture + +### Java Package Structure + +All Java code is under `src/java/LogrPortal/src/java/gov/anl/aps/logr/`: + +**portal** - Main web portal application +- `controllers/` - JSF managed beans for UI logic +- `model/db/entities/` - JPA entity classes (e.g., `Log`, `UserInfo`, `NotificationConfiguration`) +- `model/db/beans/` - EJB facades for database operations (stateless session beans) +- `view/` - View objects and JSF utilities +- `import_export/` - Import/export functionality +- `plugins/` - Plugin support system +- `utilities/` - General utility classes + +**rest** - REST API +- `routes/` - JAX-RS resource classes (e.g., `LogbookRoute`, `SearchRoute`) +- `authentication/` - Authentication filters and utilities +- `entities/` - DTO classes for API responses +- `provider/` - JAX-RS providers + +**common** - Shared utilities +- `mqtt/` - MQTT integration models and utilities +- `objects/` - Common data objects +- `utilities/` - Shared utility classes +- `search/` - Search functionality + +**api** - API client interfaces + +### Web Application Structure + +Location: `src/java/LogrPortal/web/` + +- `views/` - XHTML pages organized by entity type (e.g., `log/`, `userInfo/`, `notificationConfiguration/`) +- `templates/` - XHTML template files +- `resources/` - Static resources (CSS, JavaScript, images) +- `WEB-INF/` - Web application configuration + +### Technology Stack + +- **Framework**: Java EE 8 (JSF 2.3, EJB 3.2, JPA 2.2, JAX-RS 2.1) +- **UI**: PrimeFaces 11 + PrimeFaces Extensions, OmniFaces 3 +- **App Server**: Payara 5.2022.5 (GlassFish fork) +- **Database**: MySQL/MariaDB (driver: mariadb-java-client-3.1.0.jar) +- **ORM**: EclipseLink (JPA implementation) +- **Build**: Apache Ant + NetBeans project +- **API Docs**: Swagger/OpenAPI 2.1.5 +- **PDF Generation**: iText 5.5.13.1 +- **Markdown**: Flexmark 0.64.8 +- **Logging**: Log4j 2.17.0 + +### MQTT Integration + +The MQTT notification framework is a pluggable Python system for handling MQTT 
events: +- Location: `tools/developer_tools/bely-mqtt-message-broker/` +- Features: Type-safe Pydantic models, topic matching with wildcards, notification handlers (email, Slack, Discord) +- See `tools/developer_tools/bely-mqtt-message-broker/README.md` for details + +### Plugins System + +BELY supports custom plugins for extending functionality: +- Templates: `tools/developer_tools/logr_plugins/pluginTemplates/` +- Deployment: `make deploy-cdb-plugin` or `make deploy-cdb-plugin-dev` + +## Development Workflow + +### NetBeans Setup + +1. Open NetBeans: `netbeans &` +2. File > Open Project > Select `src/java/LogrPortal` +3. Resolve missing server: Right-click project > "Resolve Missing Server Problem" +4. Add Payara Server pointing to `$LOGR_SUPPORT_DIR/netbeans/payara` +5. Copy MariaDB driver to Payara: + ```bash + cp src/java/LogrPortal/lib/mariadb-java-client-3.1.0.jar \ + $LOGR_SUPPORT_DIR/netbeans/payara/glassfish/domains/domain1/lib/ + ``` +6. Run project from NetBeans + +### Adding New Entities + +When adding new database entities (e.g., notification system): + +1. Update `db/sql/create_logr_tables.sql` with new table definitions +2. Add static data SQL files to `db/sql/static/` if needed +3. Create JPA `@Entity` class in `portal/model/db/entities/` +4. Create `@Stateless` facade in `portal/model/db/beans/` (extends `AbstractFacade`) +5. Create JSF controller in `portal/controllers/` (extends appropriate base controller) +6. Create XHTML views in `web/views//` +7. 
Update database with `make clean-db` or create migration in `db/sql/updates/` + +### Database Migrations + +Version updates go in `db/sql/updates/updateToYYYY.X.sql` (e.g., `updateTo2025.3.sql`) + +## Key Patterns + +- **Entity/Facade/Controller**: Standard Java EE pattern - JPA entities, EJB facades for CRUD, JSF controllers for UI +- **Named Queries**: Use JPA `@NamedQuery` annotations on entities for common queries +- **Lazy Loading**: JSF data tables use lazy loading models +- **Session Management**: Session-scoped JSF beans maintain user state +- **REST Authentication**: Token-based auth via `rest/authentication/` +- **MQTT Events**: Notification configurations trigger MQTT messages handled by Python framework + +## Important Files + +- `Makefile`: Top-level make targets for building, deploying, testing +- `setup.sh`: Environment variable setup (must be sourced) +- `sbin/`: Deployment and utility scripts +- `src/java/LogrPortal/build.xml`: Ant build file +- `src/java/LogrPortal/nbproject/project.properties`: NetBeans project configuration +- `db/sql/create_logr_tables.sql`: Main database schema + +## Notes + +- The project is also known as "ComponentDB" or "CDB" in some contexts +- Default database name: `logr` (development: `logr_dev`) +- Application URL after deployment: `https://:8181/bely` or `https://:8181/cdb` +- The main branch is `master` (not `main`) +- Always run database operations and deployments from the repository root after sourcing `setup.sh` diff --git a/db/sql/create_logr_tables.sql b/db/sql/create_logr_tables.sql index 140f0e7d0..c58d5340e 100644 --- a/db/sql/create_logr_tables.sql +++ b/db/sql/create_logr_tables.sql @@ -1200,6 +1200,119 @@ CREATE TABLE `connector_property` ( CONSTRAINT `connector_property_fk2` FOREIGN KEY (`property_value_id`) REFERENCES `property_value` (`id`) ON UPDATE CASCADE ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; +-- +-- Table `notification_provider` +-- + +DROP TABLE IF EXISTS `notification_provider`; 
+CREATE TABLE `notification_provider` ( + `id` int(11) unsigned NOT NULL AUTO_INCREMENT, + `name` varchar(64) NOT NULL, + `description` varchar(256) DEFAULT NULL, + `instructions` TEXT DEFAULT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `notification_provider_u1` (`name`) +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; + +-- +-- Table `notification_configuration` +-- + +DROP TABLE IF EXISTS `notification_configuration`; +CREATE TABLE `notification_configuration` ( + `id` int(11) unsigned NOT NULL AUTO_INCREMENT, + `name` varchar(64) NOT NULL, + `description` varchar(256) DEFAULT NULL, + `notification_provider_id` int(11) unsigned NOT NULL, + `notification_endpoint` varchar(256) NOT NULL, + `user_id` int(11) unsigned DEFAULT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `notification_configuration_u1` (`name`), + KEY `notification_configuration_k1` (`notification_provider_id`), + KEY `notification_configuration_k2` (`user_id`), + CONSTRAINT `notification_configuration_fk1` FOREIGN KEY (`notification_provider_id`) REFERENCES `notification_provider` (`id`) ON UPDATE CASCADE, + CONSTRAINT `notification_configuration_fk2` FOREIGN KEY (`user_id`) REFERENCES `user_info` (`id`) ON UPDATE CASCADE ON DELETE CASCADE +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; + +-- +-- Table `notification_provider_config_key` +-- + +DROP TABLE IF EXISTS `notification_provider_config_key`; +CREATE TABLE `notification_provider_config_key` ( + `id` int(11) unsigned NOT NULL AUTO_INCREMENT, + `notification_provider_id` int(11) unsigned NOT NULL, + `config_key` varchar(64) NOT NULL, + `description` varchar(256) DEFAULT NULL, + `is_required` bool NOT NULL DEFAULT 0, + `display_order` int(11) DEFAULT 0, + PRIMARY KEY (`id`), + UNIQUE KEY `notification_provider_config_key_u1` (`notification_provider_id`, `config_key`), + KEY `notification_provider_config_key_k1` (`notification_provider_id`), + CONSTRAINT `notification_provider_config_key_fk1` FOREIGN KEY (`notification_provider_id`) 
REFERENCES `notification_provider` (`id`) ON UPDATE CASCADE ON DELETE CASCADE +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; + +-- +-- Table `notification_configuration_setting` +-- + +DROP TABLE IF EXISTS `notification_configuration_setting`; +CREATE TABLE `notification_configuration_setting` ( + `id` int(11) unsigned NOT NULL AUTO_INCREMENT, + `notification_configuration_id` int(11) unsigned NOT NULL, + `notification_provider_config_key_id` int(11) unsigned NOT NULL, + `config_value` varchar(256) DEFAULT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `notification_configuration_setting_u1` (`notification_configuration_id`, `notification_provider_config_key_id`), + KEY `notification_configuration_setting_k1` (`notification_configuration_id`), + KEY `notification_configuration_setting_k2` (`notification_provider_config_key_id`), + CONSTRAINT `notification_configuration_setting_fk1` FOREIGN KEY (`notification_configuration_id`) REFERENCES `notification_configuration` (`id`) ON UPDATE CASCADE ON DELETE CASCADE, + CONSTRAINT `notification_configuration_setting_fk2` FOREIGN KEY (`notification_provider_config_key_id`) REFERENCES `notification_provider_config_key` (`id`) ON UPDATE CASCADE ON DELETE CASCADE +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; + +-- ============================================================================ +-- Notification Handler Configuration Tables +-- ============================================================================ + +-- Handler configuration key definitions +-- These define global handler settings like entry_updates, own_entry_edits +DROP TABLE IF EXISTS `notification_handler_config_key`; +CREATE TABLE `notification_handler_config_key` ( + `id` INT(11) UNSIGNED NOT NULL AUTO_INCREMENT, + `config_key` VARCHAR(64) NOT NULL COMMENT 'Key name like entry_updates', + `display_name` VARCHAR(64) DEFAULT NULL COMMENT 'Short display name for UI', + `description` VARCHAR(256) DEFAULT NULL, + `value_type` ENUM('boolean', 
'string', 'integer') NOT NULL DEFAULT 'boolean', + `default_value` VARCHAR(256) DEFAULT NULL, + `display_order` INT(11) DEFAULT 0, + PRIMARY KEY (`id`), + UNIQUE KEY `notification_handler_config_key_u1` (`config_key`) +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; + +-- Per-configuration handler settings +-- Links notification_configuration to handler settings +DROP TABLE IF EXISTS `notification_configuration_handler_setting`; +CREATE TABLE `notification_configuration_handler_setting` ( + `id` INT(11) UNSIGNED NOT NULL AUTO_INCREMENT, + `notification_configuration_id` INT(11) UNSIGNED NOT NULL, + `notification_handler_config_key_id` INT(11) UNSIGNED NOT NULL, + `config_value` VARCHAR(256) NOT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `notification_configuration_handler_setting_u1` + (`notification_configuration_id`, `notification_handler_config_key_id`), + KEY `notification_configuration_handler_setting_k1` (`notification_configuration_id`), + KEY `notification_configuration_handler_setting_k2` (`notification_handler_config_key_id`), + CONSTRAINT `notification_configuration_handler_setting_fk1` + FOREIGN KEY (`notification_configuration_id`) + REFERENCES `notification_configuration` (`id`) + ON UPDATE CASCADE ON DELETE CASCADE, + CONSTRAINT `notification_configuration_handler_setting_fk2` + FOREIGN KEY (`notification_handler_config_key_id`) + REFERENCES `notification_handler_config_key` (`id`) + ON UPDATE CASCADE ON DELETE CASCADE +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; + + -- Note: CHECK constraint is not supported in MySQL. 
-- Hence, we need triggers to verify that at least one of -- is_used_required/optional is NULL diff --git a/db/sql/static/populate_notification_handler_config_key.sql b/db/sql/static/populate_notification_handler_config_key.sql new file mode 100644 index 000000000..04ce755a9 --- /dev/null +++ b/db/sql/static/populate_notification_handler_config_key.sql @@ -0,0 +1,11 @@ +LOCK TABLES `notification_handler_config_key` WRITE; +/*!40000 ALTER TABLE `notification_handler_config_key` DISABLE KEYS */; +INSERT INTO `notification_handler_config_key` VALUES +(1,'entry_updates', 'Entry Updates', 'Notify document owner when log entries in their document are updated or deleted by others', 'boolean', 'true', 1), +(2,'entry_replies', 'Replies to My Entries', 'Notify original entry creator when someone replies to their log entry, or when replies are updated/deleted on their entry', 'boolean', 'true', 2), +(3,'new_entries', 'New Entries', 'Notify document owner when new log entries are created in their document (excluding entries they create themselves)', 'boolean', 'true', 3), +(4,'document_replies', 'All Document Replies', 'Notify document owner when replies are added, updated, or deleted anywhere in their document', 'boolean', 'true', 4), +(5,'reactions', 'Entry Reactions', 'Notify entry creator when someone adds or removes a reaction (like emoji) to their entry', 'boolean', 'true', 5), +(6,'own_entry_edits', 'Edits to My Entries', 'Notify users when their own entries (or replies they created) are edited or deleted by someone else', 'boolean', 'true', 6); +/*!40000 ALTER TABLE `notification_handler_config_key` ENABLE KEYS */; +UNLOCK TABLES; diff --git a/db/sql/static/populate_notification_provider.sql b/db/sql/static/populate_notification_provider.sql new file mode 100644 index 000000000..d864e116e --- /dev/null +++ b/db/sql/static/populate_notification_provider.sql @@ -0,0 +1,34 @@ +LOCK TABLES `notification_provider` WRITE; +/*!40000 ALTER TABLE `notification_provider` DISABLE 
KEYS */; +INSERT INTO `notification_provider` VALUES +(1,'apprise', 'Apprise unified notification library supporting email, Discord, Slack, Teams, etc.', '# Apprise Notification URLs + +[Apprise](https://github.com/caronc/apprise) supports many notification services. Below are some common URL formats. + +## Email (SMTP) + +``` +mailto://user@gmail.com +``` + +## Microsoft Teams + +``` +msteams://TokenA/TokenB/TokenC +``` + +## Slack + +``` +slack://TokenA/TokenB/TokenC/#channel +``` + +## Custom Webhooks + +``` +json://hostname/path +``` + +For the full list of supported services, see the [Apprise Wiki](https://github.com/caronc/apprise/wiki).'); +/*!40000 ALTER TABLE `notification_provider` ENABLE KEYS */; +UNLOCK TABLES; diff --git a/db/sql/updates/dev/2026.3.dev1.sql b/db/sql/updates/dev/2026.3.dev1.sql new file mode 100644 index 000000000..f5fff821d --- /dev/null +++ b/db/sql/updates/dev/2026.3.dev1.sql @@ -0,0 +1,37 @@ +-- +-- Copyright (c) UChicago Argonne, LLC. All rights reserved. +-- See LICENSE file. +-- + +ALTER TABLE `notification_provider` ADD COLUMN `instructions` TEXT DEFAULT NULL; + +UPDATE `notification_provider` SET `instructions` = '# Apprise Notification URLs + +[Apprise](https://github.com/caronc/apprise) supports many notification services. Below are some common URL formats. + +## Email (SMTP) + +``` +mailto://user@gmail.com +``` + +## Microsoft Teams + +``` +msteams://TokenA/TokenB/TokenC +``` + +## Slack + +``` +slack://TokenA/TokenB/TokenC/#channel +``` + +## Custom Webhooks + +``` +json://hostname/path +``` + +For the full list of supported services, see the [Apprise Wiki](https://github.com/caronc/apprise/wiki).' 
+WHERE `name` = 'apprise'; diff --git a/db/sql/updates/updateTo2026.3.sql b/db/sql/updates/updateTo2026.3.sql new file mode 100644 index 000000000..bd173b81e --- /dev/null +++ b/db/sql/updates/updateTo2026.3.sql @@ -0,0 +1,161 @@ +-- +-- Table `notification_provider` +-- + +DROP TABLE IF EXISTS `notification_provider`; +CREATE TABLE `notification_provider` ( + `id` int(11) unsigned NOT NULL AUTO_INCREMENT, + `name` varchar(64) NOT NULL, + `description` varchar(256) DEFAULT NULL, + `instructions` TEXT DEFAULT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `notification_provider_u1` (`name`) +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; + +-- +-- Table `notification_configuration` +-- + +DROP TABLE IF EXISTS `notification_configuration`; +CREATE TABLE `notification_configuration` ( + `id` int(11) unsigned NOT NULL AUTO_INCREMENT, + `name` varchar(64) NOT NULL, + `description` varchar(256) DEFAULT NULL, + `notification_provider_id` int(11) unsigned NOT NULL, + `notification_endpoint` varchar(256) NOT NULL, + `user_id` int(11) unsigned DEFAULT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `notification_configuration_u1` (`name`), + KEY `notification_configuration_k1` (`notification_provider_id`), + KEY `notification_configuration_k2` (`user_id`), + CONSTRAINT `notification_configuration_fk1` FOREIGN KEY (`notification_provider_id`) REFERENCES `notification_provider` (`id`) ON UPDATE CASCADE, + CONSTRAINT `notification_configuration_fk2` FOREIGN KEY (`user_id`) REFERENCES `user_info` (`id`) ON UPDATE CASCADE ON DELETE CASCADE +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; + +-- +-- Table `notification_provider_config_key` +-- + +DROP TABLE IF EXISTS `notification_provider_config_key`; +CREATE TABLE `notification_provider_config_key` ( + `id` int(11) unsigned NOT NULL AUTO_INCREMENT, + `notification_provider_id` int(11) unsigned NOT NULL, + `config_key` varchar(64) NOT NULL, + `description` varchar(256) DEFAULT NULL, + `is_required` bool NOT NULL DEFAULT 0, + 
`display_order` int(11) DEFAULT 0, + PRIMARY KEY (`id`), + UNIQUE KEY `notification_provider_config_key_u1` (`notification_provider_id`, `config_key`), + KEY `notification_provider_config_key_k1` (`notification_provider_id`), + CONSTRAINT `notification_provider_config_key_fk1` FOREIGN KEY (`notification_provider_id`) REFERENCES `notification_provider` (`id`) ON UPDATE CASCADE ON DELETE CASCADE +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; + +-- +-- Table `notification_configuration_setting` +-- + +DROP TABLE IF EXISTS `notification_configuration_setting`; +CREATE TABLE `notification_configuration_setting` ( + `id` int(11) unsigned NOT NULL AUTO_INCREMENT, + `notification_configuration_id` int(11) unsigned NOT NULL, + `notification_provider_config_key_id` int(11) unsigned NOT NULL, + `config_value` varchar(256) DEFAULT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `notification_configuration_setting_u1` (`notification_configuration_id`, `notification_provider_config_key_id`), + KEY `notification_configuration_setting_k1` (`notification_configuration_id`), + KEY `notification_configuration_setting_k2` (`notification_provider_config_key_id`), + CONSTRAINT `notification_configuration_setting_fk1` FOREIGN KEY (`notification_configuration_id`) REFERENCES `notification_configuration` (`id`) ON UPDATE CASCADE ON DELETE CASCADE, + CONSTRAINT `notification_configuration_setting_fk2` FOREIGN KEY (`notification_provider_config_key_id`) REFERENCES `notification_provider_config_key` (`id`) ON UPDATE CASCADE ON DELETE CASCADE +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; + +-- ============================================================================ +-- Notification Handler Configuration Tables +-- ============================================================================ + +-- Handler configuration key definitions +-- These define global handler settings like entry_updates, own_entry_edits +DROP TABLE IF EXISTS `notification_handler_config_key`; +CREATE TABLE 
`notification_handler_config_key` ( + `id` INT(11) UNSIGNED NOT NULL AUTO_INCREMENT, + `config_key` VARCHAR(64) NOT NULL COMMENT 'Key name like entry_updates', + `display_name` VARCHAR(64) DEFAULT NULL COMMENT 'Short display name for UI', + `description` VARCHAR(256) DEFAULT NULL, + `value_type` ENUM('boolean', 'string', 'integer') NOT NULL DEFAULT 'boolean', + `default_value` VARCHAR(256) DEFAULT NULL, + `display_order` INT(11) DEFAULT 0, + PRIMARY KEY (`id`), + UNIQUE KEY `notification_handler_config_key_u1` (`config_key`) +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; + +-- Per-configuration handler settings +-- Links notification_configuration to handler settings +DROP TABLE IF EXISTS `notification_configuration_handler_setting`; +CREATE TABLE `notification_configuration_handler_setting` ( + `id` INT(11) UNSIGNED NOT NULL AUTO_INCREMENT, + `notification_configuration_id` INT(11) UNSIGNED NOT NULL, + `notification_handler_config_key_id` INT(11) UNSIGNED NOT NULL, + `config_value` VARCHAR(256) NOT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `notification_configuration_handler_setting_u1` + (`notification_configuration_id`, `notification_handler_config_key_id`), + KEY `notification_configuration_handler_setting_k1` (`notification_configuration_id`), + KEY `notification_configuration_handler_setting_k2` (`notification_handler_config_key_id`), + CONSTRAINT `notification_configuration_handler_setting_fk1` + FOREIGN KEY (`notification_configuration_id`) + REFERENCES `notification_configuration` (`id`) + ON UPDATE CASCADE ON DELETE CASCADE, + CONSTRAINT `notification_configuration_handler_setting_fk2` + FOREIGN KEY (`notification_handler_config_key_id`) + REFERENCES `notification_handler_config_key` (`id`) + ON UPDATE CASCADE ON DELETE CASCADE +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; + +-- ============================================================================ +-- Initial Data Population +-- 
============================================================================ + +-- +-- Populate notification_provider +-- + +INSERT INTO `notification_provider` VALUES +(1,'apprise', 'Apprise unified notification library supporting email, Discord, Slack, Teams, etc.', '# Apprise Notification URLs + +[Apprise](https://github.com/caronc/apprise) supports many notification services. Below are some common URL formats. + +## Email (SMTP) + +``` +mailto://user@gmail.com +``` + +## Microsoft Teams + +``` +msteams://TokenA/TokenB/TokenC +``` + +## Slack + +``` +slack://TokenA/TokenB/TokenC/#channel +``` + +## Custom Webhooks + +``` +json://hostname/path +``` + +For the full list of supported services, see the [Apprise Wiki](https://github.com/caronc/apprise/wiki).'); +-- +-- Populate notification_handler_config_key +-- + +INSERT INTO `notification_handler_config_key` VALUES +(1,'entry_updates', 'Entry Updates', 'Notify document owner when log entries in their document are updated or deleted by others', 'boolean', 'true', 1), +(2,'entry_replies', 'Replies to My Entries', 'Notify original entry creator when someone replies to their log entry, or when replies are updated/deleted on their entry', 'boolean', 'true', 2), +(3,'new_entries', 'New Entries', 'Notify document owner when new log entries are created in their document (excluding entries they create themselves)', 'boolean', 'true', 3), +(4,'document_replies', 'All Document Replies', 'Notify document owner when replies are added, updated, or deleted anywhere in their document', 'boolean', 'true', 4), +(5,'reactions', 'Entry Reactions', 'Notify entry creator when someone adds or removes a reaction (like emoji) to their entry', 'boolean', 'true', 5), +(6,'own_entry_edits', 'Edits to My Entries', 'Notify users when their own entries (or replies they created) are edited or deleted by someone else', 'boolean', 'true', 6); \ No newline at end of file diff --git a/etc/bely-mqtt-template b/etc/bely-mqtt-template new file mode 
100644 index 000000000..06b37c5ea --- /dev/null +++ b/etc/bely-mqtt-template @@ -0,0 +1,157 @@ +keys: + - name: topic + type: string + color: + foreground: crimson + background: black + max-width: 25 + - name: description + type: string + color: + foreground: white + background: black + max-width: 25 + - name: entityId + type: string + color: + foreground: white + background: black + max-width: 25 + - name: entityName + type: string + color: + foreground: white + background: black + max-width: 25 + - name: eventTimestamp + type: string + color: + foreground: limegreen + background: black + max-width: 25 + - name: eventTriggedByUsername + type: string + color: + foreground: white + background: black + max-width: 25 + - name: logInfo/enteredByUsername + type: string + color: + foreground: white + background: black + max-width: 25 + - name: logInfo/enteredOnDateTime + type: string + color: + foreground: limegreen + background: black + max-width: 25 + - name: logInfo/id + type: string + color: + foreground: white + background: black + max-width: 25 + - name: logInfo/lastModifiedByUsername + type: string + color: + foreground: white + background: black + max-width: 25 + - name: logInfo/lastModifiedOnDateTime + type: string + color: + foreground: limegreen + background: black + max-width: 25 + - name: logbookList + type: string + color: + foreground: white + background: black + max-width: 25 + - name: parentLogDocumentInfo/createdByUsername + type: string + color: + foreground: white + background: black + max-width: 25 + - name: parentLogDocumentInfo/enteredOnDateTime + type: string + color: + foreground: limegreen + background: black + max-width: 25 + - name: parentLogDocumentInfo/id + type: string + color: + foreground: white + background: black + max-width: 25 + - name: parentLogDocumentInfo/lastModifiedByUsername + type: string + color: + foreground: white + background: black + max-width: 25 + - name: parentLogDocumentInfo/lastModifiedOnDateTime + type: string + 
color: + foreground: limegreen + background: black + max-width: 25 + - name: parentLogDocumentInfo/name + type: string + color: + foreground: white + background: black + max-width: 25 + - name: parentLogDocumentInfo/ownerUserGroupName + type: string + color: + foreground: white + background: black + max-width: 25 + - name: parentLogDocumentInfo/ownerUsername + type: string + color: + foreground: white + background: black + max-width: 25 + - name: parentlogInfo/enteredByUsername + type: string + color: + foreground: white + background: black + max-width: 25 + - name: parentlogInfo/enteredOnDateTime + type: string + color: + foreground: limegreen + background: black + max-width: 25 + - name: parentlogInfo/id + type: string + color: + foreground: white + background: black + max-width: 25 + - name: parentlogInfo/lastModifiedByUsername + type: string + color: + foreground: white + background: black + max-width: 25 + - name: parentlogInfo/lastModifiedOnDateTime + type: string + color: + foreground: limegreen + background: black + max-width: 25 + - name: textDiff + type: string + color: + foreground: white + background: black + max-width: 25 diff --git a/sbin/bely_configure_mqtt_service.sh b/sbin/bely_configure_mqtt_service.sh new file mode 100755 index 000000000..727d275d4 --- /dev/null +++ b/sbin/bely_configure_mqtt_service.sh @@ -0,0 +1,165 @@ +#!/bin/bash + +# Copyright (c) UChicago Argonne, LLC. All rights reserved. +# See LICENSE file. 
+ + +# +# Script used for configuring MQTT connector for Bely webapp +# Deploys MQTT Resource Adapter and creates connection pool/resource +# +# Usage: +# +# $0 [mqtt_config_file] +# +# If no config file is specified, defaults to $LOGR_INSTALL_DIR/etc/mqtt.conf +# +# Sample MQTT configuration file contents (mqtt.conf): +# MQTT_HOST=localhost # MQTT broker hostname (default: localhost) +# MQTT_PORT=1883 # MQTT broker port (default: 1883) +# MQTT_USERNAME=admin # MQTT username (optional) +# MQTT_PASSWORD=admin # MQTT password (optional) +# MQTT_CLEAN_SESSION=true # Clean session flag (optional) +# MQTT_QOS=1 # Quality of Service level (optional) +# MQTT_KEEP_ALIVE_INTERVAL=60 # Keep alive interval in seconds (optional) +# MQTT_CONNECTION_TIMEOUT=30 # Connection timeout in seconds (optional) +# MQTT_MAX_INFLIGHT=10 # Maximum number of messages in flight (optional) +# MQTT_AUTOMATIC_RECONNECT=true # Automatic reconnection on disconnect (optional) +# MQTT_FILE_PERSISTANCE=false # Enable file-based message persistence (optional) +# MQTT_PERSISTENCE_DIRECTORY=. # Directory for persistent message storage (optional) +# MQTT_TOPIC_FILTER # MQTT topic filter for subscriptions (optional) + +MY_DIR=`dirname $0` && cd $MY_DIR && MY_DIR=`pwd` +if [ -z "${BELY_ROOT_DIR}" ]; then + BELY_ROOT_DIR=$MY_DIR/.. +fi +BELY_ENV_FILE=${BELY_ROOT_DIR}/setup.sh +if [ ! -f ${BELY_ENV_FILE} ]; then + echo "Environment file ${BELY_ENV_FILE} does not exist." + exit 2 +fi +. ${BELY_ENV_FILE} > /dev/null + +# Constants +MQTT_POOL_NAME="bely/MQTT/pool" +MQTT_RESOURCE_NAME="bely/MQTT/resource" +MQTT_RAR_NAME="mqtt-rar-0.8.0" +MQTT_CONNECTOR_RAR_DEPLOYMENT_NAME="mqtt-rar-deployment" +MQTT_RAR_PATH=$BELY_ROOT_DIR/src/lib/${MQTT_RAR_NAME}.rar + +# Look for MQTT configuration file +if [ ! -z "$1" ]; then + mqttConfigFile=$1 +else + mqttConfigFile=$LOGR_INSTALL_DIR/etc/mqtt.conf +fi + +if [ -f $mqttConfigFile ]; then + echo "Using MQTT config file: $mqttConfigFile" + . 
$mqttConfigFile +else + echo "Error: MQTT config file $mqttConfigFile not found." + echo "You can create one using bely_create_mqtt_configuration.sh" + exit 1 +fi + +BELY_HOST_ARCH=$(uname -sm | tr -s '[:upper:][:blank:]' '[:lower:][\-]') +GLASSFISH_DIR=$LOGR_SUPPORT_DIR/payara/$BELY_HOST_ARCH + +ASADMIN_CMD=$GLASSFISH_DIR/bin/asadmin + +# MQTT Configuration defaults +MQTT_HOST=${MQTT_HOST:=localhost} +MQTT_PORT=${MQTT_PORT:=1883} + +# Build properties string for connection pool +PROPERTIES="serverURIs=tcp\\://${MQTT_HOST}\\:${MQTT_PORT}" + +if [ ! -z "$MQTT_USERNAME" ]; then + PROPERTIES="${PROPERTIES}:userName=${MQTT_USERNAME}" +fi + +if [ ! -z "$MQTT_PASSWORD" ]; then + PROPERTIES="${PROPERTIES}:password=${MQTT_PASSWORD}" +fi + +if [ ! -z "$MQTT_CLEAN_SESSION" ]; then + PROPERTIES="${PROPERTIES}:cleanSession=${MQTT_CLEAN_SESSION}" +fi + +if [ ! -z "$MQTT_KEEP_ALIVE_INTERVAL" ]; then + PROPERTIES="${PROPERTIES}:keepAliveInterval=${MQTT_KEEP_ALIVE_INTERVAL}" +fi + +if [ ! -z "$MQTT_CONNECTION_TIMEOUT" ]; then + PROPERTIES="${PROPERTIES}:connectionTimeout=${MQTT_CONNECTION_TIMEOUT}" +fi + +if [ ! -z "$MQTT_MAX_INFLIGHT" ]; then + PROPERTIES="${PROPERTIES}:maxInflight=${MQTT_MAX_INFLIGHT}" +fi + +if [ ! -z "$MQTT_AUTOMATIC_RECONNECT" ]; then + PROPERTIES="${PROPERTIES}:automaticReconnect=${MQTT_AUTOMATIC_RECONNECT}" +fi + +if [ ! -z "$MQTT_FILE_PERSISTANCE" ]; then + PROPERTIES="${PROPERTIES}:filePersistance=${MQTT_FILE_PERSISTANCE}" +fi + +if [ ! -z "$MQTT_PERSISTENCE_DIRECTORY" ]; then + PROPERTIES="${PROPERTIES}:persistenceDirectory=${MQTT_PERSISTENCE_DIRECTORY}" +fi + +if [ ! -z "$MQTT_QOS" ]; then + PROPERTIES="${PROPERTIES}:qos=${MQTT_QOS}" +fi + +if [ ! 
-z "$MQTT_TOPIC_FILTER" ]; then + PROPERTIES="${PROPERTIES}:topicFilter=${MQTT_TOPIC_FILTER}" +fi + +# Deploy MQTT RAR +echo "Deploying MQTT RAR" +if [ -f "$MQTT_RAR_PATH" ]; then + # Check if already deployed and undeploy if needed + $ASADMIN_CMD list-applications | grep -q ${MQTT_CONNECTOR_RAR_DEPLOYMENT_NAME} && { + echo "Undeploying existing MQTT RAR" + # Check if resource exists and delete it + $ASADMIN_CMD list-connector-resources | grep -q ${MQTT_RESOURCE_NAME} && { + echo "Deleting existing MQTT resource" + $ASADMIN_CMD delete-connector-resource ${MQTT_RESOURCE_NAME} || exit 1 + } + + # Check if connection pool exists and delete it + $ASADMIN_CMD list-connector-connection-pools | grep -q ${MQTT_POOL_NAME} && { + echo "Deleting existing MQTT connection pool" + $ASADMIN_CMD delete-connector-connection-pool ${MQTT_POOL_NAME} || exit 1 + } + + echo "Undeploying existing MQTT RAR" + $ASADMIN_CMD undeploy ${MQTT_CONNECTOR_RAR_DEPLOYMENT_NAME} || exit 1 + } + $ASADMIN_CMD deploy --name ${MQTT_CONNECTOR_RAR_DEPLOYMENT_NAME} $MQTT_RAR_PATH || exit 1 +else + echo "Warning: MQTT RAR file not found at $MQTT_RAR_PATH" + exit 1 +fi + +# Create MQTT connection pool +echo "Creating MQTT connection pool ${MQTT_POOL_NAME}" +$ASADMIN_CMD create-connector-connection-pool \ + --raname ${MQTT_CONNECTOR_RAR_DEPLOYMENT_NAME} \ + --connectiondefinition fish.payara.cloud.connectors.mqtt.api.MQTTConnectionFactory \ + --property "${PROPERTIES}" \ + ${MQTT_POOL_NAME} || exit 1 +# Create MQTT resource +echo "Creating MQTT resource ${MQTT_RESOURCE_NAME}" +$ASADMIN_CMD create-connector-resource \ + --poolname ${MQTT_POOL_NAME} \ + ${MQTT_RESOURCE_NAME} || exit 1 +# Test MQTT connection pool +echo "Testing MQTT connection pool" +$ASADMIN_CMD ping-connection-pool ${MQTT_POOL_NAME} || { echo "Warning: MQTT connection pool ping failed"; exit 1; } + +echo "Restart or redeploy BELY." 
\ No newline at end of file diff --git a/sbin/bely_create_mqtt_configuration.sh b/sbin/bely_create_mqtt_configuration.sh new file mode 100755 index 000000000..bd9ffde1d --- /dev/null +++ b/sbin/bely_create_mqtt_configuration.sh @@ -0,0 +1,133 @@ +#!/bin/bash + +# Script to create MQTT configuration file for Bely service + +MY_DIR=`dirname $0` && cd $MY_DIR && MY_DIR=`pwd` +if [ -z "${BELY_ROOT_DIR}" ]; then + BELY_ROOT_DIR=$MY_DIR/.. +fi +BELY_ENV_FILE=${BELY_ROOT_DIR}/setup.sh +if [ ! -f ${BELY_ENV_FILE} ]; then + echo "Environment file ${BELY_ENV_FILE} does not exist." + exit 2 +fi +. ${BELY_ENV_FILE} > /dev/null + +# Default configuration file location +MQTT_CONFIG_FILE=${LOGR_INSTALL_DIR}/etc/mqtt.conf + +echo "===================================" +echo "MQTT Configuration Setup for Bely" +echo "===================================" +echo "" +echo "This script will help you create an MQTT configuration file." +echo "Configuration will be saved to: $MQTT_CONFIG_FILE" +echo "" + +# Check if config file already exists +if [ -f "$MQTT_CONFIG_FILE" ]; then + read -p "Configuration file already exists. Overwrite? (y/n): " -n 1 -r + echo + if [[ ! $REPLY =~ ^[Yy]$ ]]; then + echo "Exiting without changes." + exit 0 + fi +fi + +echo "" +echo "Configuration Details:" +echo "- cleanSession: Whether client and server should remember state across reconnects" +echo "- automaticReconnect: Whether client will automatically reconnect if connection is lost" +echo "- filePersistance: Whether to use file persistence for un-acknowledged messages" +echo "- persistenceDirectory: Directory to use for file persistence" +echo "- connectionTimeout: Connection timeout value in seconds" +echo "- maxInflight: Maximum messages that can be sent without acknowledgements" +echo "- keepAliveInterval: Keep alive interval in seconds" +echo "- userName/password: Authentication credentials" +# Disable MDB only variables. 
+# echo "- topicFilter: Topic Filter (For MDBs only)" +# echo "- qos: Quality of Service for the subscription (For MDBs only)" +echo "" + +# Ensure directory exists +mkdir -p $(dirname "$MQTT_CONFIG_FILE") + +# Prompt for configuration values +read -p "MQTT Host [localhost]: " MQTT_HOST +MQTT_HOST=${MQTT_HOST:-localhost} + +read -p "MQTT Port [1883]: " MQTT_PORT +MQTT_PORT=${MQTT_PORT:-1883} + +read -p "MQTT Username (leave empty for no auth): " MQTT_USERNAME + +if [ ! -z "$MQTT_USERNAME" ]; then + read -s -p "MQTT Password: " MQTT_PASSWORD + echo +fi + +read -p "Clean Session (true/false) [false]: " CLEAN_SESSION +CLEAN_SESSION=${CLEAN_SESSION:-false} + +read -p "Automatic Reconnect (true/false) [true]: " AUTOMATIC_RECONNECT +AUTOMATIC_RECONNECT=${AUTOMATIC_RECONNECT:-true} + +read -p "File Persistance (true/false) [false]: " FILE_PERSISTANCE +FILE_PERSISTANCE=${FILE_PERSISTANCE:-false} + +read -p "Persistence Directory [.]: " PERSISTENCE_DIRECTORY +PERSISTENCE_DIRECTORY=${PERSISTENCE_DIRECTORY:-.} + +read -p "Connection Timeout (seconds) [30]: " CONNECTION_TIMEOUT +CONNECTION_TIMEOUT=${CONNECTION_TIMEOUT:-30} + +read -p "Max Inflight [10]: " MAX_INFLIGHT +MAX_INFLIGHT=${MAX_INFLIGHT:-10} + +read -p "Keep Alive Interval (seconds) [60]: " KEEP_ALIVE_INTERVAL +KEEP_ALIVE_INTERVAL=${KEEP_ALIVE_INTERVAL:-60} + +# MDB only variables +# read -p "Topic Filter (leave empty if not using MDB): " TOPIC_FILTER +# read -p "QoS (0/1/2) [0]: " QOS +# QOS=${QOS:-0} + +# Write configuration file +cat > "$MQTT_CONFIG_FILE" << EOF +# MQTT Configuration for Bely Service +# Generated on $(date) + +MQTT_HOST=$MQTT_HOST +MQTT_PORT=$MQTT_PORT +EOF + +if [ ! -z "$MQTT_USERNAME" ]; then + echo "MQTT_USERNAME=$MQTT_USERNAME" >> "$MQTT_CONFIG_FILE" +fi + +if [ ! 
-z "$MQTT_PASSWORD" ]; then + echo "MQTT_PASSWORD=$MQTT_PASSWORD" >> "$MQTT_CONFIG_FILE" +fi + +cat >> "$MQTT_CONFIG_FILE" << EOF +MQTT_CLEAN_SESSION=$CLEAN_SESSION +MQTT_AUTOMATIC_RECONNECT=$AUTOMATIC_RECONNECT +MQTT_FILE_PERSISTANCE=$FILE_PERSISTANCE +MQTT_PERSISTENCE_DIRECTORY=$PERSISTENCE_DIRECTORY +MQTT_CONNECTION_TIMEOUT=$CONNECTION_TIMEOUT +MQTT_MAX_INFLIGHT=$MAX_INFLIGHT +MQTT_KEEP_ALIVE_INTERVAL=$KEEP_ALIVE_INTERVAL +EOF + +if [ ! -z "$TOPIC_FILTER" ]; then + echo "MQTT_TOPIC_FILTER=$TOPIC_FILTER" >> "$MQTT_CONFIG_FILE" +fi + +if [ ! -z "$QOS" ]; then + echo "MQTT_QOS=$QOS" >> "$MQTT_CONFIG_FILE" +fi + +echo "" +echo "Configuration file created successfully at: $MQTT_CONFIG_FILE" +echo "" +echo "You can now run bely_configure_mqtt_service.sh to apply this configuration." \ No newline at end of file diff --git a/sbin/bely_release_bely_api_pip.py b/sbin/bely_release_bely_api_pip.py new file mode 100755 index 000000000..034316540 --- /dev/null +++ b/sbin/bely_release_bely_api_pip.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python + +""" +Copyright (c) UChicago Argonne, LLC. All rights reserved. +See LICENSE file. 
+""" + +import os +from pathlib import Path +import shutil + +DIST_ROOT_DIRECTORY_ENV_KEY = "LOGR_ROOT_DIR" +PYTHON_SRC_DIST_PATH = 'tools/developer_tools/python-client' +DIST_VERSION_FILE_PATH = 'etc/version' +PYTHON_API_SETUP_FILE = 'setup-api.py' + +rootDir = os.getenv(DIST_ROOT_DIRECTORY_ENV_KEY) +if rootDir is None: + raise EnvironmentError('Please run setup.sh from the root directory of the bely distribution.') + + +def getDistVersion(): + versionFilePath = '%s/%s' % (rootDir, DIST_VERSION_FILE_PATH) + return open(versionFilePath, 'r').read().split('\n')[0] + +def publish_api(setup_file): + setupFilePath = "%s/%s/%s" % (rootDir, PYTHON_SRC_DIST_PATH, setup_file) + setupFile = open(setupFilePath, 'r') + + projectName = "" + + for line in setupFile.readlines(): + if 'name=' in line: + projectName = line.split("'")[1] + if 'version=' in line: + versionLineSplit = line.split("'") + versionNumber = versionLineSplit[1] + + os.chdir('%s/%s' % (rootDir, PYTHON_SRC_DIST_PATH)) + + p = Path('setup.py') + p.symlink_to("./%s" % setup_file) + + buildExit = os.system('python setup.py sdist') + + # Clean up + if os.path.exists('./setup.py'): + os.remove('./setup.py') + + if buildExit == 0: + # Clean up + egg_info_file_path = './%s.egg-info' % projectName.replace("-", "_") + if os.path.exists(egg_info_file_path): + shutil.rmtree(egg_info_file_path) + + tarFileName = '%s-%s.tar.gz' % (projectName, versionNumber) + + # Attempt to upload to pip + distFilePath = 'dist/%s' % tarFileName + return distFilePath + +new_api_generator_script = "%s/%s/%s http://localhost:8080/bely" % (rootDir, PYTHON_SRC_DIST_PATH, 'generatePyClient.sh') +generation_exit = os.system(new_api_generator_script) + +if generation_exit == 0: + api_bin_path = publish_api(PYTHON_API_SETUP_FILE) + + print(api_bin_path) + + pipExit = os.system('twine upload %s' % api_bin_path) diff --git a/sbin/bely_update_mqtt_connector_build.sh b/sbin/bely_update_mqtt_connector_build.sh new file mode 100755 index 
000000000..ff5756de4 --- /dev/null +++ b/sbin/bely_update_mqtt_connector_build.sh @@ -0,0 +1,64 @@ +#!/bin/bash + +# Copyright (c) UChicago Argonne, LLC. All rights reserved. +# See LICENSE file. + + +# +# Script downloads builds cloud connectors. Updates the one included with the repo. +# +# Usage: +# +# $0 +# + +MY_DIR=`dirname $0` && cd $MY_DIR && MY_DIR=`pwd` +if [ -z "${BELY_ROOT_DIR}" ]; then + BELY_ROOT_DIR=$MY_DIR/.. +fi +BELY_ENV_FILE=${BELY_ROOT_DIR}/setup.sh +if [ ! -f ${BELY_ENV_FILE} ]; then + echo "Environment file ${BELY_ENV_FILE} does not exist." + exit 2 +fi +. ${BELY_ENV_FILE} > /dev/null + +# Constants +CONNECTOR_URL=https://github.com/payara/Cloud-Connectors/archive/refs/tags/0.8.0.tar.gz +TARGET_FILE_NAME=mqtt-rar-0.8.0.rar +TARGET_FILE_PATH=MQTT/MQTTRAR/target +DEST_DIR="${BELY_ROOT_DIR}/src/lib" + +# Create temporary directory +TMP_DIR=$(mktemp -d) +cd $TMP_DIR + +# Download and extract connector +curl -L --progress-bar $CONNECTOR_URL -o connector.tar.gz +tar -xzf connector.tar.gz + +# Find extracted directory and build +EXTRACTED_DIR=$(find . 
-maxdepth 1 -type d -name "Cloud-Connectors-*" | head -1) +cd $EXTRACTED_DIR + +# Run maven build +mvn clean install -DskipTests + +# Find and print the newly built target +TARGET_FILE="$TARGET_FILE_PATH/$TARGET_FILE_NAME" +if [ -f "$TARGET_FILE" ]; then + echo "Found target: $TARGET_FILE" + ls -la "$TARGET_FILE" + + # Copy to root/src/lib + cp "$TARGET_FILE" "$DEST_DIR/" + echo "Copied $TARGET_FILE_NAME to $DEST_DIR" +else + echo "Target file not found: $TMP_DIR/$TARGET_FILE" + exit 1 +fi + +# Clean up temporary directory +cd $MY_DIR +rm -rf $TMP_DIR +echo "Cleaned up temporary directory: $TMP_DIR" diff --git a/sbin/cdb_create_configuration_openssl.sh b/sbin/cdb_create_configuration_openssl.sh index cd8f48b59..9df68b510 100755 --- a/sbin/cdb_create_configuration_openssl.sh +++ b/sbin/cdb_create_configuration_openssl.sh @@ -13,26 +13,26 @@ # MY_DIR=`dirname $0` && cd $MY_DIR && MY_DIR=`pwd` -if [ -z "${CDB_ROOT_DIR}" ]; then - CDB_ROOT_DIR=$MY_DIR/.. +if [ -z "${LOGR_ROOT_DIR}" ]; then + LOGR_ROOT_DIR=$MY_DIR/.. fi -CDB_ENV_FILE=${CDB_ROOT_DIR}/setup.sh -if [ ! -f ${CDB_ENV_FILE} ]; then - echo "Environment file ${CDB_ENV_FILE} does not exist." +LOGR_ENV_FILE=${LOGR_ROOT_DIR}/setup.sh +if [ ! -f ${LOGR_ENV_FILE} ]; then + echo "Environment file ${LOGR_ENV_FILE} does not exist." exit 2 fi -. ${CDB_ENV_FILE} > /dev/null +. ${LOGR_ENV_FILE} > /dev/null -CDB_ETC_DIR=$CDB_INSTALL_DIR/etc +LOGR_ETC_DIR=$LOGR_INSTALL_DIR/etc -OPENSSL_CONFIG_FILE=$CDB_ETC_DIR/cdb.openssl.cnf +OPENSSL_CONFIG_FILE=$LOGR_ETC_DIR/cdb.openssl.cnf if [ -f $OPENSSL_CONFIG_FILE ]; then echo "Configuration file $OPENSSL_CONFIG_FILE has already been created" exit 0; fi -OPEN_SSL_TEMPLATE_FILE=$CDB_ROOT_DIR/etc/cdb.openssl.cnf.template +OPEN_SSL_TEMPLATE_FILE=$LOGR_ROOT_DIR/etc/cdb.openssl.cnf.template echo "Creating the cdb openSSL configuration file." 
read -p "Enter the organizational name [company]: " SSL_ORG_NAME diff --git a/sbin/cdb_create_db.sh b/sbin/cdb_create_db.sh index 040ae74c7..11488b7e7 100755 --- a/sbin/cdb_create_db.sh +++ b/sbin/cdb_create_db.sh @@ -192,9 +192,11 @@ STATIC_DB_SCRIPTS_DIR="$LOGR_SQL_DIR/static" cd $CURRENT_DIR && cd $STATIC_DB_SCRIPTS_DIR STATIC_LOGR_DB_TABLES="\ setting_type \ - domain \ + domain \ relationship_type_handler \ relationship_type \ + notification_provider \ + notification_handler_config_key \ " executePopulateScripts "$STATIC_LOGR_DB_TABLES" @@ -281,7 +283,7 @@ if [ -z "$adminWithLocalPassword" ]; then read -sp "Enter password for local portal admin (username: logr): [leave blank for no local password] " LOGR_LOCAL_SYSTEM_ADMIN_PASSWORD echo "" if [ ! -z "$LOGR_LOCAL_SYSTEM_ADMIN_PASSWORD" ]; then - adminCryptPassword=`python -c "from cdb.common.utility.cryptUtility import CryptUtility; print(str(CryptUtility.cryptPasswordWithPbkdf2('$CDB_LOCAL_SYSTEM_ADMIN_PASSWORD')))"` + adminCryptPassword=`python -c "from cdb.common.utility.cryptUtility import CryptUtility; print(str(CryptUtility.cryptPasswordWithPbkdf2('$LOGR_LOCAL_SYSTEM_ADMIN_PASSWORD')))"` echo "update user_info set password = '$adminCryptPassword' where username='logr'" > temporaryAdminCommand.sql execute $mysqlCmd temporaryAdminCommand.sql fi diff --git a/sbin/cdb_deploy_mysqld.sh b/sbin/cdb_deploy_mysqld.sh index 69513b654..ce8f885b9 100755 --- a/sbin/cdb_deploy_mysqld.sh +++ b/sbin/cdb_deploy_mysqld.sh @@ -24,7 +24,7 @@ fi . ${LOGR_ENV_FILE} > /dev/null # Use first argument as db name, if provided -LOGR_DB_NAME=${LOGR_DB_NAME:=cdb} +LOGR_DB_NAME=${LOGR_DB_NAME:=logr} if [ ! -z "$1" ]; then LOGR_DB_NAME=$1 fi diff --git a/sbin/cdb_release_cdb_api_pip.py b/sbin/cdb_release_cdb_api_pip.py deleted file mode 100755 index ff768d762..000000000 --- a/sbin/cdb_release_cdb_api_pip.py +++ /dev/null @@ -1,119 +0,0 @@ -#!/usr/bin/env python - -""" -Copyright (c) UChicago Argonne, LLC. All rights reserved. 
-See LICENSE file. -""" - -import os -from pathlib import Path -import shutil - -DIST_ROOT_DIRECTORY_ENV_KEY = "CDB_ROOT_DIR" -PYTHON_SRC_DIST_PATH = 'tools/developer_tools/python-client' -DIST_VERSION_FILE_PATH = 'etc/version' -PYTHON_API_SETUP_FILE = 'setup-api.py' -PYTHON_CLI_SETUP_FILE = 'setup-cli.py' - -rootDir = os.getenv(DIST_ROOT_DIRECTORY_ENV_KEY) -if rootDir is None: - raise EnvironmentError('Please run setup.sh from the root directory of the cdb distribution.') - - -def getDistVersion(): - versionFilePath = '%s/%s' % (rootDir, DIST_VERSION_FILE_PATH) - return open(versionFilePath, 'r').read().split('\n')[0] - -def publish_api(setup_file): - setupFilePath = "%s/%s/%s" % (rootDir, PYTHON_SRC_DIST_PATH, setup_file) - setupFile = open(setupFilePath, 'r') - - newSetupFileContents = "" - - projectName = "" - - for line in setupFile.readlines(): - if 'name=' in line: - projectName = line.split("'")[1] - if 'version=' in line: - versionLineSplit = line.split("'") - versionNumber = versionLineSplit[1] - - '''Code commented out aides in automatically generating the next dev version... 
Will handle manually for now.''' - # releaseDev = ('.DEV' in getDistVersion()) - - # if releaseDev: - # if 'dev' in versionNumber: - # # A previous dev release exists - # devVersionSplit = versionNumber.split('dev') - # devVersion = int(devVersionSplit[1]) + 1 - # versionNumber = devVersionSplit[0] - # versionNumber += 'dev' + str(devVersion) - # else: - # # First dev release for the planned release - # versionNumber = getDistVersion().lower() + "0" - # else: - # # Production release of cdb_api - # versionNumber = getDistVersion() - - # response = input('Continue with releasing version [%s] of cdb_api to pip: [Y/n]: ' % versionNumber) - - # if response == '' or response.lower() == 'y': - # line = versionLineSplit[0] + "'" - # line += versionNumber + "'" - # line += versionLineSplit[2] - # elif response.lower() == 'n': - # print('Exiting script') - # quit() - # else: - # raise IOError("Invalid input given %s" % response) - - # newSetupFileContents += line - - # setupFile = open(setupFilePath, 'r') - # originalSetupFileContents = setupFile.read() - - # setupFile = open(setupFilePath, 'w+') - # setupFile.write(newSetupFileContents) - # setupFile.close() - - - os.chdir('%s/%s' % (rootDir, PYTHON_SRC_DIST_PATH)) - - p = Path('setup.py') - p.symlink_to("./%s" % setup_file) - - buildExit = os.system('python setup.py sdist') - - # Clean up - if os.path.exists('./setup.py'): - os.remove('./setup.py') - - if buildExit == 0: - # Clean up - egg_info_file_path = './%s.egg-info' % projectName.replace("-", "_") - if os.path.exists(egg_info_file_path): - shutil.rmtree(egg_info_file_path) - - tarFileName = '%s-%s.tar.gz' % (projectName, versionNumber) - - # Attempt to upload to pip - distFilePath = 'dist/%s' % tarFileName - return distFilePath - -new_api_generator_script = "%s/%s/%s http://localhost:8080/cdb" % (rootDir, PYTHON_SRC_DIST_PATH, 'generatePyClient.sh') -generation_exit = os.system(new_api_generator_script) - -if generation_exit == 0: - api_bin_path = 
publish_api(PYTHON_API_SETUP_FILE) - cli_bin_path = publish_api(PYTHON_CLI_SETUP_FILE) - - print("%s %s" % (api_bin_path, cli_bin_path)) - - pipExit = os.system('twine upload %s %s' % (api_bin_path, cli_bin_path)) - - '''Code commented out aides in automatically generating the next dev version... Will handle manually for now.''' - # if pipExit != 0: - # setupFile = open(setupFilePath, 'w') - # setupFile.write(originalSetupFileContents) - # setupFile.close() \ No newline at end of file diff --git a/src/java/LogrPortal/lib/flexmark-ext-tables-0.64.8.jar b/src/java/LogrPortal/lib/flexmark-ext-tables-0.64.8.jar new file mode 100644 index 000000000..9201e31bf Binary files /dev/null and b/src/java/LogrPortal/lib/flexmark-ext-tables-0.64.8.jar differ diff --git a/src/java/LogrPortal/lib/jakarta.resource-api-2.1.0.jar b/src/java/LogrPortal/lib/jakarta.resource-api-2.1.0.jar new file mode 100644 index 000000000..5a5d90897 Binary files /dev/null and b/src/java/LogrPortal/lib/jakarta.resource-api-2.1.0.jar differ diff --git a/src/java/LogrPortal/lib/log4j-api-2.17.0.jar b/src/java/LogrPortal/lib/log4j-api-2.17.0.jar deleted file mode 100644 index e39dab0d3..000000000 Binary files a/src/java/LogrPortal/lib/log4j-api-2.17.0.jar and /dev/null differ diff --git a/src/java/LogrPortal/lib/log4j-api-2.25.3.jar b/src/java/LogrPortal/lib/log4j-api-2.25.3.jar new file mode 100644 index 000000000..640998bb3 Binary files /dev/null and b/src/java/LogrPortal/lib/log4j-api-2.25.3.jar differ diff --git a/src/java/LogrPortal/lib/log4j-core-2.17.0.jar b/src/java/LogrPortal/lib/log4j-core-2.17.0.jar deleted file mode 100644 index b853057e9..000000000 Binary files a/src/java/LogrPortal/lib/log4j-core-2.17.0.jar and /dev/null differ diff --git a/src/java/LogrPortal/lib/log4j-core-2.25.3.jar b/src/java/LogrPortal/lib/log4j-core-2.25.3.jar new file mode 100644 index 000000000..a26e5b016 Binary files /dev/null and b/src/java/LogrPortal/lib/log4j-core-2.25.3.jar differ diff --git 
a/src/java/LogrPortal/lib/mqtt-jca-api-1.0.0.jar b/src/java/LogrPortal/lib/mqtt-jca-api-1.0.0.jar new file mode 100644 index 000000000..fa423699f Binary files /dev/null and b/src/java/LogrPortal/lib/mqtt-jca-api-1.0.0.jar differ diff --git a/src/java/LogrPortal/nbproject/build-impl.xml b/src/java/LogrPortal/nbproject/build-impl.xml index f7b8a2414..554ad7f4c 100644 --- a/src/java/LogrPortal/nbproject/build-impl.xml +++ b/src/java/LogrPortal/nbproject/build-impl.xml @@ -17,7 +17,7 @@ - cleanup --> - + @@ -470,7 +470,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -618,7 +618,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -1026,8 +1026,8 @@ exists or setup the property manually. For example like this: - - + + @@ -1041,6 +1041,9 @@ exists or setup the property manually. For example like this: + + + @@ -1089,8 +1092,8 @@ exists or setup the property manually. For example like this: - - + + @@ -1104,6 +1107,9 @@ exists or setup the property manually. For example like this: + + + diff --git a/src/java/LogrPortal/nbproject/build-impl.xml~ b/src/java/LogrPortal/nbproject/build-impl.xml~ index f144c2662..f7b8a2414 100644 --- a/src/java/LogrPortal/nbproject/build-impl.xml~ +++ b/src/java/LogrPortal/nbproject/build-impl.xml~ @@ -17,7 +17,7 @@ - cleanup --> - + @@ -49,36 +49,6 @@ - - - - - - - - - - - - - - - - - - - Must set platform.home - Must set platform.bootcp - Must set platform.java - Must set platform.javac - - The J2SE Platform is not correctly set up. - Your active platform is: ${platform.active}, but the corresponding property "platforms.${platform.active}.home" is not found in the project's properties files. - Either open the project in the IDE and setup the Platform with the same name or add it manually. 
- For example like this: - ant -Duser.properties.file=<path_to_property_file> jar (where you put the property "platforms.${platform.active}.home" in a .properties file) - or ant -Dplatforms.${platform.active}.home=<path_to_JDK_home> jar (where no properties file is used) - @@ -220,6 +190,15 @@ + + + + + + + + + @@ -314,7 +293,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -351,7 +330,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -428,7 +407,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -451,7 +430,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -491,7 +470,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -569,7 +548,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -593,7 +572,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -639,7 +618,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -716,7 +695,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -748,9 +727,6 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - - - @@ -785,7 +761,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -1293,9 +1269,6 @@ exists or setup the property manually. For example like this: - - - @@ -1418,7 +1391,7 @@ exists or setup the property manually. 
For example like this: --> - + diff --git a/src/java/LogrPortal/nbproject/genfiles.properties b/src/java/LogrPortal/nbproject/genfiles.properties index 1a378891c..630d6a289 100644 --- a/src/java/LogrPortal/nbproject/genfiles.properties +++ b/src/java/LogrPortal/nbproject/genfiles.properties @@ -3,9 +3,9 @@ build.xml.script.CRC32=67492cbd build.xml.stylesheet.CRC32=1707db4f@1.94.0.1 # This file is used by a NetBeans-based IDE to track changes in generated files such as build-impl.xml. # Do not edit this file. You may delete it but then the IDE will never regenerate such files for you. -nbproject/build-impl.xml.data.CRC32=69e278e8 -nbproject/build-impl.xml.script.CRC32=709f92a3 -nbproject/build-impl.xml.stylesheet.CRC32=334708a0@1.91.0.1 +nbproject/build-impl.xml.data.CRC32=26f30e3e +nbproject/build-impl.xml.script.CRC32=e243752e +nbproject/build-impl.xml.stylesheet.CRC32=334708a0@1.94.0.1 nbproject/rest-build.xml.data.CRC32=83e13ee9 nbproject/rest-build.xml.script.CRC32=0d1fb1b4 nbproject/rest-build.xml.stylesheet.CRC32=0cfeebcc@1.31.1 diff --git a/src/java/LogrPortal/nbproject/project.properties b/src/java/LogrPortal/nbproject/project.properties index 853ccb196..82109d52c 100644 --- a/src/java/LogrPortal/nbproject/project.properties +++ b/src/java/LogrPortal/nbproject/project.properties @@ -48,6 +48,7 @@ file.reference.dm-api-3.3.1.jar=lib/dm-api-3.3.1.jar file.reference.dm-base-3.3.1.jar=lib/dm-base-3.3.1.jar file.reference.ejb-api-3.0.jar=lib/ejb-api-3.0.jar file.reference.flexmark-0.64.8.jar=lib/flexmark-0.64.8.jar +file.reference.flexmark-ext-tables-0.64.8.jar=lib/flexmark-ext-tables-0.64.8.jar file.reference.flexmark-util-ast-0.64.8.jar=lib/flexmark-util-ast-0.64.8.jar file.reference.flexmark-util-builder-0.64.8.jar=lib/flexmark-util-builder-0.64.8.jar file.reference.flexmark-util-collection-0.64.8.jar=lib/flexmark-util-collection-0.64.8.jar @@ -69,6 +70,7 @@ file.reference.jackson-dataformat-yaml-2.11.2.jar=lib/jackson-dataformat-yaml-2. 
file.reference.jackson-datatype-jsr310-2.11.3.jar=lib/jackson-datatype-jsr310-2.11.3.jar file.reference.jackson-jaxrs-base-2.11.2.jar=lib/jackson-jaxrs-base-2.11.2.jar file.reference.jackson-jaxrs-json-provider-2.11.2.jar=lib/jackson-jaxrs-json-provider-2.11.2.jar +file.reference.jakarta.resource-api-2.1.0.jar=lib/jakarta.resource-api-2.1.0.jar file.reference.javaee-web-api-8.0.1.jar=lib/javaee-web-api-8.0.1.jar file.reference.javax.faces-api-2.1.jar=lib/javax.faces-api-2.1.jar file.reference.javax.inject-1.jar=lib/javax.inject-1.jar @@ -76,9 +78,10 @@ file.reference.javax.servlet-api-4.0.1.jar=lib/javax.servlet-api-4.0.1.jar file.reference.javax.xml.soap-api-1.4.0.jar=lib/javax.xml.soap-api-1.4.0.jar file.reference.jersey-media-json-jackson-2.28.jar=lib/jersey-media-json-jackson-2.28.jar file.reference.libphonenumber-8.12.3.jar=lib/libphonenumber-8.12.3.jar -file.reference.log4j-api-2.17.0.jar=lib/log4j-api-2.17.0.jar -file.reference.log4j-core-2.17.0.jar=lib/log4j-core-2.17.0.jar +file.reference.log4j-api-2.25.3.jar=lib/log4j-api-2.25.3.jar +file.reference.log4j-core-2.25.3.jar=lib/log4j-core-2.25.3.jar file.reference.metadata-extractor-2.17.0.jar=lib/metadata-extractor-2.17.0.jar +file.reference.mqtt-jca-api-1.0.0.jar=lib/mqtt-jca-api-1.0.0.jar file.reference.omnifaces-3.10.1.jar-1=lib/omnifaces-3.10.1.jar file.reference.pdfbox-2.0.24.jar=lib/pdfbox-2.0.24.jar file.reference.poi-4.0.1.jar=lib/poi-4.0.1.jar @@ -155,8 +158,8 @@ javac.classpath=\ ${file.reference.xmpcore-6.1.10.jar}:\ ${file.reference.metadata-extractor-2.17.0.jar}:\ ${file.reference.javax.xml.soap-api-1.4.0.jar}:\ - ${file.reference.log4j-api-2.17.0.jar}:\ - ${file.reference.log4j-core-2.17.0.jar}:\ + ${file.reference.log4j-api-2.25.3.jar}:\ + ${file.reference.log4j-core-2.25.3.jar}:\ ${file.reference.dm-api-3.3.1.jar}:\ ${file.reference.dm-base-3.3.1.jar}:\ ${file.reference.flexmark-0.64.8.jar}:\ @@ -169,7 +172,10 @@ javac.classpath=\ ${file.reference.flexmark-util-collection-0.64.8.jar}:\ 
${file.reference.flexmark-util-format-0.64.8.jar}:\ ${file.reference.flexmark-util-visitor-0.64.8.jar}:\ - ${file.reference.flexmark-util-html-0.64.8.jar} + ${file.reference.flexmark-util-html-0.64.8.jar}:\ + ${file.reference.flexmark-ext-tables-0.64.8.jar}:\ + ${file.reference.jakarta.resource-api-2.1.0.jar}:\ + ${file.reference.mqtt-jca-api-1.0.0.jar} # Space-separated list of extra javac options javac.compilerargs= javac.debug=true diff --git a/src/java/LogrPortal/nbproject/project.xml b/src/java/LogrPortal/nbproject/project.xml index b5acfa92b..cfb6bdc29 100644 --- a/src/java/LogrPortal/nbproject/project.xml +++ b/src/java/LogrPortal/nbproject/project.xml @@ -5,7 +5,6 @@ LogrPortal 1.6.5 - ${file.reference.pdfbox-2.0.24.jar} @@ -184,11 +183,11 @@ WEB-INF/lib - ${file.reference.log4j-api-2.17.0.jar} + ${file.reference.log4j-api-2.25.3.jar} WEB-INF/lib - ${file.reference.log4j-core-2.17.0.jar} + ${file.reference.log4j-core-2.25.3.jar} WEB-INF/lib @@ -243,6 +242,18 @@ ${file.reference.flexmark-util-html-0.64.8.jar} WEB-INF/lib + + ${file.reference.flexmark-ext-tables-0.64.8.jar} + WEB-INF/lib + + + ${file.reference.jakarta.resource-api-2.1.0.jar} + WEB-INF/lib + + + ${file.reference.mqtt-jca-api-1.0.0.jar} + WEB-INF/lib + diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/exceptions/MqttNotConfiguredException.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/exceptions/MqttNotConfiguredException.java new file mode 100644 index 000000000..06476c496 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/exceptions/MqttNotConfiguredException.java @@ -0,0 +1,19 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. + */ +package gov.anl.aps.logr.common.exceptions; + +/** + * Exception thrown when MQTT is not configured on the server. 
+ */ +public class MqttNotConfiguredException extends ConfigurationError { + + public MqttNotConfiguredException() { + super(); + } + + public MqttNotConfiguredException(String message) { + super(message); + } +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/constants/CallSource.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/constants/CallSource.java new file mode 100644 index 000000000..a754b1a4b --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/constants/CallSource.java @@ -0,0 +1,26 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. + */ +package gov.anl.aps.logr.common.mqtt.constants; + +/** + * + * @author djarosz + */ +public enum CallSource { + + API("API"), + Portal("Portal"); + + private final String value; + + CallSource(String value) { + this.value = value; + } + + public String getValue() { + return value; + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/constants/ChangeType.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/constants/ChangeType.java new file mode 100644 index 000000000..1224a1d77 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/constants/ChangeType.java @@ -0,0 +1,27 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. 
+ */ +package gov.anl.aps.logr.common.mqtt.constants; + +/** + * + * @author djarosz + */ +public enum ChangeType { + + ADD(1), + UPDATE(2), + DELETE(3); + + private final int value; + + ChangeType(int value) { + this.value = value; + } + + public int getValue() { + return value; + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/constants/MqttTopic.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/constants/MqttTopic.java new file mode 100644 index 000000000..8115f6968 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/constants/MqttTopic.java @@ -0,0 +1,40 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. + */ +package gov.anl.aps.logr.common.mqtt.constants; + +/** + * + * @author djarosz + */ +public enum MqttTopic { + UPDATE("bely/update"), + ADD("bely/add"), + DELETE("bely/delete"), + LOGENTRYADD("bely/logEntry/Add"), + LOGENTRYUPDATE("bely/logEntry/Update"), + LOGENTRYDELETE("bely/logEntry/Delete"), + LOGENTRYREPLYADD("bely/logEntryReply/Add"), + LOGENTRYREPLYUPDATE("bely/logEntryReply/Update"), + LOGENTRYREPLYDELETE("bely/logEntryReply/Delete"), + LOGREACTIONADD("bely/logReaction/Add"), + LOGREACTIONDELETE("bely/logReaction/Delete"), + SEARCH("bely/search"), + NOTIFICATIONTEST("bely/notification/test"); + + private final String value; + + MqttTopic(String value) { + this.value = value; + } + + public String getValue() { + return value; + } + + @Override + public String toString() { + return super.toString() + "(" + value + ")"; + } +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/AddEvent.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/AddEvent.java new file mode 100644 index 000000000..89ee7a63a --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/AddEvent.java @@ -0,0 +1,26 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. 
+ */ +package gov.anl.aps.logr.common.mqtt.model; + +import gov.anl.aps.logr.common.mqtt.constants.MqttTopic; +import gov.anl.aps.logr.portal.model.db.entities.CdbEntity; +import gov.anl.aps.logr.portal.model.db.entities.UserInfo; + +/** + * + * @author djarosz + */ +public class AddEvent extends MqttEntityEvent { + + public AddEvent(CdbEntity entity, UserInfo eventTriggedByUser, String description) { + super(entity, eventTriggedByUser, description); + } + + @Override + public MqttTopic getTopic() { + return MqttTopic.ADD; + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/DeleteEvent.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/DeleteEvent.java new file mode 100644 index 000000000..babcafa0d --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/DeleteEvent.java @@ -0,0 +1,26 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. + */ +package gov.anl.aps.logr.common.mqtt.model; + +import gov.anl.aps.logr.common.mqtt.constants.MqttTopic; +import gov.anl.aps.logr.portal.model.db.entities.CdbEntity; +import gov.anl.aps.logr.portal.model.db.entities.UserInfo; + +/** + * + * @author djarosz + */ +public class DeleteEvent extends MqttEntityEvent { + + public DeleteEvent(CdbEntity entity, UserInfo eventTriggedByUser, String description) { + super(entity, eventTriggedByUser, description); + } + + @Override + public MqttTopic getTopic() { + return MqttTopic.DELETE; + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/LogEntryEvent.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/LogEntryEvent.java new file mode 100644 index 000000000..a95157223 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/LogEntryEvent.java @@ -0,0 +1,87 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. 
+ */ +package gov.anl.aps.logr.common.mqtt.model; + +import gov.anl.aps.logr.common.mqtt.constants.ChangeType; +import gov.anl.aps.logr.common.mqtt.constants.MqttTopic; +import gov.anl.aps.logr.common.mqtt.model.entities.LogInfo; +import gov.anl.aps.logr.common.mqtt.model.entities.LogbookDocumentInfo; +import gov.anl.aps.logr.common.mqtt.model.entities.LogbookInfo; +import gov.anl.aps.logr.portal.model.db.entities.EntityType; +import gov.anl.aps.logr.portal.model.db.entities.ItemDomainLogbook; +import gov.anl.aps.logr.portal.model.db.entities.Log; +import gov.anl.aps.logr.portal.model.db.entities.UserInfo; +import java.util.ArrayList; +import java.util.List; + +/** + * + * @author djarosz + */ +public class LogEntryEvent extends MqttEntityEvent { + + LogbookDocumentInfo parentLogDocumentInfo; + LogInfo logInfo; + List logbookList; + String textDiff; + protected ChangeType changeType; + + public LogEntryEvent(ItemDomainLogbook parentLogbook, Log entity, UserInfo eventTriggedByUser, String description, String textDiff, ChangeType changeType) { + super(entity, eventTriggedByUser, description); + this.logInfo = new LogInfo(entity); + this.textDiff = textDiff; + this.changeType = changeType; + + if (parentLogbook != null) { + parentLogDocumentInfo = new LogbookDocumentInfo(parentLogbook); + } + + List entityTypeList = parentLogbook.getEntityTypeList(); + logbookList = new ArrayList<>(); + for (EntityType entityType : entityTypeList) { + LogbookInfo info = new LogbookInfo(entityType); + logbookList.add(info); + } + } + + @Override + public final MqttTopic getTopic() { + if (changeType == ChangeType.ADD) { + return getAddEventTopic(); + } else if (changeType == ChangeType.DELETE) { + return getDeletedEventTopic(); + } + return getUpdateEventTopic(); + } + + protected MqttTopic getAddEventTopic() { + return MqttTopic.LOGENTRYADD; + } + + protected MqttTopic getUpdateEventTopic() { + return MqttTopic.LOGENTRYUPDATE; + } + + protected MqttTopic getDeletedEventTopic() { + 
return MqttTopic.LOGENTRYDELETE; + } + + public LogInfo getLogInfo() { + return logInfo; + } + + public LogbookDocumentInfo getParentLogDocumentInfo() { + return parentLogDocumentInfo; + } + + public List getLogbookList() { + return logbookList; + } + + public String getTextDiff() { + return textDiff; + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/LogReactionEvent.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/LogReactionEvent.java new file mode 100644 index 000000000..97f9cfeaf --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/LogReactionEvent.java @@ -0,0 +1,63 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. + */ +package gov.anl.aps.logr.common.mqtt.model; + +import gov.anl.aps.logr.common.mqtt.constants.MqttTopic; +import gov.anl.aps.logr.common.mqtt.model.entities.LogInfo; +import gov.anl.aps.logr.common.mqtt.model.entities.LogbookDocumentInfo; +import gov.anl.aps.logr.portal.model.db.entities.ItemDomainLogbook; +import gov.anl.aps.logr.portal.model.db.entities.Log; +import gov.anl.aps.logr.portal.model.db.entities.LogReaction; +import gov.anl.aps.logr.portal.model.db.entities.UserInfo; + +/** + * + * @author djarosz + */ +public class LogReactionEvent extends MqttEntityEvent { + + boolean isDelete; + + LogInfo parentLogInfo; + LogbookDocumentInfo parentLogDocumentInfo; + + public LogReactionEvent(LogReaction entity, + Log parentLogEntry, + ItemDomainLogbook parentLogDocument, + UserInfo eventTriggedByUser, + String description, boolean isDelete) { + super(entity, eventTriggedByUser, description); + + this.isDelete = isDelete; + + if (parentLogEntry != null) { + parentLogInfo = new LogInfo(parentLogEntry); + } + if (parentLogDocument != null) { + parentLogDocumentInfo = new LogbookDocumentInfo(parentLogDocument); + } + } + + @Override + public MqttTopic getTopic() { + if (isDelete) { + return MqttTopic.LOGREACTIONDELETE; + 
} + return MqttTopic.LOGREACTIONADD; + } + + public LogReaction getLogReaction() { + return entity; + } + + public LogInfo getParentLogInfo() { + return parentLogInfo; + } + + public LogbookDocumentInfo getParentLogDocumentInfo() { + return parentLogDocumentInfo; + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/MqttEntityEvent.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/MqttEntityEvent.java new file mode 100644 index 000000000..faccbf854 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/MqttEntityEvent.java @@ -0,0 +1,52 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. + */ +package gov.anl.aps.logr.common.mqtt.model; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import gov.anl.aps.logr.portal.model.db.entities.CdbEntity; +import gov.anl.aps.logr.portal.model.db.entities.UserInfo; + +/** + * + * @author djarosz + */ +public abstract class MqttEntityEvent extends MqttEvent { + + Entity entity; + String description; + UserInfo eventTriggedByUser; + + public MqttEntityEvent(Entity entity, UserInfo eventTriggedByUser, String description) { + super(); + this.entity = entity; + this.description = description; + this.eventTriggedByUser = eventTriggedByUser; + } + + @JsonIgnore + public CdbEntity getEntity() { + return entity; + } + + public Object getEntityId() { + return entity.getId(); + } + + public String getEntityName() { + return entity.getClass().getSimpleName(); + } + + public String getDescription() { + return description; + } + + public String getEventTriggedByUsername() { + if (eventTriggedByUser == null) { + return null; + } + + return eventTriggedByUser.getUsername(); + } +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/MqttEvent.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/MqttEvent.java new file mode 100644 index 000000000..83678665e --- /dev/null +++ 
b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/MqttEvent.java @@ -0,0 +1,39 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. + */ +package gov.anl.aps.logr.common.mqtt.model; + +import com.fasterxml.jackson.annotation.JsonFormat; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import gov.anl.aps.logr.common.mqtt.constants.MqttTopic; +import java.util.Date; + +/** + * + * @author djarosz + */ +public abstract class MqttEvent { + + Date eventTimestamp; + + @JsonIgnore + public abstract MqttTopic getTopic(); + + public MqttEvent() { + this.eventTimestamp = new Date(); + } + + @JsonFormat(shape = JsonFormat.Shape.STRING) + public Date getEventTimestamp() { + return eventTimestamp; + } + + public String toJson() throws JsonProcessingException { + ObjectMapper mapper = new ObjectMapper(); + return mapper.writeValueAsString(this); + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/ReplyLogEntryEvent.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/ReplyLogEntryEvent.java new file mode 100644 index 000000000..7d1f0e16c --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/ReplyLogEntryEvent.java @@ -0,0 +1,60 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. 
+ */ +package gov.anl.aps.logr.common.mqtt.model; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import gov.anl.aps.logr.common.mqtt.constants.ChangeType; +import gov.anl.aps.logr.common.mqtt.constants.MqttTopic; +import gov.anl.aps.logr.common.mqtt.model.entities.LogInfo; +import gov.anl.aps.logr.portal.model.db.entities.ItemDomainLogbook; +import gov.anl.aps.logr.portal.model.db.entities.Log; +import gov.anl.aps.logr.portal.model.db.entities.UserInfo; + +/** + * + * @author djarosz + */ +public class ReplyLogEntryEvent extends LogEntryEvent { + + LogInfo parentLogInfo; + + public ReplyLogEntryEvent(ItemDomainLogbook parentLogbook, Log entity, UserInfo eventTriggedByUser, String description, String textDiff, ChangeType changeType) { + super(parentLogbook, entity, eventTriggedByUser, description, textDiff, changeType); + + Log parentLogObject = getParentLogObject(); + + if (parentLogObject != null) { + parentLogInfo = new LogInfo(parentLogObject); + } + } + + @Override + protected MqttTopic getAddEventTopic() { + return MqttTopic.LOGENTRYREPLYADD; + } + + @Override + protected MqttTopic getUpdateEventTopic() { + return MqttTopic.LOGENTRYREPLYUPDATE; + } + + @Override + protected MqttTopic getDeletedEventTopic() { + return MqttTopic.LOGENTRYREPLYDELETE; + } + + @JsonIgnore + public final Log getParentLogObject() { + if (entity == null) { + return null; + } + return entity.getParentLog(); + } + + public LogInfo getParentLogInfo() { + return parentLogInfo; + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/SearchEvent.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/SearchEvent.java new file mode 100644 index 000000000..3f341fa87 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/SearchEvent.java @@ -0,0 +1,48 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. 
+ */ +package gov.anl.aps.logr.common.mqtt.model; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import gov.anl.aps.logr.common.mqtt.constants.CallSource; +import gov.anl.aps.logr.common.mqtt.constants.MqttTopic; +import gov.anl.aps.logr.common.mqtt.model.entities.LogbookSearchOptions; + +/** + * MQTT event for search operations. + * + * @author djarosz + */ +public class SearchEvent extends MqttEvent { + + private String searchText; + private LogbookSearchOptions searchOptions; + private CallSource source; // "API" or "Portal" + + public SearchEvent(String searchText, LogbookSearchOptions searchOptions, CallSource searchSource) { + super(); + this.searchText = searchText; + this.searchOptions = searchOptions; + this.source = searchSource; + + } + + @Override + @JsonIgnore + public MqttTopic getTopic() { + return MqttTopic.SEARCH; + } + + public String getSearchText() { + return searchText; + } + + public LogbookSearchOptions getSearchOptions() { + return searchOptions; + } + + public String getSource() { + return source.getValue(); + } +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/TestNotificationEvent.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/TestNotificationEvent.java new file mode 100644 index 000000000..30decd0e6 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/TestNotificationEvent.java @@ -0,0 +1,61 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. + */ +package gov.anl.aps.logr.common.mqtt.model; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import gov.anl.aps.logr.common.mqtt.constants.MqttTopic; +import java.util.Map; + +/** + * MQTT event for test notification operations. 
+ * + * @author djarosz + */ +@JsonInclude(JsonInclude.Include.NON_NULL) +public class TestNotificationEvent extends MqttEvent { + + private String notificationEndpoint; + private String configurationName; + private Integer configurationId; + private Map providerSettings; + private String eventTriggedByUsername; + + public TestNotificationEvent(String notificationEndpoint, String configurationName, + Integer configurationId, Map providerSettings, + String eventTriggedByUsername) { + super(); + this.notificationEndpoint = notificationEndpoint; + this.configurationName = configurationName; + this.configurationId = configurationId; + this.providerSettings = providerSettings; + this.eventTriggedByUsername = eventTriggedByUsername; + } + + @Override + public MqttTopic getTopic() { + return MqttTopic.NOTIFICATIONTEST; + } + + public String getNotificationEndpoint() { + return notificationEndpoint; + } + + public String getConfigurationName() { + return configurationName; + } + + public Integer getConfigurationId() { + return configurationId; + } + + public Map getProviderSettings() { + return providerSettings; + } + + public String getEventTriggedByUsername() { + return eventTriggedByUsername; + } +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/UpdateEvent.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/UpdateEvent.java new file mode 100644 index 000000000..650bd2e9b --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/UpdateEvent.java @@ -0,0 +1,26 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. 
+ */ +package gov.anl.aps.logr.common.mqtt.model; + +import gov.anl.aps.logr.common.mqtt.constants.MqttTopic; +import gov.anl.aps.logr.portal.model.db.entities.CdbEntity; +import gov.anl.aps.logr.portal.model.db.entities.UserInfo; + +/** + * + * @author djarosz + */ +public class UpdateEvent extends MqttEntityEvent { + + public UpdateEvent(CdbEntity entity, UserInfo eventTriggedByUser, String description) { + super(entity, eventTriggedByUser, description); + } + + @Override + public MqttTopic getTopic() { + return MqttTopic.UPDATE; + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/entities/LogInfo.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/entities/LogInfo.java new file mode 100644 index 000000000..5982730d8 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/entities/LogInfo.java @@ -0,0 +1,44 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. + */ +package gov.anl.aps.logr.common.mqtt.model.entities; + +import com.fasterxml.jackson.annotation.JsonFormat; +import gov.anl.aps.logr.portal.model.db.entities.Log; +import java.util.Date; + +/** + * + * @author djarosz + */ +public class LogInfo { + + Log log; + + public LogInfo(Log log) { + this.log = log; + } + + public Integer getId() { + return log.getId(); + } + + public String getEnteredByUsername() { + return log.getEnteredByUsername(); + } + + public String getLastModifiedByUsername() { + return log.getLastModifiedByUsername(); + } + + @JsonFormat(shape = JsonFormat.Shape.STRING) + public Date getEnteredOnDateTime() { + return log.getEnteredOnDateTime(); + } + + @JsonFormat(shape = JsonFormat.Shape.STRING) + public Date getLastModifiedOnDateTime() { + return log.getLastModifiedOnDateTime(); + } +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/entities/LogbookDocumentInfo.java 
b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/entities/LogbookDocumentInfo.java new file mode 100644 index 000000000..d96b88920 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/entities/LogbookDocumentInfo.java @@ -0,0 +1,63 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. + */ +package gov.anl.aps.logr.common.mqtt.model.entities; + +import com.fasterxml.jackson.annotation.JsonFormat; +import com.fasterxml.jackson.annotation.JsonIgnore; +import gov.anl.aps.logr.portal.model.db.entities.EntityInfo; +import gov.anl.aps.logr.portal.model.db.entities.ItemDomainLogbook; +import java.util.Date; + +/** + * + * @author djarosz + */ +public class LogbookDocumentInfo { + + ItemDomainLogbook parentLogbook; + + public LogbookDocumentInfo(ItemDomainLogbook parentLogbook) { + this.parentLogbook = parentLogbook; + } + + public Integer getId() { + return parentLogbook.getId(); + } + + public String getName() { + return parentLogbook.getName(); + } + + @JsonIgnore + public EntityInfo getEntityInfo() { + return parentLogbook.getEntityInfo(); + } + + public String getCreatedByUsername() { + return getEntityInfo().getCreatedByUsername(); + } + + public String getLastModifiedByUsername() { + return getEntityInfo().getLastModifiedByUsername(); + } + + public String getOwnerUsername() { + return getEntityInfo().getOwnerUsername(); + } + + public String getOwnerUserGroupName() { + return getEntityInfo().getOwnerUserGroupName(); + } + + @JsonFormat(shape = JsonFormat.Shape.STRING) + public Date getEnteredOnDateTime() { + return getEntityInfo().getCreatedOnDateTime(); + } + + @JsonFormat(shape = JsonFormat.Shape.STRING) + public Date getLastModifiedOnDateTime() { + return getEntityInfo().getLastModifiedOnDateTime(); + } +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/entities/LogbookInfo.java 
b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/entities/LogbookInfo.java new file mode 100644 index 000000000..35fa5ea4d --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/entities/LogbookInfo.java @@ -0,0 +1,33 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. + */ +package gov.anl.aps.logr.common.mqtt.model.entities; + +import gov.anl.aps.logr.portal.model.db.entities.EntityType; + +/** + * + * @author djarosz + */ +public class LogbookInfo { + + EntityType logbook; + + public LogbookInfo(EntityType logbook) { + this.logbook = logbook; + } + + public Integer getId() { + return this.logbook.getId(); + } + + public String getName() { + return this.logbook.getName(); + } + + public String getDisplayName() { + return this.logbook.getDisplayName(); + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/entities/LogbookSearchOptions.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/entities/LogbookSearchOptions.java new file mode 100644 index 000000000..c51e2a8bd --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/common/mqtt/model/entities/LogbookSearchOptions.java @@ -0,0 +1,112 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. + */ +package gov.anl.aps.logr.common.mqtt.model.entities; + +import com.fasterxml.jackson.annotation.JsonFormat; +import gov.anl.aps.logr.portal.model.db.entities.EntityType; +import gov.anl.aps.logr.portal.model.db.entities.ItemType; +import gov.anl.aps.logr.portal.model.db.entities.UserInfo; +import java.util.Date; +import java.util.List; +import java.util.stream.Collectors; + +/** + * Search options for logbook search MQTT events. 
+ * + * @author djarosz + */ +public class LogbookSearchOptions { + + private final List logbookTypes; + private final List systems; + private final List users; + private final Date startModifiedDate; + private final Date endModifiedDate; + private final Date startCreatedDate; + private final Date endCreatedDate; + private final boolean caseInsensitive; + + public LogbookSearchOptions( + List logbookTypes, + List systems, + List users, + Date startModifiedDate, + Date endModifiedDate, + Date startCreatedDate, + Date endCreatedDate, + boolean caseInsensitive) { + + this.logbookTypes = toFilterItems(logbookTypes, e -> new FilterItem(e.getId(), e.getName())); + this.systems = toFilterItems(systems, s -> new FilterItem(s.getId(), s.getName())); + this.users = toFilterItems(users, u -> new FilterItem(u.getId(), u.getUsername())); + this.startModifiedDate = startModifiedDate; + this.endModifiedDate = endModifiedDate; + this.startCreatedDate = startCreatedDate; + this.endCreatedDate = endCreatedDate; + this.caseInsensitive = caseInsensitive; + } + + private List toFilterItems(List list, java.util.function.Function mapper) { + if (list == null) { + return null; + } + return list.stream().map(mapper).collect(Collectors.toList()); + } + + public List getLogbookTypes() { + return logbookTypes; + } + + public List getSystems() { + return systems; + } + + public List getUsers() { + return users; + } + + @JsonFormat(shape = JsonFormat.Shape.STRING) + public Date getStartModifiedDate() { + return startModifiedDate; + } + + @JsonFormat(shape = JsonFormat.Shape.STRING) + public Date getEndModifiedDate() { + return endModifiedDate; + } + + @JsonFormat(shape = JsonFormat.Shape.STRING) + public Date getStartCreatedDate() { + return startCreatedDate; + } + + @JsonFormat(shape = JsonFormat.Shape.STRING) + public Date getEndCreatedDate() { + return endCreatedDate; + } + + public boolean isCaseInsensitive() { + return caseInsensitive; + } + + public static class FilterItem { + + private 
int id; + private String name; + + public FilterItem(int id, String name) { + this.id = id; + this.name = name; + } + + public int getId() { + return id; + } + + public String getName() { + return name; + } + } +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/CdbEntityController.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/CdbEntityController.java index c326c0716..24c8ecb0c 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/CdbEntityController.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/CdbEntityController.java @@ -933,12 +933,14 @@ public String create(Boolean silent) { if (!silent) { SessionUtility.addErrorMessage("Error", "Could not create " + getDisplayEntityTypeName() + ": " + ex.getMessage()); } + SessionUtility.setValidationFailed(); return null; } catch (RuntimeException ex) { if (!silent) { Throwable t = ExceptionUtils.getRootCause(ex); SessionUtility.addErrorMessage("Error", "Could not create " + getDisplayEntityTypeName() + ": " + t.getMessage()); } + SessionUtility.setValidationFailed(); return null; } } @@ -1057,10 +1059,12 @@ public String update() { return viewForCurrentEntity(); } catch (CdbException ex) { SessionUtility.addErrorMessage("Error", "Could not update " + getDisplayEntityTypeName() + ": " + ex.getMessage()); + SessionUtility.setValidationFailed(); return null; } catch (RuntimeException ex) { Throwable t = ExceptionUtils.getRootCause(ex); SessionUtility.addErrorMessage("Error", "Could not update " + getDisplayEntityTypeName() + ": " + t.getMessage()); + SessionUtility.setValidationFailed(); return null; } } diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/ItemDomainLogbookController.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/ItemDomainLogbookController.java index a02591f07..b390de6b7 100644 --- 
a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/ItemDomainLogbookController.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/ItemDomainLogbookController.java @@ -6,6 +6,8 @@ import gov.anl.aps.logr.common.exceptions.CdbException; import gov.anl.aps.logr.common.exceptions.InvalidObjectState; +import gov.anl.aps.logr.common.mqtt.constants.CallSource; +import gov.anl.aps.logr.common.mqtt.model.entities.LogbookSearchOptions; import gov.anl.aps.logr.common.utilities.CollectionUtility; import gov.anl.aps.logr.portal.constants.EntityTypeName; import gov.anl.aps.logr.portal.constants.LogDocumentSettings; @@ -16,6 +18,8 @@ import gov.anl.aps.logr.portal.controllers.utilities.EntityInfoControllerUtility; import gov.anl.aps.logr.portal.controllers.utilities.EntityTypeControllerUtility; import gov.anl.aps.logr.portal.controllers.utilities.ItemDomainLogbookControllerUtility; +import gov.anl.aps.logr.portal.controllers.utilities.SearchControllerUtility; +import gov.anl.aps.logr.portal.controllers.utilities.LogReactionControllerUtility; import gov.anl.aps.logr.portal.controllers.utilities.PropertyTypeControllerUtility; import gov.anl.aps.logr.portal.controllers.utilities.SettingTypeControllerUtility; import gov.anl.aps.logr.portal.model.ItemDomainLogbookLazyDataModel; @@ -51,12 +55,13 @@ import java.time.format.DateTimeFormatter; import java.time.temporal.ChronoUnit; import java.util.ArrayList; -import java.util.Calendar; + import java.util.Date; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.regex.Pattern; +import java.util.logging.Level; + import javax.ejb.EJB; import javax.enterprise.context.SessionScoped; import javax.faces.component.UIComponent; @@ -121,7 +126,7 @@ public class ItemDomainLogbookController extends ItemController private EntityInfoControllerUtility entityInfoControllerUtility; + private LogReactionControllerUtility logReactionControllerUtility; private static final String 
OPS_ENTITY_TYPE_NAME = "ops"; @@ -302,6 +308,13 @@ private EntityInfoControllerUtility getEntityInfoControllerUtility() { return entityInfoControllerUtility; } + public LogReactionControllerUtility getLogReactionControllerUtility() { + if (logReactionControllerUtility == null) { + logReactionControllerUtility = new LogReactionControllerUtility(); + } + return logReactionControllerUtility; + } + @Override public String getDefaultDomainDerivedFromDomainName() { throw new UnsupportedOperationException("Not supported yet."); // Generated from nbfs://nbhost/SystemFileSystem/Templates/Classes/Code/GeneratedMethodBody @@ -598,11 +611,16 @@ private void updateModifiedDateForCurrent() { public void destroyLogEntry(Log entry) { if (isSaveLogLockoutsForCurrent(entry)) { - LogController instance = LogController.getInstance(); - instance.destroy(entry); + ItemDomainLogbookControllerUtility utility = getControllerUtility(); + UserInfo user = SessionUtility.getUser(); + try { + utility.destroyLogEntry(entry, user); + } catch (CdbException ex) { + logger.error(ex); + SessionUtility.addErrorMessage("Error", ex.getErrorMessage()); + } + updateModifiedDateForCurrent(); } - - updateModifiedDateForCurrent(); } public String getAddedReactionsString(Log entry) { @@ -646,32 +664,14 @@ public List getGroupedReactions(Log entry) { } public void toggleReaction(Log entry, Reaction reaction) { - // Fetch the latest version - entry = logFacade.find(entry.getId()); + LogReactionControllerUtility utility = getLogReactionControllerUtility(); UserInfo user = SessionUtility.getUser(); - List logReactionList = entry.getLogReactionList(); - boolean add = true; - - // Check if need to remove log reaction. 
- for (LogReaction lr : logReactionList) { - UserInfo userId = lr.getUserInfo(); - - if (user.equals(userId)) { - Reaction existingReaction = lr.getReaction(); - - if (existingReaction.equals(reaction)) { - add = false; - logReactionList.remove(lr); - logReactionFacade.remove(lr); - break; - } - } - } - - if (add) { - LogReaction lr = new LogReaction(entry.getId(), reaction.getId(), user.getId()); - logReactionFacade.create(lr); + try { + utility.toggleReaction(entry, reaction, user); + } catch (CdbException ex) { + logger.error(ex); + SessionUtility.addErrorMessage("Error", ex.getMessage()); } // No need to scroll to any log entry. Ajax event. @@ -682,9 +682,10 @@ public void toggleReaction(Log entry, Reaction reaction) { @Override public String saveLogList() { Log newLogEdit = getNewLogEdit(); + Log savedLogEntry = null; if (newLogEdit.getId() != null) { // Perform validation - Log savedLogEntry = logFacade.find(newLogEdit.getId()); + savedLogEntry = logFacade.find(newLogEdit.getId()); if (savedLogEntry == null) { handleDeletedLogEntryDuringSync(newLogEdit); @@ -704,8 +705,19 @@ public String saveLogList() { } } - LogController logController = LogController.getInstance(); - logController.saveLogEntry(newLogEdit); + UserInfo userInfo = SessionUtility.getUser(); + + try { + controllerUtility.saveLog(newLogEdit, userInfo, savedLogEntry); + } catch (CdbException ex) { + String persitanceErrorMessage = newLogEdit.getPersitanceErrorMessage(); + SessionUtility.addErrorMessage("Error", persitanceErrorMessage); + return null; + } catch (RuntimeException ex) { + String persitanceErrorMessage = newLogEdit.getPersitanceErrorMessage(); + SessionUtility.addErrorMessage("Error", persitanceErrorMessage); + return null; + } lastLog = newLogEdit; if (newLogEdit.getId() == null) { @@ -1040,26 +1052,19 @@ public void performEntitySearch(String searchString, boolean caseInsensitive) { SearchSettings searchSettings = searchCtrl.getSearchSettings(); Boolean advancedSearch = 
searchSettings.getAdvancedSearch(); - String entityTypeIdList = null; - String itemTypeIdList = null; - String userIdList = null; Date startModifiedTime = null; Date endModifiedTime = null; Date startCreatedTime = null; Date endCreatedTime = null; if (advancedSearch) { - entityTypeIdList = CollectionUtility.generateIdListString(searchLogbookTypeList); - itemTypeIdList = CollectionUtility.generateIdListString(searchSystemList); - userIdList = CollectionUtility.generateIdListString(searchUserList); - startModifiedTime = searchModifiedStartDate; endModifiedTime = searchModifiedEndDate; startCreatedTime = searchCreatedStartDate; endCreatedTime = searchCreatedEndDate; - endModifiedTime = adjustEndTime(endModifiedTime); - endCreatedTime = adjustEndTime(endCreatedTime); + endModifiedTime = ItemDomainLogbookControllerUtility.adjustEndTimeForSearch(endModifiedTime); + endCreatedTime = ItemDomainLogbookControllerUtility.adjustEndTimeForSearch(endCreatedTime); } resetSearchVariables(); @@ -1070,68 +1075,16 @@ public void performEntitySearch(String searchString, boolean caseInsensitive) { super.performEntitySearch(searchString, searchArgs, caseInsensitive); - // Search log entries. 
- List results = itemDomainLogbookFacade.searchEntityLogs(searchString, itemTypeIdList, entityTypeIdList, userIdList, - startModifiedTime, endModifiedTime, startCreatedTime, endCreatedTime); - - ItemDomainLogbookControllerUtility controllerUtility1 = getControllerUtility(); - String patternString = controllerUtility1.generatePatternString(searchString); - Pattern searchPattern = controllerUtility1.getSearchPattern(patternString, caseInsensitive); - - logResults = new ArrayList<>(); - - for (Object[] result : results) { - ItemDomainLogbook logbook = (ItemDomainLogbook) result[0]; - Log log = (Log) result[1]; - Long logId = (Long) result[2]; - - SearchResult searchResult = new SearchResult(logbook, logbook.getId(), logbook.getName(), log); - searchResult.setAdditionalAttribute("" + logId); - - String text = log.getText(); - String[] logLines = text.split("\n"); - String matching_lines = ""; - - for (int i = 0; i < logLines.length; i++) { - String lineText = logLines[i]; - - if (searchPattern.matcher(lineText).find()) { - matching_lines += lineText + "\n"; - } - } - searchResult.addAttributeMatch("log entry", matching_lines); - - controllerUtility1.addCommonLogEntryDocumentMatches(searchResult, searchLogbookTypeList, searchSystemList); - - if (searchUserList != null && !searchUserList.isEmpty()) { - for (UserInfo ui : searchUserList) { - Integer searchUserId = ui.getId(); - - UserInfo enteredByUser = log.getEnteredByUser(); - UserInfo lastModifiedByUser = log.getLastModifiedByUser(); - - if (Objects.equals(enteredByUser.getId(), searchUserId)) { - searchResult.addAttributeMatch("Create User", enteredByUser.toString()); - } - if (Objects.equals(lastModifiedByUser.getId(), searchUserId)) { - searchResult.addAttributeMatch("Last Modify User", lastModifiedByUser.toString()); - } - } - } + // Search log entries using shared utility method. 
+ logResults = utility.searchLogEntries(searchString, caseInsensitive, searchArgs); - if (startCreatedTime != null || endCreatedTime != null) { - Date enteredOnDateTime = log.getEnteredOnDateTime(); - searchResult.addAttributeMatch("Created on", enteredOnDateTime.toString()); - } - - if (startModifiedTime != null || endModifiedTime != null) { - Date modifiedOnDateTime = log.getLastModifiedOnDateTime(); - searchResult.addAttributeMatch("Modified on", modifiedOnDateTime.toString()); - } - - logResults.add(searchResult); - } - } + // Publish MQTT search event with logbook-specific options + LogbookSearchOptions options = new LogbookSearchOptions( + searchLogbookTypeList, searchSystemList, searchUserList, + startModifiedTime, endModifiedTime, + startCreatedTime, endCreatedTime, caseInsensitive); + SearchControllerUtility.publishSearchMqttEvent(searchString, options, CallSource.Portal); + } public String getSearchOpts() { if (searchOpts == null) { @@ -1165,24 +1118,24 @@ public String getSearchOpts() { return searchOpts; } - public void processSearchRequestParams() { + public void processSearchRequestParams() { String entityTypeIdList = SessionUtility.getRequestParameterValue(SEARCH_ETL_IDS); String itemTypeIdList = SessionUtility.getRequestParameterValue(SEARCH_ITL_IDS); - String userIdList = SessionUtility.getRequestParameterValue(SEARCH_USR_IDS); + String userIdList = SessionUtility.getRequestParameterValue(SEARCH_USR_IDS); String createStart = SessionUtility.getRequestParameterValue(SEARCH_CREATE_START_DATE); String createEnd = SessionUtility.getRequestParameterValue(SEARCH_CREATE_END_DATE); String modifyStart = SessionUtility.getRequestParameterValue(SEARCH_MOD_START_DATE); String modifyEnd = SessionUtility.getRequestParameterValue(SEARCH_MOD_END_DATE); // If any variables have been passed in, reset all search options. 
- if (entityTypeIdList != null + if (entityTypeIdList != null || itemTypeIdList != null || userIdList != null || createStart != null || createEnd != null || modifyStart != null || modifyEnd != null) { - + SearchController searchCtrl = SearchController.getInstance(); SearchSettings searchSettings = searchCtrl.getSearchSettings(); searchSettings.setAdvancedSearch(true); @@ -1194,29 +1147,29 @@ public void processSearchRequestParams() { searchCreatedEndDate = null; searchModifiedStartDate = null; searchModifiedEndDate = null; - + if (entityTypeIdList != null) { String[] ids = entityTypeIdList.split(","); List selection = new ArrayList<>(); - + for (String id : ids) { selection.add(entityTypeFacade.find(Integer.valueOf(id))); } setSearchLogbookTypeList(selection); - } + } if (itemTypeIdList != null) { String[] ids = itemTypeIdList.split(","); List selection = new ArrayList<>(); - + for (String id : ids) { selection.add(itemTypeFacade.find(Integer.valueOf(id))); } - setSearchSystemList(selection); + setSearchSystemList(selection); } if (userIdList != null) { String[] ids = userIdList.split(","); List selection = new ArrayList<>(); - + for (String id : ids) { selection.add(userInfoFacade.find(Integer.valueOf(id))); } @@ -1232,27 +1185,13 @@ public void processSearchRequestParams() { } if (modifyStart != null) { long unixTimestamp = Long.parseLong(modifyStart); - setSearchModifiedStartDate(new Date(unixTimestamp)); + setSearchModifiedStartDate(new Date(unixTimestamp)); } if (modifyEnd != null) { long unixTimestamp = Long.parseLong(modifyEnd); setSearchModifiedEndDate(new Date(unixTimestamp)); - } - } - } - - private Date adjustEndTime(Date endTime) { - if (endTime != null) { - // Add offset to the end of the selected date. 
- Calendar endDateCal = Calendar.getInstance(); - endDateCal.setTime(endTime); - endDateCal.set(Calendar.HOUR, 23); - endDateCal.set(Calendar.MINUTE, 59); - endDateCal.set(Calendar.SECOND, 59); - endTime = endDateCal.getTime(); + } } - - return endTime; } public List getLogResults() { diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/LogController.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/LogController.java index bebae3c62..57be8efd6 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/LogController.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/LogController.java @@ -100,25 +100,6 @@ protected LogControllerUtility createControllerUtilityInstance() { return new LogControllerUtility(); } - public void saveLogEntry(Log log) { - LogControllerUtility controllerUtility1 = getControllerUtility(); - UserInfo userInfo = SessionUtility.getUser(); - - try { - if (log.getId() != null) { - controllerUtility1.update(log, userInfo); - } else { - controllerUtility1.create(log, userInfo); - } - } catch (CdbException ex) { - String persitanceErrorMessage = log.getPersitanceErrorMessage(); - SessionUtility.addErrorMessage("Error", persitanceErrorMessage); - } catch (RuntimeException ex) { - String persitanceErrorMessage = log.getPersitanceErrorMessage(); - SessionUtility.addErrorMessage("Error", persitanceErrorMessage); - } - } - /** * Converter class for log objects. */ diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/NotificationConfigurationController.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/NotificationConfigurationController.java new file mode 100644 index 000000000..ee4527e44 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/NotificationConfigurationController.java @@ -0,0 +1,520 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. 
+ */ +package gov.anl.aps.logr.portal.controllers; + +import gov.anl.aps.logr.portal.controllers.settings.NotificationConfigurationSettings; +import gov.anl.aps.logr.portal.controllers.utilities.NotificationConfigurationControllerUtility; +import gov.anl.aps.logr.common.mqtt.model.TestNotificationEvent; +import gov.anl.aps.logr.portal.model.db.beans.NotificationConfigurationFacade; +import gov.anl.aps.logr.portal.model.db.beans.NotificationConfigurationHandlerSettingFacade; +import gov.anl.aps.logr.portal.model.db.beans.NotificationConfigurationSettingFacade; +import gov.anl.aps.logr.portal.model.db.beans.NotificationHandlerConfigKeyFacade; +import gov.anl.aps.logr.portal.model.db.beans.NotificationProviderConfigKeyFacade; +import gov.anl.aps.logr.portal.model.db.beans.NotificationProviderFacade; +import gov.anl.aps.logr.portal.model.db.entities.NotificationConfiguration; +import gov.anl.aps.logr.portal.model.db.entities.NotificationConfiguration.UnsubscribeState; +import gov.anl.aps.logr.portal.model.db.entities.NotificationConfigurationHandlerSetting; +import gov.anl.aps.logr.portal.model.db.entities.NotificationConfigurationSetting; +import gov.anl.aps.logr.portal.model.db.entities.NotificationHandlerConfigKey; +import gov.anl.aps.logr.portal.model.db.entities.NotificationProvider; +import gov.anl.aps.logr.portal.model.db.entities.NotificationProviderConfigKey; +import gov.anl.aps.logr.portal.model.db.entities.UserInfo; +import gov.anl.aps.logr.portal.utilities.SessionUtility; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import javax.ejb.EJB; +import javax.enterprise.context.SessionScoped; +import javax.faces.component.UIComponent; +import javax.faces.context.FacesContext; +import javax.faces.convert.Converter; +import javax.faces.convert.FacesConverter; +import javax.inject.Named; +import org.apache.logging.log4j.LogManager; +import 
org.apache.logging.log4j.Logger; + +/** + * Controller for managing notification configurations. Handles CRUD operations + * for user notification configurations including provider settings and handler + * preferences. + * + * @author djarosz + */ +@Named(NotificationConfigurationController.CONTROLLER_NAMED) +@SessionScoped +public class NotificationConfigurationController extends CdbEntityController implements Serializable { + + public static final String CONTROLLER_NAMED = "notificationConfigurationController"; + + public static final String APPRISE_NOTIFICATION_PROVIDER_NAME = "apprise"; + + private static final Logger logger = LogManager.getLogger(NotificationConfigurationController.class.getName()); + + @EJB + private NotificationProviderFacade notificationProviderFacade; + + @EJB + private NotificationHandlerConfigKeyFacade notificationHandlerConfigKeyFacade; + + @EJB + private NotificationConfigurationHandlerSettingFacade notificationConfigurationHandlerSettingFacade; + + @EJB + private NotificationProviderConfigKeyFacade notificationProviderConfigKeyFacade; + + @EJB + private NotificationConfigurationSettingFacade notificationConfigurationSettingFacade; + + @EJB + private NotificationConfigurationFacade notificationConfigurationFacade; + + @Override + protected NotificationConfigurationControllerUtility createControllerUtilityInstance() { + return new NotificationConfigurationControllerUtility(); + } + + @Override + protected NotificationConfigurationSettings createNewSettingObject() { + return new NotificationConfigurationSettings(); + } + + @Override + protected NotificationConfigurationFacade getEntityDbFacade() { + return notificationConfigurationFacade; + } + + /** + * Get all configurations for the current logged-in user. 
+ * + * @return + */ + public List getItemsForCurrentUser() { + UserInfoController instance = UserInfoController.getInstance(); + UserInfo currentUserInfoUser = instance.getCurrent(); + if (currentUserInfoUser != null) { + return notificationConfigurationFacade.findByUser(currentUserInfoUser); + } + return new ArrayList<>(); + } + + /** + * Get all available notification providers. + * + * @return + */ + public List getAvailableProviders() { + return notificationProviderFacade.findAll(); + } + + /** + * Called when provider is selected in dialog. + */ + public void onProviderChange() { + NotificationProvider provider = getCurrent().getNotificationProvider(); + if (provider != null) { + loadProviderConfigKeys(); + } else { + NotificationConfiguration current = getCurrent(); + current.getProviderConfigKeys().clear(); + current.getConfigSettings().clear(); + } + } +// + + /** + * Prepare to create a new configuration. + * + * @param userInfo + */ + public void prepareCreateDialog(UserInfo userInfo) { + super.prepareCreate(); + NotificationConfiguration current = getCurrent(); + current.setUserInfo(userInfo); + + // Load default handler preferences + Map handlerPreferences = current.getHandlerPreferences(); + List handlerKeys = notificationHandlerConfigKeyFacade.findAll(); + for (NotificationHandlerConfigKey key : handlerKeys) { + handlerPreferences.put(key.getId(), "true".equalsIgnoreCase(key.getDefaultValue())); + } + } + + /** + * Prepare to create a new email notification configuration with user's + * email pre-filled. 
+ */ + public void populateEmailInfo() { + NotificationConfiguration current = getCurrent(); + UserInfo currentUser = current.getUserInfo(); + + // Find Apprise provider + NotificationProvider appriseProvider = notificationProviderFacade.findByName(APPRISE_NOTIFICATION_PROVIDER_NAME); + if (appriseProvider == null) { + SessionUtility.addErrorMessage("Error", "Apprise provider not found in database"); + return; + } + + // Set provider + current.setNotificationProvider(appriseProvider); + loadProviderConfigKeys(); + + // Pre-fill name and notification endpoint with user's email + current.setName(currentUser.getUsername() + " Email Notifications"); + current.setDescription("Email notifications for " + currentUser.getUsername()); + current.setNotificationEndpoint("mailto://" + currentUser.getEmail()); + } + + /** + * Prepare to edit an existing configuration. + */ + public void prepareEditDialog(NotificationConfiguration config) { + super.prepareEdit(config); + setCurrent(config); + + NotificationConfiguration current = getCurrent(); + + Map configSettings = current.getConfigSettings(); + + configSettings.clear(); + + // Load provider info + NotificationProvider provider = config.getNotificationProvider(); + if (provider != null) { + loadProviderConfigKeys(); + + // Load existing config settings + List existingSettings + = notificationConfigurationSettingFacade.findByNotificationConfiguration(config); + for (NotificationConfigurationSetting setting : existingSettings) { + configSettings.put(setting.getNotificationProviderConfigKey().getId(), + setting.getConfigValue()); + } + + // Load existing handler preferences + populateHandlerPreferences(config); + } + } + + private void populateHandlerPreferences(NotificationConfiguration config) { + Map handlerPreferences = config.getHandlerPreferences(); + handlerPreferences.clear(); + + List handlerSettings + = notificationConfigurationHandlerSettingFacade.findByNotificationConfiguration(config); + for 
(NotificationConfigurationHandlerSetting setting : handlerSettings) { + handlerPreferences.put(setting.getNotificationHandlerConfigKey().getId(), + "true".equalsIgnoreCase(setting.getConfigValue())); + } + } + + /** + * Save the configuration (create or update). + */ + public void save() { + NotificationConfiguration current = getCurrent(); + + if (current.getId() == null) { + create(); + } else { + update(); + } + } + + private void loadProviderConfigKeys() { + NotificationConfiguration current = getCurrent(); + + NotificationProvider provider = getCurrent().getNotificationProvider(); + List providerConfigKeys = notificationProviderConfigKeyFacade.findByProvider(provider); + current.setProviderConfigKeys(providerConfigKeys); + Map configSettings = current.getConfigSettings(); + + for (NotificationProviderConfigKey key : providerConfigKeys) { + if (!configSettings.containsKey(key.getId())) { + configSettings.put(key.getId(), ""); + } + } + } + + /** + * Get active handler preferences for a configuration (for display in + * table). + * + * @param config + * @return + */ + public List getActiveHandlerPreferences(NotificationConfiguration config) { + List active = new ArrayList<>(); + List settings + = notificationConfigurationHandlerSettingFacade.findByNotificationConfiguration(config); + + for (NotificationConfigurationHandlerSetting setting : settings) { + if ("true".equalsIgnoreCase(setting.getConfigValue())) { + active.add(setting.getNotificationHandlerConfigKey()); + } + } + return active; + } + + /** + * Send a test notification via MQTT. 
+ * + * @param config + */ + public void sendTestNotification(NotificationConfiguration config) { + try { + // Get current username + UserInfoController userInfoController = UserInfoController.getInstance(); + String username = userInfoController.getCurrent().getUsername(); + + // Build provider settings map from persisted configuration settings + Map providerSettings = new HashMap<>(); + Collection settingsCollection + = config.getNotificationConfigurationSettingCollection(); + if (settingsCollection != null) { + for (NotificationConfigurationSetting setting : settingsCollection) { + String key = setting.getNotificationProviderConfigKey().getConfigKey(); + String value = setting.getConfigValue(); + if (key != null && value != null && !value.isEmpty()) { + providerSettings.put(key, value); + } + } + } + + // Create test notification event + TestNotificationEvent event = new TestNotificationEvent( + config.getNotificationEndpoint(), + config.getName(), + config.getId(), + providerSettings.isEmpty() ? null : providerSettings, + username + ); + + SessionUtility.publishMqttEvent(event); + SessionUtility.addInfoMessage("Test Sent", + "Test notification sent to: " + config.getName()); + } catch (Exception ex) { + SessionUtility.addErrorMessage("Error", + "Failed to send test notification: " + ex.getMessage()); + logger.error("Failed to send test notification", ex); + } + } + + /** + * Process unsubscribe request from URL parameters. Called by f:viewAction + * on page load. 
+ */ + public void processUnsubscribeRequest() { + LoginController loginController = LoginController.getInstance(); + if (!loginController.isLoggedIn()) { + return; + } + + String configIdStr = SessionUtility.getRequestParameterValue("configId"); + String notificationType = SessionUtility.getRequestParameterValue("notificationType"); + + if (configIdStr == null || notificationType == null) { + NotificationConfiguration config = new NotificationConfiguration(); + config.setUnsubscribeState(UnsubscribeState.ERROR); + config.setUnsubscribeError("Invalid unsubscribe request. Missing required parameters."); + setCurrent(config); + return; + } + + Integer configId; + try { + configId = Integer.valueOf(configIdStr); + } catch (NumberFormatException ex) { + NotificationConfiguration config = new NotificationConfiguration(); + config.setUnsubscribeState(UnsubscribeState.ERROR); + config.setUnsubscribeError("Invalid configuration ID."); + setCurrent(config); + return; + } + + NotificationConfiguration config = findById(configId); + if (config == null) { + NotificationConfiguration errorConfig = new NotificationConfiguration(); + errorConfig.setUnsubscribeState(UnsubscribeState.ERROR); + errorConfig.setUnsubscribeError("Notification configuration not found."); + setCurrent(errorConfig); + return; + } + + setCurrent(config); + populateHandlerPreferences(config); + + // Verify ownership or admin + UserInfo currentUser = SessionUtility.getUser(); + boolean isAdmin = loginController.isLoggedInAsAdmin(); + boolean isOwner = currentUser != null && config.getUserInfo() != null + && currentUser.getId().equals(config.getUserInfo().getId()); + if (!isAdmin && !isOwner) { + String ownerName = config.getUserInfo() != null ? config.getUserInfo().getUsername() : "unknown"; + config.setUnsubscribeState(UnsubscribeState.ERROR); + config.setUnsubscribeError("This notification configuration belongs to " + ownerName + + ". 
You do not have permission to modify it."); + return; + } + + if (isAdmin && !isOwner) { + config.setUnsubscribeForOtherUser(true); + } + + // Find handler key + NotificationHandlerConfigKey handlerKey = notificationHandlerConfigKeyFacade.findByConfigKey(notificationType); + if (handlerKey == null) { + config.setUnsubscribeState(UnsubscribeState.ERROR); + config.setUnsubscribeError("Unknown notification type: " + notificationType); + return; + } + + config.setUnsubscribeHandlerConfigKey(handlerKey); + + // Check if already unsubscribed + Boolean currentValue = getCurrent().getHandlerPreferences().get(handlerKey.getId()); + if (currentValue != null && !currentValue) { + config.setUnsubscribeState(UnsubscribeState.ALREADY_UNSUBSCRIBED); + return; + } + + config.setUnsubscribeState(UnsubscribeState.CONFIRM); + } + + /** + * Execute the unsubscribe action. Disables the notification type for the + * current configuration using the standard update flow. + */ + public void executeUnsubscribe() { + NotificationConfiguration current = getCurrent(); + if (current == null) { + NotificationConfiguration errorConfig = new NotificationConfiguration(); + errorConfig.setUnsubscribeState(UnsubscribeState.ERROR); + errorConfig.setUnsubscribeError("No configuration selected."); + setCurrent(errorConfig); + return; + } + + NotificationHandlerConfigKey handlerKey = current.getUnsubscribeHandlerConfigKey(); + if (handlerKey == null) { + current.setUnsubscribeState(UnsubscribeState.ERROR); + current.setUnsubscribeError("No notification type selected."); + return; + } + + try { + current.getHandlerPreferences().put(handlerKey.getId(), false); + update(); + + // Preserve transient fields on the refreshed entity + boolean unsubscribeForOtherUser = current.isUnsubscribeForOtherUser(); + + current = getCurrent(); + current.setUnsubscribeState(UnsubscribeState.SUCCESS); + current.setUnsubscribeHandlerConfigKey(handlerKey); + current.setUnsubscribeForOtherUser(unsubscribeForOtherUser); + } 
catch (Exception ex) { + current.setUnsubscribeState(UnsubscribeState.ERROR); + current.setUnsubscribeError("Failed to update notification setting: " + ex.getMessage()); + logger.error("Failed to execute unsubscribe", ex); + } + } + + /** + * Cancel the unsubscribe action and return to user profile. + * + * @return Navigation outcome + */ + public String cancelUnsubscribe() { + return redirectToSettingsForCurrentConfigurationUser(); + } + + public String redirectToSettingsForCurrentConfigurationUser() { + UserInfoController instance = UserInfoController.getInstance(); + + NotificationConfiguration current = getCurrent(); + if (current != null) { + instance.prepareView(current.getUserInfo()); + + return "/views/userInfo/edit.xhtml?faces-redirect=true"; + } + + SessionUtility.addErrorMessage("Error", "No configuration"); + + return "/"; + } + + public NotificationProvider getSelectedProvider() { + return getCurrent().getNotificationProvider(); + } + + public String getSelectedProviderHtmlInstructions() { + NotificationProvider provider = getSelectedProvider(); + if (provider != null) { + return provider.getHtmlInstructions(); + } + return null; + } + + public NotificationProvider findProviderById(Integer id) { + return notificationProviderFacade.find(id); + } + + @Override + public String destroy() { + super.destroy(); + return null; + } + + public static NotificationConfigurationController getInstance() { + return (NotificationConfigurationController) SessionUtility.findBean(NotificationConfigurationController.CONTROLLER_NAMED); + } + + public List getAvailableHandlerConfigKeys() { + return notificationHandlerConfigKeyFacade.findAll(); + } + + @FacesConverter(value = "notificationProviderConverter") + public static class NotificationProviderConverter implements Converter { + + @Override + public Object getAsObject(FacesContext facesContext, UIComponent component, String value) { + if (value == null || value.length() == 0 || "null".equals(value)) { + return null; + } 
+ NotificationConfigurationController controller = (NotificationConfigurationController) facesContext.getApplication().getELResolver(). + getValue(facesContext.getELContext(), null, NotificationConfigurationController.CONTROLLER_NAMED); + return controller.findProviderById(getKey(value)); + } + + java.lang.Integer getKey(String value) { + java.lang.Integer key; + key = Integer.valueOf(value); + return key; + } + + String getStringKey(java.lang.Integer value) { + StringBuilder sb = new StringBuilder(); + sb.append(value); + return sb.toString(); + } + + @Override + public String getAsString(FacesContext facesContext, UIComponent component, Object object) { + if (object == null) { + return ""; + } + if (object instanceof NotificationProvider) { + NotificationProvider o = (NotificationProvider) object; + return getStringKey(o.getId()); + } else { + throw new IllegalArgumentException("object " + object + " is of type " + object.getClass().getName() + "; expected type: " + NotificationProvider.class.getName()); + } + } + + } +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/SearchController.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/SearchController.java index 1db05036b..a518ab4c8 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/SearchController.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/SearchController.java @@ -12,7 +12,9 @@ import java.io.Serializable; import java.net.URLEncoder; import java.nio.charset.StandardCharsets; +import java.util.HashMap; import java.util.HashSet; +import java.util.Map; import java.util.Set; import javax.annotation.PostConstruct; import javax.enterprise.context.SessionScoped; @@ -31,12 +33,12 @@ public class SearchController implements Serializable { public static final String controllerNamed = "searchController"; private static final Logger logger = LogManager.getLogger(SearchController.class.getName()); - - private 
static final String SEARCH_TEXT_URL_KEY = "searchString"; - + + private static final String SEARCH_TEXT_URL_KEY = "searchString"; + private String searchString = null; - - private String searchOpts = null; + + private String searchOpts = null; private Boolean performSearch = false; private Boolean performExternallyInitializedSearch = false; @@ -44,7 +46,7 @@ public class SearchController implements Serializable { private SearchSettings searchSettings; private final Set searchableControllers; - + protected String contextRootPermanentUrl; /** @@ -73,9 +75,9 @@ public String performInputBoxSearchAdvamced() { getSearchSettings().setAdvancedSearch(true); return performInputBoxSearch(false); } - + public String getSearchPath() { - return "/views/search/search"; + return "/views/search/search"; } public String performInputBoxSearch() { @@ -100,7 +102,7 @@ public String getInputBoxSearchString() { } public void setInputBoxSearchString(String searchString) { - setSearchString(searchString); + setSearchString(searchString); } public void prepareSearch() { @@ -203,7 +205,7 @@ public String getSearchString() { public void setSearchString(String searchString) { this.searchString = searchString; - searchOpts = null; + searchOpts = null; } public SearchSettings getSearchSettings() { @@ -213,27 +215,26 @@ public SearchSettings getSearchSettings() { public void processPreRender() { searchSettings.updateSettings(); } - + public void processSearchRequestParams() { // User friendly search string in URL. 
- String text = SessionUtility.getRequestParameterValue(SEARCH_TEXT_URL_KEY); + String text = SessionUtility.getRequestParameterValue(SEARCH_TEXT_URL_KEY); if (text != null && !text.isEmpty()) { searchString = text; - performExternallyInitializedSearch = true; - return; - } + performExternallyInitializedSearch = true; + return; + } } - + public String getSearchOpts() { - if (searchOpts == null) { - searchOpts = URLEncoder.encode(searchString, StandardCharsets.UTF_8); + if (searchOpts == null) { + searchOpts = URLEncoder.encode(searchString, StandardCharsets.UTF_8); searchOpts = String.format("%s=%s", SEARCH_TEXT_URL_KEY, searchOpts); } - return searchOpts; + return searchOpts; } - + public String getSearchPermaLink() { return String.format("%s%s?%s", contextRootPermanentUrl, getSearchPath(), getSearchOpts()); } - } diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/settings/NotificationConfigurationSettings.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/settings/NotificationConfigurationSettings.java new file mode 100644 index 000000000..ba76f3af9 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/settings/NotificationConfigurationSettings.java @@ -0,0 +1,46 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. 
+ */ +package gov.anl.aps.logr.portal.controllers.settings; + +import javax.faces.event.ActionEvent; +import org.primefaces.component.datatable.DataTable; + +/** + * + * @author djarosz + */ +public class NotificationConfigurationSettings implements ICdbSettings { + + @Override + public boolean updateSettings() { + return false; + } + + @Override + public void saveDisplayListPageHelpFragmentActionListener() { + + } + + @Override + public void saveListSettingsForSessionSettingEntityActionListener(ActionEvent actionEvent) { + + } + + @Override + public void updateListSettingsFromListDataTable(DataTable dataTable) { + + } + + @Override + public void clearListFilters() { + + } + + @Override + public void clearSelectFilters() { + + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/CdbEntityControllerUtility.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/CdbEntityControllerUtility.java index 14bd21d11..e4feb57ce 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/CdbEntityControllerUtility.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/CdbEntityControllerUtility.java @@ -5,6 +5,11 @@ package gov.anl.aps.logr.portal.controllers.utilities; import gov.anl.aps.logr.common.exceptions.CdbException; +import gov.anl.aps.logr.common.exceptions.MqttNotConfiguredException; +import gov.anl.aps.logr.common.mqtt.model.AddEvent; +import gov.anl.aps.logr.common.mqtt.model.DeleteEvent; +import gov.anl.aps.logr.common.mqtt.model.UpdateEvent; +import gov.anl.aps.logr.common.mqtt.model.MqttEntityEvent; import gov.anl.aps.logr.common.utilities.StringUtility; import gov.anl.aps.logr.portal.constants.SystemLogLevel; import gov.anl.aps.logr.portal.model.db.beans.CdbEntityFacade; @@ -13,6 +18,7 @@ import gov.anl.aps.logr.portal.model.db.entities.PropertyValue; import gov.anl.aps.logr.portal.model.db.entities.UserInfo; import 
gov.anl.aps.logr.portal.utilities.SearchResult; +import gov.anl.aps.logr.portal.utilities.SessionUtility; import java.util.Date; import java.util.LinkedList; import java.util.List; @@ -23,43 +29,66 @@ import org.apache.logging.log4j.Logger; /** - * Controller utility provides unified functionality for managing entities - * to be used from view controllers as well as API endpoints. + * Controller utility provides unified functionality for managing entities to be + * used from view controllers as well as API endpoints. * * @author darek - * @param Database mapped class of the entity. - * @param Database facade provides communication to database. + * @param Database mapped class of the entity. + * @param Database facade provides communication to database. */ public abstract class CdbEntityControllerUtility> { - + private static final Logger logger = LogManager.getLogger(CdbEntityControllerUtility.class.getName()); - - LogControllerUtility logControllerUtility; - + + protected void publishMqttEvent(MqttEntityEvent event) { + try { + SessionUtility.publishMqttEvent(event); + } catch (MqttNotConfiguredException ex) { + logger.debug(ex); + return; + } catch (CdbException ex) { + logger.error(ex); + return; + } + + CdbEntity entity = event.getEntity(); + List actionEvents = entity.getActionEvents(); + if (actionEvents != null) { + for (MqttEntityEvent actionEvent : actionEvents) { + try { + SessionUtility.publishMqttEvent(actionEvent); + } catch (CdbException ex) { + logger.error(ex); + } + } + } + } + /** * Abstract method for returning entity DB facade. * * @return entity DB facade */ protected abstract FacadeType getEntityDbFacade(); - + /** * Abstract method for creating new entity instance. 
* * @return created entity instance */ - public abstract EntityType createEntityInstance(UserInfo sessionUser); - + public abstract EntityType createEntityInstance(UserInfo sessionUser); + public EntityType create(EntityType entity, UserInfo createdByUserInfo) throws CdbException, RuntimeException { - try { + try { prepareEntityInsert(entity, createdByUserInfo); getEntityDbFacade().create(entity); - + addCreatedSystemLog(entity, createdByUserInfo); entity.setPersitanceErrorMessage(null); - + publishMqttEvent(new AddEvent(entity, createdByUserInfo, "Add action completed")); + clearCaches(); - return entity; + return entity; } catch (CdbException ex) { logger.error("Could not create " + getDisplayEntityTypeName() + ": " + ex.getMessage()); addCreatedWarningSystemLog(ex, entity, createdByUserInfo); @@ -73,15 +102,18 @@ public EntityType create(EntityType entity, UserInfo createdByUserInfo) throws C throw ex; } } - + public void createList(List entities, UserInfo createdByUserInfo) throws CdbException, RuntimeException { try { for (EntityType entity : entities) { prepareEntityInsert(entity, createdByUserInfo); } - getEntityDbFacade().create(entities); - - addCdbEntitySystemLog(SystemLogLevel.entityInfo, "Created " + entities.size() + " entities.", createdByUserInfo); + getEntityDbFacade().create(entities); + + addCdbEntitySystemLog(SystemLogLevel.entityInfo, "Created " + entities.size() + " entities.", createdByUserInfo); + for (EntityType entity : entities) { + publishMqttEvent(new AddEvent(entity, createdByUserInfo, "Add action completed")); + } setPersistenceErrorMessageForList(entities, null); clearCaches(); } catch (CdbException ex) { @@ -95,59 +127,62 @@ public void createList(List entities, UserInfo createdByUserInfo) th setPersistenceErrorMessageForList(entities, ex.getMessage()); addCdbEntityWarningSystemLog("Failed to create list of entities: " + getDisplayEntityTypeName(), ex, null, createdByUserInfo); throw ex; - } + } } - + public EntityType 
update(EntityType entity, UserInfo updatedByUserInfo) throws CdbException, RuntimeException { - try { + try { logger.debug("Updating " + getDisplayEntityTypeName() + " " + getEntityInstanceName(entity)); prepareEntityUpdate(entity, updatedByUserInfo); EntityType updatedEntity = getEntityDbFacade().edit(entity); addCdbEntitySystemLog(SystemLogLevel.entityInfo, "Updated: " + entity.getSystemLogString(), updatedByUserInfo); + publishMqttEvent(new UpdateEvent(entity, updatedByUserInfo, "Update action completed")); entity.setPersitanceErrorMessage(null); - + clearCaches(); - return updatedEntity; + return updatedEntity; } catch (CdbException ex) { entity.setPersitanceErrorMessage(ex.getMessage()); addCdbEntityWarningSystemLog("Failed to update", ex, entity, updatedByUserInfo); logger.error("Could not update " + getDisplayEntityTypeName() + " " - + getEntityInstanceName(entity)+ ": " + ex.getMessage()); + + getEntityInstanceName(entity) + ": " + ex.getMessage()); throw ex; } catch (RuntimeException ex) { - Throwable t = ExceptionUtils.getRootCause(ex); + Throwable t = ExceptionUtils.getRootCause(ex); logger.error("Could not update " + getDisplayEntityTypeName() + " " - + getEntityInstanceName(entity)+ ": " + t.getMessage()); + + getEntityInstanceName(entity) + ": " + t.getMessage()); addCdbEntityWarningSystemLog("Failed to update", ex, entity, updatedByUserInfo); entity.setPersitanceErrorMessage(t.getMessage()); throw ex; - } + } } - + public EntityType updateOnRemoval(EntityType entity, UserInfo updatedByUserInfo) throws CdbException, RuntimeException { try { logger.debug("Updating " + getDisplayEntityTypeName() + " " + getEntityInstanceName(entity)); prepareEntityUpdateOnRemoval(entity); EntityType updatedEntity = getEntityDbFacade().edit(entity); clearCaches(); - - return updatedEntity; + + publishMqttEvent(new UpdateEvent(entity, updatedByUserInfo, "Update on removal action completed")); + + return updatedEntity; } catch (CdbException ex) { 
entity.setPersitanceErrorMessage(ex.getMessage()); addCdbEntityWarningSystemLog("Failed to update", ex, entity, updatedByUserInfo); logger.error("Could not update " + getDisplayEntityTypeName() + " " - + getEntityInstanceName(entity)+ ": " + ex.getMessage()); + + getEntityInstanceName(entity) + ": " + ex.getMessage()); throw ex; } catch (RuntimeException ex) { - Throwable t = ExceptionUtils.getRootCause(ex); + Throwable t = ExceptionUtils.getRootCause(ex); logger.error("Could not update " + getDisplayEntityTypeName() + " " - + getEntityInstanceName(entity)+ ": " + t.getMessage()); + + getEntityInstanceName(entity) + ": " + t.getMessage()); addCdbEntityWarningSystemLog("Failed to update", ex, entity, updatedByUserInfo); entity.setPersitanceErrorMessage(t.getMessage()); throw ex; - } + } } - + public void updateList(List entities, UserInfo updatedByUserInfo) throws CdbException, RuntimeException { try { for (EntityType entity : entities) { @@ -155,9 +190,10 @@ public void updateList(List entities, UserInfo updatedByUserInfo) th prepareEntityUpdate(entity, updatedByUserInfo); } getEntityDbFacade().edit(entities); - for (EntityType entity : entities) { + for (EntityType entity : entities) { entity.setPersitanceErrorMessage(null); addCdbEntitySystemLog(SystemLogLevel.entityInfo, "Updated: " + entity.getSystemLogString(), updatedByUserInfo); + publishMqttEvent(new UpdateEvent(entity, updatedByUserInfo, "Update action completed")); } clearCaches(); } catch (CdbException ex) { @@ -168,35 +204,37 @@ public void updateList(List entities, UserInfo updatedByUserInfo) th } catch (RuntimeException ex) { Throwable t = ExceptionUtils.getRootCause(ex); logger.error("Could not update list of " + getDisplayEntityTypeName() + ": " + t.getMessage()); - addCdbEntityWarningSystemLog("Failed to update list of " + getDisplayEntityTypeName(), ex, null, updatedByUserInfo); + addCdbEntityWarningSystemLog("Failed to update list of " + getDisplayEntityTypeName(), ex, null, updatedByUserInfo); 
setPersistenceErrorMessageForList(entities, t.getMessage()); throw ex; - } + } } - + public void destroy(EntityType entity, UserInfo destroyedByUserInfo) throws CdbException, RuntimeException { try { prepareEntityDestroy(entity, destroyedByUserInfo); getEntityDbFacade().remove(entity); - - addCdbEntitySystemLog(SystemLogLevel.entityInfo, "Deleted: " + entity.getSystemLogString(), destroyedByUserInfo); + + addCdbEntitySystemLog(SystemLogLevel.entityInfo, "Deleted: " + entity.getSystemLogString(), destroyedByUserInfo); + publishMqttEvent(new DeleteEvent(entity, destroyedByUserInfo, "Delete action completed")); + clearCaches(); } catch (CdbException ex) { entity.setPersitanceErrorMessage(ex.getMessage()); addCdbEntityWarningSystemLog("Failed to destroy", ex, entity, destroyedByUserInfo); logger.error("Could not destroy " + getDisplayEntityTypeName() + " " - + getEntityInstanceName(entity)+ ": " + ex.getMessage()); + + getEntityInstanceName(entity) + ": " + ex.getMessage()); throw ex; } catch (RuntimeException ex) { - Throwable t = ExceptionUtils.getRootCause(ex); + Throwable t = ExceptionUtils.getRootCause(ex); logger.error("Could not destroy " + getDisplayEntityTypeName() + " " - + getEntityInstanceName(entity)+ ": " + t.getMessage()); + + getEntityInstanceName(entity) + ": " + t.getMessage()); addCdbEntityWarningSystemLog("Failed to destroy", ex, entity, destroyedByUserInfo); entity.setPersitanceErrorMessage(t.getMessage()); throw ex; - } + } } - + public void destroyList( List entities, EntityType updateEntity, UserInfo destroyedByUserInfo) @@ -216,6 +254,9 @@ public void destroyList( getEntityDbFacade().remove(entities, updateEntity); addCdbEntitySystemLog(SystemLogLevel.entityInfo, "Deleted: " + entities.size() + " entities.", destroyedByUserInfo); + for (EntityType entity : entities) { + publishMqttEvent(new DeleteEvent(entity, destroyedByUserInfo, "Delete action completed")); + } setPersistenceErrorMessageForList(entities, null); clearCaches(); } catch 
(CdbException ex) { @@ -228,15 +269,15 @@ public void destroyList( setPersistenceErrorMessageForList(entities, ex.getMessage()); addCdbEntityWarningSystemLog("Failed to delete list of " + getDisplayEntityTypeName(), ex, updateEntity, destroyedByUserInfo); throw ex; - } + } } - + /** - * On database operation clear cache of related cached entity when needed. + * On database operation clear cache of related cached entity when needed. */ - protected void clearCaches() { + protected void clearCaches() { } - + /** * Find entity instance by id. * @@ -246,46 +287,46 @@ protected void clearCaches() { public EntityType findById(Integer id) { return getEntityDbFacade().find(id); } - + /** - * Used by import framework. Looks up entity by path. Default implementation - * raises exception. Subclasses should override to provide support for lookup - * by path. + * Used by import framework. Looks up entity by path. Default implementation + * raises exception. Subclasses should override to provide support for + * lookup by path. 
*/ public EntityType findByPath(String path) throws CdbException { throw new CdbException("controller utility does not support lookup by path"); } - + public String getEntityInstanceName(EntityType entity) { if (entity != null) { - return entity.toString(); + return entity.toString(); } - return ""; - } - - public abstract String getEntityTypeName(); - + return ""; + } + + public abstract String getEntityTypeName(); + public String getDisplayEntityTypeName() { String entityTypeName = getEntityTypeName(); - - entityTypeName = entityTypeName.substring(0, 1).toUpperCase() + entityTypeName.substring(1); - - String displayEntityTypeName = ""; - - int prevEnd = 0; + + entityTypeName = entityTypeName.substring(0, 1).toUpperCase() + entityTypeName.substring(1); + + String displayEntityTypeName = ""; + + int prevEnd = 0; for (int i = 1; i < entityTypeName.length(); i++) { - Character c = entityTypeName.charAt(i); + Character c = entityTypeName.charAt(i); if (Character.isUpperCase(c)) { - displayEntityTypeName += entityTypeName.substring(prevEnd, i) + " "; - prevEnd = i; + displayEntityTypeName += entityTypeName.substring(prevEnd, i) + " "; + prevEnd = i; } } - - displayEntityTypeName += entityTypeName.substring(prevEnd); - - return displayEntityTypeName; + + displayEntityTypeName += entityTypeName.substring(prevEnd); + + return displayEntityTypeName; } - + /** * Prepare entity insert. * @@ -294,9 +335,9 @@ public String getDisplayEntityTypeName() { * @param entity entity instance * @throws CdbException in case of any errors */ - protected void prepareEntityInsert(EntityType entity, UserInfo userInfo) throws CdbException { + protected void prepareEntityInsert(EntityType entity, UserInfo userInfo) throws CdbException { } - + /** * Prepare entity update. 
* @@ -305,24 +346,24 @@ protected void prepareEntityInsert(EntityType entity, UserInfo userInfo) throws * @param entity entity instance * @throws CdbException in case of any errors */ - protected void prepareEntityUpdate(EntityType entity, UserInfo updatedByUser) throws CdbException { + protected void prepareEntityUpdate(EntityType entity, UserInfo updatedByUser) throws CdbException { } - + protected void prepareEntityUpdateOnRemoval(EntityType entity) throws CdbException { } - - protected void prepareEntityDestroy(EntityType entity, UserInfo userInfo) throws CdbException { + + protected void prepareEntityDestroy(EntityType entity, UserInfo userInfo) throws CdbException { } - + protected void addCreatedSystemLog(EntityType entity, UserInfo createdByUserInfo) throws CdbException { - String message = "Created: " + entity.getSystemLogString(); - addCdbEntitySystemLog(SystemLogLevel.entityInfo, message, createdByUserInfo); + String message = "Created: " + entity.getSystemLogString(); + addCdbEntitySystemLog(SystemLogLevel.entityInfo, message, createdByUserInfo); } - + protected void addCreatedWarningSystemLog(Exception exception, EntityType entity, UserInfo createdByUserInfo) throws CdbException { addCdbEntityWarningSystemLog("Failed to create", exception, entity, createdByUserInfo); } - + /** * Allows the controller to quickly add a warning log entry while * automatically appending appropriate info. @@ -342,8 +383,8 @@ protected void addCdbEntityWarningSystemLog(String warningMessage, Exception exc addCdbEntitySystemLog(SystemLogLevel.entityWarning, warningMessage, sessionUser); } - - /** + + /** * Allows the controller to quickly add a log entry to system logs with * current session user stamp. 
* @@ -351,7 +392,7 @@ protected void addCdbEntityWarningSystemLog(String warningMessage, Exception exc * @param message * @param sessionUser */ - protected void addCdbEntitySystemLog(SystemLogLevel logLevel, String message, UserInfo sessionUser) throws CdbException { + protected void addCdbEntitySystemLog(SystemLogLevel logLevel, String message, UserInfo sessionUser) throws CdbException { if (sessionUser != null) { String username = sessionUser.getUsername(); message = "User: " + username + " | " + message; @@ -359,36 +400,36 @@ protected void addCdbEntitySystemLog(SystemLogLevel logLevel, String message, Us LogControllerUtility logControllerUtility = LogControllerUtility.getSystemLogInstance(); logControllerUtility.addSystemLog(logLevel, message); } - + protected void setPersistenceErrorMessageForList(List entities, String msg) { for (EntityType entity : entities) { entity.setPersitanceErrorMessage(msg); } - } + } public List getAllEntities() { return getEntityDbFacade().findAll(); } - + public List searchEntities(String searchString, Map searchOpts) { - return searchEntities(searchString); + return searchEntities(searchString); } - + public List searchEntities(String searchString) { - return getEntityDbFacade().searchEntities(searchString); + return getEntityDbFacade().searchEntities(searchString); } - + public String generatePatternString(String searchString) { - String patternString; - if (searchString.contains("?") || searchString.contains("*")) { - patternString = searchString.replace("*", ".*"); + String patternString; + if (searchString.contains("?") || searchString.contains("*")) { + patternString = searchString.replace("*", ".*"); patternString = patternString.replace("?", "."); } else { - patternString = Pattern.quote(searchString); + patternString = Pattern.quote(searchString); } - return patternString; + return patternString; } - + public Pattern getSearchPattern(String patternString, boolean caseInsensitive) { Pattern searchPattern; if 
(caseInsensitive) { @@ -396,56 +437,55 @@ public Pattern getSearchPattern(String patternString, boolean caseInsensitive) { } else { searchPattern = Pattern.compile(patternString); } - - return searchPattern; + + return searchPattern; } - + public LinkedList performEntitySearch(String searchString, boolean caseInsensitive) { - return performEntitySearch(searchString, null, caseInsensitive); + return performEntitySearch(searchString, null, caseInsensitive); } - + /** * Search all entities for a given string. * * @param searchString search string * @param caseInsensitive use case insensitive search - * @return + * @return */ public LinkedList performEntitySearch(String searchString, Map searchOpts, boolean caseInsensitive) { - LinkedList searchResultList = new LinkedList<>(); - if (searchString == null || searchString.isEmpty()) { + LinkedList searchResultList = new LinkedList<>(); + if (searchString == null || searchString.isEmpty()) { return searchResultList; } - + // Start new search String patternString = generatePatternString(searchString); - Pattern searchPattern = getSearchPattern(patternString, caseInsensitive); - - + Pattern searchPattern = getSearchPattern(patternString, caseInsensitive); + List allObjectList = searchEntities(searchString, searchOpts); for (EntityType entity : allObjectList) { try { SearchResult searchResult = entity.createSearchResultInfo(searchPattern); - if (!searchResult.isEmpty()) { - searchResultList.add(searchResult); + if (!searchResult.isEmpty()) { + searchResultList.add(searchResult); } } catch (RuntimeException ex) { logger.warn("Could not search entity " + entity.toString() + " (Error: " + ex.toString() + ")"); } } - - return searchResultList; + + return searchResultList; } - + public PropertyValue preparePropertyTypeValueAdd(EntityType cdbDomainEntity, PropertyType propertyType) { return preparePropertyTypeValueAdd(cdbDomainEntity, propertyType, propertyType.getDefaultValue(), null); } public PropertyValue 
preparePropertyTypeValueAdd(EntityType cdbDomainEntity, - PropertyType propertyType, String propertyValueString, String tag) { + PropertyType propertyType, String propertyValueString, String tag) { // Implement in controller with entity info. - return null; + return null; } public PropertyValue preparePropertyTypeValueAdd(EntityType cdbEntity, @@ -474,7 +514,7 @@ public PropertyValue preparePropertyTypeValueAdd(EntityType cdbEntity, } public PropertyType prepareCableEndDesignationPropertyType() { - + PropertyTypeControllerUtility propertyTypeControllerUtility = new PropertyTypeControllerUtility(); PropertyType propertyType = propertyTypeControllerUtility.createEntityInstance(null); @@ -488,7 +528,7 @@ public PropertyType prepareCableEndDesignationPropertyType() { logger.error(ex.getMessage()); return null; } - + return propertyType; } @@ -510,5 +550,5 @@ public String getDisplayItemConnectorsLabel() { String labelString = StringUtility.capitalize(getDisplayItemConnectorName()); return labelString + "s"; } - + } diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/ItemDomainLogbookControllerUtility.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/ItemDomainLogbookControllerUtility.java index 7dd0cd904..75fe1d521 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/ItemDomainLogbookControllerUtility.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/ItemDomainLogbookControllerUtility.java @@ -6,6 +6,9 @@ import gov.anl.aps.logr.common.exceptions.CdbException; import gov.anl.aps.logr.common.exceptions.InvalidObjectState; +import gov.anl.aps.logr.common.mqtt.constants.ChangeType; +import gov.anl.aps.logr.common.mqtt.model.LogEntryEvent; +import gov.anl.aps.logr.common.mqtt.model.ReplyLogEntryEvent; import gov.anl.aps.logr.common.utilities.CollectionUtility; import gov.anl.aps.logr.portal.constants.ItemDomainName; import 
gov.anl.aps.logr.portal.constants.LogDocumentSettings; @@ -29,6 +32,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.regex.Pattern; /** * @@ -337,9 +341,9 @@ public LinkedList performEntitySearch(String searchString, Map sea // Add search opts to match description. for (SearchResult result : searchResultList) { - addCommonLogEntryDocumentMatches(result, searchEntityTypeList, searchItemTypeList); - - ItemDomainLogbook resultItem = (ItemDomainLogbook) result.getCdbEntity(); + addCommonLogEntryDocumentMatches(result, searchEntityTypeList, searchItemTypeList); + + ItemDomainLogbook resultItem = (ItemDomainLogbook) result.getCdbEntity(); EntityInfo entityInfo = resultItem.getEntityInfo(); if (searchUserList != null && !searchUserList.isEmpty()) { @@ -404,6 +408,120 @@ public void addCommonLogEntryDocumentMatches(SearchResult result, List searchLogEntries(String searchString, boolean caseInsensitive, Map searchOpts) { + LinkedList logEntryResults = new LinkedList<>(); + + List searchEntityTypeList = null; + List searchItemTypeList = null; + List searchUserList = null; + String entityTypeIdList = null; + String itemTypeIdList = null; + String userIdList = null; + Date startModifiedTime = null; + Date endModifiedTime = null; + Date startCreatedTime = null; + Date endCreatedTime = null; + + if (searchOpts != null) { + searchEntityTypeList = (List) searchOpts.get(SEARCH_OPT_KEY_ENTITY_TYPE_LIST); + searchItemTypeList = (List) searchOpts.get(SEARCH_OPT_KEY_ITEM_TYPE_LIST); + searchUserList = (List) searchOpts.get(SEARCH_OPT_KEY_USER_LIST); + + entityTypeIdList = CollectionUtility.generateIdListString(searchEntityTypeList); + itemTypeIdList = CollectionUtility.generateIdListString(searchItemTypeList); + userIdList = CollectionUtility.generateIdListString(searchUserList); + + startModifiedTime = (Date) searchOpts.get(SEARCH_OPT_KEY_START_MODIFIED_TIME); + endModifiedTime = (Date) searchOpts.get(SEARCH_OPT_KEY_END_MODIFIED_TIME); + 
startCreatedTime = (Date) searchOpts.get(SEARCH_OPT_KEY_START_CREATED_TIME); + endCreatedTime = (Date) searchOpts.get(SEARCH_OPT_KEY_END_CREATED_TIME); + } + + List results = getEntityDbFacade().searchEntityLogs( + searchString, itemTypeIdList, entityTypeIdList, userIdList, + startModifiedTime, endModifiedTime, startCreatedTime, endCreatedTime); + + String patternString = generatePatternString(searchString); + Pattern searchPattern = getSearchPattern(patternString, caseInsensitive); + + for (Object[] result : results) { + ItemDomainLogbook logbook = (ItemDomainLogbook) result[0]; + Log log = (Log) result[1]; + Long logId = (Long) result[2]; + + SearchResult searchResult = new SearchResult(logbook, logbook.getId(), logbook.getName(), log); + searchResult.setAdditionalAttribute("" + logId); + + String text = log.getText(); + String[] logLines = text.split("\n"); + String matchingLines = ""; + + for (String lineText : logLines) { + if (searchPattern.matcher(lineText).find()) { + matchingLines += lineText + "\n"; + } + } + searchResult.addAttributeMatch("log entry", matchingLines); + + addCommonLogEntryDocumentMatches(searchResult, searchEntityTypeList, searchItemTypeList); + + if (searchUserList != null && !searchUserList.isEmpty()) { + for (UserInfo ui : searchUserList) { + Integer searchUserId = ui.getId(); + + UserInfo enteredByUser = log.getEnteredByUser(); + UserInfo lastModifiedByUser = log.getLastModifiedByUser(); + + if (Objects.equals(enteredByUser.getId(), searchUserId)) { + searchResult.addAttributeMatch("Create User", enteredByUser.toString()); + } + if (Objects.equals(lastModifiedByUser.getId(), searchUserId)) { + searchResult.addAttributeMatch("Last Modify User", lastModifiedByUser.toString()); + } + } + } + + if (startCreatedTime != null || endCreatedTime != null) { + Date enteredOnDateTime = log.getEnteredOnDateTime(); + searchResult.addAttributeMatch("Created on", enteredOnDateTime.toString()); + } + + if (startModifiedTime != null || endModifiedTime 
!= null) { + Date modifiedOnDateTime = log.getLastModifiedOnDateTime(); + searchResult.addAttributeMatch("Modified on", modifiedOnDateTime.toString()); + } + + logEntryResults.add(searchResult); + } + + return logEntryResults; + } + + /** + * Adjust end time to end-of-day (23:59:59) for date range searches. + * Shared between UI controller and REST API. + */ + public static Date adjustEndTimeForSearch(Date endTime) { + if (endTime != null) { + Calendar endDateCal = Calendar.getInstance(); + endDateCal.setTime(endTime); + endDateCal.set(Calendar.HOUR, 23); + endDateCal.set(Calendar.MINUTE, 59); + endDateCal.set(Calendar.SECOND, 59); + endTime = endDateCal.getTime(); + } + return endTime; + } + public static void copyLogs(ItemDomainLogbook oldLogDoc, ItemDomainLogbook newLogDoc) { List logList = oldLogDoc.getLogList(); EntityInfo entityInfo = newLogDoc.getEntityInfo(); @@ -422,4 +540,111 @@ public static void copyLogs(ItemDomainLogbook oldLogDoc, ItemDomainLogbook newLo } } + public static ItemDomainLogbook getParentLogDocument(Log logEntry) { + Log parentLog = logEntry.getParentLog(); + + if (parentLog != null) { + // Parent log has association to the log document. + logEntry = parentLog; + } + List itemElementList = logEntry.getItemElementList(); + + if (itemElementList != null && itemElementList.size() == 1) { + // This should always happen. + // No exception however since this is required for notification framework not core functionality. 
+ ItemElement parentElement = itemElementList.get(0); + Item parentItem = parentElement.getParentItem(); + if (parentItem instanceof ItemDomainLogbook) { + return (ItemDomainLogbook) parentItem; + } + } + return null; + + } + + private String getLogDiffString(Log originalLog, Log updatedLog) { + String originalText; + String updatedText = updatedLog.getText(); + + if (originalLog != null) { + originalText = originalLog.getText(); + } else { + return updatedText; + } + + StringBuilder diffOutput = new StringBuilder(); + String[] originalLines = originalText.split("\n"); + String[] updatedLines = updatedText.split("\n"); + + int maxLines = Math.max(originalLines.length, updatedLines.length); + + for (int i = 0; i < maxLines; i++) { + String originalLine = i < originalLines.length ? originalLines[i] : ""; + String updatedLine = i < updatedLines.length ? updatedLines[i] : ""; + + if (!originalLine.equals(updatedLine)) { + if (i < originalLines.length) { + diffOutput.append("- ").append(originalLine).append("\n"); + } + if (i < updatedLines.length) { + diffOutput.append("+ ").append(updatedLine).append("\n"); + } + } else if (i < originalLines.length) { + diffOutput.append(" ").append(originalLine).append("\n"); + } + } + return diffOutput.toString(); + + } + + public void destroyLogEntry(Log logEntity, UserInfo user) throws CdbException { + addLogEntryMqttEvent(logEntity, null, user, true); + + LogControllerUtility utility = new LogControllerUtility(); + utility.destroy(logEntity, user); + } + + private void addLogEntryMqttEvent(Log logEntity, Log originalLog, UserInfo user, boolean isDestroy) { + String logDiffString = getLogDiffString(originalLog, logEntity); + + // Avoid duplicates + logEntity.clearActionEvents(); + + ItemDomainLogbook parentLogbook = getParentLogDocument(logEntity); + + Log parentLog = logEntity.getParentLog(); + Integer id = logEntity.getId(); + ChangeType changeType; + + String description = ""; + if (isDestroy) { + description += "log entry was 
deleted"; + changeType = ChangeType.DELETE; + } else if (id == null) { + description += "log entry was added"; + changeType = ChangeType.ADD; + } else { + description += "log entry id [" + id + "] was modified"; + changeType = ChangeType.UPDATE; + } + + if (parentLog != null) { + description = "reply " + description; + + // Reply + logEntity.addActionEvent(new ReplyLogEntryEvent(parentLogbook, logEntity, user, description, logDiffString, changeType)); + } else { + logEntity.addActionEvent(new LogEntryEvent(parentLogbook, logEntity, user, description, logDiffString, changeType)); + } + } + + public Log saveLog(Log logEntity, UserInfo user, Log originalLog) throws CdbException { + LogControllerUtility utility = new LogControllerUtility(); + + addLogEntryMqttEvent(logEntity, originalLog, user, false); + + // Add a generic log entry. + return utility.saveLogEntry(logEntity, user); + } + } diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/LogControllerUtility.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/LogControllerUtility.java index f6a7b5ad0..6b2db6543 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/LogControllerUtility.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/LogControllerUtility.java @@ -5,10 +5,12 @@ package gov.anl.aps.logr.portal.controllers.utilities; import gov.anl.aps.logr.common.exceptions.CdbException; +import gov.anl.aps.logr.common.mqtt.model.MqttEntityEvent; import gov.anl.aps.logr.portal.constants.SystemLogLevel; import gov.anl.aps.logr.portal.model.db.beans.LogFacade; import gov.anl.aps.logr.portal.model.db.beans.LogLevelFacade; import gov.anl.aps.logr.portal.model.db.beans.UserInfoFacade; +import gov.anl.aps.logr.portal.model.db.entities.CdbEntity; import gov.anl.aps.logr.portal.model.db.entities.Log; import gov.anl.aps.logr.portal.model.db.entities.LogLevel; import 
gov.anl.aps.logr.portal.model.db.entities.UserInfo; @@ -19,47 +21,47 @@ * * @author darek */ -public class LogControllerUtility extends CdbEntityControllerUtility{ - +public class LogControllerUtility extends CdbEntityControllerUtility { + @EJB - private LogFacade logFacade; - + private LogFacade logFacade; + @EJB private LogLevelFacade logLevelFacade; @EJB private UserInfoFacade userInfoFacade; - + private final String DEFAULT_SYSTEM_ADMIN_USERNAME = "logr"; - + private static LogControllerUtility systemLogInstance; public LogControllerUtility() { if (logFacade == null) { - logFacade = LogFacade.getInstance(); + logFacade = LogFacade.getInstance(); } if (logLevelFacade == null) { logLevelFacade = LogLevelFacade.getInstance(); } if (userInfoFacade == null) { - userInfoFacade = UserInfoFacade.getInstance(); + userInfoFacade = UserInfoFacade.getInstance(); } } @Override - protected LogFacade getEntityDbFacade() { - return logFacade; + protected LogFacade getEntityDbFacade() { + return logFacade; } - + public static synchronized LogControllerUtility getSystemLogInstance() { if (systemLogInstance == null) { systemLogInstance = new LogControllerUtility(); } return systemLogInstance; } - + public void addSystemLog(SystemLogLevel systemlogLevel, String logMessage) throws CdbException { - String logLevelName = systemlogLevel.toString(); + String logLevelName = systemlogLevel.toString(); UserInfo enteredByUser = userInfoFacade.findByUsername(DEFAULT_SYSTEM_ADMIN_USERNAME); if (enteredByUser == null) { throw new CdbException("User '" + DEFAULT_SYSTEM_ADMIN_USERNAME + "' needs to be in the system. 
Please notify system administrator."); @@ -79,28 +81,29 @@ public void addSystemLog(SystemLogLevel systemlogLevel, String logMessage) throw newSystemLog.setText(logMessage); newSystemLog.setEnteredOnDateTime(enteredOnDateTime); newSystemLog.setEnteredByUser(enteredByUser); - + newSystemLog.markAsSystemLog(); + create(newSystemLog, enteredByUser); } @Override protected void prepareEntityUpdate(Log entity, UserInfo updatedByUser) throws CdbException { super.prepareEntityUpdate(entity, updatedByUser); - + Date lastModifiedOnDate = new Date(); entity.setLastModifiedOnDateTime(lastModifiedOnDate); - entity.setLastModifiedByUser(updatedByUser); + entity.setLastModifiedByUser(updatedByUser); } @Override protected void prepareEntityInsert(Log entity, UserInfo userInfo) throws CdbException { super.prepareEntityInsert(entity, userInfo); - + Date enteredOnDateTime = entity.getEnteredOnDateTime(); entity.setLastModifiedOnDateTime(enteredOnDateTime); entity.setLastModifiedByUser(userInfo); } - + protected Log createEntityInstance() { return new Log(); } @@ -110,7 +113,7 @@ protected void addCdbEntitySystemLog(SystemLogLevel logLevel, String message, Us // No need to create system logs. return; } - + @Override protected void addCreatedSystemLog(Log entity, UserInfo createdByUserInfo) { // No need to create a system log when creating a log. @@ -122,7 +125,7 @@ protected void addCreatedWarningSystemLog(Exception exception, Log entity, UserI // No need to create a system log when creating a log. 
return; } - + @Override public String getEntityTypeName() { return "log"; @@ -130,8 +133,26 @@ public String getEntityTypeName() { @Override public Log createEntityInstance(UserInfo sessionUser) { - return new Log(); + return new Log(); + } + + @Override + protected void publishMqttEvent(MqttEntityEvent event) { + CdbEntity entity = event.getEntity(); + if (entity instanceof Log) { + if (((Log) entity).isSystemLog()) { + return; + } + } + super.publishMqttEvent(event); + } + + public Log saveLogEntry(Log log, UserInfo userInfo) throws CdbException { + if (log.getId() != null) { + return update(log, userInfo); + } else { + return create(log, userInfo); + } } - } diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/LogReactionControllerUtility.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/LogReactionControllerUtility.java new file mode 100644 index 000000000..c4351ffba --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/LogReactionControllerUtility.java @@ -0,0 +1,96 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. 
+ */ +package gov.anl.aps.logr.portal.controllers.utilities; + +import gov.anl.aps.logr.common.exceptions.CdbException; +import gov.anl.aps.logr.common.mqtt.model.LogReactionEvent; +import gov.anl.aps.logr.portal.model.db.beans.LogFacade; +import gov.anl.aps.logr.portal.model.db.beans.LogReactionFacade; +import gov.anl.aps.logr.portal.model.db.entities.ItemDomainLogbook; +import gov.anl.aps.logr.portal.model.db.entities.Log; +import gov.anl.aps.logr.portal.model.db.entities.LogReaction; +import gov.anl.aps.logr.portal.model.db.entities.Reaction; +import gov.anl.aps.logr.portal.model.db.entities.UserInfo; +import java.util.List; + +/** + * + * @author djarosz + */ +public class LogReactionControllerUtility extends CdbEntityControllerUtility { + + LogReactionFacade logReactionFacade; + LogFacade logFacade; + + public LogReactionControllerUtility() { + logReactionFacade = LogReactionFacade.getInstance(); + logFacade = LogFacade.getInstance(); + } + + @Override + protected LogReactionFacade getEntityDbFacade() { + return logReactionFacade; + } + + @Override + public LogReaction createEntityInstance(UserInfo sessionUser) { + LogReaction lr = new LogReaction(); + lr.setUserInfo(sessionUser); + return lr; + } + + @Override + public String getEntityTypeName() { + return "Log Reaction"; + } + + public void toggleReaction(Log entry, Reaction reaction, UserInfo user) throws CdbException { + // Fetch the latest version + entry = logFacade.find(entry.getId()); + + List logReactionList = entry.getLogReactionList(); + LogReaction dbReaction = null; + + // Check if need to remove log reaction. 
+ for (LogReaction lr : logReactionList) { + UserInfo userId = lr.getUserInfo(); + + if (user.equals(userId)) { + Reaction existingReaction = lr.getReaction(); + + if (existingReaction.equals(reaction)) { + dbReaction = lr; + break; + } + } + } + + ItemDomainLogbook parentLogDoc = ItemDomainLogbookControllerUtility.getParentLogDocument(entry); + + if (dbReaction != null) { + dbReaction.addActionEvent(new LogReactionEvent(dbReaction, + entry, + parentLogDoc, + user, + "User removed a reaction", + true)); + logReactionList.remove(dbReaction); + destroy(dbReaction, user); + } else { + LogReaction lr = createEntityInstance(user); + lr.setLog(entry); + lr.setReaction(reaction); + lr.addActionEvent(new LogReactionEvent(lr, + entry, + parentLogDoc, + user, + "User added a reaction", + false)); + create(lr, user); + } + + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/NotificationConfigurationControllerUtility.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/NotificationConfigurationControllerUtility.java new file mode 100644 index 000000000..ffe34cdcf --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/NotificationConfigurationControllerUtility.java @@ -0,0 +1,142 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. 
+ */ +package gov.anl.aps.logr.portal.controllers.utilities; + +import gov.anl.aps.logr.common.exceptions.CdbException; +import gov.anl.aps.logr.common.exceptions.InvalidObjectState; +import gov.anl.aps.logr.portal.model.db.beans.NotificationConfigurationFacade; +import gov.anl.aps.logr.portal.model.db.beans.NotificationHandlerConfigKeyFacade; +import gov.anl.aps.logr.portal.model.db.entities.NotificationConfiguration; +import gov.anl.aps.logr.portal.model.db.entities.NotificationConfigurationHandlerSetting; +import gov.anl.aps.logr.portal.model.db.entities.NotificationConfigurationSetting; +import gov.anl.aps.logr.portal.model.db.entities.NotificationHandlerConfigKey; +import gov.anl.aps.logr.portal.model.db.entities.NotificationProvider; +import gov.anl.aps.logr.portal.model.db.entities.NotificationProviderConfigKey; +import gov.anl.aps.logr.portal.model.db.entities.UserInfo; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * + * @author djarosz + */ +public class NotificationConfigurationControllerUtility extends CdbEntityControllerUtility { + + @Override + protected NotificationConfigurationFacade getEntityDbFacade() { + return NotificationConfigurationFacade.getInstance(); + } + + @Override + public NotificationConfiguration createEntityInstance(UserInfo sessionUser) { + return new NotificationConfiguration(); + } + + @Override + public String getEntityTypeName() { + return "Notification Configuration"; + } + + @Override + protected void prepareEntityInsert(NotificationConfiguration entity, UserInfo userInfo) throws CdbException { + super.prepareEntityInsert(entity, userInfo); + prepareEntityUpdateInsert(entity, userInfo); + } + + @Override + protected void prepareEntityUpdate(NotificationConfiguration entity, UserInfo updatedByUser) throws CdbException { + super.prepareEntityUpdate(entity, updatedByUser); + prepareEntityUpdateInsert(entity, updatedByUser); + } + + private void 
prepareEntityUpdateInsert(NotificationConfiguration selected, UserInfo updatedByUser) throws InvalidObjectState { + // Validate required fields + if (selected.getName() == null || selected.getName().trim().isEmpty()) { + throw new InvalidObjectState("Name is required"); + } + NotificationProvider notificationProvider = selected.getNotificationProvider(); + + if (notificationProvider == null) { + throw new InvalidObjectState("Notification Provider is required"); + } + if (selected.getNotificationEndpoint() == null || selected.getNotificationEndpoint().trim().isEmpty()) { + throw new InvalidObjectState("Notification Endpoint is required"); + } + + // Validate required provider config keys + List providerConfigKeys = selected.getProviderConfigKeys(); + Map configSettings = selected.getConfigSettings(); + for (NotificationProviderConfigKey key : providerConfigKeys) { + if (key.getIsRequired()) { + String value = configSettings.get(key.getId()); + if (value == null || value.trim().isEmpty()) { + throw new InvalidObjectState(key.getDescription() + " is required"); + } + } + } + UserInfo userInfo = selected.getUserInfo(); + if (userInfo == null) { + throw new InvalidObjectState("User is required."); + } + + // Populate notification provider config settings into entity format + // Build map of existing settings by provider config key ID for reuse + Map existingSettingsByKeyId = new HashMap<>(); + if (selected.getNotificationConfigurationSettingCollection() != null) { + for (NotificationConfigurationSetting existing : selected.getNotificationConfigurationSettingCollection()) { + existingSettingsByKeyId.put(existing.getNotificationProviderConfigKey().getId(), existing); + } + } + + List configSettingEntities = new ArrayList<>(); + for (NotificationProviderConfigKey key : providerConfigKeys) { + String value = configSettings.get(key.getId()); + if (value != null && !value.trim().isEmpty()) { + NotificationConfigurationSetting setting = 
existingSettingsByKeyId.get(key.getId()); + if (setting == null) { + setting = new NotificationConfigurationSetting(); + setting.setNotificationConfiguration(selected); + setting.setNotificationProviderConfigKey(key); + } + setting.setConfigValue(value); + configSettingEntities.add(setting); + } + } + selected.setNotificationConfigurationSettingCollection(configSettingEntities); + + // Populate handler preferences into entity format + // Build map of existing handler settings by handler config key ID for reuse + Map existingHandlersByKeyId = new HashMap<>(); + if (selected.getNotificationConfigurationHandlerSettingCollection() != null) { + for (NotificationConfigurationHandlerSetting existing : selected.getNotificationConfigurationHandlerSettingCollection()) { + existingHandlersByKeyId.put(existing.getNotificationHandlerConfigKey().getId(), existing); + } + } + + List handlerSettingEntities = new ArrayList<>(); + Map handlerPreferences = selected.getHandlerPreferences(); + NotificationHandlerConfigKeyFacade handlerConfigKeyFacade = NotificationHandlerConfigKeyFacade.getInstance(); + for (Map.Entry entry : handlerPreferences.entrySet()) { + if (entry.getValue() != null) { + NotificationConfigurationHandlerSetting handlerSetting = existingHandlersByKeyId.get(entry.getKey()); + if (handlerSetting == null) { + NotificationHandlerConfigKey handlerKey = handlerConfigKeyFacade.find(entry.getKey()); + if (handlerKey == null) { + continue; + } + handlerSetting = new NotificationConfigurationHandlerSetting(); + handlerSetting.setNotificationConfiguration(selected); + handlerSetting.setNotificationHandlerConfigKey(handlerKey); + } + handlerSetting.setConfigValue(String.valueOf(entry.getValue())); + handlerSettingEntities.add(handlerSetting); + } + } + selected.setNotificationConfigurationHandlerSettingCollection(handlerSettingEntities); + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/SearchControllerUtility.java 
b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/SearchControllerUtility.java new file mode 100644 index 000000000..25e3d7307 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/controllers/utilities/SearchControllerUtility.java @@ -0,0 +1,38 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. + */ +package gov.anl.aps.logr.portal.controllers.utilities; + +import gov.anl.aps.logr.common.mqtt.constants.CallSource; +import gov.anl.aps.logr.common.mqtt.model.SearchEvent; +import gov.anl.aps.logr.common.mqtt.model.entities.LogbookSearchOptions; +import gov.anl.aps.logr.portal.utilities.SessionUtility; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +/** + * Utility class for search-related operations. + * + * @author djarosz + */ +public class SearchControllerUtility { + + private static final Logger logger = LogManager.getLogger(SearchControllerUtility.class.getName()); + + /** + * Publishes an anonymous MQTT event when a search is performed. 
+ * + * @param searchText The search text + * @param searchOptions The logbook search options/settings + * @param searchSource The source of the search (e.g., "Portal", "API") + */ + public static void publishSearchMqttEvent(String searchText, LogbookSearchOptions searchOptions, CallSource searchSource) { + SearchEvent searchEvent = new SearchEvent(searchText, searchOptions, searchSource); + try { + SessionUtility.publishMqttEvent(searchEvent); + } catch (Exception ex) { + logger.error("Failed to publish search MQTT event from {}: {}", searchSource, ex.getMessage()); + } + } +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/beans/CdbEntityFacade.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/beans/CdbEntityFacade.java index dca9f86a1..449d4950c 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/beans/CdbEntityFacade.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/beans/CdbEntityFacade.java @@ -42,7 +42,8 @@ public T edit(T entity) { // delete list of connectors, if any if (entity instanceof CdbEntity) { CdbEntity cdbEntity = (CdbEntity) entity; - for (ItemConnector connector : cdbEntity.getDeletedConnectorList()) { + List deletedConnectorList = cdbEntity.getDeletedConnectorList(); + for (ItemConnector connector : deletedConnectorList) { ItemConnectorFacade.getInstance().remove(connector); } cdbEntity.clearDeletedConnectorList(); diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/beans/NotificationConfigurationFacade.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/beans/NotificationConfigurationFacade.java new file mode 100644 index 000000000..5d1ec6f68 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/beans/NotificationConfigurationFacade.java @@ -0,0 +1,101 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file.
+ */ +package gov.anl.aps.logr.portal.model.db.beans; + +import gov.anl.aps.logr.portal.model.db.entities.NotificationConfiguration; +import gov.anl.aps.logr.portal.model.db.entities.NotificationProvider; +import gov.anl.aps.logr.portal.model.db.entities.UserInfo; +import gov.anl.aps.logr.portal.utilities.SessionUtility; +import java.util.List; +import javax.ejb.Stateless; +import javax.persistence.EntityManager; +import javax.persistence.NoResultException; +import javax.persistence.PersistenceContext; + +/** + * Facade for NotificationConfiguration entity. + * Provides database access methods for notification configurations. + * + * @author djarosz + */ +@Stateless +public class NotificationConfigurationFacade extends CdbEntityFacade { + + @PersistenceContext(unitName = "LogrPortalPU") + private EntityManager em; + + @Override + protected EntityManager getEntityManager() { + return em; + } + + public NotificationConfigurationFacade() { + super(NotificationConfiguration.class); + } + + public static NotificationConfigurationFacade getInstance() { + return (NotificationConfigurationFacade) SessionUtility.findFacade(NotificationConfigurationFacade.class.getSimpleName()); + } + + /** + * Find all notification configurations for a specific user. + * + * @param user The user + * @return List of notification configurations for the user + */ + public List findByUser(UserInfo user) { + return em.createQuery( + "SELECT n FROM NotificationConfiguration n WHERE n.userInfo = :user ORDER BY n.name", + NotificationConfiguration.class) + .setParameter("user", user) + .getResultList(); + } + + /** + * Find notification configurations by name. 
+ * + * @param name The configuration name + * @return List of notification configurations with the given name + */ + public List findByName(String name) { + return em.createNamedQuery("NotificationConfiguration.findByName", NotificationConfiguration.class) + .setParameter("name", name) + .getResultList(); + } + + /** + * Find all notification configurations for a specific provider. + * + * @param provider The notification provider + * @return List of notification configurations for the provider + */ + public List findByProvider(NotificationProvider provider) { + return em.createQuery( + "SELECT n FROM NotificationConfiguration n WHERE n.notificationProvider = :provider ORDER BY n.name", + NotificationConfiguration.class) + .setParameter("provider", provider) + .getResultList(); + } + + /** + * Find a notification configuration by name for a specific user. + * + * @param user The user + * @param name The configuration name + * @return The matching configuration or null if not found + */ + public NotificationConfiguration findByUserAndName(UserInfo user, String name) { + try { + return em.createQuery( + "SELECT n FROM NotificationConfiguration n WHERE n.userInfo = :user AND n.name = :name", + NotificationConfiguration.class) + .setParameter("user", user) + .setParameter("name", name) + .getSingleResult(); + } catch (NoResultException ex) { + return null; + } + } +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/beans/NotificationConfigurationHandlerSettingFacade.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/beans/NotificationConfigurationHandlerSettingFacade.java new file mode 100644 index 000000000..c43c42509 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/beans/NotificationConfigurationHandlerSettingFacade.java @@ -0,0 +1,77 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. 
+ */ +package gov.anl.aps.logr.portal.model.db.beans; + +import gov.anl.aps.logr.portal.model.db.entities.NotificationConfiguration; +import gov.anl.aps.logr.portal.model.db.entities.NotificationConfigurationHandlerSetting; +import gov.anl.aps.logr.portal.model.db.entities.NotificationHandlerConfigKey; +import gov.anl.aps.logr.portal.utilities.SessionUtility; +import java.util.List; +import javax.ejb.Stateless; +import javax.persistence.EntityManager; +import javax.persistence.NoResultException; +import javax.persistence.PersistenceContext; + +/** + * Facade for NotificationConfigurationHandlerSetting entity. + * Provides database access methods for handler settings per notification configuration. + * + * @author djarosz + */ +@Stateless +public class NotificationConfigurationHandlerSettingFacade extends CdbEntityFacade { + + @PersistenceContext(unitName = "LogrPortalPU") + private EntityManager em; + + @Override + protected EntityManager getEntityManager() { + return em; + } + + public NotificationConfigurationHandlerSettingFacade() { + super(NotificationConfigurationHandlerSetting.class); + } + + public static NotificationConfigurationHandlerSettingFacade getInstance() { + return (NotificationConfigurationHandlerSettingFacade) SessionUtility.findFacade(NotificationConfigurationHandlerSettingFacade.class.getSimpleName()); + } + + /** + * Find all handler settings for a specific notification configuration. 
+ * + * @param notificationConfiguration The notification configuration + * @return List of handler settings for the configuration + */ + public List findByNotificationConfiguration(NotificationConfiguration notificationConfiguration) { + return em.createQuery( + "SELECT n FROM NotificationConfigurationHandlerSetting n WHERE n.notificationConfiguration = :notificationConfiguration", + NotificationConfigurationHandlerSetting.class) + .setParameter("notificationConfiguration", notificationConfiguration) + .getResultList(); + } + + /** + * Find a specific handler setting for a configuration and key. + * + * @param notificationConfiguration The notification configuration + * @param notificationHandlerConfigKey The handler config key + * @return The matching setting or null if not found + */ + public NotificationConfigurationHandlerSetting findByNotificationConfigurationAndKey( + NotificationConfiguration notificationConfiguration, + NotificationHandlerConfigKey notificationHandlerConfigKey) { + try { + return em.createQuery( + "SELECT n FROM NotificationConfigurationHandlerSetting n WHERE n.notificationConfiguration = :notificationConfiguration AND n.notificationHandlerConfigKey = :notificationHandlerConfigKey", + NotificationConfigurationHandlerSetting.class) + .setParameter("notificationConfiguration", notificationConfiguration) + .setParameter("notificationHandlerConfigKey", notificationHandlerConfigKey) + .getSingleResult(); + } catch (NoResultException ex) { + return null; + } + } +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/beans/NotificationConfigurationSettingFacade.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/beans/NotificationConfigurationSettingFacade.java new file mode 100644 index 000000000..20d286e29 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/beans/NotificationConfigurationSettingFacade.java @@ -0,0 +1,77 @@ +/* + * Copyright (c) UChicago Argonne, LLC.
All rights reserved. + * See LICENSE file. + */ +package gov.anl.aps.logr.portal.model.db.beans; + +import gov.anl.aps.logr.portal.model.db.entities.NotificationConfiguration; +import gov.anl.aps.logr.portal.model.db.entities.NotificationConfigurationSetting; +import gov.anl.aps.logr.portal.model.db.entities.NotificationProviderConfigKey; +import gov.anl.aps.logr.portal.utilities.SessionUtility; +import java.util.List; +import javax.ejb.Stateless; +import javax.persistence.EntityManager; +import javax.persistence.NoResultException; +import javax.persistence.PersistenceContext; + +/** + * Facade for NotificationConfigurationSetting entity. + * Provides database access methods for configuration settings. + * + * @author djarosz + */ +@Stateless +public class NotificationConfigurationSettingFacade extends CdbEntityFacade { + + @PersistenceContext(unitName = "LogrPortalPU") + private EntityManager em; + + @Override + protected EntityManager getEntityManager() { + return em; + } + + public NotificationConfigurationSettingFacade() { + super(NotificationConfigurationSetting.class); + } + + public static NotificationConfigurationSettingFacade getInstance() { + return (NotificationConfigurationSettingFacade) SessionUtility.findFacade(NotificationConfigurationSettingFacade.class.getSimpleName()); + } + + /** + * Find all settings for a specific notification configuration. + * + * @param notificationConfiguration The notification configuration + * @return List of settings for the configuration + */ + public List findByNotificationConfiguration(NotificationConfiguration notificationConfiguration) { + return em.createQuery( + "SELECT n FROM NotificationConfigurationSetting n WHERE n.notificationConfiguration = :notificationConfiguration", + NotificationConfigurationSetting.class) + .setParameter("notificationConfiguration", notificationConfiguration) + .getResultList(); + } + + /** + * Find a specific setting for a configuration and key. 
+ * + * @param notificationConfiguration The notification configuration + * @param notificationProviderConfigKey The provider config key + * @return The matching setting or null if not found + */ + public NotificationConfigurationSetting findByNotificationConfigurationAndKey( + NotificationConfiguration notificationConfiguration, + NotificationProviderConfigKey notificationProviderConfigKey) { + try { + return em.createQuery( + "SELECT n FROM NotificationConfigurationSetting n WHERE n.notificationConfiguration = :notificationConfiguration AND n.notificationProviderConfigKey = :notificationProviderConfigKey", + NotificationConfigurationSetting.class) + .setParameter("notificationConfiguration", notificationConfiguration) + .setParameter("notificationProviderConfigKey", notificationProviderConfigKey) + .getSingleResult(); + } catch (NoResultException ex) { + return null; + } + } +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/beans/NotificationHandlerConfigKeyFacade.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/beans/NotificationHandlerConfigKeyFacade.java new file mode 100644 index 000000000..b7c6e5b3f --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/beans/NotificationHandlerConfigKeyFacade.java @@ -0,0 +1,66 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. + */ +package gov.anl.aps.logr.portal.model.db.beans; + +import gov.anl.aps.logr.portal.model.db.entities.NotificationHandlerConfigKey; +import gov.anl.aps.logr.portal.utilities.SessionUtility; +import java.util.List; +import javax.ejb.Stateless; +import javax.persistence.EntityManager; +import javax.persistence.NoResultException; +import javax.persistence.PersistenceContext; + +/** + * Facade for NotificationHandlerConfigKey entity. + * Provides database access methods for handler configuration keys.
+ * + * @author djarosz + */ +@Stateless +public class NotificationHandlerConfigKeyFacade extends CdbEntityFacade { + + @PersistenceContext(unitName = "LogrPortalPU") + private EntityManager em; + + @Override + protected EntityManager getEntityManager() { + return em; + } + + public NotificationHandlerConfigKeyFacade() { + super(NotificationHandlerConfigKey.class); + } + + public static NotificationHandlerConfigKeyFacade getInstance() { + return (NotificationHandlerConfigKeyFacade) SessionUtility.findFacade(NotificationHandlerConfigKeyFacade.class.getSimpleName()); + } + + /** + * Find all handler config keys ordered by display_order. + * + * @return List of all NotificationHandlerConfigKey entities + */ + @Override + public List findAll() { + return em.createNamedQuery("NotificationHandlerConfigKey.findAll", NotificationHandlerConfigKey.class) + .getResultList(); + } + + /** + * Find a specific handler config key by its config_key name. + * + * @param configKey The config key name (e.g., "entry_updates") + * @return The matching NotificationHandlerConfigKey or null if not found + */ + public NotificationHandlerConfigKey findByConfigKey(String configKey) { + try { + return em.createNamedQuery("NotificationHandlerConfigKey.findByConfigKey", NotificationHandlerConfigKey.class) + .setParameter("configKey", configKey) + .getSingleResult(); + } catch (NoResultException ex) { + return null; + } + } +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/beans/NotificationProviderConfigKeyFacade.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/beans/NotificationProviderConfigKeyFacade.java new file mode 100644 index 000000000..1e57a693e --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/beans/NotificationProviderConfigKeyFacade.java @@ -0,0 +1,74 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. 
+ */ +package gov.anl.aps.logr.portal.model.db.beans; + +import gov.anl.aps.logr.portal.model.db.entities.NotificationProvider; +import gov.anl.aps.logr.portal.model.db.entities.NotificationProviderConfigKey; +import gov.anl.aps.logr.portal.utilities.SessionUtility; +import java.util.List; +import javax.ejb.Stateless; +import javax.persistence.EntityManager; +import javax.persistence.NoResultException; +import javax.persistence.PersistenceContext; + +/** + * Facade for NotificationProviderConfigKey entity. + * Provides database access methods for provider configuration keys. + * + * @author djarosz + */ +@Stateless +public class NotificationProviderConfigKeyFacade extends CdbEntityFacade { + + @PersistenceContext(unitName = "LogrPortalPU") + private EntityManager em; + + @Override + protected EntityManager getEntityManager() { + return em; + } + + public NotificationProviderConfigKeyFacade() { + super(NotificationProviderConfigKey.class); + } + + public static NotificationProviderConfigKeyFacade getInstance() { + return (NotificationProviderConfigKeyFacade) SessionUtility.findFacade(NotificationProviderConfigKeyFacade.class.getSimpleName()); + } + + /** + * Find all configuration keys for a specific provider ordered by display order. + * + * @param provider The notification provider + * @return List of configuration keys for the provider + */ + public List findByProvider(NotificationProvider provider) { + return em.createQuery( + "SELECT n FROM NotificationProviderConfigKey n WHERE n.notificationProvider = :provider ORDER BY n.displayOrder", + NotificationProviderConfigKey.class) + .setParameter("provider", provider) + .getResultList(); + } + + /** + * Find a specific configuration key by provider and key name. 
+ * + * @param provider The notification provider + * @param configKey The configuration key name + * @return The matching config key or null if not found + */ + public NotificationProviderConfigKey findByProviderAndKey(NotificationProvider provider, String configKey) { + try { + return em.createQuery( + "SELECT n FROM NotificationProviderConfigKey n WHERE n.notificationProvider = :provider AND n.configKey = :configKey", + NotificationProviderConfigKey.class) + .setParameter("provider", provider) + .setParameter("configKey", configKey) + .getSingleResult(); + } catch (NoResultException ex) { + return null; + } + } +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/beans/NotificationProviderFacade.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/beans/NotificationProviderFacade.java new file mode 100644 index 000000000..830ae07e8 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/beans/NotificationProviderFacade.java @@ -0,0 +1,54 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. + */ +package gov.anl.aps.logr.portal.model.db.beans; + +import gov.anl.aps.logr.portal.model.db.entities.NotificationProvider; +import gov.anl.aps.logr.portal.utilities.SessionUtility; +import javax.ejb.Stateless; +import javax.persistence.EntityManager; +import javax.persistence.NoResultException; +import javax.persistence.PersistenceContext; + +/** + * Facade for NotificationProvider entity. + * Provides database access methods for notification providers. 
+ * + * @author djarosz + */ +@Stateless +public class NotificationProviderFacade extends CdbEntityFacade { + + @PersistenceContext(unitName = "LogrPortalPU") + private EntityManager em; + + @Override + protected EntityManager getEntityManager() { + return em; + } + + public NotificationProviderFacade() { + super(NotificationProvider.class); + } + + public static NotificationProviderFacade getInstance() { + return (NotificationProviderFacade) SessionUtility.findFacade(NotificationProviderFacade.class.getSimpleName()); + } + + /** + * Find a notification provider by name. + * + * @param name The provider name + * @return The matching provider or null if not found + */ + public NotificationProvider findByName(String name) { + try { + return em.createNamedQuery("NotificationProvider.findByName", NotificationProvider.class) + .setParameter("name", name) + .getSingleResult(); + } catch (NoResultException ex) { + return null; + } + } +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/CdbEntity.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/CdbEntity.java index 628a63aaa..7514f4714 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/CdbEntity.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/CdbEntity.java @@ -6,6 +6,7 @@ import com.fasterxml.jackson.annotation.JsonAnySetter; import com.fasterxml.jackson.annotation.JsonIgnore; +import gov.anl.aps.logr.common.mqtt.model.MqttEntityEvent; import gov.anl.aps.logr.portal.controllers.utilities.CdbEntityControllerUtility; import gov.anl.aps.logr.portal.import_export.import_.objects.ValidInfo; import gov.anl.aps.logr.portal.model.db.beans.PropertyTypeFacade; @@ -23,8 +24,9 @@ /** * Base class for all CDB entities. + * @param describes an optional additional specific mqtt event that can be specified for the entity. 
*/ -public class CdbEntity implements Serializable, Cloneable { +public class CdbEntity implements Serializable, Cloneable { private static final Logger LOGGER = LogManager.getLogger(CdbEntity.class.getName()); @@ -44,6 +46,8 @@ public class CdbEntity implements Serializable, Cloneable { // persistence management for associated ItemConnectors, deleted on call to edit this item in facade private transient List deletedConnectorList = null; + private transient List actionEvents; + // import wizard variables private transient boolean isValidImport = true; private transient String validStringImport; @@ -311,4 +315,20 @@ public static boolean isValidCableEndDesignation(String designation) { return list.contains(designation); } + @JsonIgnore + public List getActionEvents() { + return actionEvents; + } + + public void addActionEvent(ActionEvent event) { + if (actionEvents == null) { + actionEvents = new ArrayList<>(); + } + actionEvents.add(event); + } + + public void clearActionEvents() { + actionEvents = null; + } + } diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/Log.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/Log.java index 684f35dcc..07be06a9b 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/Log.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/Log.java @@ -6,6 +6,7 @@ import com.fasterxml.jackson.annotation.JsonFormat; import com.fasterxml.jackson.annotation.JsonIgnore; +import gov.anl.aps.logr.common.mqtt.model.LogEntryEvent; import gov.anl.aps.logr.portal.utilities.MarkdownParser; import gov.anl.aps.logr.portal.view.objects.GroupedReaction; import java.io.Serializable; @@ -50,7 +51,7 @@ @NamedQuery(name = "Log.findByEnteredOnDateTime", query = "SELECT l FROM Log l WHERE l.enteredOnDateTime = :enteredOnDateTime"), @NamedQuery(name = "Log.findByEffectiveFromDateTime", query = "SELECT l FROM Log l WHERE l.effectiveFromDateTime = 
:effectiveFromDateTime"), @NamedQuery(name = "Log.findByEffectiveToDateTime", query = "SELECT l FROM Log l WHERE l.effectiveToDateTime = :effectiveToDateTime")}) -public class Log extends CdbEntity implements Serializable { +public class Log extends CdbEntity implements Serializable { private static final long serialVersionUID = 1L; @Id @@ -121,6 +122,8 @@ public class Log extends CdbEntity implements Serializable { private transient String addedReactionsString; private transient List childLogListReversed = null; + + private transient boolean isSystemLog = false; public Log() { } @@ -186,6 +189,10 @@ public void setLastModifiedOnDateTime(Date lastModifiedOnDateTime) { public UserInfo getLastModifiedByUser() { return lastModifiedByUser; } + + public String getLastModifiedByUsername() { + return lastModifiedByUser == null ? null : lastModifiedByUser.getUsername(); + } public void setLastModifiedByUser(UserInfo lastModifiedByUser) { this.lastModifiedByUser = lastModifiedByUser; @@ -383,6 +390,15 @@ public void setAddedReactionsString(String addedReactionsString) { this.addedReactionsString = addedReactionsString; } + @JsonIgnore + public boolean isSystemLog() { + return isSystemLog; + } + + public void markAsSystemLog() { + isSystemLog = true; + } + @Override public boolean equals(Object object) { // TODO: Warning - this method won't work in the case the id fields are not set @@ -398,7 +414,7 @@ public boolean equals(Object object) { @Override public String toString() { - return "gov.anl.aps.cdb.portal.model.db.entities.Log[ id=" + id + " ]"; + return "gov.anl.aps.logr.portal.model.db.entities.Log[ id=" + id + " ]"; } } diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/LogReaction.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/LogReaction.java index 608a32320..adc8444b2 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/LogReaction.java +++ 
b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/LogReaction.java @@ -5,6 +5,7 @@ package gov.anl.aps.logr.portal.model.db.entities; import com.fasterxml.jackson.annotation.JsonIgnore; +import gov.anl.aps.logr.common.mqtt.model.LogReactionEvent; import java.io.Serializable; import javax.persistence.EmbeddedId; import javax.persistence.Entity; @@ -28,7 +29,7 @@ @NamedQuery(name = "LogReaction.findByLogId", query = "SELECT l FROM LogReaction l WHERE l.logReactionPK.logId = :logId"), @NamedQuery(name = "LogReaction.findByReactionId", query = "SELECT l FROM LogReaction l WHERE l.logReactionPK.reactionId = :reactionId"), @NamedQuery(name = "LogReaction.findByUserId", query = "SELECT l FROM LogReaction l WHERE l.logReactionPK.userId = :userId")}) -public class LogReaction implements Serializable { +public class LogReaction extends CdbEntity implements Serializable { private static final long serialVersionUID = 1L; @EmbeddedId @@ -44,6 +45,7 @@ public class LogReaction implements Serializable { private UserInfo userInfo; public LogReaction() { + this.logReactionPK = new LogReactionPK(); } public LogReaction(LogReactionPK logReactionPK) { @@ -63,6 +65,11 @@ public void setLogReactionPK(LogReactionPK logReactionPK) { this.logReactionPK = logReactionPK; } + @Override + public Object getId() { + return logReactionPK; + } + @JsonIgnore public Log getLog() { return log; @@ -70,6 +77,7 @@ public Log getLog() { public void setLog(Log log) { this.log = log; + updatePK(); } public Reaction getReaction() { @@ -78,6 +86,7 @@ public Reaction getReaction() { public void setReaction(Reaction reaction) { this.reaction = reaction; + updatePK(); } public String getUsername() { @@ -91,6 +100,22 @@ public UserInfo getUserInfo() { public void setUserInfo(UserInfo userInfo) { this.userInfo = userInfo; + updatePK(); + } + + private void updatePK() { + if (this.log != null) { + this.logReactionPK.setLogId(this.log.getId()); + } + + if (this.reaction != null) { + 
this.logReactionPK.setReactionId(this.reaction.getId()); + } + + if (this.userInfo != null) { + this.logReactionPK.setUserId(this.userInfo.getId()); + } + } @Override diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/NotificationConfiguration.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/NotificationConfiguration.java new file mode 100644 index 000000000..04bc1e57d --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/NotificationConfiguration.java @@ -0,0 +1,329 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. + */ +package gov.anl.aps.logr.portal.model.db.entities; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import gov.anl.aps.logr.common.utilities.HttpLinkUtility; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import javax.persistence.Basic; +import javax.persistence.CascadeType; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.ManyToOne; +import javax.persistence.NamedQueries; +import javax.persistence.NamedQuery; +import javax.persistence.OneToMany; +import javax.persistence.Table; +import javax.validation.constraints.NotNull; +import javax.validation.constraints.Size; +import javax.xml.bind.annotation.XmlRootElement; +import javax.xml.bind.annotation.XmlTransient; + +/** + * + * @author djarosz + */ +@Entity +@Table(name = "notification_configuration") +@XmlRootElement +@NamedQueries({ + @NamedQuery(name = "NotificationConfiguration.findAll", query = "SELECT n FROM NotificationConfiguration n"), + @NamedQuery(name = "NotificationConfiguration.findById", query = "SELECT n FROM 
NotificationConfiguration n WHERE n.id = :id"), + @NamedQuery(name = "NotificationConfiguration.findByName", query = "SELECT n FROM NotificationConfiguration n WHERE n.name = :name"), + @NamedQuery(name = "NotificationConfiguration.findByDescription", query = "SELECT n FROM NotificationConfiguration n WHERE n.description = :description"), + @NamedQuery(name = "NotificationConfiguration.findByNotificationEndpoint", query = "SELECT n FROM NotificationConfiguration n WHERE n.notificationEndpoint = :notificationEndpoint")}) +public class NotificationConfiguration extends CdbEntity implements Serializable { + + private static final long serialVersionUID = 1L; + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + @Basic(optional = false) + @Column(name = "id") + private Integer id; + @Basic(optional = false) + @NotNull + @Size(min = 1, max = 64) + @Column(name = "name") + private String name; + @Size(max = 256) + @Column(name = "description") + private String description; + @Basic(optional = false) + @NotNull + @Size(min = 1, max = 256) + @Column(name = "notification_endpoint") + private String notificationEndpoint; + @JoinColumn(name = "notification_provider_id", referencedColumnName = "id") + @ManyToOne(optional = false) + private NotificationProvider notificationProvider; + @JoinColumn(name = "user_id", referencedColumnName = "id") + @ManyToOne(optional = true) + private UserInfo userInfo; + @OneToMany(cascade = CascadeType.ALL, mappedBy = "notificationConfiguration") + private Collection notificationConfigurationHandlerSettingCollection; + @OneToMany(cascade = CascadeType.ALL, mappedBy = "notificationConfiguration") + private Collection notificationConfigurationSettingCollection; + + // Variables for create/update notification configuration + private transient List providerConfigKeys = new ArrayList<>(); + private transient Map configSettings = new HashMap<>(); // config key ID -> value + private transient Map handlerPreferences = new HashMap<>(); // handler 
key ID -> enabled + + // Self-populating maps keyed by name for API serialization + private transient Map handlerPreferencesByName = new HashMap<>(); + private transient Map configSettingsByName = new HashMap<>(); + + private transient String displayNotificationEndpoint; + + // Transient fields for unsubscribe flow + public enum UnsubscribeState { + LOGIN_REQUIRED, ERROR, ALREADY_UNSUBSCRIBED, CONFIRM, SUCCESS + } + + private transient NotificationHandlerConfigKey unsubscribeHandlerConfigKey; + private transient String unsubscribeError; + private transient UnsubscribeState unsubscribeState; + private transient boolean unsubscribeForOtherUser = false; + + public NotificationConfiguration() { + } + + public NotificationConfiguration(Integer id) { + this.id = id; + } + + public NotificationConfiguration(Integer id, String name, String notificationEndpoint) { + this.id = id; + this.name = name; + this.notificationEndpoint = notificationEndpoint; + } + + public Integer getId() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getNotificationEndpoint() { + return notificationEndpoint; + } + + public void setNotificationEndpoint(String notificationEndpoint) { + this.notificationEndpoint = notificationEndpoint; + } + + @JsonIgnore + public String getDisplayNotificationEndpoint() { + if (displayNotificationEndpoint == null && notificationEndpoint != null) { + displayNotificationEndpoint = HttpLinkUtility.prepareHttpLinkDisplayValue(notificationEndpoint); + } + return displayNotificationEndpoint; + } + + public String getNotificationProviderName() { + return notificationProvider.getName(); + } + + @JsonIgnore + public NotificationProvider getNotificationProvider() { + 
return notificationProvider; + } + + public void setNotificationProvider(NotificationProvider notificationProvider) { + this.notificationProvider = notificationProvider; + } + + public String getUsername() { + if (userInfo == null) { + return null; + } + + return userInfo.getUsername(); + + } + + @JsonIgnore + public UserInfo getUserInfo() { + return userInfo; + } + + public void setUserInfo(UserInfo userInfo) { + this.userInfo = userInfo; + } + + @JsonIgnore + @XmlTransient + public Collection getNotificationConfigurationHandlerSettingCollection() { + return notificationConfigurationHandlerSettingCollection; + } + + public void setNotificationConfigurationHandlerSettingCollection(Collection notificationConfigurationHandlerSettingCollection) { + this.notificationConfigurationHandlerSettingCollection = notificationConfigurationHandlerSettingCollection; + } + + @JsonIgnore + @XmlTransient + public Collection getNotificationConfigurationSettingCollection() { + return notificationConfigurationSettingCollection; + } + + public void setNotificationConfigurationSettingCollection(Collection notificationConfigurationSettingCollection) { + this.notificationConfigurationSettingCollection = notificationConfigurationSettingCollection; + } + + public boolean isNewEntity() { + return getId() == null; + } + + @JsonIgnore + public List getProviderConfigKeys() { + return providerConfigKeys; + } + + public void setProviderConfigKeys(List providerConfigKeys) { + this.providerConfigKeys = providerConfigKeys; + } + + @JsonIgnore + public Map getConfigSettings() { + return configSettings; + } + + public void setConfigSettings(Map configSettings) { + this.configSettings = configSettings; + } + + @JsonIgnore + public Map getHandlerPreferences() { + return handlerPreferences; + } + + public void setHandlerPreferences(Map handlerPreferences) { + this.handlerPreferences = handlerPreferences; + } + + public Map getHandlerPreferencesByName() { + if (handlerPreferencesByName.isEmpty() + && 
notificationConfigurationHandlerSettingCollection != null) { + for (NotificationConfigurationHandlerSetting setting + : notificationConfigurationHandlerSettingCollection) { + handlerPreferencesByName.put( + setting.getNotificationHandlerConfigKey().getConfigKey(), + "true".equalsIgnoreCase(setting.getConfigValue())); + } + } + return handlerPreferencesByName; + } + + public void setHandlerPreferencesByName(Map handlerPreferencesByName) { + this.handlerPreferencesByName = handlerPreferencesByName; + } + + public Map getConfigSettingsByName() { + if (configSettingsByName.isEmpty() + && notificationConfigurationSettingCollection != null) { + for (NotificationConfigurationSetting setting + : notificationConfigurationSettingCollection) { + configSettingsByName.put( + setting.getNotificationProviderConfigKey().getConfigKey(), + setting.getConfigValue()); + } + } + return configSettingsByName; + } + + public void setConfigSettingsByName(Map configSettingsByName) { + this.configSettingsByName = configSettingsByName; + } + + @JsonIgnore + public NotificationHandlerConfigKey getUnsubscribeHandlerConfigKey() { + return unsubscribeHandlerConfigKey; + } + + public void setUnsubscribeHandlerConfigKey(NotificationHandlerConfigKey unsubscribeHandlerConfigKey) { + this.unsubscribeHandlerConfigKey = unsubscribeHandlerConfigKey; + } + + @JsonIgnore + public String getUnsubscribeError() { + return unsubscribeError; + } + + public void setUnsubscribeError(String unsubscribeError) { + this.unsubscribeError = unsubscribeError; + } + + @JsonIgnore + public UnsubscribeState getUnsubscribeState() { + return unsubscribeState; + } + + public void setUnsubscribeState(UnsubscribeState unsubscribeState) { + this.unsubscribeState = unsubscribeState; + } + + @JsonIgnore + public boolean isUnsubscribeForOtherUser() { + return unsubscribeForOtherUser; + } + + public void setUnsubscribeForOtherUser(boolean unsubscribeForOtherUser) { + this.unsubscribeForOtherUser = unsubscribeForOtherUser; + } + + 
@Override + public int hashCode() { + int hash = 0; + hash += (id != null ? id.hashCode() : 0); + return hash; + } + + @Override + public boolean equals(Object object) { + // TODO: Warning - this method won't work in the case the id fields are not set + if (!(object instanceof NotificationConfiguration)) { + return false; + } + NotificationConfiguration other = (NotificationConfiguration) object; + if ((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id))) { + return false; + } + return true; + } + + @Override + public String toString() { + return "gov.anl.aps.logr.portal.model.db.entities.NotificationConfiguration[ id=" + id + " ]"; + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/NotificationConfigurationHandlerSetting.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/NotificationConfigurationHandlerSetting.java new file mode 100644 index 000000000..3a299acf9 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/NotificationConfigurationHandlerSetting.java @@ -0,0 +1,123 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. 
+ */ +package gov.anl.aps.logr.portal.model.db.entities; + +import java.io.Serializable; +import javax.persistence.Basic; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.ManyToOne; +import javax.persistence.NamedQueries; +import javax.persistence.NamedQuery; +import javax.persistence.Table; +import javax.validation.constraints.NotNull; +import javax.validation.constraints.Size; +import javax.xml.bind.annotation.XmlRootElement; + +/** + * + * @author djarosz + */ +@Entity +@Table(name = "notification_configuration_handler_setting") +@XmlRootElement +@NamedQueries({ + @NamedQuery(name = "NotificationConfigurationHandlerSetting.findAll", query = "SELECT n FROM NotificationConfigurationHandlerSetting n"), + @NamedQuery(name = "NotificationConfigurationHandlerSetting.findById", query = "SELECT n FROM NotificationConfigurationHandlerSetting n WHERE n.id = :id"), + @NamedQuery(name = "NotificationConfigurationHandlerSetting.findByConfigValue", query = "SELECT n FROM NotificationConfigurationHandlerSetting n WHERE n.configValue = :configValue")}) +public class NotificationConfigurationHandlerSetting implements Serializable { + + private static final long serialVersionUID = 1L; + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + @Basic(optional = false) + @Column(name = "id") + private Integer id; + @Basic(optional = false) + @NotNull + @Size(min = 1, max = 256) + @Column(name = "config_value") + private String configValue; + @JoinColumn(name = "notification_configuration_id", referencedColumnName = "id") + @ManyToOne(optional = false) + private NotificationConfiguration notificationConfiguration; + @JoinColumn(name = "notification_handler_config_key_id", referencedColumnName = "id") + @ManyToOne(optional = false) + private NotificationHandlerConfigKey 
notificationHandlerConfigKey; + + public NotificationConfigurationHandlerSetting() { + } + + public NotificationConfigurationHandlerSetting(Integer id) { + this.id = id; + } + + public NotificationConfigurationHandlerSetting(Integer id, String configValue) { + this.id = id; + this.configValue = configValue; + } + + public Integer getId() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + public String getConfigValue() { + return configValue; + } + + public void setConfigValue(String configValue) { + this.configValue = configValue; + } + + public NotificationConfiguration getNotificationConfiguration() { + return notificationConfiguration; + } + + public void setNotificationConfiguration(NotificationConfiguration notificationConfiguration) { + this.notificationConfiguration = notificationConfiguration; + } + + public NotificationHandlerConfigKey getNotificationHandlerConfigKey() { + return notificationHandlerConfigKey; + } + + public void setNotificationHandlerConfigKey(NotificationHandlerConfigKey notificationHandlerConfigKey) { + this.notificationHandlerConfigKey = notificationHandlerConfigKey; + } + + @Override + public int hashCode() { + int hash = 0; + hash += (id != null ? 
id.hashCode() : 0); + return hash; + } + + @Override + public boolean equals(Object object) { + // TODO: Warning - this method won't work in the case the id fields are not set + if (!(object instanceof NotificationConfigurationHandlerSetting)) { + return false; + } + NotificationConfigurationHandlerSetting other = (NotificationConfigurationHandlerSetting) object; + if ((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id))) { + return false; + } + return true; + } + + @Override + public String toString() { + return "gov.anl.aps.logr.portal.model.db.entities.NotificationConfigurationHandlerSetting[ id=" + id + " ]"; + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/NotificationConfigurationSetting.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/NotificationConfigurationSetting.java new file mode 100644 index 000000000..bd918e71d --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/NotificationConfigurationSetting.java @@ -0,0 +1,115 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. 
+ */ +package gov.anl.aps.logr.portal.model.db.entities; + +import java.io.Serializable; +import javax.persistence.Basic; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.ManyToOne; +import javax.persistence.NamedQueries; +import javax.persistence.NamedQuery; +import javax.persistence.Table; +import javax.validation.constraints.Size; +import javax.xml.bind.annotation.XmlRootElement; + +/** + * + * @author djarosz + */ +@Entity +@Table(name = "notification_configuration_setting") +@XmlRootElement +@NamedQueries({ + @NamedQuery(name = "NotificationConfigurationSetting.findAll", query = "SELECT n FROM NotificationConfigurationSetting n"), + @NamedQuery(name = "NotificationConfigurationSetting.findById", query = "SELECT n FROM NotificationConfigurationSetting n WHERE n.id = :id"), + @NamedQuery(name = "NotificationConfigurationSetting.findByConfigValue", query = "SELECT n FROM NotificationConfigurationSetting n WHERE n.configValue = :configValue")}) +public class NotificationConfigurationSetting implements Serializable { + + private static final long serialVersionUID = 1L; + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + @Basic(optional = false) + @Column(name = "id") + private Integer id; + @Size(max = 256) + @Column(name = "config_value") + private String configValue; + @JoinColumn(name = "notification_configuration_id", referencedColumnName = "id") + @ManyToOne(optional = false) + private NotificationConfiguration notificationConfiguration; + @JoinColumn(name = "notification_provider_config_key_id", referencedColumnName = "id") + @ManyToOne(optional = false) + private NotificationProviderConfigKey notificationProviderConfigKey; + + public NotificationConfigurationSetting() { + } + + public NotificationConfigurationSetting(Integer id) { + this.id = id; + } + + 
public Integer getId() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + public String getConfigValue() { + return configValue; + } + + public void setConfigValue(String configValue) { + this.configValue = configValue; + } + + public NotificationConfiguration getNotificationConfiguration() { + return notificationConfiguration; + } + + public void setNotificationConfiguration(NotificationConfiguration notificationConfiguration) { + this.notificationConfiguration = notificationConfiguration; + } + + public NotificationProviderConfigKey getNotificationProviderConfigKey() { + return notificationProviderConfigKey; + } + + public void setNotificationProviderConfigKey(NotificationProviderConfigKey notificationProviderConfigKey) { + this.notificationProviderConfigKey = notificationProviderConfigKey; + } + + @Override + public int hashCode() { + int hash = 0; + hash += (id != null ? id.hashCode() : 0); + return hash; + } + + @Override + public boolean equals(Object object) { + // TODO: Warning - this method won't work in the case the id fields are not set + if (!(object instanceof NotificationConfigurationSetting)) { + return false; + } + NotificationConfigurationSetting other = (NotificationConfigurationSetting) object; + if ((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id))) { + return false; + } + return true; + } + + @Override + public String toString() { + return "gov.anl.aps.logr.portal.model.db.entities.NotificationConfigurationSetting[ id=" + id + " ]"; + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/NotificationHandlerConfigKey.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/NotificationHandlerConfigKey.java new file mode 100644 index 000000000..2e042caa4 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/NotificationHandlerConfigKey.java @@ -0,0 +1,176 @@ +/* + * Copyright (c) UChicago 
Argonne, LLC. All rights reserved. + * See LICENSE file. + */ +package gov.anl.aps.logr.portal.model.db.entities; + +import java.io.Serializable; +import java.util.Collection; +import javax.persistence.Basic; +import javax.persistence.CascadeType; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.NamedQueries; +import javax.persistence.NamedQuery; +import javax.persistence.OneToMany; +import javax.persistence.Table; +import javax.validation.constraints.NotNull; +import javax.validation.constraints.Size; +import javax.xml.bind.annotation.XmlRootElement; +import javax.xml.bind.annotation.XmlTransient; + +/** + * + * @author djarosz + */ +@Entity +@Table(name = "notification_handler_config_key") +@XmlRootElement +@NamedQueries({ + @NamedQuery(name = "NotificationHandlerConfigKey.findAll", query = "SELECT n FROM NotificationHandlerConfigKey n"), + @NamedQuery(name = "NotificationHandlerConfigKey.findById", query = "SELECT n FROM NotificationHandlerConfigKey n WHERE n.id = :id"), + @NamedQuery(name = "NotificationHandlerConfigKey.findByConfigKey", query = "SELECT n FROM NotificationHandlerConfigKey n WHERE n.configKey = :configKey"), + @NamedQuery(name = "NotificationHandlerConfigKey.findByDisplayName", query = "SELECT n FROM NotificationHandlerConfigKey n WHERE n.displayName = :displayName"), + @NamedQuery(name = "NotificationHandlerConfigKey.findByDescription", query = "SELECT n FROM NotificationHandlerConfigKey n WHERE n.description = :description"), + @NamedQuery(name = "NotificationHandlerConfigKey.findByValueType", query = "SELECT n FROM NotificationHandlerConfigKey n WHERE n.valueType = :valueType"), + @NamedQuery(name = "NotificationHandlerConfigKey.findByDefaultValue", query = "SELECT n FROM NotificationHandlerConfigKey n WHERE n.defaultValue = :defaultValue"), + @NamedQuery(name = 
"NotificationHandlerConfigKey.findByDisplayOrder", query = "SELECT n FROM NotificationHandlerConfigKey n WHERE n.displayOrder = :displayOrder")}) +public class NotificationHandlerConfigKey implements Serializable { + + private static final long serialVersionUID = 1L; + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + @Basic(optional = false) + @Column(name = "id") + private Integer id; + @Basic(optional = false) + @NotNull + @Size(min = 1, max = 64) + @Column(name = "config_key") + private String configKey; + @Size(max = 64) + @Column(name = "display_name") + private String displayName; + @Size(max = 256) + @Column(name = "description") + private String description; + @Basic(optional = false) + @NotNull + @Size(min = 1, max = 7) + @Column(name = "value_type") + private String valueType; + @Size(max = 256) + @Column(name = "default_value") + private String defaultValue; + @Column(name = "display_order") + private Integer displayOrder; + @OneToMany(cascade = CascadeType.ALL, mappedBy = "notificationHandlerConfigKey") + private Collection notificationConfigurationHandlerSettingCollection; + + public NotificationHandlerConfigKey() { + } + + public NotificationHandlerConfigKey(Integer id) { + this.id = id; + } + + public NotificationHandlerConfigKey(Integer id, String configKey, String valueType) { + this.id = id; + this.configKey = configKey; + this.valueType = valueType; + } + + public Integer getId() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + public String getConfigKey() { + return configKey; + } + + public void setConfigKey(String configKey) { + this.configKey = configKey; + } + + public String getDisplayName() { + return displayName; + } + + public void setDisplayName(String displayName) { + this.displayName = displayName; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getValueType() { + 
return valueType; + } + + public void setValueType(String valueType) { + this.valueType = valueType; + } + + public String getDefaultValue() { + return defaultValue; + } + + public void setDefaultValue(String defaultValue) { + this.defaultValue = defaultValue; + } + + public Integer getDisplayOrder() { + return displayOrder; + } + + public void setDisplayOrder(Integer displayOrder) { + this.displayOrder = displayOrder; + } + + @XmlTransient + public Collection getNotificationConfigurationHandlerSettingCollection() { + return notificationConfigurationHandlerSettingCollection; + } + + public void setNotificationConfigurationHandlerSettingCollection(Collection notificationConfigurationHandlerSettingCollection) { + this.notificationConfigurationHandlerSettingCollection = notificationConfigurationHandlerSettingCollection; + } + + @Override + public int hashCode() { + int hash = 0; + hash += (id != null ? id.hashCode() : 0); + return hash; + } + + @Override + public boolean equals(Object object) { + // TODO: Warning - this method won't work in the case the id fields are not set + if (!(object instanceof NotificationHandlerConfigKey)) { + return false; + } + NotificationHandlerConfigKey other = (NotificationHandlerConfigKey) object; + if ((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id))) { + return false; + } + return true; + } + + @Override + public String toString() { + return "gov.anl.aps.logr.portal.model.db.entities.NotificationHandlerConfigKey[ id=" + id + " ]"; + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/NotificationProvider.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/NotificationProvider.java new file mode 100644 index 000000000..59deed322 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/NotificationProvider.java @@ -0,0 +1,158 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. 
+ * See LICENSE file. + */ +package gov.anl.aps.logr.portal.model.db.entities; + +import java.io.Serializable; +import java.util.Collection; +import javax.persistence.Basic; +import javax.persistence.CascadeType; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.Lob; +import javax.persistence.NamedQueries; +import javax.persistence.NamedQuery; +import javax.persistence.OneToMany; +import javax.persistence.Table; +import javax.validation.constraints.NotNull; +import javax.validation.constraints.Size; +import javax.xml.bind.annotation.XmlRootElement; +import javax.xml.bind.annotation.XmlTransient; +import gov.anl.aps.logr.portal.utilities.MarkdownParser; + +/** + * + * @author djarosz + */ +@Entity +@Table(name = "notification_provider") +@XmlRootElement +@NamedQueries({ + @NamedQuery(name = "NotificationProvider.findAll", query = "SELECT n FROM NotificationProvider n"), + @NamedQuery(name = "NotificationProvider.findById", query = "SELECT n FROM NotificationProvider n WHERE n.id = :id"), + @NamedQuery(name = "NotificationProvider.findByName", query = "SELECT n FROM NotificationProvider n WHERE n.name = :name"), + @NamedQuery(name = "NotificationProvider.findByDescription", query = "SELECT n FROM NotificationProvider n WHERE n.description = :description")}) +public class NotificationProvider implements Serializable { + + private static final long serialVersionUID = 1L; + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + @Basic(optional = false) + @Column(name = "id") + private Integer id; + @Basic(optional = false) + @NotNull + @Size(min = 1, max = 64) + @Column(name = "name") + private String name; + @Size(max = 256) + @Column(name = "description") + private String description; + @Lob + @Column(name = "instructions") + private String instructions; + private transient String htmlInstructions; + 
@OneToMany(cascade = CascadeType.ALL, mappedBy = "notificationProvider") + private Collection notificationProviderConfigKeyCollection; + @OneToMany(cascade = CascadeType.ALL, mappedBy = "notificationProvider") + private Collection notificationConfigurationCollection; + + public NotificationProvider() { + } + + public NotificationProvider(Integer id) { + this.id = id; + } + + public NotificationProvider(Integer id, String name) { + this.id = id; + this.name = name; + } + + public Integer getId() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getInstructions() { + return instructions; + } + + public void setInstructions(String instructions) { + this.instructions = instructions; + } + + public String getHtmlInstructions() { + if (htmlInstructions == null && instructions != null) { + htmlInstructions = MarkdownParser.parseMarkdownAsHTML(instructions); + } + return htmlInstructions; + } + + @XmlTransient + public Collection getNotificationProviderConfigKeyCollection() { + return notificationProviderConfigKeyCollection; + } + + public void setNotificationProviderConfigKeyCollection(Collection notificationProviderConfigKeyCollection) { + this.notificationProviderConfigKeyCollection = notificationProviderConfigKeyCollection; + } + + @XmlTransient + public Collection getNotificationConfigurationCollection() { + return notificationConfigurationCollection; + } + + public void setNotificationConfigurationCollection(Collection notificationConfigurationCollection) { + this.notificationConfigurationCollection = notificationConfigurationCollection; + } + + @Override + public int hashCode() { + int hash = 0; + hash += (id != null ? 
id.hashCode() : 0); + return hash; + } + + @Override + public boolean equals(Object object) { + // TODO: Warning - this method won't work in the case the id fields are not set + if (!(object instanceof NotificationProvider)) { + return false; + } + NotificationProvider other = (NotificationProvider) object; + if ((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id))) { + return false; + } + return true; + } + + @Override + public String toString() { + return "gov.anl.aps.logr.portal.model.db.entities.NotificationProvider[ id=" + id + " ]"; + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/NotificationProviderConfigKey.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/NotificationProviderConfigKey.java new file mode 100644 index 000000000..ca737cbf3 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/NotificationProviderConfigKey.java @@ -0,0 +1,164 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. 
+ */ +package gov.anl.aps.logr.portal.model.db.entities; + +import java.io.Serializable; +import java.util.Collection; +import javax.persistence.Basic; +import javax.persistence.CascadeType; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.ManyToOne; +import javax.persistence.NamedQueries; +import javax.persistence.NamedQuery; +import javax.persistence.OneToMany; +import javax.persistence.Table; +import javax.validation.constraints.NotNull; +import javax.validation.constraints.Size; +import javax.xml.bind.annotation.XmlRootElement; +import javax.xml.bind.annotation.XmlTransient; + +/** + * + * @author djarosz + */ +@Entity +@Table(name = "notification_provider_config_key") +@XmlRootElement +@NamedQueries({ + @NamedQuery(name = "NotificationProviderConfigKey.findAll", query = "SELECT n FROM NotificationProviderConfigKey n"), + @NamedQuery(name = "NotificationProviderConfigKey.findById", query = "SELECT n FROM NotificationProviderConfigKey n WHERE n.id = :id"), + @NamedQuery(name = "NotificationProviderConfigKey.findByConfigKey", query = "SELECT n FROM NotificationProviderConfigKey n WHERE n.configKey = :configKey"), + @NamedQuery(name = "NotificationProviderConfigKey.findByDescription", query = "SELECT n FROM NotificationProviderConfigKey n WHERE n.description = :description"), + @NamedQuery(name = "NotificationProviderConfigKey.findByIsRequired", query = "SELECT n FROM NotificationProviderConfigKey n WHERE n.isRequired = :isRequired"), + @NamedQuery(name = "NotificationProviderConfigKey.findByDisplayOrder", query = "SELECT n FROM NotificationProviderConfigKey n WHERE n.displayOrder = :displayOrder")}) +public class NotificationProviderConfigKey implements Serializable { + + private static final long serialVersionUID = 1L; + @Id + @GeneratedValue(strategy = 
GenerationType.IDENTITY) + @Basic(optional = false) + @Column(name = "id") + private Integer id; + @Basic(optional = false) + @NotNull + @Size(min = 1, max = 64) + @Column(name = "config_key") + private String configKey; + @Size(max = 256) + @Column(name = "description") + private String description; + @Basic(optional = false) + @NotNull + @Column(name = "is_required") + private Boolean isRequired; + @Column(name = "display_order") + private Integer displayOrder; + @JoinColumn(name = "notification_provider_id", referencedColumnName = "id") + @ManyToOne(optional = false) + private NotificationProvider notificationProvider; + @OneToMany(cascade = CascadeType.ALL, mappedBy = "notificationProviderConfigKey") + private Collection notificationConfigurationSettingCollection; + + public NotificationProviderConfigKey() { + } + + public NotificationProviderConfigKey(Integer id) { + this.id = id; + } + + public NotificationProviderConfigKey(Integer id, String configKey, Boolean isRequired) { + this.id = id; + this.configKey = configKey; + this.isRequired = isRequired; + } + + public Integer getId() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + public String getConfigKey() { + return configKey; + } + + public void setConfigKey(String configKey) { + this.configKey = configKey; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public Boolean getIsRequired() { + return isRequired; + } + + public void setIsRequired(Boolean isRequired) { + this.isRequired = isRequired; + } + + public Integer getDisplayOrder() { + return displayOrder; + } + + public void setDisplayOrder(Integer displayOrder) { + this.displayOrder = displayOrder; + } + + public NotificationProvider getNotificationProvider() { + return notificationProvider; + } + + public void setNotificationProvider(NotificationProvider notificationProvider) { + this.notificationProvider = 
notificationProvider; + } + + @XmlTransient + public Collection getNotificationConfigurationSettingCollection() { + return notificationConfigurationSettingCollection; + } + + public void setNotificationConfigurationSettingCollection(Collection notificationConfigurationSettingCollection) { + this.notificationConfigurationSettingCollection = notificationConfigurationSettingCollection; + } + + @Override + public int hashCode() { + int hash = 0; + hash += (id != null ? id.hashCode() : 0); + return hash; + } + + @Override + public boolean equals(Object object) { + // TODO: Warning - this method won't work in the case the id fields are not set + if (!(object instanceof NotificationProviderConfigKey)) { + return false; + } + NotificationProviderConfigKey other = (NotificationProviderConfigKey) object; + if ((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id))) { + return false; + } + return true; + } + + @Override + public String toString() { + return "gov.anl.aps.logr.portal.model.db.entities.NotificationProviderConfigKey[ id=" + id + " ]"; + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/Reaction.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/Reaction.java index d1ff308d6..92cc9ec41 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/Reaction.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/Reaction.java @@ -4,6 +4,7 @@ */ package gov.anl.aps.logr.portal.model.db.entities; +import com.fasterxml.jackson.annotation.JsonIgnore; import java.io.Serializable; import java.util.Collection; import javax.persistence.Basic; @@ -119,6 +120,7 @@ public void setDescription(String description) { } @XmlTransient + @JsonIgnore public Collection getLogReactionCollection() { return logReactionCollection; } diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/UserInfo.java 
b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/UserInfo.java index 9093e390c..e66c0624f 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/UserInfo.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/model/db/entities/UserInfo.java @@ -10,6 +10,7 @@ import gov.anl.aps.logr.portal.controllers.LoginController; import gov.anl.aps.logr.portal.utilities.SearchResult; import java.io.Serializable; +import java.util.Collection; import java.util.List; import java.util.regex.Pattern; import javax.persistence.Basic; @@ -78,12 +79,14 @@ public class UserInfo extends SettingEntity implements Serializable { @Column(name = "middle_name") private String middleName; // @Pattern(regexp="[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?", message="Invalid email")//if the field contains email address consider using this annotation to enforce field validation - @Size(max = 64) + @Size(max = 64) private String email; - @Size(max = 256) + @Size(max = 256) private String password; - @Size(max = 256) + @Size(max = 256) private String description; + @OneToMany(mappedBy = "userInfo") + private Collection notificationConfigurationCollection; @OneToMany(cascade = CascadeType.ALL, mappedBy = "userInfo") private List userSessions; @JoinTable(name = "user_list", joinColumns = { @@ -522,6 +525,15 @@ public void setSettingList(List entitySettingList) { @Override public EntitySetting createNewEntitySetting() { return new UserSetting(); + } + + @XmlTransient + public Collection getNotificationConfigurationCollection() { + return notificationConfigurationCollection; + } + + public void setNotificationConfigurationCollection(Collection notificationConfigurationCollection) { + this.notificationConfigurationCollection = notificationConfigurationCollection; } } diff --git 
a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/utilities/MarkdownParser.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/utilities/MarkdownParser.java index 5e8a81081..78048395c 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/utilities/MarkdownParser.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/utilities/MarkdownParser.java @@ -21,6 +21,7 @@ import com.vladsch.flexmark.util.misc.Extension; import com.vladsch.flexmark.util.sequence.BasedSequence; import com.vladsch.flexmark.util.sequence.Escaping; +import com.vladsch.flexmark.ext.tables.TablesExtension; import gov.anl.aps.logr.common.constants.CdbPropertyValue; import java.util.HashSet; @@ -60,13 +61,15 @@ public class MarkdownParser { + "monospaced code content goes here\n" + "```\n" + "\n\n" - + "# Heading Levels:\n" + + "### Table\n\n" + + "| Column1 | Column2 | Column3 |\n" + + "| --- | --- | --- |\n" + + "| Cell 1 | Cell 2 | Cell 3 |\n" + + "\n\n" + + "### Heading Levels\n" + "# Level 1 Heading\n" + "## Level 2 Heading\n" - + "### Level 3 Heading\n" - + "#### Level 4 Heading\n" - + "##### Level 5 Heading\n" - + "###### Level 6 Heading\n"; + + "### Level 3 Heading\n"; private static String contextRoot = null; @@ -79,7 +82,8 @@ public class MarkdownParser { private static MutableDataHolder options = new MutableDataSet() .set(Parser.EXTENSIONS, Arrays.asList( new Extension[]{ - LogrFlexmarkExtension.create() + LogrFlexmarkExtension.create(), + TablesExtension.create() } )); diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/utilities/SearchResult.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/utilities/SearchResult.java index 4a6856529..29b9dab2e 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/utilities/SearchResult.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/utilities/SearchResult.java @@ -4,9 +4,14 @@ */ package gov.anl.aps.logr.portal.utilities; +import 
com.fasterxml.jackson.annotation.JsonFormat; import com.fasterxml.jackson.annotation.JsonIgnore; import gov.anl.aps.logr.portal.model.db.entities.CdbEntity; +import gov.anl.aps.logr.portal.model.db.entities.EntityInfo; +import gov.anl.aps.logr.portal.model.db.entities.Item; +import gov.anl.aps.logr.portal.model.db.entities.Log; import java.util.ArrayList; +import java.util.Date; import java.util.HashMap; import java.util.regex.Pattern; @@ -52,6 +57,7 @@ public CdbEntity getCdbEntity() { return cdbEntity; } + @JsonIgnore public CdbEntity getAdditionalEntity() { return additionalEntity; } @@ -85,6 +91,7 @@ public void setObjectAttributeMatchMap(HashMap objectAttributeMa this.objectAttributeMatchMap = objectAttributeMatchMap; } + @JsonIgnore public boolean isEmpty() { return objectAttributeMatchMap.isEmpty(); } @@ -97,6 +104,48 @@ public void setAdditionalAttribute(String additionalAttribute) { this.additionalAttribute = additionalAttribute; } + public Integer getLogDocumentId() { + if (cdbEntity instanceof Item) { + return ((Item) cdbEntity).getId(); + } + return null; + } + + public Integer getLogEntryId() { + if (additionalEntity instanceof Log) { + return ((Log) additionalEntity).getId(); + } + return null; + } + + public String getLogbookType() { + if (cdbEntity instanceof Item) { + return ((Item) cdbEntity).getLongEntityTypeString(); + } + return null; + } + + public String getSystem() { + if (cdbEntity instanceof Item) { + return ((Item) cdbEntity).getItemTypeString(); + } + return null; + } + + @JsonFormat(shape = JsonFormat.Shape.STRING) + public Date getLastModifiedOn() { + if (additionalEntity instanceof Log) { + return ((Log) additionalEntity).getLastModifiedOnDateTime(); + } + if (cdbEntity instanceof Item) { + EntityInfo entityInfo = ((Item) cdbEntity).getEntityInfo(); + if (entityInfo != null) { + return entityInfo.getLastModifiedOnDateTime(); + } + } + return null; + } + public boolean doesValueContainPattern(String key, Object value, Pattern 
searchPattern) { if (value != null) { return doesValueContainPattern(key, value.toString(), searchPattern); @@ -115,6 +164,7 @@ public boolean doesValueContainPattern(String key, String value, Pattern searchP return searchResult; } + @JsonIgnore public ArrayList getShortDisplayList() { ArrayList stringList = new ArrayList<>(); @@ -125,6 +175,7 @@ public ArrayList getShortDisplayList() { return stringList; } + @JsonIgnore public ArrayList getDisplayList() { ArrayList stringList = new ArrayList<>(); @@ -136,6 +187,7 @@ public ArrayList getDisplayList() { return stringList; } + @JsonIgnore public String getDisplay() { String result = ""; String keyDelimiter = ": "; diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/utilities/SessionUtility.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/utilities/SessionUtility.java index 0a6256779..8463dd7c0 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/utilities/SessionUtility.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/portal/utilities/SessionUtility.java @@ -4,16 +4,18 @@ */ package gov.anl.aps.logr.portal.utilities; +import com.fasterxml.jackson.core.JsonProcessingException; +import fish.payara.cloud.connectors.mqtt.api.MQTTConnection; +import fish.payara.cloud.connectors.mqtt.api.MQTTConnectionFactory; +import gov.anl.aps.logr.common.exceptions.CdbException; +import gov.anl.aps.logr.common.exceptions.MqttNotConfiguredException; +import gov.anl.aps.logr.common.mqtt.model.MqttEvent; import gov.anl.aps.logr.portal.model.db.entities.UserInfo; import java.io.IOException; -import java.io.UnsupportedEncodingException; -import java.net.URLDecoder; -import java.net.URLEncoder; -import java.net.UnknownHostException; import java.util.HashMap; import java.util.Map; import java.util.Stack; -import java.util.logging.Level; +import javax.annotation.Resource; import javax.faces.application.FacesMessage; import javax.faces.application.NavigationHandler; import 
javax.faces.application.ViewHandler; @@ -47,11 +49,16 @@ public class SessionUtility { private static final String MODULE_NAME_LOOKUP = "java:module/ModuleName"; private static final String JAVA_LOOKUP_START = "java:global/"; private static String FACADE_LOOKUP_STRING_START = null; + private static final String BELY_MQTT_NAME = "bely/MQTT/resource"; private static final String USER_SESSION_COOKIE_KEY = "USERSESSIONID"; private static final Logger logger = LogManager.getLogger(SessionUtility.class.getName()); + // Instructs the framework of the class to resolve for the mqtt connection factory. + @Resource(lookup = BELY_MQTT_NAME) + MQTTConnectionFactory factory; + public SessionUtility() { } @@ -63,6 +70,10 @@ public static void addErrorMessage(String summary, String detail, boolean isAjax addMessage(MESSAGES_KEY, new FacesMessage(FacesMessage.SEVERITY_ERROR, summary, detail), isAjax); } + public static void setValidationFailed() { + FacesContext.getCurrentInstance().validationFailed(); + } + public static void addWarningMessage(String summary, String detail) { addWarningMessage(summary, detail, false); } @@ -133,7 +144,7 @@ public static UserInfo getUser() { public static String getRemoteAddress() { HttpServletRequest request = (HttpServletRequest) FacesContext.getCurrentInstance().getExternalContext().getRequest(); - return request.getRemoteAddr(); + return request.getRemoteAddr(); } public static String getSessionCookie() { @@ -141,10 +152,10 @@ public static String getSessionCookie() { Cookie cookie = (Cookie) cookieMap.get(USER_SESSION_COOKIE_KEY); - if (cookie != null) { + if (cookie != null) { String value = cookie.getValue(); - - return value; + + return value; } return null; @@ -323,6 +334,51 @@ public static Object findBean(String beanName) { return (Object) context.getApplication().evaluateExpressionGet(context, "#{" + beanName + "}", Object.class); } + public static MQTTConnectionFactory fetchMQTTConnectionFactory() { + try { + InitialContext context = 
new InitialContext(); + MQTTConnectionFactory result = (MQTTConnectionFactory) context.lookup(BELY_MQTT_NAME); + + return result; + } catch (NamingException ex) { + logger.error(ex); + } catch (NoClassDefFoundError ex) { + logger.error(ex); + } + return null; + + } + + public static void publishMqttEvent(MqttEvent event) throws CdbException { + MQTTConnectionFactory mqttFactory = fetchMQTTConnectionFactory(); + + String jsonMessage; + try { + jsonMessage = event.toJson(); + } catch (JsonProcessingException ex) { + String msg = "Failed to serialize MQTT event: " + ex.getMessage(); + logger.error(msg); + throw new CdbException(msg); + } + + if (mqttFactory == null) { + String msg = "MQTT not configured. Skipping event: " + jsonMessage; + logger.debug(msg); + throw new MqttNotConfiguredException(msg); + } + + try { + MQTTConnection connection = mqttFactory.getConnection(); + connection.publish(event.getTopic().getValue(), jsonMessage.getBytes(), 0, false); + connection.close(); + logger.debug("Published MQTT event to {}: {}", event.getTopic().getValue(), jsonMessage); + } catch (Exception ex) { + String msg = "Failed to publish MQTT event: " + ex.getMessage(); + logger.error(msg); + throw new CdbException(msg); + } + } + public static Object findFacade(String facadeName) { try { InitialContext context = new InitialContext(); diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/rest/CdbRestService.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/rest/CdbRestService.java index cad9e6d22..de3b93772 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/rest/CdbRestService.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/rest/CdbRestService.java @@ -43,7 +43,8 @@ private Set> getRestResourceClasses() { resources.add(gov.anl.aps.logr.rest.routes.AuthenticationRoute.class); resources.add(gov.anl.aps.logr.rest.routes.DomainRoute.class); resources.add(gov.anl.aps.logr.rest.routes.DownloadRoute.class); - 
resources.add(gov.anl.aps.logr.rest.routes.LogbookRoute.class); + resources.add(gov.anl.aps.logr.rest.routes.LogbookRoute.class); + resources.add(gov.anl.aps.logr.rest.routes.NotificationConfigurationRoute.class); resources.add(gov.anl.aps.logr.rest.routes.PropertyValueRoute.class); resources.add(gov.anl.aps.logr.rest.routes.SearchRoute.class); resources.add(gov.anl.aps.logr.rest.routes.SystemLogRoute.class); diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/rest/entities/LogbookSearchResults.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/rest/entities/LogbookSearchResults.java new file mode 100644 index 000000000..ccfecb76c --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/rest/entities/LogbookSearchResults.java @@ -0,0 +1,39 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. + */ +package gov.anl.aps.logr.rest.entities; + +import gov.anl.aps.logr.portal.utilities.SearchResult; +import java.util.LinkedList; + +/** + * API entity represents search results for log documents and log entries. 
+ * + * @author djarosz + */ +public class LogbookSearchResults { + + private LinkedList documentResults; + private LinkedList logEntryResults; + + public LogbookSearchResults() { + } + + public LinkedList getDocumentResults() { + return documentResults; + } + + public void setDocumentResults(LinkedList documentResults) { + this.documentResults = documentResults; + } + + public LinkedList getLogEntryResults() { + return logEntryResults; + } + + public void setLogEntryResults(LinkedList logEntryResults) { + this.logEntryResults = logEntryResults; + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/rest/routes/LogbookRoute.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/rest/routes/LogbookRoute.java index 982b776e3..2482242f5 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/rest/routes/LogbookRoute.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/rest/routes/LogbookRoute.java @@ -12,9 +12,9 @@ import gov.anl.aps.logr.portal.constants.ItemDomainName; import gov.anl.aps.logr.portal.controllers.utilities.EntityInfoControllerUtility; import gov.anl.aps.logr.portal.controllers.utilities.ItemDomainLogbookControllerUtility; -import gov.anl.aps.logr.portal.controllers.utilities.LogControllerUtility; import gov.anl.aps.logr.portal.model.db.beans.DomainFacade; import gov.anl.aps.logr.portal.model.db.beans.ItemDomainLogbookFacade; +import gov.anl.aps.logr.portal.model.db.beans.LogFacade; import gov.anl.aps.logr.portal.model.db.entities.Domain; import gov.anl.aps.logr.portal.model.db.entities.EntityInfo; import gov.anl.aps.logr.portal.model.db.entities.EntityType; @@ -65,6 +65,9 @@ public class LogbookRoute extends ItemBaseRoute { @EJB ItemDomainLogbookFacade itemDomainLogbookFacade; + @EJB + LogFacade logFacade; + private Domain getLogbookDomain() { return domainFacade.find(ItemDomainName.LOGBOOK_ID); } @@ -234,8 +237,13 @@ public LogEntry addUpdateLogEntry(@RequestBody(required = true) LogEntry logEntr 
utility.verifySaveLogLockoutsForItem(logDocument, logEntity, user); } + Log originalLogEntry = null; + if (logId != null) { + originalLogEntry = logFacade.find(logId); + } + logEntry.updateLogPerLogEntryObject(logEntity); - logEntity = saveLog(logEntity, user); + logEntity = utility.saveLog(logEntity, user, originalLogEntry); // Update modified date. updateModifiedDateForLogDocument(logDocument, user); @@ -401,18 +409,6 @@ private void updateModifiedDateForLogDocument(ItemDomainLogbook logDocument, Use eicu.update(entityInfo, user); } - private Log saveLog(Log log, UserInfo userInfo) throws CdbException { - LogControllerUtility utility = new LogControllerUtility(); - - if (log.getId() != null) { - log = utility.update(log, userInfo); - } else { - log = utility.create(log, userInfo); - } - - return log; - } - @Override protected void verifyUserPermissionForItem(UserInfo user, Item item) throws AuthorizationError { // Permission verification should be done at the top level document only. diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/rest/routes/NotificationConfigurationRoute.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/rest/routes/NotificationConfigurationRoute.java new file mode 100644 index 000000000..ce652c147 --- /dev/null +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/rest/routes/NotificationConfigurationRoute.java @@ -0,0 +1,132 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. 
+ */ +package gov.anl.aps.logr.rest.routes; + +import gov.anl.aps.logr.portal.model.db.beans.NotificationConfigurationFacade; +import gov.anl.aps.logr.portal.model.db.beans.NotificationHandlerConfigKeyFacade; +import gov.anl.aps.logr.portal.model.db.beans.NotificationProviderConfigKeyFacade; +import gov.anl.aps.logr.portal.model.db.beans.NotificationProviderFacade; +import gov.anl.aps.logr.portal.model.db.entities.NotificationConfiguration; +import gov.anl.aps.logr.portal.model.db.entities.NotificationHandlerConfigKey; +import gov.anl.aps.logr.portal.model.db.entities.NotificationProvider; +import gov.anl.aps.logr.portal.model.db.entities.NotificationProviderConfigKey; +import gov.anl.aps.logr.portal.model.db.entities.UserInfo; +import io.swagger.v3.oas.annotations.tags.Tag; +import java.util.List; +import javax.ejb.EJB; +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; +import javax.ws.rs.core.MediaType; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +/** + * + * @author djarosz + */ +@Path("/NotificationConfiguration") +@Tag(name = "NotificationConfiguration") +public class NotificationConfigurationRoute extends BaseRoute { + + private static final Logger LOGGER = LogManager.getLogger(NotificationConfigurationRoute.class.getName()); + + @EJB + NotificationConfigurationFacade notificationConfigurationFacade; + + @EJB + NotificationProviderFacade notificationProviderFacade; + + @EJB + NotificationProviderConfigKeyFacade notificationProviderConfigKeyFacade; + + @EJB + NotificationHandlerConfigKeyFacade notificationHandlerConfigKeyFacade; + + @GET + @Path("/all") + @Produces(MediaType.APPLICATION_JSON) + public List getAll() { + LOGGER.debug("Fetching all notification configurations"); + return notificationConfigurationFacade.findAll(); + } + + @GET + @Path("/ById/{id}") + @Produces(MediaType.APPLICATION_JSON) + public NotificationConfiguration 
getById(@PathParam("id") int id) { + LOGGER.debug("Fetching notification configuration with id: " + id); + return notificationConfigurationFacade.find(id); + } + + @GET + @Path("/ByName/{name}") + @Produces(MediaType.APPLICATION_JSON) + public List getByName(@PathParam("name") String name) { + LOGGER.debug("Fetching notification configurations with name: " + name); + return notificationConfigurationFacade.findByName(name); + } + + @GET + @Path("/ByProviderId/{providerId}") + @Produces(MediaType.APPLICATION_JSON) + public List getByProviderId(@PathParam("providerId") int providerId) { + LOGGER.debug("Fetching notification configurations for provider id: " + providerId); + NotificationProvider provider = notificationProviderFacade.find(providerId); + return notificationConfigurationFacade.findByProvider(provider); + } + + @GET + @Path("/ByUsername/{username}") + @Produces(MediaType.APPLICATION_JSON) + public List getByUsername(@PathParam("username") String username) { + LOGGER.debug("Fetching notification configurations for user: " + username); + UserInfo user = userFacade.findByUsername(username); + return notificationConfigurationFacade.findByUser(user); + } + + @GET + @Path("/Providers") + @Produces(MediaType.APPLICATION_JSON) + public List getAllProviders() { + LOGGER.debug("Fetching all notification providers"); + return notificationProviderFacade.findAll(); + } + + @GET + @Path("/ProviderById/{id}") + @Produces(MediaType.APPLICATION_JSON) + public NotificationProvider getProviderById(@PathParam("id") int id) { + LOGGER.debug("Fetching notification provider with id: " + id); + return notificationProviderFacade.find(id); + } + + @GET + @Path("/ProviderByName/{name}") + @Produces(MediaType.APPLICATION_JSON) + public NotificationProvider getProviderByName(@PathParam("name") String name) { + LOGGER.debug("Fetching notification provider with name: " + name); + return notificationProviderFacade.findByName(name); + } + + @GET + @Path("/ProviderConfigKeys/{providerId}") 
+ @Produces(MediaType.APPLICATION_JSON) + public List getProviderConfigKeys(@PathParam("providerId") int providerId) { + LOGGER.debug("Fetching config keys for provider id: " + providerId); + NotificationProvider provider = notificationProviderFacade.find(providerId); + return notificationProviderConfigKeyFacade.findByProvider(provider); + } + + @GET + @Path("/HandlerConfigKeys") + @Produces(MediaType.APPLICATION_JSON) + public List getHandlerConfigKeys() { + LOGGER.debug("Fetching all handler config keys"); + return notificationHandlerConfigKeyFacade.findAll(); + } + +} diff --git a/src/java/LogrPortal/src/java/gov/anl/aps/logr/rest/routes/SearchRoute.java b/src/java/LogrPortal/src/java/gov/anl/aps/logr/rest/routes/SearchRoute.java index e8a7f5031..08f86936d 100644 --- a/src/java/LogrPortal/src/java/gov/anl/aps/logr/rest/routes/SearchRoute.java +++ b/src/java/LogrPortal/src/java/gov/anl/aps/logr/rest/routes/SearchRoute.java @@ -4,26 +4,53 @@ */ package gov.anl.aps.logr.rest.routes; +import gov.anl.aps.logr.common.exceptions.InvalidArgument; import gov.anl.aps.logr.common.exceptions.InvalidRequest; +import gov.anl.aps.logr.common.mqtt.constants.CallSource; +import gov.anl.aps.logr.common.mqtt.model.entities.LogbookSearchOptions; +import gov.anl.aps.logr.portal.constants.EntityTypeName; +import gov.anl.aps.logr.portal.constants.ItemDomainName; import gov.anl.aps.logr.portal.controllers.utilities.ItemCategoryControllerUtility; +import gov.anl.aps.logr.portal.controllers.utilities.ItemDomainLogbookControllerUtility; import gov.anl.aps.logr.portal.controllers.utilities.ItemElementControllerUtility; import gov.anl.aps.logr.portal.controllers.utilities.ItemTypeControllerUtility; import gov.anl.aps.logr.portal.controllers.utilities.PropertyTypeCategoryControllerUtility; import gov.anl.aps.logr.portal.controllers.utilities.PropertyTypeControllerUtility; +import gov.anl.aps.logr.portal.controllers.utilities.SearchControllerUtility; import 
gov.anl.aps.logr.portal.controllers.utilities.SourceControllerUtility; import gov.anl.aps.logr.portal.controllers.utilities.UserGroupControllerUtility; import gov.anl.aps.logr.portal.controllers.utilities.UserInfoControllerUtility; +import gov.anl.aps.logr.portal.model.db.beans.DomainFacade; +import gov.anl.aps.logr.portal.model.db.beans.UserInfoFacade; +import gov.anl.aps.logr.portal.model.db.entities.Domain; +import gov.anl.aps.logr.portal.model.db.entities.EntityType; +import gov.anl.aps.logr.portal.model.db.entities.ItemType; +import gov.anl.aps.logr.portal.model.db.entities.UserInfo; import gov.anl.aps.logr.portal.utilities.SearchResult; +import gov.anl.aps.logr.rest.entities.LogbookSearchResults; import gov.anl.aps.logr.rest.entities.SearchEntitiesOptions; import gov.anl.aps.logr.rest.entities.SearchEntitiesResults; +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.Parameter; import io.swagger.v3.oas.annotations.parameters.RequestBody; import io.swagger.v3.oas.annotations.tags.Tag; +import java.util.ArrayList; +import java.util.Date; import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import javax.ejb.EJB; import javax.ws.rs.Consumes; +import javax.ws.rs.DefaultValue; +import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; +import javax.ws.rs.PathParam; import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; import javax.ws.rs.core.MediaType; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; /** * @@ -33,59 +60,219 @@ @Tag(name = "Search") public class SearchRoute { + private static final Logger logger = LogManager.getLogger(SearchRoute.class.getName()); + + @EJB + DomainFacade domainFacade; + + @EJB + UserInfoFacade userInfoFacade; + + @GET + @Path("/{searchText}") + @Produces(MediaType.APPLICATION_JSON) + @Operation(summary = "Search log documents and log entries.") + public LogbookSearchResults searchLogbook( + @Parameter(description = 
"Search text with wildcard support (? for single char, * for multiple)", required = true) + @PathParam("searchText") String searchText, + @Parameter(description = "Use case insensitive matching") + @QueryParam("caseInsensitive") @DefaultValue("true") boolean caseInsensitive, + @Parameter(description = "Logbook type IDs to filter by, can specify multiple") + @QueryParam("logbookTypeId") List logbookTypeIdList, + @Parameter(description = "System IDs to filter by, can specify multiple") + @QueryParam("systemId") List systemIdList, + @Parameter(description = "User IDs to filter by, can specify multiple") + @QueryParam("userId") List userIdList, + @Parameter(description = "Start of last modified date range filter") + @QueryParam("startModifiedDate") Date startModifiedDate, + @Parameter(description = "End of last modified date range filter") + @QueryParam("endModifiedDate") Date endModifiedDate, + @Parameter(description = "Start of creation date range filter") + @QueryParam("startCreatedDate") Date startCreatedDate, + @Parameter(description = "End of creation date range filter") + @QueryParam("endCreatedDate") Date endCreatedDate + ) throws InvalidRequest, InvalidArgument { + + // Resolve ID lists to entity lists + List entityTypeList = resolveEntityTypeList(logbookTypeIdList); + List itemTypeList = resolveItemTypeList(systemIdList); + List userList = resolveUserList(userIdList); + + Date startModifiedTime = startModifiedDate; + Date endModifiedTime = endModifiedDate; + Date startCreatedTime = startCreatedDate; + Date endCreatedTime = endCreatedDate; + + // Adjust end times to include the full day + endModifiedTime = ItemDomainLogbookControllerUtility.adjustEndTimeForSearch(endModifiedTime); + endCreatedTime = ItemDomainLogbookControllerUtility.adjustEndTimeForSearch(endCreatedTime); + + ItemDomainLogbookControllerUtility utility = new ItemDomainLogbookControllerUtility(); + + // Build search options map + Map searchArgs = utility.createAdvancedSearchMap( + 
entityTypeList, itemTypeList, userList, + startModifiedTime, endModifiedTime, + startCreatedTime, endCreatedTime); + + LogbookSearchResults results = new LogbookSearchResults(); + + // Search log documents + LinkedList documentResults = utility.performEntitySearch( + searchText, searchArgs, caseInsensitive); + results.setDocumentResults(documentResults); + + // Search log entries + LinkedList logEntryResults = utility.searchLogEntries( + searchText, caseInsensitive, searchArgs); + results.setLogEntryResults(logEntryResults); + + // Publish MQTT search event + LogbookSearchOptions searchOptions = new LogbookSearchOptions( + entityTypeList, itemTypeList, userList, + startModifiedTime, endModifiedTime, + startCreatedTime, endCreatedTime, caseInsensitive); + SearchControllerUtility.publishSearchMqttEvent(searchText, searchOptions, CallSource.API); + + return results; + } + @POST - @Path("/Entities") + @Path("/GenericSearch") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) - public SearchEntitiesResults searchEntities(@RequestBody(required = true) SearchEntitiesOptions searchEntitiesOptions) throws InvalidRequest { + public SearchEntitiesResults genericSearch(@RequestBody(required = true) SearchEntitiesOptions searchEntitiesOptions) throws InvalidRequest { SearchEntitiesResults results = new SearchEntitiesResults(); String searchText = searchEntitiesOptions.getSearchText(); if (searchText == null) { throw new InvalidRequest("Search text must be specified."); } - + if (searchEntitiesOptions.isIncludeItemElement()) { ItemElementControllerUtility itemElementControllerUtility = new ItemElementControllerUtility(); LinkedList itemElementResults = itemElementControllerUtility.performEntitySearch(searchText, true); - results.setItemDomainCatalogResults(itemElementResults); + results.setItemElementResults(itemElementResults); } if (searchEntitiesOptions.isIncludeItemType()) { ItemTypeControllerUtility itemTypeControllerUtility = new 
ItemTypeControllerUtility(); LinkedList itemTypeResults = itemTypeControllerUtility.performEntitySearch(searchText, true); - results.setItemDomainCatalogResults(itemTypeResults); + results.setItemTypeResults(itemTypeResults); } if (searchEntitiesOptions.isIncludeItemCategoy()) { ItemCategoryControllerUtility itemCategoryControllerUtility = new ItemCategoryControllerUtility(); LinkedList itemCategoryResults = itemCategoryControllerUtility.performEntitySearch(searchText, true); - results.setItemDomainCatalogResults(itemCategoryResults); + results.setItemCategoryResults(itemCategoryResults); } if (searchEntitiesOptions.isIncludePropertyType()) { PropertyTypeControllerUtility propertyTypeControllerUtility = new PropertyTypeControllerUtility(); LinkedList propertyTypeResults = propertyTypeControllerUtility.performEntitySearch(searchText, true); - results.setItemDomainCatalogResults(propertyTypeResults); + results.setPropertyTypeResults(propertyTypeResults); } if (searchEntitiesOptions.isIncludePropertyTypeCategory()) { PropertyTypeCategoryControllerUtility propertyTypeCategoryControllerUtility = new PropertyTypeCategoryControllerUtility(); LinkedList propertyTypeCategoryResults = propertyTypeCategoryControllerUtility.performEntitySearch(searchText, true); - results.setItemDomainCatalogResults(propertyTypeCategoryResults); + results.setPropertyTypeCategoryResults(propertyTypeCategoryResults); } if (searchEntitiesOptions.isIncludeSource()) { SourceControllerUtility sourceControllerUtility = new SourceControllerUtility(); LinkedList sourceResults = sourceControllerUtility.performEntitySearch(searchText, true); - results.setItemDomainCatalogResults(sourceResults); + results.setSourceResults(sourceResults); } if (searchEntitiesOptions.isIncludeUser()) { UserInfoControllerUtility userControllerUtility = new UserInfoControllerUtility(); LinkedList userResults = userControllerUtility.performEntitySearch(searchText, true); - results.setItemDomainCatalogResults(userResults); + 
results.setUserResults(userResults); } if (searchEntitiesOptions.isIncludeUserGroup()) { UserGroupControllerUtility userGroupControllerUtility = new UserGroupControllerUtility(); LinkedList userGroupResults = userGroupControllerUtility.performEntitySearch(searchText, true); - results.setItemDomainCatalogResults(userGroupResults); + results.setUserGroupResults(userGroupResults); } return results; } + + private Domain getLogbookDomain() { + return domainFacade.find(ItemDomainName.LOGBOOK_ID); + } + + private List getLogbookTypes() { + Domain domain = getLogbookDomain(); + List logbookTypes = domain.getAllowedEntityTypeList(); + // Remove template + for (EntityType logbookType : logbookTypes) { + if (logbookType.getName().equals(EntityTypeName.template.getValue())) { + logbookTypes.remove(logbookType); + break; + } + } + + return logbookTypes; + } + + private List getLogbookSystems() { + Domain domain = getLogbookDomain(); + + return domain.getItemTypeList(); + } + + private List resolveEntityTypeList(List idList) throws InvalidArgument { + if (idList == null || idList.isEmpty()) { + return null; + } + List result = new ArrayList<>(); + List validTypes = getLogbookTypes(); + for (Integer id : idList) { + boolean found = false; + for (EntityType type : validTypes) { + if (type.getId() == id) { + result.add(type); + found = true; + break; + } + } + if (!found) { + throw new InvalidArgument("Invalid logbook type id: " + id); + } + } + return result; + } + + private List resolveItemTypeList(List idList) throws InvalidArgument { + if (idList == null || idList.isEmpty()) { + return null; + } + List result = new ArrayList<>(); + List validSystems = getLogbookSystems(); + for (Integer id : idList) { + boolean found = false; + for (ItemType system : validSystems) { + if (system.getId().equals(id)) { + result.add(system); + found = true; + break; + } + } + if (!found) { + throw new InvalidArgument("Invalid system id: " + id); + } + } + return result; + } + + private List 
resolveUserList(List idList) throws InvalidArgument { + if (idList == null || idList.isEmpty()) { + return null; + } + List result = new ArrayList<>(); + for (Integer id : idList) { + UserInfo user = userInfoFacade.find(id); + if (user == null) { + throw new InvalidArgument("Invalid user id: " + id); + } + result.add(user); + } + return result; + } } diff --git a/src/java/LogrPortal/web/resources/css/logbook.css b/src/java/LogrPortal/web/resources/css/logbook.css index 4cd29136d..fde8820a6 100644 --- a/src/java/LogrPortal/web/resources/css/logbook.css +++ b/src/java/LogrPortal/web/resources/css/logbook.css @@ -106,14 +106,40 @@ pre:has(code) { font-family: 'Monaco', monospace; padding: 1em; border: 1px solid #cfcfcf; + position: relative; } pre:has([class^="language-"]) { - border: none; + border: none; background: #273647; color: white; } +.code-copy-btn { + position: absolute; + top: 6px; + right: 6px; + padding: 3px 10px; + font-size: 12px; + cursor: pointer; + border: 1px solid #cfcfcf; + border-radius: 4px; + background: #f5f5f5; + color: #333; + opacity: 0; + transition: opacity 0.2s; +} + +pre:hover .code-copy-btn { + opacity: 1; +} + +pre:has([class^="language-"]) .code-copy-btn { + background: #3a4f65; + color: #ddd; + border-color: #5a7a96; +} + .logEntry h1 { color: #495057; padding: 0; @@ -133,7 +159,28 @@ pre:has([class^="language-"]) { } .logEntry img { - max-width: 450px; + max-width: 450px; +} + +/* Markdown table styles */ +.logEntry table { + border-collapse: collapse; + width: 100%; + margin: 1em 0; + font-size: 14px; + line-height: 1.5; +} + +.logEntry th, +.logEntry td { + border: 1px solid #ddd; + padding: 10px 12px; +} + +.logEntry th { + background-color: #f5f5f5; + font-weight: 600; + color: #333; } .activeLogEntry { @@ -160,7 +207,7 @@ div.logbookSection { .markdown-example-field { background: white; width: 400px; - height: 700px; + height: 750px; } .markdown-example-field > pre { diff --git 
a/src/java/LogrPortal/web/resources/css/portal.css b/src/java/LogrPortal/web/resources/css/portal.css index 728026e02..53a1065a6 100644 --- a/src/java/LogrPortal/web/resources/css/portal.css +++ b/src/java/LogrPortal/web/resources/css/portal.css @@ -509,6 +509,10 @@ tr:hover .actionLink { width: 30px; } +.fullWidthInput { + width: 100%; +} + select { width: 150px; } diff --git a/src/java/LogrPortal/web/resources/javascript/codeBlockCopyButton.js b/src/java/LogrPortal/web/resources/javascript/codeBlockCopyButton.js new file mode 100644 index 000000000..d69ef15f8 --- /dev/null +++ b/src/java/LogrPortal/web/resources/javascript/codeBlockCopyButton.js @@ -0,0 +1,45 @@ +/* + * Copyright (c) UChicago Argonne, LLC. All rights reserved. + * See LICENSE file. + */ + +/** + * Adds copy buttons to all
 `pre > code` blocks within a container.
+ * @param {Element} [container=document] - The container to search within.
+ */
function addCodeBlockCopyButtons(container) {
    try {
        var scope = container || document;
        var preList = scope.querySelectorAll('pre');

        Array.prototype.forEach.call(preList, function (pre) {
            // Only decorate <pre> elements that actually wrap a <code>
            // block, and never attach a second button to the same block
            // (makes repeated calls after AJAX updates idempotent).
            if (!pre.querySelector('code') || pre.querySelector('.code-copy-btn')) {
                return;
            }

            var copyButton = document.createElement('button');
            copyButton.className = 'code-copy-btn';
            copyButton.type = 'button';
            // NOTE(review): the button label is empty here — confirm whether
            // an icon/text was intended.
            copyButton.innerHTML = '';
            // Inline handler string: delegates to the global copyCodeBlock()
            // and cancels the default button action.
            copyButton.setAttribute('onclick', 'copyCodeBlock(this); return false;');
            pre.appendChild(copyButton);
        });
    } catch (e) {
        console.error('Failed to add code block copy buttons:', e);
    }
}
+
/**
 * Copies the text content of the <code> element that shares a parent
 * <pre> with the given button to the system clipboard, then briefly
 * swaps the button label as feedback before restoring it.
 * @param {Element} btn - The copy button placed inside the <pre> element.
 */
function copyCodeBlock(btn) {
    var code = btn.parentElement.querySelector('code');
    if (!code) return;

    // The async Clipboard API only exists in secure contexts
    // (HTTPS / localhost). Without this guard, accessing
    // navigator.clipboard.writeText throws a synchronous TypeError
    // that the promise .catch below can never observe.
    if (!navigator.clipboard || !navigator.clipboard.writeText) {
        console.error('Failed to copy code block:', 'Clipboard API unavailable (insecure context?)');
        return;
    }

    navigator.clipboard.writeText(code.textContent).then(function () {
        // NOTE(review): both labels below are empty strings — confirm
        // whether "copied" feedback text/icons were intended here.
        btn.innerHTML = '';
        // Restore the original label after a short confirmation period.
        setTimeout(function () {
            btn.innerHTML = '';
        }, 1500);
    }).catch(function (e) {
        console.error('Failed to copy code block:', e);
    });
}
diff --git a/src/java/LogrPortal/web/views/itemDomainLogbook/private/MarkdownExampleDialog.xhtml b/src/java/LogrPortal/web/views/itemDomainLogbook/private/MarkdownExampleDialog.xhtml
deleted file mode 100644
index 7f554db9d..000000000
--- a/src/java/LogrPortal/web/views/itemDomainLogbook/private/MarkdownExampleDialog.xhtml
+++ /dev/null
@@ -1,50 +0,0 @@
-
-
-
-
-
-    
-
-        
-            
-            
-
-
-            
-                
#{itemDomainLogbookController.exampleMarkdown}
-
- - -
- -
-
- -
- - - - - -
- - - -
- - diff --git a/src/java/LogrPortal/web/views/itemDomainLogbook/private/markdownExampleDialog.xhtml b/src/java/LogrPortal/web/views/itemDomainLogbook/private/markdownExampleDialog.xhtml index 7f554db9d..9587fdb50 100644 --- a/src/java/LogrPortal/web/views/itemDomainLogbook/private/markdownExampleDialog.xhtml +++ b/src/java/LogrPortal/web/views/itemDomainLogbook/private/markdownExampleDialog.xhtml @@ -15,6 +15,7 @@ See LICENSE file. header="Markdown Example" styleClass="viewTransparentBackgroundDialog markdown-example-dialog" dynamic="true" + onShow="addCodeBlockCopyButtons(this.jq[0])" modal="false"> diff --git a/src/java/LogrPortal/web/views/itemDomainLogbook/view.xhtml b/src/java/LogrPortal/web/views/itemDomainLogbook/view.xhtml index 4121c6336..cfd0e7e20 100644 --- a/src/java/LogrPortal/web/views/itemDomainLogbook/view.xhtml +++ b/src/java/LogrPortal/web/views/itemDomainLogbook/view.xhtml @@ -182,9 +182,10 @@ See LICENSE file. - + + + oncomplete="renderLogs(); addCodeBlockCopyButtons(); ScrollToLog(#{itemDomainLogbookController.lastLog.id});" /> diff --git a/src/java/LogrPortal/web/views/notificationConfiguration/private/notificationConfigurationDeleteDialog.xhtml b/src/java/LogrPortal/web/views/notificationConfiguration/private/notificationConfigurationDeleteDialog.xhtml new file mode 100644 index 000000000..522b3724f --- /dev/null +++ b/src/java/LogrPortal/web/views/notificationConfiguration/private/notificationConfigurationDeleteDialog.xhtml @@ -0,0 +1,26 @@ + + + + + + + + + + + diff --git a/src/java/LogrPortal/web/views/notificationConfiguration/private/notificationConfigurationEditDialog.xhtml b/src/java/LogrPortal/web/views/notificationConfiguration/private/notificationConfigurationEditDialog.xhtml new file mode 100644 index 000000000..44549b6c0 --- /dev/null +++ b/src/java/LogrPortal/web/views/notificationConfiguration/private/notificationConfigurationEditDialog.xhtml @@ -0,0 +1,142 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ +

+ #{notificationConfigurationController.selectedProvider.description} + +

+ + + + + + + + + +
+
+ + + + + + + + + + + + + + + + + +
+
+ + + + + + + + + + +
+ + + +
diff --git a/src/java/LogrPortal/web/views/notificationConfiguration/private/notificationConfigurationListDataTable.xhtml b/src/java/LogrPortal/web/views/notificationConfiguration/private/notificationConfigurationListDataTable.xhtml new file mode 100644 index 000000000..45ead4b00 --- /dev/null +++ b/src/java/LogrPortal/web/views/notificationConfiguration/private/notificationConfigurationListDataTable.xhtml @@ -0,0 +1,66 @@ + + + + + + + + + + + + + + + + + + + + + +
+ + +
+
+
+ + + + + + + + + + + + + + + +
+ +
diff --git a/src/java/LogrPortal/web/views/notificationConfiguration/private/notificationConfigurationListView.xhtml b/src/java/LogrPortal/web/views/notificationConfiguration/private/notificationConfigurationListView.xhtml new file mode 100644 index 000000000..7fbf8e864 --- /dev/null +++ b/src/java/LogrPortal/web/views/notificationConfiguration/private/notificationConfigurationListView.xhtml @@ -0,0 +1,25 @@ + + + + + +
+ +
+ + + + + + + +
diff --git a/src/java/LogrPortal/web/views/notificationConfiguration/private/providerInstructionsDialog.xhtml b/src/java/LogrPortal/web/views/notificationConfiguration/private/providerInstructionsDialog.xhtml new file mode 100644 index 000000000..7a6b3f985 --- /dev/null +++ b/src/java/LogrPortal/web/views/notificationConfiguration/private/providerInstructionsDialog.xhtml @@ -0,0 +1,37 @@ + + + + + + + + + + +
+ +
+
+ + + + +
+ +
diff --git a/src/java/LogrPortal/web/views/notificationConfiguration/private/unsubscribeNotificationDescription.xhtml b/src/java/LogrPortal/web/views/notificationConfiguration/private/unsubscribeNotificationDescription.xhtml new file mode 100644 index 000000000..34854a600 --- /dev/null +++ b/src/java/LogrPortal/web/views/notificationConfiguration/private/unsubscribeNotificationDescription.xhtml @@ -0,0 +1,22 @@ + + + + + +

#{descriptionPrefix} + #{currentConfiguration.unsubscribeHandlerConfigKey.displayName} + notifications for configuration + #{currentConfiguration.name}. +

+ +

+ #{currentConfiguration.unsubscribeHandlerConfigKey.description} +

+
+ +
diff --git a/src/java/LogrPortal/web/views/notificationConfiguration/private/unsubscribeOtherUserNote.xhtml b/src/java/LogrPortal/web/views/notificationConfiguration/private/unsubscribeOtherUserNote.xhtml new file mode 100644 index 000000000..ebd666cad --- /dev/null +++ b/src/java/LogrPortal/web/views/notificationConfiguration/private/unsubscribeOtherUserNote.xhtml @@ -0,0 +1,16 @@ + + + + + + +

Note: You are #{noteText} + #{currentConfiguration.username}

+
+ +
diff --git a/src/java/LogrPortal/web/views/notificationConfiguration/private/unsubscribeSettingsButton.xhtml b/src/java/LogrPortal/web/views/notificationConfiguration/private/unsubscribeSettingsButton.xhtml new file mode 100644 index 000000000..0b038618f --- /dev/null +++ b/src/java/LogrPortal/web/views/notificationConfiguration/private/unsubscribeSettingsButton.xhtml @@ -0,0 +1,14 @@ + + + + + + + diff --git a/src/java/LogrPortal/web/views/notificationConfiguration/unsubscribe.xhtml b/src/java/LogrPortal/web/views/notificationConfiguration/unsubscribe.xhtml new file mode 100644 index 000000000..9f0ae24f8 --- /dev/null +++ b/src/java/LogrPortal/web/views/notificationConfiguration/unsubscribe.xhtml @@ -0,0 +1,117 @@ + + + + + + + + + + + : Unsubscribe from Notifications + + + + + + + +
+
+

Unsubscribe from Notifications

+
+ + + + +

You must be logged in to manage notification preferences.

+

Please log in and try again.

+
+
+ + + + + + +

#{currentConfiguration.unsubscribeError}

+
+
+ + + + + + + + + + + + + + + + + + + +

You are about to disable the following notification type:

+ + + + + + + + + + + + +

Are you sure you want to unsubscribe?

+ + + + +
+
+ + + + + + + + + + + +

You can re-enable this notification type from your settings.

+ + +
+
+ +
+ +
+
+
+ +
diff --git a/src/java/LogrPortal/web/views/userInfo/edit.xhtml b/src/java/LogrPortal/web/views/userInfo/edit.xhtml index a379b7497..f0678a8d1 100644 --- a/src/java/LogrPortal/web/views/userInfo/edit.xhtml +++ b/src/java/LogrPortal/web/views/userInfo/edit.xhtml @@ -31,18 +31,27 @@ See LICENSE file.
- + - + - + + + + + + + + diff --git a/src/java/LogrPortal/web/views/userInfo/view.xhtml b/src/java/LogrPortal/web/views/userInfo/view.xhtml index b286d41e9..d4a6a0975 100644 --- a/src/java/LogrPortal/web/views/userInfo/view.xhtml +++ b/src/java/LogrPortal/web/views/userInfo/view.xhtml @@ -34,14 +34,22 @@ See LICENSE file.
- - + + + + + + +
diff --git a/src/lib/mqtt-rar-0.8.0.rar b/src/lib/mqtt-rar-0.8.0.rar new file mode 100644 index 000000000..5f21fdcae Binary files /dev/null and b/src/lib/mqtt-rar-0.8.0.rar differ diff --git a/support/bin/install_netbeans.sh b/support/bin/install_netbeans.sh index 3a9755596..8ff73e496 100755 --- a/support/bin/install_netbeans.sh +++ b/support/bin/install_netbeans.sh @@ -8,7 +8,7 @@ CDB_HOSTNAME=`hostname -f` NETBEANS_VERSION=16 NETBEANS_ZIP_FILE=netbeans-$NETBEANS_VERSION-bin.zip -DOWNLOAD_URL=https://dlcdn.apache.org/netbeans/netbeans/$NETBEANS_VERSION/$NETBEANS_ZIP_FILE +DOWNLOAD_URL=https://archive.apache.org/dist/netbeans/netbeans/$NETBEANS_VERSION/$NETBEANS_ZIP_FILE currentDir=`pwd` cd `dirname $0`/.. && topDir=`pwd` diff --git a/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/async-test-configuration.md b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/async-test-configuration.md new file mode 100644 index 000000000..0db9362e0 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/async-test-configuration.md @@ -0,0 +1,6 @@ +--- +globs: '["**/pytest.ini", "**/pyproject.toml", "**/test_*.py"]' +alwaysApply: false +--- + +Always include pytest-asyncio in dev dependencies and configure it properly in pytest.ini with asyncio_mode = auto and asyncio_default_fixture_loop_scope = function. Mark async test functions with @pytest.mark.asyncio decorator. \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/conda-python-environment.md b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/conda-python-environment.md new file mode 100644 index 000000000..c229515b3 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/conda-python-environment.md @@ -0,0 +1,12 @@ +--- +alwaysApply: true +--- + +Always use the .conda/bin Python environment for terminal executions. When running Python-related commands: +1. 
ALWAYS execute commands from the project root directory to ensure relative conda paths work correctly +2. Use full paths: '.conda/bin/python' instead of 'python', '.conda/bin/pip' instead of 'pip', '.conda/bin/pytest' instead of 'pytest' +3. For make commands and build tools that invoke Python, either: + - Set PATH explicitly: 'PATH=.conda/bin:$PATH make ...' + - Or use absolute paths by prefixing with workspace path +4. Never use system Python or assume 'python' is in PATH +5. Always verify you're in the project root before executing commands \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/default-topic-pattern.md b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/default-topic-pattern.md new file mode 100644 index 000000000..e11a589ee --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/default-topic-pattern.md @@ -0,0 +1,7 @@ +--- +globs: "**/*.py" +regex: class.*MQTTHandler +alwaysApply: false +--- + +When creating MQTTHandler subclasses, do not override the topic_pattern property unless there's a specific need to limit the topics. The default pattern "bely/#" allows the handler to receive all BELY events, and the framework automatically routes events to the appropriate handler methods. Only override topic_pattern for performance optimization or when you explicitly want to limit the handler to specific topics. \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/documentation-standards.md b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/documentation-standards.md new file mode 100644 index 000000000..12961e94d --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/documentation-standards.md @@ -0,0 +1,6 @@ +--- +globs: '["docs/*.md", "*.md"]' +alwaysApply: false +--- + +Keep documentation simple and practical. 
Use clear examples, avoid jargon, and focus on what users need to know. Each doc file should have a clear purpose: getting started, API reference, troubleshooting, etc. Include code examples that can be copy-pasted. Link between related docs. \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/email-threading-implementation.md b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/email-threading-implementation.md new file mode 100644 index 000000000..1c9fd7c5c --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/email-threading-implementation.md @@ -0,0 +1,14 @@ +--- +globs: examples/handlers/apprise_smart_notification/**/*.py +description: Guidelines for maintaining email threading functionality in the + Apprise Smart Notification Handler +alwaysApply: false +--- + +When modifying the Apprise Smart Notification Handler: +1. Email notifications must use threading headers (In-Reply-To, References, Thread-Topic) to group related messages +2. Subject lines for email notifications should follow the pattern "Re: Log: [Document Name]" with optional suffixes like "[Entry Updated]" or "[by username]" +3. Non-email notifications should use descriptive subjects with emojis +4. The EmailThreadingStrategy class should detect email URLs and only apply threading to email notifications +5. Test assertions must account for different subject formats between email and non-email notifications +6. 
Mock notification tracking functions in tests must accept an optional headers parameter for email threading support \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/pydantic-v2-standards.md b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/pydantic-v2-standards.md new file mode 100644 index 000000000..42d209369 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/pydantic-v2-standards.md @@ -0,0 +1,7 @@ +--- +globs: '["**/models.py", "**/*.py"]' +regex: BaseModel +alwaysApply: false +--- + +Always use Pydantic v2 ConfigDict instead of the deprecated Config class. Use model_config = ConfigDict(...) at the class level. For field aliases, use Field(alias="...") and set populate_by_name=True in ConfigDict to allow both field name and alias during parsing. \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/python-import-resolution-for-tests.md b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/python-import-resolution-for-tests.md new file mode 100644 index 000000000..7a30a7d3a --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/python-import-resolution-for-tests.md @@ -0,0 +1,22 @@ +--- +globs: "**/*.py" +description: Apply when fixing Python import errors in test files that are in + subdirectories trying to import from parent directories +alwaysApply: false +--- + +When Python test files need to import modules from parent directories, use try/except blocks in the module being imported to handle both relative imports (when used as a package) and absolute imports (when imported directly from tests). 
Example: +```python +try: + # Try relative imports first (when used as a package) + from .submodule import Component +except ImportError: + # Fall back to absolute imports (when imported directly from tests) + from submodule import Component +``` +Also ensure test files add the parent directory to sys.path before importing: +```python +handler_path = Path(__file__).parent.parent +if handler_path.exists(): + sys.path.insert(0, str(handler_path)) +``` \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/use-specific-handler-methods.md b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/use-specific-handler-methods.md new file mode 100644 index 000000000..6b71da31c --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/.continue/rules/use-specific-handler-methods.md @@ -0,0 +1,7 @@ +--- +globs: '["**/*.md", "**/handlers/*.py", "**/examples/*.py"]' +regex: MQTTHandler|handle.*event|handler.*example +alwaysApply: false +--- + +Always encourage users to implement specific handler methods like handle_log_entry_add, handle_log_entry_update, etc. instead of the generic handle method. Show HybridEventHandler for multi-event handlers. The framework automatically routes events to the correct typed method based on the event type. 
\ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/.gitignore b/tools/developer_tools/bely-mqtt-message-broker/.gitignore new file mode 100644 index 000000000..dd5118ff2 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/.gitignore @@ -0,0 +1,140 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +Pipfile.lock + +# PEP 582 +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# IDE +.vscode/ +.idea/ +*.swp +*.swo +*~ +.DS_Store + +# Project specific +.env.local +.env.*.local +*.pid + +# Conda build artifacts +conda-bld/ +.conda/ diff --git a/tools/developer_tools/bely-mqtt-message-broker/CHANGELOG.md b/tools/developer_tools/bely-mqtt-message-broker/CHANGELOG.md 
new file mode 100644 index 000000000..eb7361468 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/CHANGELOG.md @@ -0,0 +1,28 @@ +# Changelog + +All notable changes to the BELY MQTT Framework will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [0.1.0] - 2024-01-01 + +### Added +- Initial release +- Pluggable handler system for MQTT events +- Pydantic models for all BELY event types +- MQTT topic pattern matching with wildcards +- BELY API client integration +- CLI interface with `start` and `version` commands +- Async/await support +- Comprehensive logging +- Configuration via command-line options +- Handler hot-reloading support +- Systemd service example +- Documentation and examples + +### Security +- Secure MQTT authentication support +- API key handling for BELY API + +[0.1.0]: https://github.com/bely-org/bely-mqtt-framework/releases/tag/v0.1.0 \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/LICENSE b/tools/developer_tools/bely-mqtt-message-broker/LICENSE new file mode 100644 index 000000000..194aa8022 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/LICENSE @@ -0,0 +1,23 @@ +Copyright © 2025, UChicago Argonne, LLC +All Rights Reserved +Software Name: Best Electronic Logbook Yet (BELY) +By: UChicago Argonne, LLC +OPEN SOURCE LICENSE (MIT) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + + * The above copyright notice and this permission notice 
shall be included in + all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/tools/developer_tools/bely-mqtt-message-broker/Makefile b/tools/developer_tools/bely-mqtt-message-broker/Makefile new file mode 100644 index 000000000..8d9fc01c0 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/Makefile @@ -0,0 +1,128 @@ +.PHONY: help install install-dev test test-handlers test-apprise test-apprise-integration test-all lint format fix type-check clean docs quality quality-fix + +help: + @echo "BELY MQTT Framework - Development Commands" + @echo "" + @echo "Installation:" + @echo " make install Install the package" + @echo " make install-dev Install with development dependencies" + @echo "" + @echo "Testing:" + @echo " make test Run core framework tests" + @echo " make test-handlers Run all handler tests" + @echo " make test-apprise Run Apprise handler unit tests" + @echo " make test-apprise-integration Run Apprise integration tests" + @echo " make test-all Run all tests (framework + handlers)" + @echo " make test-cov Run all tests with coverage" + @echo "" + @echo "Code Quality:" + @echo " make lint Run linting checks" + @echo " make format Format code with black" + @echo " make fix Auto-fix formatting and linting issues" + @echo " make type-check Run type checking with mypy" + @echo " make type-check-handlers Type check example handlers" + @echo " make quality Run all quality checks" + @echo " make quality-fix Fix issues then run quality checks" + @echo "" + @echo "Utilities:" + 
@echo " make clean Clean build artifacts" + @echo " make docs Build documentation" + @echo " make run-example Run example handler" + +install: + pip install -e . + +install-dev: + pip install -e ".[dev]" + +test: + pytest tests/ + +test-handlers: + @echo "Running Apprise Smart Notification Handler tests..." + pytest examples/handlers/apprise_smart_notification/test/ \ + --asyncio-mode=auto -v + +test-apprise: + @echo "Running Apprise handler unit tests..." + pytest examples/handlers/apprise_smart_notification/test/test_handler.py --asyncio-mode=auto -v + +test-apprise-integration: + @echo "Running Apprise handler integration tests..." + pytest examples/handlers/apprise_smart_notification/test/test_integration.py --asyncio-mode=auto -v + +test-all: test test-handlers + +test-cov: + pytest tests/ \ + examples/handlers/apprise_smart_notification/test/ \ + --cov=src/bely_mqtt \ + --cov=examples/handlers/apprise_smart_notification \ + --cov-report=html \ + --cov-report=term-missing \ + --asyncio-mode=auto + +lint: + ruff check src/ tests/ examples/handlers/apprise_smart_notification/ + black --check src/ tests/ examples/handlers/apprise_smart_notification/ + +format: + black src/ tests/ examples/handlers/apprise_smart_notification/ + ruff check --fix src/ tests/ examples/handlers/apprise_smart_notification/ + +type-check: + mypy src/bely_mqtt + @echo "Type checking apprise_smart_notification handler..." + mypy examples/handlers/apprise_smart_notification --ignore-missing-imports + +type-check-handlers: + @echo "Type checking all example handlers..." + mypy examples/handlers/apprise_smart_notification --ignore-missing-imports + # Add other handlers here as they are converted to packages + +quality: lint type-check test-all + +fix: + @echo "Auto-fixing code formatting and linting issues..." + @echo "Running black formatter..." + @black src/ tests/ examples/handlers/apprise_smart_notification/ + @echo "Running ruff auto-fixes..." 
+ @-ruff check --fix src/ tests/ examples/handlers/apprise_smart_notification/ + @echo "" + @echo "Auto-fix complete. Run 'make lint' to see remaining issues." + +quality-fix: fix + @echo "Running quality checks after fixes..." + @echo "=======================================" + @$(MAKE) quality || (echo ""; echo "Some issues could not be automatically fixed. Please review the errors above."; exit 1) + +clean: + find . -type d -name __pycache__ -exec rm -rf {} + 2>/dev/null || true + find . -type f -name "*.pyc" -delete + find . -type d -name "*.egg-info" -exec rm -rf {} + 2>/dev/null || true + find . -type d -name ".pytest_cache" -exec rm -rf {} + 2>/dev/null || true + find . -type d -name ".mypy_cache" -exec rm -rf {} + 2>/dev/null || true + find . -type d -name ".ruff_cache" -exec rm -rf {} + 2>/dev/null || true + find . -type d -name "htmlcov" -exec rm -rf {} + 2>/dev/null || true + find . -type f -name ".coverage" -delete + rm -rf build/ dist/ + +docs: + @echo "Documentation is in docs/ directory" + @echo "Main files:" + @echo " - README.md: Overview and API reference" + @echo " - docs/QUICKSTART.md: Quick start guide" + @echo " - docs/HANDLER_DEVELOPMENT.md: Handler development guide" + @echo " - docs/ARCHITECTURE.md: Architecture documentation" + +run-example: + @echo "Starting BELY MQTT Framework with example handlers..." + bely-mqtt start \ + --broker-host localhost \ + --broker-port 1883 \ + --handlers-dir ./examples/handlers \ + --topic "bely/#" \ + --log-level INFO + +list-handlers: + bely-mqtt list-handlers --handlers-dir ./examples/handlers diff --git a/tools/developer_tools/bely-mqtt-message-broker/README.md b/tools/developer_tools/bely-mqtt-message-broker/README.md new file mode 100644 index 000000000..e9d4e62a8 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/README.md @@ -0,0 +1,16 @@ +# BELY MQTT Framework + +A pluggable Python framework for handling MQTT events from BELY (Best Electronic Logbook Yet). 
+ +## Features + +- 🔌 **Pluggable Handlers** - Create custom handlers for specific MQTT topics +- 📦 **Type-Safe Models** - Pydantic models for all BELY event types +- 🎯 **Topic Matching** - Support for MQTT wildcards (`+`, `#`) +- 🔗 **API Integration** - Optional BELY API client for additional data +- 🚀 **Async Support** - Built on async/await for high performance +- 📢 **Notifications** - Send alerts via email, Slack, Discord, and more + +## Documentation + +For more details, see the [docs](./docs) directory. \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/README.md b/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/README.md new file mode 100644 index 000000000..5af3c8b03 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/README.md @@ -0,0 +1,134 @@ +# Conda Recipe for BELY MQTT Framework + +This directory contains the conda recipe for building the BELY MQTT Framework package. + +## Prerequisites + +- Conda or Miniconda installed +- conda-build package: `conda install conda-build` +- anaconda-client (for uploading): `conda install anaconda-client` + +## Building the Package + +### Quick Build + +```bash +# Run the build script +./conda-recipe/build_conda_package.sh +``` + +### Manual Build + +```bash +# Set build output directory (important if running from conda env in project) +export CONDA_BLD_PATH="${HOME}/conda-bld" + +# Build for current platform +conda build conda-recipe/ --output-folder "${CONDA_BLD_PATH}" + +# Build with specific Python version +conda build conda-recipe/ --python 3.11 --output-folder "${CONDA_BLD_PATH}" + +# Build for all Python versions defined in conda_build_config.yaml +conda build conda-recipe/ --variants --output-folder "${CONDA_BLD_PATH}" +``` + +## Testing the Package + +The package includes automated tests that run during the build process: +- Import tests for all modules +- CLI command tests +- Basic functionality tests + +## Uploading to 
Private Repository + +### To Anaconda Cloud (Private Channel) + +```bash +# Login to Anaconda Cloud +anaconda login + +# Upload the package +anaconda upload --user YOUR_ORG --channel YOUR_CHANNEL /path/to/package.tar.bz2 + +# Upload all built packages +anaconda upload --user YOUR_ORG --channel YOUR_CHANNEL ~/conda-bld/**/*.tar.bz2 +``` + +### To Private Conda Server + +```bash +# Example for Artifactory +curl -u username:password -T /path/to/package.tar.bz2 \ + "https://your-artifactory.com/artifactory/conda-local/linux-64/package.tar.bz2" +``` + +## Installing from Private Repository + +### From Anaconda Cloud + +```bash +# Add your private channel +conda config --add channels https://conda.anaconda.org/YOUR_ORG/YOUR_CHANNEL + +# Install the package +conda install bely-mqtt-framework +``` + +### From Private Server + +```bash +# Add your private repository +conda config --add channels https://your-server.com/conda/channel + +# Install the package +conda install bely-mqtt-framework +``` + +## Package Variants + +The recipe builds packages for multiple Python versions: +- Python 3.9 +- Python 3.10 +- Python 3.11 +- Python 3.12 + +All packages are noarch (platform-independent). + +## Optional Dependencies + +To include optional dependencies (like apprise for notifications): + +```bash +# Install with apprise support +conda install bely-mqtt-framework apprise +``` + +## Troubleshooting + +### Build Failures + +1. Check conda-build is up to date: `conda update conda-build` +2. Clear conda cache: `conda clean --all` +3. Check build logs in `~/conda-bld/work/` + +### "Can't merge/copy source into subdirectory of itself" Error + +This occurs when trying to build conda packages with the build directory inside the source tree. The build script automatically handles this by: + +1. Using a temporary directory for the build process +2. Copying the final artifacts to `./conda-bld` in your project +3. 
Cleaning up the temporary directory + +This allows you to keep build artifacts locally while avoiding the circular reference issue. + +### Import Errors + +1. Ensure all dependencies are available in your conda channels +2. Check for conflicting packages: `conda list | grep pydantic` + +### Upload Issues + +1. Verify anaconda-client is installed: `conda install anaconda-client` +2. Check authentication: `anaconda whoami` +3. Verify channel permissions \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/build_conda_package.sh b/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/build_conda_package.sh new file mode 100755 index 000000000..96ee31437 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/build_conda_package.sh @@ -0,0 +1,91 @@ +#!/bin/bash +# Build script for creating conda packages + +set -e + +# Colors for output +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +RED='\033[0;31m' +NC='\033[0m' # No Color + +echo -e "${GREEN}Building BELY MQTT Framework conda package...${NC}" + +# Check if conda-build is installed +if ! command -v conda-build &> /dev/null; then + echo -e "${RED}conda-build is not installed. Installing...${NC}" + conda install -y conda-build +fi + +# Clean previous builds +echo -e "${YELLOW}Cleaning previous builds...${NC}" +rm -rf build/ dist/ *.egg-info/ +conda build purge + +# Set up local directory for final artifacts +SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +PROJECT_ROOT="$( cd "${SCRIPT_DIR}/.." 
&& pwd )" +LOCAL_CONDA_BLD="${PROJECT_ROOT}/conda-bld" + +# Remove existing conda-bld directory to ensure clean build +if [ -d "${LOCAL_CONDA_BLD}" ]; then + echo -e "${YELLOW}Removing existing conda-bld directory...${NC}" + rm -rf "${LOCAL_CONDA_BLD}" +fi +mkdir -p "${LOCAL_CONDA_BLD}" + +# Use a temporary build directory outside the project +TEMP_BUILD_DIR=$(mktemp -d "${TMPDIR:-/tmp}/conda-build.XXXXXX") +export CONDA_BLD_PATH="${TEMP_BUILD_DIR}" + +echo -e "${YELLOW}Using temporary build directory: ${CONDA_BLD_PATH}${NC}" +echo -e "${YELLOW}Final packages will be copied to: ${LOCAL_CONDA_BLD}${NC}" + +# Build the package +echo -e "${YELLOW}Building conda package...${NC}" +conda build conda-recipe/ --output-folder "${CONDA_BLD_PATH}" + +# Get the package path +PACKAGE_PATH=$(conda build conda-recipe/ --output-folder "${CONDA_BLD_PATH}" --output) + +# Copy build artifacts to local directory +echo -e "${YELLOW}Copying build artifacts to project directory...${NC}" +cp -r "${CONDA_BLD_PATH}"/noarch "${LOCAL_CONDA_BLD}/" 2>/dev/null || true +cp -r "${CONDA_BLD_PATH}"/osx-arm64 "${LOCAL_CONDA_BLD}/" 2>/dev/null || true +cp -r "${CONDA_BLD_PATH}"/linux-64 "${LOCAL_CONDA_BLD}/" 2>/dev/null || true +cp -r "${CONDA_BLD_PATH}"/win-64 "${LOCAL_CONDA_BLD}/" 2>/dev/null || true + +# Update package path to local directory +PACKAGE_NAME=$(basename "${PACKAGE_PATH}") +PACKAGE_PATH="${LOCAL_CONDA_BLD}/noarch/${PACKAGE_NAME}" + +# Clean up temporary directory +rm -rf "${TEMP_BUILD_DIR}" + +echo -e "${GREEN}Package built successfully!${NC}" +echo -e "${GREEN}Package location: ${PACKAGE_PATH}${NC}" + +# Optional: Convert to other platforms (skip for noarch packages) +if [[ ! 
${PACKAGE_PATH} == *"noarch"* ]]; then + echo -e "${YELLOW}Converting package for other platforms...${NC}" + # Use temp directory for conversion, then copy results + TEMP_CONVERT_DIR=$(mktemp -d "${TMPDIR:-/tmp}/conda-convert.XXXXXX") + conda convert -p all ${PACKAGE_PATH} -o "${TEMP_CONVERT_DIR}" + cp -r "${TEMP_CONVERT_DIR}"/* "${LOCAL_CONDA_BLD}/" 2>/dev/null || true + rm -rf "${TEMP_CONVERT_DIR}" +else + echo -e "${YELLOW}Package is noarch - no platform conversion needed${NC}" +fi + +echo -e "${GREEN}Build complete!${NC}" +echo "" +echo "To upload to your private conda channel:" +echo " anaconda upload ${PACKAGE_PATH}" +echo "" +echo "To install locally:" +echo " conda install -c local bely-mqtt-framework" +echo "" +echo "Or install from the local build directory:" +echo " conda install -c file://${LOCAL_CONDA_BLD} bely-mqtt-framework" +echo "" +echo "Package files are located in: ${LOCAL_CONDA_BLD}" \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/conda_build_config.yaml b/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/conda_build_config.yaml new file mode 100644 index 000000000..d54e73aad --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/conda_build_config.yaml @@ -0,0 +1,14 @@ +# Conda build configuration +# This file defines build variants and pinnings + +python: + - 3.9 + - 3.10 + - 3.11 + - 3.12 + +# Pin run dependencies to be compatible with the build dependencies +pin_run_as_build: + python: + min_pin: x.x + max_pin: x.x \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/meta.yaml b/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/meta.yaml new file mode 100644 index 000000000..d98e74294 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/meta.yaml @@ -0,0 +1,65 @@ +{% set name = "bely-mqtt-framework" %} +{% set version = "0.1.0" %} + +package: + name: {{ name|lower }} + 
version: {{ version }} + +source: + path: .. + +build: + number: 0 + noarch: python + script: {{ PYTHON }} -m pip install . -vv + entry_points: + - bely-mqtt = bely_mqtt.cli:cli + +requirements: + host: + - python >=3.9 + - pip + - setuptools >=65.0 + - wheel + run: + - python >=3.9 + - click >=8.1.0 + - paho-mqtt >=2.0.0 + - pydantic >=2.0.0 + - python-dotenv >=1.0.0 + - pluggy >=1.3.0 + +test: + imports: + - bely_mqtt + - bely_mqtt.models + - bely_mqtt.events + - bely_mqtt.plugin + - bely_mqtt.mqtt_client + commands: + - bely-mqtt --help + - bely-mqtt --version + requires: + - pytest >=7.0.0 + - pytest-asyncio >=0.21.0 + +about: + home: https://github.com/bely-org/bely-mqtt-framework + license: MIT + license_family: MIT + license_file: LICENSE + summary: Pluggable Python framework for handling BELY MQTT events + description: | + BELY MQTT Framework is a pluggable Python framework for handling MQTT events + from BELY (Best Electronic Logbook Yet). It provides: + - Pluggable handler system for MQTT topics + - Type-safe models for BELY events + - Integration with BELY API for additional data + - CLI for easy configuration and management + - Async support for high performance + doc_url: https://github.com/bely-org/bely-mqtt-framework/tree/main/docs + dev_url: https://github.com/bely-org/bely-mqtt-framework + +extra: + recipe-maintainers: + - your-github-username \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/post-link.sh b/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/post-link.sh new file mode 100644 index 000000000..36f02e49d --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/post-link.sh @@ -0,0 +1,15 @@ +#!/bin/bash +# Post-link script that runs after package installation + +echo "" +echo "BELY MQTT Framework has been successfully installed!" +echo "" +echo "To get started:" +echo " 1. Create a handlers directory: mkdir -p handlers" +echo " 2. 
Create your first handler (see examples)" +echo " 3. Run: bely-mqtt start --handlers-dir ./handlers" +echo "" +echo "For more information:" +echo " - Documentation: https://github.com/bely-org/bely-mqtt-framework/tree/main/docs" +echo " - Examples: https://github.com/bely-org/bely-mqtt-framework/tree/main/examples" +echo "" \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/pre-unlink.sh b/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/pre-unlink.sh new file mode 100644 index 000000000..8c96312f8 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/pre-unlink.sh @@ -0,0 +1,10 @@ +#!/bin/bash +# Pre-unlink script that runs before package removal + +echo "Removing BELY MQTT Framework..." + +# Clean up any cached files +if [ -d "$HOME/.cache/bely-mqtt" ]; then + echo "Cleaning cache directory..." + rm -rf "$HOME/.cache/bely-mqtt" +fi \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/run_test.py b/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/run_test.py new file mode 100644 index 000000000..639b97f18 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/run_test.py @@ -0,0 +1,81 @@ +"""Test script for conda package.""" + +import sys +import subprocess + +def test_imports(): + """Test that all modules can be imported.""" + print("Testing imports...") + + try: + import bely_mqtt + print(f"✓ bely_mqtt version: {bely_mqtt.__version__}") + + from bely_mqtt import MQTTHandler, BelyMQTTClient, PluginManager + print("✓ Core classes imported") + + from bely_mqtt.models import ( + LogEntryAddEvent, + LogEntryUpdateEvent, + MQTTMessage + ) + print("✓ Event models imported") + + from bely_mqtt.events import EventType + print("✓ Event types imported") + + except ImportError as e: + print(f"✗ Import failed: {e}") + return False + + return True + +def test_cli(): + """Test CLI commands.""" + 
print("\nTesting CLI...") + + # Test help command + result = subprocess.run( + [sys.executable, "-m", "bely_mqtt.cli", "--help"], + capture_output=True, + text=True + ) + if result.returncode != 0: + print(f"✗ CLI help failed: {result.stderr}") + return False + print("✓ CLI help works") + + # Test version command + result = subprocess.run( + [sys.executable, "-m", "bely_mqtt.cli", "--version"], + capture_output=True, + text=True + ) + if result.returncode != 0: + print(f"✗ CLI version failed: {result.stderr}") + return False + print(f"✓ CLI version: {result.stdout.strip()}") + + return True + +def main(): + """Run all tests.""" + print("Running conda package tests...\n") + + tests_passed = True + + if not test_imports(): + tests_passed = False + + if not test_cli(): + tests_passed = False + + if tests_passed: + print("\n✓ All tests passed!") + return 0 + else: + print("\n✗ Some tests failed!") + return 1 + +if __name__ == "__main__": + sys.exit(main()) \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/variants.yaml b/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/variants.yaml new file mode 100644 index 000000000..bfb3b0865 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/conda-recipe/variants.yaml @@ -0,0 +1,19 @@ +# Build variants for different configurations +# This allows building multiple package variants in one go + +# Python versions to build for +python: + - 3.9 + - 3.10 + - 3.11 + - 3.12 + +# Optional: Build with different dependency versions +pydantic: + - 2.0 + - 2.5 + +# Optional: Build with/without optional dependencies +include_apprise: + - true + - false \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/config.example.yaml b/tools/developer_tools/bely-mqtt-message-broker/config.example.yaml new file mode 100644 index 000000000..10f929cc2 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/config.example.yaml @@ -0,0 
+1,25 @@ +# BELY MQTT Framework Configuration +# This file configures global settings and handler-specific parameters + +# Global configuration shared across all handlers +global: + # BELY API URL for querying additional information + bely_url: https://bely.gov/bely + +# Handler-specific configurations +handlers: + # Advanced logging handler with custom directory + AdvancedLoggingHandler: + logging_dir: /var/log/bely + + # Basic logging handler (no configuration needed) + LoggingHandler: {} + + # Notification handler with webhook + NotificationHandler: + webhook_url: https://example.com/webhook + + # Apprise smart notification handler + AppriseSmartNotificationHandler: + config: + config_path: /path/to/apprise_notification_config.yaml \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/docs/README.md b/tools/developer_tools/bely-mqtt-message-broker/docs/README.md new file mode 100644 index 000000000..5926f0f3f --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/docs/README.md @@ -0,0 +1,33 @@ +# BELY MQTT Framework Documentation + +This directory contains the complete documentation for the BELY MQTT Framework. + +## Documentation Structure + +### Getting Started +- **[index.md](index.md)** - Documentation home page with overview +- **[getting-started.md](getting-started.md)** - Quick start guide for new users +- **[examples.md](examples.md)** - Working examples of different handler types + +### Reference +- **[api-reference.md](api-reference.md)** - Complete API documentation +- **[configuration.md](configuration.md)** - All configuration options + +### Guides +- **[faq.md](faq.md)** - Frequently asked questions +- **[troubleshooting.md](troubleshooting.md)** - Common issues and solutions +- **[migration-guide.md](migration-guide.md)** - Migrating from raw MQTT clients + +## Key Concepts + +1. **Handlers** - Python classes that process specific MQTT events +2. 
**Event Models** - Type-safe Pydantic models for all BELY events +3. **Topic Patterns** - MQTT topic matching with wildcard support +4. **API Integration** - Optional BELY API client for enriched data + +## Quick Links + +- [Create your first handler](getting-started.md) +- [View example handlers](examples.md) +- [Configure the framework](configuration.md) +- [Troubleshoot issues](troubleshooting.md) \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/docs/api-reference.md b/tools/developer_tools/bely-mqtt-message-broker/docs/api-reference.md new file mode 100644 index 000000000..f5d7c4019 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/docs/api-reference.md @@ -0,0 +1,167 @@ +# API Reference + +## Core Classes + +### MQTTHandler + +Base class for all MQTT event handlers. + +```python +from bely_mqtt import MQTTHandler, LogEntryAddEvent + +class MyHandler(MQTTHandler): + # By default, subscribes to all BELY topics (bely/#) + # Override topic_pattern to subscribe to specific topics only + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + """Handle new log entry event.""" + pass +``` + +**Properties:** +- `topic_pattern` - MQTT topic pattern to match (default: `"bely/#"`) +- `logger` - Pre-configured logger instance +- `api_client` - Optional BELY API client (if configured) + +**Overriding Topic Pattern:** + +```python +class SpecificHandler(MQTTHandler): + @property + def topic_pattern(self) -> str: + """Subscribe to specific topics only.""" + return "bely/logEntry/Add" +``` + +**Event Handler Methods:** +- `handle_log_entry_add(event: LogEntryAddEvent)` - New log entries +- `handle_log_entry_update(event: LogEntryUpdateEvent)` - Updated entries +- `handle_log_entry_reply_add(event: LogEntryReplyAddEvent)` - New replies +- `handle_log_entry_reply_update(event: LogEntryReplyUpdateEvent)` - Updated replies +- `handle_log_reaction_add(event: LogReactionAddEvent)` - New reactions +- 
`handle_log_reaction_delete(event: LogReactionDeleteEvent)` - Deleted reactions + +**Utility Methods:** +- `topic_matches(topic: str) -> bool` - Check if topic matches pattern + +### HybridEventHandler + +Alias for MQTTHandler that emphasizes handling multiple event types. With the default topic pattern `"bely/#"`, all handlers can process multiple event types by implementing the appropriate handler methods. + +```python +from bely_mqtt import MQTTHandler, LogEntryAddEvent, LogEntryUpdateEvent + +class MultiHandler(MQTTHandler): + # Uses default topic_pattern "bely/#" + # Implements multiple event handler methods + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + self.logger.info(f"New entry: {event.description}") + + async def handle_log_entry_update(self, event: LogEntryUpdateEvent) -> None: + self.logger.info(f"Updated entry: {event.log_info.id}") +``` + +### MQTTMessage + +Represents an MQTT message. + +```python +from bely_mqtt import MQTTMessage + +message = MQTTMessage( + topic="bely/logEntry/Add", + payload={"description": "Entry added"}, + raw_payload='{"description": "Entry added"}' +) +``` + +**Attributes:** +- `topic: str` - MQTT topic +- `payload: Dict[str, Any]` - Parsed JSON payload +- `raw_payload: str` - Original payload string + +## Event Models + +### LogEntryAddEvent + +```python +from bely_mqtt import LogEntryAddEvent + +# Event is automatically parsed from message payload +async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + print(f"Entry ID: {event.log_info.id}") + print(f"Document: {event.parent_log_document_info.name}") + print(f"Added by: {event.event_triggered_by_username}") + print(f"Description: {event.description}") +``` + +### LogEntryUpdateEvent + +```python +from bely_mqtt import LogEntryUpdateEvent + +async def handle_log_entry_update(self, event: LogEntryUpdateEvent) -> None: + print(f"Updated entry: {event.log_info.id}") + print(f"Text diff: {event.text_diff}") +``` + +### 
LogEntryReplyAddEvent + +```python +from bely_mqtt import LogEntryReplyAddEvent + +async def handle_log_entry_reply_add(self, event: LogEntryReplyAddEvent) -> None: + print(f"Reply to entry: {event.parent_log_info.id}") + print(f"Reply ID: {event.log_info.id}") +``` + +## Topic Patterns + +By default, handlers subscribe to `bely/#` (all BELY events). The framework automatically routes events to the appropriate handler methods based on the event type. + +MQTT topic patterns support wildcards: + +- `+` - Single level wildcard +- `#` - Multi-level wildcard + +Examples: +- `bely/#` - Matches all BELY events (default) +- `bely/logEntry/Add` - Exact match +- `bely/logEntry/+` - Matches Add, Update, Delete +- `bely/+/Add` - Matches any entity Add events + +**When to Override the Default:** +- Performance optimization - reduce unnecessary message processing +- Clarity - make handler's purpose explicit +- Testing - isolate specific event types + +## CLI Commands + +### start + +Start the MQTT framework: + +```bash +bely-mqtt start [OPTIONS] +``` + +**Options:** +- `--handlers-dir PATH` - Directory containing handlers +- `--broker-host TEXT` - MQTT broker host +- `--broker-port INTEGER` - MQTT broker port +- `--username TEXT` - MQTT username +- `--password TEXT` - MQTT password +- `--log-level TEXT` - Logging level +- `--api-url TEXT` - BELY API URL +- `--api-key TEXT` - BELY API key + +### version + +Show version: + +```bash +bely-mqtt version +``` + + diff --git a/tools/developer_tools/bely-mqtt-message-broker/docs/configuration.md b/tools/developer_tools/bely-mqtt-message-broker/docs/configuration.md new file mode 100644 index 000000000..5d9b0a545 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/docs/configuration.md @@ -0,0 +1,165 @@ +# Configuration + +The BELY MQTT Framework can be configured through command-line options, environment variables, or configuration files.
+ +## Command-Line Options + +```bash +bely-mqtt start [OPTIONS] +``` + +### MQTT Options + +- `--broker-host TEXT` - MQTT broker hostname (default: localhost) +- `--broker-port INTEGER` - MQTT broker port (default: 1883) +- `--username TEXT` - MQTT username for authentication +- `--password TEXT` - MQTT password for authentication +- `--topic TEXT` - MQTT topic pattern to subscribe (default: bely/#) + +### Handler Options + +- `--handlers-dir PATH` - Directory containing handler files (default: ./handlers) +- `--config PATH` - YAML configuration file for handlers + +### API Options + +- `--api-url TEXT` - BELY API base URL +- `--api-key TEXT` - BELY API authentication key + +### Logging Options + +- `--log-level TEXT` - Logging level: DEBUG, INFO, WARNING, ERROR (default: INFO) + +## Environment Variables + +All command-line options can be set via environment variables: + +```bash +export MQTT_BROKER_HOST=broker.example.com +export MQTT_BROKER_PORT=1883 +export MQTT_CLIENT_ID=bely-mqtt-client +export MQTT_USERNAME=myuser +export MQTT_PASSWORD=mypass +export BELY_API_URL=https://api.bely.dev +export BELY_API_KEY=your-api-key +export BELY_HANDLERS_DIR=./handlers +export BELY_CONFIG=./config.yaml +export LOG_LEVEL=DEBUG +``` + +## Configuration Files + +### Environment File (.env) + +Create a `.env` file in your project root for environment variables: + +```bash +# MQTT Configuration +MQTT_BROKER_HOST=localhost +MQTT_BROKER_PORT=1883 +MQTT_CLIENT_ID=bely-mqtt-client +MQTT_USERNAME= +MQTT_PASSWORD= + +# BELY API Configuration +BELY_API_URL=https://api.bely.dev +BELY_API_KEY=your-api-key-here + +# Logging +LOG_LEVEL=INFO + +# Handler Configuration +BELY_HANDLERS_DIR=./handlers +BELY_CONFIG=./config.yaml +``` + +### Handler Configuration (YAML) + +Handlers can be configured via a YAML file to provide both global and handler-specific settings: + +```yaml +# Global configuration shared across all handlers +global: + # BELY API URL for querying additional information + 
bely_url: https://bely.example.com/bely + # Add any other global parameters here + shared_param: value + +# Handler-specific configurations +handlers: + # Configure the AdvancedLoggingHandler + AdvancedLoggingHandler: + logging_dir: /var/log/bely + log_level: DEBUG + rotate_logs: true + max_size_mb: 100 + + # Configure the NotificationHandler + NotificationHandler: + webhook_url: https://hooks.slack.com/services/YOUR/WEBHOOK + enabled: true + timeout: 30 + + # Configure the AppriseSmartNotificationHandler + AppriseSmartNotificationHandler: + config_path: /path/to/apprise_notification_config.yaml +``` + +Use with: `--config config.yaml` + +## SSL/TLS Configuration + +For secure MQTT connections: + +```bash +bely-mqtt start \ + --mqtt-host broker.example.com \ + --mqtt-port 8883 \ + --mqtt-tls \ + --mqtt-ca-cert /path/to/ca.crt \ + --mqtt-client-cert /path/to/client.crt \ + --mqtt-client-key /path/to/client.key +``` + +## Logging Configuration + +### Log Levels + +- `DEBUG` - Detailed information for debugging +- `INFO` - General informational messages +- `WARNING` - Warning messages for potentially harmful situations +- `ERROR` - Error messages for serious problems + +### Log Format + +The default log format includes: +- Timestamp +- Log level +- Handler name +- Message + +Example: +``` +2024-01-01 12:00:00 INFO [LogHandler] New log entry added: ID=123 +``` + +## Production Configuration + +For production deployments: + +1. Use environment variables for sensitive data +2. Enable appropriate log level (INFO or WARNING) +3. Configure log rotation +4. Use SSL/TLS for MQTT connections +5. 
Set up monitoring and alerting + +Example production command: + +```bash +bely-mqtt start \ + --handlers-dir /opt/bely-handlers \ + --log-level INFO \ + --log-file /var/log/bely-mqtt/app.log \ + --mqtt-tls \ + --mqtt-ca-cert /etc/ssl/mqtt/ca.crt +``` \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/docs/examples.md b/tools/developer_tools/bely-mqtt-message-broker/docs/examples.md new file mode 100644 index 000000000..94798e1bd --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/docs/examples.md @@ -0,0 +1,217 @@ +# Examples + +## Basic Examples + +### Simple Log Entry Handler + +```python +from bely_mqtt import MQTTHandler, LogEntryAddEvent + +class SimpleHandler(MQTTHandler): + """Log new entries to console.""" + + # Uses default topic_pattern "bely/#" - receives all BELY events + # The framework automatically routes log entry add events to handle_log_entry_add + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + self.logger.info(f"New entry: {event.description}") +``` + +### Multi-Event Handler + +```python +from bely_mqtt import MQTTHandler, LogEntryAddEvent, LogEntryUpdateEvent + +class LogMonitor(MQTTHandler): + """Monitor all log entry changes.""" + + # Uses default topic_pattern "bely/#" - receives all BELY events + # Simply implement the handler methods for the events you care about + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + self.logger.info(f"NEW: {event.description[:50]}...") + + async def handle_log_entry_update(self, event: LogEntryUpdateEvent) -> None: + self.logger.info(f"UPDATED: Entry {event.log_info.id}") +``` + +### Specific Topic Handler + +```python +from bely_mqtt import MQTTHandler, LogEntryAddEvent, LogEntryUpdateEvent + +class LogEntryOnlyHandler(MQTTHandler): + """Monitor only log entry events (not replies, reactions, etc).""" + + @property + def topic_pattern(self) -> str: + return "bely/logEntry/+" # Override default to match 
only log entry events + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + self.logger.info(f"NEW: {event.description[:50]}...") + + async def handle_log_entry_update(self, event: LogEntryUpdateEvent) -> None: + self.logger.info(f"UPDATED: Entry {event.log_info.id}") +``` + +## Advanced Examples + +### API Integration + +```python +from bely_mqtt import MQTTHandler, LogEntryUpdateEvent + +class EnrichedHandler(MQTTHandler): + """Enrich events with API data.""" + + @property + def topic_pattern(self) -> str: + # Override default to only process update events (performance optimization) + return "bely/logEntry/Update" + + async def handle_log_entry_update(self, event: LogEntryUpdateEvent) -> None: + if self.api_client: + # Get full entry details + entry = await self.api_client.get_log_entry(event.log_info.id) + self.logger.info(f"Full entry data: {entry}") +``` + +### Notification Handler + +```python +from bely_mqtt import MQTTHandler, LogEntryReplyAddEvent +import aiohttp + +class NotificationHandler(MQTTHandler): + """Send notifications for replies.""" + + @property + def topic_pattern(self) -> str: + # Override to only process reply events (avoid unnecessary processing) + return "bely/logEntryReply/Add" + + async def handle_log_entry_reply_add(self, event: LogEntryReplyAddEvent) -> None: + # Don't notify about self-replies + if event.event_triggered_by_username == event.parent_log_info.entered_by_username: + return + + message = f"New reply from {event.event_triggered_by_username}" + await self.send_slack_notification(message) + + async def send_slack_notification(self, message: str) -> None: + webhook_url = "https://hooks.slack.com/services/YOUR/WEBHOOK" + async with aiohttp.ClientSession() as session: + await session.post(webhook_url, json={"text": message}) +``` + +### Reaction Tracker + +```python +from bely_mqtt import MQTTHandler, LogReactionAddEvent, LogReactionDeleteEvent +from collections import defaultdict + +class 
ReactionTracker(MQTTHandler): + """Track reactions on log entries.""" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.reaction_counts = defaultdict(lambda: defaultdict(int)) + + @property + def topic_pattern(self) -> str: + # Override to only process reaction events + return "bely/logReaction/+" + + async def handle_log_reaction_add(self, event: LogReactionAddEvent) -> None: + log_id = event.parent_log_info.id + emoji = event.log_reaction.reaction.emoji + self.reaction_counts[log_id][emoji] += 1 + + self.logger.info(f"Reaction {emoji} added to entry {log_id}") + + async def handle_log_reaction_delete(self, event: LogReactionDeleteEvent) -> None: + log_id = event.parent_log_info.id + emoji = event.log_reaction.reaction.emoji + if self.reaction_counts[log_id][emoji] > 0: + self.reaction_counts[log_id][emoji] -= 1 +``` + +### Error Handling Example + +```python +from bely_mqtt import MQTTHandler, LogEntryAddEvent +import asyncio + +class RobustHandler(MQTTHandler): + """Handler with comprehensive error handling.""" + + # Uses default topic_pattern "bely/#" + # Implements only the specific handler method needed + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + try: + # Process with timeout + await asyncio.wait_for( + self.process_entry(event), + timeout=30.0 + ) + except asyncio.TimeoutError: + self.logger.error(f"Timeout processing entry {event.log_info.id}") + except Exception as e: + self.logger.error(f"Error: {e}", exc_info=True) + + async def process_entry(self, event: LogEntryAddEvent) -> None: + # Your processing logic here + await asyncio.sleep(1) + self.logger.info(f"Processed entry {event.log_info.id}") +``` + +## Running the Examples + +1. Create a `handlers` directory +2. Copy example code to Python files in the directory +3. 
Run the framework: + +```bash +# Basic +bely-mqtt start --handlers-dir ./handlers + +# With API integration +bely-mqtt start \ + --handlers-dir ./handlers \ + --api-url https://api.bely.dev \ + --api-key your-api-key + +# With debug logging +bely-mqtt start \ + --handlers-dir ./handlers \ + --log-level DEBUG +``` + +## Testing Examples + +```python +import pytest +from bely_mqtt import LogEntryAddEvent +from handlers.simple_handler import SimpleHandler + +@pytest.mark.asyncio +async def test_simple_handler(caplog): + handler = SimpleHandler() + + event = LogEntryAddEvent( + description="Test entry", + event_timestamp="2024-01-01T00:00:00Z", + entity_name="Log", + entity_id=1, + event_triggered_by_username="testuser", + parent_log_document_info={"name": "Test Doc", "id": 1}, + log_info={"id": 1}, + logbook_list=[], + text_diff="+ Test entry" + ) + + await handler.handle_log_entry_add(event) + + assert "New entry: Test entry" in caplog.text +``` \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/docs/faq.md b/tools/developer_tools/bely-mqtt-message-broker/docs/faq.md new file mode 100644 index 000000000..1467f0dea --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/docs/faq.md @@ -0,0 +1,124 @@ +# Frequently Asked Questions + +## General Questions + +### What is BELY MQTT Framework? + +It's a Python framework that makes it easy to handle MQTT events from BELY (Best Electronic Logbook Yet). You write handlers that react to specific events like log entries being added or updated. + +### What Python versions are supported? + +Python 3.9, 3.10, 3.11, and 3.12 are supported. + +### Do I need to know MQTT? + +Basic understanding helps, but the framework handles most MQTT details for you. You just need to know about topics (like `bely/logEntry/Add`). + +## Handler Development + +### How do I handle multiple event types? 
+ +Use wildcards in your topic pattern and implement specific handler methods: + +```python +from bely_mqtt import HybridEventHandler, LogEntryAddEvent, LogEntryUpdateEvent + +class MultiHandler(HybridEventHandler): + @property + def topic_pattern(self) -> str: + return "bely/logEntry/+" # Handles Add, Update, Delete + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + # Handle new entries + pass + + async def handle_log_entry_update(self, event: LogEntryUpdateEvent) -> None: + # Handle updates + pass +``` + +### Can I use the BELY API in handlers? + +Yes! Configure the API when starting: + +```bash +bely-mqtt start --api-url https://api.bely.dev --api-key YOUR_KEY +``` + +Then use `self.api_client` in your handler. + +### How do I test my handlers? + +```python +import pytest +from bely_mqtt import LogEntryAddEvent +from my_handler import MyHandler + +@pytest.mark.asyncio +async def test_handler(): + handler = MyHandler() + event = LogEntryAddEvent( + description="Test entry", + event_timestamp="2024-01-01T00:00:00Z", + entity_name="Log", + entity_id=1, + event_triggered_by_username="testuser", + parent_log_document_info={"name": "Test Doc", "id": 1}, + log_info={"id": 1}, + logbook_list=[], + text_diff="+ Test entry" + ) + await handler.handle_log_entry_add(event) +``` + +## Troubleshooting + +### Handler not being called + +1. Check the topic pattern matches the MQTT topic +2. Verify the handler file is in the handlers directory +3. Check logs for errors: `--log-level DEBUG` + +### Connection refused + +1. Check MQTT broker is running +2. Verify host and port are correct +3. Check username/password if required + +### Import errors + +Make sure the framework is installed: + +```bash +pip install bely-mqtt-framework +``` + +## Performance + +### How many handlers can I run? + +The framework can handle hundreds of handlers. Each handler runs asynchronously, so they don't block each other. + +### Can I run multiple instances? 
+ +By default, no. The framework uses a file lock (`/tmp/bely-mqtt.lock`) to enforce single-instance operation and prevent duplicate message processing. If you attempt to start a second instance, it will exit with an error. + +If you need multiple instances (e.g., subscribing to different topics with separate handler sets), specify a different lock file for each: + +```bash +# Instance 1 +bely-mqtt start --lock-file /tmp/bely-mqtt-1.lock -t "bely/logEntry/#" + +# Instance 2 +bely-mqtt start --lock-file /tmp/bely-mqtt-2.lock -t "bely/logbook/#" +``` + +You can also set the lock file path via the `BELY_LOCK_FILE` environment variable. + +### Is it production ready? + +Yes! The framework includes: +- Error handling and recovery +- Logging +- Systemd service support +- Async processing \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/docs/getting-started.md b/tools/developer_tools/bely-mqtt-message-broker/docs/getting-started.md new file mode 100644 index 000000000..d7a906657 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/docs/getting-started.md @@ -0,0 +1,78 @@ +# Getting Started + +This guide will help you create your first MQTT handler for BELY events. 
+ +## Installation + +```bash +pip install bely-mqtt-framework +``` + +## Your First Handler + +Create a file `handlers/my_first_handler.py`: + +```python +from bely_mqtt import MQTTHandler, LogEntryAddEvent + +class MyFirstHandler(MQTTHandler): + """Logs when new entries are added.""" + + # By default, handlers subscribe to all BELY topics (bely/#) + # The framework automatically routes events to the appropriate handler methods + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + self.logger.info(f"New log entry: {event.description}") + self.logger.info(f"Added by: {event.event_triggered_by_username}") +``` + +### Subscribing to Specific Topics + +If you want to limit your handler to specific topics only (for performance or clarity), override the `topic_pattern` property: + +```python +class SpecificTopicHandler(MQTTHandler): + """Only handles log entry events.""" + + @property + def topic_pattern(self) -> str: + return "bely/logEntry/#" # Only log entry events + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + # Handle new entries + pass + + async def handle_log_entry_update(self, event: LogEntryUpdateEvent) -> None: + # Handle updates + pass +``` + +## Running the Framework + +```bash +# Start with your handler +bely-mqtt start --handlers-dir ./handlers + +# With custom MQTT broker +bely-mqtt start \ + --handlers-dir ./handlers \ + --mqtt-host broker.example.com \ + --mqtt-port 1883 +``` + +## Handler Methods + +The framework automatically calls the appropriate method based on the event type: + +- `handle_log_entry_add(event: LogEntryAddEvent)` - New log entries +- `handle_log_entry_update(event: LogEntryUpdateEvent)` - Updated entries +- `handle_log_entry_reply_add(event: LogEntryReplyAddEvent)` - New replies +- `handle_log_reaction_add(event: LogReactionAddEvent)` - New reactions + +## What's Next? 
+ +- [API Reference](api-reference.md) - Complete API documentation +- [Examples](examples.md) - More handler examples +- [FAQ](faq.md) - Common questions and answers +- [Troubleshooting](troubleshooting.md) - Solve common issues + diff --git a/tools/developer_tools/bely-mqtt-message-broker/docs/index.md b/tools/developer_tools/bely-mqtt-message-broker/docs/index.md new file mode 100644 index 000000000..ae8e10231 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/docs/index.md @@ -0,0 +1,57 @@ +# BELY MQTT Framework Documentation + +Welcome to the BELY MQTT Framework documentation. This framework helps you build event-driven integrations with BELY (Best Electronic Logbook Yet). + +## Quick Links + +- [Getting Started](getting-started.md) - Create your first handler in 5 minutes +- [API Reference](api-reference.md) - Complete API documentation +- [Examples](examples.md) - Real-world handler examples +- [Configuration](configuration.md) - Configure the framework +- [FAQ](faq.md) - Frequently asked questions +- [Troubleshooting](troubleshooting.md) - Common issues and solutions + +## What is BELY MQTT Framework? + +The BELY MQTT Framework is a pluggable Python framework for handling MQTT events from BELY. 
It provides: + +- **Easy Handler Development** - Simple Python classes for event handling +- **Flexible Topic Matching** - Support for MQTT wildcards +- **Built-in Models** - Pydantic models for all BELY events +- **API Integration** - Optional BELY API client +- **Production Ready** - Logging, error handling, and systemd support + +## Installation + +```bash +pip install bely-mqtt-framework +``` + +## Basic Example + +```python +from bely_mqtt import MQTTHandler, LogEntryAddEvent + +class LogHandler(MQTTHandler): + @property + def topic_pattern(self) -> str: + return "bely/logEntry/Add" + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + self.logger.info(f"New entry: {event.description}") +``` + +## Architecture + +The framework consists of: + +1. **Core Framework** - MQTT client and plugin manager +2. **Handler System** - Pluggable handlers for different events +3. **Data Models** - Pydantic models for type safety +4. **CLI Interface** - Command-line tools for running the framework + +## Support + +- GitHub Issues: [Report bugs or request features](https://github.com/bely-org/bely-mqtt-framework/issues) +- Documentation: [Full documentation](https://github.com/bely-org/bely-mqtt-framework/tree/main/docs) +- Examples: [Example handlers](https://github.com/bely-org/bely-mqtt-framework/tree/main/examples) \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/docs/migration-guide.md b/tools/developer_tools/bely-mqtt-message-broker/docs/migration-guide.md new file mode 100644 index 000000000..a49327e4e --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/docs/migration-guide.md @@ -0,0 +1,141 @@ +# Migration Guide + +## Migrating from Raw MQTT Client + +If you're currently using a raw MQTT client (like `paho-mqtt`), here's how to migrate: + +### Before (Raw MQTT) + +```python +import paho.mqtt.client as mqtt +import json + +def on_message(client, userdata, msg): + if msg.topic == 
"bely/logEntry/Add": + payload = json.loads(msg.payload) + print(f"New entry: {payload['description']}") + +client = mqtt.Client() +client.on_message = on_message +client.connect("localhost", 1883) +client.subscribe("bely/#") +client.loop_forever() +``` + +### After (BELY MQTT Framework) + +```python +from bely_mqtt import MQTTHandler, LogEntryAddEvent + +class LogHandler(MQTTHandler): + # By default, subscribes to all BELY topics (bely/#) + # The framework automatically routes events to specific handlers + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + print(f"New entry: {event.description}") +``` + +Or if you want to subscribe to specific topics only: + +```python +class SpecificLogHandler(MQTTHandler): + @property + def topic_pattern(self) -> str: + return "bely/logEntry/Add" # Override to subscribe to specific topic + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + print(f"New entry: {event.description}") +``` + +## Benefits of Migration + +1. **Type Safety** - Pydantic models validate event data +2. **Better Organization** - Separate handlers for different events +3. **Error Handling** - Built-in error handling and logging +4. **Async Support** - Better performance with async/await +5. **API Integration** - Easy access to BELY API +6. **Testing** - Easier to unit test handlers + +## Migration Steps + +1. **Install the framework** + ```bash + pip install bely-mqtt-framework + ``` + +2. **Convert callbacks to handlers** + - Create one handler per event type + - Move callback logic to specific handler methods (e.g., `handle_log_entry_add`) + +3. **Use provided models** + ```python + from bely_mqtt import LogEntryAddEvent + + event = LogEntryAddEvent(**message.payload) + # Now you have type-safe access to all fields + ``` + +4. **Update configuration** + - Replace connection code with CLI arguments + - Use environment variables for secrets + +5. 
**Test your handlers** + ```python + @pytest.mark.asyncio + async def test_handler(): + handler = MyHandler() + message = MQTTMessage(...) + await handler.handle(message) + ``` + +## Common Patterns + +### Multiple Topics + +**Before:** +```python +def on_message(client, userdata, msg): + if msg.topic == "bely/logEntry/Add": + handle_add(msg) + elif msg.topic == "bely/logEntry/Update": + handle_update(msg) +``` + +**After:** +```python +from bely_mqtt import HybridEventHandler, LogEntryAddEvent, LogEntryUpdateEvent + +class MultiHandler(HybridEventHandler): + @property + def topic_pattern(self) -> str: + return "bely/logEntry/+" + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + # Handle add events + pass + + async def handle_log_entry_update(self, event: LogEntryUpdateEvent) -> None: + # Handle update events + pass +``` + +### Error Handling + +**Before:** +```python +def on_message(client, userdata, msg): + try: + payload = json.loads(msg.payload) + process_message(payload) + except Exception as e: + print(f"Error: {e}") +``` + +**After:** +```python +async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + try: + await self.process_event(event) + except Exception as e: + self.logger.error(f"Failed to process: {e}", exc_info=True) +``` \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/docs/troubleshooting.md b/tools/developer_tools/bely-mqtt-message-broker/docs/troubleshooting.md new file mode 100644 index 000000000..051677262 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/docs/troubleshooting.md @@ -0,0 +1,124 @@ +# Troubleshooting Guide + +## Common Issues + +### MQTT Connection Issues + +**Problem:** Cannot connect to MQTT broker + +**Solutions:** +1. Check broker is running: `mosquitto_sub -t '#' -v` +2. Verify connection details: + ```bash + bely-mqtt start --mqtt-host localhost --mqtt-port 1883 + ``` +3. Check firewall rules +4. 
Try without authentication first + +### Handler Not Loading + +**Problem:** Handler file exists but not being loaded + +**Solutions:** +1. Check file naming: Must be `*.py` in handlers directory +2. Verify class inherits from `MQTTHandler` +3. Check for syntax errors: `python -m py_compile handlers/my_handler.py` +4. Enable debug logging: `--log-level DEBUG` + +### Handler Not Receiving Messages + +**Problem:** Handler loads but doesn't receive messages + +**Solutions:** +1. Verify topic pattern matches: + ```python + # Check exact topic + @property + def topic_pattern(self) -> str: + return "bely/logEntry/Add" # Must match exactly + ``` +2. Implement the correct handler method: + ```python + # Use specific method for the event type + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + # Not just handle(self, message) + pass + ``` +3. Test with wildcards: + ```python + return "bely/#" # Receives all BELY messages + ``` +4. Check MQTT subscription: Look for "Subscribed to topic" in logs + +### Import Errors + +**Problem:** `ModuleNotFoundError: No module named 'bely_mqtt'` + +**Solutions:** +1. Install the framework: + ```bash + pip install bely-mqtt-framework + ``` +2. Check virtual environment is activated +3. Verify installation: + ```bash + pip show bely-mqtt-framework + ``` + +### Async Errors + +**Problem:** `RuntimeWarning: coroutine 'handle' was never awaited` + +**Solutions:** +1. Ensure handle method is async: + ```python + async def handle(self, message: MQTTMessage) -> None: + # Your code here + ``` +2. Use `await` for async calls: + ```python + await some_async_function() + ``` + +## Debug Mode + +Enable debug logging to see detailed information: + +```bash +bely-mqtt start --handlers-dir ./handlers --log-level DEBUG +``` + +This shows: +- Handler loading process +- MQTT connection details +- Message routing +- Error stack traces + +## Getting Help + +1. Check the [FAQ](faq.md) +2. Review [examples](examples.md) +3. 
Enable debug logging +4. Check GitHub issues +5. Ask in discussions + +## Log Messages Explained + +### INFO Messages + +- `"Loading handlers from ..."` - Handler discovery started +- `"Loaded handler: ..."` - Handler successfully loaded +- `"Connected to MQTT broker"` - MQTT connection established +- `"Subscribed to topic: ..."` - Topic subscription successful + +### WARNING Messages + +- `"No handlers found"` - Check handlers directory +- `"Failed to load handler"` - Syntax error in handler file +- `"No handlers for topic"` - No matching topic patterns + +### ERROR Messages + +- `"Connection refused"` - MQTT broker not accessible +- `"Authentication failed"` - Check username/password +- `"Handler error"` - Exception in handle() method \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/.env.example b/tools/developer_tools/bely-mqtt-message-broker/examples/.env.example new file mode 100644 index 000000000..e670509c4 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/.env.example @@ -0,0 +1,20 @@ +# MQTT Broker Configuration +MQTT_BROKER_HOST=localhost +MQTT_BROKER_PORT=1883 +MQTT_CLIENT_ID=bely-mqtt-client +MQTT_USERNAME= +MQTT_PASSWORD= + +# BELY API Configuration +BELY_API_URL=https://api.bely.dev +BELY_API_KEY= + +# Handler Configuration +BELY_HANDLERS_DIR=./handlers + +# Logging Configuration +LOG_LEVEL=INFO + +# Notification Configuration (if using notification handlers) +# APPRISE_NOTIFICATION_URL=mailto://user:password@gmail.com +# APPRISE_NOTIFICATION_URL=discord://webhook_id/webhook_token diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/config/apprise_notification_config.yaml b/tools/developer_tools/bely-mqtt-message-broker/examples/config/apprise_notification_config.yaml new file mode 100644 index 000000000..cdc499950 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/config/apprise_notification_config.yaml @@ -0,0 +1,101 @@ +# Apprise Smart 
Notification Handler Configuration +# +# This configuration file defines how the AppriseSmartNotificationHandler +# sends notifications for BELY events. + +# Global configuration (optional) +global: + # Email server configuration for mailto:// URLs + mail_server: "smtp.gmail.com" + # mail_port: 587 + # mail_username: "your-email@gmail.com" + # mail_password: "your-app-password" + mail_from: "your-email@gmail.com" + mail_from_name: "BELY Notifications" + +# User-specific notification configuration +users: + # Example user with email and Discord notifications + john_doe: + # Apprise URLs for notification endpoints + # See https://github.com/caronc/apprise/wiki for all supported services + apprise_urls: + # Email notification + - "mailto://john.doe@example.com" + # Discord webhook + - "discord://webhook-id/webhook-token" + + # Which types of notifications to send + notifications: + # Notify when someone else updates any log entry (typically for document owners) + entry_updates: true + # Notify when someone else edits YOUR log entries specifically + own_entry_edits: true + # Notify when someone else replies to a log entry + entry_replies: true + # Notify when someone else creates an entry in a document + new_entries: true + # Notify when someone replies to any entry in your document + document_replies: true + # Notify when someone reacts to your log entry + reactions: true + + # Example user with Slack notifications only + jane_smith: + apprise_urls: + # Slack webhook + - "slack://token-a/token-b/token-c" + + notifications: + entry_updates: true + # Only get notified about general updates, not specifically own entries + own_entry_edits: false + entry_replies: false # Don't notify about replies + new_entries: true + + # Example user with multiple notification channels + bob_johnson: + apprise_urls: + # Email + - "mailto://bob@example.com" + # Telegram + - "tgram://bot-token/chat-id" + # Pushbullet + - "pbul://access-token" + + notifications: + entry_updates: true + 
entry_replies: true + new_entries: false # Don't notify about new entries + + # Example user with Teams notifications + alice_williams: + apprise_urls: + # Microsoft Teams + - "msteams://token-a/token-b/token-c" + + notifications: + entry_updates: true + entry_replies: true + new_entries: true + +# Supported Apprise Services: +# +# Email: +# - mailto://user:password@domain.com +# - mailgun://user:password@domain.com +# - sendgrid://api-key@domain.com +# +# Chat: +# - discord://webhook-id/webhook-token +# - slack://token-a/token-b/token-c +# - msteams://token-a/token-b/token-c +# - tgram://bot-token/chat-id +# +# Notifications: +# - pbul://access-token +# - pover://user-key/api-token +# - gotify://hostname/token +# +# See https://github.com/caronc/apprise/wiki for complete list + diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/config/example.env b/tools/developer_tools/bely-mqtt-message-broker/examples/config/example.env new file mode 100644 index 000000000..a6f8ac309 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/config/example.env @@ -0,0 +1,19 @@ +# BELY MQTT Framework Configuration Example +# Copy this file to .env and update with your values + +# MQTT Configuration +MQTT_HOST=localhost +MQTT_PORT=1883 +MQTT_USERNAME= +MQTT_PASSWORD= +MQTT_TOPIC=bely/# + +# BELY API Configuration (optional) +BELY_API_URL=https://api.bely.dev +BELY_API_KEY=your-api-key-here + +# Logging +LOG_LEVEL=INFO + +# Handler Configuration +HANDLERS_DIR=./handlers \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_notification_handler.py b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_notification_handler.py new file mode 100644 index 000000000..b80128749 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_notification_handler.py @@ -0,0 +1,300 @@ +""" +Advanced notification handler using Apprise. 
"""
Apprise notification handlers for BELY MQTT events.

This module demonstrates how to send notifications for BELY events
using the Apprise library, which supports multiple notification services.

Installation:
    pip install bely-mqtt-framework[apprise]

Configuration:
    Set APPRISE_URLS environment variable with notification endpoints:

    APPRISE_URLS="mailto://user:password@gmail.com discord://webhook_id/webhook_token"

    Or configure in handler initialization.
"""

import logging
import os
from typing import List, Optional

from bely_mqtt.models import (
    LogEntryAddEvent,
    LogEntryReplyAddEvent,
    LogEntryReplyUpdateEvent,
    LogEntryUpdateEvent,
    MQTTMessage,
)
from bely_mqtt.plugin import MQTTHandler

logger = logging.getLogger(__name__)

try:
    import apprise

    APPRISE_AVAILABLE = True
except ImportError:
    APPRISE_AVAILABLE = False
    logger.warning(
        "Apprise not installed. Install with: pip install bely-mqtt-framework[apprise]"
    )


class ApprisNotificationHandler(MQTTHandler):
    """
    Send notifications for BELY events using Apprise.

    Supports multiple notification services:
    - Email (SMTP)
    - Discord
    - Slack
    - Telegram
    - Pushbullet
    - And many more...

    Configure notification URLs via:
    1. APPRISE_URLS environment variable (whitespace-separated URLs)
    2. Handler initialization (``notification_urls`` keyword)
    3. Configuration file
    """

    def __init__(self, *args, notification_urls: Optional[List[str]] = None, **kwargs):
        """
        Initialize the notification handler.

        Args:
            notification_urls: List of Apprise notification URLs.
                If not provided, will read from APPRISE_URLS env var.
        """
        super().__init__(*args, **kwargs)
        self.apprise_instance: Optional[apprise.Apprise] = None
        self.notification_urls: List[str] = []

        if not APPRISE_AVAILABLE:
            self.logger.warning(
                "Apprise not available. Install with: pip install bely-mqtt-framework[apprise]"
            )
            return

        # An explicit argument wins over the environment variable.
        if notification_urls:
            self.notification_urls = notification_urls
        else:
            env_urls = os.getenv("APPRISE_URLS", "")
            if env_urls:
                self.notification_urls = env_urls.split()

        if not self.notification_urls:
            self.logger.warning(
                "No notification URLs configured. Set APPRISE_URLS environment variable."
            )
            return

        self.apprise_instance = apprise.Apprise()
        for url in self.notification_urls:
            # Apprise.add() returns False for malformed/unsupported URLs.
            if self.apprise_instance.add(url):
                self.logger.info("Added notification endpoint: %s", url)
            else:
                self.logger.warning("Failed to add notification endpoint: %s", url)

    @property
    def topic_pattern(self) -> str:
        """Subscribe to all log entry events."""
        return "bely/logEntry/#"

    async def handle(self, message: MQTTMessage) -> None:
        """Handle log entry events and send notifications."""
        if not APPRISE_AVAILABLE or not self.apprise_instance:
            self.logger.debug("Apprise not available, skipping notification")
            return

        try:
            # Reply topics also contain "Add"/"Update", so check "Reply" first.
            if "Reply" in message.topic:
                if "Add" in message.topic:
                    await self._handle_reply_added(message)
                elif "Update" in message.topic:
                    await self._handle_reply_updated(message)
            elif "Add" in message.topic:
                await self._handle_entry_added(message)
            elif "Update" in message.topic:
                await self._handle_entry_updated(message)
        except Exception as e:
            self.logger.error("Failed to handle notification event: %s", e, exc_info=True)

    async def _handle_entry_added(self, message: MQTTMessage) -> None:
        """Handle log entry add event."""
        event = LogEntryAddEvent(**message.payload)

        title = f"📝 New Log Entry in {event.parent_log_document_info.name}"
        body = self._format_entry_added_body(event)

        await self._send_notification(title, body)

    async def _handle_entry_updated(self, message: MQTTMessage) -> None:
        """Handle log entry update event."""
        event = LogEntryUpdateEvent(**message.payload)

        title = f"✏️ Log Entry Updated in {event.parent_log_document_info.name}"
        body = self._format_entry_updated_body(event)

        await self._send_notification(title, body)

    async def _handle_reply_added(self, message: MQTTMessage) -> None:
        """Handle reply add event."""
        event = LogEntryReplyAddEvent(**message.payload)

        title = f"💬 Reply Added to Entry in {event.parent_log_document_info.name}"
        body = self._format_reply_added_body(event)

        await self._send_notification(title, body)

    async def _handle_reply_updated(self, message: MQTTMessage) -> None:
        """Handle reply update event."""
        event = LogEntryReplyUpdateEvent(**message.payload)

        title = f"✏️ Reply Updated in {event.parent_log_document_info.name}"
        body = self._format_reply_updated_body(event)

        await self._send_notification(title, body)

    def _format_event_body(
        self, event, content_label: str, parent_entry_id=None
    ) -> str:
        """
        Build the notification body shared by all four event types.

        Args:
            event: Any of the LogEntry* event models (all expose
                event_triggered_by_username, logbook_list, log_info,
                event_timestamp and text_diff).
            content_label: "Content" for add events, "Changes" for updates.
            parent_entry_id: Parent entry id for reply events, else None.
        """
        logbooks = ", ".join(lb.display_name or lb.name for lb in event.logbook_list)
        lines = [
            f"User: {event.event_triggered_by_username}",
            f"Logbooks: {logbooks}",
        ]
        if parent_entry_id is not None:
            lines.append(f"Reply to Entry: {parent_entry_id}")
            lines.append(f"Reply ID: {event.log_info.id}")
        else:
            lines.append(f"Entry ID: {event.log_info.id}")
        lines.append(f"Time: {event.event_timestamp.isoformat()}")
        # Blank line before the content block; diff is truncated to 200 chars.
        lines.append(f"\n{content_label}:\n{event.text_diff[:200]}")
        return "\n".join(lines)

    def _format_entry_added_body(self, event: LogEntryAddEvent) -> str:
        """Format notification body for entry added event."""
        return self._format_event_body(event, "Content")

    def _format_entry_updated_body(self, event: LogEntryUpdateEvent) -> str:
        """Format notification body for entry updated event."""
        return self._format_event_body(event, "Changes")

    def _format_reply_added_body(self, event: LogEntryReplyAddEvent) -> str:
        """Format notification body for reply added event."""
        return self._format_event_body(
            event, "Content", parent_entry_id=event.parent_log_info.id
        )

    def _format_reply_updated_body(self, event: LogEntryReplyUpdateEvent) -> str:
        """Format notification body for reply updated event."""
        return self._format_event_body(
            event, "Changes", parent_entry_id=event.parent_log_info.id
        )

    async def _send_notification(self, title: str, body: str) -> None:
        """Send a notification via Apprise, logging delivery failures."""
        if not self.apprise_instance:
            self.logger.warning("Apprise instance not initialized")
            return

        self.logger.info("Sending notification: %s", title)

        # NOTE: Apprise.notify() is a synchronous call; it runs inline in the
        # event loop. It returns False if any endpoint failed to deliver.
        if not self.apprise_instance.notify(body=body, title=title):
            self.logger.warning("One or more notification endpoints failed for: %s", title)


class SelectiveNotificationHandler(MQTTHandler):
    """
    Send notifications only for specific conditions.

    This handler demonstrates how to filter events and only send
    notifications for specific logbooks or users.
    """

    def __init__(
        self,
        *args,
        notification_urls: Optional[List[str]] = None,
        target_logbooks: Optional[List[str]] = None,
        exclude_users: Optional[List[str]] = None,
        **kwargs,
    ):
        """
        Initialize the selective notification handler.

        Args:
            notification_urls: List of Apprise notification URLs.
            target_logbooks: Only notify for these logbooks (None = all).
            exclude_users: Don't notify for these users.
        """
        super().__init__(*args, **kwargs)
        self.target_logbooks = target_logbooks
        self.exclude_users = exclude_users or []
        self.apprise_instance: Optional[apprise.Apprise] = None

        if not APPRISE_AVAILABLE:
            return

        if notification_urls:
            self.apprise_instance = apprise.Apprise()
            for url in notification_urls:
                self.apprise_instance.add(url)

    @property
    def topic_pattern(self) -> str:
        """Subscribe to log entry add events."""
        return "bely/logEntry/Add"

    async def handle(self, message: MQTTMessage) -> None:
        """Handle log entry add event with filtering."""
        if not APPRISE_AVAILABLE or not self.apprise_instance:
            return

        try:
            event = LogEntryAddEvent(**message.payload)

            # Filter by user
            if event.event_triggered_by_username in self.exclude_users:
                self.logger.debug(
                    "Skipping notification for excluded user: %s",
                    event.event_triggered_by_username,
                )
                return

            # Filter by logbook
            if self.target_logbooks:
                logbook_names = [lb.name for lb in event.logbook_list]
                if not any(lb in self.target_logbooks for lb in logbook_names):
                    self.logger.debug(
                        "Skipping notification for non-target logbooks: %s", logbook_names
                    )
                    return

            # Send notification
            title = f"📝 New Entry in {event.parent_log_document_info.name}"
            body = (
                f"User: {event.event_triggered_by_username}\n"
                f"Entry ID: {event.log_info.id}\n\n"
                f"{event.text_diff[:200]}"
            )

            self.apprise_instance.notify(title=title, body=body)
            self.logger.info("Notification sent for entry %s", event.log_info.id)

        except Exception as e:
            self.logger.error("Failed to handle notification: %s", e, exc_info=True)
b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/README.md @@ -0,0 +1,78 @@ +# Apprise Smart Notification Handler + +A modular BELY MQTT handler for sending smart notifications via Apprise with enhanced email threading support. + +## Structure + +This handler is organized as a Python package with the following modules: + +``` +apprise_smart_notification/ +├── __init__.py # Package initialization and exports +├── handler.py # Main handler class implementation +├── config_loader.py # YAML configuration loading and processing +├── notification_processor.py # Notification routing and sending logic +├── formatters.py # Message formatting utilities +├── email_threading.py # Email threading header generation +├── apprise_email_wrapper.py # Custom wrapper for email headers support +└── README.md # This file +``` + +## Module Responsibilities + +### handler.py +- Main `AppriseSmartNotificationHandler` class +- Event handling methods (handle_log_entry_add, etc.) 
+- High-level event routing logic +- Coordination between other modules + +### config_loader.py +- `ConfigLoader` class for YAML file processing +- Global configuration management +- URL processing for mail server settings +- Configuration validation + +### notification_processor.py +- `NotificationProcessor` class for notification management +- User notification settings management +- Apprise instance management +- Notification sending logic + +### formatters.py +- `NotificationFormatter` class for message formatting +- HTML message generation +- Permalink generation +- Trigger description generation + +## Usage + +The handler can be used exactly the same way as before: + +```python +from apprise_smart_notification import AppriseSmartNotificationHandler + +handler = AppriseSmartNotificationHandler( + config_path="/path/to/config.yaml", + global_config=global_config +) +``` + +## Configuration + +See the main docstring in `__init__.py` for detailed configuration documentation. + +## Benefits of Modular Structure + +1. **Maintainability**: Each module has a single, clear responsibility +2. **Testability**: Individual components can be tested in isolation +3. **Reusability**: Components can be reused in other handlers +4. **Readability**: Smaller, focused files are easier to understand +5. 
**Extensibility**: New features can be added without modifying existing code + +## Development + +When adding new features: +- Event handling logic goes in `handler.py` +- Configuration features go in `config_loader.py` +- Notification logic goes in `notification_processor.py` +- Message formatting goes in `formatters.py` \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/__init__.py b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/__init__.py new file mode 100644 index 000000000..7e208c9d5 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/__init__.py @@ -0,0 +1,57 @@ +""" +Smart Apprise Notification Handler for BELY MQTT Events. + +This handler sends notifications for BELY events using Apprise, with configuration +from a YAML file. It supports: + +1. Log Entry Updates - Notify when someone else updates a log entry +2. Log Entry Replies - Notify when someone else replies to a log entry +3. New Log Entries - Notify when someone else creates an entry in a document +4. Log Reactions - Notify when someone reacts to a log entry +5. Document Replies - Notify document owners when someone replies to any entry in their document +6. Own Entry Edits - Notify when someone else edits YOUR log entry (different from entry_updates) + +Configuration is loaded from a YAML file with the following structure: + + global: + # Global mail server settings - automatically applied to simple mailto:// URLs + + # Example 1: Authenticated mail server (Gmail, Office365, etc.) 
+ mail_server: "smtp.gmail.com" + mail_port: 587 + mail_username: "your-email@gmail.com" # Optional - only for authenticated servers + mail_password: "your-app-password" # Optional - only for authenticated servers + mail_from: "your-email@gmail.com" + mail_from_name: "BELY Notifications" + + # Example 2: Non-authenticated mail server (internal/relay servers) + # mail_server: "mail.com" + # mail_port: 25 # Often port 25 for non-authenticated + # mail_from: "bely@aps.anl.gov" + # mail_from_name: "BELY Notifications" + # No username/password needed for non-authenticated servers + + users: + john_doe: + apprise_urls: + # Simple mailto URL - will use global mail settings if available + - "mailto://john@example.com" + # Other notification services + - "discord://webhook-id/webhook-token" + notifications: + entry_updates: true # Notify when any log entry is updated (typically for document owners) + own_entry_edits: true # Notify when YOUR log entries are edited by others + entry_replies: true + new_entries: true + reactions: true + document_replies: true # Notify when anyone replies in owned documents + +Example usage: + handler = AppriseSmartNotificationHandler( + config_path="/path/to/config.yaml" + ) +""" + +from .handler import AppriseSmartNotificationHandler + +__all__ = ["AppriseSmartNotificationHandler"] diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/apprise_email_wrapper.py b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/apprise_email_wrapper.py new file mode 100644 index 000000000..44ae7dab4 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/apprise_email_wrapper.py @@ -0,0 +1,192 @@ +""" +Custom Apprise wrapper to support email headers for threading. + +This module provides a custom implementation that allows passing email headers +to email notifications without modifying the Apprise library. 
"""
Custom Apprise wrapper to support email headers for threading.

This module provides a custom implementation that allows passing email headers
to email notifications without modifying the Apprise library.
"""

import logging
from typing import Dict, List, Optional
from urllib.parse import urlparse

import apprise

logger = logging.getLogger(__name__)


class EmailNotificationWrapper:
    """
    Wrapper for email notifications that supports custom headers.

    This class provides a way to send email notifications with custom headers
    (like Message-ID and References for threading) without modifying Apprise.
    """

    def __init__(self, apprise_url: str):
        """
        Initialize the email wrapper with an Apprise URL.

        Args:
            apprise_url: The Apprise email URL (mailto:// or mailtos://)

        Raises:
            ValueError: If the URL is not an email notification URL.
        """
        self.apprise_url = apprise_url
        self._parse_email_config()

    def _parse_email_config(self) -> None:
        """Parse the email configuration from the Apprise URL."""
        # Parse the URL to extract components
        parsed = urlparse(self.apprise_url)

        # Check if this is an email notification
        if parsed.scheme not in ("mailto", "mailtos"):
            raise ValueError(f"Not an email URL: {self.apprise_url}")

        # Store the parsed components for later use
        self.scheme = parsed.scheme
        self.netloc = parsed.netloc
        self.path = parsed.path
        self.query = parsed.query
        self.parsed_url = parsed

    def send_with_headers(
        self, title: str, body: str, headers: Optional[Dict[str, str]] = None
    ) -> bool:
        """
        Send an email notification with custom headers.

        Args:
            title: Email subject
            body: Email body
            headers: Optional dictionary of email headers

        Returns:
            True if notification was sent successfully, False otherwise
        """
        try:
            # Create a NotifyEmail instance directly
            email_instance = apprise.Apprise.instantiate(self.apprise_url)

            # Check if this is an email notification instance.
            # We check for the class name since we can't import NotifyEmail directly.
            if not (email_instance and email_instance.__class__.__name__ == "NotifyEmail"):
                # Fall back to regular Apprise if not an email notification
                apobj = apprise.Apprise()
                apobj.add(self.apprise_url)
                result = apobj.notify(body=body, title=title)
                return bool(result)

            # If we have headers, inject them into the email instance
            if headers:
                # Ensure the email instance has a headers attribute
                if not hasattr(email_instance, "headers"):
                    setattr(email_instance, "headers", {})  # type: ignore[attr-defined]
                # Clear any existing headers and set our custom ones
                getattr(email_instance, "headers").clear()  # type: ignore[attr-defined]
                getattr(email_instance, "headers").update(headers)  # type: ignore[attr-defined]
            else:
                # Ensure headers is at least an empty dict
                if not hasattr(email_instance, "headers"):
                    setattr(email_instance, "headers", {})  # type: ignore[attr-defined]

            # Send the notification using the email instance
            result = email_instance.send(body=body, title=title)
            return bool(result)

        except Exception as e:
            # Use the logging framework rather than print() so failures show up
            # in the broker's logs with the other handler diagnostics.
            logger.error("Error sending email with headers: %s", e, exc_info=True)
            return False


class AppriseWithEmailHeaders:
    """
    Extended Apprise wrapper that supports email headers for threading.

    This class wraps around Apprise to provide email header support while
    maintaining compatibility with other notification types.
    """

    def __init__(self):
        """Initialize the wrapper."""
        self.apprise = apprise.Apprise()
        # Email URLs get a per-URL wrapper so custom headers can be injected;
        # everything else is delegated to a plain Apprise instance.
        self.email_wrappers: Dict[str, EmailNotificationWrapper] = {}
        self.non_email_urls: List[str] = []

    def add(self, url: str) -> bool:
        """
        Add a notification URL.

        Args:
            url: The Apprise notification URL

        Returns:
            True if URL was added successfully
        """
        # Check if this is an email URL
        if self._is_email_url(url):
            # Create an email wrapper for this URL
            try:
                wrapper = EmailNotificationWrapper(url)
                # Use URL as key for now (could be improved)
                self.email_wrappers[url] = wrapper
                return True
            except Exception:
                # If wrapper creation fails, fall back to regular Apprise
                result = self.apprise.add(url)
                return bool(result)
        else:
            # For non-email URLs, use regular Apprise
            self.non_email_urls.append(url)
            result = self.apprise.add(url)
            return bool(result)

    def _is_email_url(self, url: str) -> bool:
        """Check if a URL is an email notification URL."""
        try:
            parsed = urlparse(url)
            return parsed.scheme in ("mailto", "mailtos")
        except Exception:
            return False

    def notify(self, body: str, title: str = "", headers: Optional[Dict[str, str]] = None) -> bool:
        """
        Send notifications with optional email headers support.

        Args:
            body: Notification body
            title: Notification title
            headers: Optional email headers (only used for email notifications)

        Returns:
            True if all notifications were sent successfully
        """
        success = True

        # Send to email endpoints with headers
        for url, wrapper in self.email_wrappers.items():
            result = wrapper.send_with_headers(title, body, headers)
            success = success and result

        # Send to non-email endpoints (without headers)
        if self.non_email_urls:
            result = self.apprise.notify(body=body, title=title)
            success = success and bool(result)

        return success

    def __bool__(self) -> bool:
        """Check if any notification endpoints are configured."""
        return bool(self.email_wrappers) or bool(self.non_email_urls)


def is_email_notification(url: str) -> bool:
    """
    Check if an Apprise URL is for email notifications.

    Args:
        url: The Apprise URL to check

    Returns:
        True if the URL is for email notifications
    """
    try:
        parsed = urlparse(url)
        return parsed.scheme in ("mailto", "mailtos")
    except Exception:
        return False
"""
Configuration loader for Apprise Smart Notification Handler.
"""

from logging import Logger
from pathlib import Path
from typing import Any, Dict
from urllib.parse import quote

try:
    import yaml

    YAML_AVAILABLE = True
except ImportError:
    YAML_AVAILABLE = False


class ConfigLoader:
    """Handles loading and processing of YAML configuration files."""

    def __init__(self, logger: Logger):
        """
        Initialize the config loader.

        Args:
            logger: Logger instance for output

        Raises:
            ImportError: If PyYAML is not installed.
        """
        self.logger = logger

        if not YAML_AVAILABLE:
            self.logger.warning("PyYAML not installed. Install with: pip install pyyaml")
            raise ImportError("PyYAML is required for this handler")

    def load_config(self, config_path: str) -> Dict[str, Any]:
        """
        Load configuration from YAML file.

        Args:
            config_path: Path to YAML configuration file

        Returns:
            Dictionary containing the configuration

        Raises:
            FileNotFoundError: If config file not found
            ValueError: If config is invalid
        """
        if not YAML_AVAILABLE:
            raise ImportError("PyYAML is required for configuration loading")

        config_file = Path(config_path)

        if not config_file.exists():
            raise FileNotFoundError(f"Config file not found: {config_path}")

        try:
            with open(config_file, "r") as f:
                # An empty file yields None from safe_load; normalize to {}.
                config = yaml.safe_load(f) or {}

            self.logger.info(f"Loaded configuration from {config_path}")

            # Validate config structure
            if "users" not in config:
                self.logger.warning("No users configured in config file")
                config["users"] = {}

            # Log global configuration status
            self._log_global_config_status(config)

            return config

        except yaml.YAMLError as e:
            raise ValueError(f"Invalid YAML in config file: {e}")
        except Exception as e:
            raise ValueError(f"Error loading config file: {e}")

    def _log_global_config_status(self, config: Dict[str, Any]) -> None:
        """
        Log the status of global configuration.

        Args:
            config: The loaded configuration dictionary
        """
        if "global" in config:
            global_config = config["global"]
            if "mail_server" in global_config:
                # Check if this is an authenticated or non-authenticated server
                has_auth = "mail_username" in global_config and "mail_password" in global_config
                auth_type = "authenticated" if has_auth else "non-authenticated"
                port = global_config.get("mail_port", 25 if not has_auth else 587)

                self.logger.info(
                    f"Global mail server configured ({auth_type}): "
                    f"{global_config.get('mail_server')}:{port}"
                )

                # Warn if partial authentication (only username or only password)
                if ("mail_username" in global_config) != ("mail_password" in global_config):
                    self.logger.warning(
                        "Partial authentication detected. Both mail_username and mail_password "
                        "are required for authenticated servers."
                    )
            else:
                self.logger.info(
                    "No global mail server configured. Simple mailto:// URLs will need full configuration."
                )
        else:
            self.logger.info(
                "No global configuration found. Simple mailto:// URLs will need full configuration."
            )

    def load_config_from_api(self, api_factory) -> Dict[str, Any]:
        """
        Load notification configurations from the BELY REST API.

        Args:
            api_factory: BelyApiFactory instance for accessing the BELY API

        Returns:
            Dictionary containing user configurations keyed by username,
            with per-endpoint structure

        Raises:
            Exception: If API call fails
        """
        nc_api = api_factory.getNotificationConfigurationApi()

        configs = nc_api.get_all()

        users: Dict[str, Any] = {}
        for config in configs:
            username = config.username
            if not username:
                continue

            if username not in users:
                users[username] = {"configs": []}

            # handler_preferences_by_name is already {str: bool} from the API
            notifications = config.handler_preferences_by_name or {}

            users[username]["configs"].append(
                {
                    "apprise_url": config.notification_endpoint,
                    "notifications": notifications,
                    "config_id": config.id,
                }
            )

        self.logger.info(
            "Loaded %d notification configurations for %d users from API",
            len(configs),
            len(users),
        )
        return {"users": users}

    def process_apprise_url(self, url: str, global_config: Dict[str, Any]) -> str:
        """
        Process Apprise URL to incorporate global settings.

        For mailto:// URLs, this will use global mail server settings if available.
        Supports both authenticated and non-authenticated mail servers.

        Args:
            url: Original Apprise URL
            global_config: Global configuration dictionary

        Returns:
            Processed URL with global settings applied
        """
        # Check if this is a simple mailto URL that needs global settings
        if url.startswith("mailto://") and global_config and "mail_server" in global_config:
            # Extract the email address from the simple mailto URL
            email_part = url.replace("mailto://", "")

            # Get mail server settings
            mail_server = global_config.get("mail_server")
            mail_from = global_config.get("mail_from", "noreply@localhost")
            mail_from_name = global_config.get("mail_from_name", "BELY Notifications")

            # Check if this is an authenticated or non-authenticated server
            has_auth = "mail_username" in global_config and "mail_password" in global_config

            if has_auth:
                # Authenticated mail server (Gmail, Office365, etc.)
                mail_port = global_config.get("mail_port", 587)
                mail_username = global_config.get("mail_username")
                mail_password = global_config.get("mail_password")

                # If mail_from not specified, use username
                if "mail_from" not in global_config:
                    mail_from = mail_username

                # Percent-encode the credentials: characters such as '@' or ':'
                # (common in email usernames and app passwords) would otherwise
                # break URL parsing of the userinfo section.
                user_quoted = quote(str(mail_username), safe="")
                pass_quoted = quote(str(mail_password), safe="")

                # Construct the full Apprise mailto URL with authentication
                # Format: mailto://username:password@server:port?to=recipient&from=sender&name=sender_name
                processed_url = (
                    f"mailto://{user_quoted}:{pass_quoted}@{mail_server}:{mail_port}"
                    f"?to={email_part}&from={mail_from}&name={mail_from_name}"
                )

                self.logger.debug(f"Processed mailto URL with authentication for: {email_part}")
            else:
                # Non-authenticated mail server (internal relay servers)
                mail_port = global_config.get("mail_port", 25)  # Default to port 25 for non-auth

                # Construct the Apprise mailto URL without authentication
                # Format: mailto://server:port?to=recipient&from=sender&name=sender_name
                processed_url = (
                    f"mailto://{mail_server}:{mail_port}"
                    f"?to={email_part}&from={mail_from}&name={mail_from_name}"
                )

                self.logger.debug(f"Processed mailto URL without authentication for: {email_part}")

            return processed_url

        # Return original URL if not a mailto or no global config
        return url
"""
Email threading support for Apprise Smart Notification Handler.

This module provides email threading headers to ensure related notifications
are grouped together in email clients.
"""

import hashlib
from datetime import datetime, timezone
from enum import Enum
from typing import Any, Dict, Optional
from urllib.parse import urlparse


class NotificationEventType(Enum):
    """Types of notification events for email threading."""

    DOCUMENT_CREATE = "document_create"
    DOCUMENT_UPDATE = "document_update"
    DOCUMENT_DELETE = "document_delete"
    ENTRY_ADD = "entry_add"
    ENTRY_UPDATE = "entry_update"
    ENTRY_DELETE = "entry_delete"
    ENTRY_REPLY = "entry_reply"
    REPLY_UPDATE = "reply_update"
    REPLY_DELETE = "reply_delete"
    REACTION_ADD = "reaction_add"
    REACTION_DELETE = "reaction_delete"

    @property
    def is_document_event(self) -> bool:
        """True for document-level events (create/update/delete)."""
        kind = type(self)
        return self in (kind.DOCUMENT_CREATE, kind.DOCUMENT_UPDATE, kind.DOCUMENT_DELETE)

    @property
    def is_entry_event(self) -> bool:
        """True for entry-level events (excluding replies)."""
        kind = type(self)
        return self in (kind.ENTRY_ADD, kind.ENTRY_UPDATE, kind.ENTRY_DELETE)

    @property
    def is_reply_event(self) -> bool:
        """True for reply events (add/update/delete of a reply)."""
        kind = type(self)
        return self in (kind.ENTRY_REPLY, kind.REPLY_UPDATE, kind.REPLY_DELETE)

    @property
    def is_reaction_event(self) -> bool:
        """True for reaction events (add/remove)."""
        kind = type(self)
        return self in (kind.REACTION_ADD, kind.REACTION_DELETE)


class EmailThreadingStrategy:
    """
    Email threading strategy that works with Apprise's custom headers.
    Only applies to email-type notifications.
    """

    # Email notification schemes in Apprise
    EMAIL_SCHEMES = {
        "mailto",
        "mailtos",  # Generic email
    }

    def __init__(self, domain: str = "notifications.bely.app"):
        """
        Initialize the email threading strategy.

        Args:
            domain: Domain to use in message IDs
        """
        self.domain = domain

    @staticmethod
    def is_email_notification(apprise_url: str) -> bool:
        """
        Check if an Apprise URL is for email notifications.

        Args:
            apprise_url: The Apprise notification URL

        Returns:
            True if this is an email notification URL
        """
        try:
            # mailto URLs may be written without '://'; treat any mailto: prefix
            # as email without parsing.
            if apprise_url.startswith("mailto:"):
                return True

            scheme = urlparse(apprise_url).scheme.lower()
            return scheme in EmailThreadingStrategy.EMAIL_SCHEMES
        except Exception:
            return False

    def _generate_stable_id(self, identifier: str) -> str:
        """
        Generate a stable ID for threading purposes.

        A SHA-256 digest of the identifier guarantees the same message-id is
        produced for the same entity across independent notifications.

        Args:
            identifier: The identifier to hash

        Returns:
            A stable message ID
        """
        digest = hashlib.sha256(identifier.encode()).hexdigest()
        return f"<{identifier}.{digest[:12]}@{self.domain}>"

    def generate_document_thread_id(self, document_id: str) -> str:
        """
        Generate the root thread ID for a document.

        Args:
            document_id: The document ID

        Returns:
            Thread ID for the document
        """
        # Coerce to str so int IDs hash identically to their string form.
        return self._generate_stable_id(f"doc.{str(document_id)}")

    def generate_entry_thread_id(self, entry_id: str) -> str:
        """
        Generate a stable thread ID for an entry.

        Args:
            entry_id: The entry ID

        Returns:
            Thread ID for the entry
        """
        return self._generate_stable_id(f"entry.{str(entry_id)}")

    def get_email_headers(
        self,
        event_type: NotificationEventType,
        document_id: str,
        document_name: str,
        entry_id: Optional[str] = None,
        parent_entry_id: Optional[str] = None,
    ) -> Dict[str, str]:
        """
        Generate email-specific threading headers.

        Args:
            event_type: Type of notification event
            document_id: The document ID
            document_name: The document name (for Thread-Topic)
            entry_id: The log entry ID (if applicable)
            parent_entry_id: The parent entry ID (for replies)

        Returns:
            Dictionary of email threading headers

        Raises:
            ValueError: When entry_id / parent_entry_id is required by the
                event type but missing.
        """
        # Normalize all IDs to strings up front.
        doc_id = str(document_id)
        eid = None if entry_id is None else str(entry_id)
        pid = None if parent_entry_id is None else str(parent_entry_id)

        doc_ref = self.generate_document_thread_id(doc_id)

        # Thread-Topic helps some email clients group messages; use the
        # document name (sanitized and truncated) for readability.
        topic = document_name.replace("\n", " ").replace("\r", " ")[:100]

        headers: Dict[str, str] = {
            "Thread-Topic": f"Log: {topic}",
            # X-Thread-Id is a custom header for an additional threading hint.
            "X-Thread-Id": doc_ref,
            "X-Document-Id": doc_id,
        }

        if event_type.is_document_event:
            # Document events start the thread; References establishes the root.
            headers["References"] = doc_ref

        elif event_type.is_entry_event:
            # Entry events reference the document.
            if not eid:
                raise ValueError(f"entry_id required for {event_type.value}")
            headers["In-Reply-To"] = doc_ref
            headers["References"] = doc_ref
            headers["X-Entry-Id"] = eid

        elif event_type.is_reply_event:
            # Reply events reference both the document and the parent entry.
            if not eid:
                raise ValueError(f"entry_id required for {event_type.value}")
            if not pid:
                raise ValueError(f"parent_entry_id required for {event_type.value}")
            parent_ref = self.generate_entry_thread_id(pid)
            headers["In-Reply-To"] = parent_ref
            headers["References"] = f"{doc_ref} {parent_ref}"
            headers["X-Entry-Id"] = eid
            headers["X-Parent-Entry-Id"] = pid

        elif event_type.is_reaction_event:
            # Reaction events reference the entry they're reacting to.
            if not eid:
                raise ValueError(f"entry_id required for {event_type.value}")
            entry_ref = self.generate_entry_thread_id(eid)
            headers["In-Reply-To"] = entry_ref
            headers["References"] = f"{doc_ref} {entry_ref}"
            headers["X-Entry-Id"] = eid

        # Metadata for debugging.
        headers["X-Notification-Time"] = datetime.now(timezone.utc).isoformat()
        headers["X-Event-Type"] = event_type.value

        return headers

    def generate_subject(
        self,
        event_type: NotificationEventType,
        document_title: str,
        action_description: Optional[str] = None,
        is_email: bool = True,
    ) -> str:
        """
        Generate subject lines - consistent for emails, descriptive for others.

        Args:
            event_type: Type of notification event
            document_title: Title of the document
            action_description: Optional description of the action
            is_email: Whether this is for an email notification

        Returns:
            Subject line for the notification
        """
        kind = NotificationEventType
        safe_title = document_title.replace("\n", " ").replace("\r", " ")

        if is_email:
            # Email subjects need a stable base so clients thread them.
            base = f"Log: {safe_title}"

            # Document creation starts a new thread without "Re:".
            if event_type is kind.DOCUMENT_CREATE:
                return f"New {base}"

            fixed_suffixes = {
                kind.DOCUMENT_UPDATE: " [Document Updated]",
                kind.DOCUMENT_DELETE: " [Document Deleted]",
                kind.ENTRY_UPDATE: " [Entry Updated]",
                kind.ENTRY_DELETE: " [Entry Deleted]",
                kind.REPLY_UPDATE: " [Reply Updated]",
                kind.REPLY_DELETE: " [Reply Deleted]",
            }
            if event_type in fixed_suffixes:
                suffix = fixed_suffixes[event_type]
            elif event_type is kind.REACTION_ADD:
                suffix = f" [{action_description}]" if action_description else " [Reaction Added]"
            elif event_type is kind.REACTION_DELETE:
                suffix = f" [{action_description}]" if action_description else " [Reaction Removed]"
            elif action_description:
                suffix = f" - {action_description}"
            else:
                suffix = ""

            # All non-create events use "Re:" for threading.
            return f"Re: {base}{suffix}"

        # Non-email notifications can carry more descriptive subjects.
        desc = f" - {action_description}" if action_description else ""
        descriptive = {
            kind.DOCUMENT_CREATE: f"📄 New Document: {safe_title}",
            kind.DOCUMENT_UPDATE: f"📝 Document Updated: {safe_title}",
            kind.DOCUMENT_DELETE: f"🗑️ Document Deleted: {safe_title}",
            kind.ENTRY_ADD: f"➕ New Entry in {safe_title}{desc}",
            kind.ENTRY_UPDATE: f"✏️ Entry Updated in {safe_title}{desc}",
            kind.ENTRY_DELETE: f"🗑️ Entry Deleted in {safe_title}{desc}",
            kind.ENTRY_REPLY: f"💬 Reply in {safe_title}{desc}",
            kind.REPLY_UPDATE: f"✏️ Reply Updated in {safe_title}{desc}",
            kind.REPLY_DELETE: f"🗑️ Reply Deleted in {safe_title}{desc}",
            kind.REACTION_ADD: f"👍 Reaction in {safe_title}{desc}",
            kind.REACTION_DELETE: f"👎 Reaction Removed in {safe_title}{desc}",
        }
        return descriptive.get(event_type, safe_title)


def detect_event_type(
    event: Any,
    is_reply: bool = False,
    is_update: bool = False,
    is_delete: bool = False,
    is_reaction: bool = False,
    is_reaction_delete: bool = False,
) -> NotificationEventType:
    """
    Detect the notification event type from the event and context.

    Args:
        event: The event object
        is_reply: Whether this is a reply event
        is_update: Whether this is an update event
        is_delete: Whether this is a delete event
        is_reaction: Whether this is a reaction event
        is_reaction_delete: Whether this is a reaction delete event

    Returns:
        The appropriate NotificationEventType
    """
    # Reactions take precedence over every other flag.
    if is_reaction:
        if is_reaction_delete:
            return NotificationEventType.REACTION_DELETE
        return NotificationEventType.REACTION_ADD

    # Then replies, with delete winning over update.
    if is_reply:
        if is_delete:
            return NotificationEventType.REPLY_DELETE
        if is_update:
            return NotificationEventType.REPLY_UPDATE
        return NotificationEventType.ENTRY_REPLY

    # Plain entry events.
    if is_delete:
        return NotificationEventType.ENTRY_DELETE
    if is_update:
        return NotificationEventType.ENTRY_UPDATE
    return NotificationEventType.ENTRY_ADD
+""" + +import logging +from datetime import datetime, tzinfo +from typing import Optional, Union +from zoneinfo import ZoneInfo + +from bely_mqtt import ( + LogEntryAddEvent, + LogEntryUpdateEvent, + LogEntryDeleteEvent, + LogEntryReplyAddEvent, + LogEntryReplyUpdateEvent, + LogEntryReplyDeleteEvent, + LogReactionAddEvent, + LogReactionDeleteEvent, + LogEntryEventBase, + LogReactionEventBase, +) +from bely_mqtt.models import CoreEvent + + +class NotificationFormatter: + """Handles formatting of notification messages.""" + + def __init__( + self, bely_url: Optional[str], logger: logging.Logger, timezone: Optional[str] = None + ): + """ + Initialize the formatter. + + Args: + bely_url: Base URL for BELY instance + logger: Logger instance for output + timezone: Timezone string (e.g., 'America/New_York'). If None, uses system local timezone. + """ + self.bely_url = bely_url + self.logger = logger + self.timezone: tzinfo # Declare the type + + # Set timezone - use provided timezone, or detect local timezone + if timezone: + try: + self.timezone = ZoneInfo(timezone) + except Exception as e: + self.logger.warning(f"Invalid timezone '{timezone}': {e}. Using UTC.") + self.timezone = ZoneInfo("UTC") + else: + # Detect local timezone using datetime + try: + # Get the local timezone from the system + local_tz = datetime.now().astimezone().tzinfo + if local_tz is not None: + self.timezone = local_tz + else: + self.timezone = ZoneInfo("UTC") + except Exception as e: + self.logger.debug(f"Could not detect local timezone: {e}. Using UTC.") + self.timezone = ZoneInfo("UTC") + + def _format_timestamp(self, timestamp: datetime) -> str: + """ + Format a timestamp for display in notifications. 
+ + Args: + timestamp: The datetime object to format + + Returns: + Formatted timestamp string in local timezone + """ + # Ensure timestamp is timezone-aware + if timestamp.tzinfo is None: + # Assume UTC if no timezone info + timestamp = timestamp.replace(tzinfo=ZoneInfo("UTC")) + + # Convert to local timezone + local_timestamp = timestamp.astimezone(self.timezone) + + # Format as readable string with timezone + return local_timestamp.strftime("%Y-%m-%d %H:%M:%S %Z") + + def format_entry_added(self, event: LogEntryAddEvent) -> str: + """Format notification body for new log entry.""" + body = ( + f"New entry added to {event.parent_log_document_info.name}
" + f"By: {event.event_triggered_by_username}
" + f"Time: {self._format_timestamp(event.event_timestamp)}
" + f"Description: {event.description}
" + f"
Entry markdown: {self._format_text_diff_pre(event.text_diff)}" + ) + return self._append_permalink_and_trigger(body, event) + + def format_entry_updated(self, event: LogEntryUpdateEvent) -> str: + """Format notification body for updated log entry (document owner notification).""" + body = ( + f"Entry updated in {event.parent_log_document_info.name}
" + f"Updated by: {event.event_triggered_by_username}
" + f"Original author: {event.log_info.entered_by_username}
" + f"Time: {self._format_timestamp(event.event_timestamp)}
" + f"Description: {event.description}
" + f"
Entry markdown changes: {self._format_text_diff_pre(event.text_diff)}" + ) + return self._append_permalink_and_trigger(body, event) + + def format_own_entry_edited(self, event: LogEntryUpdateEvent) -> str: + """Format notification body for when user's own entry is edited by someone else.""" + body = ( + f"Entry edited in {event.parent_log_document_info.name}
" + f"Original author: {event.log_info.entered_by_username}
" + f"Edited by: {event.event_triggered_by_username}
" + f"Time: {self._format_timestamp(event.event_timestamp)}
" + f"Description: {event.description}
" + f"
Entry markdown changes: {self._format_text_diff_pre(event.text_diff)}" + ) + return self._append_permalink_and_trigger(body, event, "own_entry_edit") + + def format_reply_added(self, event: LogEntryReplyAddEvent) -> str: + """Format notification body for new reply.""" + body = ( + f"New reply to entry in {event.parent_log_document_info.name}
" + f"Entry by: {event.parent_log_info.entered_by_username}
" + f"Reply by: {event.event_triggered_by_username}
" + f"Time: {self._format_timestamp(event.event_timestamp)}
" + f"
Reply markdown: {self._format_text_diff_pre(event.text_diff)}" + ) + return self._append_permalink_and_trigger(body, event) + + def format_reply_updated(self, event: LogEntryReplyUpdateEvent) -> str: + """Format notification body for updated reply (document owner notification).""" + body = ( + f"Reply updated in {event.parent_log_document_info.name}
" + f"Updated by: {event.event_triggered_by_username}
" + f"On entry by: {event.parent_log_info.entered_by_username}
" + f"Time: {self._format_timestamp(event.event_timestamp)}
" + f"
Reply markdown changes: {self._format_text_diff_pre(event.text_diff)}" + ) + return self._append_permalink_and_trigger(body, event) + + def format_own_reply_updated(self, event: LogEntryReplyUpdateEvent) -> str: + """Format notification body for when a reply on user's own entry is updated by someone else.""" + body = ( + f"Reply updated on entry in {event.parent_log_document_info.name}
" + f"Entry by: {event.parent_log_info.entered_by_username}
" + f"Updated by: {event.event_triggered_by_username}
" + f"Time: {self._format_timestamp(event.event_timestamp)}
" + f"
Reply markdown changes: {self._format_text_diff_pre(event.text_diff)}" + ) + return self._append_permalink_and_trigger(body, event, "own_reply_update") + + def format_document_reply(self, event: LogEntryReplyAddEvent) -> str: + """Format notification body for document owner about new reply.""" + body = ( + f"New reply added in document {event.parent_log_document_info.name}
" + f"Reply by: {event.event_triggered_by_username}
" + f"To entry by: {event.parent_log_info.entered_by_username}
" + f"Time: {self._format_timestamp(event.event_timestamp)}
" + f"
Reply markdown: {self._format_text_diff_pre(event.text_diff)}" + ) + return self._append_permalink_and_trigger(body, event, "document_owner") + + def format_reaction_added(self, event: LogReactionAddEvent) -> str: + """Format notification body for added reaction.""" + reaction_info = event.log_reaction.reaction + body = ( + f"New reaction added to entry in {event.parent_log_document_info.name}
" + f"By: {event.event_triggered_by_username}
" + f"Time: {self._format_timestamp(event.event_timestamp)}
" + f"Reaction: {reaction_info.emoji} {reaction_info.name}
" + f"Description: {event.description}" + ) + return self._append_permalink_and_trigger(body, event) + + def format_reaction_deleted(self, event: LogReactionDeleteEvent) -> str: + """Format notification body for deleted reaction.""" + reaction_info = event.log_reaction.reaction + body = ( + f"Reaction removed from entry in {event.parent_log_document_info.name}
" + f"By: {event.event_triggered_by_username}
" + f"Time: {self._format_timestamp(event.event_timestamp)}
" + f"Reaction: {reaction_info.emoji} {reaction_info.name}
" + f"Description: {event.description}" + ) + return self._append_permalink_and_trigger(body, event) + + def format_entry_deleted(self, event: LogEntryDeleteEvent) -> str: + """Format notification body for deleted log entry (document owner notification).""" + body = ( + f"Entry deleted from {event.parent_log_document_info.name}
" + f"Deleted by: {event.event_triggered_by_username}
" + f"Original author: {event.log_info.entered_by_username}
" + f"Time: {self._format_timestamp(event.event_timestamp)}
" + f"Description: {event.description}
" + f"
Deleted entry content: {self._format_text_diff_pre(event.text_diff)}" + ) + return self._append_permalink_and_trigger(body, event, "entry_delete") + + def format_own_entry_deleted(self, event: LogEntryDeleteEvent) -> str: + """Format notification body for when user's own entry is deleted by someone else.""" + body = ( + f"Entry was deleted from {event.parent_log_document_info.name}
" + f"Deleted by: {event.event_triggered_by_username}
" + f"Time: {self._format_timestamp(event.event_timestamp)}
" + f"Description: {event.description}
" + f"
Deleted entry content: {self._format_text_diff_pre(event.text_diff)}" + ) + return self._append_permalink_and_trigger(body, event, "own_entry_delete") + + def format_reply_deleted(self, event: LogEntryReplyDeleteEvent) -> str: + """Format notification body for deleted reply (entry creator notification).""" + body = ( + f"Reply deleted from entry in {event.parent_log_document_info.name}
" + f"Deleted by: {event.event_triggered_by_username}
" + f"Time: {self._format_timestamp(event.event_timestamp)}
" + f"
Deleted reply content: {self._format_text_diff_pre(event.text_diff)}" + ) + return self._append_permalink_and_trigger(body, event, "reply_delete") + + def format_document_reply_deleted(self, event: LogEntryReplyDeleteEvent) -> str: + """Format notification body for document owner about deleted reply.""" + body = ( + f"Reply deleted from document {event.parent_log_document_info.name}
" + f"Deleted by: {event.event_triggered_by_username}
" + f"On entry by: {event.parent_log_info.entered_by_username}
" + f"Time: {self._format_timestamp(event.event_timestamp)}
" + f"
Deleted reply content: {self._format_text_diff_pre(event.text_diff)}" + ) + return self._append_permalink_and_trigger(body, event, "document_owner") + + def _format_text_diff_pre(self, text_diff: str, max_height: str = "200px") -> str: + """ + Format text diff in a styled pre box. + + Args: + text_diff: The text difference to display + max_height: Maximum height of the pre box (default: "200px") + + Returns: + HTML formatted pre box with the text diff + """ + return ( + f"
"
+            f"{text_diff}
" + ) + + def _generate_log_entry_link(self, document_id: int, log_id: int) -> str: + """ + Generate a direct link to a log entry in BELY. + + Args: + document_id: The document ID + log_id: The log entry ID + + Returns: + URL string to the log entry + """ + if not self.bely_url: + return "" + + # Remove trailing slash if present + base_url = self.bely_url.rstrip("/") + + return f"{base_url}/views/item/view?id={document_id}&logId={log_id}" + + def _append_permalink_and_trigger( + self, + body: str, + event: Union[LogEntryEventBase, LogReactionEventBase], + notification_context: Optional[str] = None, + ) -> str: + """ + Append permalink and trigger description to notification body. + + Args: + body: The notification body + event: The event that triggered the notification + notification_context: Optional context about the notification type + + Returns: + Body with permalink and trigger description appended + """ + # Generate permalink if bely_url is available + if self.bely_url: + # Handle both LogEntryEventBase and LogReaction events + if isinstance(event, LogEntryEventBase): + log_id = event.log_info.id + document_id = event.parent_log_document_info.id + elif isinstance(event, LogReactionEventBase): + log_id = event.parent_log_info.id + document_id = event.parent_log_document_info.id + else: + log_id = None + document_id = None + + if log_id and document_id: + link = self._generate_log_entry_link(document_id, log_id) + body += f'

View entry' + + # Add trigger description + trigger_description = self._get_trigger_description(event, notification_context) + body += f"


    def _get_trigger_description(
        self, event: CoreEvent, notification_context: Optional[str] = None
    ) -> str:
        """
        Get a description of why this notification was triggered.

        The wording references the user-preference flag (e.g. 'new_entries',
        'entry_replies') that caused the notification to be sent, so
        recipients know which setting to change to stop receiving it.

        Args:
            event: The event that triggered the notification
            notification_context: Optional context about the notification type
                (e.g. "own_entry_edit", "document_owner") used to pick between
                the creator-facing and owner-facing wording for the same event.

        Returns:
            A human-readable description of the trigger
        """
        # NOTE(review): branch order matters if any of these event classes
        # subclass one another — earlier isinstance checks win. Confirm the
        # bely_mqtt class hierarchy keeps reply/reaction types distinct.
        if isinstance(event, LogEntryAddEvent):
            return (
                f"This notification was sent because {event.event_triggered_by_username} "
                f"added a new log entry to the document '{event.parent_log_document_info.name}' "
                f"which you own. You have 'new_entries' notifications enabled."
            )
        elif isinstance(event, LogEntryUpdateEvent):
            # Check the notification context to determine the type
            if notification_context == "own_entry_edit":
                # Sent to the entry's original author.
                return (
                    f"This notification was sent because {event.event_triggered_by_username} "
                    f"edited a log entry that you originally created in the document "
                    f"'{event.parent_log_document_info.name}'. You have 'own_entry_edits' notifications enabled."
                )
            else:
                # Sent to the document owner.
                return (
                    f"This notification was sent because {event.event_triggered_by_username} "
                    f"updated a log entry in the document '{event.parent_log_document_info.name}' "
                    f"which you own. You have 'entry_updates' notifications enabled."
                )
        elif isinstance(event, LogEntryReplyAddEvent):
            # Check the notification context to determine the type
            if notification_context == "document_owner":
                return (
                    f"This notification was sent because {event.event_triggered_by_username} "
                    f"added a reply to an entry in your document '{event.parent_log_document_info.name}'. "
                    f"You have 'document_replies' notifications enabled."
                )
            else:
                return (
                    f"This notification was sent because {event.event_triggered_by_username} "
                    f"replied to your log entry in the document '{event.parent_log_document_info.name}'. "
                    f"You have 'entry_replies' notifications enabled."
                )
        elif isinstance(event, LogEntryReplyUpdateEvent):
            # Check the notification context to determine the type
            if notification_context == "own_reply_update":
                return (
                    f"This notification was sent because {event.event_triggered_by_username} "
                    f"updated a reply on your log entry in the document "
                    f"'{event.parent_log_document_info.name}'. You have 'own_entry_edits' notifications enabled."
                )
            else:
                return (
                    f"This notification was sent because {event.event_triggered_by_username} "
                    f"updated a reply in the document '{event.parent_log_document_info.name}' "
                    f"which you own. You have 'entry_replies' notifications enabled."
                )
        elif isinstance(event, LogReactionAddEvent):
            return (
                f"This notification was sent because {event.event_triggered_by_username} "
                f"added a reaction to your log entry in the document "
                f"'{event.parent_log_document_info.name}'. You have 'reactions' notifications enabled."
            )
        elif isinstance(event, LogReactionDeleteEvent):
            return (
                f"This notification was sent because {event.event_triggered_by_username} "
                f"removed a reaction from your log entry in the document "
                f"'{event.parent_log_document_info.name}'. You have 'reactions' notifications enabled."
            )
        elif isinstance(event, LogEntryDeleteEvent):
            # Check the notification context to determine the type
            if notification_context == "own_entry_delete":
                return (
                    f"This notification was sent because {event.event_triggered_by_username} "
                    f"deleted a log entry that you originally created in the document "
                    f"'{event.parent_log_document_info.name}'. You have 'own_entry_edits' notifications enabled."
                )
            else:
                return (
                    f"This notification was sent because {event.event_triggered_by_username} "
                    f"deleted a log entry in the document '{event.parent_log_document_info.name}' "
                    f"which you own. You have 'entry_updates' notifications enabled."
                )
        elif isinstance(event, LogEntryReplyDeleteEvent):
            # Check the notification context to determine the type
            if notification_context == "reply_delete":
                return (
                    f"This notification was sent because {event.event_triggered_by_username} "
                    f"deleted a reply from your log entry in the document "
                    f"'{event.parent_log_document_info.name}'. You have 'entry_replies' notifications enabled."
                )
            else:
                return (
                    f"This notification was sent because {event.event_triggered_by_username} "
                    f"deleted a reply in the document '{event.parent_log_document_info.name}' "
                    f"which you own. You have 'document_replies' notifications enabled."
                )
        else:
            # Fallback for any event type not explicitly handled above.
            return "This notification was sent due to activity on your BELY content."
+""" + +import asyncio +from typing import Any, Optional, Union + +from bely_mqtt import ( + MQTTHandler, + CoreEvent, + LogEntryAddEvent, + LogEntryUpdateEvent, + LogEntryDeleteEvent, + LogEntryReplyAddEvent, + LogEntryReplyUpdateEvent, + LogEntryReplyDeleteEvent, + LogReactionAddEvent, + LogReactionDeleteEvent, + TestNotificationEvent, +) +from bely_mqtt.config import GlobalConfig + +try: + # Try relative imports first (when used as a package) + from .config_loader import ConfigLoader + from .notification_processor import NotificationProcessor + from .formatters import NotificationFormatter + from .email_threading import NotificationEventType + from .apprise_email_wrapper import AppriseWithEmailHeaders +except ImportError: + # Fall back to absolute imports (when imported directly from tests) + from config_loader import ConfigLoader # type: ignore[no-redef] + from notification_processor import NotificationProcessor # type: ignore[no-redef] + from formatters import NotificationFormatter # type: ignore[no-redef] + from email_threading import NotificationEventType # type: ignore[no-redef] + from apprise_email_wrapper import AppriseWithEmailHeaders # type: ignore[no-redef] + + +class AppriseSmartNotificationHandler(MQTTHandler): + """ + Smart notification handler using Apprise with YAML configuration. + + Sends notifications for: + - Log entry updates by other users + - Log entry replies by other users + - New log entries in documents by other users + - Reactions to log entries by other users + """ + + def __init__( + self, + config_path: Optional[str] = None, + api_factory: Optional[Any] = None, + global_config: Optional[GlobalConfig] = None, + ): + """ + Initialize the handler. 
+ + Args: + config_path: Path to YAML configuration file + api_factory: Optional BelyApiFactory instance for querying the BELY API + global_config: Optional global configuration containing bely_url and other settings + + Raises: + ImportError: If apprise or yaml not installed + FileNotFoundError: If config file not found + ValueError: If config is invalid + """ + super().__init__(api_factory=api_factory, global_config=global_config) + + self.bely_url = self.global_config.bely_url if self.global_config else None + + # Initialize components + self.config_loader = ConfigLoader(self.logger) + + # Initialize formatter with timezone from config if available + self.timezone = None + self.formatter = NotificationFormatter( + self.bely_url, self.logger + ) # Will be updated after config load + self.processor = NotificationProcessor(self.logger, bely_url=self.bely_url) + + # Load configuration: prefer API, fall back to YAML + self.global_config_data = {} + api_config_loaded = False + + if self.api_factory: + try: + api_config = self.config_loader.load_config_from_api(self.api_factory) + if config_path: + file_config = self.config_loader.load_config(config_path) + self.global_config_data = file_config.get("global", {}) + api_config["global"] = self.global_config_data + self.processor.initialize_from_config(api_config, self.config_loader) + api_config_loaded = True + except Exception as e: + self.logger.warning(f"Failed to load from API: {e}") + + if not api_config_loaded and config_path: + config = self.config_loader.load_config(config_path) + self.global_config_data = config.get("global", {}) + self.processor.initialize_from_config(config, self.config_loader) + elif not api_config_loaded: + self.logger.warning("No config path provided. 
Handler will not send notifications.") + + # Debounced config reload state + self._reload_timer: Optional[asyncio.TimerHandle] = None + self._reload_debounce_seconds: float = 2.0 + + async def handle_generic_add(self, event: CoreEvent) -> None: + """Handle generic add events. Reloads config on NotificationConfiguration changes.""" + if self._is_notification_config_event(event): + self._schedule_config_reload() + + async def handle_generic_update(self, event: CoreEvent) -> None: + """Handle generic update events. Reloads config on NotificationConfiguration changes.""" + if self._is_notification_config_event(event): + self._schedule_config_reload() + + async def handle_generic_delete(self, event: CoreEvent) -> None: + """Handle generic delete events. Reloads config on NotificationConfiguration changes.""" + if self._is_notification_config_event(event): + self._schedule_config_reload() + + def _is_notification_config_event(self, event: CoreEvent) -> bool: + """Check if event is for a NotificationConfiguration entity.""" + return bool(event.entity_name == "NotificationConfiguration") + + def _schedule_config_reload(self) -> None: + """Schedule a debounced config reload. 
Resets timer on each call.""" + if not self.api_factory: + self.logger.warning("Cannot reload config: no API factory configured") + return + + if self._reload_timer is not None: + self._reload_timer.cancel() + + loop = asyncio.get_event_loop() + self._reload_timer = loop.call_later( + self._reload_debounce_seconds, lambda: asyncio.ensure_future(self._reload_config()) + ) + self.logger.debug("Scheduled config reload (debounced)") + + async def _reload_config(self) -> None: + """Reload notification config from API, swapping atomically on success.""" + self._reload_timer = None + self.logger.info("Reloading notification configuration from API...") + + try: + api_config = self.config_loader.load_config_from_api(self.api_factory) + if self.global_config_data: + api_config["global"] = self.global_config_data + + temp_processor = NotificationProcessor(self.logger, bely_url=self.bely_url) + temp_processor.initialize_from_config(api_config, self.config_loader) + + self.processor.user_endpoint_configs = temp_processor.user_endpoint_configs + self.processor._config_id_to_username = temp_processor._config_id_to_username + self.logger.info( + f"Config reloaded: {len(self.processor.user_endpoint_configs)} users configured" + ) + except Exception as e: + self.logger.error(f"Failed to reload config, keeping existing: {e}", exc_info=True) + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + """ + Handle new log entry events. + + Notify document owner/creators if entry is created by someone else. + + Args: + event: The log entry add event + """ + try: + await self._handle_add_event(event, is_reply=False) + except Exception as e: + self.logger.error(f"Error processing log entry add: {e}", exc_info=True) + + async def handle_log_entry_update(self, event: LogEntryUpdateEvent) -> None: + """ + Handle log entry update events. + + Notify: + 1. The original creator if their entry is updated by someone else (own_entry_edits) + 2. 
Document owners about any updates + + Args: + event: The log entry update event + """ + try: + await self._handle_update_event(event, is_reply=False) + except Exception as e: + self.logger.error(f"Error processing log entry update: {e}", exc_info=True) + + async def handle_log_entry_reply_add(self, event: LogEntryReplyAddEvent) -> None: + """ + Handle log entry reply events. + + Notify: + 1. The original entry creator if someone else replies + 2. The document owner if someone replies to any entry in their document + + Args: + event: The log entry reply add event + """ + try: + await self._handle_add_event(event, is_reply=True) + except Exception as e: + self.logger.error(f"Error processing log entry reply add: {e}", exc_info=True) + + async def handle_log_entry_reply_update(self, event: LogEntryReplyUpdateEvent) -> None: + """ + Handle log entry reply update events. + + Notify: + 1. The original entry creator if someone else updates a reply on their entry (own_entry_edits) + 2. Document owners about any reply updates + + Args: + event: The log entry reply update event + """ + try: + await self._handle_update_event(event, is_reply=True) + except Exception as e: + self.logger.error(f"Error processing log entry reply update: {e}", exc_info=True) + + async def handle_log_reaction_add(self, event: LogReactionAddEvent) -> None: + """ + Handle log reaction add events. + + Notify the original entry creator if someone else reacts to their entry. + + Args: + event: The log reaction add event + """ + try: + await self._handle_reaction_event(event, is_add=True) + except Exception as e: + self.logger.error(f"Error processing log reaction add: {e}", exc_info=True) + + async def handle_log_reaction_delete(self, event: LogReactionDeleteEvent) -> None: + """ + Handle log reaction delete events. + + Notify the original entry creator if someone removes a reaction from their entry. 
+ + Args: + event: The log reaction delete event + """ + try: + await self._handle_reaction_event(event, is_add=False) + except Exception as e: + self.logger.error(f"Error processing log reaction delete: {e}", exc_info=True) + + async def handle_log_entry_delete(self, event: LogEntryDeleteEvent) -> None: + """ + Handle log entry delete events. + + Notify: + 1. The original creator if their entry is deleted by someone else + 2. Document owners about any deletions + + Args: + event: The log entry delete event + """ + try: + await self._handle_delete_event(event, is_reply=False) + except Exception as e: + self.logger.error(f"Error processing log entry delete: {e}", exc_info=True) + + async def handle_log_entry_reply_delete(self, event: LogEntryReplyDeleteEvent) -> None: + """ + Handle log entry reply delete events. + + Notify: + 1. The original entry creator if someone deletes a reply on their entry + 2. Document owners about any reply deletions + + Args: + event: The log entry reply delete event + """ + try: + await self._handle_delete_event(event, is_reply=True) + except Exception as e: + self.logger.error(f"Error processing log entry reply delete: {e}", exc_info=True) + + async def handle_notification_test(self, event: TestNotificationEvent) -> None: + """ + Handle test notification events. + + Sends a test notification directly to the configured endpoint. 
+ + Args: + event: The test notification event + """ + try: + # Create a temporary Apprise instance for this test + apobj = AppriseWithEmailHeaders() + + # Process the notification endpoint URL through config_loader + # to apply global SMTP settings for mailto URLs + processed_url = self.config_loader.process_apprise_url( + event.notification_endpoint, self.global_config_data + ) + + if not apobj.add(processed_url): + self.logger.error( + f"Failed to add notification endpoint: {event.notification_endpoint}" + ) + return + + # Send the test notification + title = "BELY Test Notification" + body = ( + f"This is a test notification for configuration: " + f"{event.configuration_name}\n\n" + f"Triggered by: {event.event_triggered_by_username}\n" + f"Timestamp: {event.event_timestamp}" + ) + + if event.configuration_id is not None: + body += f"\nConfiguration ID: {event.configuration_id}" + + if event.provider_settings: + body += "\n\nProvider Settings:" + for key, value in event.provider_settings.items(): + body += f"\n {key}: {value}" + + result = apobj.notify(body=body, title=title) + + if result: + self.logger.info( + f"Test notification sent successfully for configuration: " + f"{event.configuration_name}" + ) + else: + self.logger.warning( + f"Failed to send test notification for configuration: " + f"{event.configuration_name}" + ) + + except Exception as e: + self.logger.error(f"Error sending test notification: {e}", exc_info=True) + + async def _handle_add_event( + self, event: Union[LogEntryAddEvent, LogEntryReplyAddEvent], is_reply: bool = False + ) -> None: + """ + Unified handler for add events (entry or reply adds). 
+ + Args: + event: The add event + is_reply: Whether this is a reply add + """ + notification_configs = [] + + # Determine event type for threading + event_type = ( + NotificationEventType.ENTRY_REPLY if is_reply else NotificationEventType.ENTRY_ADD + ) + + # Extract IDs for threading + entry_id = event.log_info.id if hasattr(event, "log_info") else None + parent_entry_id = ( + event.parent_log_info.id if is_reply and hasattr(event, "parent_log_info") else None + ) + + if is_reply: + # Notify the original entry creator + creator_username = event.parent_log_info.entered_by_username + notification_configs.append( + { + "username": creator_username, + "notification_type": "entry_replies", + "title": f"Reply to Your Log Entry in {event.parent_log_document_info.name}", + "body": self.formatter.format_reply_added(event), + "context": None, + "event_type": event_type, + "entry_id": entry_id, + "parent_entry_id": parent_entry_id, + } + ) + + # Notify the document owner + owner_username = event.parent_log_document_info.owner_username + notification_configs.append( + { + "username": owner_username, + "notification_type": "document_replies", + "title": f"New Reply in Your Document: {event.parent_log_document_info.name}", + "body": self.formatter.format_document_reply(event), + "context": "document_owner", + "event_type": event_type, + "entry_id": entry_id, + "parent_entry_id": parent_entry_id, + } + ) + else: + # Don't notify if the creator is also the document creator + if ( + event.event_triggered_by_username + == event.parent_log_document_info.created_by_username + ): + return + + # Notify document owner about new entry + owner_username = event.parent_log_document_info.owner_username + notification_configs.append( + { + "username": owner_username, + "notification_type": "new_entries", + "title": f"New Log Entry in {event.parent_log_document_info.name}", + "body": self.formatter.format_entry_added(event), + "context": None, + "event_type": event_type, + "entry_id": 
entry_id, + } + ) + + await self.processor.process_notifications(event, notification_configs) + + async def _handle_update_event( + self, event: Union[LogEntryUpdateEvent, LogEntryReplyUpdateEvent], is_reply: bool = False + ) -> None: + """ + Unified handler for update events (entry or reply updates). + + Args: + event: The update event + is_reply: Whether this is a reply update + """ + notification_configs = [] + + # Determine event type for threading + event_type = ( + NotificationEventType.REPLY_UPDATE if is_reply else NotificationEventType.ENTRY_UPDATE + ) + + # Extract IDs for threading + entry_id = event.log_info.id if hasattr(event, "log_info") else None + parent_entry_id = ( + event.parent_log_info.id if is_reply and hasattr(event, "parent_log_info") else None + ) + + if is_reply: + creator_username = event.parent_log_info.entered_by_username + own_edit_title = ( + f"Reply Updated on Your Log Entry in {event.parent_log_document_info.name}" + ) + own_edit_body = self.formatter.format_own_reply_updated(event) + owner_title = f"Reply Updated in {event.parent_log_document_info.name}" + owner_body = self.formatter.format_reply_updated(event) + owner_notification_type = "entry_replies" + own_context = "own_reply_update" + else: + creator_username = event.log_info.entered_by_username + own_edit_title = f"Your Log Entry Was Edited: {event.parent_log_document_info.name}" + own_edit_body = self.formatter.format_own_entry_edited(event) + owner_title = f"Log Entry Updated: {event.parent_log_document_info.name}" + owner_body = self.formatter.format_entry_updated(event) + owner_notification_type = "entry_updates" + own_context = "own_entry_edit" + + # Config for notifying the original creator + notification_configs.append( + { + "username": creator_username, + "notification_type": "own_entry_edits", + "title": own_edit_title, + "body": own_edit_body, + "context": own_context, + "event_type": event_type, + "entry_id": entry_id, + "parent_entry_id": parent_entry_id, + } + 
) + + # Config for notifying the document owner + owner_username = event.parent_log_document_info.owner_username + notification_configs.append( + { + "username": owner_username, + "notification_type": owner_notification_type, + "title": owner_title, + "body": owner_body, + "context": None, + "event_type": event_type, + "entry_id": entry_id, + "parent_entry_id": parent_entry_id, + } + ) + + await self.processor.process_notifications(event, notification_configs) + + async def _handle_reaction_event( + self, event: Union[LogReactionAddEvent, LogReactionDeleteEvent], is_add: bool = True + ) -> None: + """ + Handle log reaction events (both add and delete). + + Notify the original entry creator if someone else reacts to or removes a reaction from their entry. + + Args: + event: The log reaction event + is_add: Whether this is an add event (True) or delete event (False) + """ + # Don't notify the reactor (person who added/removed the reaction) + if event.event_triggered_by_username == event.parent_log_info.entered_by_username: + return + + # Check if we should notify about reactions + creator_username = event.parent_log_info.entered_by_username + if not self.processor.should_notify(creator_username, "reactions"): + return + + # Determine event type for threading + event_type = ( + NotificationEventType.REACTION_ADD if is_add else NotificationEventType.REACTION_DELETE + ) + + # Extract entry ID for threading + entry_id = event.parent_log_info.id if hasattr(event, "parent_log_info") else None + + # Build notification + if is_add: + title = f"Reaction to Your Log Entry in {event.parent_log_document_info.name}" + body = self.formatter.format_reaction_added(event) + else: + title = f"Reaction Removed from Your Log Entry in {event.parent_log_document_info.name}" + body = self.formatter.format_reaction_deleted(event) + + # Send notification with threading support + await self.processor.send_notification_with_threading( + username=creator_username, + title=title, + body=body, + 
event_type=event_type, + document_id=event.parent_log_document_info.id, + document_name=event.parent_log_document_info.name, + entry_id=entry_id, + action_by=event.event_triggered_by_username, + notification_type="reactions", + ) + + async def _handle_delete_event( + self, event: Union[LogEntryDeleteEvent, LogEntryReplyDeleteEvent], is_reply: bool = False + ) -> None: + """ + Unified handler for delete events (entry or reply deletes). + + Args: + event: The delete event + is_reply: Whether this is a reply delete + """ + notification_configs = [] + + # Determine event type for threading + event_type = ( + NotificationEventType.REPLY_DELETE if is_reply else NotificationEventType.ENTRY_DELETE + ) + + # Extract IDs for threading + entry_id = event.log_info.id if hasattr(event, "log_info") else None + parent_entry_id = ( + event.parent_log_info.id if is_reply and hasattr(event, "parent_log_info") else None + ) + + if is_reply: + # Notify the original entry creator about reply deletion + creator_username = event.parent_log_info.entered_by_username + notification_configs.append( + { + "username": creator_username, + "notification_type": "entry_replies", + "title": f"Reply Deleted from Your Log Entry in {event.parent_log_document_info.name}", + "body": self.formatter.format_reply_deleted(event), + "context": "reply_delete", + "event_type": event_type, + "entry_id": entry_id, + "parent_entry_id": parent_entry_id, + } + ) + + # Notify the document owner about reply deletion + owner_username = event.parent_log_document_info.owner_username + notification_configs.append( + { + "username": owner_username, + "notification_type": "document_replies", + "title": f"Reply Deleted in Your Document: {event.parent_log_document_info.name}", + "body": self.formatter.format_document_reply_deleted(event), + "context": "document_owner", + "event_type": event_type, + "entry_id": entry_id, + "parent_entry_id": parent_entry_id, + } + ) + else: + # Notify the original entry creator about their 
entry being deleted + creator_username = event.log_info.entered_by_username + notification_configs.append( + { + "username": creator_username, + "notification_type": "own_entry_edits", + "title": f"Your Log Entry Was Deleted: {event.parent_log_document_info.name}", + "body": self.formatter.format_own_entry_deleted(event), + "context": "own_entry_delete", + "event_type": event_type, + "entry_id": entry_id, + } + ) + + # Notify the document owner about entry deletion + owner_username = event.parent_log_document_info.owner_username + notification_configs.append( + { + "username": owner_username, + "notification_type": "entry_updates", + "title": f"Log Entry Deleted: {event.parent_log_document_info.name}", + "body": self.formatter.format_entry_deleted(event), + "context": "entry_delete", + "event_type": event_type, + "entry_id": entry_id, + } + ) + + await self.processor.process_notifications(event, notification_configs) diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/notification_processor.py b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/notification_processor.py new file mode 100644 index 000000000..e3e45189f --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/notification_processor.py @@ -0,0 +1,410 @@ +""" +Notification processor for Apprise Smart Notification Handler. 
+""" + +from logging import Logger +from typing import Any, Dict, List, Optional + +try: + # Try relative imports first (when used as a package) + from .email_threading import EmailThreadingStrategy, NotificationEventType + from .apprise_email_wrapper import AppriseWithEmailHeaders, is_email_notification +except ImportError: + # Fall back to absolute imports (when imported directly from tests) + from email_threading import EmailThreadingStrategy, NotificationEventType # type: ignore[no-redef] + from apprise_email_wrapper import AppriseWithEmailHeaders, is_email_notification # type: ignore[no-redef] + + +class NotificationProcessor: + """Handles processing and sending of notifications.""" + + def __init__( + self, logger: Logger, domain: str = "notifications.bely.app", bely_url: Optional[str] = None + ): + """ + Initialize the notification processor. + + Args: + logger: Logger instance for output + domain: Domain for email threading IDs (default: notifications.bely.app) + bely_url: Optional BELY application URL for generating unsubscribe links + """ + + self.logger = logger + self.bely_url = bely_url + + # Per-endpoint tracking: each entry is a dict with + # {"apprise": AppriseWithEmailHeaders, "settings": Dict[str, bool], "has_email": bool, + # "config_id": Optional[int]} + self.user_endpoint_configs: Dict[str, List[dict]] = {} + + # Reverse index: config_id -> username for O(1) delete lookups + self._config_id_to_username: Dict[int, str] = {} + + # Initialize email threading strategy + self.email_threading = EmailThreadingStrategy(domain=domain) + + def initialize_from_config(self, config: Dict[str, Any], config_loader: Any) -> None: + """ + Initialize Apprise instances from configuration. 
+ + Supports two config formats: + - API format: user_config has "configs" list with per-endpoint dicts + - YAML format: user_config has "apprise_urls" list with shared "notifications" + + Args: + config: Configuration dictionary + config_loader: ConfigLoader instance for URL processing + """ + global_config = config.get("global", {}) + users_config = config.get("users", {}) + + self._config_id_to_username.clear() + + for username, user_config in users_config.items(): + try: + if "configs" in user_config: + # API format: per-endpoint configs + for endpoint_config in user_config["configs"]: + self._add_endpoint( + username, + endpoint_config["apprise_url"], + endpoint_config.get("notifications", {}), + config_loader, + global_config, + config_id=endpoint_config.get("config_id"), + ) + else: + # YAML format: shared notifications across all URLs + apprise_urls = user_config.get("apprise_urls", []) + if isinstance(apprise_urls, str): + apprise_urls = [apprise_urls] + notifications = user_config.get("notifications", {}) + + for url in apprise_urls: + self._add_endpoint( + username, url, notifications, config_loader, global_config + ) + + except Exception as e: + self.logger.error(f"Error initializing Apprise for user {username}: {e}") + + def _add_endpoint( + self, + username: str, + url: str, + notifications: Dict[str, bool], + config_loader: Any, + global_config: Dict[str, Any], + config_id: Optional[int] = None, + ) -> None: + """ + Add a single notification endpoint for a user. 
+ + Args: + username: Username this endpoint belongs to + url: Apprise URL for the endpoint + notifications: Notification preferences for this endpoint + config_loader: ConfigLoader instance for URL processing + global_config: Global configuration dictionary + config_id: Optional NotificationConfiguration ID from the API + """ + apobj = AppriseWithEmailHeaders() + processed_url = config_loader.process_apprise_url(url, global_config) + + if not apobj.add(processed_url): + self.logger.warning(f"Failed to add Apprise URL for user {username}: {url}") + return + + has_email = is_email_notification(processed_url) + + settings = { + "entry_updates": notifications.get("entry_updates", True), + "own_entry_edits": notifications.get("own_entry_edits", True), + "entry_replies": notifications.get("entry_replies", True), + "new_entries": notifications.get("new_entries", True), + "reactions": notifications.get("reactions", True), + "document_replies": notifications.get("document_replies", True), + } + + if username not in self.user_endpoint_configs: + self.user_endpoint_configs[username] = [] + + self.user_endpoint_configs[username].append( + { + "apprise": apobj, + "settings": settings, + "has_email": has_email, + "config_id": config_id, + } + ) + + if config_id is not None: + self._config_id_to_username[config_id] = username + + self.logger.debug(f"Added endpoint for user: {username} (has_email: {has_email})") + + def should_notify(self, username: Optional[str], notification_type: str) -> bool: + """ + Check if user should be notified for this type of event. + + Returns True if ANY endpoint for the user has this notification type enabled. 
+ + Args: + username: Username to check + notification_type: Type of notification (entry_updates, own_entry_edits, + entry_replies, new_entries, reactions, document_replies) + + Returns: + True if user should be notified, False otherwise + """ + if not username: + return False + + endpoints = self.user_endpoint_configs.get(username) + if not endpoints: + return False + + return any(ep["settings"].get(notification_type, True) for ep in endpoints) + + def get_username_by_config_id(self, config_id: int) -> Optional[str]: + """ + Look up which user owns a given NotificationConfiguration ID. + + Args: + config_id: The NotificationConfiguration ID + + Returns: + Username if found, None otherwise + """ + return self._config_id_to_username.get(config_id) + + def remove_endpoint_by_config_id(self, config_id: int) -> bool: + """ + Remove a notification endpoint by its config ID. + + Looks up the username via the reverse index, removes the matching + endpoint, and cleans up the reverse index. If the user has no + remaining endpoints, removes the user key entirely. + + Args: + config_id: The NotificationConfiguration ID to remove + + Returns: + True if found and removed, False otherwise + """ + username = self._config_id_to_username.get(config_id) + if username is None: + return False + + endpoints = self.user_endpoint_configs.get(username) + if not endpoints: + del self._config_id_to_username[config_id] + return False + + for i, ep in enumerate(endpoints): + if ep.get("config_id") == config_id: + endpoints.pop(i) + del self._config_id_to_username[config_id] + + if not endpoints: + del self.user_endpoint_configs[username] + + return True + + return False + + async def send_notification( + self, + username: Optional[str], + title: str, + body: str, + headers: Optional[Dict[str, str]] = None, + notification_type: Optional[str] = None, + ) -> None: + """ + Send notification to user via Apprise. 
+ + When notification_type is provided, only sends to endpoints that have + that type enabled. When None, sends to all endpoints. + + Args: + username: Username to notify + title: Notification title + body: Notification body + headers: Optional headers (used for email threading) + notification_type: Optional notification type to filter endpoints + """ + endpoints = self.user_endpoint_configs.get(username) if username else None + if not endpoints: + self.logger.debug(f"No notification endpoints for user: {username}") + return + + for ep in endpoints: + # Filter by notification type if specified + if notification_type and not ep["settings"].get(notification_type, True): + continue + + try: + apobj = ep["apprise"] + has_email = ep["has_email"] + config_id = ep.get("config_id") + + # Create per-endpoint body with unsubscribe link for email endpoints + ep_body = body + if has_email and notification_type and self.bely_url and config_id is not None: + base = self.bely_url.rstrip("/") + unsub_url = ( + f"{base}/views/notificationConfiguration/unsubscribe" + f"?configId={config_id}¬ificationType={notification_type}" + ) + display_type = notification_type.replace("_", " ") + ep_body += ( + f"
" + f'Unsubscribe from {display_type} notifications' + f"" + ) + + if headers and has_email: + result = apobj.notify( + body=ep_body, + title=title, + headers=headers, + ) + self.logger.debug( + f"Sent notification with email threading headers to {username}" + ) + else: + result = apobj.notify( + body=ep_body, + title=title, + ) + + if result: + self.logger.info(f"Notification sent to {username}: {title}") + else: + self.logger.warning(f"Failed to send notification to {username}") + + except Exception as e: + self.logger.error(f"Error sending notification to {username}: {e}", exc_info=True) + + async def send_notification_with_threading( + self, + username: Optional[str], + title: str, + body: str, + event_type: NotificationEventType, + document_id: str, + document_name: str, + entry_id: Optional[str] = None, + parent_entry_id: Optional[str] = None, + action_by: Optional[str] = None, + notification_type: Optional[str] = None, + ) -> None: + """ + Send notification with email threading support. 
+ + Args: + username: Username to notify + title: Notification title (will be overridden for threading) + body: Notification body + event_type: Type of notification event + document_id: The document ID + document_name: The document name + entry_id: The log entry ID (if applicable) + parent_entry_id: The parent entry ID (for replies) + action_by: User who performed the action + notification_type: Optional notification type to filter endpoints + """ + endpoints = self.user_endpoint_configs.get(username) if username else None + if not endpoints: + self.logger.debug(f"No notification endpoints for user: {username}") + return + + # Check if any endpoint for this user has email notifications + has_email = any(ep["has_email"] for ep in endpoints) + + # Generate appropriate subject based on notification type + action_desc = f"by {action_by}" if action_by else None + threaded_subject = self.email_threading.generate_subject( + event_type=event_type, + document_title=document_name, + action_description=action_desc, + is_email=has_email, + ) + + # Generate email headers if any endpoint has email notifications + headers = None + if has_email: + try: + headers = self.email_threading.get_email_headers( + event_type=event_type, + document_id=document_id, + document_name=document_name, + entry_id=entry_id, + parent_entry_id=parent_entry_id, + ) + self.logger.debug(f"Generated email threading headers for {username}: {headers}") + except ValueError as e: + self.logger.warning(f"Failed to generate email headers: {e}") + + # Send notification with threading support + await self.send_notification(username, threaded_subject, body, headers, notification_type) + + async def process_notifications( + self, event: Any, notification_configs: List[Dict[str, Any]] + ) -> None: + """ + Process and send notifications based on configuration. 
+ + Args: + event: The event to process + notification_configs: List of dicts with: + - username: User to notify + - notification_type: Type of notification setting to check + - title: Notification title + - body: Notification body + - context: Optional context for trigger description + - event_type: Optional NotificationEventType for threading + - entry_id: Optional entry ID for threading + - parent_entry_id: Optional parent entry ID for threading + """ + notifications_sent = [] + + for config in notification_configs: + username = config["username"] + notification_type = config["notification_type"] + + # Skip if already notified or shouldn't notify + if username in notifications_sent: + continue + if not self.should_notify(username, notification_type): + continue + if event.event_triggered_by_username == username: + continue + + title = config["title"] + body = config["body"] + + # Check if we have threading information + if "event_type" in config and hasattr(event, "parent_log_document_info"): + # Use threading-aware notification + await self.send_notification_with_threading( + username=username, + title=title, + body=body, + event_type=config["event_type"], + document_id=event.parent_log_document_info.id, + document_name=event.parent_log_document_info.name, + entry_id=config.get("entry_id"), + parent_entry_id=config.get("parent_entry_id"), + action_by=event.event_triggered_by_username, + notification_type=notification_type, + ) + else: + # Fall back to simple notification + await self.send_notification( + username, title, body, notification_type=notification_type + ) + + notifications_sent.append(username) diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/run_tests.py b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/run_tests.py new file mode 100644 index 000000000..fb9cc0967 --- /dev/null +++ 
b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/run_tests.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python3 +""" +Test runner for apprise_smart_notification handler. + +Usage: + python run_tests.py # Run all tests + python run_tests.py -v # Verbose output + python run_tests.py --cov # With coverage report + python run_tests.py -k test_entry_add # Run specific test +""" + +import sys +import subprocess +from pathlib import Path +import os + + +def main(): + """Run the test suite.""" + # Get the directory containing this script + test_dir = Path(__file__).parent + + # Add src directory to PYTHONPATH for the subprocess + src_path = test_dir.parent.parent.parent / "src" + env = os.environ.copy() + if src_path.exists(): + if "PYTHONPATH" in env: + env["PYTHONPATH"] = f"{src_path}{os.pathsep}{env['PYTHONPATH']}" + else: + env["PYTHONPATH"] = str(src_path) + + # Base pytest command + cmd = [sys.executable, "-m", "pytest", "test/"] + + # Add common options + cmd.extend( + [ + "--asyncio-mode=auto", # Handle async tests + "--tb=short", # Shorter traceback format + ] + ) + + # Check for coverage flag + if "--cov" in sys.argv: + cmd.extend( + [ + "--cov=apprise_smart_notification", + "--cov-report=term-missing", + "--cov-report=html:htmlcov", + ] + ) + sys.argv.remove("--cov") + + # Pass through any other arguments + cmd.extend(sys.argv[1:]) + + # Run tests + print(f"Running: {' '.join(cmd)}") + print("-" * 60) + + result = subprocess.run(cmd, cwd=test_dir, env=env) + sys.exit(result.returncode) + + +if __name__ == "__main__": + main() diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_api_config.py b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_api_config.py new file mode 100644 index 000000000..fa382b648 --- /dev/null +++ 
b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_api_config.py @@ -0,0 +1,533 @@ +""" +Tests for API-based notification configuration loading. + +Tests the code path where notification configs are loaded from the BELY REST API +(/api/NotificationConfiguration/all) instead of YAML files. +""" + +import logging +from datetime import datetime +from pathlib import Path +from unittest.mock import MagicMock, patch, AsyncMock +import pytest +import yaml +import sys + +# Add src directory to path to import bely_mqtt +src_path = Path(__file__).parent.parent.parent.parent / "src" +if src_path.exists(): + sys.path.insert(0, str(src_path)) + +# Mock apprise before importing handler +sys.modules["apprise"] = MagicMock() + +from bely_mqtt import LogEntryAddEvent, LogEntryUpdateEvent # noqa: E402 +from bely_mqtt.models import LogInfo, LogDocumentInfo, LogbookInfo # noqa: E402 +from bely_mqtt.config import GlobalConfig # noqa: E402 + +# Add parent directory to path to import handler module +handler_path = Path(__file__).parent.parent +if handler_path.exists(): + sys.path.insert(0, str(handler_path)) + +from config_loader import ConfigLoader # noqa: E402 +from handler import AppriseSmartNotificationHandler # noqa: E402 + + +def make_mock_nc( + username, + notification_endpoint, + handler_preferences_by_name=None, + notification_provider_name="email", + name="config", + description="", + id=1, +): + """Create a mock NotificationConfiguration object matching the API model.""" + mock = MagicMock() + mock.username = username + mock.notification_endpoint = notification_endpoint + mock.handler_preferences_by_name = handler_preferences_by_name + mock.notification_provider_name = notification_provider_name + mock.name = name + mock.description = description + mock.id = id + return mock + + +SAMPLE_API_CONFIGS = [ + make_mock_nc( + username="logr", + notification_endpoint="mailto://logr@example.com", + handler_preferences_by_name={ + 
"entry_updates": True, + "own_entry_edits": True, + "entry_replies": True, + "new_entries": True, + "reactions": True, + "document_replies": True, + }, + notification_provider_name="email", + name="logr-email", + id=1, + ), + make_mock_nc( + username="logr", + notification_endpoint="slack://TokenA/TokenB/TokenC/", + handler_preferences_by_name={ + "entry_updates": True, + "own_entry_edits": False, + "entry_replies": True, + "new_entries": False, + "reactions": False, + "document_replies": True, + }, + notification_provider_name="slack", + name="logr-slack", + id=2, + ), + make_mock_nc( + username="logr", + notification_endpoint="json://webhook.example.com/hook", + handler_preferences_by_name={ + "entry_updates": False, + "own_entry_edits": False, + "entry_replies": False, + "new_entries": False, + "reactions": False, + "document_replies": False, + }, + notification_provider_name="custom_webhook", + name="logr-webhook", + id=3, + ), +] + + +def make_mock_api_factory(configs): + """Create a mock api_factory that returns the given configs from get_all().""" + mock_factory = MagicMock() + mock_nc_api = MagicMock() + mock_nc_api.get_all.return_value = configs + mock_factory.getNotificationConfigurationApi.return_value = mock_nc_api + return mock_factory + + +class TestLoadConfigFromApi: + """Unit tests for ConfigLoader.load_config_from_api().""" + + @pytest.fixture + def loader(self): + return ConfigLoader(logging.getLogger("test")) + + def test_load_config_from_api_basic(self, loader): + """API configs are loaded into the correct structure.""" + factory = make_mock_api_factory(SAMPLE_API_CONFIGS) + result = loader.load_config_from_api(factory) + + assert "users" in result + assert "logr" in result["users"] + + logr_configs = result["users"]["logr"]["configs"] + assert len(logr_configs) == 3 + + # Verify first config + assert logr_configs[0]["apprise_url"] == "mailto://logr@example.com" + assert logr_configs[0]["notifications"]["entry_updates"] is True + assert 
class TestLoadConfigFromApi:
    """Unit tests for ConfigLoader.load_config_from_api()."""

    @pytest.fixture
    def loader(self):
        return ConfigLoader(logging.getLogger("test"))

    def test_load_config_from_api_basic(self, loader):
        """API configs are loaded into the correct structure."""
        loaded = loader.load_config_from_api(make_mock_api_factory(SAMPLE_API_CONFIGS))

        assert "users" in loaded
        assert "logr" in loaded["users"]

        configs = loaded["users"]["logr"]["configs"]
        assert len(configs) == 3

        # First config: email with everything enabled
        assert configs[0]["apprise_url"] == "mailto://logr@example.com"
        assert configs[0]["notifications"]["entry_updates"] is True
        assert configs[0]["notifications"]["reactions"] is True

        # Second config: slack with a mixed preference set
        assert configs[1]["apprise_url"] == "slack://TokenA/TokenB/TokenC/"
        assert configs[1]["notifications"]["own_entry_edits"] is False
        assert configs[1]["notifications"]["new_entries"] is False

        # Third config: webhook with everything disabled
        assert configs[2]["apprise_url"] == "json://webhook.example.com/hook"
        assert all(v is False for v in configs[2]["notifications"].values())

    def test_load_config_from_api_multiple_users(self, loader):
        """Configs for different users are grouped correctly."""
        source = [
            make_mock_nc("logr", "mailto://logr@example.com", {"entry_updates": True}, id=1),
            make_mock_nc(
                "djarosz",
                "mailto://dj@example.com",
                {"entry_updates": True, "reactions": False},
                id=2,
            ),
            make_mock_nc("logr", "slack://token/a/b/", {"entry_updates": False}, id=3),
            make_mock_nc("djarosz", "discord://webhook/token", {"new_entries": True}, id=4),
        ]
        loaded = loader.load_config_from_api(make_mock_api_factory(source))

        assert len(loaded["users"]) == 2
        assert len(loaded["users"]["logr"]["configs"]) == 2
        assert len(loaded["users"]["djarosz"]["configs"]) == 2

        # Per-user insertion order must be preserved
        assert loaded["users"]["logr"]["configs"][0]["apprise_url"] == "mailto://logr@example.com"
        assert loaded["users"]["logr"]["configs"][1]["apprise_url"] == "slack://token/a/b/"
        assert loaded["users"]["djarosz"]["configs"][0]["apprise_url"] == "mailto://dj@example.com"
        assert loaded["users"]["djarosz"]["configs"][1]["apprise_url"] == "discord://webhook/token"

    def test_load_config_from_api_empty_username(self, loader):
        """Configs with empty or None username are skipped."""
        source = [
            make_mock_nc(None, "mailto://nobody@example.com", {"entry_updates": True}, id=1),
            make_mock_nc("", "mailto://empty@example.com", {"entry_updates": True}, id=2),
            make_mock_nc("logr", "mailto://logr@example.com", {"entry_updates": True}, id=3),
        ]
        loaded = loader.load_config_from_api(make_mock_api_factory(source))

        assert len(loaded["users"]) == 1
        assert "logr" in loaded["users"]

    def test_load_config_from_api_empty_preferences(self, loader):
        """Configs with None handler_preferences_by_name default to empty dict."""
        source = [
            make_mock_nc(
                "logr", "mailto://logr@example.com", handler_preferences_by_name=None, id=1
            ),
        ]
        loaded = loader.load_config_from_api(make_mock_api_factory(source))

        assert loaded["users"]["logr"]["configs"][0]["notifications"] == {}

    def test_load_config_from_api_empty_result(self, loader):
        """Empty API response returns empty users dict."""
        assert loader.load_config_from_api(make_mock_api_factory([])) == {"users": {}}
class TestHandlerApiInit:
    """Tests for handler initialization via API factory."""

    @pytest.fixture
    def mock_apprise_cls(self):
        """Provide a mock AppriseWithEmailHeaders class."""
        cls = MagicMock()
        cls.return_value.add = MagicMock(return_value=True)
        cls.return_value.notify = MagicMock(return_value=True)
        cls.return_value.__bool__ = MagicMock(return_value=True)
        return cls

    def test_handler_init_with_api_factory(self, mock_apprise_cls):
        """Handler initializes correctly from API factory."""
        factory = make_mock_api_factory(SAMPLE_API_CONFIGS)

        with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls):
            handler = AppriseSmartNotificationHandler(
                api_factory=factory,
                global_config=GlobalConfig({"bely_url": "https://bely.example.com"}),
            )

        endpoints = handler.processor.user_endpoint_configs
        # logr should have 3 endpoint configs
        assert "logr" in endpoints
        assert len(endpoints["logr"]) == 3

        # First endpoint (email): settings mapped straight through
        assert endpoints["logr"][0]["settings"]["entry_updates"] is True
        assert endpoints["logr"][0]["settings"]["reactions"] is True

        # Second endpoint (slack): selective preferences preserved
        assert endpoints["logr"][1]["settings"]["own_entry_edits"] is False
        assert endpoints["logr"][1]["settings"]["new_entries"] is False

    def test_handler_api_with_yaml_global_config(self, mock_apprise_cls, tmp_path):
        """Handler loads users from API and global settings from YAML."""
        # YAML containing only global mail server config (no users)
        config_path = tmp_path / "global_only.yaml"
        with open(config_path, "w") as f:
            yaml.dump(
                {
                    "global": {
                        "mail_server": "smtp.example.com",
                        "mail_from": "noreply@example.com",
                        "mail_from_name": "BELY Notifications",
                        "mail_port": 25,
                    },
                },
                f,
            )

        factory = make_mock_api_factory(
            [make_mock_nc("logr", "mailto://logr@example.com", {"entry_updates": True}, id=1)]
        )

        with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls):
            handler = AppriseSmartNotificationHandler(
                config_path=str(config_path),
                api_factory=factory,
                global_config=GlobalConfig({"bely_url": "https://bely.example.com"}),
            )

        # Users come from the API ...
        assert "logr" in handler.processor.user_endpoint_configs
        # ... while mail settings come from the YAML file.
        assert handler.global_config_data["mail_server"] == "smtp.example.com"
        assert handler.global_config_data["mail_from"] == "noreply@example.com"

    def test_handler_api_fallback_to_yaml(self, mock_apprise_cls, tmp_path):
        """API failure falls back to YAML config."""
        config_path = tmp_path / "fallback.yaml"
        with open(config_path, "w") as f:
            yaml.dump(
                {
                    "users": {
                        "alice": {
                            "apprise_urls": ["mailto://alice@example.com"],
                            "notifications": {
                                "entry_updates": True,
                                "own_entry_edits": True,
                                "entry_replies": True,
                                "new_entries": True,
                                "reactions": True,
                                "document_replies": True,
                            },
                        },
                    },
                },
                f,
            )

        # API factory whose get_all() raises
        failing_factory = MagicMock()
        failing_api = MagicMock()
        failing_api.get_all.side_effect = Exception("API connection refused")
        failing_factory.getNotificationConfigurationApi.return_value = failing_api

        with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls):
            handler = AppriseSmartNotificationHandler(
                config_path=str(config_path),
                api_factory=failing_factory,
            )

        # Should have fallen back to YAML
        assert "alice" in handler.processor.user_endpoint_configs
        assert len(handler.processor.user_endpoint_configs["alice"]) == 1
class TestHandlerApiEventProcessing:
    """Tests for event processing with API-loaded configs."""

    @pytest.fixture
    def mock_apprise_cls(self):
        # Mock AppriseWithEmailHeaders class whose instances accept adds/notifies
        # and are truthy (so the processor treats them as configured endpoints).
        mock_cls = MagicMock()
        mock_cls.return_value.add = MagicMock(return_value=True)
        mock_cls.return_value.notify = MagicMock(return_value=True)
        mock_cls.return_value.__bool__ = MagicMock(return_value=True)
        return mock_cls

    @pytest.fixture
    def api_handler(self, mock_apprise_cls):
        """Create a handler initialized via API with two users."""
        configs = [
            make_mock_nc(
                "alice",
                "mailto://alice@example.com",
                {
                    "entry_updates": True,
                    "own_entry_edits": True,
                    "entry_replies": True,
                    "new_entries": True,
                    "reactions": True,
                    "document_replies": True,
                },
                id=1,
            ),
            make_mock_nc(
                "alice",
                "slack://tokenA/tokenB/tokenC/",
                {
                    "entry_updates": True,
                    "own_entry_edits": False,
                    "entry_replies": True,
                    "new_entries": True,
                    "reactions": False,
                    "document_replies": True,
                },
                id=2,
            ),
            make_mock_nc(
                "bob",
                "mailto://bob@example.com",
                {
                    "entry_updates": True,
                    "own_entry_edits": True,
                    "entry_replies": True,
                    "new_entries": False,
                    "reactions": False,
                    "document_replies": True,
                },
                id=3,
            ),
        ]
        factory = make_mock_api_factory(configs)
        global_config = GlobalConfig({"bely_url": "https://bely.example.com"})

        with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls):
            handler = AppriseSmartNotificationHandler(
                api_factory=factory,
                global_config=global_config,
            )
        # Mock notify on all endpoint apprise objects
        for username, endpoints in handler.processor.user_endpoint_configs.items():
            for ep in endpoints:
                ep["apprise"].notify = MagicMock(return_value=True)
        return handler

    @pytest.mark.asyncio
    async def test_handler_processes_event_with_api_config(self, api_handler):
        """Event processing works correctly with API-loaded configs."""
        timestamp = datetime.now().isoformat()

        doc = LogDocumentInfo(
            id=100,
            name="Test Document",
            ownerUsername="alice",
            createdByUsername="alice",
            lastModifiedByUsername="alice",
            enteredOnDateTime=timestamp,
            lastModifiedOnDateTime=timestamp,
        )

        # Bob adds an entry to Alice's document
        event = LogEntryAddEvent(
            eventTimestamp=timestamp,
            eventTriggedByUsername="bob",
            entityName="LogEntry",
            entityId=1,
            parentLogDocumentInfo=doc,
            logInfo=LogInfo(
                id=1,
                enteredByUsername="bob",
                lastModifiedByUsername="bob",
                enteredOnDateTime=timestamp,
                lastModifiedOnDateTime=timestamp,
            ),
            description="New finding",
            textDiff="+ Added test results",
            logbookList=[LogbookInfo(id=1, name="Test", displayName="Test")],
        )

        with patch.object(
            api_handler.processor, "send_notification", new_callable=AsyncMock
        ) as mock_send:
            await api_handler.handle_log_entry_add(event)

        # Alice should be notified (document owner, new_entries=True)
        mock_send.assert_called_once()
        assert mock_send.call_args[0][0] == "alice"

    @pytest.mark.asyncio
    async def test_per_endpoint_preferences_respected(self, api_handler):
        """Per-endpoint notification preferences from API are respected."""
        # Alice has two endpoints:
        #   email: all enabled
        #   slack: own_entry_edits=False, reactions=False

        # Verify should_notify uses ANY endpoint having the setting enabled
        assert api_handler.processor.should_notify("alice", "own_entry_edits") is True
        assert api_handler.processor.should_notify("alice", "reactions") is True

        # Bob: new_entries=False, reactions=False
        assert api_handler.processor.should_notify("bob", "new_entries") is False
        assert api_handler.processor.should_notify("bob", "reactions") is False
        assert api_handler.processor.should_notify("bob", "entry_replies") is True

    @pytest.mark.asyncio
    async def test_unconfigured_user_not_notified(self, api_handler):
        """Users not in the API config don't receive notifications."""
        assert api_handler.processor.should_notify("charlie", "entry_updates") is False

        timestamp = datetime.now().isoformat()
        doc = LogDocumentInfo(
            id=200,
            name="Charlie's Doc",
            ownerUsername="charlie",
            createdByUsername="charlie",
            lastModifiedByUsername="charlie",
            enteredOnDateTime=timestamp,
            lastModifiedOnDateTime=timestamp,
        )

        event = LogEntryAddEvent(
            eventTimestamp=timestamp,
            eventTriggedByUsername="bob",
            entityName="LogEntry",
            entityId=2,
            parentLogDocumentInfo=doc,
            logInfo=LogInfo(
                id=2,
                enteredByUsername="bob",
                lastModifiedByUsername="bob",
                enteredOnDateTime=timestamp,
                lastModifiedOnDateTime=timestamp,
            ),
            description="Entry in charlie's doc",
            textDiff="+ content",
            logbookList=[LogbookInfo(id=1, name="Test", displayName="Test")],
        )

        with patch.object(
            api_handler.processor, "send_notification", new_callable=AsyncMock
        ) as mock_send:
            await api_handler.handle_log_entry_add(event)
            mock_send.assert_not_called()

    @pytest.mark.asyncio
    async def test_update_notifies_both_creator_and_owner(self, api_handler):
        """Third-party update notifies both entry creator and document owner."""
        timestamp = datetime.now().isoformat()

        doc = LogDocumentInfo(
            id=300,
            name="Alice's Project",
            ownerUsername="alice",
            createdByUsername="alice",
            lastModifiedByUsername="alice",
            enteredOnDateTime=timestamp,
            lastModifiedOnDateTime=timestamp,
        )

        bob_entry = LogInfo(
            id=3,
            enteredByUsername="bob",
            lastModifiedByUsername="bob",
            enteredOnDateTime=timestamp,
            lastModifiedOnDateTime=timestamp,
        )

        # Alice (owner, not the editor) edits Bob's entry
        event = LogEntryUpdateEvent(
            eventTimestamp=timestamp,
            eventTriggedByUsername="alice",
            entityName="LogEntry",
            entityId=3,
            parentLogDocumentInfo=doc,
            logInfo=bob_entry,
            description="Fixed typo",
            textDiff="- old text\n+ new text",
            logbookList=[LogbookInfo(id=1, name="Test", displayName="Test")],
        )

        with patch.object(
            api_handler.processor, "send_notification", new_callable=AsyncMock
        ) as mock_send:
            await api_handler.handle_log_entry_update(event)

        # Bob gets notified (own_entry_edits), Alice is the actor so only 1 call
        assert mock_send.call_count == 1
        assert mock_send.call_args[0][0] == "bob"


if __name__ == "__main__":
    pytest.main([__file__, "-v", "--asyncio-mode=auto"])
"""
Tests for runtime configuration reload triggered by NotificationConfiguration events.

When a NotificationConfiguration is created, updated, or deleted via the BELY UI/API,
the handler receives a generic CoreEvent and debounce-reloads config from the API.
"""

import asyncio
from datetime import datetime
from pathlib import Path
from unittest.mock import MagicMock, patch, AsyncMock
import pytest
import yaml
import sys

# Add src directory to path to import bely_mqtt
src_path = Path(__file__).parent.parent.parent.parent / "src"
if src_path.exists():
    sys.path.insert(0, str(src_path))

# Mock apprise before importing handler
sys.modules["apprise"] = MagicMock()

from bely_mqtt import CoreEvent  # noqa: E402
from bely_mqtt.config import GlobalConfig  # noqa: E402

# Add parent directory to path to import handler module
handler_path = Path(__file__).parent.parent
if handler_path.exists():
    sys.path.insert(0, str(handler_path))

from handler import AppriseSmartNotificationHandler  # noqa: E402


def make_mock_nc(username, notification_endpoint, handler_preferences_by_name=None, id=1):
    """Create a mock NotificationConfiguration object."""
    mock = MagicMock()
    mock.username = username
    mock.notification_endpoint = notification_endpoint
    mock.handler_preferences_by_name = handler_preferences_by_name
    mock.id = id
    return mock


def make_mock_api_factory(configs):
    """Create a mock api_factory that returns the given configs."""
    mock_factory = MagicMock()
    mock_nc_api = MagicMock()
    mock_nc_api.get_all.return_value = configs
    mock_factory.getNotificationConfigurationApi.return_value = mock_nc_api
    return mock_factory


def make_core_event(entity_name, entity_id=1):
    """Create a CoreEvent with the given entity name."""
    # NOTE: "eventTriggedByUsername" matches the (misspelled) upstream API field name.
    return CoreEvent(
        description=f"{entity_name} changed",
        eventTimestamp=datetime.now().isoformat(),
        entityName=entity_name,
        entityId=entity_id,
        eventTriggedByUsername="admin",
    )


@pytest.fixture
def mock_apprise_cls():
    """Provide a mock AppriseWithEmailHeaders class."""
    mock_cls = MagicMock()
    mock_cls.return_value.add = MagicMock(return_value=True)
    mock_cls.return_value.notify = MagicMock(return_value=True)
    mock_cls.return_value.__bool__ = MagicMock(return_value=True)
    return mock_cls


@pytest.fixture
def initial_api_configs():
    """Initial API configs with one user."""
    return [
        make_mock_nc(
            "alice",
            "mailto://alice@example.com",
            {
                "entry_updates": True,
                "own_entry_edits": True,
                "entry_replies": True,
                "new_entries": True,
                "reactions": True,
                "document_replies": True,
            },
            id=1,
        ),
    ]


@pytest.fixture
def handler_with_api(mock_apprise_cls, initial_api_configs):
    """Create a handler initialized via API factory."""
    factory = make_mock_api_factory(initial_api_configs)
    global_config = GlobalConfig({"bely_url": "https://bely.example.com"})

    with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls):
        handler = AppriseSmartNotificationHandler(
            api_factory=factory,
            global_config=global_config,
        )
    return handler
class TestNotificationConfigEventDetection:
    """Tests that only NotificationConfiguration events trigger reload."""

    @pytest.mark.asyncio
    async def test_add_notification_config_triggers_reload(self, handler_with_api):
        event = make_core_event("NotificationConfiguration")
        with patch.object(handler_with_api, "_schedule_config_reload") as mock_schedule:
            await handler_with_api.handle_generic_add(event)
        mock_schedule.assert_called_once()

    @pytest.mark.asyncio
    async def test_update_notification_config_triggers_reload(self, handler_with_api):
        event = make_core_event("NotificationConfiguration")
        with patch.object(handler_with_api, "_schedule_config_reload") as mock_schedule:
            await handler_with_api.handle_generic_update(event)
        mock_schedule.assert_called_once()

    @pytest.mark.asyncio
    async def test_delete_notification_config_triggers_reload(self, handler_with_api):
        event = make_core_event("NotificationConfiguration")
        with patch.object(handler_with_api, "_schedule_config_reload") as mock_schedule:
            await handler_with_api.handle_generic_delete(event)
        mock_schedule.assert_called_once()

    @pytest.mark.asyncio
    async def test_other_entity_add_ignored(self, handler_with_api):
        event = make_core_event("Log")
        with patch.object(handler_with_api, "_schedule_config_reload") as mock_schedule:
            await handler_with_api.handle_generic_add(event)
        mock_schedule.assert_not_called()

    @pytest.mark.asyncio
    async def test_other_entity_update_ignored(self, handler_with_api):
        event = make_core_event("UserInfo")
        with patch.object(handler_with_api, "_schedule_config_reload") as mock_schedule:
            await handler_with_api.handle_generic_update(event)
        mock_schedule.assert_not_called()

    @pytest.mark.asyncio
    async def test_other_entity_delete_ignored(self, handler_with_api):
        event = make_core_event("LogEntry")
        with patch.object(handler_with_api, "_schedule_config_reload") as mock_schedule:
            await handler_with_api.handle_generic_delete(event)
        mock_schedule.assert_not_called()


class TestDebounce:
    """Tests for debounced config reload scheduling."""

    @pytest.mark.asyncio
    async def test_debounce_coalesces_rapid_events(self, handler_with_api):
        """Multiple rapid events result in only one reload."""
        handler_with_api._reload_debounce_seconds = 0.1

        with patch.object(
            handler_with_api, "_reload_config", new_callable=AsyncMock
        ) as mock_reload:
            # Fire three events rapidly
            for _ in range(3):
                event = make_core_event("NotificationConfiguration")
                await handler_with_api.handle_generic_update(event)

            # Wait for debounce to fire
            await asyncio.sleep(0.2)

            # Should only reload once
            mock_reload.assert_called_once()

    @pytest.mark.asyncio
    async def test_debounce_resets_timer(self, handler_with_api):
        """Later events reset the debounce timer."""
        handler_with_api._reload_debounce_seconds = 0.15

        with patch.object(
            handler_with_api, "_reload_config", new_callable=AsyncMock
        ) as mock_reload:
            event = make_core_event("NotificationConfiguration")
            await handler_with_api.handle_generic_update(event)

            # Wait less than debounce, then fire another event
            await asyncio.sleep(0.05)
            await handler_with_api.handle_generic_update(event)

            # Wait less than debounce from the first event, but before the second fires
            await asyncio.sleep(0.1)
            mock_reload.assert_not_called()

            # Wait for the second timer to fire
            await asyncio.sleep(0.1)
            mock_reload.assert_called_once()


class TestNoApiFactory:
    """Tests for handler without API factory."""

    @pytest.mark.asyncio
    async def test_no_api_factory_warns_on_reload(self, mock_apprise_cls):
        """Without api_factory, schedule logs warning and skips."""
        with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls):
            handler = AppriseSmartNotificationHandler()

        with patch.object(handler, "logger") as mock_logger:
            handler._schedule_config_reload()
            mock_logger.warning.assert_called_once()

        assert handler._reload_timer is None
class TestReloadConfig:
    """Tests for the actual config reload logic."""

    @pytest.mark.asyncio
    async def test_successful_reload_updates_config(self, mock_apprise_cls):
        """Successful API reload swaps user_endpoint_configs."""
        initial_configs = [
            make_mock_nc("alice", "mailto://alice@example.com", {"entry_updates": True}, id=1),
        ]
        factory = make_mock_api_factory(initial_configs)
        global_config = GlobalConfig({"bely_url": "https://bely.example.com"})

        with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls):
            handler = AppriseSmartNotificationHandler(
                api_factory=factory,
                global_config=global_config,
            )

        assert "alice" in handler.processor.user_endpoint_configs
        assert "bob" not in handler.processor.user_endpoint_configs

        # Update API to return new configs including bob
        new_configs = [
            make_mock_nc("alice", "mailto://alice@example.com", {"entry_updates": True}, id=1),
            make_mock_nc(
                "bob", "mailto://bob@example.com", {"entry_updates": True, "reactions": False}, id=2
            ),
        ]
        nc_api = factory.getNotificationConfigurationApi()
        nc_api.get_all.return_value = new_configs

        with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls):
            await handler._reload_config()

        assert "alice" in handler.processor.user_endpoint_configs
        assert "bob" in handler.processor.user_endpoint_configs

    @pytest.mark.asyncio
    async def test_failed_reload_preserves_existing(self, mock_apprise_cls):
        """API failure during reload keeps existing config intact."""
        initial_configs = [
            make_mock_nc("alice", "mailto://alice@example.com", {"entry_updates": True}, id=1),
        ]
        factory = make_mock_api_factory(initial_configs)
        global_config = GlobalConfig({"bely_url": "https://bely.example.com"})

        with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls):
            handler = AppriseSmartNotificationHandler(
                api_factory=factory,
                global_config=global_config,
            )

        original_configs = handler.processor.user_endpoint_configs.copy()

        # Make API fail on next call
        nc_api = factory.getNotificationConfigurationApi()
        nc_api.get_all.side_effect = Exception("API connection refused")

        await handler._reload_config()

        # Existing config should be preserved
        assert handler.processor.user_endpoint_configs == original_configs
        assert "alice" in handler.processor.user_endpoint_configs

    @pytest.mark.asyncio
    async def test_reload_preserves_global_config(self, mock_apprise_cls, tmp_path):
        """Global config from YAML is preserved/merged during reload."""
        yaml_config = {
            "global": {
                "mail_server": "smtp.example.com",
                "mail_from": "noreply@example.com",
                "mail_port": 25,
            },
        }
        config_path = tmp_path / "global.yaml"
        with open(config_path, "w") as f:
            yaml.dump(yaml_config, f)

        initial_configs = [
            make_mock_nc("alice", "mailto://alice@example.com", {"entry_updates": True}, id=1),
        ]
        factory = make_mock_api_factory(initial_configs)
        global_config = GlobalConfig({"bely_url": "https://bely.example.com"})

        with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls):
            handler = AppriseSmartNotificationHandler(
                config_path=str(config_path),
                api_factory=factory,
                global_config=global_config,
            )

        assert handler.global_config_data["mail_server"] == "smtp.example.com"

        # Reload should pass global config to the temp processor
        with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls):
            with patch.object(handler.config_loader, "load_config_from_api") as mock_load_api:
                mock_load_api.return_value = {
                    "users": {
                        "bob": {
                            "configs": [
                                {"apprise_url": "mailto://bob@example.com", "notifications": {}}
                            ]
                        }
                    }
                }

                await handler._reload_config()

                # Verify global config was passed
                call_args = mock_load_api.call_args
                assert call_args is not None

        # Global config data should still be intact
        assert handler.global_config_data["mail_server"] == "smtp.example.com"

    @pytest.mark.asyncio
    async def test_reload_clears_removed_user(self, mock_apprise_cls):
        """When a user's config is removed from API, they're no longer configured after reload."""
        initial_configs = [
            make_mock_nc("alice", "mailto://alice@example.com", {"entry_updates": True}, id=1),
            make_mock_nc("bob", "mailto://bob@example.com", {"entry_updates": True}, id=2),
        ]
        factory = make_mock_api_factory(initial_configs)
        global_config = GlobalConfig({"bely_url": "https://bely.example.com"})

        with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls):
            handler = AppriseSmartNotificationHandler(
                api_factory=factory,
                global_config=global_config,
            )

        assert "alice" in handler.processor.user_endpoint_configs
        assert "bob" in handler.processor.user_endpoint_configs

        # Remove bob from API
        new_configs = [
            make_mock_nc("alice", "mailto://alice@example.com", {"entry_updates": True}, id=1),
        ]
        nc_api = factory.getNotificationConfigurationApi()
        nc_api.get_all.return_value = new_configs

        with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls):
            await handler._reload_config()

        assert "alice" in handler.processor.user_endpoint_configs
        assert "bob" not in handler.processor.user_endpoint_configs
"https://bely.example.com"}) + + with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls): + handler = AppriseSmartNotificationHandler( + api_factory=factory, + global_config=global_config, + ) + + assert handler.processor.get_username_by_config_id(10) == "alice" + + def test_get_username_by_config_id_returns_none_for_unknown(self, mock_apprise_cls): + """get_username_by_config_id() returns None for unknown ID.""" + configs = [ + make_mock_nc("alice", "mailto://alice@example.com", {}, id=10), + ] + factory = make_mock_api_factory(configs) + global_config = GlobalConfig({"bely_url": "https://bely.example.com"}) + + with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls): + handler = AppriseSmartNotificationHandler( + api_factory=factory, + global_config=global_config, + ) + + assert handler.processor.get_username_by_config_id(999) is None + + def test_remove_endpoint_by_config_id(self, mock_apprise_cls): + """remove_endpoint_by_config_id() removes correct endpoint and cleans up reverse index.""" + configs = [ + make_mock_nc("alice", "mailto://alice@example.com", {}, id=10), + make_mock_nc("alice", "slack://token", {}, id=11), + ] + factory = make_mock_api_factory(configs) + global_config = GlobalConfig({"bely_url": "https://bely.example.com"}) + + with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls): + handler = AppriseSmartNotificationHandler( + api_factory=factory, + global_config=global_config, + ) + + assert len(handler.processor.user_endpoint_configs["alice"]) == 2 + result = handler.processor.remove_endpoint_by_config_id(10) + + assert result is True + assert len(handler.processor.user_endpoint_configs["alice"]) == 1 + assert handler.processor.user_endpoint_configs["alice"][0]["config_id"] == 11 + assert 10 not in handler.processor._config_id_to_username + assert handler.processor._config_id_to_username[11] == "alice" + + def test_remove_last_endpoint_removes_user_key(self, 
mock_apprise_cls): + """remove_endpoint_by_config_id() removes user key when last endpoint removed.""" + configs = [ + make_mock_nc("alice", "mailto://alice@example.com", {}, id=10), + ] + factory = make_mock_api_factory(configs) + global_config = GlobalConfig({"bely_url": "https://bely.example.com"}) + + with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls): + handler = AppriseSmartNotificationHandler( + api_factory=factory, + global_config=global_config, + ) + + result = handler.processor.remove_endpoint_by_config_id(10) + + assert result is True + assert "alice" not in handler.processor.user_endpoint_configs + assert 10 not in handler.processor._config_id_to_username + + def test_remove_unknown_config_id_returns_false(self, mock_apprise_cls): + """remove_endpoint_by_config_id() returns False for unknown ID.""" + configs = [ + make_mock_nc("alice", "mailto://alice@example.com", {}, id=10), + ] + factory = make_mock_api_factory(configs) + global_config = GlobalConfig({"bely_url": "https://bely.example.com"}) + + with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls): + handler = AppriseSmartNotificationHandler( + api_factory=factory, + global_config=global_config, + ) + + result = handler.processor.remove_endpoint_by_config_id(999) + assert result is False + + def test_yaml_configs_have_none_config_id(self, mock_apprise_cls, tmp_path): + """YAML-loaded configs have config_id: None and don't pollute reverse index.""" + yaml_config = { + "users": { + "alice": { + "apprise_urls": ["mailto://alice@example.com"], + "notifications": {"entry_updates": True}, + }, + }, + } + config_path = tmp_path / "config.yaml" + with open(config_path, "w") as f: + yaml.dump(yaml_config, f) + + with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls): + handler = AppriseSmartNotificationHandler(config_path=str(config_path)) + + endpoints = handler.processor.user_endpoint_configs["alice"] + assert 
endpoints[0]["config_id"] is None + assert handler.processor._config_id_to_username == {} + + @pytest.mark.asyncio + async def test_reverse_index_rebuilt_on_reload(self, mock_apprise_cls): + """After full reload, reverse index is rebuilt correctly.""" + initial_configs = [ + make_mock_nc("alice", "mailto://alice@example.com", {}, id=10), + ] + factory = make_mock_api_factory(initial_configs) + global_config = GlobalConfig({"bely_url": "https://bely.example.com"}) + + with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls): + handler = AppriseSmartNotificationHandler( + api_factory=factory, + global_config=global_config, + ) + + assert handler.processor._config_id_to_username == {10: "alice"} + + # Update API to return different configs + new_configs = [ + make_mock_nc("bob", "mailto://bob@example.com", {}, id=20), + make_mock_nc("bob", "slack://token", {}, id=21), + ] + nc_api = factory.getNotificationConfigurationApi() + nc_api.get_all.return_value = new_configs + + with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls): + await handler._reload_config() + + # Old index should be gone, new one built + assert 10 not in handler.processor._config_id_to_username + assert handler.processor._config_id_to_username == {20: "bob", 21: "bob"} + + +class TestEndToEnd: + """End-to-end test: event -> debounce -> reload.""" + + @pytest.mark.asyncio + async def test_notification_config_event_triggers_full_reload(self, mock_apprise_cls): + """A NotificationConfiguration update event triggers a full config reload.""" + initial_configs = [ + make_mock_nc("alice", "mailto://alice@example.com", {"entry_updates": True}, id=1), + ] + factory = make_mock_api_factory(initial_configs) + global_config = GlobalConfig({"bely_url": "https://bely.example.com"}) + + with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls): + handler = AppriseSmartNotificationHandler( + api_factory=factory, + global_config=global_config, + ) + + 
class TestEndToEnd:
    """End-to-end test: event -> debounce -> reload."""

    @pytest.mark.asyncio
    async def test_notification_config_event_triggers_full_reload(self, mock_apprise_cls):
        """A NotificationConfiguration update event triggers a full config reload."""
        initial_configs = [
            make_mock_nc("alice", "mailto://alice@example.com", {"entry_updates": True}, id=1),
        ]
        factory = make_mock_api_factory(initial_configs)
        global_config = GlobalConfig({"bely_url": "https://bely.example.com"})

        with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls):
            handler = AppriseSmartNotificationHandler(
                api_factory=factory,
                global_config=global_config,
            )

        handler._reload_debounce_seconds = 0.05

        # Update API to include bob
        new_configs = [
            make_mock_nc("alice", "mailto://alice@example.com", {"entry_updates": True}, id=1),
            make_mock_nc("bob", "mailto://bob@example.com", {"entry_updates": True}, id=2),
        ]
        nc_api = factory.getNotificationConfigurationApi()
        nc_api.get_all.return_value = new_configs

        event = make_core_event("NotificationConfiguration", entity_id=2)

        with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls):
            await handler.handle_generic_add(event)
            await asyncio.sleep(0.1)

        assert "bob" in handler.processor.user_endpoint_configs


class TestUnsubscribeLink:
    """Tests for per-endpoint unsubscribe link injection in email notifications."""

    @pytest.mark.asyncio
    async def test_email_endpoint_with_config_id_includes_unsubscribe_link(self, mock_apprise_cls):
        """Email endpoint with config_id and bely_url includes unsubscribe URL in body."""
        configs = [
            make_mock_nc("alice", "mailto://alice@example.com", {"entry_updates": True}, id=42),
        ]
        factory = make_mock_api_factory(configs)
        global_config = GlobalConfig({"bely_url": "https://bely.example.com/bely"})

        with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls):
            with patch("notification_processor.is_email_notification", return_value=True):
                handler = AppriseSmartNotificationHandler(
                    api_factory=factory,
                    global_config=global_config,
                )

        with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls):
            with patch("notification_processor.is_email_notification", return_value=True):
                # NOTE(review): the original message-body literal was garbled in
                # extraction (markup stripped); "Body" preserves the assertions
                # below — confirm against the upstream test file.
                await handler.processor.send_notification(
                    "alice", "Test", "Body", notification_type="entry_updates"
                )

        call_args = mock_apprise_cls.return_value.notify.call_args
        body = call_args[1]["body"]
        assert "Unsubscribe from entry updates notifications" in body
        assert "configId=42" in body
        assert "notificationType=entry_updates" in body
        assert "bely.example.com/bely/views/notificationConfiguration/unsubscribe" in body

Body

", notification_type="entry_updates" + ) + + call_args = mock_apprise_cls.return_value.notify.call_args + body = call_args[1]["body"] + assert "Unsubscribe" not in body + + @pytest.mark.asyncio + async def test_config_id_none_no_unsubscribe_link(self, mock_apprise_cls): + """YAML configs with config_id=None do not get unsubscribe links.""" + configs = [ + make_mock_nc("alice", "mailto://alice@example.com", {"entry_updates": True}, id=10), + ] + factory = make_mock_api_factory(configs) + global_config = GlobalConfig({"bely_url": "https://bely.example.com/bely"}) + + with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls): + with patch("notification_processor.is_email_notification", return_value=True): + handler = AppriseSmartNotificationHandler( + api_factory=factory, + global_config=global_config, + ) + + # Manually set config_id to None to simulate YAML config + handler.processor.user_endpoint_configs["alice"][0]["config_id"] = None + + with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls): + await handler.processor.send_notification( + "alice", "Test", "

Body

", notification_type="entry_updates" + ) + + call_args = mock_apprise_cls.return_value.notify.call_args + body = call_args[1]["body"] + assert "Unsubscribe" not in body + + @pytest.mark.asyncio + async def test_bely_url_none_no_unsubscribe_link(self, mock_apprise_cls): + """When bely_url is None, no unsubscribe link is added.""" + configs = [ + make_mock_nc("alice", "mailto://alice@example.com", {"entry_updates": True}, id=42), + ] + factory = make_mock_api_factory(configs) + + with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls): + with patch("notification_processor.is_email_notification", return_value=True): + handler = AppriseSmartNotificationHandler( + api_factory=factory, + ) + + with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls): + await handler.processor.send_notification( + "alice", "Test", "

Body

", notification_type="entry_updates" + ) + + call_args = mock_apprise_cls.return_value.notify.call_args + body = call_args[1]["body"] + assert "Unsubscribe" not in body + + @pytest.mark.asyncio + async def test_notification_type_none_no_unsubscribe_link(self, mock_apprise_cls): + """When notification_type is None, no unsubscribe link is added.""" + configs = [ + make_mock_nc("alice", "mailto://alice@example.com", {"entry_updates": True}, id=42), + ] + factory = make_mock_api_factory(configs) + global_config = GlobalConfig({"bely_url": "https://bely.example.com/bely"}) + + with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls): + with patch("notification_processor.is_email_notification", return_value=True): + handler = AppriseSmartNotificationHandler( + api_factory=factory, + global_config=global_config, + ) + + with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls): + await handler.processor.send_notification( + "alice", "Test", "

Body

", notification_type=None + ) + + call_args = mock_apprise_cls.return_value.notify.call_args + body = call_args[1]["body"] + assert "Unsubscribe" not in body + + @pytest.mark.asyncio + async def test_two_email_endpoints_get_distinct_unsubscribe_urls(self, mock_apprise_cls): + """Two email endpoints with different config_ids get distinct unsubscribe URLs.""" + configs = [ + make_mock_nc("alice", "mailto://alice@work.com", {"entry_updates": True}, id=10), + make_mock_nc("alice", "mailto://alice@home.com", {"entry_updates": True}, id=20), + ] + factory = make_mock_api_factory(configs) + global_config = GlobalConfig({"bely_url": "https://bely.example.com/bely"}) + + # Each endpoint gets its own mock Apprise instance + mock_instances = [MagicMock(), MagicMock()] + for m in mock_instances: + m.add = MagicMock(return_value=True) + m.notify = MagicMock(return_value=True) + call_count = {"i": 0} + + def create_mock(): + idx = call_count["i"] + call_count["i"] += 1 + return mock_instances[idx % len(mock_instances)] + + mock_cls = MagicMock(side_effect=create_mock) + + with patch("notification_processor.AppriseWithEmailHeaders", mock_cls): + with patch("notification_processor.is_email_notification", return_value=True): + handler = AppriseSmartNotificationHandler( + api_factory=factory, + global_config=global_config, + ) + + await handler.processor.send_notification( + "alice", "Test", "

Body

", notification_type="entry_updates" + ) + + # Both endpoints should have been notified + assert mock_instances[0].notify.called + assert mock_instances[1].notify.called + + body1 = mock_instances[0].notify.call_args[1]["body"] + body2 = mock_instances[1].notify.call_args[1]["body"] + + assert "configId=10" in body1 + assert "configId=20" in body2 + assert "configId=20" not in body1 + assert "configId=10" not in body2 + + +class TestReactionNotificationType: + """Tests that reaction events pass notification_type='reactions' for unsubscribe links.""" + + @pytest.mark.asyncio + async def test_reaction_event_passes_notification_type(self, mock_apprise_cls): + """Reaction add event should pass notification_type='reactions' to send_notification_with_threading.""" + configs = [ + make_mock_nc( + "alice", + "mailto://alice@example.com", + {"reactions": True, "entry_updates": True}, + id=42, + ), + ] + factory = make_mock_api_factory(configs) + global_config = GlobalConfig({"bely_url": "https://bely.example.com/bely"}) + + with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_cls): + with patch("notification_processor.is_email_notification", return_value=True): + handler = AppriseSmartNotificationHandler( + api_factory=factory, + global_config=global_config, + ) + + with patch.object( + handler.processor, "send_notification_with_threading", new_callable=AsyncMock + ) as mock_send: + # Create a mock reaction add event + event = MagicMock() + event.event_triggered_by_username = "bob" + event.parent_log_info.entered_by_username = "alice" + event.parent_log_info.id = "entry-1" + event.parent_log_document_info.id = "doc-1" + event.parent_log_document_info.name = "Test Doc" + + await handler._handle_reaction_event(event, is_add=True) + + mock_send.assert_called_once() + call_kwargs = mock_send.call_args[1] + assert call_kwargs["notification_type"] == "reactions" + assert call_kwargs["username"] == "alice" + + +if __name__ == "__main__": + 
pytest.main([__file__, "-v", "--asyncio-mode=auto"]) diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_email_threading.py b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_email_threading.py new file mode 100644 index 000000000..056c5f7f8 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_email_threading.py @@ -0,0 +1,670 @@ +""" +Test suite for email threading functionality in Apprise Smart Notification Handler. + +This module tests the email threading capabilities that ensure email notifications +are properly threaded together in email clients like Gmail, Outlook, and Thunderbird. +""" + +import sys +from pathlib import Path +from datetime import datetime, timedelta +from unittest.mock import MagicMock, patch, AsyncMock +import pytest +import yaml + +# Add src directory to path to import bely_mqtt +src_path = Path(__file__).parent.parent.parent.parent / "src" +if src_path.exists(): + sys.path.insert(0, str(src_path)) + +# Mock apprise before importing handler +sys.modules["apprise"] = MagicMock() + +from bely_mqtt import ( # noqa: E402 + LogEntryAddEvent, + LogEntryUpdateEvent, + LogEntryDeleteEvent, + LogEntryReplyAddEvent, + LogReactionAddEvent, +) +from bely_mqtt.models import ( # noqa: E402 + LogInfo, + LogDocumentInfo, + LogReactionInfo, + ReactionInfo, + LogbookInfo, +) +from bely_mqtt.config import GlobalConfig # noqa: E402 + +# Add parent directory to path for imports +handler_path = Path(__file__).parent.parent +if handler_path.exists(): + sys.path.insert(0, str(handler_path)) + +from handler import AppriseSmartNotificationHandler # noqa: E402 +from email_threading import ( # noqa: E402 + EmailThreadingStrategy, + NotificationEventType, + detect_event_type, +) + + +class TestEmailThreadingStrategy: + """Test the EmailThreadingStrategy class.""" + + @pytest.fixture + 
def strategy(self): + """Create an EmailThreadingStrategy instance.""" + return EmailThreadingStrategy(domain="test.example.com") + + def test_email_url_detection(self, strategy): + """Test detection of email notification URLs.""" + # Email URLs that should be detected + email_urls = [ + "mailto://user:pass@gmail.com", + "mailtos://user:pass@mail.example.com:587", + ] + + # Non-email URLs that should not be detected + non_email_urls = [ + "slack://TokenA/TokenB/TokenC", + "discord://webhook_id/webhook_token", + "telegram://bot_token/chat_id", + "pushover://user_key@app_token", + "teams://webhook_url", + ] + + for url in email_urls: + assert EmailThreadingStrategy.is_email_notification( + url + ), f"Should detect {url} as email" + + for url in non_email_urls: + assert not EmailThreadingStrategy.is_email_notification( + url + ), f"Should not detect {url} as email" + + def test_thread_id_generation(self, strategy): + """Test generation of consistent thread IDs.""" + document_id = "doc123" + entry_id = "entry456" + + # Thread IDs should be consistent for the same input + doc_thread_1 = strategy.generate_document_thread_id(document_id) + doc_thread_2 = strategy.generate_document_thread_id(document_id) + assert doc_thread_1 == doc_thread_2 + assert "@test.example.com" in doc_thread_1 + + entry_thread_1 = strategy.generate_entry_thread_id(entry_id) + entry_thread_2 = strategy.generate_entry_thread_id(entry_id) + assert entry_thread_1 == entry_thread_2 + assert "@test.example.com" in entry_thread_1 + + # Different IDs should produce different thread IDs + assert doc_thread_1 != entry_thread_1 + + def test_email_headers_for_document_creation(self, strategy): + """Test email headers for document creation (starts thread).""" + headers = strategy.get_email_headers( + event_type=NotificationEventType.DOCUMENT_CREATE, + document_id="doc100", + document_name="Sprint Planning", + ) + + assert "Thread-Topic" in headers + assert "Sprint Planning" in headers["Thread-Topic"] + assert 
"References" in headers + assert "In-Reply-To" not in headers # Document creation starts the thread + + def test_email_headers_for_entry_addition(self, strategy): + """Test email headers for entry addition (replies to document).""" + headers = strategy.get_email_headers( + event_type=NotificationEventType.ENTRY_ADD, + document_id="doc100", + document_name="Sprint Planning", + entry_id="entry200", + ) + + assert "Thread-Topic" in headers + assert "References" in headers + assert "In-Reply-To" in headers + assert headers["In-Reply-To"] == strategy.generate_document_thread_id("doc100") + assert "X-Entry-Id" in headers + assert headers["X-Entry-Id"] == "entry200" + + def test_email_headers_for_nested_reply(self, strategy): + """Test email headers for reply to entry (nested threading).""" + headers = strategy.get_email_headers( + event_type=NotificationEventType.ENTRY_REPLY, + document_id="doc100", + document_name="Sprint Planning", + entry_id="reply300", + parent_entry_id="entry200", + ) + + assert "Thread-Topic" in headers + assert "References" in headers + assert "In-Reply-To" in headers + assert headers["In-Reply-To"] == strategy.generate_entry_thread_id("entry200") + assert "X-Entry-Id" in headers + assert headers["X-Entry-Id"] == "reply300" + assert "X-Parent-Entry-Id" in headers + assert headers["X-Parent-Entry-Id"] == "entry200" + + # References should include both document and parent entry + references = headers["References"].split() + assert strategy.generate_document_thread_id("doc100") in references + assert strategy.generate_entry_thread_id("entry200") in references + + def test_subject_generation_for_email(self, strategy): + """Test subject line generation for email notifications.""" + document_title = "Q4 Planning Document" + + # Email subjects should use Re: for threading + subjects = [ + (NotificationEventType.DOCUMENT_CREATE, None, "New Log: Q4 Planning Document"), + (NotificationEventType.ENTRY_ADD, "by Alice", "Re: Log: Q4 Planning Document"), + 
(NotificationEventType.ENTRY_UPDATE, None, "Re: Log: Q4 Planning Document"), + (NotificationEventType.ENTRY_REPLY, "by Bob", "Re: Log: Q4 Planning Document"), + (NotificationEventType.REACTION_ADD, "by Charlie", "Re: Log: Q4 Planning Document"), + ] + + for event_type, action, expected_prefix in subjects: + subject = strategy.generate_subject( + event_type=event_type, + document_title=document_title, + action_description=action, + is_email=True, + ) + assert subject.startswith(expected_prefix) + if action: + assert action in subject + + def test_subject_generation_for_non_email(self, strategy): + """Test subject line generation for non-email notifications.""" + document_title = "Q4 Planning Document" + + # Non-email subjects should include emojis + emoji_map = { + NotificationEventType.DOCUMENT_CREATE: "📄", + NotificationEventType.ENTRY_ADD: "➕", + NotificationEventType.ENTRY_UPDATE: "✏️", + NotificationEventType.ENTRY_DELETE: "🗑️", + NotificationEventType.ENTRY_REPLY: "💬", + NotificationEventType.REACTION_ADD: "👍", + } + + for event_type, emoji in emoji_map.items(): + subject = strategy.generate_subject( + event_type=event_type, document_title=document_title, is_email=False + ) + assert emoji in subject + assert document_title in subject + + def test_event_type_detection(self): + """Test detection of event types from context.""" + mock_event = MagicMock() + + test_cases = [ + # (is_reply, is_update, is_delete, is_reaction, is_reaction_delete, expected) + (False, False, False, False, False, NotificationEventType.ENTRY_ADD), + (False, True, False, False, False, NotificationEventType.ENTRY_UPDATE), + (False, False, True, False, False, NotificationEventType.ENTRY_DELETE), + (True, False, False, False, False, NotificationEventType.ENTRY_REPLY), + (True, True, False, False, False, NotificationEventType.REPLY_UPDATE), + (True, False, True, False, False, NotificationEventType.REPLY_DELETE), + (False, False, False, True, False, NotificationEventType.REACTION_ADD), + 
(False, False, False, True, True, NotificationEventType.REACTION_DELETE), + ] + + for is_reply, is_update, is_delete, is_reaction, is_reaction_delete, expected in test_cases: + result = detect_event_type( + mock_event, + is_reply=is_reply, + is_update=is_update, + is_delete=is_delete, + is_reaction=is_reaction, + is_reaction_delete=is_reaction_delete, + ) + assert result == expected, f"Expected {expected}, got {result}" + + +class TestEmailThreadingIntegration: + """Integration tests for email threading with the handler.""" + + @pytest.fixture + def test_config(self, tmp_path): + """Create a test configuration with email notifications.""" + config = { + "global": { + "mail_server": { + "host": "smtp.company.com", + "port": 587, + "username": "notifications@company.com", + "password": "secure_password", + "use_tls": True, + } + }, + "users": { + "alice": { + "apprise_urls": [ + "mailto://alice@company.com", + "slack://TokenA/TokenB/TokenC/#general", + ], + "notifications": { + "entry_updates": True, + "own_entry_edits": True, + "entry_replies": True, + "new_entries": True, + "reactions": True, + "document_replies": True, + }, + }, + "bob": { + "apprise_urls": [ + "mailto://bob@company.com", + "discord://webhook_id/webhook_token", + ], + "notifications": { + "entry_updates": True, + "own_entry_edits": True, + "entry_replies": True, + "new_entries": True, + "reactions": True, + "document_replies": True, + }, + }, + "charlie": { + "apprise_urls": [ + "teams://webhook_url", # No email for Charlie + ], + "notifications": { + "entry_updates": True, + "own_entry_edits": True, + "entry_replies": True, + "new_entries": True, + "reactions": True, + "document_replies": True, + }, + }, + }, + } + + config_path = tmp_path / "threading_config.yaml" + with open(config_path, "w") as f: + yaml.dump(config, f) + + return config_path + + @pytest.fixture + def handler(self, test_config): + """Create handler with test configuration.""" + global_config = GlobalConfig({"bely_url": 
"https://bely.company.com"}) + + # Mock AppriseWithEmailHeaders class + mock_apprise_wrapper = MagicMock() + mock_apprise_wrapper.return_value.notify = MagicMock(return_value=True) + mock_apprise_wrapper.return_value.add = MagicMock(return_value=True) + mock_apprise_wrapper.return_value.__bool__ = MagicMock(return_value=True) + + with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_wrapper): + handler = AppriseSmartNotificationHandler( + config_path=str(test_config), global_config=global_config + ) + + # Mock Apprise notify for all users + for username, endpoints in handler.processor.user_endpoint_configs.items(): + for ep in endpoints: + ep["apprise"].notify = MagicMock(return_value=True) + + return handler + + @pytest.mark.asyncio + async def test_email_thread_conversation(self, handler): + """Test a complete email thread conversation.""" + base_time = datetime.now() + + # Track all notifications sent + notifications = [] + + async def track_notification(username, title, body, attach=None, notification_type=None): + notifications.append( + { + "username": username, + "title": title, + "body": body, + "attach": attach, + "timestamp": datetime.now().isoformat(), + } + ) + return True + + handler.processor.send_notification = AsyncMock(side_effect=track_notification) + + # Create a document + doc = LogDocumentInfo( + id=1000, + name="Team Retrospective", + ownerUsername="alice", + createdByUsername="alice", + lastModifiedByUsername="alice", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ) + + # 1. 
Bob adds an entry to Alice's document + event1 = LogEntryAddEvent( + eventTimestamp=(base_time + timedelta(minutes=5)).isoformat(), + eventTriggedByUsername="bob", + entityName="LogEntry", + entityId=1001, + parentLogDocumentInfo=doc, + logInfo=LogInfo( + id=1001, + enteredByUsername="bob", + lastModifiedByUsername="bob", + enteredOnDateTime=(base_time + timedelta(minutes=5)).isoformat(), + lastModifiedOnDateTime=(base_time + timedelta(minutes=5)).isoformat(), + ), + description="Added retrospective feedback", + textDiff="+ What went well: Sprint velocity improved\n+ To improve: Better testing coverage", + logbookList=[LogbookInfo(id=1, name="Retrospectives", displayName="Retrospectives")], + ) + + await handler.handle_log_entry_add(event1) + + # Alice should get notified (document owner) + assert len(notifications) == 1 + assert notifications[0]["username"] == "alice" + + # Check if email threading headers would be applied + # (In real implementation, these would be in the attach parameter) + # For email notifications, the subject should use "Re:" for threading + + # 2. Alice replies to Bob's entry + event2 = LogEntryReplyAddEvent( + eventTimestamp=(base_time + timedelta(minutes=10)).isoformat(), + eventTriggedByUsername="alice", + entityName="LogEntryReply", + entityId=1002, + parentLogDocumentInfo=doc, + parentLogInfo=LogInfo( + id=1001, + enteredByUsername="bob", + lastModifiedByUsername="bob", + enteredOnDateTime=(base_time + timedelta(minutes=5)).isoformat(), + lastModifiedOnDateTime=(base_time + timedelta(minutes=5)).isoformat(), + ), + logInfo=LogInfo( + id=1002, + enteredByUsername="alice", + lastModifiedByUsername="alice", + enteredOnDateTime=(base_time + timedelta(minutes=10)).isoformat(), + lastModifiedOnDateTime=(base_time + timedelta(minutes=10)).isoformat(), + ), + textDiff="Great points! 
Let's schedule a meeting to discuss the testing strategy.", + logbookList=[LogbookInfo(id=1, name="Retrospectives", displayName="Retrospectives")], + description="Response to feedback", + ) + + await handler.handle_log_entry_reply_add(event2) + + # Bob should get notified (entry creator) + assert len(notifications) == 2 + assert notifications[1]["username"] == "bob" + + # 3. Bob reacts to Alice's reply + from bely_mqtt.models import ReactionId + + event3 = LogReactionAddEvent( + eventTimestamp=(base_time + timedelta(minutes=15)).isoformat(), + eventTriggedByUsername="bob", + entityName="LogReaction", + entityId=ReactionId(logId=1002, reactionId=1, userId=2), + parentLogDocumentInfo=doc, + parentLogInfo=LogInfo( + id=1002, + enteredByUsername="alice", + lastModifiedByUsername="alice", + enteredOnDateTime=(base_time + timedelta(minutes=10)).isoformat(), + lastModifiedOnDateTime=(base_time + timedelta(minutes=10)).isoformat(), + ), + logReaction=LogReactionInfo( + id=ReactionId(logId=1002, reactionId=1, userId=2), + reaction=ReactionInfo( + id=1, emoji="👍", name="thumbsup", emojiCode=128077, description="Agreed" + ), + username="bob", + ), + description="Agreed with meeting proposal", + ) + + await handler.handle_log_reaction_add(event3) + + # Alice should get notified about the reaction + assert len(notifications) == 3 + assert notifications[2]["username"] == "alice" + + # Verify all notifications are part of the same conversation thread + # In a real email client, these would all be grouped together + + @pytest.mark.asyncio + async def test_mixed_notification_types(self, handler): + """Test that email and non-email notifications are handled differently.""" + base_time = datetime.now() + + # Track notifications with their types + notifications = [] + + async def track_notification(username, title, body, attach=None, notification_type=None): + # Determine if this is an email notification based on user config + is_email = False + if username in ["alice", "bob"]: # 
These users have email configured + is_email = True + + notifications.append( + { + "username": username, + "title": title, + "body": body, + "is_email": is_email, + } + ) + return True + + handler.processor.send_notification = AsyncMock(side_effect=track_notification) + + # Create a document owned by Charlie (no email) + doc = LogDocumentInfo( + id=2000, + name="Technical Specs", + ownerUsername="charlie", + createdByUsername="charlie", + lastModifiedByUsername="charlie", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ) + + # Alice (has email) adds an entry + event = LogEntryAddEvent( + eventTimestamp=base_time.isoformat(), + eventTriggedByUsername="alice", + entityName="LogEntry", + entityId=2001, + parentLogDocumentInfo=doc, + logInfo=LogInfo( + id=2001, + enteredByUsername="alice", + lastModifiedByUsername="alice", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ), + description="Added API specifications", + textDiff="+ API endpoint: /api/v2/users", + logbookList=[LogbookInfo(id=2, name="Tech Specs", displayName="Tech Specs")], + ) + + await handler.handle_log_entry_add(event) + + # Charlie should get notified (document owner) + assert len(notifications) == 1 + assert notifications[0]["username"] == "charlie" + assert not notifications[0]["is_email"] # Charlie doesn't have email + + @pytest.mark.asyncio + async def test_threading_with_updates_and_deletes(self, handler): + """Test that updates and deletes maintain thread continuity.""" + base_time = datetime.now() + + notifications = [] + + async def track_notification(username, title, body, attach=None, notification_type=None): + notifications.append( + { + "username": username, + "title": title, + "body": body, + } + ) + return True + + handler.processor.send_notification = AsyncMock(side_effect=track_notification) + + # Alice's document + doc = LogDocumentInfo( + id=3000, + name="Project Roadmap", + 
ownerUsername="alice", + createdByUsername="alice", + lastModifiedByUsername="alice", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ) + + # Bob's entry + bob_entry = LogInfo( + id=3001, + enteredByUsername="bob", + lastModifiedByUsername="bob", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ) + + # 1. Bob updates his own entry + event1 = LogEntryUpdateEvent( + eventTimestamp=(base_time + timedelta(minutes=5)).isoformat(), + eventTriggedByUsername="bob", + entityName="LogEntry", + entityId=3001, + parentLogDocumentInfo=doc, + logInfo=bob_entry, + description="Updated timeline", + textDiff="- Q3 delivery\n+ Q4 delivery", + logbookList=[LogbookInfo(id=3, name="Roadmap", displayName="Roadmap")], + ) + + await handler.handle_log_entry_update(event1) + + # Alice should be notified (document owner) + # Bob shouldn't be notified (he's the one updating) + assert len(notifications) == 1 + assert notifications[0]["username"] == "alice" + + # 2. 
Alice deletes Bob's entry + event2 = LogEntryDeleteEvent( + eventTimestamp=(base_time + timedelta(minutes=10)).isoformat(), + eventTriggedByUsername="alice", + entityName="LogEntry", + entityId=3001, + parentLogDocumentInfo=doc, + logInfo=bob_entry, + description="Removed outdated entry", + textDiff="- Deleted: Q4 delivery timeline", + logbookList=[LogbookInfo(id=3, name="Roadmap", displayName="Roadmap")], + ) + + await handler.handle_log_entry_delete(event2) + + # Bob should be notified (his entry was deleted) + assert len(notifications) == 2 + assert notifications[1]["username"] == "bob" + + # All these notifications should be part of the same email thread + + +class TestEmailThreadingEdgeCases: + """Test edge cases and error handling for email threading.""" + + @pytest.fixture + def strategy(self): + """Create an EmailThreadingStrategy instance.""" + return EmailThreadingStrategy() + + def test_missing_domain(self, strategy): + """Test thread ID generation without a domain.""" + # Should use default domain + thread_id = strategy.generate_document_thread_id("doc123") + assert "@" in thread_id + assert thread_id.endswith(">") + + def test_empty_document_name(self, strategy): + """Test handling of empty document names.""" + headers = strategy.get_email_headers( + event_type=NotificationEventType.ENTRY_ADD, + document_id="doc123", + document_name="", # Empty name + entry_id="entry456", + ) + + assert "Thread-Topic" in headers + # Should handle empty name gracefully + + def test_special_characters_in_ids(self, strategy): + """Test handling of special characters in IDs.""" + # IDs with special characters + doc_id = "doc-123_test@special" + entry_id = "entry/456\\test" + + doc_thread = strategy.generate_document_thread_id(doc_id) + entry_thread = strategy.generate_entry_thread_id(entry_id) + + # Should generate valid message IDs + assert doc_thread.startswith("<") + assert doc_thread.endswith(">") + assert entry_thread.startswith("<") + assert 
entry_thread.endswith(">") + + def test_very_long_document_name(self, strategy): + """Test handling of very long document names.""" + long_name = "A" * 500 # Very long document name + + subject = strategy.generate_subject( + event_type=NotificationEventType.ENTRY_ADD, document_title=long_name, is_email=True + ) + + # Should truncate or handle gracefully + assert len(subject) < 1000 # Reasonable subject length + + def test_unicode_in_document_name(self, strategy): + """Test handling of Unicode characters in document names.""" + unicode_name = "Project 项目 🚀 Проект" + + headers = strategy.get_email_headers( + event_type=NotificationEventType.ENTRY_ADD, + document_id="doc123", + document_name=unicode_name, + entry_id="entry456", + ) + + subject = strategy.generate_subject( + event_type=NotificationEventType.ENTRY_ADD, document_title=unicode_name, is_email=True + ) + + assert "Thread-Topic" in headers + assert unicode_name in headers["Thread-Topic"] + assert unicode_name in subject + + +if __name__ == "__main__": + # Run tests with pytest + pytest.main([__file__, "-v", "--asyncio-mode=auto"]) diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_formatters.py b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_formatters.py new file mode 100644 index 000000000..8dec415d8 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_formatters.py @@ -0,0 +1,424 @@ +""" +Tests for notification formatters. 
+""" + +import logging +from datetime import datetime, timezone +from pathlib import Path +import sys +from unittest.mock import MagicMock +from zoneinfo import ZoneInfo + +import pytest + +# Add parent directory to path for imports +handler_path = Path(__file__).parent.parent +if handler_path.exists(): + sys.path.insert(0, str(handler_path)) + +from formatters import NotificationFormatter # noqa: E402 +from bely_mqtt import ( # noqa: E402 + LogEntryAddEvent, + LogEntryUpdateEvent, + LogEntryDeleteEvent, + LogEntryReplyAddEvent, + LogReactionAddEvent, +) +from bely_mqtt.models import ( # noqa: E402 + LogDocumentInfo, + LogInfo, + LogbookInfo, + ReactionInfo, + ReactionId, + LogReactionInfo, +) + + +@pytest.fixture +def logger(): + """Create a mock logger.""" + return MagicMock(spec=logging.Logger) + + +@pytest.fixture +def formatter_utc(logger): + """Create a formatter with UTC timezone.""" + return NotificationFormatter(bely_url="https://bely.example.com", logger=logger, timezone="UTC") + + +@pytest.fixture +def formatter_eastern(logger): + """Create a formatter with Eastern timezone.""" + return NotificationFormatter( + bely_url="https://bely.example.com", logger=logger, timezone="America/New_York" + ) + + +@pytest.fixture +def formatter_pacific(logger): + """Create a formatter with Pacific timezone.""" + return NotificationFormatter( + bely_url="https://bely.example.com", logger=logger, timezone="America/Los_Angeles" + ) + + +@pytest.fixture +def sample_log_document_info(): + """Create sample log document info.""" + return LogDocumentInfo( + name="Test Document", id=123, ownerUsername="doc_owner", createdByUsername="doc_creator" + ) + + +@pytest.fixture +def sample_log_info(): + """Create sample log info.""" + return LogInfo(id=456, enteredByUsername="entry_author", lastModifiedByUsername="modifier") + + +@pytest.fixture +def sample_parent_log_info(): + """Create sample parent log info for replies.""" + return LogInfo( + id=789, 
enteredByUsername="parent_author", lastModifiedByUsername="parent_modifier" + ) + + +@pytest.fixture +def sample_logbook_list(): + """Create sample logbook list.""" + return [ + LogbookInfo(name="Logbook1", id=1, displayName="Display 1"), + LogbookInfo(name="Logbook2", id=2, displayName="Display 2"), + ] + + +@pytest.fixture +def sample_reaction_info(): + """Create sample reaction info.""" + return ReactionInfo( + id=1, name="thumbs_up", emojiCode=128077, emoji="👍", description="Thumbs up" + ) + + +@pytest.fixture +def sample_log_reaction_info(sample_reaction_info): + """Create sample log reaction info.""" + return LogReactionInfo( + reaction=sample_reaction_info, + id=ReactionId(logId=456, reactionId=1, userId=999), + username="reactor_user", + ) + + +class TestTimezoneFormatting: + """Test timezone formatting functionality.""" + + def test_utc_timestamp_formatting(self, formatter_utc): + """Test that UTC timestamps are formatted correctly.""" + # Create a UTC timestamp + utc_time = datetime(2024, 1, 15, 14, 30, 45, tzinfo=timezone.utc) + + formatted = formatter_utc._format_timestamp(utc_time) + + # Should show UTC time + assert "2024-01-15 14:30:45" in formatted + assert "UTC" in formatted + + def test_eastern_timezone_formatting(self, formatter_eastern): + """Test that timestamps are converted to Eastern timezone.""" + # Create a UTC timestamp (2:30 PM UTC) + utc_time = datetime(2024, 1, 15, 14, 30, 45, tzinfo=timezone.utc) + + formatted = formatter_eastern._format_timestamp(utc_time) + + # Should show Eastern time (9:30 AM EST, UTC-5) + assert "2024-01-15 09:30:45" in formatted + assert "EST" in formatted or "EDT" in formatted + + def test_pacific_timezone_formatting(self, formatter_pacific): + """Test that timestamps are converted to Pacific timezone.""" + # Create a UTC timestamp (2:30 PM UTC) + utc_time = datetime(2024, 1, 15, 14, 30, 45, tzinfo=timezone.utc) + + formatted = formatter_pacific._format_timestamp(utc_time) + + # Should show Pacific time (6:30 
AM PST, UTC-8) + assert "2024-01-15 06:30:45" in formatted + assert "PST" in formatted or "PDT" in formatted + + def test_naive_timestamp_assumes_utc(self, formatter_eastern): + """Test that naive timestamps are assumed to be UTC.""" + # Create a naive timestamp (no timezone info) + naive_time = datetime(2024, 1, 15, 14, 30, 45) + + formatted = formatter_eastern._format_timestamp(naive_time) + + # Should treat as UTC and convert to Eastern (9:30 AM EST) + assert "2024-01-15 09:30:45" in formatted + + def test_daylight_saving_time(self, formatter_eastern): + """Test that daylight saving time is handled correctly.""" + # Summer date (EDT - Eastern Daylight Time, UTC-4) + summer_time = datetime(2024, 7, 15, 14, 30, 45, tzinfo=timezone.utc) + formatted_summer = formatter_eastern._format_timestamp(summer_time) + assert "2024-07-15 10:30:45" in formatted_summer # UTC-4 + + # Winter date (EST - Eastern Standard Time, UTC-5) + winter_time = datetime(2024, 1, 15, 14, 30, 45, tzinfo=timezone.utc) + formatted_winter = formatter_eastern._format_timestamp(winter_time) + assert "2024-01-15 09:30:45" in formatted_winter # UTC-5 + + def test_invalid_timezone_falls_back_to_utc(self, logger): + """Test that invalid timezone falls back to UTC.""" + formatter = NotificationFormatter( + bely_url="https://bely.example.com", logger=logger, timezone="Invalid/Timezone" + ) + + # Should have logged a warning + logger.warning.assert_called() + + # Should use UTC + utc_time = datetime(2024, 1, 15, 14, 30, 45, tzinfo=timezone.utc) + formatted = formatter._format_timestamp(utc_time) + assert "2024-01-15 14:30:45 UTC" in formatted + + def test_no_timezone_specified(self, logger): + """Test formatter with no timezone specified (uses local or UTC).""" + formatter = NotificationFormatter(bely_url="https://bely.example.com", logger=logger) + + # Should have a timezone set (either local or UTC) + assert formatter.timezone is not None + + # Should be able to format timestamps + utc_time = 
datetime(2024, 1, 15, 14, 30, 45, tzinfo=timezone.utc) + formatted = formatter._format_timestamp(utc_time) + assert "2024-01-15" in formatted + assert ":" in formatted # Has time component + + +class TestEventFormatting: + """Test event formatting with timezone support.""" + + def test_entry_added_with_timezone( + self, formatter_eastern, sample_log_document_info, sample_log_info, sample_logbook_list + ): + """Test that LogEntryAddEvent uses timezone formatting.""" + event = LogEntryAddEvent( + description="Test entry added", + eventTimestamp=datetime(2024, 1, 15, 14, 30, 45, tzinfo=timezone.utc), + entityName="Log", + entityId=123, + eventTriggedByUsername="test_user", + parentLogDocumentInfo=sample_log_document_info, + logInfo=sample_log_info, + logbookList=sample_logbook_list, + textDiff="This is the entry content", + ) + + result = formatter_eastern.format_entry_added(event) + + # Check that Eastern time is shown (9:30 AM EST) + assert "2024-01-15 09:30:45" in result + assert "test_user" in result + assert "Test Document" in result + + def test_entry_updated_with_timezone( + self, formatter_pacific, sample_log_document_info, sample_log_info, sample_logbook_list + ): + """Test that LogEntryUpdateEvent uses timezone formatting.""" + event = LogEntryUpdateEvent( + description="Test entry updated", + eventTimestamp=datetime(2024, 7, 15, 18, 45, 30, tzinfo=timezone.utc), + entityName="Log", + entityId=123, + eventTriggedByUsername="updater", + parentLogDocumentInfo=sample_log_document_info, + logInfo=sample_log_info, + logbookList=sample_logbook_list, + textDiff="Updated content", + ) + + result = formatter_pacific.format_entry_updated(event) + + # Check that Pacific time is shown (11:45 AM PDT, UTC-7 in summer) + assert "2024-07-15 11:45:30" in result + assert "updater" in result + + def test_reply_added_with_timezone( + self, + formatter_eastern, + sample_log_document_info, + sample_log_info, + sample_parent_log_info, + sample_logbook_list, + ): + """Test that 
LogEntryReplyAddEvent uses timezone formatting.""" + event = LogEntryReplyAddEvent( + description="Reply added", + eventTimestamp=datetime(2024, 3, 10, 22, 15, 0, tzinfo=timezone.utc), + entityName="LogReply", + entityId=999, + eventTriggedByUsername="replier", + parentLogDocumentInfo=sample_log_document_info, + logInfo=sample_log_info, + parentLogInfo=sample_parent_log_info, + logbookList=sample_logbook_list, + textDiff="Reply content", + ) + + result = formatter_eastern.format_reply_added(event) + + # Check that Eastern time is shown (6:15 PM EDT, UTC-4 in March after DST) + assert "2024-03-10 18:15:00" in result + assert "replier" in result + + def test_reaction_added_with_timezone( + self, + formatter_utc, + sample_log_document_info, + sample_parent_log_info, + sample_log_reaction_info, + ): + """Test that LogReactionAddEvent uses timezone formatting.""" + event = LogReactionAddEvent( + description="Reaction added", + eventTimestamp=datetime(2024, 12, 25, 10, 0, 0, tzinfo=timezone.utc), + entityName="LogReaction", + entityId={"logId": 456, "reactionId": 1, "userId": 999}, + eventTriggedByUsername="reactor", + parentLogInfo=sample_parent_log_info, + parentLogDocumentInfo=sample_log_document_info, + logReaction=sample_log_reaction_info, + ) + + result = formatter_utc.format_reaction_added(event) + + # Check that UTC time is shown + assert "2024-12-25 10:00:00 UTC" in result + assert "reactor" in result + assert "👍" in result + + def test_entry_deleted_with_timezone( + self, formatter_eastern, sample_log_document_info, sample_log_info, sample_logbook_list + ): + """Test that LogEntryDeleteEvent uses timezone formatting.""" + event = LogEntryDeleteEvent( + description="Entry deleted", + eventTimestamp=datetime(2024, 6, 1, 3, 30, 15, tzinfo=timezone.utc), + entityName="Log", + entityId=123, + eventTriggedByUsername="deleter", + parentLogDocumentInfo=sample_log_document_info, + logInfo=sample_log_info, + logbookList=sample_logbook_list, + textDiff="Deleted content", 
+ ) + + result = formatter_eastern.format_entry_deleted(event) + + # Check that Eastern time is shown (11:30 PM EDT previous day, UTC-4 in June) + assert "2024-05-31 23:30:15" in result + assert "deleter" in result + + def test_multiple_events_same_formatter( + self, formatter_eastern, sample_log_document_info, sample_log_info, sample_logbook_list + ): + """Test that the same formatter consistently formats times in the same timezone.""" + # Create multiple events with different UTC times + event1 = LogEntryAddEvent( + description="Morning entry", + eventTimestamp=datetime(2024, 1, 15, 8, 0, 0, tzinfo=timezone.utc), + entityName="Log", + entityId=1, + eventTriggedByUsername="user1", + parentLogDocumentInfo=sample_log_document_info, + logInfo=sample_log_info, + logbookList=sample_logbook_list, + textDiff="Morning content", + ) + + event2 = LogEntryAddEvent( + description="Afternoon entry", + eventTimestamp=datetime(2024, 1, 15, 15, 0, 0, tzinfo=timezone.utc), + entityName="Log", + entityId=2, + eventTriggedByUsername="user2", + parentLogDocumentInfo=sample_log_document_info, + logInfo=sample_log_info, + logbookList=sample_logbook_list, + textDiff="Afternoon content", + ) + + event3 = LogEntryAddEvent( + description="Evening entry", + eventTimestamp=datetime(2024, 1, 15, 23, 0, 0, tzinfo=timezone.utc), + entityName="Log", + entityId=3, + eventTriggedByUsername="user3", + parentLogDocumentInfo=sample_log_document_info, + logInfo=sample_log_info, + logbookList=sample_logbook_list, + textDiff="Evening content", + ) + + result1 = formatter_eastern.format_entry_added(event1) + result2 = formatter_eastern.format_entry_added(event2) + result3 = formatter_eastern.format_entry_added(event3) + + # All should be in Eastern time + assert "03:00:00" in result1 # 8 AM UTC = 3 AM EST + assert "10:00:00" in result2 # 3 PM UTC = 10 AM EST + assert "18:00:00" in result3 # 11 PM UTC = 6 PM EST + + # All should show EST + assert "EST" in result1 or "EDT" in result1 + assert "EST" in 
result2 or "EDT" in result2 + assert "EST" in result3 or "EDT" in result3 + + +class TestFormatterInitialization: + """Test formatter initialization with different timezone configurations.""" + + def test_formatter_with_valid_timezone(self, logger): + """Test creating formatter with valid timezone.""" + formatter = NotificationFormatter( + bely_url="https://bely.example.com", logger=logger, timezone="Europe/London" + ) + + assert formatter.timezone == ZoneInfo("Europe/London") + + # Test formatting + utc_time = datetime(2024, 1, 15, 12, 0, 0, tzinfo=timezone.utc) + formatted = formatter._format_timestamp(utc_time) + assert "12:00:00" in formatted # Same as UTC in January (no DST) + + def test_formatter_with_asia_timezone(self, logger): + """Test creating formatter with Asian timezone.""" + formatter = NotificationFormatter( + bely_url="https://bely.example.com", logger=logger, timezone="Asia/Tokyo" + ) + + assert formatter.timezone == ZoneInfo("Asia/Tokyo") + + # Test formatting + utc_time = datetime(2024, 1, 15, 12, 0, 0, tzinfo=timezone.utc) + formatted = formatter._format_timestamp(utc_time) + assert "21:00:00" in formatted # UTC+9 for Tokyo + + def test_formatter_with_australia_timezone(self, logger): + """Test creating formatter with Australian timezone.""" + formatter = NotificationFormatter( + bely_url="https://bely.example.com", logger=logger, timezone="Australia/Sydney" + ) + + assert formatter.timezone == ZoneInfo("Australia/Sydney") + + # Test formatting + utc_time = datetime(2024, 1, 15, 12, 0, 0, tzinfo=timezone.utc) + formatted = formatter._format_timestamp(utc_time) + assert "23:00:00" in formatted # UTC+11 for Sydney in January (summer) + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_handler.py b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_handler.py new file mode 
100644 index 000000000..a5b792bcf --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_handler.py @@ -0,0 +1,837 @@ +""" +Comprehensive test suite for Apprise Smart Notification Handler. + +Tests all event types and notification scenarios with mock data. +""" + +import logging +from datetime import datetime +from pathlib import Path +from unittest.mock import MagicMock, patch, AsyncMock +import pytest +import yaml +import sys + +# Add src directory to path to import bely_mqtt +src_path = Path(__file__).parent.parent.parent.parent / "src" +if src_path.exists(): + sys.path.insert(0, str(src_path)) + +# Mock apprise before importing handler +mock_apprise = MagicMock() +sys.modules["apprise"] = mock_apprise + +from bely_mqtt import ( # noqa: E402 + LogEntryAddEvent, + LogEntryUpdateEvent, + LogEntryDeleteEvent, + LogEntryReplyAddEvent, + LogEntryReplyUpdateEvent, + LogEntryReplyDeleteEvent, + LogReactionAddEvent, + LogReactionDeleteEvent, +) +from bely_mqtt.models import ( # noqa: E402 + LogInfo, + LogDocumentInfo, + LogReactionInfo, + ReactionInfo, +) +from bely_mqtt.config import GlobalConfig # noqa: E402 + +# Add parent directory to path to import handler module +handler_path = Path(__file__).parent.parent +if handler_path.exists(): + sys.path.insert(0, str(handler_path)) + +from handler import AppriseSmartNotificationHandler # noqa: E402 + + +class MockEventFactory: + """Factory for creating mock events with realistic data.""" + + def __init__(self): + self.timestamp = datetime.now().isoformat() + + # Define three test users for proper notification testing + self.alice = "alice" # Document owner + self.bob = "bob" # Entry creator (different from document owner) + self.charlie = "charlie" # Third party who updates/replies + + # Create mock document owned by Alice + self.document = self._create_document() + + # Create mock log entries + self.alice_entry = self._create_log_entry(self.alice, 1) + 
self.bob_entry = self._create_log_entry(self.bob, 2) # Bob's entry in Alice's document + self.charlie_entry = self._create_log_entry(self.charlie, 3) + + def _create_document(self) -> LogDocumentInfo: + """Create a mock document owned by Alice.""" + return LogDocumentInfo( + id=100, + name="Project Alpha Documentation", + ownerUsername=self.alice, + createdByUsername=self.alice, + lastModifiedByUsername=self.alice, + enteredOnDateTime=self.timestamp, + lastModifiedOnDateTime=self.timestamp, + ) + + def _create_log_entry(self, username: str, entry_id: int) -> LogInfo: + """Create a mock log entry.""" + return LogInfo( + id=entry_id, + enteredByUsername=username, + lastModifiedByUsername=username, + enteredOnDateTime=self.timestamp, + lastModifiedOnDateTime=self.timestamp, + ) + + def _create_reaction(self, emoji: str = "👍", name: str = "thumbsup") -> LogReactionInfo: + """Create a mock reaction.""" + from bely_mqtt.models import ReactionId + + return LogReactionInfo( + id=ReactionId(logId=1, reactionId=1, userId=2), + reaction=ReactionInfo( + id=1, + emoji=emoji, + name=name, + emojiCode=128077, # Unicode code point for thumbs up + description=f"Reaction {name}", + ), + username=self.bob, + ) + + def create_entry_add_by_bob(self) -> LogEntryAddEvent: + """Bob adds a new entry to Alice's document.""" + from bely_mqtt.models import LogbookInfo + + return LogEntryAddEvent( + eventTimestamp=self.timestamp, + eventTriggedByUsername=self.bob, + entityName="LogEntry", + entityId=2, + parentLogDocumentInfo=self.document, + logInfo=self.bob_entry, + description="New findings from testing", + textDiff="+ Added new test results\n+ Performance improved by 20%", + logbookList=[LogbookInfo(id=1, name="Project Alpha", displayName="Project Alpha")], + ) + + def create_entry_update_by_bob_on_alice(self) -> LogEntryUpdateEvent: + """Bob updates Alice's entry.""" + from bely_mqtt.models import LogbookInfo + + return LogEntryUpdateEvent( + eventTimestamp=self.timestamp, + 
eventTriggedByUsername=self.bob, + entityName="LogEntry", + entityId=1, + parentLogDocumentInfo=self.document, + logInfo=self.alice_entry, + description="Corrected typo and added details", + textDiff="- Old text with typo\n+ New text with correction", + logbookList=[LogbookInfo(id=1, name="Project Alpha", displayName="Project Alpha")], + ) + + def create_entry_update_by_charlie_on_bob(self) -> LogEntryUpdateEvent: + """Charlie updates Bob's entry in Alice's document.""" + from bely_mqtt.models import LogbookInfo + + return LogEntryUpdateEvent( + eventTimestamp=self.timestamp, + eventTriggedByUsername=self.charlie, # Charlie is the third party + entityName="LogEntry", + entityId=2, + parentLogDocumentInfo=self.document, + logInfo=self.bob_entry, + description="Added clarification", + textDiff="+ Added clarification note", + logbookList=[LogbookInfo(id=1, name="Project Alpha", displayName="Project Alpha")], + ) + + def create_reply_add_by_charlie_to_bob(self) -> LogEntryReplyAddEvent: + """Charlie replies to Bob's entry in Alice's document.""" + from bely_mqtt.models import LogbookInfo + + return LogEntryReplyAddEvent( + eventTimestamp=self.timestamp, + eventTriggedByUsername=self.charlie, # Charlie is the third party + entityName="LogEntryReply", + entityId=3, + parentLogDocumentInfo=self.document, + parentLogInfo=self.bob_entry, # Replying to Bob's entry + logInfo=LogInfo( + id=3, + enteredByUsername=self.charlie, + lastModifiedByUsername=self.charlie, + enteredOnDateTime=self.timestamp, + lastModifiedOnDateTime=self.timestamp, + ), + textDiff="Great point! 
I agree with this approach.", + logbookList=[LogbookInfo(id=1, name="Project Alpha", displayName="Project Alpha")], + description="Reply to Bob's entry", + ) + + def create_reply_add_by_alice_to_bob(self) -> LogEntryReplyAddEvent: + """Alice replies to Bob's entry in her document.""" + from bely_mqtt.models import LogbookInfo + + return LogEntryReplyAddEvent( + eventTimestamp=self.timestamp, + eventTriggedByUsername=self.alice, + entityName="LogEntryReply", + entityId=4, + parentLogDocumentInfo=self.document, + parentLogInfo=self.bob_entry, + logInfo=LogInfo( + id=4, + enteredByUsername=self.alice, + lastModifiedByUsername=self.alice, + enteredOnDateTime=self.timestamp, + lastModifiedOnDateTime=self.timestamp, + ), + textDiff="Thanks for the update. Please also check the API docs.", + logbookList=[LogbookInfo(id=1, name="Project Alpha", displayName="Project Alpha")], + description="Reply to Bob's entry", + ) + + def create_reply_update_by_charlie_on_bob_entry(self) -> LogEntryReplyUpdateEvent: + """Charlie updates a reply on Bob's entry in Alice's document.""" + from bely_mqtt.models import LogbookInfo + + return LogEntryReplyUpdateEvent( + eventTimestamp=self.timestamp, + eventTriggedByUsername=self.charlie, # Charlie is updating + entityName="LogEntryReply", + entityId=3, + parentLogDocumentInfo=self.document, + parentLogInfo=self.bob_entry, # Bob's entry + logInfo=LogInfo( + id=3, + enteredByUsername=self.charlie, # Charlie originally created this reply + lastModifiedByUsername=self.charlie, + enteredOnDateTime=self.timestamp, + lastModifiedOnDateTime=self.timestamp, + ), + textDiff="- Old reply text\n+ Updated reply with more details", + logbookList=[LogbookInfo(id=1, name="Project Alpha", displayName="Project Alpha")], + description="Updated reply", + ) + + def create_reaction_add_by_bob_to_alice(self) -> LogReactionAddEvent: + """Bob adds a reaction to Alice's entry.""" + from bely_mqtt.models import ReactionId + + return LogReactionAddEvent( + 
eventTimestamp=self.timestamp, + eventTriggedByUsername=self.bob, + entityName="LogReaction", + entityId=ReactionId(logId=1, reactionId=1, userId=2), + parentLogDocumentInfo=self.document, + parentLogInfo=self.alice_entry, + logReaction=self._create_reaction("🎉", "tada"), + description="Celebrating the achievement", + ) + + def create_reaction_delete_by_bob_from_alice(self) -> LogReactionDeleteEvent: + """Bob removes a reaction from Alice's entry.""" + from bely_mqtt.models import ReactionId + + return LogReactionDeleteEvent( + eventTimestamp=self.timestamp, + eventTriggedByUsername=self.bob, + entityName="LogReaction", + entityId=ReactionId(logId=1, reactionId=1, userId=2), + parentLogDocumentInfo=self.document, + parentLogInfo=self.alice_entry, + logReaction=self._create_reaction("👍", "thumbsup"), + description="Removing thumbsup", + ) + + def create_self_reaction_by_alice(self) -> LogReactionAddEvent: + """Alice adds a reaction to her own entry (should not notify).""" + from bely_mqtt.models import ReactionId + + return LogReactionAddEvent( + eventTimestamp=self.timestamp, + eventTriggedByUsername=self.alice, + entityName="LogReaction", + entityId=ReactionId(logId=1, reactionId=2, userId=1), + parentLogDocumentInfo=self.document, + parentLogInfo=self.alice_entry, + logReaction=self._create_reaction("✅", "check"), + description="Marking as complete", + ) + + def create_entry_delete_by_bob_on_alice(self) -> LogEntryDeleteEvent: + """Bob deletes Alice's entry.""" + from bely_mqtt.models import LogbookInfo + + return LogEntryDeleteEvent( + eventTimestamp=self.timestamp, + eventTriggedByUsername=self.bob, + entityName="LogEntry", + entityId=1, + parentLogDocumentInfo=self.document, + logInfo=self.alice_entry, + description="Deleted outdated entry", + textDiff="- Deleted content: This was the original entry text", + logbookList=[LogbookInfo(id=1, name="Project Alpha", displayName="Project Alpha")], + ) + + def create_entry_delete_by_charlie_on_bob(self) -> 
LogEntryDeleteEvent: + """Charlie deletes Bob's entry in Alice's document.""" + from bely_mqtt.models import LogbookInfo + + return LogEntryDeleteEvent( + eventTimestamp=self.timestamp, + eventTriggedByUsername=self.charlie, + entityName="LogEntry", + entityId=2, + parentLogDocumentInfo=self.document, + logInfo=self.bob_entry, + description="Removed duplicate entry", + textDiff="- Deleted content: Bob's test results", + logbookList=[LogbookInfo(id=1, name="Project Alpha", displayName="Project Alpha")], + ) + + def create_reply_delete_by_charlie_on_bob_entry(self) -> LogEntryReplyDeleteEvent: + """Charlie deletes a reply on Bob's entry in Alice's document.""" + from bely_mqtt.models import LogbookInfo + + return LogEntryReplyDeleteEvent( + eventTimestamp=self.timestamp, + eventTriggedByUsername=self.charlie, + entityName="LogEntryReply", + entityId=3, + parentLogDocumentInfo=self.document, + parentLogInfo=self.bob_entry, + logInfo=LogInfo( + id=3, + enteredByUsername=self.charlie, + lastModifiedByUsername=self.charlie, + enteredOnDateTime=self.timestamp, + lastModifiedOnDateTime=self.timestamp, + ), + textDiff="- Deleted reply: This comment is no longer relevant", + logbookList=[LogbookInfo(id=1, name="Project Alpha", displayName="Project Alpha")], + description="Deleted outdated reply", + ) + + +class TestAppriseSmartNotificationHandler: + """Test suite for AppriseSmartNotificationHandler.""" + + @pytest.fixture + def mock_factory(self): + """Provide a mock event factory.""" + return MockEventFactory() + + @pytest.fixture + def config_file(self, tmp_path): + """Create a temporary config file for testing.""" + config = { + "global": { + "mail_server": { + "host": "smtp.example.com", + "port": 587, + "username": "notifications@example.com", + "password": "secret123", + "use_tls": True, + } + }, + "users": { + "alice": { + "apprise_urls": [ + "mailto://alice@example.com", + "discord://webhook_id/webhook_token", + ], + "notifications": { + "entry_updates": True, + 
"own_entry_edits": True, + "entry_replies": True, + "new_entries": True, + "reactions": True, + "document_replies": True, + }, + }, + "bob": { + "apprise_urls": [ + "mailto://bob@example.com", + "slack://TokenA/TokenB/TokenC/", + ], + "notifications": { + "entry_updates": True, + "own_entry_edits": True, + "entry_replies": True, + "new_entries": False, # Bob doesn't want new entry notifications + "reactions": False, # Bob doesn't want reaction notifications + "document_replies": True, + }, + }, + "charlie": { + "apprise_urls": [ + "mailto://charlie@example.com", + "teams://webhook_url", + ], + "notifications": { + "entry_updates": True, + "own_entry_edits": True, + "entry_replies": True, + "new_entries": True, + "reactions": True, + "document_replies": True, + }, + }, + }, + } + + config_path = tmp_path / "test_config.yaml" + with open(config_path, "w") as f: + yaml.dump(config, f) + + return config_path + + @pytest.fixture + def handler(self, config_file): + """Create a handler instance with mocked Apprise.""" + global_config = GlobalConfig({"bely_url": "https://bely.example.com"}) + + # Mock AppriseWithEmailHeaders class + mock_apprise_wrapper = MagicMock() + mock_apprise_wrapper.return_value.notify = MagicMock(return_value=True) + mock_apprise_wrapper.return_value.add = MagicMock(return_value=True) + mock_apprise_wrapper.return_value.__bool__ = MagicMock(return_value=True) + + with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_wrapper): + handler = AppriseSmartNotificationHandler( + config_path=str(config_file), global_config=global_config + ) + + # Mock the notify method for all user instances + for username, endpoints in handler.processor.user_endpoint_configs.items(): + for ep in endpoints: + ep["apprise"].notify = MagicMock(return_value=True) + + return handler + + @pytest.mark.asyncio + async def test_entry_add_by_collaborator(self, handler, mock_factory): + """Test: Bob adds entry to Alice's document -> Alice gets notified.""" + 
event = mock_factory.create_entry_add_by_bob() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_entry_add(event) + + # Alice should be notified about new entry in her document + mock_send.assert_called_once() + call_args = mock_send.call_args + assert call_args[0][0] == "alice" + # Alice has email configured, so should get email-style subject + assert "Re: Log: Project Alpha Documentation" in call_args[0][1] + assert "bob" in call_args[0][1] or "bob" in call_args[0][2] + assert "Performance improved by 20%" in call_args[0][2] + + @pytest.mark.asyncio + async def test_entry_update_by_collaborator(self, handler, mock_factory): + """Test: Bob updates Alice's entry -> Alice gets notified (deduped as she's both owner & creator).""" + event = mock_factory.create_entry_update_by_bob_on_alice() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_entry_update(event) + + # Should send 1 notification (deduplicated since Alice is both owner and creator) + assert mock_send.call_count == 1 + + # Check notification + call_args = mock_send.call_args + assert call_args[0][0] == "alice" # As both owner and original creator + + # Verify notification content + assert "bob" in call_args[0][2] + assert "typo" in call_args[0][2] + + @pytest.mark.asyncio + async def test_entry_update_by_owner(self, handler, mock_factory): + """Test: Charlie updates Bob's entry in Alice's document -> Both Alice and Bob get notified.""" + event = mock_factory.create_entry_update_by_charlie_on_bob() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_entry_update(event) + + # Bob should be notified his entry was edited + # Alice should also be notified as document owner + assert mock_send.call_count == 2 + + calls = mock_send.call_args_list + usernames = [call[0][0] 
for call in calls] + assert "bob" in usernames + assert "alice" in usernames + + @pytest.mark.asyncio + async def test_reply_add_by_collaborator(self, handler, mock_factory): + """Test: Charlie replies to Bob's entry in Alice's document -> Both Alice and Bob get notified.""" + event = mock_factory.create_reply_add_by_charlie_to_bob() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_entry_reply_add(event) + + # Bob gets notified as entry creator + # Alice gets notified as document owner + assert mock_send.call_count == 2 + + calls = mock_send.call_args_list + usernames = [call[0][0] for call in calls] + assert "bob" in usernames # Entry creator + assert "alice" in usernames # Document owner + + # Check notification content + for call in calls: + assert "charlie" in call[0][2].lower() + + @pytest.mark.asyncio + async def test_reply_add_by_owner(self, handler, mock_factory): + """Test: Alice replies to Bob's entry in her own document -> Bob gets notified.""" + event = mock_factory.create_reply_add_by_alice_to_bob() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_entry_reply_add(event) + + # Only Bob should be notified as entry creator + # Alice doesn't get notified (she's the one replying to her own document) + assert mock_send.call_count == 1 + + call_args = mock_send.call_args + assert call_args[0][0] == "bob" + assert "alice" in call_args[0][2].lower() + + @pytest.mark.asyncio + async def test_reply_update(self, handler, mock_factory): + """Test: Charlie updates reply on Bob's entry in Alice's document -> Both Alice and Bob get notified.""" + event = mock_factory.create_reply_update_by_charlie_on_bob_entry() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_entry_reply_update(event) + + # Bob should be notified as 
entry creator + # Alice should be notified as document owner + assert mock_send.call_count == 2 + + calls = mock_send.call_args_list + usernames = [call[0][0] for call in calls] + assert "bob" in usernames # Entry creator + assert "alice" in usernames # Document owner + + # Check that charlie is mentioned in notifications + for call in calls: + assert "charlie" in call[0][2].lower() + + @pytest.mark.asyncio + async def test_reaction_add(self, handler, mock_factory): + """Test: Bob adds reaction to Alice's entry -> Alice gets notified.""" + event = mock_factory.create_reaction_add_by_bob_to_alice() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_reaction_add(event) + + # Alice should be notified about the reaction + mock_send.assert_called_once() + call_args = mock_send.call_args + assert call_args[0][0] == "alice" + # Alice has email configured, so should get email-style subject with action + assert "Re: Log: Project Alpha Documentation" in call_args[0][1] + assert "bob" in call_args[0][1] # Action by bob should be in subject + assert "🎉" in call_args[0][2] + assert "tada" in call_args[0][2] + + @pytest.mark.asyncio + async def test_reaction_delete(self, handler, mock_factory): + """Test: Bob removes reaction from Alice's entry -> Alice gets notified.""" + event = mock_factory.create_reaction_delete_by_bob_from_alice() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_reaction_delete(event) + + # Alice should be notified about the reaction removal + mock_send.assert_called_once() + call_args = mock_send.call_args + assert call_args[0][0] == "alice" + # Alice has email configured, so should get email-style subject with action + assert "Re: Log: Project Alpha Documentation" in call_args[0][1] + assert "bob" in call_args[0][1] # Action by bob should be in subject + assert "👍" in call_args[0][2] + + 
@pytest.mark.asyncio + async def test_self_reaction_no_notification(self, handler, mock_factory): + """Test: Alice reacts to her own entry -> No notification sent.""" + event = mock_factory.create_self_reaction_by_alice() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_reaction_add(event) + + # No notification should be sent for self-reactions + mock_send.assert_not_called() + + @pytest.mark.asyncio + async def test_notification_settings_respected(self, handler, mock_factory): + """Test: Notification settings are respected (Bob has reactions disabled).""" + from bely_mqtt.models import ReactionId, LogReactionInfo, ReactionInfo + + # Create a reaction event where Alice reacts to Bob's entry + event = LogReactionAddEvent( + eventTimestamp=mock_factory.timestamp, + eventTriggedByUsername="alice", + entityName="LogReaction", + entityId=ReactionId(logId=2, reactionId=3, userId=1), + parentLogDocumentInfo=mock_factory.document, + parentLogInfo=mock_factory.bob_entry, # Bob's entry + logReaction=LogReactionInfo( + id=ReactionId(logId=2, reactionId=3, userId=1), + reaction=ReactionInfo( + id=3, emoji="💯", name="100", emojiCode=128175, description="Perfect" + ), + username="alice", + ), + description="Perfect solution!", + ) + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_reaction_add(event) + + # Bob has reactions disabled, so no notification + mock_send.assert_not_called() + + @pytest.mark.asyncio + async def test_permalink_generation(self, handler, mock_factory): + """Test: Permalinks are correctly generated in notifications.""" + event = mock_factory.create_entry_add_by_bob() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_entry_add(event) + + call_args = mock_send.call_args + body = call_args[0][2] + + # Check for 
permalink as hyperlink + assert "https://bely.example.com/views/item/view?id=100&logId=2" in body + assert ( + 'View entry' + in body + ) + + @pytest.mark.asyncio + async def test_trigger_description(self, handler, mock_factory): + """Test: Trigger descriptions are included in notifications.""" + event = mock_factory.create_entry_add_by_bob() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_entry_add(event) + + call_args = mock_send.call_args + body = call_args[0][2] + + # Check for trigger description + assert "This notification was sent because" in body + assert "bob" in body + assert "new_entries" in body + + @pytest.mark.asyncio + async def test_error_handling(self, handler, mock_factory): + """Test: Errors are handled gracefully without crashing.""" + event = mock_factory.create_entry_add_by_bob() + + # Mock the logger + with patch.object(handler, "logger") as mock_logger: + # Simulate an error in send_notification + with patch.object( + handler.processor, "send_notification", side_effect=Exception("Network error") + ): + # Should not raise an exception + await handler.handle_log_entry_add(event) + + # Check that error was logged + assert mock_logger.error.called + + @pytest.mark.asyncio + async def test_entry_delete_by_collaborator(self, handler, mock_factory): + """Test: Bob deletes Alice's entry -> Alice gets notified.""" + event = mock_factory.create_entry_delete_by_bob_on_alice() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_entry_delete(event) + + # Alice should be notified as both owner and original creator (deduplicated) + assert mock_send.call_count == 1 + + call_args = mock_send.call_args + assert call_args[0][0] == "alice" + # Alice has email configured, so should get email-style subject + assert "Re: Log: Project Alpha Documentation" in call_args[0][1] + assert "[Entry Deleted]" 
in call_args[0][1] + assert "bob" in call_args[0][2] + assert "Deleted content" in call_args[0][2] + + @pytest.mark.asyncio + async def test_entry_delete_by_third_party(self, handler, mock_factory): + """Test: Charlie deletes Bob's entry in Alice's document -> Both Alice and Bob get notified.""" + event = mock_factory.create_entry_delete_by_charlie_on_bob() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_entry_delete(event) + + # Bob should be notified his entry was deleted + # Alice should also be notified as document owner + assert mock_send.call_count == 2 + + calls = mock_send.call_args_list + usernames = [call[0][0] for call in calls] + assert "bob" in usernames + assert "alice" in usernames + + # Check notification content + for call in calls: + assert "charlie" in call[0][2].lower() + assert "deleted" in call[0][2].lower() + + @pytest.mark.asyncio + async def test_reply_delete(self, handler, mock_factory): + """Test: Charlie deletes reply on Bob's entry in Alice's document -> Both Alice and Bob get notified.""" + event = mock_factory.create_reply_delete_by_charlie_on_bob_entry() + + with patch.object( + handler.processor, "send_notification", new_callable=AsyncMock + ) as mock_send: + await handler.handle_log_entry_reply_delete(event) + + # Bob should be notified as entry creator + # Alice should be notified as document owner + assert mock_send.call_count == 2 + + calls = mock_send.call_args_list + usernames = [call[0][0] for call in calls] + assert "bob" in usernames # Entry creator + assert "alice" in usernames # Document owner + + # Check that charlie is mentioned in notifications + for call in calls: + assert "charlie" in call[0][2].lower() + assert "deleted" in call[0][2].lower() + + @pytest.mark.asyncio + async def test_no_config_handler(self): + """Test: Handler works without config (no notifications sent).""" + # Mock AppriseWithEmailHeaders class + 
mock_apprise_wrapper = MagicMock() + mock_apprise_wrapper.return_value.notify = MagicMock(return_value=True) + mock_apprise_wrapper.return_value.add = MagicMock(return_value=True) + mock_apprise_wrapper.return_value.__bool__ = MagicMock(return_value=True) + + with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_wrapper): + # Create handler without config - should not raise an error + handler = AppriseSmartNotificationHandler() + + # Verify processor has no user configurations + assert handler.processor.user_endpoint_configs == {} + + factory = MockEventFactory() + event = factory.create_entry_add_by_bob() + + # Should handle event without error even without config + await handler.handle_log_entry_add(event) + + # Try other event types too - all should work without errors + await handler.handle_log_entry_update(factory.create_entry_update_by_bob_on_alice()) + await handler.handle_log_entry_reply_add(factory.create_reply_add_by_charlie_to_bob()) + await handler.handle_log_reaction_add(factory.create_reaction_add_by_bob_to_alice()) + await handler.handle_log_entry_delete(factory.create_entry_delete_by_bob_on_alice()) + await handler.handle_log_entry_reply_delete( + factory.create_reply_delete_by_charlie_on_bob_entry() + ) + + # Verify handler can process events without config + # (it just won't send notifications) + + +class TestNotificationContent: + """Test the content and formatting of notifications.""" + + @pytest.fixture + def formatter(self): + """Create a formatter instance.""" + from formatters import NotificationFormatter + + return NotificationFormatter( + bely_url="https://bely.example.com", logger=logging.getLogger("test") + ) + + def test_entry_add_formatting(self, formatter): + """Test formatting of entry add notifications.""" + factory = MockEventFactory() + event = factory.create_entry_add_by_bob() + + body = formatter.format_entry_added(event) + + assert "New entry added to Project Alpha Documentation" in body + assert "By: bob" 
in body + assert "Performance improved by 20%" in body + assert "alert('xss')" in body + + +if __name__ == "__main__": + # Run tests with pytest + pytest.main([__file__, "-v", "--asyncio-mode=auto"]) diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_headers_wrapper.py b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_headers_wrapper.py new file mode 100644 index 000000000..38259474a --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_headers_wrapper.py @@ -0,0 +1,602 @@ +#!/usr/bin/env python3 +""" +Comprehensive test suite for email headers wrapper functionality. + +This module tests that email headers (Message-ID, References, In-Reply-To) +are properly passed through the wrapper to enable email threading. +""" + +import sys +from pathlib import Path +from unittest.mock import MagicMock, patch +import pytest + +# Add parent directory to path for imports +handler_path = Path(__file__).parent.parent +if handler_path.exists(): + sys.path.insert(0, str(handler_path)) + +# Mock apprise at module level before importing apprise_email_wrapper +sys.modules["apprise"] = MagicMock() +sys.modules["apprise.plugins.email.base"] = MagicMock() + +import apprise_email_wrapper # noqa: E402 + + +class TestEmailNotificationWrapper: + """Test suite for EmailNotificationWrapper class.""" + + @pytest.fixture + def mock_apprise(self): + """Create mock apprise module.""" + mock_apprise = MagicMock() + mock_apprise.Apprise = MagicMock() + + # Create a mock NotifyEmail instance + mock_email_instance = MagicMock() + mock_email_instance.__class__.__name__ = "NotifyEmail" + mock_email_instance.send = MagicMock(return_value=True) + mock_email_instance.headers = {} + + mock_apprise.Apprise.instantiate = MagicMock(return_value=mock_email_instance) + + return mock_apprise, mock_email_instance + + def 
test_wrapper_initialization(self): + """Test EmailNotificationWrapper initialization with various URLs.""" + # Test valid email URLs + wrapper = apprise_email_wrapper.EmailNotificationWrapper("mailto://user:pass@gmail.com") + assert wrapper.scheme == "mailto" + + wrapper_secure = apprise_email_wrapper.EmailNotificationWrapper( + "mailtos://user:pass@gmail.com" + ) + assert wrapper_secure.scheme == "mailtos" + + # Test invalid URL + with pytest.raises(ValueError, match="Not an email URL"): + apprise_email_wrapper.EmailNotificationWrapper("slack://token") + + def test_send_with_headers(self, mock_apprise): + """Test sending email with custom headers.""" + mock_apprise_module, mock_email_instance = mock_apprise + + # Patch the apprise module that's already imported + with patch.object(apprise_email_wrapper, "apprise", mock_apprise_module): + wrapper = apprise_email_wrapper.EmailNotificationWrapper("mailto://test@example.com") + + # Define custom headers for email threading + headers = { + "Message-ID": "", + "References": "", + "In-Reply-To": "", + "X-Thread-Topic": "Test Document", + } + + # Send notification with headers + result = wrapper.send_with_headers( + title="Test Subject", body="Test Body", headers=headers + ) + + # Verify the email instance was created + mock_apprise_module.Apprise.instantiate.assert_called_once_with( + "mailto://test@example.com" + ) + + # Verify headers were set on the email instance + assert mock_email_instance.headers == headers + + # Verify send was called with correct parameters + mock_email_instance.send.assert_called_once_with(body="Test Body", title="Test Subject") + + assert result is True + + def test_send_without_headers(self, mock_apprise): + """Test sending email without custom headers.""" + mock_apprise_module, mock_email_instance = mock_apprise + + with patch.object(apprise_email_wrapper, "apprise", mock_apprise_module): + wrapper = apprise_email_wrapper.EmailNotificationWrapper("mailto://test@example.com") + + # Send 
notification without headers + result = wrapper.send_with_headers(title="Test Subject", body="Test Body", headers=None) + + # Verify headers attribute exists but is empty + assert hasattr(mock_email_instance, "headers") + assert mock_email_instance.headers == {} + + # Verify send was called + mock_email_instance.send.assert_called_once() + assert result is True + + def test_headers_override_existing(self, mock_apprise): + """Test that new headers override any existing headers.""" + mock_apprise_module, mock_email_instance = mock_apprise + + # Pre-populate headers with existing values + mock_email_instance.headers = {"Old-Header": "old-value"} + + with patch.object(apprise_email_wrapper, "apprise", mock_apprise_module): + wrapper = apprise_email_wrapper.EmailNotificationWrapper("mailto://test@example.com") + + # Send with new headers + new_headers = {"Message-ID": "", "New-Header": "new-value"} + + wrapper.send_with_headers(title="Test", body="Test", headers=new_headers) + + # Verify old headers were cleared and new ones set + assert mock_email_instance.headers == new_headers + assert "Old-Header" not in mock_email_instance.headers + + def test_fallback_for_non_email(self, mock_apprise): + """Test fallback to regular Apprise for non-email notifications.""" + mock_apprise_module, _ = mock_apprise + + # Make instantiate return None to simulate non-email URL + mock_apprise_module.Apprise.instantiate.return_value = None + + # Create a mock Apprise instance for fallback + mock_apprise_instance = MagicMock() + mock_apprise_instance.add = MagicMock(return_value=True) + mock_apprise_instance.notify = MagicMock(return_value=True) + mock_apprise_module.Apprise.return_value = mock_apprise_instance + + with patch.object(apprise_email_wrapper, "apprise", mock_apprise_module): + wrapper = apprise_email_wrapper.EmailNotificationWrapper("mailto://test@example.com") + + # Send notification (should fall back to regular Apprise) + result = wrapper.send_with_headers( + title="Test", 
body="Test Body", headers={"Message-ID": ""} + ) + + # Verify fallback was used + mock_apprise_instance.add.assert_called_once_with("mailto://test@example.com") + mock_apprise_instance.notify.assert_called_once_with(body="Test Body", title="Test") + assert result is True + + def test_error_handling(self, mock_apprise): + """Test error handling in send_with_headers.""" + mock_apprise_module, mock_email_instance = mock_apprise + + # Make send raise an exception + mock_email_instance.send.side_effect = Exception("Network error") + + with patch.object(apprise_email_wrapper, "apprise", mock_apprise_module): + wrapper = apprise_email_wrapper.EmailNotificationWrapper("mailto://test@example.com") + + # Should return False on error + with patch("builtins.print") as mock_print: + result = wrapper.send_with_headers( + title="Test", body="Test", headers={"Message-ID": ""} + ) + + assert result is False + mock_print.assert_called_once() + assert "Error sending email with headers" in str(mock_print.call_args) + + +class TestAppriseWithEmailHeaders: + """Test suite for AppriseWithEmailHeaders class.""" + + @pytest.fixture + def mock_setup(self): + """Setup mocks for testing.""" + mock_apprise = MagicMock() + mock_apprise.Apprise = MagicMock() + + mock_apprise_instance = MagicMock() + mock_apprise_instance.add = MagicMock(return_value=True) + mock_apprise_instance.notify = MagicMock(return_value=True) + mock_apprise.Apprise.return_value = mock_apprise_instance + + return mock_apprise, mock_apprise_instance + + def test_add_email_url(self, mock_setup): + """Test adding email URLs creates wrapper instances.""" + mock_apprise, mock_apprise_instance = mock_setup + + with patch.object(apprise_email_wrapper, "apprise", mock_apprise): + wrapper = apprise_email_wrapper.AppriseWithEmailHeaders() + + # Mock EmailNotificationWrapper + with patch.object( + apprise_email_wrapper, "EmailNotificationWrapper" + ) as mock_wrapper_class: + mock_email_wrapper = MagicMock() + 
mock_wrapper_class.return_value = mock_email_wrapper + + # Add email URLs + assert wrapper.add("mailto://user1@example.com") + assert wrapper.add("mailtos://user2@example.com") + + # Verify wrappers were created + assert mock_wrapper_class.call_count == 2 + mock_wrapper_class.assert_any_call("mailto://user1@example.com") + mock_wrapper_class.assert_any_call("mailtos://user2@example.com") + + # Verify wrappers are stored + assert len(wrapper.email_wrappers) == 2 + + def test_add_non_email_url(self, mock_setup): + """Test adding non-email URLs uses regular Apprise.""" + mock_apprise, mock_apprise_instance = mock_setup + + with patch.object(apprise_email_wrapper, "apprise", mock_apprise): + wrapper = apprise_email_wrapper.AppriseWithEmailHeaders() + wrapper.apprise = mock_apprise_instance + + # Add non-email URLs + assert wrapper.add("slack://token") + assert wrapper.add("discord://webhook") + + # Verify regular Apprise was used + assert mock_apprise_instance.add.call_count == 2 + mock_apprise_instance.add.assert_any_call("slack://token") + mock_apprise_instance.add.assert_any_call("discord://webhook") + + # Verify URLs are tracked + assert len(wrapper.non_email_urls) == 2 + + def test_notify_with_mixed_urls(self, mock_setup): + """Test notify with both email and non-email URLs.""" + mock_apprise, mock_apprise_instance = mock_setup + + with patch.object(apprise_email_wrapper, "apprise", mock_apprise): + wrapper = apprise_email_wrapper.AppriseWithEmailHeaders() + wrapper.apprise = mock_apprise_instance + + # Create mock email wrappers + mock_email_wrapper1 = MagicMock() + mock_email_wrapper1.send_with_headers = MagicMock(return_value=True) + + mock_email_wrapper2 = MagicMock() + mock_email_wrapper2.send_with_headers = MagicMock(return_value=True) + + # Manually add to simulate successful adds + wrapper.email_wrappers = { + "mailto://user1@example.com": mock_email_wrapper1, + "mailto://user2@example.com": mock_email_wrapper2, + } + wrapper.non_email_urls = 
["slack://token"] + + # Define headers for email threading + headers = { + "Message-ID": "", + "References": "", + } + + # Send notification + result = wrapper.notify(body="Test notification", title="Test Title", headers=headers) + + # Verify email wrappers were called with headers + mock_email_wrapper1.send_with_headers.assert_called_once_with( + "Test Title", "Test notification", headers + ) + mock_email_wrapper2.send_with_headers.assert_called_once_with( + "Test Title", "Test notification", headers + ) + + # Verify regular Apprise was called for non-email + mock_apprise_instance.notify.assert_called_once_with( + body="Test notification", title="Test Title" + ) + + assert result is True + + def test_notify_without_headers(self, mock_setup): + """Test notify without headers still works.""" + mock_apprise, mock_apprise_instance = mock_setup + + with patch.object(apprise_email_wrapper, "apprise", mock_apprise): + wrapper = apprise_email_wrapper.AppriseWithEmailHeaders() + + # Create mock email wrapper + mock_email_wrapper = MagicMock() + mock_email_wrapper.send_with_headers = MagicMock(return_value=True) + + wrapper.email_wrappers = {"mailto://user@example.com": mock_email_wrapper} + + # Send notification without headers + result = wrapper.notify(body="Test", title="Title") + + # Verify wrapper was called with None for headers + mock_email_wrapper.send_with_headers.assert_called_once_with("Title", "Test", None) + + assert result is True + + def test_bool_operator(self, mock_setup): + """Test __bool__ operator returns correct values.""" + mock_apprise, _ = mock_setup + + with patch.object(apprise_email_wrapper, "apprise", mock_apprise): + wrapper = apprise_email_wrapper.AppriseWithEmailHeaders() + + # Empty wrapper should be False + assert not wrapper + + # Add email wrapper + wrapper.email_wrappers["mailto://test@example.com"] = MagicMock() + assert wrapper + + # Clear and add non-email URL + wrapper.email_wrappers.clear() + 
wrapper.non_email_urls.append("slack://token") + assert wrapper + + def test_partial_failure_handling(self, mock_setup): + """Test handling when some notifications fail.""" + mock_apprise, mock_apprise_instance = mock_setup + + with patch.object(apprise_email_wrapper, "apprise", mock_apprise): + wrapper = apprise_email_wrapper.AppriseWithEmailHeaders() + + # Create mock email wrappers with different results + mock_email_wrapper1 = MagicMock() + mock_email_wrapper1.send_with_headers = MagicMock(return_value=True) + + mock_email_wrapper2 = MagicMock() + mock_email_wrapper2.send_with_headers = MagicMock(return_value=False) # This one fails + + wrapper.email_wrappers = { + "mailto://user1@example.com": mock_email_wrapper1, + "mailto://user2@example.com": mock_email_wrapper2, + } + + # Send notification + result = wrapper.notify( + body="Test", title="Title", headers={"Message-ID": ""} + ) + + # Should return False if any notification fails + assert result is False + + +class TestUtilityFunctions: + """Test utility functions in the module.""" + + def test_is_email_notification(self): + """Test is_email_notification function.""" + # Test email URLs + assert apprise_email_wrapper.is_email_notification("mailto://user@example.com") + assert apprise_email_wrapper.is_email_notification("mailtos://secure@example.com") + assert apprise_email_wrapper.is_email_notification("mailto://user:pass@smtp.gmail.com:587") + + # Test non-email URLs + assert not apprise_email_wrapper.is_email_notification("slack://token") + assert not apprise_email_wrapper.is_email_notification("discord://webhook/token") + assert not apprise_email_wrapper.is_email_notification("https://example.com") + assert not apprise_email_wrapper.is_email_notification("telegram://bot_token/chat_id") + + # Test invalid URLs + assert not apprise_email_wrapper.is_email_notification("") + assert not apprise_email_wrapper.is_email_notification("not a url") + + +class TestIntegrationScenarios: + """Test realistic integration 
scenarios.""" + + def test_email_threading_scenario(self): + """Test a complete email threading scenario.""" + mock_apprise = MagicMock() + + # Create a mock email instance that tracks headers + class MockEmailInstance: + def __init__(self): + self.headers = {} + self.__class__.__name__ = "NotifyEmail" + self.sent_messages = [] + + def send(self, body, title): + self.sent_messages.append( + {"title": title, "body": body, "headers": self.headers.copy()} + ) + return True + + mock_email = MockEmailInstance() + mock_apprise.Apprise.instantiate = MagicMock(return_value=mock_email) + + with patch.object(apprise_email_wrapper, "apprise", mock_apprise): + # Create wrapper for email notification + wrapper = apprise_email_wrapper.EmailNotificationWrapper( + "mailto://notifications@example.com" + ) + + # Simulate a thread of messages + + # 1. Initial message + initial_headers = { + "Message-ID": "", + "X-Thread-Topic": "Project Status Report", + } + + wrapper.send_with_headers( + title="Project Status Report", + body="Initial project status", + headers=initial_headers, + ) + + # 2. First reply + reply1_headers = { + "Message-ID": "", + "References": "", + "In-Reply-To": "", + "X-Thread-Topic": "Project Status Report", + } + + wrapper.send_with_headers( + title="Re: Project Status Report", + body="Update on task completion", + headers=reply1_headers, + ) + + # 3. 
Second reply in thread + reply2_headers = { + "Message-ID": "", + "References": " ", + "In-Reply-To": "", + "X-Thread-Topic": "Project Status Report", + } + + wrapper.send_with_headers( + title="Re: Project Status Report", + body="Additional comments", + headers=reply2_headers, + ) + + # Verify all messages were sent with correct headers + assert len(mock_email.sent_messages) == 3 + + # Check first message + assert ( + mock_email.sent_messages[0]["headers"]["Message-ID"] + == "" + ) + assert "References" not in mock_email.sent_messages[0]["headers"] + + # Check first reply + assert ( + mock_email.sent_messages[1]["headers"]["Message-ID"] + == "" + ) + assert ( + mock_email.sent_messages[1]["headers"]["In-Reply-To"] + == "" + ) + + # Check second reply + assert ( + mock_email.sent_messages[2]["headers"]["Message-ID"] + == "" + ) + assert ( + mock_email.sent_messages[2]["headers"]["In-Reply-To"] + == "" + ) + assert ( + "" + in mock_email.sent_messages[2]["headers"]["References"] + ) + + def test_multiple_recipients_with_headers(self): + """Test sending to multiple email recipients with same headers.""" + mock_apprise = MagicMock() + mock_apprise.Apprise = MagicMock() + + # Track all email instances created + email_instances = [] + + def create_email_instance(url): + instance = MagicMock() + instance.__class__.__name__ = "NotifyEmail" + instance.headers = {} + instance.send = MagicMock(return_value=True) + instance.url = url # Track which URL this is for + email_instances.append(instance) + return instance + + mock_apprise.Apprise.instantiate = create_email_instance + + with patch.object(apprise_email_wrapper, "apprise", mock_apprise): + # Create wrapper with multiple email endpoints + wrapper = apprise_email_wrapper.AppriseWithEmailHeaders() + + # Mock EmailNotificationWrapper to track calls + with patch.object( + apprise_email_wrapper, "EmailNotificationWrapper" + ) as mock_wrapper_class: + # Create individual mock wrappers + mock_wrappers = [] + for i in range(3): 
+ mock_wrapper = MagicMock() + mock_wrapper.send_with_headers = MagicMock(return_value=True) + mock_wrappers.append(mock_wrapper) + + # Configure mock to return different wrapper for each call + mock_wrapper_class.side_effect = mock_wrappers + + # Add multiple email recipients + wrapper.add("mailto://alice@example.com") + wrapper.add("mailto://bob@example.com") + wrapper.add("mailto://charlie@example.com") + + # Send notification with threading headers + headers = { + "Message-ID": "", + "X-Priority": "High", + "X-Thread-Topic": "System Alert", + } + + result = wrapper.notify( + title="System Alert", body="Critical system update required", headers=headers + ) + + # Verify all wrappers were called with the same headers + for mock_wrapper in mock_wrappers: + mock_wrapper.send_with_headers.assert_called_once_with( + "System Alert", "Critical system update required", headers + ) + + assert result is True + + +class TestEdgeCases: + """Test edge cases and boundary conditions.""" + + def test_special_characters_in_headers(self): + """Test headers with special characters.""" + mock_apprise = MagicMock() + mock_email = MagicMock() + mock_email.__class__.__name__ = "NotifyEmail" + mock_email.headers = {} + mock_email.send = MagicMock(return_value=True) + mock_apprise.Apprise.instantiate = MagicMock(return_value=mock_email) + + with patch.object(apprise_email_wrapper, "apprise", mock_apprise): + wrapper = apprise_email_wrapper.EmailNotificationWrapper("mailto://test@example.com") + + # Headers with special characters + headers = { + "Message-ID": "", + "X-Custom-Header": "Value with spaces and 特殊文字", + "References": " ", + } + + result = wrapper.send_with_headers(title="Test", body="Test", headers=headers) + + # Headers should be set exactly as provided + assert mock_email.headers == headers + assert result is True + + def test_very_long_header_values(self): + """Test headers with very long values.""" + mock_apprise = MagicMock() + mock_email = MagicMock() + 
mock_email.__class__.__name__ = "NotifyEmail" + mock_email.headers = {} + mock_email.send = MagicMock(return_value=True) + mock_apprise.Apprise.instantiate = MagicMock(return_value=mock_email) + + with patch.object(apprise_email_wrapper, "apprise", mock_apprise): + wrapper = apprise_email_wrapper.EmailNotificationWrapper("mailto://test@example.com") + + # Create a very long References header (common in long email threads) + long_references = " ".join([f"" for i in range(50)]) + + headers = { + "Message-ID": "", + "References": long_references, + "X-Long-Value": "A" * 1000, # Very long header value + } + + result = wrapper.send_with_headers(title="Test", body="Test", headers=headers) + + # Headers should be set regardless of length + assert mock_email.headers == headers + assert len(mock_email.headers["References"]) > 500 + assert len(mock_email.headers["X-Long-Value"]) == 1000 + assert result is True + + +if __name__ == "__main__": + # Run tests with pytest + pytest.main([__file__, "-v", "--tb=short"]) diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_integration.py b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_integration.py new file mode 100644 index 000000000..3636430cc --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/apprise_smart_notification/test/test_integration.py @@ -0,0 +1,816 @@ +""" +Integration tests for Apprise Smart Notification Handler. + +These tests demonstrate the handler processing realistic MQTT event sequences. 
+""" + +import asyncio +from datetime import datetime, timedelta +from pathlib import Path +from unittest.mock import MagicMock, patch, AsyncMock +import pytest +import yaml +import sys + +# Add src directory to path to import bely_mqtt +src_path = Path(__file__).parent.parent.parent.parent / "src" +if src_path.exists(): + sys.path.insert(0, str(src_path)) + +# Mock apprise before importing handler +sys.modules["apprise"] = MagicMock() + +from bely_mqtt import ( # noqa: E402 + LogEntryAddEvent, + LogEntryUpdateEvent, + LogEntryDeleteEvent, + LogEntryReplyAddEvent, + LogEntryReplyDeleteEvent, + LogReactionAddEvent, +) +from bely_mqtt.models import ( # noqa: E402 + LogInfo, + LogDocumentInfo, + LogReactionInfo, + ReactionInfo, + LogbookInfo, +) +from bely_mqtt.config import GlobalConfig # noqa: E402 + +# Add parent directory to path to import handler module +handler_path = Path(__file__).parent.parent +if handler_path.exists(): + sys.path.insert(0, str(handler_path)) + +from handler import AppriseSmartNotificationHandler # noqa: E402 + + +class TestScenarios: + """Integration test scenarios simulating real-world usage.""" + + @pytest.fixture + def test_config(self, tmp_path): + """Create a test configuration with multiple users.""" + config = { + "global": { + "mail_server": { + "host": "smtp.company.com", + "port": 587, + "username": "notifications@company.com", + "password": "secure_password", + "use_tls": True, + } + }, + "users": { + # Team lead - wants all notifications + "sarah": { + "apprise_urls": [ + "mailto://sarah@company.com", + "slack://TokenA/TokenB/TokenC/#general", + ], + "notifications": { + "entry_updates": True, + "own_entry_edits": True, + "entry_replies": True, + "new_entries": True, + "reactions": True, + "document_replies": True, + }, + }, + # Developer - selective notifications + "john": { + "apprise_urls": [ + "mailto://john@company.com", + "discord://webhook_id/webhook_token", + ], + "notifications": { + "entry_updates": False, # Too noisy + 
"own_entry_edits": True, # Important + "entry_replies": True, # Important + "new_entries": False, # Too noisy + "reactions": True, # Fun + "document_replies": False, # Not a document owner + }, + }, + # QA Engineer - minimal notifications + "emma": { + "apprise_urls": [ + "mailto://emma@company.com", + ], + "notifications": { + "entry_updates": False, + "own_entry_edits": True, # Only when someone edits her entries + "entry_replies": True, # Only direct replies + "new_entries": False, + "reactions": False, + "document_replies": False, + }, + }, + }, + } + + config_path = tmp_path / "integration_config.yaml" + with open(config_path, "w") as f: + yaml.dump(config, f) + + return config_path + + @pytest.fixture + def handler(self, test_config): + """Create handler with test configuration.""" + global_config = GlobalConfig({"bely_url": "https://bely.company.com"}) + + # Mock AppriseWithEmailHeaders class + mock_apprise_wrapper = MagicMock() + mock_apprise_wrapper.return_value.notify = MagicMock(return_value=True) + mock_apprise_wrapper.return_value.add = MagicMock(return_value=True) + mock_apprise_wrapper.return_value.__bool__ = MagicMock(return_value=True) + + with patch("notification_processor.AppriseWithEmailHeaders", mock_apprise_wrapper): + handler = AppriseSmartNotificationHandler( + config_path=str(test_config), global_config=global_config + ) + + # Mock Apprise notify for all users + for username, endpoints in handler.processor.user_endpoint_configs.items(): + for ep in endpoints: + ep["apprise"].notify = MagicMock(return_value=True) + + return handler + + @pytest.fixture + def notification_tracker(self, handler): + """Track all notifications sent during tests.""" + notifications = [] + + async def track_notification(username, title, body, headers=None, notification_type=None): + notifications.append( + { + "username": username, + "title": title, + "body": body, + "headers": headers, + "notification_type": notification_type, + "timestamp": 
datetime.now().isoformat(), + } + ) + # Call the original mock + return True + + handler.processor.send_notification = AsyncMock(side_effect=track_notification) + return notifications + + @pytest.mark.asyncio + async def test_scenario_sprint_planning_document(self, handler, notification_tracker): + """ + Scenario: Sprint Planning Document + + Sarah (team lead) creates a sprint planning document. + John and Emma collaborate by adding entries and comments. + """ + base_time = datetime.now() + + # Sarah's sprint planning document + sprint_doc = LogDocumentInfo( + id=200, + name="Sprint 23 Planning", + ownerUsername="sarah", + createdByUsername="sarah", + lastModifiedByUsername="sarah", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ) + + # 1. John adds a task entry + from bely_mqtt.models import LogbookInfo + + john_entry = LogInfo( + id=201, + enteredByUsername="john", + lastModifiedByUsername="john", + enteredOnDateTime=(base_time + timedelta(minutes=5)).isoformat(), + lastModifiedOnDateTime=(base_time + timedelta(minutes=5)).isoformat(), + ) + + event1 = LogEntryAddEvent( + eventTimestamp=(base_time + timedelta(minutes=5)).isoformat(), + eventTriggedByUsername="john", + entityName="LogEntry", + entityId=201, + parentLogDocumentInfo=sprint_doc, + logInfo=john_entry, + description="Added API refactoring task", + textDiff="+ Task: Refactor /api/v1/users endpoint\n+ Estimate: 5 story points\n+ Dependencies: Database migration", + logbookList=[LogbookInfo(id=1, name="Sprint Planning", displayName="Sprint Planning")], + ) + + await handler.handle_log_entry_add(event1) + + # Sarah should be notified (document owner, new_entries enabled) + assert len(notification_tracker) == 1 + assert notification_tracker[0]["username"] == "sarah" + # Sarah has email configured, so should get email-style subject + assert "Re: Log: Sprint 23 Planning" in notification_tracker[0]["title"] + + # 2. 
Emma adds a QA consideration as a reply + event2 = LogEntryReplyAddEvent( + eventTimestamp=(base_time + timedelta(minutes=10)).isoformat(), + eventTriggedByUsername="emma", + entityName="LogEntryReply", + entityId=202, + parentLogDocumentInfo=sprint_doc, + parentLogInfo=john_entry, + logInfo=LogInfo( + id=202, + enteredByUsername="emma", + lastModifiedByUsername="emma", + enteredOnDateTime=(base_time + timedelta(minutes=10)).isoformat(), + lastModifiedOnDateTime=(base_time + timedelta(minutes=10)).isoformat(), + ), + textDiff="Need to update integration tests for the new endpoint structure. Also check backwards compatibility.", + logbookList=[LogbookInfo(id=1, name="Sprint Planning", displayName="Sprint Planning")], + description="QA consideration", + ) + + await handler.handle_log_entry_reply_add(event2) + + # John should be notified (entry creator, entry_replies enabled) + # Sarah should be notified (document owner, document_replies enabled) + assert len(notification_tracker) == 3 # 1 previous + 2 new + new_notifications = notification_tracker[-2:] + usernames = {n["username"] for n in new_notifications} + assert usernames == {"john", "sarah"} + + # 3. 
Sarah updates John's entry with priority + event3 = LogEntryUpdateEvent( + eventTimestamp=(base_time + timedelta(minutes=15)).isoformat(), + eventTriggedByUsername="sarah", + entityName="LogEntry", + entityId=201, + parentLogDocumentInfo=sprint_doc, + logInfo=john_entry, + description="Added priority", + textDiff="+ Priority: HIGH - Blocking customer feature", + logbookList=[LogbookInfo(id=1, name="Sprint Planning", displayName="Sprint Planning")], + ) + + await handler.handle_log_entry_update(event3) + + # John should be notified (own_entry_edits enabled) + # Sarah is the one making the update, so she won't be notified about her own action + assert len(notification_tracker) == 4 # 3 previous + 1 new + new_notification = notification_tracker[-1] + + # Check John's notification + assert new_notification["username"] == "john" + # John has email configured, so should get email-style subject + assert "Re: Log: Sprint 23 Planning" in new_notification["title"] + assert "[Entry Updated]" in new_notification["title"] + assert "sarah" in new_notification["body"].lower() + + # 4. 
John reacts to Emma's QA comment with thumbs up + from bely_mqtt.models import ReactionId + + event4 = LogReactionAddEvent( + eventTimestamp=(base_time + timedelta(minutes=20)).isoformat(), + eventTriggedByUsername="john", + entityName="LogReaction", + entityId=ReactionId(logId=202, reactionId=1, userId=2), + parentLogDocumentInfo=sprint_doc, + parentLogInfo=LogInfo( + id=202, # Emma's reply + enteredByUsername="emma", + lastModifiedByUsername="emma", + enteredOnDateTime=(base_time + timedelta(minutes=10)).isoformat(), + lastModifiedOnDateTime=(base_time + timedelta(minutes=10)).isoformat(), + ), + logReaction=LogReactionInfo( + id=ReactionId(logId=202, reactionId=1, userId=2), + reaction=ReactionInfo( + id=1, emoji="👍", name="thumbsup", emojiCode=128077, description="Acknowledged" + ), + username="john", + ), + description="Agreed with QA considerations", + ) + + await handler.handle_log_reaction_add(event4) + + # Emma has reactions disabled, so no notification + assert len(notification_tracker) == 4 # No new notifications + + @pytest.mark.asyncio + async def test_scenario_bug_report_collaboration(self, handler, notification_tracker): + """ + Scenario: Bug Report Document + + Emma creates a bug report document. + John investigates and updates. + Sarah reviews and adds comments. + """ + base_time = datetime.now() + + # Emma's bug report document + bug_doc = LogDocumentInfo( + id=300, + name="BUG-1234: API Response Timeout", + ownerUsername="emma", + createdByUsername="emma", + lastModifiedByUsername="emma", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ) + + # Emma's initial bug report entry (not used in this test scenario) + # emma_entry = LogInfo( + # id=301, + # enteredByUsername="emma", + # lastModifiedByUsername="emma", + # enteredOnDateTime=base_time.isoformat(), + # lastModifiedOnDateTime=base_time.isoformat(), + # ) + + # 1. 
John adds investigation findings + event1 = LogEntryAddEvent( + eventTimestamp=(base_time + timedelta(hours=1)).isoformat(), + eventTriggedByUsername="john", + entityName="LogEntry", + entityId=302, + parentLogDocumentInfo=bug_doc, + logInfo=LogInfo( + id=302, + enteredByUsername="john", + lastModifiedByUsername="john", + enteredOnDateTime=(base_time + timedelta(hours=1)).isoformat(), + lastModifiedOnDateTime=(base_time + timedelta(hours=1)).isoformat(), + ), + description="Root cause analysis", + textDiff="+ Found N+1 query issue in user permissions check\n+ Each API call triggers 50+ database queries\n+ Solution: Implement eager loading", + logbookList=[LogbookInfo(id=2, name="Bug Reports", displayName="Bug Reports")], + ) + + await handler.handle_log_entry_add(event1) + + # Emma should be notified (document owner, but new_entries disabled) + # So no notification + assert len(notification_tracker) == 0 + + # 2. Sarah adds a high-priority comment on John's findings + event2 = LogEntryReplyAddEvent( + eventTimestamp=(base_time + timedelta(hours=2)).isoformat(), + eventTriggedByUsername="sarah", + entityName="LogEntryReply", + entityId=303, + parentLogDocumentInfo=bug_doc, + parentLogInfo=LogInfo( + id=302, # John's investigation entry + enteredByUsername="john", + lastModifiedByUsername="john", + enteredOnDateTime=(base_time + timedelta(hours=1)).isoformat(), + lastModifiedOnDateTime=(base_time + timedelta(hours=1)).isoformat(), + ), + logInfo=LogInfo( + id=303, + enteredByUsername="sarah", + lastModifiedByUsername="sarah", + enteredOnDateTime=(base_time + timedelta(hours=2)).isoformat(), + lastModifiedOnDateTime=(base_time + timedelta(hours=2)).isoformat(), + ), + textDiff="This is affecting our major client. 
Please prioritize the fix for today's deployment.", + logbookList=[LogbookInfo(id=2, name="Bug Reports", displayName="Bug Reports")], + description="Priority escalation", + ) + + await handler.handle_log_entry_reply_add(event2) + + # John should be notified (entry_replies enabled) + # Emma should be notified (document owner, but document_replies disabled) + assert len(notification_tracker) == 1 + assert notification_tracker[0]["username"] == "john" + # John has email configured, so should get email-style subject + assert "Re: Log: BUG-1234: API Response Timeout" in notification_tracker[0]["title"] + + # 3. John updates his own entry with fix status + event3 = LogEntryUpdateEvent( + eventTimestamp=(base_time + timedelta(hours=3)).isoformat(), + eventTriggedByUsername="john", + entityName="LogEntry", + entityId=302, + parentLogDocumentInfo=bug_doc, + logInfo=LogInfo( + id=302, # John's own entry + enteredByUsername="john", + lastModifiedByUsername="john", + enteredOnDateTime=(base_time + timedelta(hours=1)).isoformat(), + lastModifiedOnDateTime=(base_time + timedelta(hours=3)).isoformat(), + ), + description="Fix deployed", + textDiff="+ STATUS: Fixed and deployed to production\n+ Deployment time: 15:30 UTC", + logbookList=[LogbookInfo(id=2, name="Bug Reports", displayName="Bug Reports")], + ) + + await handler.handle_log_entry_update(event3) + + # John is updating his own entry, so no notification to him + # Emma gets notified as document owner (but entry_updates disabled) + assert len(notification_tracker) == 1 # No new notifications + + # 4. 
Sarah reacts with celebration + from bely_mqtt.models import ReactionId + + event4 = LogReactionAddEvent( + eventTimestamp=(base_time + timedelta(hours=4)).isoformat(), + eventTriggedByUsername="sarah", + entityName="LogReaction", + entityId=ReactionId(logId=302, reactionId=2, userId=1), + parentLogDocumentInfo=bug_doc, + parentLogInfo=LogInfo( + id=302, # John's entry + enteredByUsername="john", + lastModifiedByUsername="john", + enteredOnDateTime=(base_time + timedelta(hours=1)).isoformat(), + lastModifiedOnDateTime=(base_time + timedelta(hours=3)).isoformat(), + ), + logReaction=LogReactionInfo( + id=ReactionId(logId=302, reactionId=2, userId=1), + reaction=ReactionInfo( + id=2, emoji="🎉", name="tada", emojiCode=127881, description="Celebration" + ), + username="sarah", + ), + description="Great work on the quick fix!", + ) + + await handler.handle_log_reaction_add(event4) + + # John should be notified (reactions enabled) + assert len(notification_tracker) == 2 + assert notification_tracker[-1]["username"] == "john" + assert "🎉" in notification_tracker[-1]["body"] + + @pytest.mark.asyncio + async def test_notification_deduplication(self, handler, notification_tracker): + """Test that users don't get duplicate notifications for the same event.""" + base_time = datetime.now() + + # Sarah's document + doc = LogDocumentInfo( + id=400, + name="Test Document", + ownerUsername="sarah", + createdByUsername="sarah", + lastModifiedByUsername="sarah", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ) + + # Sarah's entry (she is both owner and creator) + sarah_entry = LogInfo( + id=401, + enteredByUsername="sarah", + lastModifiedByUsername="sarah", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ) + + # John updates Sarah's entry + event = LogEntryUpdateEvent( + eventTimestamp=base_time.isoformat(), + eventTriggedByUsername="john", + entityName="LogEntry", + entityId=401, + 
parentLogDocumentInfo=doc, + logInfo=sarah_entry, + description="Update", + textDiff="+ Added content", + logbookList=[LogbookInfo(id=3, name="Test", displayName="Test")], + ) + + await handler.handle_log_entry_update(event) + + # Sarah should only get ONE notification even though she's both owner and creator + sarah_notifications = [n for n in notification_tracker if n["username"] == "sarah"] + assert len(sarah_notifications) == 1 + + @pytest.mark.asyncio + async def test_self_action_no_notification(self, handler, notification_tracker): + """Test that users don't get notified about their own actions.""" + base_time = datetime.now() + + # John's document + doc = LogDocumentInfo( + id=500, + name="John's Notes", + ownerUsername="john", + createdByUsername="john", + lastModifiedByUsername="john", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ) + + # John adds an entry to his own document + event = LogEntryAddEvent( + eventTimestamp=base_time.isoformat(), + eventTriggedByUsername="john", + entityName="LogEntry", + entityId=501, + parentLogDocumentInfo=doc, + logInfo=LogInfo( + id=501, + enteredByUsername="john", + lastModifiedByUsername="john", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ), + description="Personal note", + textDiff="+ Remember to review PR #123", + logbookList=[LogbookInfo(id=4, name="Personal", displayName="Personal")], + ) + + await handler.handle_log_entry_add(event) + + # No notification should be sent (John is both creator and document owner) + assert len(notification_tracker) == 0 + + @pytest.mark.asyncio + async def test_scenario_content_deletion(self, handler, notification_tracker): + """ + Scenario: Content Deletion + + Test notifications for entry and reply deletions. 
+ """ + base_time = datetime.now() + + # Sarah's document + doc = LogDocumentInfo( + id=600, + name="Team Retrospective", + ownerUsername="sarah", + createdByUsername="sarah", + lastModifiedByUsername="sarah", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ) + + # John's entry in Sarah's document + john_entry = LogInfo( + id=601, + enteredByUsername="john", + lastModifiedByUsername="john", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ) + + # 1. Emma deletes John's entry + event1 = LogEntryDeleteEvent( + eventTimestamp=(base_time + timedelta(minutes=5)).isoformat(), + eventTriggedByUsername="emma", + entityName="LogEntry", + entityId=601, + parentLogDocumentInfo=doc, + logInfo=john_entry, + description="Removed duplicate entry", + textDiff="- Deleted content: This was a duplicate of entry #599", + logbookList=[LogbookInfo(id=5, name="Retrospective", displayName="Retrospective")], + ) + + await handler.handle_log_entry_delete(event1) + + # John should be notified (his entry was deleted) + # Sarah should be notified (document owner) + assert len(notification_tracker) == 2 + usernames = {n["username"] for n in notification_tracker} + assert usernames == {"john", "sarah"} + + # Check John's notification + john_notification = [n for n in notification_tracker if n["username"] == "john"][0] + # John has email configured, so should get email-style subject + assert "Re: Log: Team Retrospective" in john_notification["title"] + assert "[Entry Deleted]" in john_notification["title"] + assert "emma" in john_notification["body"].lower() + + # Check Sarah's notification + sarah_notification = [n for n in notification_tracker if n["username"] == "sarah"][0] + # Sarah has email configured, so should get email-style subject + assert "Re: Log: Team Retrospective" in sarah_notification["title"] + assert "[Entry Deleted]" in sarah_notification["title"] + + # 2. 
Sarah deletes a reply on John's entry + event2 = LogEntryReplyDeleteEvent( + eventTimestamp=(base_time + timedelta(minutes=10)).isoformat(), + eventTriggedByUsername="sarah", + entityName="LogEntryReply", + entityId=602, + parentLogDocumentInfo=doc, + parentLogInfo=john_entry, + logInfo=LogInfo( + id=602, + enteredByUsername="emma", + lastModifiedByUsername="emma", + enteredOnDateTime=base_time.isoformat(), + lastModifiedOnDateTime=base_time.isoformat(), + ), + textDiff="- Deleted reply: This comment was off-topic", + logbookList=[LogbookInfo(id=5, name="Retrospective", displayName="Retrospective")], + description="Removed off-topic comment", + ) + + await handler.handle_log_entry_reply_delete(event2) + + # John should be notified (reply on his entry was deleted) + # Sarah is the one deleting and is the document owner, so she gets one notification + # (deduplicated since she's both the actor and the owner) + assert len(notification_tracker) == 3 # 2 previous + 1 new + new_notification = notification_tracker[-1] + assert new_notification["username"] == "john" + # John has email configured, so should get email-style subject + assert "Re: Log: Team Retrospective" in new_notification["title"] + assert "[Reply Deleted]" in new_notification["title"] + assert "sarah" in new_notification["body"].lower() + + +class TestErrorHandling: + """Test error handling and edge cases.""" + + @pytest.mark.asyncio + async def test_missing_user_config(self, tmp_path): + """Test handling of events for users not in config.""" + config = { + "users": { + "alice": { + "apprise_urls": ["mailto://alice@example.com"], + "notifications": {"entry_replies": True}, + } + } + } + + config_path = tmp_path / "limited_config.yaml" + with open(config_path, "w") as f: + yaml.dump(config, f) + + handler = AppriseSmartNotificationHandler(config_path=str(config_path)) + + # Event from unconfigured user "bob" + from bely_mqtt.models import LogbookInfo + + event = LogEntryAddEvent( + 
eventTimestamp=datetime.now().isoformat(), + eventTriggedByUsername="bob", + entityName="LogEntry", + entityId=1, + parentLogDocumentInfo=LogDocumentInfo( + id=1, + name="Test Doc", + ownerUsername="charlie", # Also unconfigured + createdByUsername="charlie", + lastModifiedByUsername="charlie", + enteredOnDateTime=datetime.now().isoformat(), + lastModifiedOnDateTime=datetime.now().isoformat(), + ), + logInfo=LogInfo( + id=1, + enteredByUsername="bob", + lastModifiedByUsername="bob", + enteredOnDateTime=datetime.now().isoformat(), + lastModifiedOnDateTime=datetime.now().isoformat(), + ), + description="Test", + textDiff="Test content", + logbookList=[LogbookInfo(id=1, name="Test", displayName="Test")], + ) + + # Should handle gracefully without errors + await handler.handle_log_entry_add(event) + + @pytest.mark.asyncio + async def test_malformed_event_data(self, tmp_path): + """Test handling of events with empty or unusual data.""" + config = { + "users": { + "test": { + "apprise_urls": ["mailto://test@example.com"], + "notifications": { + "new_entries": True, + "entry_updates": True, + }, + } + } + } + + config_path = tmp_path / "test_config.yaml" + with open(config_path, "w") as f: + yaml.dump(config, f) + + handler = AppriseSmartNotificationHandler(config_path=str(config_path)) + + # Mock the Apprise notify method + for username, endpoints in handler.processor.user_endpoint_configs.items(): + for ep in endpoints: + ep["apprise"].notify = MagicMock(return_value=True) + + # Create event with empty strings and edge case data + from bely_mqtt.models import LogbookInfo + + # Test 1: Empty username (should not crash, but won't notify anyone) + event = LogEntryAddEvent( + eventTimestamp=datetime.now().isoformat(), + eventTriggedByUsername="", # Empty username + entityName="LogEntry", + entityId=1, + parentLogDocumentInfo=LogDocumentInfo( + id=1, + name="Test Document", + ownerUsername="test", + createdByUsername="test", + lastModifiedByUsername="test", + 
enteredOnDateTime=datetime.now().isoformat(), + lastModifiedOnDateTime=datetime.now().isoformat(), + ), + logInfo=LogInfo( + id=1, + enteredByUsername="test", + lastModifiedByUsername="test", + enteredOnDateTime=datetime.now().isoformat(), + lastModifiedOnDateTime=datetime.now().isoformat(), + ), + description="", # Empty description + textDiff="", # Empty text diff + logbookList=[LogbookInfo(id=1, name="Test", displayName="Test")], + ) + + # Should handle gracefully without crashing + try: + await asyncio.wait_for(handler.handle_log_entry_add(event), timeout=2.0) + except asyncio.TimeoutError: + pytest.fail("Handler timed out processing event with empty username") + + # Test 2: Very long strings (should truncate or handle gracefully) + event2 = LogEntryAddEvent( + eventTimestamp=datetime.now().isoformat(), + eventTriggedByUsername="long_user", + entityName="LogEntry", + entityId=2, + parentLogDocumentInfo=LogDocumentInfo( + id=2, + name="Document with very long name " + "y" * 500, # Reduced from 5000 + ownerUsername="test", + createdByUsername="test", + lastModifiedByUsername="test", + enteredOnDateTime=datetime.now().isoformat(), + lastModifiedOnDateTime=datetime.now().isoformat(), + ), + logInfo=LogInfo( + id=2, + enteredByUsername="test", + lastModifiedByUsername="test", + enteredOnDateTime=datetime.now().isoformat(), + lastModifiedOnDateTime=datetime.now().isoformat(), + ), + description="Very long description " + "z" * 1000, # Reduced from 10000 + textDiff="+ " + "content" * 200, # Reduced from 2000 + logbookList=[LogbookInfo(id=1, name="Test", displayName="Test")], + ) + + # Should handle gracefully without crashing + try: + await asyncio.wait_for(handler.handle_log_entry_add(event2), timeout=2.0) + except asyncio.TimeoutError: + pytest.fail("Handler timed out processing event with long strings") + + # Test 3: Special characters in strings + event3 = LogEntryAddEvent( + eventTimestamp=datetime.now().isoformat(), + eventTriggedByUsername="user", + 
entityName="LogEntry", + entityId=3, + parentLogDocumentInfo=LogDocumentInfo( + id=3, + name="Test HTML & Special Chars", + ownerUsername="test", + createdByUsername="test", + lastModifiedByUsername="test", + enteredOnDateTime=datetime.now().isoformat(), + lastModifiedOnDateTime=datetime.now().isoformat(), + ), + logInfo=LogInfo( + id=3, + enteredByUsername="test", + lastModifiedByUsername="test", + enteredOnDateTime=datetime.now().isoformat(), + lastModifiedOnDateTime=datetime.now().isoformat(), + ), + description="Description with ", + textDiff="+ Content with special chars: & < > \" '", + logbookList=[LogbookInfo(id=1, name="Test", displayName="Test")], + ) + + # Should handle gracefully without crashing + try: + await asyncio.wait_for(handler.handle_log_entry_add(event3), timeout=2.0) + except asyncio.TimeoutError: + pytest.fail("Handler timed out processing event with special characters") + + +if __name__ == "__main__": + # Run integration tests + pytest.main([__file__, "-v", "--asyncio-mode=auto"]) diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/hybrid_event_handler.py b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/hybrid_event_handler.py new file mode 100644 index 000000000..1c31dce77 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/hybrid_event_handler.py @@ -0,0 +1,222 @@ +""" +Example handler that uses both specific and generic event handler methods with typed events. + +This handler demonstrates how to implement both specific event handlers +(for specific event types) and a generic fallback handler for other events. +This is the most flexible approach. + +The framework automatically parses MQTT messages into properly typed event objects +and passes them to the appropriate handler method. 
+""" + +from bely_mqtt import ( + MQTTHandler, + MQTTMessage, + LogEntryAddEvent, + LogEntryUpdateEvent, + LogEntryReplyAddEvent, + LogEntryReplyUpdateEvent, + LogReactionAddEvent, + LogReactionDeleteEvent, + CoreEvent, +) + + +class HybridEventHandler(MQTTHandler): + """ + Handler that implements both specific and generic event handler methods with typed events. + + This handler processes specific log entry events and replies, with a + fallback generic handler for other event types. All handler methods receive + properly typed event objects instead of raw MQTT messages. + """ + + @property + def topic_pattern(self) -> str: + """Subscribe to all BELY events.""" + return "bely/#" + + # Specific event handlers for log entries + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + """ + Handle log entry add events. + + Args: + event: The log entry add event (already parsed and typed). + """ + try: + self.logger.info(f"Log entry added: {event.log_info.id}") + self.logger.debug(f"Document: {event.parent_log_document_info.name}") + await self.process_log_entry_add(event) + except Exception as e: + self.logger.error(f"Error processing log entry add: {e}", exc_info=True) + + async def handle_log_entry_update(self, event: LogEntryUpdateEvent) -> None: + """ + Handle log entry update events. + + Args: + event: The log entry update event (already parsed and typed). + """ + try: + self.logger.info(f"Log entry updated: {event.log_info.id}") + self.logger.debug(f"Document: {event.parent_log_document_info.name}") + await self.process_log_entry_update(event) + except Exception as e: + self.logger.error(f"Error processing log entry update: {e}", exc_info=True) + + # Specific event handlers for replies + async def handle_log_entry_reply_add(self, event: LogEntryReplyAddEvent) -> None: + """ + Handle log entry reply add events. + + Args: + event: The log entry reply add event (already parsed and typed). 
+ """ + try: + self.logger.info(f"Reply added to log entry: {event.parent_log_info.id}") + await self.process_reply_add(event) + except Exception as e: + self.logger.error(f"Error processing reply add: {e}", exc_info=True) + + async def handle_log_entry_reply_update(self, event: LogEntryReplyUpdateEvent) -> None: + """ + Handle log entry reply update events. + + Args: + event: The log entry reply update event (already parsed and typed). + """ + try: + self.logger.info(f"Reply updated on log entry: {event.parent_log_info.id}") + await self.process_reply_update(event) + except Exception as e: + self.logger.error(f"Error processing reply update: {e}", exc_info=True) + + # Specific event handlers for reactions + async def handle_log_reaction_add(self, event: LogReactionAddEvent) -> None: + """ + Handle log reaction add events. + + Args: + event: The log reaction add event (already parsed and typed). + """ + try: + self.logger.info( + f"Reaction added to log entry {event.parent_log_info.id}: " + f"{event.log_reaction.reaction.emoji} by {event.log_reaction.username}" + ) + await self.process_reaction_add(event) + except Exception as e: + self.logger.error(f"Error processing reaction add: {e}", exc_info=True) + + async def handle_log_reaction_delete(self, event: LogReactionDeleteEvent) -> None: + """ + Handle log reaction delete events. + + Args: + event: The log reaction delete event (already parsed and typed). + """ + try: + self.logger.info( + f"Reaction deleted from log entry {event.parent_log_info.id}: " + f"{event.log_reaction.reaction.emoji} by {event.log_reaction.username}" + ) + await self.process_reaction_delete(event) + except Exception as e: + self.logger.error(f"Error processing reaction delete: {e}", exc_info=True) + + # Generic event handlers for other events + async def handle_generic_add(self, event: CoreEvent) -> None: + """ + Handle generic add events. + + Args: + event: The generic add event (already parsed and typed). 
+ """ + try: + self.logger.info(f"Generic add: {event.entity_name} (ID: {event.entity_id})") + await self.process_generic_add(event) + except Exception as e: + self.logger.error(f"Error processing generic add: {e}", exc_info=True) + + async def handle_generic_update(self, event: CoreEvent) -> None: + """ + Handle generic update events. + + Args: + event: The generic update event (already parsed and typed). + """ + try: + self.logger.info(f"Generic update: {event.entity_name} (ID: {event.entity_id})") + await self.process_generic_update(event) + except Exception as e: + self.logger.error(f"Error processing generic update: {e}", exc_info=True) + + async def handle_generic_delete(self, event: CoreEvent) -> None: + """ + Handle generic delete events. + + Args: + event: The generic delete event (already parsed and typed). + """ + try: + self.logger.info(f"Generic delete: {event.entity_name} (ID: {event.entity_id})") + await self.process_generic_delete(event) + except Exception as e: + self.logger.error(f"Error processing generic delete: {e}", exc_info=True) + + # Fallback generic handler for unknown events + async def handle_generic(self, message: MQTTMessage) -> None: + """ + Fallback handler for events that don't match any specific handler. + + This is called if the message topic doesn't match any of the + specific event types defined above. + + Args: + message: The raw MQTT message. 
+ """ + self.logger.debug(f"Received unknown event on topic: {message.topic}") + + # Processing methods + async def process_log_entry_add(self, event: LogEntryAddEvent) -> None: + """Process a log entry add event.""" + self.logger.debug(f"Processing log entry add: {event.log_info.id}") + + async def process_log_entry_update(self, event: LogEntryUpdateEvent) -> None: + """Process a log entry update event.""" + self.logger.debug(f"Processing log entry update: {event.log_info.id}") + + async def process_reply_add(self, event: LogEntryReplyAddEvent) -> None: + """Process a reply add event.""" + self.logger.debug(f"Processing reply add to: {event.parent_log_info.id}") + + async def process_reply_update(self, event: LogEntryReplyUpdateEvent) -> None: + """Process a reply update event.""" + self.logger.debug(f"Processing reply update to: {event.parent_log_info.id}") + + async def process_reaction_add(self, event: LogReactionAddEvent) -> None: + """Process a reaction add event.""" + self.logger.debug( + f"Processing reaction add: {event.log_reaction.reaction.emoji} " + f"on log {event.parent_log_info.id}" + ) + + async def process_reaction_delete(self, event: LogReactionDeleteEvent) -> None: + """Process a reaction delete event.""" + self.logger.debug( + f"Processing reaction delete: {event.log_reaction.reaction.emoji} " + f"on log {event.parent_log_info.id}" + ) + + async def process_generic_add(self, event: CoreEvent) -> None: + """Process a generic add event.""" + self.logger.debug(f"Processing generic add: {event.entity_name}") + + async def process_generic_update(self, event: CoreEvent) -> None: + """Process a generic update event.""" + self.logger.debug(f"Processing generic update: {event.entity_name}") + + async def process_generic_delete(self, event: CoreEvent) -> None: + """Process a generic delete event.""" + self.logger.debug(f"Processing generic delete: {event.entity_name}") diff --git 
"""
Example handlers for log entry events.

Demonstrates how to parse specific typed event payloads from raw MQTT
messages and extract relevant information, and how to narrow a handler's
topic subscription by overriding ``topic_pattern``.
"""

import logging

from bely_mqtt.models import LogEntryAddEvent, LogEntryUpdateEvent, MQTTMessage
from bely_mqtt.plugin import MQTTHandler

logger = logging.getLogger(__name__)


class LogEntryAddHandler(MQTTHandler):
    """
    Handler for log entry add events.

    Processes events emitted when new log entries are added; extend it to
    send notifications or trigger workflows.
    """

    @property
    def topic_pattern(self) -> str:
        """Subscribe to log entry add events only."""
        return "bely/logEntry/Add"

    async def handle(self, message: MQTTMessage) -> None:
        """
        Parse and log a log-entry add event.

        Args:
            message: Raw MQTT message whose payload is a LogEntryAddEvent dict.
        """
        try:
            event = LogEntryAddEvent(**message.payload)
            self.logger.info(
                f"Log entry added: ID={event.log_info.id}, "
                f"Document={event.parent_log_document_info.name}, "
                f"By={event.event_triggered_by_username}"
            )
            self.logger.debug(f"Text: {event.text_diff}")

            # Example: enrich the event with data from the BELY API.
            # This lookup is best-effort: a failure must not abort event
            # handling, but it should not be silently swallowed either.
            if self.api_factory:
                try:
                    log_api = self.api_factory.getLogApi()
                    log_data = log_api.get_by_id(event.log_info.id)
                    self.logger.debug(f"Retrieved log entry data: {log_data}")
                except Exception as api_err:
                    self.logger.debug(f"Optional BELY API lookup failed: {api_err}")

        except Exception as e:
            self.logger.error(f"Failed to parse log entry add event: {e}")


class LogEntryUpdateHandler(MQTTHandler):
    """
    Handler for log entry update events.

    Processes events emitted when log entries are modified. Shows how to
    override the default topic pattern to subscribe to specific events only.
    """

    @property
    def topic_pattern(self) -> str:
        """Subscribe to log entry update events only (override default)."""
        return "bely/logEntry/Update"

    async def handle(self, message: MQTTMessage) -> None:
        """
        Parse and log a log-entry update event.

        Args:
            message: Raw MQTT message whose payload is a LogEntryUpdateEvent dict.
        """
        try:
            event = LogEntryUpdateEvent(**message.payload)
            self.logger.info(
                f"Log entry updated: ID={event.log_info.id}, "
                f"Document={event.parent_log_document_info.name}, "
                f"By={event.event_triggered_by_username}"
            )
            self.logger.debug(f"Changes:\n{event.text_diff}")

        except Exception as e:
            self.logger.error(f"Failed to parse log entry update event: {e}")
+ """ + + @property + def topic_pattern(self) -> str: + """Subscribe to all BELY topics.""" + return "bely/#" + + async def handle(self, message: MQTTMessage) -> None: + """Log the incoming message.""" + self.logger.info(f"Event received on topic: {message.topic}") + self.logger.debug(f"Payload: {json.dumps(message.payload, indent=2)}") diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/logging_handler_advanced.py b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/logging_handler_advanced.py new file mode 100644 index 000000000..9e95cfb28 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/logging_handler_advanced.py @@ -0,0 +1,215 @@ +""" +Advanced logging handler that logs BELY events to files by topic. + +This handler demonstrates how to create a handler that: +- Logs events to files organized by topic +- Automatically creates files for each subtopic +- Supports configurable logging directory +- Maintains separate loggers for each topic +- Accepts configuration from the framework + +Environment Variables: + BELY_LOG_DIR: Directory to store log files (default: current directory) + +Configuration File: + { + "handlers": { + "AdvancedLoggingHandler": { + "logging_dir": "/var/log/bely" + } + } + } + +Usage: + # With environment variable + BELY_LOG_DIR=/var/log/bely bely-mqtt start --handlers-dir ./handlers + + # With configuration file + bely-mqtt start --handlers-dir ./handlers --config config.json + + # With direct instantiation + handler = AdvancedLoggingHandler(logging_dir="/var/log/bely") +""" + +import json +import logging +import os +from pathlib import Path +from typing import Dict, Optional + +from bely_mqtt.models import MQTTMessage +from bely_mqtt.plugin import MQTTHandler + + +class AdvancedLoggingHandler(MQTTHandler): + """ + Advanced handler that logs BELY events to files by topic. 
+ + This handler subscribes to all BELY topics and logs each event to a + separate file based on the topic. Files are automatically created in + the specified logging directory. + + Example: + handler = AdvancedLoggingHandler(logging_dir="/var/log/bely") + + Messages on "bely/logEntry/Add" are logged to: + /var/log/bely/logEntry/Add.log + + Messages on "bely/logEntryReply/Update" are logged to: + /var/log/bely/logEntryReply/Update.log + """ + + def __init__( + self, + logging_dir: Optional[str] = None, + api_client: Optional[object] = None, + ): + """ + Initialize the advanced logging handler. + + Args: + logging_dir: Directory to store log files. If None, uses BELY_LOG_DIR + environment variable or current directory. + Directory is created if it doesn't exist. + api_client: Optional BELY API client (for compatibility with handler system). + + Examples: + # Use environment variable + BELY_LOG_DIR=/var/log/bely bely-mqtt start --handlers-dir ./handlers + + # Use direct instantiation + handler = AdvancedLoggingHandler(logging_dir="/var/log/bely") + + # Use configuration file + # config.json: + # { + # "handlers": { + # "AdvancedLoggingHandler": { + # "logging_dir": "/var/log/bely" + # } + # } + # } + # bely-mqtt start --handlers-dir ./handlers --config config.json + + # Use default (current directory) + handler = AdvancedLoggingHandler() + """ + super().__init__(api_client=api_client) + + # Set up logging directory + # Priority: parameter > environment variable > current directory + if logging_dir is None: + logging_dir = os.getenv("BELY_LOG_DIR", ".") + + self.logging_dir = Path(logging_dir) + self.logging_dir.mkdir(parents=True, exist_ok=True) + + # Dictionary to store loggers for each topic + self._topic_loggers: Dict[str, logging.Logger] = {} + + self.logger.info(f"AdvancedLoggingHandler initialized with directory: {self.logging_dir}") + + # Uses default topic_pattern "bely/#" - no need to override + # This handler will receive all BELY events + + def 
_get_logger_for_topic(self, topic: str) -> logging.Logger: + """ + Get or create a logger for the given topic. + + Creates a new logger if one doesn't exist for this topic. + The logger writes to a file named after the topic path. + + Args: + topic: The MQTT topic (e.g., "bely/logEntry/Add") + + Returns: + A configured logger for the topic. + """ + # Return cached logger if it exists + if topic in self._topic_loggers: + return self._topic_loggers[topic] + + # Create logger name from topic + logger_name = f"bely_mqtt.topic.{topic.replace('/', '.')}" + topic_logger = logging.getLogger(logger_name) + + # Set up file handler for this topic + try: + # Create subdirectories based on topic structure + # e.g., "bely/logEntry/Add" -> "bely/logEntry/Add.log" + topic_parts = topic.split("/") + log_dir = self.logging_dir / Path(*topic_parts[:-1]) + log_dir.mkdir(parents=True, exist_ok=True) + + log_file = log_dir / f"{topic_parts[-1]}.log" + + # Create file handler + file_handler = logging.FileHandler(log_file) + file_handler.setLevel(logging.DEBUG) + + # Create formatter + formatter = logging.Formatter( + "%(asctime)s - %(name)s - %(levelname)s - %(message)s" + ) + file_handler.setFormatter(formatter) + + # Add handler to logger + topic_logger.addHandler(file_handler) + topic_logger.setLevel(logging.DEBUG) + + # Prevent propagation to root logger + topic_logger.propagate = False + + self.logger.debug(f"Created logger for topic: {topic} -> {log_file}") + + except Exception as e: + self.logger.error(f"Failed to create logger for topic {topic}: {e}") + raise + + # Cache the logger + self._topic_loggers[topic] = topic_logger + return topic_logger + + async def handle(self, message: MQTTMessage) -> None: + """ + Log the incoming message to a file based on its topic. + + Args: + message: The MQTT message to log. 
+ """ + try: + # Get logger for this topic + topic_logger = self._get_logger_for_topic(message.topic) + + # Log the event + topic_logger.info(f"Event received on topic: {message.topic}") + topic_logger.debug(f"Payload: {json.dumps(message.payload, indent=2)}") + + # Also log to main logger + self.logger.debug(f"Logged event from {message.topic}") + + except Exception as e: + self.logger.error(f"Error logging message from {message.topic}: {e}", exc_info=True) + + def get_log_file_for_topic(self, topic: str) -> Path: + """ + Get the log file path for a given topic. + + Args: + topic: The MQTT topic. + + Returns: + Path to the log file for this topic. + """ + topic_parts = topic.split("/") + log_dir = self.logging_dir / Path(*topic_parts[:-1]) + return log_dir / f"{topic_parts[-1]}.log" + + def get_all_log_files(self) -> list[Path]: + """ + Get all log files created by this handler. + + Returns: + List of all log file paths. + """ + return list(self.logging_dir.rglob("*.log")) diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/notification_handler.py b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/notification_handler.py new file mode 100644 index 000000000..d22dc0fba --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/notification_handler.py @@ -0,0 +1,106 @@ +""" +Example notification handler using Apprise. + +This handler demonstrates how to send notifications for specific events. +It requires the 'apprise' extra to be installed: + pip install bely-mqtt-framework[apprise] +""" + +import logging +from typing import Optional + +from bely_mqtt.models import ( + LogEntryAddEvent, + LogEntryReplyAddEvent, + LogEntryUpdateEvent, + MQTTMessage, +) +from bely_mqtt.plugin import MQTTHandler + +logger = logging.getLogger(__name__) + +try: + import apprise + + APPRISE_AVAILABLE = True +except ImportError: + APPRISE_AVAILABLE = False + logger.warning( + "Apprise not installed. 
Install with: pip install bely-mqtt-framework[apprise]" + ) + + +class NotificationHandler(MQTTHandler): + """ + Handler that sends notifications for BELY events using Apprise. + + This is an example handler showing how to integrate with external + notification services. Configure notification endpoints via environment + variables or configuration files. + """ + + def __init__(self, *args, **kwargs): + """Initialize the notification handler.""" + super().__init__(*args, **kwargs) + self.apprise_instance: Optional[apprise.Apprise] = None + if APPRISE_AVAILABLE: + self.apprise_instance = apprise.Apprise() + + @property + def topic_pattern(self) -> str: + """Subscribe to log entry events.""" + return "bely/logEntry/#" + + async def handle(self, message: MQTTMessage) -> None: + """Handle log entry events and send notifications.""" + if not APPRISE_AVAILABLE: + self.logger.warning("Apprise not available, skipping notification") + return + + try: + if "Add" in message.topic: + await self._handle_add_event(message) + elif "Update" in message.topic: + await self._handle_update_event(message) + except Exception as e: + self.logger.error(f"Failed to handle notification event: {e}") + + async def _handle_add_event(self, message: MQTTMessage) -> None: + """Handle log entry add event.""" + event = LogEntryAddEvent(**message.payload) + title = f"New Log Entry in {event.parent_log_document_info.name}" + body = ( + f"User {event.event_triggered_by_username} added a new log entry.\n" + f"Logbooks: {', '.join(lb.display_name or lb.name for lb in event.logbook_list)}" + ) + await self._send_notification(title, body) + + async def _handle_update_event(self, message: MQTTMessage) -> None: + """Handle log entry update event.""" + event = LogEntryUpdateEvent(**message.payload) + title = f"Log Entry Updated in {event.parent_log_document_info.name}" + body = ( + f"User {event.event_triggered_by_username} updated a log entry.\n" + f"Changes:\n{event.text_diff[:200]}..." 
+ ) + await self._send_notification(title, body) + + async def _send_notification(self, title: str, body: str) -> None: + """Send a notification via Apprise.""" + if not self.apprise_instance: + self.logger.warning("Apprise instance not initialized") + return + + # Example: Add notification endpoints + # self.apprise_instance.add('mailto://user:password@gmail.com') + # self.apprise_instance.add('discord://webhook_id/webhook_token') + + # For now, just log the notification + self.logger.info(f"Notification: {title}") + self.logger.info(f" {body}") + + # Uncomment to actually send notifications: + # self.apprise_instance.notify( + # body=body, + # title=title, + # ) diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/simple_log_handler.py b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/simple_log_handler.py new file mode 100644 index 000000000..ec9f42c12 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/handlers/simple_log_handler.py @@ -0,0 +1,39 @@ +""" +Simple log entry handler example. + +This handler demonstrates basic usage of the BELY MQTT Framework +by logging when new entries are added. +""" + +from bely_mqtt import MQTTHandler, LogEntryAddEvent + + +class SimpleLogHandler(MQTTHandler): + """Logs when new entries are added to BELY.""" + + # Uses default topic_pattern "bely/#" - receives all BELY events + # The framework will automatically route log entry add events to handle_log_entry_add + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + """Handle new log entry event using the specific handler method.""" + try: + # Log the event details + self.logger.info( + f"New log entry added: " + f"ID={event.log_info.id}, " + f"Document={event.parent_log_document_info.name}, " + f"By={event.event_triggered_by_username}" + ) + + # Log the entry description (first 100 chars) + description = event.description[:100] + if len(event.description) > 100: + description += "..." 
+ self.logger.info(f"Description: {description}") + + # You can access all event fields with type safety + self.logger.debug(f"Timestamp: {event.event_timestamp}") + self.logger.debug(f"Logbooks: {[lb.name for lb in event.logbook_list]}") + + except Exception as e: + self.logger.error(f"Failed to process log entry: {e}", exc_info=True) \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/systemd/bely-mqtt.env b/tools/developer_tools/bely-mqtt-message-broker/examples/systemd/bely-mqtt.env new file mode 100644 index 000000000..abf4c58a1 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/examples/systemd/bely-mqtt.env @@ -0,0 +1,39 @@ +# BELY MQTT Framework Environment Configuration +# +# This file contains environment variables for the BELY MQTT Framework +# systemd service. Copy this to ~/.config/bely-mqtt/bely-mqtt.env and +# customize for your environment. + + +# Service Configuration +# Path to the service executable +# %h expands to user's home directory in systemd +SERVICE_PATH=%h/.local/bin/bely-mqtt + +# MQTT Broker Configuration +MQTT_BROKER_HOST=localhost +MQTT_BROKER_PORT=1883 +MQTT_CLIENT_ID= +MQTT_USERNAME= +MQTT_PASSWORD= +MQTT_TOPIC= + +# Logging Configuration +LOG_LEVEL=INFO +# Options: DEBUG, INFO, WARNING, ERROR, CRITICAL + +# Application Configuration +# Path to handler configuration JSON file +# Default: ~/.config/bely-mqtt/config.json +CONFIG_FILE=~/.config/bely-mqtt/config.json + +# Path to handlers directory +# Default: ~/.config/bely-mqtt/handlers +HANDLERS_DIR=~/.config/bely-mqtt/handlers + +# BELY API Configuration +BELY_API_URL= +BELY_API_KEY= + +# Additional environment file (optional) +ENV_FILE= diff --git a/tools/developer_tools/bely-mqtt-message-broker/examples/systemd/bely-mqtt.service b/tools/developer_tools/bely-mqtt-message-broker/examples/systemd/bely-mqtt.service new file mode 100644 index 000000000..7f267c804 --- /dev/null +++ 
b/tools/developer_tools/bely-mqtt-message-broker/examples/systemd/bely-mqtt.service @@ -0,0 +1,28 @@ +[Unit] +Description=BELY MQTT Message Broker Framework +After=network-online.target +Wants=network-online.target + +[Service] +Type=simple +EnvironmentFile=%h/.config/bely-mqtt/bely-mqtt.env +Restart=on-failure +RestartSec=5 + +# Service execution +ExecStart=/bin/sh -c '${SERVICE_PATH} start \ + --handlers-dir ${HANDLERS_DIR} \ + --config ${CONFIG_FILE} \ + ${MQTT_BROKER_HOST:+--broker-host=${MQTT_BROKER_HOST}} \ + ${MQTT_BROKER_PORT:+--broker-port=${MQTT_BROKER_PORT}} \ + ${MQTT_CLIENT_ID:+--client-id=${MQTT_CLIENT_ID}} \ + ${MQTT_USERNAME:+--username=${MQTT_USERNAME}} \ + ${MQTT_PASSWORD:+--password=${MQTT_PASSWORD}} \ + ${MQTT_TOPIC:+--topic=${MQTT_TOPIC}} \ + ${BELY_API_URL:+--api-url=${BELY_API_URL}} \ + ${BELY_API_KEY:+--api-key=${BELY_API_KEY}} \ + ${LOG_LEVEL:+--log-level=${LOG_LEVEL}} \ + ${ENV_FILE:+--env-file=${ENV_FILE}}' + +[Install] +WantedBy=default.target \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/pyproject.toml b/tools/developer_tools/bely-mqtt-message-broker/pyproject.toml new file mode 100644 index 000000000..4bbbf1345 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/pyproject.toml @@ -0,0 +1,81 @@ +[build-system] +requires = ["setuptools>=65.0", "wheel"] +build-backend = "setuptools.build_meta" + +[project] +name = "bely-mqtt-framework" +version = "0.1.0" +description = "Pluggable Python framework for handling BELY MQTT events" +readme = "README.md" +requires-python = ">=3.10" +license = {text = "MIT"} +authors = [ + {name = "BELY Team", email = "team@bely.dev"} +] +keywords = ["mqtt", "bely", "logbook", "plugins", "framework", "events", "async"] +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming 
Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Topic :: Software Development :: Libraries :: Python Modules", + "Topic :: System :: Monitoring", + "Framework :: AsyncIO", +] + +dependencies = [ + "click>=8.1.0", + "paho-mqtt>=1.6.1", + "pydantic>=2.0.0", + "python-dotenv>=1.0.0", + "pluggy>=1.3.0", + "PyYAML>=6.0.0", + "apprise>=1.4.0", + "bely-api>=2025.3.dev0" +] + +[project.urls] +Homepage = "https://github.com/bely-org/bely-mqtt-framework" +Documentation = "https://github.com/bely-org/bely-mqtt-framework/tree/main/docs" +Repository = "https://github.com/bely-org/bely-mqtt-framework" +Issues = "https://github.com/bely-org/bely-mqtt-framework/issues" +Changelog = "https://github.com/bely-org/bely-mqtt-framework/blob/main/CHANGELOG.md" + +[project.optional-dependencies] +dev = [ + "pytest>=7.0.0", + "pytest-cov>=4.0.0", + "pytest-asyncio>=0.21.0", + "black>=23.0.0", + "ruff>=0.1.0", + "mypy>=1.0.0", + "types-paho-mqtt>=1.6.0", + "types-PyYAML>=6.0.0", +] +apprise = [ + "apprise>=1.4.0", +] + +[project.scripts] +bely-mqtt = "bely_mqtt.cli:main" + +[tool.black] +line-length = 100 +target-version = ['py310'] + +[tool.ruff] +line-length = 100 +target-version = "py310" + +[tool.mypy] +python_version = "3.10" +warn_return_any = true +warn_unused_configs = true +disallow_untyped_defs = false + +[[tool.mypy.overrides]] +module = "BelyApiFactory" +ignore_missing_imports = true diff --git a/tools/developer_tools/bely-mqtt-message-broker/pytest.ini b/tools/developer_tools/bely-mqtt-message-broker/pytest.ini new file mode 100644 index 000000000..1a2e83f7a --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/pytest.ini @@ -0,0 +1,17 @@ +[pytest] +minversion = 7.0 +testpaths = tests +python_files = test_*.py +python_classes = Test* +python_functions = test_* +asyncio_mode = auto +asyncio_default_fixture_loop_scope = function +addopts = + -v + --strict-markers + --tb=short + +markers = + asyncio: marks tests as async (deselect with '-m 
"not asyncio"') + integration: marks tests as integration tests + slow: marks tests as slow diff --git a/tools/developer_tools/bely-mqtt-message-broker/sample-mosquitto_sub_output b/tools/developer_tools/bely-mqtt-message-broker/sample-mosquitto_sub_output new file mode 100644 index 000000000..d9ce21caf --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/sample-mosquitto_sub_output @@ -0,0 +1,23 @@ +# Sample mqtt shell output + +mosquitto_sub -v -t "bely/#" + +bely/add {"description":"Add action completed","eventTimestamp":"2025-11-21T16:37:52.537+00:00","entityName":"ItemDomainLogbook","entityId":105,"eventTriggedByUsername":"logr"} +bely/add {"description":"Add action completed","eventTimestamp":"2025-11-21T16:38:06.160+00:00","entityName":"Log","entityId":267,"eventTriggedByUsername":"logr"} +bely/logEntry/Add {"description":"log entry was added","eventTimestamp":"2025-11-21T16:38:06.157+00:00","parentLogDocumentInfo":{"name":"[2025/11/21/2] New Document","id":105,"lastModifiedOnDateTime":"2025-11-21T16:37:42.000+00:00","createdByUsername":"logr","lastModifiedByUsername":"logr","enteredOnDateTime":"2025-11-21T16:37:42.000+00:00","ownerUsername":"logr","ownerUserGroupName":"LOGR_ADMIN"},"logInfo":{"id":267,"lastModifiedOnDateTime":"2025-11-21T16:38:00.392+00:00","lastModifiedByUsername":"logr","enteredByUsername":"logr","enteredOnDateTime":"2025-11-21T16:38:00.392+00:00"},"logbookList":[{"name":"studies-sr","id":7,"displayName":"SR"}],"textDiff":"New Log Entry Added","entityName":"Log","entityId":267,"eventTriggedByUsername":"logr"} +bely/update {"description":"Update action completed","eventTimestamp":"2025-11-21T16:38:06.164+00:00","entityName":"EntityInfo","entityId":115,"eventTriggedByUsername":"logr"} +bely/update {"description":"Update action completed","eventTimestamp":"2025-11-21T16:38:14.821+00:00","entityName":"Log","entityId":267,"eventTriggedByUsername":"logr"} +bely/logEntry/Update {"description":"log entry id [267] was 
modified","eventTimestamp":"2025-11-21T16:38:14.819+00:00","parentLogDocumentInfo":{"name":"[2025/11/21/2] New Document","id":105,"lastModifiedOnDateTime":"2025-11-21T16:38:06.000+00:00","createdByUsername":"logr","lastModifiedByUsername":"logr","enteredOnDateTime":"2025-11-21T16:37:42.000+00:00","ownerUsername":"logr","ownerUserGroupName":"LOGR_ADMIN"},"logInfo":{"id":267,"lastModifiedOnDateTime":"2025-11-21T16:38:14.819+00:00","lastModifiedByUsername":"logr","enteredByUsername":"logr","enteredOnDateTime":"2025-11-21T16:38:00.000+00:00"},"logbookList":[{"name":"studies-sr","id":7,"displayName":"SR"}],"textDiff":"- New Log Entry Added\n+ New Log Entry Added\r\n+ \r\n+ Same Entry Updated\r\n","entityName":"Log","entityId":267,"eventTriggedByUsername":"logr"} +bely/update {"description":"Update action completed","eventTimestamp":"2025-11-21T16:38:14.827+00:00","entityName":"EntityInfo","entityId":115,"eventTriggedByUsername":"logr"} +bely/add {"description":"Add action completed","eventTimestamp":"2025-11-21T16:38:20.221+00:00","entityName":"Log","entityId":268,"eventTriggedByUsername":"logr"} +bely/logEntryReply/Add {"description":"reply log entry was added","eventTimestamp":"2025-11-21T16:38:20.219+00:00","parentLogDocumentInfo":{"name":"[2025/11/21/2] New Document","id":105,"lastModifiedOnDateTime":"2025-11-21T16:38:14.000+00:00","createdByUsername":"logr","lastModifiedByUsername":"logr","enteredOnDateTime":"2025-11-21T16:37:42.000+00:00","ownerUsername":"logr","ownerUserGroupName":"LOGR_ADMIN"},"logInfo":{"id":268,"lastModifiedOnDateTime":"2025-11-21T16:38:16.169+00:00","lastModifiedByUsername":"logr","enteredByUsername":"logr","enteredOnDateTime":"2025-11-21T16:38:16.169+00:00"},"logbookList":[{"name":"studies-sr","id":7,"displayName":"SR"}],"textDiff":"Reply 
Added","parentlogInfo":{"id":267,"lastModifiedOnDateTime":"2025-11-21T16:38:14.000+00:00","lastModifiedByUsername":"logr","enteredByUsername":"logr","enteredOnDateTime":"2025-11-21T16:38:00.000+00:00"},"entityName":"Log","entityId":268,"eventTriggedByUsername":"logr"} +bely/update {"description":"Update action completed","eventTimestamp":"2025-11-21T16:38:20.226+00:00","entityName":"EntityInfo","entityId":115,"eventTriggedByUsername":"logr"} +bely/update {"description":"Update action completed","eventTimestamp":"2025-11-21T16:38:31.075+00:00","entityName":"Log","entityId":268,"eventTriggedByUsername":"logr"} +bely/logEntryReply/Update {"description":"reply log entry id [268] was modified","eventTimestamp":"2025-11-21T16:38:31.073+00:00","parentLogDocumentInfo":{"name":"[2025/11/21/2] New Document","id":105,"lastModifiedOnDateTime":"2025-11-21T16:38:20.000+00:00","createdByUsername":"logr","lastModifiedByUsername":"logr","enteredOnDateTime":"2025-11-21T16:37:42.000+00:00","ownerUsername":"logr","ownerUserGroupName":"LOGR_ADMIN"},"logInfo":{"id":268,"lastModifiedOnDateTime":"2025-11-21T16:38:31.073+00:00","lastModifiedByUsername":"logr","enteredByUsername":"logr","enteredOnDateTime":"2025-11-21T16:38:16.000+00:00"},"logbookList":[{"name":"studies-sr","id":7,"displayName":"SR"}],"textDiff":"- Reply Added\n+ Reply Added\r\n+ \r\n+ Reply Modified\n","parentlogInfo":{"id":267,"lastModifiedOnDateTime":"2025-11-21T16:38:14.000+00:00","lastModifiedByUsername":"logr","enteredByUsername":"logr","enteredOnDateTime":"2025-11-21T16:38:00.000+00:00"},"entityName":"Log","entityId":268,"eventTriggedByUsername":"logr"} +bely/update {"description":"Update action completed","eventTimestamp":"2025-11-21T16:38:31.080+00:00","entityName":"EntityInfo","entityId":115,"eventTriggedByUsername":"logr"} +bely/delete {"description":"Delete action completed","eventTimestamp":"2025-12-17T21:25:44.039+00:00","entityName":"Log","eventTriggedByUsername":"logr","entityId":214} 
+bely/logEntryReply/Delete {"description":"reply log entry was deleted","eventTimestamp":"2025-12-17T21:25:44.033+00:00","parentLogDocumentInfo":{"name":"[2025/11/17/2] dfsg","id":104,"lastModifiedOnDateTime":"2025-12-17T21:21:06.000+00:00","createdByUsername":"logr","lastModifiedByUsername":"logr","ownerUsername":"logr","ownerUserGroupName":"LOGR_ADMIN","enteredOnDateTime":"2025-11-17T20:45:34.000+00:00"},"logInfo":{"id":214,"lastModifiedOnDateTime":"2025-11-20T18:35:33.000+00:00","lastModifiedByUsername":"logr","enteredByUsername":"logr","enteredOnDateTime":"2025-11-18T21:05:32.000+00:00"},"logbookList":[{"name":"studies-sr","id":7,"displayName":"SR"}],"textDiff":"TEST\r\n\r\nsadf","parentLogInfo":{"id":212,"lastModifiedOnDateTime":"2025-11-24T18:35:55.000+00:00","lastModifiedByUsername":"logr","enteredByUsername":"logr","enteredOnDateTime":"2025-11-18T14:19:36.000+00:00"},"entityName":"Log","eventTriggedByUsername":"logr","entityId":214} +bely/update {"description":"Update action completed","eventTimestamp":"2025-12-17T21:25:44.048+00:00","entityName":"EntityInfo","eventTriggedByUsername":"logr","entityId":114} +bely/delete {"description":"Delete action completed","eventTimestamp":"2025-12-17T21:25:51.958+00:00","entityName":"Log","eventTriggedByUsername":"logr","entityId":208} +bely/logEntry/Delete {"description":"log entry was deleted","eventTimestamp":"2025-12-17T21:25:51.954+00:00","parentLogDocumentInfo":{"name":"[2025/11/17/2] dfsg","id":104,"lastModifiedOnDateTime":"2025-12-17T21:25:44.000+00:00","createdByUsername":"logr","lastModifiedByUsername":"logr","ownerUsername":"logr","ownerUserGroupName":"LOGR_ADMIN","enteredOnDateTime":"2025-11-17T20:45:34.000+00:00"},"logInfo":{"id":208,"lastModifiedOnDateTime":"2025-11-18T13:31:38.000+00:00","lastModifiedByUsername":"logr","enteredByUsername":"logr","enteredOnDateTime":"2025-11-18T13:31:38.000+00:00"},"logbookList":[{"name":"studies-sr","id":7,"displayName":"SR"}],"textDiff":"hello 
World","entityName":"Log","eventTriggedByUsername":"logr","entityId":208} +bely/update {"description":"Update action completed","eventTimestamp":"2025-12-17T21:25:51.963+00:00","entityName":"EntityInfo","eventTriggedByUsername":"logr","entityId":114} \ No newline at end of file diff --git a/tools/developer_tools/bely-mqtt-message-broker/setup.py b/tools/developer_tools/bely-mqtt-message-broker/setup.py new file mode 100644 index 000000000..3e2166507 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/setup.py @@ -0,0 +1,59 @@ +"""Setup configuration for BELY MQTT Framework.""" + +from setuptools import find_packages, setup + +setup( + name="bely-mqtt-framework", + version="0.1.0", + description="Pluggable Python framework for handling BELY MQTT events", + long_description=open("README.md").read(), + long_description_content_type="text/markdown", + author="BELY Team", + author_email="team@bely.dev", + url="https://github.com/bely/mqtt-framework", + license="MIT", + packages=find_packages(where="src"), + package_dir={"": "src"}, + python_requires=">=3.9", + install_requires=[ + "click>=8.1.0", + "paho-mqtt>=1.6.1", + "pydantic>=2.0.0", + "python-dotenv>=1.0.0", + "pluggy>=1.3.0", + "bely-api>=2025.3.dev0", + ], + extras_require={ + "dev": [ + "pytest>=7.0.0", + "pytest-cov>=4.0.0", + "pytest-asyncio>=0.21.0", + "black>=23.0.0", + "ruff>=0.1.0", + "mypy>=1.0.0", + "types-paho-mqtt>=1.6.0", + ], + "apprise": [ + "apprise>=1.4.0", + ], + }, + entry_points={ + "console_scripts": [ + "bely-mqtt=bely_mqtt.cli:main", + ], + }, + classifiers=[ + "Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Topic :: Software Development :: Libraries :: Python Modules", + "Topic :: 
Communications :: Email", + "Topic :: System :: Monitoring", + ], + keywords="mqtt bely logbook plugins framework", +) diff --git a/tools/developer_tools/bely-mqtt-message-broker/src/bely_mqtt/__init__.py b/tools/developer_tools/bely-mqtt-message-broker/src/bely_mqtt/__init__.py new file mode 100644 index 000000000..20a1bb1a5 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/src/bely_mqtt/__init__.py @@ -0,0 +1,65 @@ +""" +BELY MQTT Framework - Pluggable Python framework for handling BELY MQTT events. + +This framework provides: +- Pluggable handler system for MQTT topics +- Type-safe models for BELY events (CoreEvent, LogEntryAddEvent, etc.) +- Integration with BELY API for additional data +- CLI for easy configuration and management +- Specific event handlers for different event types +""" + +from bely_mqtt.events import EventType +from bely_mqtt.models import ( + BaseEvent, + CoreEvent, + LogbookInfo, + LogDocumentInfo, + LogEntryEventBase, + LogEntryAddEvent, + LogEntryUpdateEvent, + LogEntryDeleteEvent, + LogEntryReplyAddEvent, + LogEntryReplyUpdateEvent, + LogEntryReplyDeleteEvent, + LogInfo, + LogReactionEventBase, + LogReactionAddEvent, + LogReactionDeleteEvent, + LogReactionInfo, + MQTTMessage, + ReactionId, + ReactionInfo, + TestNotificationEvent, +) +from bely_mqtt.mqtt_client import BelyMQTTClient +from bely_mqtt.plugin import MQTTHandler, PluginManager + +__version__ = "0.1.0" + +__all__ = [ + "BelyMQTTClient", + "MQTTHandler", + "PluginManager", + "EventType", + "BaseEvent", + "CoreEvent", + "LogEntryEventBase", + "LogEntryAddEvent", + "LogEntryUpdateEvent", + "LogEntryDeleteEvent", + "LogEntryReplyAddEvent", + "LogEntryReplyUpdateEvent", + "LogEntryReplyDeleteEvent", + "LogReactionEventBase", + "LogReactionAddEvent", + "LogReactionDeleteEvent", + "LogDocumentInfo", + "LogInfo", + "LogbookInfo", + "LogReactionInfo", + "ReactionInfo", + "ReactionId", + "MQTTMessage", + "TestNotificationEvent", +] diff --git 
"""
Command-line interface for BELY MQTT framework.
"""

import logging
import sys
from pathlib import Path
from typing import IO, Optional

import click
from dotenv import load_dotenv

from bely_mqtt import __version__
from bely_mqtt.config import ConfigManager
from bely_mqtt.mqtt_client import BelyMQTTClient
from bely_mqtt.plugin import PluginManager

# Configure logging
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
)
logger = logging.getLogger(__name__)

# Holds the single-instance lock file object for the lifetime of the process.
# If this reference were dropped, the garbage collector could close the file
# and silently release the flock, letting a second instance start.
# (Previously this was stashed as an attribute on the click Command object,
# which worked only by accident.)
_lock_fd: Optional[IO[str]] = None


@click.group()
@click.version_option(version=__version__, prog_name="bely-mqtt")
def cli() -> None:
    """BELY MQTT Framework - Pluggable MQTT handler for BELY events."""
    pass


@cli.command()
@click.option(
    "--broker-host",
    default="localhost",
    envvar="MQTT_BROKER_HOST",
    help="MQTT broker hostname or IP address.",
)
@click.option(
    "--broker-port",
    default=1883,
    type=int,
    envvar="MQTT_BROKER_PORT",
    help="MQTT broker port.",
)
@click.option(
    "--client-id",
    default="bely-mqtt-client",
    envvar="MQTT_CLIENT_ID",
    help="MQTT client ID.",
)
@click.option(
    "--username",
    default=None,
    envvar="MQTT_USERNAME",
    help="MQTT broker username.",
)
@click.option(
    "--password",
    default=None,
    envvar="MQTT_PASSWORD",
    help="MQTT broker password.",
)
@click.option(
    "--topic",
    "-t",
    multiple=True,
    default=["bely/#"],
    help="MQTT topic(s) to subscribe to. Can be specified multiple times.",
)
@click.option(
    "--handlers-dir",
    type=click.Path(exists=False, file_okay=False, dir_okay=True, path_type=Path),
    default=None,
    envvar="BELY_HANDLERS_DIR",
    help="Directory containing handler plugins.",
)
@click.option(
    "--api-url",
    default=None,
    envvar="BELY_API_URL",
    help="BELY API base URL for querying additional information.",
)
@click.option(
    "--api-key",
    default=None,
    envvar="BELY_API_KEY",
    help="BELY API key for authentication.",
)
@click.option(
    "--log-level",
    type=click.Choice(["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]),
    default="INFO",
    envvar="LOG_LEVEL",
    help="Logging level.",
)
@click.option(
    "--env-file",
    type=click.Path(exists=True, file_okay=True, dir_okay=False, path_type=Path),
    default=None,
    help="Path to .env file for environment variables.",
)
@click.option(
    "--config",
    type=click.Path(exists=True, file_okay=True, dir_okay=False, path_type=Path),
    default=None,
    envvar="BELY_CONFIG",
    help="Path to configuration file for handlers (YAML format).",
)
@click.option(
    "--lock-file",
    type=click.Path(file_okay=True, dir_okay=False, path_type=Path),
    default="/tmp/bely-mqtt.lock",
    envvar="BELY_LOCK_FILE",
    help="Path to lock file for single-instance enforcement.",
)
def start(
    broker_host: str,
    broker_port: int,
    client_id: str,
    username: Optional[str],
    password: Optional[str],
    topic: tuple[str, ...],
    handlers_dir: Optional[Path],
    api_url: Optional[str],
    api_key: Optional[str],
    log_level: str,
    env_file: Optional[Path],
    config: Optional[Path],
    lock_file: Path,
) -> None:
    """Start the BELY MQTT client with registered handlers.

    Acquires an exclusive flock on ``lock_file`` so only one instance runs at
    a time, wires up configuration / API factory / plugin manager, then blocks
    inside the MQTT client loop until interrupted.
    """
    global _lock_fd

    # Acquire single-instance lock.  fcntl is POSIX-only, so it is imported
    # lazily to keep the rest of the CLI importable elsewhere.
    import fcntl

    lock_fd = open(lock_file, "w")  # noqa: SIM115
    try:
        fcntl.flock(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except BlockingIOError:
        # Another process holds the lock; close our descriptor before exiting
        # so it is not leaked.
        lock_fd.close()
        logger.error(
            f"Another instance of bely-mqtt is already running (lock file: {lock_file}). "
            "Use --lock-file to specify a different lock file if you need multiple instances."
        )
        sys.exit(1)

    # Keep lock_fd alive for the lifetime of the process.
    _lock_fd = lock_fd

    # Load environment variables from file if provided
    if env_file:
        load_dotenv(env_file)

    # Set logging level
    logging.getLogger().setLevel(log_level)

    logger.info("Starting BELY MQTT Framework")
    logger.info(f"Broker: {broker_host}:{broker_port}")
    logger.info(f"Topics: {', '.join(topic)}")

    # Initialize configuration manager
    config_manager = None
    if config:
        config_manager = ConfigManager()
        try:
            config_manager.load_from_file(config)
            logger.info(f"Loaded handler configuration from: {config}")
        except Exception as e:
            logger.error(f"Failed to load configuration file: {e}")
            sys.exit(1)

    # Fall back to bely_url from config if --api-url not provided
    if not api_url and config_manager and config_manager.global_config:
        api_url = config_manager.global_config.bely_url
        if api_url:
            logger.info(f"Using bely_url from config file: {api_url}")

    # Initialize API factory if URL is provided
    api_factory = None
    if api_url:
        try:
            from BelyApiFactory import BelyApiFactory

            api_factory = BelyApiFactory(api_url)
            logger.info(f"BELY API factory initialized: {api_url}")
        except ImportError:
            logger.warning("BelyApiFactory not available. API features disabled.")

    # Initialize plugin manager
    plugin_manager = PluginManager(api_factory=api_factory, config_manager=config_manager)

    # Load handlers from directory if provided
    if handlers_dir:
        logger.info(f"Loading handlers from: {handlers_dir}")
        plugin_manager.load_handlers_from_directory(handlers_dir)

    if not plugin_manager.handlers:
        logger.warning("No handlers registered. Messages will be received but not processed.")

    # Initialize MQTT client
    mqtt_client = BelyMQTTClient(
        broker_host=broker_host,
        broker_port=broker_port,
        client_id=client_id,
        username=username,
        password=password,
        plugin_manager=plugin_manager,
    )

    # Subscribe to topics
    for t in topic:
        mqtt_client.subscribe(t)

    # Start the client (blocks until shutdown)
    try:
        mqtt_client.start()
    except KeyboardInterrupt:
        logger.info("Shutting down...")
        sys.exit(0)
    except Exception as e:
        logger.error(f"Fatal error: {e}", exc_info=True)
        sys.exit(1)


@cli.command()
@click.option(
    "--handlers-dir",
    type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
    required=True,
    help="Directory containing handler plugins.",
)
def list_handlers(handlers_dir: Path) -> None:
    """List all available handlers in a directory."""
    plugin_manager = PluginManager()
    plugin_manager.load_handlers_from_directory(handlers_dir)

    if not plugin_manager.handlers:
        click.echo("No handlers found.")
        return

    click.echo(f"Found {len(plugin_manager.handlers)} handler(s):\n")
    for handler in plugin_manager.handlers:
        click.echo(f"  {handler.__class__.__name__}")
        click.echo(f"    Topic Pattern: {handler.topic_pattern}")
        if handler.__doc__:
            click.echo(f"    Description: {handler.__doc__.strip()}")
        click.echo()


@cli.command()
def validate_config() -> None:
    """Validate the current configuration."""
    click.echo("Configuration validation not yet implemented.")


def main() -> None:
    """Entry point for the CLI."""
    cli()


if __name__ == "__main__":
    main()
"""
Configuration system for BELY MQTT handlers.

This module provides a configuration system that allows handlers to receive
configuration parameters when they are instantiated.
"""

import logging
from pathlib import Path
from typing import Any, Dict, Optional

logger = logging.getLogger(__name__)


class GlobalConfig:
    """
    Global configuration shared across all handlers.

    Stores configuration parameters that will be available to all handlers.
    """

    def __init__(self, config: Optional[Dict[str, Any]] = None):
        """
        Initialize global configuration.

        Args:
            config: Dictionary of global configuration parameters.
        """
        self.config = config or {}

    def get(self, key: str, default: Any = None) -> Any:
        """
        Get a configuration value.

        Args:
            key: Configuration key.
            default: Default value if key not found.

        Returns:
            Configuration value or default.
        """
        return self.config.get(key, default)

    @property
    def bely_url(self) -> Optional[str]:
        """
        Get the BELY URL from configuration.

        Returns:
            BELY URL if configured, None otherwise.
        """
        return self.config.get("bely_url")

    def __repr__(self) -> str:
        """Return string representation."""
        return f"GlobalConfig({self.config})"


class HandlerConfig:
    """
    Configuration for a handler.

    Stores configuration parameters that will be passed to handler constructors.
    """

    def __init__(self, handler_name: str, config: Optional[Dict[str, Any]] = None):
        """
        Initialize handler configuration.

        Args:
            handler_name: Name of the handler class.
            config: Dictionary of configuration parameters.
        """
        self.handler_name = handler_name
        self.config = config or {}

    def get(self, key: str, default: Any = None) -> Any:
        """
        Get a configuration value.

        Args:
            key: Configuration key.
            default: Default value if key not found.

        Returns:
            Configuration value or default.
        """
        return self.config.get(key, default)

    def __repr__(self) -> str:
        """Return string representation."""
        return f"HandlerConfig({self.handler_name}, {self.config})"


class ConfigManager:
    """
    Manages configuration for handlers.

    Loads configuration from files or dictionaries and provides it to handlers.
    Supports both global configuration (shared across all handlers) and
    handler-specific configuration.
    """

    def __init__(self):
        """Initialize the configuration manager."""
        self.global_config: Optional[GlobalConfig] = None
        self.configs: Dict[str, HandlerConfig] = {}
        self.logger = logging.getLogger(__name__)

    def load_from_file(self, config_file: Path) -> None:
        """
        Load configuration from a YAML file.

        File format:
            global:
              shared_param: value
              another_param: value
              bely_url: https://bely.example.com

            handlers:
              AdvancedLoggingHandler:
                logging_dir: /var/log/bely
              MyHandler:
                param1: value1
                param2: value2
              AppriseSmartNotificationHandler:
                config_path: /path/to/apprise_config.yaml

        Args:
            config_file: Path to the YAML configuration file.

        Raises:
            FileNotFoundError: If the configuration file doesn't exist.
            yaml.YAMLError: If the file is not valid YAML.
        """
        config_file = Path(config_file)
        if not config_file.exists():
            raise FileNotFoundError(f"Configuration file not found: {config_file}")

        # Imported lazily so the dict-based API (load_from_dict, set_config)
        # remains usable even when PyYAML is not installed.
        import yaml

        try:
            with open(config_file, encoding="utf-8") as f:
                data = yaml.safe_load(f)

            # safe_load returns None for an empty document; treat that as an
            # empty configuration instead of raising TypeError on `in data`.
            data = data or {}

            # Load global configuration if present
            if "global" in data:
                self.set_global_config(data["global"])
                self.logger.info(f"Loaded global configuration: {data['global']}")

            # Load handler-specific configurations
            handlers_config = data.get("handlers", {})
            for handler_name, config in handlers_config.items():
                self.set_config(handler_name, config)

            self.logger.info(f"Loaded configuration from {config_file}")
        except yaml.YAMLError as e:
            self.logger.error(f"Invalid YAML in configuration file: {e}")
            raise

    def load_from_dict(self, config_dict: Dict[str, Any]) -> None:
        """
        Load configuration from a dictionary.

        Args:
            config_dict: Dictionary with optional "global" and "handlers" keys.
        """
        # Load global configuration if present
        if "global" in config_dict:
            self.set_global_config(config_dict["global"])

        # Load handler-specific configurations
        handlers_config = config_dict.get("handlers", {})
        for handler_name, config in handlers_config.items():
            self.set_config(handler_name, config)

    def set_global_config(self, config: Dict[str, Any]) -> None:
        """
        Set global configuration.

        Args:
            config: Global configuration dictionary.
        """
        self.global_config = GlobalConfig(config)
        self.logger.debug(f"Set global configuration: {config}")

    def get_global_config(self) -> Optional[GlobalConfig]:
        """
        Get global configuration.

        Returns:
            GlobalConfig if set, None otherwise.
        """
        return self.global_config

    def set_config(self, handler_name: str, config: Dict[str, Any]) -> None:
        """
        Set configuration for a handler.

        Args:
            handler_name: Name of the handler class.
            config: Configuration dictionary.
        """
        self.configs[handler_name] = HandlerConfig(handler_name, config)
        self.logger.debug(f"Set configuration for {handler_name}: {config}")

    def get_config(self, handler_name: str) -> Optional[HandlerConfig]:
        """
        Get configuration for a handler.

        Args:
            handler_name: Name of the handler class.

        Returns:
            HandlerConfig if found, None otherwise.
        """
        return self.configs.get(handler_name)

    def has_config(self, handler_name: str) -> bool:
        """
        Check if configuration exists for a handler.

        Args:
            handler_name: Name of the handler class.

        Returns:
            True if configuration exists.
        """
        return handler_name in self.configs

    def __repr__(self) -> str:
        """Return string representation."""
        return f"ConfigManager(global={self.global_config}, handlers={self.configs})"
class EventType(Enum):
    """
    Enumeration of every supported BELY event type.

    The value of each member is the exact MQTT topic the event is published
    on, so an enum value lookup doubles as a topic-to-event-type mapping.

    Generic events:     bely/add, bely/update, bely/delete
    Log entry events:   bely/logEntry/{Add,Update,Delete}
    Log entry replies:  bely/logEntryReply/{Add,Update,Delete}
    Log reactions:      bely/logReaction/{Add,Delete}
    Search:             bely/search
    Notifications:      bely/notification/test
    """

    # Generic events
    GENERIC_ADD = "bely/add"
    GENERIC_UPDATE = "bely/update"
    GENERIC_DELETE = "bely/delete"

    # Log entry events
    LOG_ENTRY_ADD = "bely/logEntry/Add"
    LOG_ENTRY_UPDATE = "bely/logEntry/Update"
    LOG_ENTRY_DELETE = "bely/logEntry/Delete"

    # Log entry reply events
    LOG_ENTRY_REPLY_ADD = "bely/logEntryReply/Add"
    LOG_ENTRY_REPLY_UPDATE = "bely/logEntryReply/Update"
    LOG_ENTRY_REPLY_DELETE = "bely/logEntryReply/Delete"

    # Log reaction events
    LOG_REACTION_ADD = "bely/logReaction/Add"
    LOG_REACTION_DELETE = "bely/logReaction/Delete"

    # Search events
    SEARCH = "bely/search"

    # Notification events
    NOTIFICATION_TEST = "bely/notification/test"

    def __str__(self) -> str:
        """Return the MQTT topic pattern for this event type."""
        return self.value

    @classmethod
    def from_topic(cls, topic: str) -> Optional["EventType"]:
        """
        Look up the EventType for an MQTT topic.

        Args:
            topic: MQTT topic string (e.g. "bely/logEntry/Add").

        Returns:
            The matching EventType, or None for an unknown topic.
        """
        # Enum value lookup raises ValueError when no member has this
        # value; translate that into the documented "no match" result.
        try:
            return cls(topic)
        except ValueError:
            return None

    @property
    def handler_method_name(self) -> str:
        """
        Name of the per-event handler method for this event type.

        The enum member name is lowered and prefixed, e.g.
        LOG_ENTRY_ADD -> "handle_log_entry_add".
        """
        return "handle_" + self.name.lower()
+""" + +from datetime import datetime +from typing import Any, Dict, List, Optional + +from pydantic import BaseModel, ConfigDict, Field + + +class LogbookInfo(BaseModel): + """Information about a logbook.""" + + model_config = ConfigDict(populate_by_name=True) + + name: str + id: int + display_name: Optional[str] = Field(None, alias="displayName") + + +class UserInfo(BaseModel): + """Basic user information.""" + + username: str + + +class LogDocumentInfo(BaseModel): + """Information about a log document.""" + + model_config = ConfigDict(populate_by_name=True) + + name: str + id: int + last_modified_on_date_time: Optional[datetime] = Field(None, alias="lastModifiedOnDateTime") + created_by_username: Optional[str] = Field(None, alias="createdByUsername") + last_modified_by_username: Optional[str] = Field(None, alias="lastModifiedByUsername") + entered_on_date_time: Optional[datetime] = Field(None, alias="enteredOnDateTime") + owner_username: Optional[str] = Field(None, alias="ownerUsername") + owner_user_group_name: Optional[str] = Field(None, alias="ownerUserGroupName") + + +class LogInfo(BaseModel): + """Information about a log entry.""" + + model_config = ConfigDict(populate_by_name=True) + + id: int + last_modified_on_date_time: Optional[datetime] = Field(None, alias="lastModifiedOnDateTime") + last_modified_by_username: Optional[str] = Field(None, alias="lastModifiedByUsername") + entered_by_username: Optional[str] = Field(None, alias="enteredByUsername") + entered_on_date_time: Optional[datetime] = Field(None, alias="enteredOnDateTime") + + +class BaseEvent(BaseModel): + """Base class for all BELY MQTT events. + + Note: entity_id can be either an integer (for most events) or a composite + object like ReactionId (for log reaction events). 
+ """ + + model_config = ConfigDict(populate_by_name=True) + + description: str + event_timestamp: datetime = Field(alias="eventTimestamp") + entity_name: str = Field(alias="entityName") + entity_id: Any = Field(alias="entityId") # Can be int or composite object + event_triggered_by_username: str = Field(alias="eventTriggedByUsername") + + +class CoreEvent(BaseEvent): + """Core event with minimal information (add/update/delete).""" + + pass + + +class LogEntryEventBase(BaseEvent): + """Base class for log entry events with common fields.""" + + parent_log_document_info: LogDocumentInfo = Field(alias="parentLogDocumentInfo") + log_info: LogInfo = Field(alias="logInfo") + logbook_list: List[LogbookInfo] = Field(alias="logbookList") + text_diff: str = Field(alias="textDiff") + + +class LogEntryReplyEventBase(LogEntryEventBase): + """Base class for log entry reply events. + + Extends LogEntryEventBase with parent log information for replies. + """ + + parent_log_info: LogInfo = Field(alias="parentLogInfo") + + +# Log Entry Events (Add/Update/Delete) + + +class LogEntryAddEvent(LogEntryEventBase): + """Event triggered when a log entry is added.""" + + pass + + +class LogEntryUpdateEvent(LogEntryEventBase): + """Event triggered when a log entry is updated.""" + + pass + + +class LogEntryDeleteEvent(LogEntryEventBase): + """Event triggered when a log entry is deleted.""" + + pass + + +# Log Entry Reply Events (Add/Update/Delete) + + +class LogEntryReplyAddEvent(LogEntryReplyEventBase): + """Event triggered when a reply to a log entry is added.""" + + pass + + +class LogEntryReplyUpdateEvent(LogEntryReplyEventBase): + """Event triggered when a reply to a log entry is updated.""" + + pass + + +class LogEntryReplyDeleteEvent(LogEntryReplyEventBase): + """Event triggered when a reply to a log entry is deleted.""" + + pass + + +# Reaction Models + + +class ReactionInfo(BaseModel): + """Information about a reaction.""" + + model_config = ConfigDict(populate_by_name=True) + + id: 
int + name: str + emoji_code: int = Field(alias="emojiCode") + description: Optional[str] = None + emoji: str + + +class ReactionId(BaseModel): + """Composite ID for a reaction (log ID, reaction ID, user ID).""" + + model_config = ConfigDict(populate_by_name=True) + + log_id: int = Field(alias="logId") + reaction_id: int = Field(alias="reactionId") + user_id: int = Field(alias="userId") + + +class LogReactionInfo(BaseModel): + """Information about a log reaction.""" + + model_config = ConfigDict(populate_by_name=True) + + reaction: ReactionInfo + id: ReactionId + username: str + + +class LogReactionEventBase(BaseEvent): + """Base class for log reaction events with common fields.""" + + parent_log_info: LogInfo = Field(alias="parentLogInfo") + parent_log_document_info: LogDocumentInfo = Field(alias="parentLogDocumentInfo") + log_reaction: LogReactionInfo = Field(alias="logReaction") + + +class LogReactionAddEvent(LogReactionEventBase): + """Event triggered when a reaction is added to a log entry.""" + + pass + + +class LogReactionDeleteEvent(LogReactionEventBase): + """Event triggered when a reaction is deleted from a log entry.""" + + pass + + +class TestNotificationEvent(BaseModel): + """Event triggered when a test notification is requested.""" + + model_config = ConfigDict(populate_by_name=True) + + notification_endpoint: str = Field(alias="notificationEndpoint") + configuration_name: str = Field(alias="configurationName") + configuration_id: Optional[int] = Field(None, alias="configurationId") + provider_settings: Optional[Dict[str, str]] = Field(None, alias="providerSettings") + event_triggered_by_username: str = Field(alias="eventTriggedByUsername") + event_timestamp: datetime = Field(alias="eventTimestamp") + + +# Search Models + + +class FilterItem(BaseModel): + """A filter item with id and name (used in search options).""" + + id: int + name: str + + +class LogbookSearchOptions(BaseModel): + """Search options for logbook search events.""" + + model_config = 
ConfigDict(populate_by_name=True) + + logbook_types: Optional[List[FilterItem]] = Field(None, alias="logbookTypes") + systems: Optional[List[FilterItem]] = None + users: Optional[List[FilterItem]] = None + start_modified_date: Optional[datetime] = Field(None, alias="startModifiedDate") + end_modified_date: Optional[datetime] = Field(None, alias="endModifiedDate") + start_created_date: Optional[datetime] = Field(None, alias="startCreatedDate") + end_created_date: Optional[datetime] = Field(None, alias="endCreatedDate") + case_insensitive: bool = Field(alias="caseInsensitive") + + +class SearchEvent(BaseModel): + """Event triggered when a search is performed.""" + + model_config = ConfigDict(populate_by_name=True) + + search_text: str = Field(alias="searchText") + search_options: Optional[LogbookSearchOptions] = Field(None, alias="searchOptions") + source: str + event_timestamp: datetime = Field(alias="eventTimestamp") + + +class MQTTMessage(BaseModel): + """Wrapper for an MQTT message with topic and payload.""" + + model_config = ConfigDict(arbitrary_types_allowed=True) + + topic: str + payload: Dict[str, Any] + raw_payload: str = "" diff --git a/tools/developer_tools/bely-mqtt-message-broker/src/bely_mqtt/mqtt_client.py b/tools/developer_tools/bely-mqtt-message-broker/src/bely_mqtt/mqtt_client.py new file mode 100644 index 000000000..b20e6f907 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/src/bely_mqtt/mqtt_client.py @@ -0,0 +1,218 @@ +""" +MQTT client for connecting to BELY message broker. +""" + +import asyncio +import json +import logging +from typing import Any, Optional + +import paho.mqtt.client as mqtt +from paho.mqtt.client import ReasonCodes +from paho.mqtt.client import Properties + +from bely_mqtt.models import MQTTMessage +from bely_mqtt.plugin import PluginManager + +logger = logging.getLogger(__name__) + + +class BelyMQTTClient: + """ + MQTT client for BELY events. 
class BelyMQTTClient:
    """
    MQTT client for BELY events.

    Connects to an MQTT broker and routes messages to registered handlers.
    Incoming messages arrive on paho's network thread and are handed off to
    the asyncio event loop owned by start()/start_async() via
    run_coroutine_threadsafe.
    """

    def __init__(
        self,
        broker_host: str,
        broker_port: int = 1883,
        client_id: str = "bely-mqtt-client",
        username: Optional[str] = None,
        password: Optional[str] = None,
        plugin_manager: Optional[PluginManager] = None,
    ):
        """
        Initialize the MQTT client.

        Args:
            broker_host: MQTT broker hostname or IP.
            broker_port: MQTT broker port (default: 1883).
            client_id: MQTT client ID.
            username: Optional MQTT username.
            password: Optional MQTT password.
            plugin_manager: PluginManager instance for handling messages.
        """
        self.broker_host = broker_host
        self.broker_port = broker_port
        self.client_id = client_id
        self.username = username
        self.password = password
        self.plugin_manager = plugin_manager or PluginManager()
        self.logger = logging.getLogger(__name__)

        # paho-mqtt 2.x callback API; VERSION2 callbacks receive ReasonCode
        # objects instead of plain ints.
        self.client = mqtt.Client(mqtt.CallbackAPIVersion.VERSION2, client_id)  # type: ignore[attr-defined, arg-type]
        self.client.on_connect = self._on_connect
        self.client.on_message = self._on_message
        self.client.on_disconnect = self._on_disconnect  # type: ignore[assignment]
        self.client.on_subscribe = self._on_subscribe

        if username and password:
            self.client.username_pw_set(username, password)

        self._subscribed_topics: set[str] = set()
        self._running = False
        self._loop: Optional[asyncio.AbstractEventLoop] = None

    def subscribe(self, topic: str) -> None:
        """
        Subscribe to an MQTT topic.

        Subscriptions made before connecting are remembered and replayed in
        _on_connect.

        Args:
            topic: The topic pattern to subscribe to.
        """
        self._subscribed_topics.add(topic)
        if self.client.is_connected():
            self.client.subscribe(topic)
            self.logger.info(f"Subscribed to topic: {topic}")

    def _on_connect(
        self,
        client: mqtt.Client,
        userdata: Any,
        connect_flags: dict[str, int],
        reason_code: ReasonCodes,
        properties: Optional[Properties],
    ) -> None:
        """Handle MQTT connection (VERSION2 signature).

        ReasonCode compares equal to the integer 0 on success, so the int
        comparison is valid under the v2 callback API.
        """
        if reason_code == 0:
            self.logger.info(f"Connected to MQTT broker at {self.broker_host}:{self.broker_port}")
            # Resubscribe to all topics (also covers broker reconnects)
            for topic in self._subscribed_topics:
                client.subscribe(topic)
                self.logger.info(f"Subscribed to topic: {topic}")
        else:
            self.logger.error(f"Failed to connect to MQTT broker: {reason_code}")

    def _on_message(
        self,
        client: mqtt.Client,
        userdata: object,
        msg: mqtt.MQTTMessage,
    ) -> None:
        """Handle incoming MQTT message.

        Runs on paho's network thread: decodes the JSON payload and schedules
        the async plugin dispatch on the client's event loop.
        """
        try:
            payload_str = msg.payload.decode("utf-8")
            payload_dict = json.loads(payload_str)

            message = MQTTMessage(
                topic=msg.topic,
                payload=payload_dict,
                raw_payload=payload_str,
            )

            # Schedule the async handler on the event loop
            if self._loop and self._loop.is_running():
                asyncio.run_coroutine_threadsafe(
                    self.plugin_manager.handle_message(message),
                    self._loop,
                )
            else:
                self.logger.warning("Event loop not running, cannot process message")

        except json.JSONDecodeError as e:
            self.logger.error(f"Failed to parse JSON payload from {msg.topic}: {e}")
        except Exception as e:
            self.logger.error(f"Error processing message from {msg.topic}: {e}")

    def _on_disconnect(
        self,
        client: mqtt.Client,
        userdata: Any,
        disconnect_flags: dict[str, int],
        reason_code: Optional[ReasonCodes],
        properties: Optional[Properties],
    ) -> None:
        """Handle MQTT disconnection (VERSION2 signature)."""
        if reason_code == 0:
            self.logger.info("Disconnected from MQTT broker")
        else:
            self.logger.warning(f"Unexpected disconnection from MQTT broker: {reason_code}")

    def _on_subscribe(
        self,
        client: mqtt.Client,
        userdata: object,
        mid: int,
        reason_code_list: list[mqtt.ReasonCodes],
        properties: mqtt.Properties,
    ) -> None:
        """Handle subscription confirmation (one reason code per topic filter)."""
        for reason_code in reason_code_list:
            if reason_code == 0:
                self.logger.debug("Subscription successful")
            else:
                self.logger.warning(f"Subscription failed: {reason_code}")

    def connect(self) -> None:
        """Connect to the MQTT broker.

        Raises:
            Exception: Re-raises any connection error after logging it.
        """
        try:
            self.client.connect(self.broker_host, self.broker_port, keepalive=60)
            self.logger.info(f"Connecting to MQTT broker at {self.broker_host}:{self.broker_port}")
        except Exception as e:
            self.logger.error(f"Failed to connect to MQTT broker: {e}")
            raise

    def disconnect(self) -> None:
        """Disconnect from the MQTT broker."""
        self.client.disconnect()

    async def _run_mqtt_loop(self) -> None:
        """Run paho's blocking network loop in the default executor thread."""
        # get_running_loop() is the correct call inside a coroutine;
        # get_event_loop() here has been deprecated since Python 3.10.
        loop = asyncio.get_running_loop()
        await loop.run_in_executor(None, self.client.loop_forever)

    def start(self) -> None:
        """Start the MQTT client (blocking until disconnect/interrupt)."""
        self._running = True
        # Own a dedicated event loop so async handlers scheduled from paho's
        # network thread have somewhere to run.  asyncio.new_event_loop()
        # does not raise, so no RuntimeError fallback is needed.
        self._loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self._loop)

        try:
            self.connect()
            # loop_forever runs in an executor thread so the event loop
            # stays free to execute the async handlers.
            self._loop.run_until_complete(self._run_mqtt_loop())
        except KeyboardInterrupt:
            self.logger.info("Interrupted by user")
        finally:
            self.disconnect()
            self._running = False
            if self._loop and not self._loop.is_closed():
                self._loop.close()

    async def start_async(self) -> None:
        """Start the MQTT client from within an existing event loop (async)."""
        self._running = True
        # Must be called from a running loop; get_running_loop() makes that
        # contract explicit (get_event_loop() is deprecated here).
        self._loop = asyncio.get_running_loop()

        try:
            self.connect()
            # Run the MQTT loop in a separate thread
            await self._run_mqtt_loop()
        except KeyboardInterrupt:
            self.logger.info("Interrupted by user")
        finally:
            self.disconnect()
            self._running = False

    def is_connected(self) -> bool:
        """Check if the client is connected to the broker."""
        return self.client.is_connected()
Both (specific handlers are called first, then generic handle if not overridden) + + Supported specific event handlers: + - handle_generic_add(event: CoreEvent) - Generic add events (bely/add) + - handle_generic_update(event: CoreEvent) - Generic update events (bely/update) + - handle_generic_delete(event: CoreEvent) - Generic delete events (bely/delete) + - handle_log_entry_add(event: LogEntryAddEvent) - Log entry added (bely/logEntry/Add) + - handle_log_entry_update(event: LogEntryUpdateEvent) - Log entry updated (bely/logEntry/Update) + - handle_log_entry_delete(event: LogEntryDeleteEvent) - Log entry deleted (bely/logEntry/Delete) + - handle_log_entry_reply_add(event: LogEntryReplyAddEvent) - Reply added (bely/logEntryReply/Add) + - handle_log_entry_reply_update(event: LogEntryReplyUpdateEvent) - Reply updated (bely/logEntryReply/Update) + - handle_log_entry_reply_delete(event: LogEntryReplyDeleteEvent) - Reply deleted (bely/logEntryReply/Delete) + - handle_log_reaction_add(event: LogReactionAddEvent) - Reaction added (bely/logReaction/Add) + - handle_log_reaction_delete(event: LogReactionDeleteEvent) - Reaction deleted (bely/logReaction/Delete) + + Example: + class MyHandler(MQTTHandler): + # Uses default topic_pattern "bely/#" - receives all BELY events + + async def handle_log_entry_add(self, event: LogEntryAddEvent) -> None: + # Only called for bely/logEntry/Add events + # event is already parsed and typed + self.logger.info(f"New entry: {event.log_info.id}") + + async def handle_generic_update(self, event: CoreEvent) -> None: + # Only called for bely/update events + self.logger.info(f"Updated: {event.entity_name}") + + class SpecificHandler(MQTTHandler): + @property + def topic_pattern(self) -> str: + # Override to subscribe to specific topics only + return "bely/logEntry/#" + + async def handle(self, message: MQTTMessage) -> None: + # Handle only log entry related events + pass + """ + + def __init__( + self, + api_factory=None, + global_config: 
Optional[GlobalConfig] = None, + ): + """ + Initialize the handler. + + Args: + api_factory: Optional BelyApiFactory instance for querying the BELY API. + global_config: Optional global configuration shared across all handlers. + """ + self.api_factory = api_factory + self.global_config = global_config + self.logger = logging.getLogger(self.__class__.__name__) + + @property + def topic_pattern(self) -> str: + """ + Return the MQTT topic pattern this handler subscribes to. + + By default, handlers subscribe to all BELY topics (bely/#). + Override this property to subscribe to specific topics only. + + Examples: + - "bely/#" - matches all BELY topics (default) + - "bely/add" - matches exact topic + - "bely/logEntry/#" - matches all log entry subtopics + - "bely/+/Add" - matches single level wildcard + + Returns: + MQTT topic pattern string. + """ + return "bely/#" + + async def handle(self, message: MQTTMessage) -> None: + """ + Handle an MQTT message. + + This method is called for all messages matching the topic pattern. + Override this method to handle all messages, or implement specific + event handler methods (handle_log_entry_add, etc.) for specific events. + + Args: + message: The MQTT message to handle. + + Raises: + Exception: Any exception raised will be logged but not propagated. + """ + # Try to route to specific event handler + event_type = EventType.from_topic(message.topic) + if event_type: + handler_method_name = event_type.handler_method_name + if hasattr(self, handler_method_name): + handler_method = getattr(self, handler_method_name) + if callable(handler_method): + # Parse the message payload into the appropriate event type + event = self._parse_event(event_type, message) + await handler_method(event) + return + + # If no specific handler found, call the generic handler + await self.handle_generic(message) + + async def handle_generic(self, message: MQTTMessage) -> None: + """ + Generic handler for messages that don't match specific event types. 
+ + Override this method to handle messages that don't match any specific + event type, or implement specific event handler methods. + + Args: + message: The MQTT message to handle. + """ + # Default implementation does nothing + # Subclasses can override to provide custom behavior + pass + + @staticmethod + def _parse_event(event_type: EventType, message: MQTTMessage) -> Any: + """ + Parse an MQTT message into the appropriate event type. + + Args: + event_type: The EventType to parse into. + message: The MQTT message to parse. + + Returns: + Parsed event object of the appropriate type. + + Raises: + ValueError: If the event type is not recognized. + """ + # Import here to avoid circular imports + from bely_mqtt.models import ( + CoreEvent, + LogEntryAddEvent, + LogEntryUpdateEvent, + LogEntryDeleteEvent, + LogEntryReplyAddEvent, + LogEntryReplyUpdateEvent, + LogEntryReplyDeleteEvent, + LogReactionAddEvent, + LogReactionDeleteEvent, + TestNotificationEvent, + ) + + event_type_map = { + EventType.GENERIC_ADD: CoreEvent, + EventType.GENERIC_UPDATE: CoreEvent, + EventType.GENERIC_DELETE: CoreEvent, + EventType.LOG_ENTRY_ADD: LogEntryAddEvent, + EventType.LOG_ENTRY_UPDATE: LogEntryUpdateEvent, + EventType.LOG_ENTRY_DELETE: LogEntryDeleteEvent, + EventType.LOG_ENTRY_REPLY_ADD: LogEntryReplyAddEvent, + EventType.LOG_ENTRY_REPLY_UPDATE: LogEntryReplyUpdateEvent, + EventType.LOG_ENTRY_REPLY_DELETE: LogEntryReplyDeleteEvent, + EventType.LOG_REACTION_ADD: LogReactionAddEvent, + EventType.LOG_REACTION_DELETE: LogReactionDeleteEvent, + EventType.NOTIFICATION_TEST: TestNotificationEvent, + } + + event_class = event_type_map.get(event_type) + if not event_class: + raise ValueError(f"Unknown event type: {event_type}") + + return event_class(**message.payload) + + def topic_matches(self, topic: str) -> bool: + """ + Check if a topic matches this handler's pattern. + + Args: + topic: The topic to check. + + Returns: + True if the topic matches the pattern, False otherwise. 
+ """ + return self._match_mqtt_pattern(self.topic_pattern, topic) + + @staticmethod + def _match_mqtt_pattern(pattern: str, topic: str) -> bool: + """ + Match an MQTT topic against a pattern. + + Supports: + - # (multi-level wildcard, must be at end) + - + (single-level wildcard) + - exact matches + + Args: + pattern: The MQTT pattern. + topic: The topic to match. + + Returns: + True if the topic matches the pattern. + """ + pattern_parts = pattern.split("/") + topic_parts = topic.split("/") + + for i, pattern_part in enumerate(pattern_parts): + if pattern_part == "#": + # Multi-level wildcard matches everything remaining + return True + elif pattern_part == "+": + # Single-level wildcard matches one level + if i >= len(topic_parts): + return False + else: + # Exact match required + if i >= len(topic_parts) or topic_parts[i] != pattern_part: + return False + + # Check if we've consumed all topic parts + return len(topic_parts) == len(pattern_parts) + + +class PluginManager: + """ + Manages loading and executing MQTT handlers. + + Supports passing both global and handler-specific configuration to handlers + during instantiation. + """ + + def __init__( + self, + api_factory=None, + config_manager: Optional[Any] = None, + ): + """ + Initialize the plugin manager. + + Args: + api_factory: Optional BelyApiFactory instance to pass to handlers. + config_manager: Optional ConfigManager for handler configuration. + """ + self.api_factory = api_factory + self.config_manager = config_manager + self.handlers: List[MQTTHandler] = [] + self.logger = logging.getLogger(__name__) + + def register_handler(self, handler: MQTTHandler) -> None: + """ + Register a handler. + + Args: + handler: The handler instance to register. 
+ """ + self.handlers.append(handler) + self.logger.info( + f"Registered handler {handler.__class__.__name__} " + f"for topic pattern: {handler.topic_pattern}" + ) + + def register_handler_class(self, handler_class: Type[MQTTHandler]) -> None: + """ + Register a handler by class. + + Attempts to pass both global configuration and handler-specific + configuration to the handler if available. + + Args: + handler_class: The handler class to instantiate and register. + """ + handler_name = handler_class.__name__ + + # Get global configuration if available + global_config = None + if self.config_manager: + global_config = self.config_manager.get_global_config() + + # Try to get handler-specific configuration + handler_config = None + if self.config_manager: + handler_config = self.config_manager.get_config(handler_name) + + # Prepare constructor arguments + constructor_args = { + "api_factory": self.api_factory, + } + + # Add global_config if the handler accepts it + if global_config: + constructor_args["global_config"] = global_config + + # Add handler-specific configuration if available + if handler_config and handler_config.config: + constructor_args.update(handler_config.config) + + # Instantiate handler with configuration + try: + # First try with all arguments + handler = handler_class(**constructor_args) # type: ignore[arg-type] + if global_config: + self.logger.debug(f"Instantiated {handler_name} with global configuration") + if handler_config: + self.logger.debug( + f"Instantiated {handler_name} with handler configuration: {handler_config.config}" + ) + except TypeError as e: + # Handler might not accept all parameters, try with just what it accepts + self.logger.debug( + f"Handler {handler_name} does not accept all configuration parameters: {e}" + ) + + # Get the handler's __init__ signature + import inspect + + sig = inspect.signature(handler_class.__init__) + accepted_params = set(sig.parameters.keys()) - {"self"} + + # Filter constructor args to only what 
the handler accepts + filtered_args = {k: v for k, v in constructor_args.items() if k in accepted_params} + + try: + handler = handler_class(**filtered_args) # type: ignore[arg-type] + self.logger.debug( + f"Instantiated {handler_name} with filtered parameters: {filtered_args.keys()}" + ) + except Exception as e2: + # Fall back to minimal instantiation + self.logger.warning( + f"Failed to instantiate {handler_name} with configuration, " + f"falling back to minimal instantiation: {e2}" + ) + handler = handler_class() + + self.register_handler(handler) + + def load_handlers_from_directory(self, directory: Path) -> None: + """ + Dynamically load handlers from a directory. + + Looks for Python files and packages in the directory and imports any classes + that inherit from MQTTHandler. + + Supports: + - Single Python files (*.py) + - Python packages (directories with __init__.py) + + Args: + directory: Path to the directory containing handler modules. + """ + directory = Path(directory) + if not directory.exists(): + self.logger.warning(f"Handler directory does not exist: {directory}") + return + + # Load handlers from Python files + for py_file in directory.glob("*.py"): + if py_file.name.startswith("_"): + continue + + module_name = py_file.stem + try: + spec = importlib.util.spec_from_file_location(module_name, py_file) + if spec is None or spec.loader is None: + continue + + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + + # Find all MQTTHandler subclasses in the module + for name, obj in inspect.getmembers(module): + if ( + inspect.isclass(obj) + and issubclass(obj, MQTTHandler) + and obj is not MQTTHandler + ): + self.register_handler_class(obj) + self.logger.info(f"Loaded handler from {py_file}: {name}") + + except Exception as e: + self.logger.error(f"Failed to load handlers from {py_file}: {e}") + + # Load handlers from Python packages (directories with __init__.py) + for item in directory.iterdir(): + if not item.is_dir() or 
item.name.startswith("_") or item.name.startswith("."): + continue + + init_file = item / "__init__.py" + if not init_file.exists(): + continue + + module_name = item.name + try: + # Add parent directory to sys.path temporarily + import sys + + old_path = sys.path.copy() + sys.path.insert(0, str(directory)) + + try: + # Import the package + module = importlib.import_module(module_name) + + # Find all MQTTHandler subclasses in the package + for name, obj in inspect.getmembers(module): + if ( + inspect.isclass(obj) + and issubclass(obj, MQTTHandler) + and obj is not MQTTHandler + ): + self.register_handler_class(obj) + self.logger.info(f"Loaded handler from package {item}: {name}") + + finally: + # Restore original sys.path + sys.path = old_path + + except Exception as e: + self.logger.error(f"Failed to load handlers from package {item}: {e}") + + async def handle_message(self, message: MQTTMessage) -> None: + """ + Route a message to all matching handlers. + + For each matching handler, attempts to route to a specific event handler + method (e.g., handle_log_entry_add) if available, otherwise calls the + generic handle() method. + + Specific event handlers receive parsed event objects (e.g., LogEntryAddEvent) + instead of raw MQTT messages. + + Args: + message: The MQTT message to handle. 
+ """ + matching_handlers = [h for h in self.handlers if h.topic_matches(message.topic)] + + if not matching_handlers: + self.logger.debug(f"No handlers found for topic: {message.topic}") + return + + # Get event type for this topic + event_type = EventType.from_topic(message.topic) + + for handler in matching_handlers: + try: + # Try to route to specific event handler + if event_type: + handler_method_name = event_type.handler_method_name + if hasattr(handler, handler_method_name): + handler_method = getattr(handler, handler_method_name) + if callable(handler_method): + self.logger.debug( + f"Routing {message.topic} to " + f"{handler.__class__.__name__}.{handler_method_name}" + ) + # Parse message into typed event object + event = MQTTHandler._parse_event(event_type, message) + await handler_method(event) + continue + + # Fall back to generic handle method + self.logger.debug( + f"Routing {message.topic} to " f"{handler.__class__.__name__}.handle" + ) + await handler.handle(message) + except Exception as e: + self.logger.error( + f"Error in handler {handler.__class__.__name__}: {e}", + exc_info=True, + ) + + def get_handlers_for_topic(self, topic: str) -> List[MQTTHandler]: + """ + Get all handlers that match a topic. + + Args: + topic: The MQTT topic. + + Returns: + List of matching handlers. 
+ """ + return [h for h in self.handlers if h.topic_matches(topic)] diff --git a/tools/developer_tools/bely-mqtt-message-broker/tests/__init__.py b/tools/developer_tools/bely-mqtt-message-broker/tests/__init__.py new file mode 100644 index 000000000..f8d9483e4 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/tests/__init__.py @@ -0,0 +1 @@ +"""Tests for BELY MQTT Framework.""" diff --git a/tools/developer_tools/bely-mqtt-message-broker/tests/test_models.py b/tools/developer_tools/bely-mqtt-message-broker/tests/test_models.py new file mode 100644 index 000000000..9645d0de9 --- /dev/null +++ b/tools/developer_tools/bely-mqtt-message-broker/tests/test_models.py @@ -0,0 +1,213 @@ +"""Tests for BELY event models.""" + +from datetime import datetime + +import pytest + +from bely_mqtt.models import ( + CoreEvent, + LogEntryAddEvent, + LogEntryDeleteEvent, + LogEntryReplyDeleteEvent, + LogbookInfo, +) + + +@pytest.fixture +def sample_log_entry_add_payload(): + """Sample log entry add event payload.""" + return { + "description": "log entry was added", + "eventTimestamp": "2025-11-21T16:38:06.157+00:00", + "parentLogDocumentInfo": { + "name": "[2025/11/21/2] New Document", + "id": 105, + "lastModifiedOnDateTime": "2025-11-21T16:37:42.000+00:00", + "createdByUsername": "logr", + "lastModifiedByUsername": "logr", + "enteredOnDateTime": "2025-11-21T16:37:42.000+00:00", + "ownerUsername": "logr", + "ownerUserGroupName": "LOGR_ADMIN", + }, + "logInfo": { + "id": 267, + "lastModifiedOnDateTime": "2025-11-21T16:38:00.392+00:00", + "lastModifiedByUsername": "logr", + "enteredByUsername": "logr", + "enteredOnDateTime": "2025-11-21T16:38:00.392+00:00", + }, + "logbookList": [{"name": "studies-sr", "id": 7, "displayName": "SR"}], + "textDiff": "New Log Entry Added", + "entityName": "Log", + "entityId": 267, + "eventTriggedByUsername": "logr", + } + + +@pytest.fixture +def sample_simple_event_payload(): + """Sample simple event payload.""" + return { + "description": 
"Add action completed", + "eventTimestamp": "2025-11-21T16:37:52.537+00:00", + "entityName": "ItemDomainLogbook", + "entityId": 105, + "eventTriggedByUsername": "logr", + } + + +def test_core_event_parsing(sample_simple_event_payload): + """Test parsing of core event.""" + event = CoreEvent(**sample_simple_event_payload) + + assert event.description == "Add action completed" + assert event.entity_name == "ItemDomainLogbook" + assert event.entity_id == 105 + assert event.event_triggered_by_username == "logr" + assert isinstance(event.event_timestamp, datetime) + + +def test_log_entry_add_event_parsing(sample_log_entry_add_payload): + """Test parsing of log entry add event.""" + event = LogEntryAddEvent(**sample_log_entry_add_payload) + + assert event.description == "log entry was added" + assert event.log_info.id == 267 + assert event.parent_log_document_info.id == 105 + assert event.parent_log_document_info.name == "[2025/11/21/2] New Document" + assert len(event.logbook_list) == 1 + assert event.logbook_list[0].display_name == "SR" + assert event.text_diff == "New Log Entry Added" + assert event.event_triggered_by_username == "logr" + + +def test_logbook_info_parsing(): + """Test parsing of logbook info.""" + data = {"name": "studies-sr", "id": 7, "displayName": "SR"} + logbook = LogbookInfo(**data) + + assert logbook.name == "studies-sr" + assert logbook.id == 7 + assert logbook.display_name == "SR" + + +def test_event_timestamp_parsing(sample_simple_event_payload): + """Test that event timestamp is properly parsed.""" + event = CoreEvent(**sample_simple_event_payload) + + assert isinstance(event.event_timestamp, datetime) + assert event.event_timestamp.year == 2025 + assert event.event_timestamp.month == 11 + assert event.event_timestamp.day == 21 + + +def test_alias_field_mapping(sample_simple_event_payload): + """Test that aliased fields are properly mapped.""" + event = CoreEvent(**sample_simple_event_payload) + + # Check that snake_case attributes work + 
assert event.event_triggered_by_username == "logr" + assert event.entity_name == "ItemDomainLogbook" + assert event.entity_id == 105 + + +@pytest.fixture +def sample_log_entry_delete_payload(): + """Sample log entry delete event payload.""" + return { + "description": "log entry was deleted", + "eventTimestamp": "2025-11-21T16:40:00.000+00:00", + "parentLogDocumentInfo": { + "name": "[2025/11/21/2] New Document", + "id": 105, + "lastModifiedOnDateTime": "2025-11-21T16:38:20.000+00:00", + "createdByUsername": "logr", + "lastModifiedByUsername": "logr", + "enteredOnDateTime": "2025-11-21T16:37:42.000+00:00", + "ownerUsername": "logr", + "ownerUserGroupName": "LOGR_ADMIN", + }, + "logInfo": { + "id": 267, + "lastModifiedOnDateTime": "2025-11-21T16:40:00.000+00:00", + "lastModifiedByUsername": "logr", + "enteredByUsername": "logr", + "enteredOnDateTime": "2025-11-21T16:38:00.000+00:00", + }, + "logbookList": [{"name": "studies-sr", "id": 7, "displayName": "SR"}], + "textDiff": "Log Entry Deleted", + "entityName": "Log", + "entityId": 267, + "eventTriggedByUsername": "logr", + } + + +@pytest.fixture +def sample_log_entry_reply_delete_payload(): + """Sample log entry reply delete event payload.""" + return { + "description": "reply log entry was deleted", + "eventTimestamp": "2025-11-21T16:41:00.000+00:00", + "parentLogDocumentInfo": { + "name": "[2025/11/21/2] New Document", + "id": 105, + "lastModifiedOnDateTime": "2025-11-21T16:40:00.000+00:00", + "createdByUsername": "logr", + "lastModifiedByUsername": "logr", + "enteredOnDateTime": "2025-11-21T16:37:42.000+00:00", + "ownerUsername": "logr", + "ownerUserGroupName": "LOGR_ADMIN", + }, + "logInfo": { + "id": 268, + "lastModifiedOnDateTime": "2025-11-21T16:41:00.000+00:00", + "lastModifiedByUsername": "logr", + "enteredByUsername": "logr", + "enteredOnDateTime": "2025-11-21T16:38:16.000+00:00", + }, + "logbookList": [{"name": "studies-sr", "id": 7, "displayName": "SR"}], + "textDiff": "Reply Deleted", + "parentLogInfo": 
{ + "id": 267, + "lastModifiedOnDateTime": "2025-11-21T16:38:14.000+00:00", + "lastModifiedByUsername": "logr", + "enteredByUsername": "logr", + "enteredOnDateTime": "2025-11-21T16:38:00.000+00:00", + }, + "entityName": "Log", + "entityId": 268, + "eventTriggedByUsername": "logr", + } + + +def test_log_entry_delete_event_parsing(sample_log_entry_delete_payload): + """Test parsing of log entry delete event.""" + event = LogEntryDeleteEvent(**sample_log_entry_delete_payload) + + assert event.description == "log entry was deleted" + assert event.log_info.id == 267 + assert event.parent_log_document_info.id == 105 + assert event.parent_log_document_info.name == "[2025/11/21/2] New Document" + assert len(event.logbook_list) == 1 + assert event.logbook_list[0].display_name == "SR" + assert event.text_diff == "Log Entry Deleted" + assert event.entity_id == 267 + assert event.event_triggered_by_username == "logr" + assert isinstance(event.event_timestamp, datetime) + + +def test_log_entry_reply_delete_event_parsing(sample_log_entry_reply_delete_payload): + """Test parsing of log entry reply delete event.""" + event = LogEntryReplyDeleteEvent(**sample_log_entry_reply_delete_payload) + + assert event.description == "reply log entry was deleted" + assert event.log_info.id == 268 + assert event.parent_log_info.id == 267 + assert event.parent_log_document_info.id == 105 + assert event.parent_log_document_info.name == "[2025/11/21/2] New Document" + assert len(event.logbook_list) == 1 + assert event.logbook_list[0].display_name == "SR" + assert event.text_diff == "Reply Deleted" + assert event.entity_id == 268 + assert event.event_triggered_by_username == "logr" + assert isinstance(event.event_timestamp, datetime) diff --git a/tools/developer_tools/bely-mqtt-message-broker/tests/test_plugin.py b/tools/developer_tools/bely-mqtt-message-broker/tests/test_plugin.py new file mode 100644 index 000000000..1680d9d8a --- /dev/null +++ 
b/tools/developer_tools/bely-mqtt-message-broker/tests/test_plugin.py @@ -0,0 +1,157 @@ +"""Tests for plugin system.""" + +import pytest + +from bely_mqtt.models import MQTTMessage +from bely_mqtt.plugin import MQTTHandler, PluginManager + + +class MockHandler(MQTTHandler): + """Test handler for testing.""" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.handled_messages = [] + + @property + def topic_pattern(self) -> str: + return "test/topic" + + async def handle(self, message: MQTTMessage) -> None: + self.handled_messages.append(message) + + +class WildcardHandler(MQTTHandler): + """Handler with wildcard pattern.""" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.handled_messages = [] + + @property + def topic_pattern(self) -> str: + return "test/+" + + async def handle(self, message: MQTTMessage) -> None: + self.handled_messages.append(message) + + +class MultiLevelHandler(MQTTHandler): + """Handler with multi-level wildcard.""" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.handled_messages = [] + + @property + def topic_pattern(self) -> str: + return "test/#" + + async def handle(self, message: MQTTMessage) -> None: + self.handled_messages.append(message) + + +def test_topic_matching(): + """Test MQTT topic pattern matching.""" + handler = MockHandler() + + assert handler.topic_matches("test/topic") + assert not handler.topic_matches("test/other") + assert not handler.topic_matches("other/topic") + + +def test_single_level_wildcard(): + """Test single-level wildcard matching.""" + handler = WildcardHandler() + + assert handler.topic_matches("test/topic") + assert handler.topic_matches("test/other") + assert not handler.topic_matches("test/topic/sub") + assert not handler.topic_matches("other/topic") + + +def test_multi_level_wildcard(): + """Test multi-level wildcard matching.""" + handler = MultiLevelHandler() + + assert 
handler.topic_matches("test/topic") + assert handler.topic_matches("test/topic/sub") + assert handler.topic_matches("test/topic/sub/deep") + assert not handler.topic_matches("other/topic") + + +@pytest.mark.asyncio +async def test_plugin_manager_registration(): + """Test handler registration.""" + manager = PluginManager() + handler = MockHandler() + + manager.register_handler(handler) + + assert len(manager.handlers) == 1 + assert manager.handlers[0] is handler + + +@pytest.mark.asyncio +async def test_plugin_manager_message_routing(): + """Test message routing to handlers.""" + manager = PluginManager() + handler = MockHandler() + manager.register_handler(handler) + + message = MQTTMessage(topic="test/topic", payload={"test": "data"}) + await manager.handle_message(message) + + assert len(handler.handled_messages) == 1 + assert handler.handled_messages[0] is message + + +@pytest.mark.asyncio +async def test_plugin_manager_no_matching_handlers(): + """Test that no error occurs when no handlers match.""" + manager = PluginManager() + handler = MockHandler() + manager.register_handler(handler) + + message = MQTTMessage(topic="other/topic", payload={"test": "data"}) + # Should not raise + await manager.handle_message(message) + + assert len(handler.handled_messages) == 0 + + +@pytest.mark.asyncio +async def test_plugin_manager_multiple_handlers(): + """Test routing to multiple matching handlers.""" + manager = PluginManager() + handler1 = MockHandler() + handler2 = MockHandler() + manager.register_handler(handler1) + manager.register_handler(handler2) + + message = MQTTMessage(topic="test/topic", payload={"test": "data"}) + await manager.handle_message(message) + + assert len(handler1.handled_messages) == 1 + assert len(handler2.handled_messages) == 1 + + +def test_get_handlers_for_topic(): + """Test getting handlers for a specific topic.""" + manager = PluginManager() + handler1 = MockHandler() + handler2 = WildcardHandler() + handler3 = MultiLevelHandler() + + 
manager.register_handler(handler1) + manager.register_handler(handler2) + manager.register_handler(handler3) + + handlers = manager.get_handlers_for_topic("test/topic") + assert len(handlers) == 3 + + handlers = manager.get_handlers_for_topic("test/other") + assert len(handlers) == 2 # WildcardHandler and MultiLevelHandler + + handlers = manager.get_handlers_for_topic("other/topic") + assert len(handlers) == 0 diff --git a/tools/developer_tools/python-client/BelyApiFactory.py b/tools/developer_tools/python-client/BelyApiFactory.py index 693dc18ef..6dda4da04 100755 --- a/tools/developer_tools/python-client/BelyApiFactory.py +++ b/tools/developer_tools/python-client/BelyApiFactory.py @@ -6,94 +6,113 @@ import os from belyApi import Configuration, ApiClient -from belyApi import DomainApi, DownloadsApi, UsersApi, PropertyValueApi, SystemLogApi, SearchApi, AuthenticationApi, LogbookApi +from belyApi import ( + DomainApi, + DownloadsApi, + UsersApi, + PropertyValueApi, + SystemLogApi, + SearchApi, + AuthenticationApi, + LogbookApi, + NotificationConfigurationApi, +) from belyApi import ApiExceptionMessage + class BelyApiFactory: - HEADER_TOKEN_KEY = "token" - URL_FORMAT = "%s/views/item/view?id=%s" + HEADER_TOKEN_KEY = "token" + URL_FORMAT = "%s/views/item/view?id=%s" + + LOGBOOK_DOMAIN_ID = 1 + + def __init__(self, bely_url): + self.bely_url = bely_url + self.config = Configuration(host=self.bely_url) + self.api_client = ApiClient(configuration=self.config) - LOGBOOK_DOMAIN_ID = 1 - + self.logbook_api = LogbookApi(api_client=self.api_client) + self.notification_configuration_api = NotificationConfigurationApi( + api_client=self.api_client + ) - def __init__(self, bely_url): - self.bely_url = bely_url - self.config = Configuration(host=self.bely_url) - self.api_client = ApiClient(configuration=self.config) + self.downloadsApi = DownloadsApi(api_client=self.api_client) + self.propertyValueApi = PropertyValueApi(api_client=self.api_client) + self.usersApi = 
UsersApi(api_client=self.api_client) + self.domainApi = DomainApi(api_client=self.api_client) - self.logbook_api = LogbookApi(api_client=self.api_client) + self.systemlogApi = SystemLogApi(api_client=self.api_client) + self.searchApi = SearchApi(api_client=self.api_client) - self.downloadsApi = DownloadsApi(api_client=self.api_client) - self.propertyValueApi = PropertyValueApi(api_client=self.api_client) - self.usersApi = UsersApi(api_client=self.api_client) - self.domainApi = DomainApi(api_client=self.api_client) - - self.systemlogApi = SystemLogApi(api_client=self.api_client) - self.searchApi = SearchApi(api_client=self.api_client) + self.auth_api = AuthenticationApi(api_client=self.api_client) - self.auth_api = AuthenticationApi(api_client=self.api_client) + def get_lobook_api(self) -> LogbookApi: + return self.logbook_api - def get_lobook_api(self) -> LogbookApi: - return self.logbook_api + def getDomainApi(self) -> DomainApi: + return self.domainApi - def getDomainApi(self) -> DomainApi: - return self.domainApi + def getDownloadApi(self) -> DownloadsApi: + return self.downloadsApi - def getDownloadApi(self) -> DownloadsApi: - return self.downloadsApi + def getPropertyValueApi(self) -> PropertyValueApi: + return self.propertyValueApi - def getPropertyValueApi(self) -> PropertyValueApi: - return self.propertyValueApi + def getUsersApi(self) -> UsersApi: + return self.usersApi - def getUsersApi(self) -> UsersApi: - return self.usersApi + def getSearchApi(self) -> SearchApi: + return self.searchApi - def getSearchApi(self) -> SearchApi: - return self.searchApi + def getNotificationConfigurationApi(self): + return self.notification_configuration_api - def generateCDBUrlForItemId(self, itemId): - return self.URL_FORMAT % (self.cdbUrl, str(itemId)) + def generateCDBUrlForItemId(self, itemId): + return self.URL_FORMAT % (self.cdbUrl, str(itemId)) - def authenticate_user(self, username, password): - response = 
self.auth_api.authenticate_user_with_http_info(username=username, password=password) + def authenticate_user(self, username, password): + response = self.auth_api.authenticate_user_with_http_info( + username=username, password=password + ) - token = response[-1][self.HEADER_TOKEN_KEY] - self.__set_authenticate_token(token) + token = response[-1][self.HEADER_TOKEN_KEY] + self.__set_authenticate_token(token) - def __set_authenticate_token(self, token): - self.api_client.set_default_header(self.HEADER_TOKEN_KEY, token) + def __set_authenticate_token(self, token): + self.api_client.set_default_header(self.HEADER_TOKEN_KEY, token) - def getAuthenticateToken(self): - return self.apiClient.default_headers[self.HEADER_TOKEN_KEY] + def getAuthenticateToken(self): + return self.apiClient.default_headers[self.HEADER_TOKEN_KEY] - def test_authenticated(self): - self.auth_api.verify_authenticated() + def test_authenticated(self): + self.auth_api.verify_authenticated() - def logout_user(self): - self.auth_api.log_out() + def logout_user(self): + self.auth_api.log_out() - # Restore later - # @classmethod - # def createFileUploadObject(cls, filePath): - # data = open(filePath, "rb").read() - # b64String = base64.b64encode(data).decode() + # Restore later + # @classmethod + # def createFileUploadObject(cls, filePath): + # data = open(filePath, "rb").read() + # b64String = base64.b64encode(data).decode() - # fileName = os.path.basename(filePath) - # return FileUploadObject(file_name=fileName, base64_binary=b64String) + # fileName = os.path.basename(filePath) + # return FileUploadObject(file_name=fileName, base64_binary=b64String) + + def parseApiException(self, openApiException): + responseType = ApiExceptionMessage.__name__ + openApiException.data = openApiException.body + exObj = self.apiClient.deserialize(openApiException, responseType) + exObj.status = openApiException.status + return exObj - def parseApiException(self, openApiException): - responseType = 
ApiExceptionMessage.__name__ - openApiException.data = openApiException.body - exObj = self.apiClient.deserialize(openApiException, responseType) - exObj.status = openApiException.status - return exObj # def run_command(): - # Example +# Example # print("\nEnter cdb URL (ex: https://cdb.aps.anl.gov/cdb): ") # hostname = input() - + # apiFactory = CdbApiFactory(hostname) # itemApi = apiFactory.getItemApi() @@ -129,6 +148,6 @@ def parseApiException(self, openApiException): # print("Success!") -if __name__ == '__main__': - pass +if __name__ == "__main__": + pass # run_command() diff --git a/tools/developer_tools/python-client/setup-api.py b/tools/developer_tools/python-client/setup-api.py index 6d9925f07..9dbfea59a 100644 --- a/tools/developer_tools/python-client/setup-api.py +++ b/tools/developer_tools/python-client/setup-api.py @@ -7,8 +7,8 @@ from setuptools import setup -setup(name='BELY-API', - version='2024.6', +setup(name='bely_api', + version='2025.3.dev0', packages=["belyApi", "belyApi.api", "belyApi.models"],