pax_global_header00006660000000000000000000000064147723177000014522gustar00rootroot0000000000000052 comment=38eecaa9a59dbf52feb1491f10941af95ccaf03f apprise-1.9.3/000077500000000000000000000000001477231770000131775ustar00rootroot00000000000000apprise-1.9.3/.coveragerc000066400000000000000000000004431477231770000153210ustar00rootroot00000000000000[run] data_file = .coverage-reports/.coverage branch = True parallel = True source = apprise [paths] source = apprise .tox/*/lib/python*/site-packages/apprise .tox/pypy/site-packages/apprise [report] show_missing = True skip_covered = True skip_empty = True fail_under = 95.0 apprise-1.9.3/.env000066400000000000000000000000641477231770000137700ustar00rootroot00000000000000LANG=C.UTF-8 PYTHONPATH=. PYTHONDONTWRITEBYTECODE=1 apprise-1.9.3/.github/000077500000000000000000000000001477231770000145375ustar00rootroot00000000000000apprise-1.9.3/.github/FUNDING.yml000066400000000000000000000000751477231770000163560ustar00rootroot00000000000000github: caronc custom: ['https://www.paypal.me/lead2gold', ] apprise-1.9.3/.github/ISSUE_TEMPLATE/000077500000000000000000000000001477231770000167225ustar00rootroot00000000000000apprise-1.9.3/.github/ISSUE_TEMPLATE/1_bug_report.md000066400000000000000000000012551477231770000216370ustar00rootroot00000000000000--- name: 🐛 Bug Report about: Report any errors and problems title: '' labels: 'bug' assignees: '' --- :mega: **Notification Service(s) Impacted** :lady_beetle: **Describe the bug** :bulb: **Screenshots and Logs** :computer: **Your System Details:** - OS: [e.g. RedHat v8.0] - Python Version: [e.g. Python v2.7] :crystal_ball: **Additional context** Add any other context about the problem here. apprise-1.9.3/.github/ISSUE_TEMPLATE/2_enhancement_request.md000066400000000000000000000005551477231770000235270ustar00rootroot00000000000000--- name: 💡 Enhancement Request about: Got a great idea? Let us know! title: '' labels: 'enhancement' assignees: '' --- :bulb: **The Idea** :hammer: **Breaking Feature** apprise-1.9.3/.github/ISSUE_TEMPLATE/3_new-notification-request.md000066400000000000000000000013621477231770000244330ustar00rootroot00000000000000--- name: 📣 New Notification Request about: Suggest a new notification service that you'd like to see Apprise support title: '' labels: ['enhancement', 'new-notification'] assignees: '' --- :loudspeaker: **What is the name of the service?** :link: **Provide some details about the service that can help with it's development.** - Homepage: - API Reference: :bulb: **Anything else?** apprise-1.9.3/.github/ISSUE_TEMPLATE/4_question.md000066400000000000000000000002741477231770000213410ustar00rootroot00000000000000--- name: ❓ Support Question about: Ask a question about Apprise title: '' labels: 'question' assignees: '' --- :question: **Question** apprise-1.9.3/.github/PULL_REQUEST_TEMPLATE.md000066400000000000000000000025671477231770000203520ustar00rootroot00000000000000## Description: **Related issue (if applicable):** # ## New Service Completion Status * [ ] apprise/plugins/.py * [ ] KEYWORDS - add new service into this file (alphabetically). * [ ] README.md - add entry for new service to table (as a quick reference) * [ ] packaging/redhat/python-apprise.spec - add new service into the `%global common_description` ## Checklist * [ ] The code change is tested and works locally. * [ ] There is no commented out code in this PR. 
* [ ] No lint errors (use `flake8`) * [ ] 100% test coverage ## Testing Anyone can help test this source code as follows: ```bash # Create a virtual environment to work in as follows: python3 -m venv apprise # Change into our new directory cd apprise # Activate our virtual environment source bin/activate # Install the branch pip install git+https://github.com/caronc/apprise.git@ # Test out the changes with the following command: apprise -t "Test Title" -b "Test Message" \ ``` apprise-1.9.3/.github/workflows/000077500000000000000000000000001477231770000165745ustar00rootroot00000000000000apprise-1.9.3/.github/workflows/codeql-analysis.yml000066400000000000000000000020771477231770000224150ustar00rootroot00000000000000name: "CodeQL" on: push: branches: [ master ] pull_request: branches: [ master ] schedule: - cron: '42 15 * * 5' # Cancel in-progress jobs when pushing to the same branch. concurrency: cancel-in-progress: true group: ${{ github.workflow }}-${{ github.ref }} jobs: analyze: name: Analyze runs-on: ubuntu-latest permissions: actions: read contents: read security-events: write strategy: fail-fast: false matrix: language: [ 'python' ] steps: - name: Checkout repository uses: actions/checkout@v4 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL uses: github/codeql-action/init@v2 with: languages: ${{ matrix.language }} # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild uses: github/codeql-action/autobuild@v2 - name: Perform CodeQL Analysis uses: github/codeql-action/analyze@v2 apprise-1.9.3/.github/workflows/pkgbuild.yml000066400000000000000000000025121477231770000211200ustar00rootroot00000000000000# # Verify on CI/GHA that package building works. # # TODO: Currently, this supports RPM-based systems only. # By adjusting a few details, it can be made work # for other distribution types as well. # name: Test packaging on: # On which repository actions to trigger the build. push: branches: [ master ] pull_request: branches: [ master ] # Allow job to be triggered manually. workflow_dispatch: # Cancel in-progress jobs when pushing to the same branch. concurrency: cancel-in-progress: true group: ${{ github.workflow }}-${{ github.ref }} jobs: tests: runs-on: "ubuntu-latest" strategy: # Run all jobs to completion (false), or cancel # all jobs once the first one fails (true). fail-fast: false # Define a list of build targets. The labels should match the # items within the `services` section of `docker-compose.yml`. matrix: target: [ "rpmbuild.el9", ] defaults: run: shell: bash name: Target ${{ matrix.target }} steps: - name: Acquire sources uses: actions/checkout@v4 - name: Build package run: | docker compose run --user root --rm ${{ matrix.target }} build-rpm.sh - name: Verify package has been produced run: | ls -alF dist/rpm/noarch/*.noarch.rpm apprise-1.9.3/.github/workflows/tests.yml000066400000000000000000000074371477231770000204740ustar00rootroot00000000000000name: Tests on: # On which repository actions to trigger the build. push: branches: [ master ] pull_request: branches: [ master ] # Allow job to be triggered manually. workflow_dispatch: # Cancel in-progress jobs when pushing to the same branch. concurrency: cancel-in-progress: true group: ${{ github.workflow }}-${{ github.ref }} jobs: tests: runs-on: ${{ matrix.os }} strategy: # Run all jobs to completion (false), or cancel # all jobs once the first one fails (true). 
fail-fast: true # Define a minimal test matrix, it will be # expanded using subsequent `include` items. matrix: os: ["ubuntu-latest"] python-version: ["3.11"] bare: [false] include: # Within the `bare` environment, `all-plugin-requirements.txt` will NOT be # installed, to verify the application also works without those dependencies. - os: "ubuntu-latest" python-version: "3.11" bare: true # Let's save resources and only build a single slot on macOS- and Windows. - os: "macos-latest" python-version: "3.11" - os: "windows-latest" python-version: "3.11" # Test more available versions of CPython on Linux. - os: "ubuntu-latest" python-version: "3.8" - os: "ubuntu-latest" python-version: "3.9" - os: "ubuntu-latest" python-version: "3.10" - os: "ubuntu-latest" python-version: "3.11" - os: "ubuntu-latest" python-version: "3.12" defaults: run: shell: bash env: OS: ${{ matrix.os }} PYTHON: ${{ matrix.python-version }} BARE: ${{ matrix.bare }} name: Python ${{ matrix.python-version }} on ${{ matrix.os }} ${{ matrix.bare && '(bare)' || '' }} steps: - name: Acquire sources uses: actions/checkout@v4 - name: Install prerequisites (Linux) if: runner.os == 'Linux' run: | sudo apt-get update sudo apt-get install libdbus-1-dev - name: Setup Python uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} architecture: x64 cache: 'pip' cache-dependency-path: | setup.py requirements.txt dev-requirements.txt all-plugin-requirements.txt win-requirements.txt - name: Install project dependencies (Baseline) run: | pip install --use-pep517 wheel pip install --use-pep517 -r requirements.txt -r dev-requirements.txt - name: Install project dependencies (All plugins) if: matrix.bare != true run: | pip install --use-pep517 -r all-plugin-requirements.txt - name: Install project dependencies (Windows) if: runner.os == 'Windows' run: | pip install --use-pep517 -r win-requirements.txt || true # Install package in editable mode, # and run project-specific tasks. - name: Setup project run: | python -m pip install --upgrade pip setuptools wheel pip install --use-pep517 --editable=. python setup.py compile_catalog # For saving resources, code style checking is # only invoked within the `bare` environment. - name: Check code style if: matrix.bare == true run: | flake8 . --count --show-source --statistics - name: Run tests run: | coverage run -m pytest - name: Process coverage data run: | coverage combine coverage xml coverage report - name: Upload coverage data uses: codecov/codecov-action@v4 with: files: ./coverage.xml fail_ci_if_error: false token: ${{ secrets.CODECOV_TOKEN }} apprise-1.9.3/.gitignore000066400000000000000000000013631477231770000151720ustar00rootroot00000000000000# Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # vi swap files .*.sw? 
# Distribution / packaging .Python env/ .venv* build/ BUILDROOT/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib64/ parts/ sdist/ *.egg-info/ .installed.cfg *.egg .local # Generated from Docker Instance .bash_history .python_history # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *,cover .hypothesis/ # Translations *.mo # Django stuff: *.log # Sphinx documentation docs/_build/ # PyBuilder target/ #Ipython Notebook .ipynb_checkpoints #PyCharm .idea #PyDev (Eclipse) .project .pydevproject .settings .DS_Store apprise-1.9.3/.vscode/000077500000000000000000000000001477231770000145405ustar00rootroot00000000000000apprise-1.9.3/.vscode/settings.json000066400000000000000000000004051477231770000172720ustar00rootroot00000000000000{ "python.testing.pytestArgs": [], "python.testing.cwd": "${workspaceFolder}", "python.testing.unittestEnabled": false, "python.testing.pytestEnabled": true, "terminal.integrated.env.linux": { "PYTHONPATH": "${workspaceFolder}" } }apprise-1.9.3/CONTRIBUTIONS.md000066400000000000000000000021531477231770000155640ustar00rootroot00000000000000# Contributions to the apprise project ## Creator & Maintainer * Chris Caron ## Contributors The following users have contributed to this project and their deserved recognition has been identified here. If you have contributed and wish to be acknowledged for it, the syntax is as follows: ``` * [Your name or handle] <[email or website]> * [Month Year] - [Brief summary of your contribution] ``` The contributors have been listed in chronological order: * Wim de With * Dec 2018 - Added Matrix Support * Hitesh Sondhi * Mar 2019 - Added Flock Support * Andreas Motl * Mar 2020 - Fix XMPP Support * Oct 2022 - Drop support for Python 2 * Oct 2022 - Add support for Python 3.11 * Oct 2022 - Improve efficiency of NotifyEmail * Joey Espinosa <@particledecay> * Apr 3rd 2022 - Added Ntfy Support * Kate Ward * 6th Feb 2024 - Add Revolt Support * Han Wang * Apr 2024 - Refactored test cases * Toni Wells <@isometimescode> * May 2024 - Fixed token length with apprise:// apprise-1.9.3/KEYWORDS000066400000000000000000000020141477231770000143660ustar00rootroot00000000000000Africas Talking Alerts Apprise API Automated Packet Reporting System AWS Bark BlueSky BulkSMS BulkVS Burst SMS Chanify Chat CLI ClickSend D7Networks Dapnet DBus DingTalk Discord Email Emby Enigma2 FCM Feishu Flock Form Free Mobile Gnome Google Chat Gotify Growl Guilded Home Assistant httpSMS IFTTT Join JSON Kavenegar KODI Kumulos LaMetric Line LunaSea MacOSX Mailgun Mastodon Matrix Mattermost MessageBird Microsoft Misskey MQTT MSG91 MSTeams Nextcloud NextcloudTalk Notica Notifiarr Notifico Ntfy Office365 OneSignal Opsgenie PagerDuty PagerTree ParsePlatform Plivo PopcornNotify Power Automate Prowl PushBullet Pushed Pushjet PushMe Push Notifications Pushover PushSafer Pushy PushDeer Reddit Resend Revolt Rocket.Chat RSyslog Ryver SendGrid ServerChan Seven SES SFR Signal SimplePush Sinch Slack SMSEagle SMS Manager SMTP2Go SNS SparkPost Splunk Streamlabs Stride Synology Chat Syslog Techulus Telegram Threema Gateway Twilio Twist Twitter VictorOps Voipms Vonage Webex WeCom Bot WhatsApp Windows Workflows WxPusher XBMC XML Zulip apprise-1.9.3/LICENSE000066400000000000000000000024771477231770000142160ustar00rootroot00000000000000BSD 2-Clause License Copyright (c) 2025, Chris Caron All rights reserved. 
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. apprise-1.9.3/MANIFEST.in000066400000000000000000000003551477231770000147400ustar00rootroot00000000000000include LICENSE include KEYWORDS include README.md include requirements.txt include win-requirements.txt include dev-requirements.txt recursive-include test * recursive-include packaging * global-exclude *.pyc global-exclude __pycache__ apprise-1.9.3/README.md000066400000000000000000001437401477231770000144670ustar00rootroot00000000000000![Apprise Logo](https://raw.githubusercontent.com/caronc/apprise/master/apprise/assets/themes/default/apprise-logo.png)
**ap·prise** / *verb*
To inform or tell (someone). To make one aware of something.
*Apprise* allows you to send a notification to *almost* all of the most popular *notification* services available to us today such as: Telegram, Discord, Slack, Amazon SNS, Gotify, etc.

* One notification library to rule them all.
* A common and intuitive notification syntax.
* Supports the handling of images and attachments (_to the notification services that will accept them_).
* It's incredibly lightweight.
* Amazing response times because all messages are sent asynchronously.

Developers who wish to provide a notification service no longer need to research each and every one out there. They no longer need to adapt to the new ones that come out thereafter. They just need to include this one library to immediately gain access to almost all of the notification services available to us today.

System Administrators and DevOps who wish to send a notification no longer need to find the right tool for the job. Everything is already wrapped and supported within the `apprise` command line tool (CLI) that ships with this product.

[![Paypal](https://img.shields.io/badge/paypal-donate-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=MHANV39UZNQ5E)
[![Follow](https://img.shields.io/twitter/follow/l2gnux)](https://twitter.com/l2gnux/)<br/>
[![Discord](https://img.shields.io/discord/558793703356104724.svg?colorB=7289DA&label=Discord&logo=Discord&logoColor=7289DA&style=flat-square)](https://discord.gg/MMPeN2D)
[![Python](https://img.shields.io/pypi/pyversions/apprise.svg?style=flat-square)](https://pypi.org/project/apprise/)
[![Build Status](https://github.com/caronc/apprise/actions/workflows/tests.yml/badge.svg)](https://github.com/caronc/apprise/actions/workflows/tests.yml)
[![CodeCov Status](https://codecov.io/github/caronc/apprise/branch/master/graph/badge.svg)](https://codecov.io/github/caronc/apprise)
[![PyPi Downloads](https://img.shields.io/pepy/dt/apprise.svg?style=flat-square)](https://pypi.org/project/apprise/)

# Table of Contents

* [Supported Notifications](#supported-notifications)
  * [Productivity Based Notifications](#productivity-based-notifications)
  * [SMS Notifications](#sms-notifications)
  * [Desktop Notifications](#desktop-notifications)
  * [Email Notifications](#email-notifications)
  * [Custom Notifications](#custom-notifications)
* [Installation](#installation)
* [Command Line Usage](#command-line-usage)
  * [Configuration Files](#cli-configuration-files)
  * [File Attachments](#cli-file-attachments)
  * [Loading Custom Notifications/Hooks](#cli-loading-custom-notificationshooks)
  * [Environment Variables](#cli-environment-variables)
* [Developer API Usage](#developer-api-usage)
  * [Configuration Files](#api-configuration-files)
  * [File Attachments](#api-file-attachments)
  * [Loading Custom Notifications/Hooks](#api-loading-custom-notificationshooks)
* [Persistent Storage](#persistent-storage)
* [More Supported Links and Documentation](#want-to-learn-more)

# Supported Notifications

This section identifies all of the services supported by this library. [Check out the wiki for more information on the supported modules here](https://github.com/caronc/apprise/wiki).

## Productivity Based Notifications

The table below identifies the services this tool supports along with example service URLs you need to use in order to take advantage of them. Click on any of the services listed below to get more details on how you can configure Apprise to access them.

| Notification Service | Service ID | Default Port | Example Syntax |
| -------------------- | ---------- | ------------ | -------------- |
| [Apprise API](https://github.com/caronc/apprise/wiki/Notify_apprise_api) | apprise:// or apprises:// | (TCP) 80 or 443 | apprise://hostname/Token
| [AWS SES](https://github.com/caronc/apprise/wiki/Notify_ses) | ses:// | (TCP) 443 | ses://user@domain/AccessKeyID/AccessSecretKey/RegionName<br/>
ses://user@domain/AccessKeyID/AccessSecretKey/RegionName/email1/email2/emailN | [Bark](https://github.com/caronc/apprise/wiki/Notify_bark) | bark:// | (TCP) 80 or 443 | bark://hostname
bark://hostname/device_key
bark://hostname/device_key1/device_key2/device_keyN
barks://hostname
barks://hostname/device_key
barks://hostname/device_key1/device_key2/device_keyN | [BlueSky](https://github.com/caronc/apprise/wiki/Notify_bluesky) | bluesky:// | (TCP) 443 | bluesky://Handle:AppPw
bluesky://Handle:AppPw/TargetHandle
bluesky://Handle:AppPw/TargetHandle1/TargetHandle2/TargetHandleN | [Chanify](https://github.com/caronc/apprise/wiki/Notify_chanify) | chantify:// | (TCP) 443 | chantify://token | [Discord](https://github.com/caronc/apprise/wiki/Notify_discord) | discord:// | (TCP) 443 | discord://webhook_id/webhook_token
discord://avatar@webhook_id/webhook_token | [Emby](https://github.com/caronc/apprise/wiki/Notify_emby) | emby:// or embys:// | (TCP) 8096 | emby://user@hostname/
emby://user:password@hostname | [Enigma2](https://github.com/caronc/apprise/wiki/Notify_enigma2) | enigma2:// or enigma2s:// | (TCP) 80 or 443 | enigma2://hostname | [FCM](https://github.com/caronc/apprise/wiki/Notify_fcm) | fcm:// | (TCP) 443 | fcm://project@apikey/DEVICE_ID
fcm://project@apikey/#TOPIC
fcm://project@apikey/DEVICE_ID1/#topic1/#topic2/DEVICE_ID2/ | [Feishu](https://github.com/caronc/apprise/wiki/Notify_feishu) | feishu:// | (TCP) 443 | feishu://token | [Flock](https://github.com/caronc/apprise/wiki/Notify_flock) | flock:// | (TCP) 443 | flock://token
flock://botname@token
flock://app_token/u:userid
flock://app_token/g:channel_id
flock://app_token/u:userid/g:channel_id | [Google Chat](https://github.com/caronc/apprise/wiki/Notify_googlechat) | gchat:// | (TCP) 443 | gchat://workspace/key/token | [Gotify](https://github.com/caronc/apprise/wiki/Notify_gotify) | gotify:// or gotifys:// | (TCP) 80 or 443 | gotify://hostname/token
gotifys://hostname/token?priority=high | [Growl](https://github.com/caronc/apprise/wiki/Notify_growl) | growl:// | (UDP) 23053 | growl://hostname
growl://hostname:portno
growl://password@hostname
growl://password@hostname:port
**Note**: you can also use the get parameter _version_ which can allow the growl request to behave using the older v1.x protocol. An example would look like: growl://hostname?version=1 | [Guilded](https://github.com/caronc/apprise/wiki/Notify_guilded) | guilded:// | (TCP) 443 | guilded://webhook_id/webhook_token
guilded://avatar@webhook_id/webhook_token | [Home Assistant](https://github.com/caronc/apprise/wiki/Notify_homeassistant) | hassio:// or hassios:// | (TCP) 8123 or 443 | hassio://hostname/accesstoken
hassio://user@hostname/accesstoken
hassio://user:password@hostname:port/accesstoken
hassio://hostname/optional/path/accesstoken | [IFTTT](https://github.com/caronc/apprise/wiki/Notify_ifttt) | ifttt:// | (TCP) 443 | ifttt://webhooksID/Event
ifttt://webhooksID/Event1/Event2/EventN
ifttt://webhooksID/Event1/?+Key=Value
ifttt://webhooksID/Event1/?-Key=value1 | [Join](https://github.com/caronc/apprise/wiki/Notify_join) | join:// | (TCP) 443 | join://apikey/device
join://apikey/device1/device2/deviceN/
join://apikey/group
join://apikey/groupA/groupB/groupN
join://apikey/DeviceA/groupA/groupN/DeviceN/ | [KODI](https://github.com/caronc/apprise/wiki/Notify_kodi) | kodi:// or kodis:// | (TCP) 8080 or 443 | kodi://hostname
kodi://user@hostname
kodi://user:password@hostname:port | [Kumulos](https://github.com/caronc/apprise/wiki/Notify_kumulos) | kumulos:// | (TCP) 443 | kumulos://apikey/serverkey | [LaMetric Time](https://github.com/caronc/apprise/wiki/Notify_lametric) | lametric:// | (TCP) 443 | lametric://apikey@device_ipaddr
lametric://apikey@hostname:port
lametric://client_id@client_secret | [Line](https://github.com/caronc/apprise/wiki/Notify_line) | line:// | (TCP) 443 | line://Token@User
line://Token/User1/User2/UserN | [LunaSea](https://github.com/caronc/apprise/wiki/Notify_lunasea) | lunasea:// | (TCP) 80 or 443 | lunasea://user:pass@+FireBaseDevice/
lunasea://user:pass@FireBaseUser/
lunasea://user:pass@hostname/+FireBaseDevice/
lunasea://user:pass@hostname/@FireBaseUser/ | [Mailgun](https://github.com/caronc/apprise/wiki/Notify_mailgun) | mailgun:// | (TCP) 443 | mailgun://user@hostname/apikey
mailgun://user@hostname/apikey/email
mailgun://user@hostname/apikey/email1/email2/emailN
mailgun://user@hostname/apikey/?name="From%20User" | [Mastodon](https://github.com/caronc/apprise/wiki/Notify_mastodon) | mastodon:// or mastodons://| (TCP) 80 or 443 | mastodon://access_key@hostname
mastodon://access_key@hostname/@user
mastodon://access_key@hostname/@user1/@user2/@userN | [Matrix](https://github.com/caronc/apprise/wiki/Notify_matrix) | matrix:// or matrixs:// | (TCP) 80 or 443 | matrix://hostname
matrix://user@hostname
matrixs://user:pass@hostname:port/#room_alias
matrixs://user:pass@hostname:port/!room_id
matrixs://user:pass@hostname:port/#room_alias/!room_id/#room2
matrixs://token@hostname:port/?webhook=matrix
matrix://user:token@hostname/?webhook=slack&format=markdown | [Mattermost](https://github.com/caronc/apprise/wiki/Notify_mattermost) | mmost:// or mmosts:// | (TCP) 8065 | mmost://hostname/authkey
mmost://hostname:80/authkey
mmost://user@hostname:80/authkey
mmost://hostname/authkey?channel=channel
mmosts://hostname/authkey
mmosts://user@hostname/authkey
| [Microsoft Power Automate / Workflows (MSTeams)](https://github.com/caronc/apprise/wiki/Notify_workflows) | workflows:// | (TCP) 443 | workflows://WorkflowID/Signature/ | [Microsoft Teams](https://github.com/caronc/apprise/wiki/Notify_msteams) | msteams:// | (TCP) 443 | msteams://TokenA/TokenB/TokenC/ | [Misskey](https://github.com/caronc/apprise/wiki/Notify_misskey) | misskey:// or misskeys://| (TCP) 80 or 443 | misskey://access_token@hostname | [MQTT](https://github.com/caronc/apprise/wiki/Notify_mqtt) | mqtt:// or mqtts:// | (TCP) 1883 or 8883 | mqtt://hostname/topic
mqtt://user@hostname/topic
mqtts://user:pass@hostname:9883/topic | [Nextcloud](https://github.com/caronc/apprise/wiki/Notify_nextcloud) | ncloud:// or nclouds:// | (TCP) 80 or 443 | ncloud://adminuser:pass@host/User
nclouds://adminuser:pass@host/User1/User2/UserN | [NextcloudTalk](https://github.com/caronc/apprise/wiki/Notify_nextcloudtalk) | nctalk:// or nctalks:// | (TCP) 80 or 443 | nctalk://user:pass@host/RoomId
nctalks://user:pass@host/RoomId1/RoomId2/RoomIdN | [Notica](https://github.com/caronc/apprise/wiki/Notify_notica) | notica:// | (TCP) 443 | notica://Token/ | [Notifiarr](https://github.com/caronc/apprise/wiki/Notify_notifiarr) | notifiarr:// | (TCP) 443 | notifiarr://apikey/#channel
notifiarr://apikey/#channel1/#channel2/#channeln | [Notifico](https://github.com/caronc/apprise/wiki/Notify_notifico) | notifico:// | (TCP) 443 | notifico://ProjectID/MessageHook/ | [ntfy](https://github.com/caronc/apprise/wiki/Notify_ntfy) | ntfy:// | (TCP) 80 or 443 | ntfy://topic/
ntfys://topic/ | [Office 365](https://github.com/caronc/apprise/wiki/Notify_office365) | o365:// | (TCP) 443 | o365://TenantID:AccountEmail/ClientID/ClientSecret
o365://TenantID:AccountEmail/ClientID/ClientSecret/TargetEmail
o365://TenantID:AccountEmail/ClientID/ClientSecret/TargetEmail1/TargetEmail2/TargetEmailN | [OneSignal](https://github.com/caronc/apprise/wiki/Notify_onesignal) | onesignal:// | (TCP) 443 | onesignal://AppID@APIKey/PlayerID
onesignal://TemplateID:AppID@APIKey/UserID
onesignal://AppID@APIKey/#IncludeSegment
onesignal://AppID@APIKey/Email | [Opsgenie](https://github.com/caronc/apprise/wiki/Notify_opsgenie) | opsgenie:// | (TCP) 443 | opsgenie://APIKey
opsgenie://APIKey/UserID
opsgenie://APIKey/#Team
opsgenie://APIKey/\*Schedule
opsgenie://APIKey/^Escalation | [PagerDuty](https://github.com/caronc/apprise/wiki/Notify_pagerduty) | pagerduty:// | (TCP) 443 | pagerduty://IntegrationKey@ApiKey
pagerduty://IntegrationKey@ApiKey/Source/Component | [PagerTree](https://github.com/caronc/apprise/wiki/Notify_pagertree) | pagertree:// | (TCP) 443 | pagertree://integration_id | [ParsePlatform](https://github.com/caronc/apprise/wiki/Notify_parseplatform) | parsep:// or parseps:// | (TCP) 80 or 443 | parsep://AppID:MasterKey@Hostname
parseps://AppID:MasterKey@Hostname | [PopcornNotify](https://github.com/caronc/apprise/wiki/Notify_popcornnotify) | popcorn:// | (TCP) 443 | popcorn://ApiKey/ToPhoneNo
popcorn://ApiKey/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/
popcorn://ApiKey/ToEmail
popcorn://ApiKey/ToEmail1/ToEmail2/ToEmailN/
popcorn://ApiKey/ToPhoneNo1/ToEmail1/ToPhoneNoN/ToEmailN | [Prowl](https://github.com/caronc/apprise/wiki/Notify_prowl) | prowl:// | (TCP) 443 | prowl://apikey
prowl://apikey/providerkey | [PushBullet](https://github.com/caronc/apprise/wiki/Notify_pushbullet) | pbul:// | (TCP) 443 | pbul://accesstoken
pbul://accesstoken/#channel
pbul://accesstoken/A_DEVICE_ID
pbul://accesstoken/email@address.com
pbul://accesstoken/#channel/#channel2/email@address.net/DEVICE | [Pushjet](https://github.com/caronc/apprise/wiki/Notify_pushjet) | pjet:// or pjets:// | (TCP) 80 or 443 | pjet://hostname/secret
pjet://hostname:port/secret
pjets://secret@hostname/secret
pjets://hostname:port/secret | [Push (Techulus)](https://github.com/caronc/apprise/wiki/Notify_techulus) | push:// | (TCP) 443 | push://apikey/ | [Pushed](https://github.com/caronc/apprise/wiki/Notify_pushed) | pushed:// | (TCP) 443 | pushed://appkey/appsecret/
pushed://appkey/appsecret/#ChannelAlias
pushed://appkey/appsecret/#ChannelAlias1/#ChannelAlias2/#ChannelAliasN
pushed://appkey/appsecret/@UserPushedID
pushed://appkey/appsecret/@UserPushedID1/@UserPushedID2/@UserPushedIDN | [PushMe](https://github.com/caronc/apprise/wiki/Notify_pushme) | pushme:// | (TCP) 443 | pushme://Token/ | [Pushover](https://github.com/caronc/apprise/wiki/Notify_pushover) | pover:// | (TCP) 443 | pover://user@token
pover://user@token/DEVICE
pover://user@token/DEVICE1/DEVICE2/DEVICEN
**Note**: you must specify both your user_id and token | [PushSafer](https://github.com/caronc/apprise/wiki/Notify_pushsafer) | psafer:// or psafers:// | (TCP) 80 or 443 | psafer://privatekey
psafers://privatekey/DEVICE
psafer://privatekey/DEVICE1/DEVICE2/DEVICEN | [Pushy](https://github.com/caronc/apprise/wiki/Notify_pushy) | pushy:// | (TCP) 443 | pushy://apikey/DEVICE
pushy://apikey/DEVICE1/DEVICE2/DEVICEN
pushy://apikey/TOPIC
pushy://apikey/TOPIC1/TOPIC2/TOPICN | [PushDeer](https://github.com/caronc/apprise/wiki/Notify_pushdeer) | pushdeer:// or pushdeers:// | (TCP) 80 or 443 | pushdeer://pushKey
pushdeer://hostname/pushKey
pushdeer://hostname:port/pushKey | [Reddit](https://github.com/caronc/apprise/wiki/Notify_reddit) | reddit:// | (TCP) 443 | reddit://user:password@app_id/app_secret/subreddit
reddit://user:password@app_id/app_secret/sub1/sub2/subN | [Resend](https://github.com/caronc/apprise/wiki/Notify_resend) | resend:// | (TCP) 443 | resend://APIToken:FromEmail/
resend://APIToken:FromEmail/ToEmail
resend://APIToken:FromEmail/ToEmail1/ToEmail2/ToEmailN/ | [Revolt](https://github.com/caronc/apprise/wiki/Notify_Revolt) | revolt:// | (TCP) 443 | revolt://bottoken/ChannelID
revolt://bottoken/ChannelID1/ChannelID2/ChannelIDN | [Rocket.Chat](https://github.com/caronc/apprise/wiki/Notify_rocketchat) | rocket:// or rockets:// | (TCP) 80 or 443 | rocket://user:password@hostname/RoomID/Channel<br/>
rockets://user:password@hostname:443/#Channel1/#Channel1/RoomID
rocket://user:password@hostname/#Channel
rocket://webhook@hostname
rockets://webhook@hostname/@User/#Channel | [RSyslog](https://github.com/caronc/apprise/wiki/Notify_rsyslog) | rsyslog:// | (UDP) 514 | rsyslog://hostname
rsyslog://hostname/Facility | [Ryver](https://github.com/caronc/apprise/wiki/Notify_ryver) | ryver:// | (TCP) 443 | ryver://Organization/Token
ryver://botname@Organization/Token | [SendGrid](https://github.com/caronc/apprise/wiki/Notify_sendgrid) | sendgrid:// | (TCP) 443 | sendgrid://APIToken:FromEmail/
sendgrid://APIToken:FromEmail/ToEmail
sendgrid://APIToken:FromEmail/ToEmail1/ToEmail2/ToEmailN/ | [ServerChan](https://github.com/caronc/apprise/wiki/Notify_serverchan) | schan:// | (TCP) 443 | schan://sendkey/ | [Signal API](https://github.com/caronc/apprise/wiki/Notify_signal) | signal:// or signals:// | (TCP) 80 or 443 | signal://hostname:port/FromPhoneNo
signal://hostname:port/FromPhoneNo/ToPhoneNo
signal://hostname:port/FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ | [SimplePush](https://github.com/caronc/apprise/wiki/Notify_simplepush) | spush:// | (TCP) 443 | spush://apikey
spush://salt:password@apikey
spush://apikey?event=Apprise | [Slack](https://github.com/caronc/apprise/wiki/Notify_slack) | slack:// | (TCP) 443 | slack://TokenA/TokenB/TokenC/
slack://TokenA/TokenB/TokenC/Channel
slack://botname@TokenA/TokenB/TokenC/Channel
slack://user@TokenA/TokenB/TokenC/Channel1/Channel2/ChannelN | [SMTP2Go](https://github.com/caronc/apprise/wiki/Notify_smtp2go) | smtp2go:// | (TCP) 443 | smtp2go://user@hostname/apikey
smtp2go://user@hostname/apikey/email
smtp2go://user@hostname/apikey/email1/email2/emailN
smtp2go://user@hostname/apikey/?name="From%20User" | [SparkPost](https://github.com/caronc/apprise/wiki/Notify_sparkpost) | sparkpost:// | (TCP) 443 | sparkpost://user@hostname/apikey
sparkpost://user@hostname/apikey/email
sparkpost://user@hostname/apikey/email1/email2/emailN
sparkpost://user@hostname/apikey/?name="From%20User" | [Splunk](https://github.com/caronc/apprise/wiki/Notify_splunk) | splunk:// or victorops:// | (TCP) 443 | splunk://route_key@apikey<br/>
splunk://route_key@apikey/entity_id | [Streamlabs](https://github.com/caronc/apprise/wiki/Notify_streamlabs) | strmlabs:// | (TCP) 443 | strmlabs://AccessToken/
strmlabs://AccessToken/?name=name&identifier=identifier&amount=0&currency=USD | [Synology Chat](https://github.com/caronc/apprise/wiki/Notify_synology_chat) | synology:// or synologys:// | (TCP) 80 or 443 | synology://hostname/token<br/>
synology://hostname:port/token | [Syslog](https://github.com/caronc/apprise/wiki/Notify_syslog) | syslog:// | n/a | syslog://
syslog://Facility | [Telegram](https://github.com/caronc/apprise/wiki/Notify_telegram) | tgram:// | (TCP) 443 | tgram://bottoken/ChatID
tgram://bottoken/ChatID1/ChatID2/ChatIDN | [Twitter](https://github.com/caronc/apprise/wiki/Notify_twitter) | twitter:// | (TCP) 443 | twitter://CKey/CSecret/AKey/ASecret
twitter://user@CKey/CSecret/AKey/ASecret
twitter://CKey/CSecret/AKey/ASecret/User1/User2/User2
twitter://CKey/CSecret/AKey/ASecret?mode=tweet | [Twist](https://github.com/caronc/apprise/wiki/Notify_twist) | twist:// | (TCP) 443 | twist://password:login<br/>
twist://password:login/#channel
twist://password:login/#team:channel
twist://password:login/#team:channel1/channel2/#team3:channel | [Webex Teams (Cisco)](https://github.com/caronc/apprise/wiki/Notify_wxteams) | wxteams:// | (TCP) 443 | wxteams://Token | [WeCom Bot](https://github.com/caronc/apprise/wiki/Notify_wecombot) | wecombot:// | (TCP) 443 | wecombot://BotKey | [WhatsApp](https://github.com/caronc/apprise/wiki/Notify_whatsapp) | whatsapp:// | (TCP) 443 | whatsapp://AccessToken@FromPhoneID/ToPhoneNo
whatsapp://Template:AccessToken@FromPhoneID/ToPhoneNo | [WxPusher](https://github.com/caronc/apprise/wiki/Notify_wxpusher) | wxpusher:// | (TCP) 443 | wxpusher://AppToken@UserID1/UserID2/UserIDN
wxpusher://AppToken@Topic1/Topic2/Topic3
wxpusher://AppToken@UserID1/Topic1/ | [XBMC](https://github.com/caronc/apprise/wiki/Notify_xbmc) | xbmc:// or xbmcs:// | (TCP) 8080 or 443 | xbmc://hostname
xbmc://user@hostname
xbmc://user:password@hostname:port | [Zulip Chat](https://github.com/caronc/apprise/wiki/Notify_zulip) | zulip:// | (TCP) 443 | zulip://botname@Organization/Token
zulip://botname@Organization/Token/Stream
zulip://botname@Organization/Token/Email

## SMS Notifications

SMS notifications, for the most part, do not have both a `title` and a `body`. They consist of a single `body` which is usually no more than 160 characters in length. When using Apprise, the `title` and `body` are therefore combined into a single message prior to transmission.

| Notification Service | Service ID | Default Port | Example Syntax |
| -------------------- | ---------- | ------------ | -------------- |
| [Africas Talking](https://github.com/caronc/apprise/wiki/Notify_africas_talking) | atalk:// | (TCP) 443 | atalk://AppUser@ApiKey/ToPhoneNo<br/>
atalk://AppUser@ApiKey/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ | [Automated Packet Reporting System (ARPS)](https://github.com/caronc/apprise/wiki/Notify_aprs) | aprs:// | (TCP) 10152 | aprs://user:pass@callsign
aprs://user:pass@callsign1/callsign2/callsignN | [AWS SNS](https://github.com/caronc/apprise/wiki/Notify_sns) | sns:// | (TCP) 443 | sns://AccessKeyID/AccessSecretKey/RegionName/+PhoneNo
sns://AccessKeyID/AccessSecretKey/RegionName/+PhoneNo1/+PhoneNo2/+PhoneNoN
sns://AccessKeyID/AccessSecretKey/RegionName/Topic
sns://AccessKeyID/AccessSecretKey/RegionName/Topic1/Topic2/TopicN | [BulkSMS](https://github.com/caronc/apprise/wiki/Notify_bulksms) | bulksms:// | (TCP) 443 | bulksms://user:password@ToPhoneNo
bulksms://User:Password@ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ | [BulkVS](https://github.com/caronc/apprise/wiki/Notify_bulkvs) | bulkvs:// | (TCP) 443 | bulkvs://user:password@FromPhoneNo
bulkvs://user:password@FromPhoneNo/ToPhoneNo
bulkvs://user:password@FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ | [Burst SMS](https://github.com/caronc/apprise/wiki/Notify_burst_sms) | burstsms:// | (TCP) 443 | burstsms://ApiKey:ApiSecret@FromPhoneNo/ToPhoneNo
burstsms://ApiKey:ApiSecret@FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ | [ClickSend](https://github.com/caronc/apprise/wiki/Notify_clicksend) | clicksend:// | (TCP) 443 | clicksend://user:pass@PhoneNo
clicksend://user:pass@ToPhoneNo1/ToPhoneNo2/ToPhoneNoN | [DAPNET](https://github.com/caronc/apprise/wiki/Notify_dapnet) | dapnet:// | (TCP) 80 | dapnet://user:pass@callsign
dapnet://user:pass@callsign1/callsign2/callsignN | [D7 Networks](https://github.com/caronc/apprise/wiki/Notify_d7networks) | d7sms:// | (TCP) 443 | d7sms://token@PhoneNo
d7sms://token@ToPhoneNo1/ToPhoneNo2/ToPhoneNoN | [DingTalk](https://github.com/caronc/apprise/wiki/Notify_dingtalk) | dingtalk:// | (TCP) 443 | dingtalk://token/
dingtalk://token/ToPhoneNo
dingtalk://token/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ | [Free-Mobile](https://github.com/caronc/apprise/wiki/Notify_freemobile) | freemobile:// | (TCP) 443 | freemobile://user@password/ | [httpSMS](https://github.com/caronc/apprise/wiki/Notify_httpsms) | httpsms:// | (TCP) 443 | httpsms://ApiKey@FromPhoneNo<br/>
httpsms://ApiKey@FromPhoneNo/ToPhoneNo
httpsms://ApiKey@FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ | [Kavenegar](https://github.com/caronc/apprise/wiki/Notify_kavenegar) | kavenegar:// | (TCP) 443 | kavenegar://ApiKey/ToPhoneNo
kavenegar://FromPhoneNo@ApiKey/ToPhoneNo
kavenegar://ApiKey/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN | [MessageBird](https://github.com/caronc/apprise/wiki/Notify_messagebird) | msgbird:// | (TCP) 443 | msgbird://ApiKey/FromPhoneNo
msgbird://ApiKey/FromPhoneNo/ToPhoneNo
msgbird://ApiKey/FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ | [MSG91](https://github.com/caronc/apprise/wiki/Notify_msg91) | msg91:// | (TCP) 443 | msg91://TemplateID@AuthKey/ToPhoneNo
msg91://TemplateID@AuthKey/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ | [Plivo](https://github.com/caronc/apprise/wiki/Notify_plivo) | plivo:// | (TCP) 443 | plivo://AuthID@Token@FromPhoneNo
plivo://AuthID@Token/FromPhoneNo/ToPhoneNo
plivo://AuthID@Token/FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ | [Seven](https://github.com/caronc/apprise/wiki/Notify_seven) | seven:// | (TCP) 443 | seven://ApiKey/FromPhoneNo
seven://ApiKey/FromPhoneNo/ToPhoneNo
seven://ApiKey/FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ | [Société Française du Radiotéléphone (SFR)](https://github.com/caronc/apprise/wiki/Notify_sfr) | sfr:// | (TCP) 443 | sfr://user:password@spaceId/ToPhoneNo<br/>
sfr://user:password@spaceId/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ | [Signal API](https://github.com/caronc/apprise/wiki/Notify_signal) | signal:// or signals:// | (TCP) 80 or 443 | signal://hostname:port/FromPhoneNo<br/>
signal://hostname:port/FromPhoneNo/ToPhoneNo
signal://hostname:port/FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ | [Sinch](https://github.com/caronc/apprise/wiki/Notify_sinch) | sinch:// | (TCP) 443 | sinch://ServicePlanId:ApiToken@FromPhoneNo
sinch://ServicePlanId:ApiToken@FromPhoneNo/ToPhoneNo
sinch://ServicePlanId:ApiToken@FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/
sinch://ServicePlanId:ApiToken@ShortCode/ToPhoneNo
sinch://ServicePlanId:ApiToken@ShortCode/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ | [SMSEagle](https://github.com/caronc/apprise/wiki/Notify_smseagle) | smseagle:// or smseagles:// | (TCP) 80 or 443 | smseagles://hostname:port/ToPhoneNo
smseagles://hostname:port/@ToContact
smseagles://hostname:port/#ToGroup
smseagles://hostname:port/ToPhoneNo1/#ToGroup/@ToContact/ | [SMS Manager](https://github.com/caronc/apprise/wiki/Notify_sms_manager) | smsmgr:// | (TCP) 443 | smsmgr://ApiKey@ToPhoneNo
smsmgr://ApiKey@ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ | [Threema Gateway](https://github.com/caronc/apprise/wiki/Notify_threema) | threema:// | (TCP) 443 | threema://GatewayID@secret/ToPhoneNo
threema://GatewayID@secret/ToEmail
threema://GatewayID@secret/ToThreemaID/
threema://GatewayID@secret/ToEmail/ToThreemaID/ToPhoneNo/... | [Twilio](https://github.com/caronc/apprise/wiki/Notify_twilio) | twilio:// | (TCP) 443 | twilio://AccountSid:AuthToken@FromPhoneNo
twilio://AccountSid:AuthToken@FromPhoneNo/ToPhoneNo
twilio://AccountSid:AuthToken@FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/
twilio://AccountSid:AuthToken@FromPhoneNo/ToPhoneNo?apikey=Key
twilio://AccountSid:AuthToken@ShortCode/ToPhoneNo
twilio://AccountSid:AuthToken@ShortCode/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ | [Voipms](https://github.com/caronc/apprise/wiki/Notify_voipms) | voipms:// | (TCP) 443 | voipms://password:email/FromPhoneNo
voipms://password:email/FromPhoneNo/ToPhoneNo
voipms://password:email/FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ | [Vonage](https://github.com/caronc/apprise/wiki/Notify_nexmo) (formerly Nexmo) | nexmo:// | (TCP) 443 | nexmo://ApiKey:ApiSecret@FromPhoneNo
nexmo://ApiKey:ApiSecret@FromPhoneNo/ToPhoneNo
nexmo://ApiKey:ApiSecret@FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ ## Desktop Notifications | Notification Service | Service ID | Default Port | Example Syntax | | -------------------- | ---------- | ------------ | -------------- | | [Linux DBus Notifications](https://github.com/caronc/apprise/wiki/Notify_dbus) | dbus://
qt://
glib://
kde:// | n/a | dbus://
qt://
glib://
kde:// | [Linux Gnome Notifications](https://github.com/caronc/apprise/wiki/Notify_gnome) | gnome:// | n/a | gnome:// | [MacOS X Notifications](https://github.com/caronc/apprise/wiki/Notify_macosx) | macosx:// | n/a | macosx:// | [Windows Notifications](https://github.com/caronc/apprise/wiki/Notify_windows) | windows:// | n/a | windows:// ## Email Notifications | Service ID | Default Port | Example Syntax | | ---------- | ------------ | -------------- | | [mailto://](https://github.com/caronc/apprise/wiki/Notify_email) | (TCP) 25 | mailto://userid:pass@domain.com
mailto://domain.com?user=userid&pass=password
mailto://domain.com:2525?user=userid&pass=password
mailto://user@gmail.com&pass=password
mailto://mySendingUsername:mySendingPassword@example.com?to=receivingAddress@example.com
mailto://userid:password@example.com?smtp=mail.example.com&from=noreply@example.com&name=no%20reply | [mailtos://](https://github.com/caronc/apprise/wiki/Notify_email) | (TCP) 587 | mailtos://userid:pass@domain.com
mailtos://domain.com?user=userid&pass=password
mailtos://domain.com:465?user=userid&pass=password
mailtos://user@hotmail.com&pass=password
mailtos://mySendingUsername:mySendingPassword@example.com?to=receivingAddress@example.com
mailtos://userid:password@example.com?smtp=mail.example.com&from=noreply@example.com&name=no%20reply

Apprise has some email services built right into it (such as yahoo, fastmail, hotmail, gmail, etc.) that greatly simplify the mailto:// service. See more details [here](https://github.com/caronc/apprise/wiki/Notify_email).

## Custom Notifications

| Post Method | Service ID | Default Port | Example Syntax |
| -------------------- | ---------- | ------------ | -------------- |
| [Form](https://github.com/caronc/apprise/wiki/Notify_Custom_Form) | form:// or forms:// | (TCP) 80 or 443 | form://hostname<br/>
form://user@hostname
form://user:password@hostname:port
form://hostname/a/path/to/post/to | [JSON](https://github.com/caronc/apprise/wiki/Notify_Custom_JSON) | json:// or jsons:// | (TCP) 80 or 443 | json://hostname
json://user@hostname
json://user:password@hostname:port
json://hostname/a/path/to/post/to | [XML](https://github.com/caronc/apprise/wiki/Notify_Custom_XML) | xml:// or xmls:// | (TCP) 80 or 443 | xml://hostname
xml://user@hostname
xml://user:password@hostname:port
xml://hostname/a/path/to/post/to

# Installation

The easiest way to install this package is from PyPI:

```bash
pip install apprise
```

Apprise is also packaged as an RPM and available through [EPEL](https://docs.fedoraproject.org/en-US/epel/) supporting CentOS, Redhat, Rocky, Oracle Linux, etc.

```bash
# Follow instructions on https://docs.fedoraproject.org/en-US/epel
# to get your system connected up to EPEL and then:

# Redhat/CentOS 7.x users
yum install apprise

# Redhat/CentOS 8.x+ and/or Fedora Users
dnf install apprise
```

You can also check out the [Graphical version of Apprise](https://github.com/caronc/apprise-api) to centralize your configuration and notifications through a manageable webpage.

# Command Line Usage

A small command line interface (CLI) tool is also provided with this package called *apprise*. If you know the server URLs you wish to notify, you can simply provide them all on the command line and send your notifications that way:

```bash
# Send a notification to as many servers as you want
# as you can easily chain one after another (the -vv provides some
# additional verbosity to help let you know what is going on):
apprise -vv -t 'my title' -b 'my notification body' \
   'mailto://myemail:mypass@gmail.com' \
   'pbul://o.gn5kj6nfhv736I7jC3cj3QLRiyhgl98b'

# If you don't specify a --body (-b) then stdin is used allowing
# you to use the tool as part of your everyday administration:
cat /proc/cpuinfo | apprise -vv -t 'cpu info' \
    'mailto://myemail:mypass@gmail.com'

# The title field is totally optional
uptime | apprise -vv \
    'discord:///4174216298/JHMHI8qBe7bk2ZwO5U711o3dV_js'
```

## CLI Configuration Files

No one wants to put their credentials out for everyone to see on the command line. No problem; *apprise* also supports configuration files. It can handle either a specific [YAML format](https://github.com/caronc/apprise/wiki/config_yaml) or a very simple [TEXT format](https://github.com/caronc/apprise/wiki/config_text). You can even pull these configuration files via an HTTP query! You can read more about the expected structure of the configuration files [here](https://github.com/caronc/apprise/wiki/config).

```bash
# By default if no url or configuration is specified apprise will attempt to load
# configuration files (if present) from:
# ~/.apprise
# ~/.apprise.yaml
# ~/.config/apprise.conf
# ~/.config/apprise.yaml
# /etc/apprise.conf
# /etc/apprise.yaml

# Subdirectory handling also allows you to leverage plugins
# ~/.apprise/apprise
# ~/.apprise/apprise.yaml
# ~/.config/apprise/apprise.conf
# ~/.config/apprise/apprise.yaml
# /etc/apprise/apprise.yaml
# /etc/apprise/apprise.conf

# Windows users can store their default configuration files here:
# %APPDATA%/Apprise/apprise.conf
# %APPDATA%/Apprise/apprise.yaml
# %LOCALAPPDATA%/Apprise/apprise.conf
# %LOCALAPPDATA%/Apprise/apprise.yaml
# %ALLUSERSPROFILE%\Apprise\apprise.conf
# %ALLUSERSPROFILE%\Apprise\apprise.yaml
# %PROGRAMFILES%\Apprise\apprise.conf
# %PROGRAMFILES%\Apprise\apprise.yaml
# %COMMONPROGRAMFILES%\Apprise\apprise.conf
# %COMMONPROGRAMFILES%\Apprise\apprise.yaml

# The configuration files specified above can also be identified with a `.yml`
# extension or even just entirely removing the `.conf` extension altogether.
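
# As a quick, illustrative sketch only (see the TEXT format wiki page linked
# above for the exact syntax), a simple TEXT-based configuration file placed
# at one of the paths above can just list one Apprise URL per line, optionally
# prefixed with one or more tags. The URLs below are placeholders reused from
# the examples earlier in this document:
#
#   devteam=mailto://myemail:mypass@gmail.com
#   pbul://o.gn5kj6nfhv736I7jC3cj3QLRiyhgl98b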
# If you loaded one of those files, your command line gets really easy: apprise -vv -t 'my title' -b 'my notification body' # If you want to deviate from the default paths or specify more than one, # just specify them using the --config switch: apprise -vv -t 'my title' -b 'my notification body' \ --config=/path/to/my/config.yml # Got lots of configuration locations? No problem, you can specify them all: # Apprise can even fetch the configuration from over a network! apprise -vv -t 'my title' -b 'my notification body' \ --config=/path/to/my/config.yml \ --config=https://localhost/my/apprise/config ``` ## CLI File Attachments Apprise also supports file attachments too! Specify as many attachments to a notification as you want. ```bash # Send a funny image you found on the internet to a colleague: apprise -vv --title 'Agile Joke' \ --body 'Did you see this one yet?' \ --attach https://i.redd.it/my2t4d2fx0u31.jpg \ 'mailto://myemail:mypass@gmail.com' # Easily send an update from a critical server to your dev team apprise -vv --title 'system crash' \ --body 'I do not think Jim fixed the bug; see attached...' \ --attach /var/log/myprogram.log \ --attach /var/debug/core.2345 \ --tag devteam ``` ## CLI Loading Custom Notifications/Hooks To create your own custom `schema://` hook so that you can trigger your own custom code, simply include the `@notify` decorator to wrap your function. ```python from apprise.decorators import notify # # The below assumes you want to catch foobar:// calls: # @notify(on="foobar", name="My Custom Foobar Plugin") def my_custom_notification_wrapper(body, title, notify_type, *args, **kwargs): """My custom notification function that triggers on all foobar:// calls """ # Write all of your code here... as an example... print("{}: {} - {}".format(notify_type.upper(), title, body)) # Returning True/False is a way to relay your status back to Apprise. # Returning nothing (None by default) is always interpreted as a Success ``` Once you've defined your custom hook, you just need to tell Apprise where it is at runtime. ```bash # By default if no plugin path is specified apprise will attempt to load # all plugin files (if present) from the following directory paths: # ~/.apprise/plugins # ~/.config/apprise/plugins # /var/lib/apprise/plugins # Windows users can store their default plugin files in these directories: # %APPDATA%/Apprise/plugins # %LOCALAPPDATA%/Apprise/plugins # %ALLUSERSPROFILE%\Apprise\plugins # %PROGRAMFILES%\Apprise\plugins # %COMMONPROGRAMFILES%\Apprise\plugins # If you placed your plugin file within one of the directories already defined # above, then your call simply needs to look like: apprise -vv --title 'custom override' \ --body 'the body of my message' \ foobar:\\ # However you can over-ride the path like so apprise -vv --title 'custom override' \ --body 'the body of my message' \ --plugin-path /path/to/my/plugin.py \ foobar:\\ ``` You can read more about creating your own custom notifications and/or hooks [here](https://github.com/caronc/apprise/wiki/decorator_notify). ## CLI Environment Variables Those using the Command Line Interface (CLI) can also leverage environment variables to pre-set the default settings: | Variable | Description | |------------------------ | ----------------- | | `APPRISE_URLS` | Specify the default URLs to notify IF none are otherwise specified on the command line explicitly. If the `--config` (`-c`) is specified, then this will over-rides any reference to this variable. 
Use white space and/or a comma (`,`) to delimit multiple entries. | `APPRISE_CONFIG_PATH` | Explicitly specify the config search path to use (over-riding the default). The path(s) defined here must point to the absolute filename to open/reference. Use a semi-colon (`;`), line-feed (`\n`), and/or carriage return (`\r`) to delimit multiple entries. | `APPRISE_PLUGIN_PATH` | Explicitly specify the custom plugin search path to use (over-riding the default). Use a semi-colon (`;`), line-feed (`\n`), and/or carriage return (`\r`) to delimit multiple entries. | `APPRISE_STORAGE_PATH` | Explicitly specify the persistent storage path to use (over-riding the default). # Developer API Usage To send a notification from within your python application, just do the following: ```python import apprise # Create an Apprise instance apobj = apprise.Apprise() # Add all of the notification services by their server url. # A sample email notification: apobj.add('mailto://myuserid:mypass@gmail.com') # A sample pushbullet notification apobj.add('pbul://o.gn5kj6nfhv736I7jC3cj3QLRiyhgl98b') # Then notify these services any time you desire. The below would # notify all of the services loaded into our Apprise object. apobj.notify( body='what a great notification service!', title='my notification title', ) ``` ## API Configuration Files Developers need access to configuration files too. The good news is their use just involves declaring another object (called *AppriseConfig*) that the *Apprise* object can ingest. You can also freely mix and match config and notification entries as often as you wish! You can read more about the expected structure of the configuration files [here](https://github.com/caronc/apprise/wiki/config). ```python import apprise # Create an Apprise instance apobj = apprise.Apprise() # Create an Config instance config = apprise.AppriseConfig() # Add a configuration source: config.add('/path/to/my/config.yml') # Add another... config.add('https://myserver:8080/path/to/config') # Make sure to add our config into our apprise object apobj.add(config) # You can mix and match; add an entry directly if you want too # In this entry we associate the 'admin' tag with our notification apobj.add('mailto://myuser:mypass@hotmail.com', tag='admin') # Then notify these services any time you desire. The below would # notify all of the services that have not been bound to any specific # tag. apobj.notify( body='what a great notification service!', title='my notification title', ) # Tagging allows you to specifically target only specific notification # services you've loaded: apobj.notify( body='send a notification to our admin group', title='Attention Admins', # notify any services tagged with the 'admin' tag tag='admin', ) # If you want to notify absolutely everything (regardless of whether # it's been tagged or not), just use the reserved tag of 'all': apobj.notify( body='send a notification to our admin group', title='Attention Admins', # notify absolutely everything loaded, regardless on wether # it has a tag associated with it or not: tag='all', ) ``` ## API File Attachments Attachments are very easy to send using the Apprise API: ```python import apprise # Create an Apprise instance apobj = apprise.Apprise() # Add at least one service you want to notify apobj.add('mailto://myuser:mypass@hotmail.com') # Then send your attachment. apobj.notify( title='A great photo of our family', body='The flash caused Jane to close her eyes! hah! :)', attach='/local/path/to/my/DSC_003.jpg', ) # Send a web based attachment too! 
In the below example, we connect to a home # security camera and send a live image to an email. By default remote web # content is cached, but for a security camera we might want to call notify # again later in our code, so we want our last image retrieved to expire(in # this case after 3 seconds). apobj.notify( title='Latest security image', attach='http://admin:password@hikvision-cam01/ISAPI/Streaming/channels/101/picture?cache=3' ) ``` To send more than one attachment, just use a list, set, or tuple instead: ```python import apprise # Create an Apprise instance apobj = apprise.Apprise() # Add at least one service you want to notify apobj.add('mailto://myuser:mypass@hotmail.com') # Now add all of the entries we're interested in: attach = ( # ?name= allows us to rename the actual jpeg as found on the site # to be another name when sent to our receipient(s) 'https://i.redd.it/my2t4d2fx0u31.jpg?name=FlyingToMars.jpg', # Now add another: '/path/to/funny/joke.gif', ) # Send your multiple attachments with a single notify call: apobj.notify( title='Some good jokes.', body='Hey guys, check out these!', attach=attach, ) ``` ## API Loading Custom Notifications/Hooks By default, no custom plugins are loaded at all for those building from within the Apprise API. It's at the developers discretion to load custom modules. But should you choose to do so, it's as easy as including the path reference in the `AppriseAsset()` object prior to the initialization of your `Apprise()` instance. For example: ```python from apprise import Apprise from apprise import AppriseAsset # Prepare your Asset object so that you can enable the custom plugins to # be loaded for your instance of Apprise... asset = AppriseAsset(plugin_paths="/path/to/scan") # OR You can also generate scan more then one file too: asset = AppriseAsset( plugin_paths=[ # Iterate over all python libraries found in the root of the # specified path. This is NOT a recursive (directory) scan; only # the first level is parsed. HOWEVER, if a directory containing # an __init__.py is found, it will be included in the load. "/dir/containing/many/python/libraries", # An absolute path to a plugin.py to exclusively load "/path/to/plugin.py", # if you point to a directory that has an __init__.py file found in # it, then only that file is loaded (it's similar to point to a # absolute .py file. Hence, there is no (level 1) scanning at all # within the directory specified. "/path/to/dir/library" ] ) # Now that we've got our asset, we just work with our Apprise object as we # normally do aobj = Apprise(asset=asset) # If our new custom `foobar://` library was loaded (presuming we prepared # one like in the examples above). then you would be able to safely add it # into Apprise at this point aobj.add('foobar://') # Send our notification out through our foobar:// aobj.notify("test") ``` You can read more about creating your own custom notifications and/or hooks [here](https://github.com/caronc/apprise/wiki/decorator_notify). # Persistent Storage Persistent storage allows Apprise to cache re-occurring actions optionaly to disk. This can greatly reduce the overhead used to send a notification. There are 3 Persistent Storage operational states Apprise can operate using: 1. `auto`: Flush gathered cache information to the filesystem on demand. This option is incredibly light weight. This is the default behavior for all CLI usage. * Developers who choose to use this operational mode can also force cached information manually if they choose. 
   * The CLI will use this operational mode by default.
1. `flush`: Flushes any cache information to the filesystem during every
   transaction.
1. `memory`: Effectively disable Persistent Storage. Any caching of data
   required by each plugin used is done in memory. Apprise effectively
   operates as it always did before persistent storage was available. This
   setting ensures no content is ever written to disk.
   * By default this is the mode Apprise will operate under for those
     developing with it unless they configure it to otherwise operate as
     `auto` or `flush`. This is done through the `AppriseAsset()` object and
     is explained further on in this documentation.

## CLI Persistent Storage Commands

You can provide the keyword `storage` on your CLI call to see the persistent storage options available to you.

```bash
# List all of the occupied space used by Apprise's Persistent Storage:
apprise storage list

# list is the default option, so the following does the same thing:
apprise storage

# You can prune all of your storage older than 30 days
# and not accessed for this period like so:
apprise storage prune

# You can do a hard reset (and wipe all persistent storage) with:
apprise storage clean
```

You can also filter your results by adding tags and/or URL Identifiers. When you get a listing (`apprise storage list`), you may see:

```
# example output of 'apprise storage list':
   1. f7077a65   0.00B   unused
      - matrixs://abcdef:****@synapse.example12.com/%23general?image=no&mode=off&version=3&msgtype...
        tags: team
   2. 0e873a46  81.10B   active
      - tgram://W...U//?image=False&detect=yes&silent=no&preview=no&content=before&mdv=v1&format=m...
        tags: personal
   3. abcd123   12.00B   stale
```

The (persistent storage) cache states are:

- `unused`: This plugin has not committed anything to disk for reuse/cache purposes.
- `active`: This plugin has written content to disk. Or at the very least, it has prepared a persistent storage location it can write into.
- `stale`: The system detected a location that a URL may have written to in the past, but there is nothing linking to it using the URLs provided. It is likely wasting space or is no longer of any use.

You can use this information to filter your results by specifying _URL ID_ (UID) values after your command. For example:

```bash
# The below commands continue with the example already identified above.
# The following would match abcd123 (even though just 'ab' was provided);
# the output would only list the 'stale' entry above:
apprise storage list ab

# Knowing our filter is safe, we could remove it. The below command would
# not affect our other two URLs and would only remove our stale one:
apprise storage clean ab

# Entries can be filtered by tag as well:
apprise storage list --tag=team

# You can match on multiple URL IDs as well:
# The following would actually match the URL IDs of 1. and 2. above
apprise storage list f 0
```

When using the CLI, persistent storage is set to the operational mode of `auto` by default. You can change this by providing `--storage-mode=` (`-SM`) with your calls if you want to ensure it is always set to a value of your choice.

For more information on persistent storage, [visit here](https://github.com/caronc/apprise/wiki/persistent_storage).

## API Persistent Storage Commands

For developers, persistent storage is set to the operational mode of `memory` by default. It's at the developer's discretion to enable it (by switching it to either `auto` or `flush`).
Should you choose to do so: it's as easy as including the information in the `AppriseAsset()` object prior to the initialization of your `Apprise()` instance. For example: ```python from apprise import Apprise from apprise import AppriseAsset from apprise import PersistentStoreMode # Prepare a location the persistent storage can write it's cached content to. # By setting this path, this immediately assumes you wish to operate the # persistent storage in the operational 'auto' mode asset = AppriseAsset(storage_path="/path/to/save/data") # If you want to be more explicit and set more options, then you may do the # following asset = AppriseAsset( # Set our storage path directory (minimum requirement to enable it) storage_path="/path/to/save/data", # Set the mode... the options are: # 1. PersistentStoreMode.MEMORY # - disable persistent storage from writing to disk # 2. PersistentStoreMode.AUTO # - write to disk on demand # 3. PersistentStoreMode.FLUSH # - write to disk always and often storage_mode=PersistentStoreMode.FLUSH # The URL IDs are by default 8 characters in length. You can increase and # decrease it's value here. The value must be > 2. The default value is 8 # if not otherwise specified storage_idlen=8, ) # Now that we've got our asset, we just work with our Apprise object as we # normally do aobj = Apprise(asset=asset) ``` For more information on persistent storage, [visit here](https://github.com/caronc/apprise/wiki/persistent_storage). # Want To Learn More? If you're interested in reading more about this and other methods on how to customize your own notifications, please check out the following links: * 📣 [Using the CLI](https://github.com/caronc/apprise/wiki/CLI_Usage) * 🛠️ [Development API](https://github.com/caronc/apprise/wiki/Development_API) * 🔧 [Troubleshooting](https://github.com/caronc/apprise/wiki/Troubleshooting) * ⚙️ [Configuration File Help](https://github.com/caronc/apprise/wiki/config) * ⚡ [Create Your Own Custom Notifications](https://github.com/caronc/apprise/wiki/decorator_notify) * 💾 [Persistent Storage](https://github.com/caronc/apprise/wiki/persistent_storage) * 🌎 [Apprise API/Web Interface](https://github.com/caronc/apprise-api) * 🎉 [Showcase](https://github.com/caronc/apprise/wiki/showcase) Want to help make Apprise better? * 💡 [Contribute to the Apprise Code Base](https://github.com/caronc/apprise/wiki/Development_Contribution) * ❤️ [Sponsorship and Donations](https://github.com/caronc/apprise/wiki/Sponsors) apprise-1.9.3/SECURITY.md000066400000000000000000000005731477231770000147750ustar00rootroot00000000000000# Security Policy ## Supported Versions | Version | Supported | | ------- | ------------------ | | 0.9.x | :white_check_mark: | | < 0.9.x | :x: | ## Reporting a Vulnerability If you find a vunerability, please notify me at lead2gold@gmail.com. 
If the vunerability is severe then please just open a ticket at https://github.com/caronc/apprise/issues apprise-1.9.3/all-plugin-requirements.txt000066400000000000000000000005071477231770000205270ustar00rootroot00000000000000# # Plugin Dependencies # # Provides fcm:// and spush:// cryptography # Provides growl:// support gntp # Provides mqtt:// support # use any version other than 2.0.x due to https://github.com/eclipse/paho.mqtt.python/issues/814 paho-mqtt != 2.0.* # Pretty Good Privacy (PGP) Provides mailto:// and deltachat:// support PGPy apprise-1.9.3/apprise/000077500000000000000000000000001477231770000146425ustar00rootroot00000000000000apprise-1.9.3/apprise/__init__.py000066400000000000000000000074701477231770000167630ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. __title__ = 'Apprise' __version__ = '1.9.3' __author__ = 'Chris Caron' __license__ = 'BSD 2-Clause' __copywrite__ = 'Copyright (C) 2025 Chris Caron ' __email__ = 'lead2gold@gmail.com' __status__ = 'Production' from .common import NotifyType from .common import NOTIFY_TYPES from .common import NotifyImageSize from .common import NOTIFY_IMAGE_SIZES from .common import NotifyFormat from .common import NOTIFY_FORMATS from .common import OverflowMode from .common import OVERFLOW_MODES from .common import ConfigFormat from .common import CONFIG_FORMATS from .common import ContentIncludeMode from .common import CONTENT_INCLUDE_MODES from .common import ContentLocation from .common import CONTENT_LOCATIONS from .common import PersistentStoreMode from .common import PERSISTENT_STORE_MODES from .url import URLBase from .url import PrivacyMode from .plugins.base import NotifyBase from .config.base import ConfigBase from .attachment.base import AttachBase from . import exception from .apprise import Apprise from .locale import AppriseLocale from .asset import AppriseAsset from .persistent_store import PersistentStore from .apprise_config import AppriseConfig from .apprise_attachment import AppriseAttachment from .manager_attachment import AttachmentManager from .manager_config import ConfigurationManager from .manager_plugins import NotificationManager from . 
import decorators # Inherit our logging with our additional entries added to it from .logger import logging from .logger import logger from .logger import LogCapture # Set default logging handler to avoid "No handler found" warnings. logging.getLogger(__name__).addHandler(logging.NullHandler()) __all__ = [ # Core 'Apprise', 'AppriseAsset', 'AppriseConfig', 'AppriseAttachment', 'URLBase', 'NotifyBase', 'ConfigBase', 'AttachBase', 'AppriseLocale', 'PersistentStore', # Exceptions 'exception', # Reference 'NotifyType', 'NotifyImageSize', 'NotifyFormat', 'OverflowMode', 'NOTIFY_TYPES', 'NOTIFY_IMAGE_SIZES', 'NOTIFY_FORMATS', 'OVERFLOW_MODES', 'ConfigFormat', 'CONFIG_FORMATS', 'ContentIncludeMode', 'CONTENT_INCLUDE_MODES', 'ContentLocation', 'CONTENT_LOCATIONS', 'PersistentStoreMode', 'PERSISTENT_STORE_MODES', 'PrivacyMode', # Managers 'NotificationManager', 'ConfigurationManager', 'AttachmentManager', # Decorator 'decorators', # Logging 'logging', 'logger', 'LogCapture', ] apprise-1.9.3/apprise/apprise.py000066400000000000000000001004561477231770000166650ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import asyncio import concurrent.futures as cf import os from itertools import chain from . import common from .conversion import convert_between from .utils.logic import is_exclusive_match from .utils.parse import parse_list, parse_urls from .utils.cwe312 import cwe312_url from .manager_plugins import NotificationManager from .emojis import apply_emojis from .logger import logger from .asset import AppriseAsset from .apprise_config import AppriseConfig from .apprise_attachment import AppriseAttachment from .locale import AppriseLocale from .config.base import ConfigBase from .plugins.base import NotifyBase from . import plugins from . import __version__ # Grant access to our Notification Manager Singleton N_MGR = NotificationManager() class Apprise: """ Our Notification Manager """ def __init__(self, servers=None, asset=None, location=None, debug=False): """ Loads a set of server urls while applying the Asset() module to each if specified. If no asset is provided, then the default asset is used. 
Optionally specify a global ContentLocation for a more strict means of handling Attachments. """ # Initialize a server list of URLs self.servers = list() # Assigns an central asset object that will be later passed into each # notification plugin. Assets contain information such as the local # directory images can be found in. It can also identify remote # URL paths that contain the images you want to present to the end # user. If no asset is specified, then the default one is used. self.asset = \ asset if isinstance(asset, AppriseAsset) else AppriseAsset() if servers: self.add(servers) # Initialize our locale object self.locale = AppriseLocale() # Set our debug flag self.debug = debug # Store our hosting location for optional strict rule handling # of Attachments. Setting this to None removes any attachment # restrictions. self.location = location @staticmethod def instantiate(url, asset=None, tag=None, suppress_exceptions=True): """ Returns the instance of a instantiated plugin based on the provided Server URL. If the url fails to be parsed, then None is returned. The specified url can be either a string (the URL itself) or a dictionary containing all of the components needed to istantiate the notification service. If identifying a dictionary, at the bare minimum, one must specify the schema. An example of a url dictionary object might look like: { schema: 'mailto', host: 'google.com', user: 'myuser', password: 'mypassword', } Alternatively the string is much easier to specify: mailto://user:mypassword@google.com The dictionary works well for people who are calling details() to extract the components they need to build the URL manually. """ # Initialize our result set results = None # Prepare our Asset Object asset = asset if isinstance(asset, AppriseAsset) else AppriseAsset() if isinstance(url, str): # Acquire our url tokens results = plugins.url_to_dict( url, secure_logging=asset.secure_logging) if results is None: # Failed to parse the server URL; detailed logging handled # inside url_to_dict - nothing to report here. 
return None elif isinstance(url, dict): # We already have our result set results = url if results.get('schema') not in N_MGR: # schema is a mandatory dictionary item as it is the only way # we can index into our loaded plugins logger.error('Dictionary does not include a "schema" entry.') logger.trace( 'Invalid dictionary unpacked as:{}{}'.format( os.linesep, os.linesep.join( ['{}="{}"'.format(k, v) for k, v in results.items()]))) return None logger.trace( 'Dictionary unpacked as:{}{}'.format( os.linesep, os.linesep.join( ['{}="{}"'.format(k, v) for k, v in results.items()]))) # Otherwise we handle the invalid input specified else: logger.error( 'An invalid URL type (%s) was specified for instantiation', type(url)) return None if not N_MGR[results['schema']].enabled: # # First Plugin Enable Check (Pre Initialization) # # Plugin has been disabled at a global level logger.error( '%s:// is disabled on this system.', results['schema']) return None # Build a list of tags to associate with the newly added notifications results['tag'] = set(parse_list(tag)) # Set our Asset Object results['asset'] = asset if suppress_exceptions: try: # Attempt to create an instance of our plugin using the parsed # URL information plugin = N_MGR[results['schema']](**results) # Create log entry of loaded URL logger.debug( 'Loaded {} URL: {}'.format( N_MGR[results['schema']].service_name, plugin.url(privacy=asset.secure_logging))) except Exception: # CWE-312 (Secure Logging) Handling loggable_url = url if not asset.secure_logging \ else cwe312_url(url) # the arguments are invalid or can not be used. logger.error( 'Could not load {} URL: {}'.format( N_MGR[results['schema']].service_name, loggable_url)) return None else: # Attempt to create an instance of our plugin using the parsed # URL information but don't wrap it in a try catch plugin = N_MGR[results['schema']](**results) if not plugin.enabled: # # Second Plugin Enable Check (Post Initialization) # # Service/Plugin is disabled (on a more local level). This is a # case where the plugin was initially enabled but then after the # __init__() was called under the hood something pre-determined # that it could no longer be used. # The only downside to doing it this way is services are # initialized prior to returning the details() if 3rd party tools # are polling what is available. These services that become # disabled thereafter are shown initially that they can be used. logger.error( '%s:// has become disabled on this system.', results['schema']) return None return plugin def add(self, servers, asset=None, tag=None): """ Adds one or more server URLs into our list. You can override the global asset if you wish by including it with the server(s) that you add. The tag allows you to associate 1 or more tag values to the server(s) being added. tagging a service allows you to exclusively access them when calling the notify() function. 
""" # Initialize our return status return_status = True if asset is None: # prepare default asset asset = self.asset if isinstance(servers, str): # build our server list servers = parse_urls(servers) if len(servers) == 0: return False elif isinstance(servers, dict): # no problem, we support kwargs, convert it to a list servers = [servers] elif isinstance(servers, (ConfigBase, NotifyBase, AppriseConfig)): # Go ahead and just add our plugin into our list self.servers.append(servers) return True elif not isinstance(servers, (tuple, set, list)): logger.error( "An invalid notification (type={}) was specified.".format( type(servers))) return False for _server in servers: if isinstance(_server, (ConfigBase, NotifyBase, AppriseConfig)): # Go ahead and just add our plugin into our list self.servers.append(_server) continue elif not isinstance(_server, (str, dict)): logger.error( "An invalid notification (type={}) was specified.".format( type(_server))) return_status = False continue # Instantiate ourselves an object, this function throws or # returns None if it fails instance = Apprise.instantiate(_server, asset=asset, tag=tag) if not isinstance(instance, NotifyBase): # No logging is required as instantiate() handles failure # and/or success reasons for us return_status = False continue # Add our initialized plugin to our server listings self.servers.append(instance) # Return our status return return_status def clear(self): """ Empties our server list """ self.servers[:] = [] def find(self, tag=common.MATCH_ALL_TAG, match_always=True): """ Returns a list of all servers matching against the tag specified. """ # Build our tag setup # - top level entries are treated as an 'or' # - second level (or more) entries are treated as 'and' # # examples: # tag="tagA, tagB" = tagA or tagB # tag=['tagA', 'tagB'] = tagA or tagB # tag=[('tagA', 'tagC'), 'tagB'] = (tagA and tagC) or tagB # tag=[('tagB', 'tagC')] = tagB and tagC # A match_always flag allows us to pick up on our 'any' keyword # and notify these services under all circumstances match_always = common.MATCH_ALWAYS_TAG if match_always else None # Iterate over our loaded plugins for entry in self.servers: if isinstance(entry, (ConfigBase, AppriseConfig)): # load our servers servers = entry.servers() else: servers = [entry, ] for server in servers: # Apply our tag matching based on our defined logic if is_exclusive_match( logic=tag, data=server.tags, match_all=common.MATCH_ALL_TAG, match_always=match_always): yield server return def notify(self, body, title='', notify_type=common.NotifyType.INFO, body_format=None, tag=common.MATCH_ALL_TAG, match_always=True, attach=None, interpret_escapes=None): """ Send a notification to all the plugins previously loaded. If the body_format specified is NotifyFormat.MARKDOWN, it will be converted to HTML if the Notification type expects this. if the tag is specified (either a string or a set/list/tuple of strings), then only the notifications flagged with that tagged value are notified. By default, all added services are notified (tag=MATCH_ALL_TAG) This function returns True if all notifications were successfully sent, False if even just one of them fails, and None if no notifications were sent at all as a result of tag filtering and/or simply having empty configuration files that were read. Attach can contain a list of attachment URLs. attach can also be represented by an AttachBase() (or list of) object(s). 
This identifies the products you wish to notify Set interpret_escapes to True if you want to pre-escape a string such as turning a \n into an actual new line, etc. """ try: # Process arguments and build synchronous and asynchronous calls # (this step can throw internal errors). sequential_calls, parallel_calls = self._create_notify_calls( body, title, notify_type=notify_type, body_format=body_format, tag=tag, match_always=match_always, attach=attach, interpret_escapes=interpret_escapes, ) except TypeError: # No notifications sent, and there was an internal error. return False if not sequential_calls and not parallel_calls: # Nothing to send return None sequential_result = Apprise._notify_sequential(*sequential_calls) parallel_result = Apprise._notify_parallel_threadpool(*parallel_calls) return sequential_result and parallel_result async def async_notify(self, *args, **kwargs): """ Send a notification to all the plugins previously loaded, for asynchronous callers. The arguments are identical to those of Apprise.notify(). """ try: # Process arguments and build synchronous and asynchronous calls # (this step can throw internal errors). sequential_calls, parallel_calls = self._create_notify_calls( *args, **kwargs) except TypeError: # No notifications sent, and there was an internal error. return False if not sequential_calls and not parallel_calls: # Nothing to send return None sequential_result = Apprise._notify_sequential(*sequential_calls) parallel_result = \ await Apprise._notify_parallel_asyncio(*parallel_calls) return sequential_result and parallel_result def _create_notify_calls(self, *args, **kwargs): """ Creates notifications for all the plugins loaded. Returns a list of (server, notify() kwargs) tuples for plugins with parallelism disabled and another list for plugins with parallelism enabled. """ all_calls = list(self._create_notify_gen(*args, **kwargs)) # Split into sequential and parallel notify() calls. sequential, parallel = [], [] for (server, notify_kwargs) in all_calls: if server.asset.async_mode: parallel.append((server, notify_kwargs)) else: sequential.append((server, notify_kwargs)) return sequential, parallel def _create_notify_gen(self, body, title='', notify_type=common.NotifyType.INFO, body_format=None, tag=common.MATCH_ALL_TAG, match_always=True, attach=None, interpret_escapes=None): """ Internal generator function for _create_notify_calls(). 
""" if len(self) == 0: # Nothing to notify msg = "There are no service(s) to notify" logger.error(msg) raise TypeError(msg) if not (title or body or attach): msg = "No message content specified to deliver" logger.error(msg) raise TypeError(msg) try: if title and isinstance(title, bytes): title = title.decode(self.asset.encoding) if body and isinstance(body, bytes): body = body.decode(self.asset.encoding) except UnicodeDecodeError: msg = 'The content passed into Apprise was not of encoding ' \ 'type: {}'.format(self.asset.encoding) logger.error(msg) raise TypeError(msg) # Tracks conversions conversion_body_map = dict() conversion_title_map = dict() # Prepare attachments if required if attach is not None and not isinstance(attach, AppriseAttachment): attach = AppriseAttachment( attach, asset=self.asset, location=self.location) # Allow Asset default value body_format = self.asset.body_format \ if body_format is None else body_format # Allow Asset default value interpret_escapes = self.asset.interpret_escapes \ if interpret_escapes is None else interpret_escapes # Iterate over our loaded plugins for server in self.find(tag, match_always=match_always): # If our code reaches here, we either did not define a tag (it # was set to None), or we did define a tag and the logic above # determined we need to notify the service it's associated with # First we need to generate a key we will use to determine if we # need to build our data out. Entries without are merged with # the body at this stage. key = server.notify_format if server.title_maxlen > 0\ else f'_{server.notify_format}' if server.interpret_emojis: # alter our key slightly to handle emojis since their value is # pulled out of the notification key += "-emojis" if key not in conversion_title_map: # Prepare our title conversion_title_map[key] = '' if not title else title # Conversion of title only occurs for services where the title # is blended with the body (title_maxlen <= 0) if conversion_title_map[key] and server.title_maxlen <= 0: conversion_title_map[key] = convert_between( body_format, server.notify_format, content=conversion_title_map[key]) # Our body is always converted no matter what conversion_body_map[key] = \ convert_between( body_format, server.notify_format, content=body) if interpret_escapes: # # Escape our content # try: # Added overhead required due to Python 3 Encoding Bug # identified here: https://bugs.python.org/issue21331 conversion_body_map[key] = \ conversion_body_map[key]\ .encode('ascii', 'backslashreplace')\ .decode('unicode-escape') conversion_title_map[key] = \ conversion_title_map[key]\ .encode('ascii', 'backslashreplace')\ .decode('unicode-escape') except AttributeError: # Must be of string type msg = 'Failed to escape message body' logger.error(msg) raise TypeError(msg) if server.interpret_emojis: # # Convert our :emoji: definitions # conversion_body_map[key] = \ apply_emojis(conversion_body_map[key]) conversion_title_map[key] = \ apply_emojis(conversion_title_map[key]) kwargs = dict( body=conversion_body_map[key], title=conversion_title_map[key], notify_type=notify_type, attach=attach, body_format=body_format ) yield (server, kwargs) @staticmethod def _notify_sequential(*servers_kwargs): """ Process a list of notify() calls sequentially and synchronously. """ success = True for (server, kwargs) in servers_kwargs: try: # Send notification result = server.notify(**kwargs) success = success and result except TypeError: # These are our internally thrown notifications. 
success = False except Exception: # A catch all so we don't have to abort early # just because one of our plugins has a bug in it. logger.exception("Unhandled Notification Exception") success = False return success @staticmethod def _notify_parallel_threadpool(*servers_kwargs): """ Process a list of notify() calls in parallel and synchronously. """ n_calls = len(servers_kwargs) # 0-length case if n_calls == 0: return True # There's no need to use a thread pool for just a single notification if n_calls == 1: return Apprise._notify_sequential(servers_kwargs[0]) # Create log entry logger.info( 'Notifying %d service(s) with threads.', len(servers_kwargs)) with cf.ThreadPoolExecutor() as executor: success = True futures = [executor.submit(server.notify, **kwargs) for (server, kwargs) in servers_kwargs] for future in cf.as_completed(futures): try: result = future.result() success = success and result except TypeError: # These are our internally thrown notifications. success = False except Exception: # A catch all so we don't have to abort early # just because one of our plugins has a bug in it. logger.exception("Unhandled Notification Exception") success = False return success @staticmethod async def _notify_parallel_asyncio(*servers_kwargs): """ Process a list of async_notify() calls in parallel and asynchronously. """ n_calls = len(servers_kwargs) # 0-length case if n_calls == 0: return True # (Unlike with the thread pool, we don't optimize for the single- # notification case because asyncio can do useful work while waiting # for that thread to complete) # Create log entry logger.info( 'Notifying %d service(s) asynchronously.', len(servers_kwargs)) async def do_call(server, kwargs): return await server.async_notify(**kwargs) cors = (do_call(server, kwargs) for (server, kwargs) in servers_kwargs) results = await asyncio.gather(*cors, return_exceptions=True) if any(isinstance(status, Exception) and not isinstance(status, TypeError) for status in results): # A catch all so we don't have to abort early just because # one of our plugins has a bug in it. logger.exception("Unhandled Notification Exception") return False if any(isinstance(status, TypeError) for status in results): # These are our internally thrown notifications. return False return all(results) def details(self, lang=None, show_requirements=False, show_disabled=False): """ Returns the details associated with the Apprise object """ # general object returned response = { # Defines the current version of Apprise 'version': __version__, # Lists all of the currently supported Notifications 'schemas': [], # Includes the configured asset details 'asset': self.asset.details(), } for plugin in N_MGR.plugins(): # Iterate over our hashed plugins and dynamically build details on # their status: content = { 'service_name': getattr(plugin, 'service_name', None), 'service_url': getattr(plugin, 'service_url', None), 'setup_url': getattr(plugin, 'setup_url', None), # Placeholder - populated below 'details': None, # Let upstream service know of the plugins that support # attachments 'attachment_support': getattr( plugin, 'attachment_support', False), # Differentiat between what is a custom loaded plugin and # which is native. 
'category': getattr(plugin, 'category', None) } # Standard protocol(s) should be None or a tuple enabled = getattr(plugin, 'enabled', True) if not show_disabled and not enabled: # Do not show inactive plugins continue elif show_disabled: # Add current state to response content['enabled'] = enabled # Standard protocol(s) should be None or a tuple protocols = getattr(plugin, 'protocol', None) if isinstance(protocols, str): protocols = (protocols, ) # Secure protocol(s) should be None or a tuple secure_protocols = getattr(plugin, 'secure_protocol', None) if isinstance(secure_protocols, str): secure_protocols = (secure_protocols, ) # Add our protocol details to our content content.update({ 'protocols': protocols, 'secure_protocols': secure_protocols, }) if not lang: # Simply return our results content['details'] = plugins.details(plugin) if show_requirements: content['requirements'] = plugins.requirements(plugin) else: # Emulate the specified language when returning our results with self.locale.lang_at(lang): content['details'] = plugins.details(plugin) if show_requirements: content['requirements'] = plugins.requirements(plugin) # Build our response object response['schemas'].append(content) return response def urls(self, privacy=False): """ Returns all of the loaded URLs defined in this apprise object. """ urls = [] for s in self.servers: if isinstance(s, (ConfigBase, AppriseConfig)): for _s in s.servers(): urls.append(_s.url(privacy=privacy)) else: urls.append(s.url(privacy=privacy)) return urls def pop(self, index): """ Removes an indexed Notification Service from the stack and returns it. The thing is we can never pop AppriseConfig() entries, only what was loaded within them. So pop needs to carefully iterate over our list and only track actual entries. 
""" # Tracking variables prev_offset = -1 offset = prev_offset for idx, s in enumerate(self.servers): if isinstance(s, (ConfigBase, AppriseConfig)): servers = s.servers() if len(servers) > 0: # Acquire a new maximum offset to work with offset = prev_offset + len(servers) if offset >= index: # we can pop an element from our config stack fn = s.pop if isinstance(s, ConfigBase) \ else s.server_pop return fn(index if prev_offset == -1 else (index - prev_offset - 1)) else: offset = prev_offset + 1 if offset == index: return self.servers.pop(idx) # Update our old offset prev_offset = offset # If we reach here, then we indexed out of range raise IndexError('list index out of range') def __getitem__(self, index): """ Returns the indexed server entry of a loaded notification server """ # Tracking variables prev_offset = -1 offset = prev_offset for idx, s in enumerate(self.servers): if isinstance(s, (ConfigBase, AppriseConfig)): # Get our list of servers associate with our config object servers = s.servers() if len(servers) > 0: # Acquire a new maximum offset to work with offset = prev_offset + len(servers) if offset >= index: return servers[index if prev_offset == -1 else (index - prev_offset - 1)] else: offset = prev_offset + 1 if offset == index: return self.servers[idx] # Update our old offset prev_offset = offset # If we reach here, then we indexed out of range raise IndexError('list index out of range') def __getstate__(self): """ Pickle Support dumps() """ attributes = { 'asset': self.asset, # Prepare our URL list as we need to extract the associated tags # and asset details associated with it 'urls': [{ 'url': server.url(privacy=False), 'tag': server.tags if server.tags else None, 'asset': server.asset} for server in self.servers], 'locale': self.locale, 'debug': self.debug, 'location': self.location, } return attributes def __setstate__(self, state): """ Pickle Support loads() """ self.servers = list() self.asset = state['asset'] self.locale = state['locale'] self.location = state['location'] for entry in state['urls']: self.add(entry['url'], asset=entry['asset'], tag=entry['tag']) def __bool__(self): """ Allows the Apprise object to be wrapped in an 'if statement'. True is returned if at least one service has been loaded. """ return len(self) > 0 def __iter__(self): """ Returns an iterator to each of our servers loaded. This includes those found inside configuration. """ return chain(*[[s] if not isinstance(s, (ConfigBase, AppriseConfig)) else iter(s.servers()) for s in self.servers]) def __len__(self): """ Returns the number of servers loaded; this includes those found within loaded configuration. This funtion nnever actually counts the Config entry themselves (if they exist), only what they contain. """ return sum([1 if not isinstance(s, (ConfigBase, AppriseConfig)) else len(s.servers()) for s in self.servers]) apprise-1.9.3/apprise/apprise.pyi000066400000000000000000000042331477231770000170320ustar00rootroot00000000000000from typing import Any, Dict, List, Iterable, Iterator, Optional from . 
import (AppriseAsset, AppriseAttachment, AppriseConfig, ConfigBase, NotifyBase, NotifyFormat, NotifyType) from .common import ContentLocation _Server = Union[str, ConfigBase, NotifyBase, AppriseConfig] _Servers = Union[_Server, Dict[Any, _Server], Iterable[_Server]] # Can't define this recursively as mypy doesn't support recursive types: # https://github.com/python/mypy/issues/731 _Tag = Union[str, Iterable[Union[str, Iterable[str]]]] class Apprise: def __init__( self, servers: _Servers = ..., asset: Optional[AppriseAsset] = ..., location: Optional[ContentLocation] = ..., debug: bool = ... ) -> None: ... @staticmethod def instantiate( url: Union[str, Dict[str, NotifyBase]], asset: Optional[AppriseAsset] = ..., tag: Optional[_Tag] = ..., suppress_exceptions: bool = ... ) -> NotifyBase: ... def add( self, servers: _Servers = ..., asset: Optional[AppriseAsset] = ..., tag: Optional[_Tag] = ... ) -> bool: ... def clear(self) -> None: ... def find(self, tag: str = ...) -> Iterator[Apprise]: ... def notify( self, body: str, title: str = ..., notify_type: NotifyType = ..., body_format: NotifyFormat = ..., tag: _Tag = ..., attach: Optional[AppriseAttachment] = ..., interpret_escapes: Optional[bool] = ... ) -> bool: ... async def async_notify( self, body: str, title: str = ..., notify_type: NotifyType = ..., body_format: NotifyFormat = ..., tag: _Tag = ..., attach: Optional[AppriseAttachment] = ..., interpret_escapes: Optional[bool] = ... ) -> bool: ... def details(self, lang: Optional[str] = ...) -> Dict[str, Any]: ... def urls(self, privacy: bool = ...) -> Iterable[str]: ... def pop(self, index: int) -> ConfigBase: ... def __getitem__(self, index: int) -> ConfigBase: ... def __bool__(self) -> bool: ... def __iter__(self) -> Iterator[ConfigBase]: ... def __len__(self) -> int: ...apprise-1.9.3/apprise/apprise_attachment.py000066400000000000000000000301131477231770000210650ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. from . 
import URLBase from .attachment.base import AttachBase from .asset import AppriseAsset from .manager_attachment import AttachmentManager from .logger import logger from .common import ContentLocation from .common import CONTENT_LOCATIONS from .utils.parse import GET_SCHEMA_RE # Grant access to our Notification Manager Singleton A_MGR = AttachmentManager() class AppriseAttachment: """ Our Apprise Attachment File Manager """ def __init__(self, paths=None, asset=None, cache=True, location=None, **kwargs): """ Loads all of the paths/urls specified (if any). The path can either be a single string identifying one explicit location, otherwise you can pass in a series of locations to scan via a list. By default we cache our responses so that subsiquent calls does not cause the content to be retrieved again. For local file references this makes no difference at all. But for remote content, this does mean more then one call can be made to retrieve the (same) data. This method can be somewhat inefficient if disabled. Only disable caching if you understand the consequences. You can alternatively set the cache value to an int identifying the number of seconds the previously retrieved can exist for before it should be considered expired. It's also worth nothing that the cache value is only set to elements that are not already of subclass AttachBase() Optionally set your current ContentLocation in the location argument. This is used to further handle attachments. The rules are as follows: - INACCESSIBLE: You simply have disabled use of the object; no attachments will be retrieved/handled. - HOSTED: You are hosting an attachment service for others. In these circumstances all attachments that are LOCAL based (such as file://) will not be allowed. - LOCAL: The least restrictive mode as local files can be referenced in addition to hosted. In all both HOSTED and LOCAL modes, INACCESSIBLE attachment types will continue to be inaccessible. However if you set this field (location) to None (it's default value) the attachment location category will not be tested in any way (all attachment types will be allowed). The location field is also a global option that can be set when initializing the Apprise object. """ # Initialize our attachment listings self.attachments = list() # Set our cache flag self.cache = cache # Prepare our Asset Object self.asset = \ asset if isinstance(asset, AppriseAsset) else AppriseAsset() if location is not None and location not in CONTENT_LOCATIONS: msg = "An invalid Attachment location ({}) was specified." \ .format(location) logger.warning(msg) raise TypeError(msg) # Store our location self.location = location # Now parse any paths specified if paths is not None: # Store our path(s) if not self.add(paths): # Parse Source domain based on from_addr raise TypeError("One or more attachments could not be added.") def add(self, attachments, asset=None, cache=None): """ Adds one or more attachments into our list. By default we cache our responses so that subsiquent calls does not cause the content to be retrieved again. For local file references this makes no difference at all. But for remote content, this does mean more then one call can be made to retrieve the (same) data. This method can be somewhat inefficient if disabled. Only disable caching if you understand the consequences. You can alternatively set the cache value to an int identifying the number of seconds the previously retrieved can exist for before it should be considered expired. 
It's also worth nothing that the cache value is only set to elements that are not already of subclass AttachBase() """ # Initialize our return status return_status = True # Initialize our default cache value cache = cache if cache is not None else self.cache if asset is None: # prepare default asset asset = self.asset if isinstance(attachments, (AttachBase, str)): # store our instance attachments = (attachments, ) elif not isinstance(attachments, (tuple, set, list)): logger.error( 'An invalid attachment url (type={}) was ' 'specified.'.format(type(attachments))) return False # Iterate over our attachments for _attachment in attachments: if self.location == ContentLocation.INACCESSIBLE: logger.warning( "Attachments are disabled; ignoring {}" .format(_attachment)) return_status = False continue if isinstance(_attachment, str): logger.debug("Loading attachment: {}".format(_attachment)) # Instantiate ourselves an object, this function throws or # returns None if it fails instance = AppriseAttachment.instantiate( _attachment, asset=asset, cache=cache) if not isinstance(instance, AttachBase): return_status = False continue elif isinstance(_attachment, AppriseAttachment): # We were provided a list of Apprise Attachments # append our content together instance = _attachment.attachments elif not isinstance(_attachment, AttachBase): logger.warning( "An invalid attachment (type={}) was specified.".format( type(_attachment))) return_status = False continue else: # our entry is of type AttachBase, so just go ahead and point # our instance to it for some post processing below instance = _attachment # Apply some simple logic if our location flag is set if self.location and (( self.location == ContentLocation.HOSTED and instance.location != ContentLocation.HOSTED) or instance.location == ContentLocation.INACCESSIBLE): logger.warning( "Attachment was disallowed due to accessibility " "restrictions ({}->{}): {}".format( self.location, instance.location, instance.url(privacy=True))) return_status = False continue # Add our initialized plugin to our server listings if isinstance(instance, list): self.attachments.extend(instance) else: self.attachments.append(instance) # Return our status return return_status @staticmethod def instantiate(url, asset=None, cache=None, suppress_exceptions=True): """ Returns the instance of a instantiated attachment plugin based on the provided Attachment URL. If the url fails to be parsed, then None is returned. A specified cache value will over-ride anything set """ # Attempt to acquire the schema at the very least to allow our # attachment based urls. 
schema = GET_SCHEMA_RE.match(url) if schema is None: # Plan B is to assume we're dealing with a file schema = 'file' url = '{}://{}'.format(schema, URLBase.quote(url)) else: # Ensure our schema is always in lower case schema = schema.group('schema').lower() # Some basic validation if schema not in A_MGR: logger.warning('Unsupported schema {}.'.format(schema)) return None # Parse our url details of the server object as dictionary containing # all of the information parsed from our URL results = A_MGR[schema].parse_url(url) if not results: # Failed to parse the server URL logger.warning('Unparseable URL {}.'.format(url)) return None # Prepare our Asset Object results['asset'] = \ asset if isinstance(asset, AppriseAsset) else AppriseAsset() if cache is not None: # Force an over-ride of the cache value to what we have specified results['cache'] = cache if suppress_exceptions: try: # Attempt to create an instance of our plugin using the parsed # URL information attach_plugin = A_MGR[results['schema']](**results) except Exception: # the arguments are invalid or can not be used. logger.warning('Could not load URL: %s' % url) return None else: # Attempt to create an instance of our plugin using the parsed # URL information but don't wrap it in a try catch attach_plugin = A_MGR[results['schema']](**results) return attach_plugin def clear(self): """ Empties our attachment list """ self.attachments[:] = [] def size(self): """ Returns the total size of accumulated attachments """ return sum([len(a) for a in self.attachments if len(a) > 0]) def pop(self, index=-1): """ Removes an indexed Apprise Attachment from the stack and returns it. by default the last element is poped from the list """ # Remove our entry return self.attachments.pop(index) def __getitem__(self, index): """ Returns the indexed entry of a loaded apprise attachments """ return self.attachments[index] def __bool__(self): """ Allows the Apprise object to be wrapped in an 'if statement'. True is returned if at least one service has been loaded. """ return True if self.attachments else False def __iter__(self): """ Returns an iterator to our attachment list """ return iter(self.attachments) def __len__(self): """ Returns the number of attachment entries loaded """ return len(self.attachments) apprise-1.9.3/apprise/apprise_attachment.pyi000066400000000000000000000021711477231770000212410ustar00rootroot00000000000000from typing import Any, Iterable, Optional, Union from . import AppriseAsset, ContentLocation from .attachment import AttachBase _Attachment = Union[str, AttachBase] _Attachments = Iterable[_Attachment] class AppriseAttachment: def __init__( self, paths: Optional[_Attachments] = ..., asset: Optional[AppriseAttachment] = ..., cache: bool = ..., location: Optional[ContentLocation] = ..., **kwargs: Any ) -> None: ... def add( self, attachments: _Attachments, asset: Optional[AppriseAttachment] = ..., cache: Optional[bool] = ... ) -> bool: ... @staticmethod def instantiate( url: str, asset: Optional[AppriseAsset] = ..., cache: Optional[bool] = ..., suppress_exceptions: bool = ... ) -> NotifyBase: ... def clear(self) -> None: ... def size(self) -> int: ... def pop(self, index: int = ...) -> AttachBase: ... def __getitem__(self, index: int) -> AttachBase: ... def __bool__(self) -> bool: ... def __iter__(self) -> Iterator[AttachBase]: ... 
def __len__(self) -> int: ...apprise-1.9.3/apprise/apprise_config.py000066400000000000000000000407721477231770000202160ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. from . import ConfigBase from . import CONFIG_FORMATS from .manager_config import ConfigurationManager from . import URLBase from .asset import AppriseAsset from . import common from .utils.parse import GET_SCHEMA_RE, parse_list from .utils.logic import is_exclusive_match from .logger import logger # Grant access to our Configuration Manager Singleton C_MGR = ConfigurationManager() class AppriseConfig: """ Our Apprise Configuration File Manager - Supports a list of URLs defined one after another (text format) - Supports a destinct YAML configuration format """ def __init__(self, paths=None, asset=None, cache=True, recursion=0, insecure_includes=False, **kwargs): """ Loads all of the paths specified (if any). The path can either be a single string identifying one explicit location, otherwise you can pass in a series of locations to scan via a list. If no path is specified then a default list is used. By default we cache our responses so that subsiquent calls does not cause the content to be retrieved again. Setting this to False does mean more then one call can be made to retrieve the (same) data. This method can be somewhat inefficient if disabled and you're set up to make remote calls. Only disable caching if you understand the consequences. You can alternatively set the cache value to an int identifying the number of seconds the previously retrieved can exist for before it should be considered expired. It's also worth nothing that the cache value is only set to elements that are not already of subclass ConfigBase() recursion defines how deep we recursively handle entries that use the `import` keyword. This keyword requires us to fetch more configuration from another source and add it to our existing compilation. If the file we remotely retrieve also has an `import` reference, we will only advance through it if recursion is set to 2 deep. If set to zero it is off. There is no limit to how high you set this value. 
It would be recommended to keep it low if you do intend to use it. insecure includes by default are disabled. When set to True, all Apprise Config files marked to be in STRICT mode are treated as being in ALWAYS mode. Take a file:// based configuration for example, only a file:// based configuration can import another file:// based one. because it is set to STRICT mode. If an http:// based configuration file attempted to import a file:// one it woul fail. However this import would be possible if insecure_includes is set to True. There are cases where a self hosting apprise developer may wish to load configuration from memory (in a string format) that contains import entries (even file:// based ones). In these circumstances if you want these includes to be honored, this value must be set to True. """ # Initialize a server list of URLs self.configs = list() # Prepare our Asset Object self.asset = \ asset if isinstance(asset, AppriseAsset) else AppriseAsset() # Set our cache flag self.cache = cache # Initialize our recursion value self.recursion = recursion # Initialize our insecure_includes flag self.insecure_includes = insecure_includes if paths is not None: # Store our path(s) self.add(paths) return def add(self, configs, asset=None, tag=None, cache=True, recursion=None, insecure_includes=None): """ Adds one or more config URLs into our list. You can override the global asset if you wish by including it with the config(s) that you add. By default we cache our responses so that subsiquent calls does not cause the content to be retrieved again. Setting this to False does mean more then one call can be made to retrieve the (same) data. This method can be somewhat inefficient if disabled and you're set up to make remote calls. Only disable caching if you understand the consequences. You can alternatively set the cache value to an int identifying the number of seconds the previously retrieved can exist for before it should be considered expired. It's also worth nothing that the cache value is only set to elements that are not already of subclass ConfigBase() Optionally override the default recursion value. Optionally override the insecure_includes flag. if insecure_includes is set to True then all plugins that are set to a STRICT mode will be a treated as ALWAYS. 
""" # Initialize our return status return_status = True # Initialize our default cache value cache = cache if cache is not None else self.cache # Initialize our default recursion value recursion = recursion if recursion is not None else self.recursion # Initialize our default insecure_includes value insecure_includes = \ insecure_includes if insecure_includes is not None \ else self.insecure_includes if asset is None: # prepare default asset asset = self.asset if isinstance(configs, ConfigBase): # Go ahead and just add our configuration into our list self.configs.append(configs) return True elif isinstance(configs, str): # Save our path configs = (configs, ) elif not isinstance(configs, (tuple, set, list)): logger.error( 'An invalid configuration path (type={}) was ' 'specified.'.format(type(configs))) return False # Iterate over our configuration for _config in configs: if isinstance(_config, ConfigBase): # Go ahead and just add our configuration into our list self.configs.append(_config) continue elif not isinstance(_config, str): logger.warning( "An invalid configuration (type={}) was specified.".format( type(_config))) return_status = False continue logger.debug("Loading configuration: {}".format(_config)) # Instantiate ourselves an object, this function throws or # returns None if it fails instance = AppriseConfig.instantiate( _config, asset=asset, tag=tag, cache=cache, recursion=recursion, insecure_includes=insecure_includes) if not isinstance(instance, ConfigBase): return_status = False continue # Add our initialized plugin to our server listings self.configs.append(instance) # Return our status return return_status def add_config(self, content, asset=None, tag=None, format=None, recursion=None, insecure_includes=None): """ Adds one configuration file in it's raw format. Content gets loaded as a memory based object and only exists for the life of this AppriseConfig object it was loaded into. If you know the format ('yaml' or 'text') you can specify it for slightly less overhead during this call. Otherwise the configuration is auto-detected. Optionally override the default recursion value. Optionally override the insecure_includes flag. if insecure_includes is set to True then all plugins that are set to a STRICT mode will be a treated as ALWAYS. """ # Initialize our default recursion value recursion = recursion if recursion is not None else self.recursion # Initialize our default insecure_includes value insecure_includes = \ insecure_includes if insecure_includes is not None \ else self.insecure_includes if asset is None: # prepare default asset asset = self.asset if not isinstance(content, str): logger.warning( "An invalid configuration (type={}) was specified.".format( type(content))) return False logger.debug("Loading raw configuration: {}".format(content)) # Create ourselves a ConfigMemory Object to store our configuration instance = C_MGR['memory']( content=content, format=format, asset=asset, tag=tag, recursion=recursion, insecure_includes=insecure_includes) if instance.config_format not in CONFIG_FORMATS: logger.warning( "The format of the configuration could not be deteced.") return False # Add our initialized plugin to our server listings self.configs.append(instance) # Return our status return True def servers(self, tag=common.MATCH_ALL_TAG, match_always=True, *args, **kwargs): """ Returns all of our servers dynamically build based on parsed configuration. 
If a tag is specified, it applies to the configuration sources themselves and not the notification services inside them. This is for filtering the configuration files polled for results. If the anytag is set, then any notification that is found set with that tag are included in the response. """ # A match_always flag allows us to pick up on our 'any' keyword # and notify these services under all circumstances match_always = common.MATCH_ALWAYS_TAG if match_always else None # Build our tag setup # - top level entries are treated as an 'or' # - second level (or more) entries are treated as 'and' # # examples: # tag="tagA, tagB" = tagA or tagB # tag=['tagA', 'tagB'] = tagA or tagB # tag=[('tagA', 'tagC'), 'tagB'] = (tagA and tagC) or tagB # tag=[('tagB', 'tagC')] = tagB and tagC response = list() for entry in self.configs: # Apply our tag matching based on our defined logic if is_exclusive_match( logic=tag, data=entry.tags, match_all=common.MATCH_ALL_TAG, match_always=match_always): # Build ourselves a list of services dynamically and return the # as a list response.extend(entry.servers()) return response @staticmethod def instantiate(url, asset=None, tag=None, cache=None, recursion=0, insecure_includes=False, suppress_exceptions=True): """ Returns the instance of a instantiated configuration plugin based on the provided Config URL. If the url fails to be parsed, then None is returned. """ # Attempt to acquire the schema at the very least to allow our # configuration based urls. schema = GET_SCHEMA_RE.match(url) if schema is None: # Plan B is to assume we're dealing with a file schema = 'file' url = '{}://{}'.format(schema, URLBase.quote(url)) else: # Ensure our schema is always in lower case schema = schema.group('schema').lower() # Some basic validation if schema not in C_MGR: logger.warning('Unsupported schema {}.'.format(schema)) return None # Parse our url details of the server object as dictionary containing # all of the information parsed from our URL results = C_MGR[schema].parse_url(url) if not results: # Failed to parse the server URL logger.warning('Unparseable URL {}.'.format(url)) return None # Build a list of tags to associate with the newly added notifications results['tag'] = set(parse_list(tag)) # Prepare our Asset Object results['asset'] = \ asset if isinstance(asset, AppriseAsset) else AppriseAsset() if cache is not None: # Force an over-ride of the cache value to what we have specified results['cache'] = cache # Recursion can never be parsed from the URL results['recursion'] = recursion # Insecure includes flag can never be parsed from the URL results['insecure_includes'] = insecure_includes if suppress_exceptions: try: # Attempt to create an instance of our plugin using the parsed # URL information cfg_plugin = C_MGR[results['schema']](**results) except Exception: # the arguments are invalid or can not be used. 
logger.warning('Could not load URL: %s' % url) return None else: # Attempt to create an instance of our plugin using the parsed # URL information but don't wrap it in a try catch cfg_plugin = C_MGR[results['schema']](**results) return cfg_plugin def clear(self): """ Empties our configuration list """ self.configs[:] = [] def server_pop(self, index): """ Removes an indexed Apprise Notification from the servers """ # Tracking variables prev_offset = -1 offset = prev_offset for entry in self.configs: servers = entry.servers(cache=True) if len(servers) > 0: # Acquire a new maximum offset to work with offset = prev_offset + len(servers) if offset >= index: # we can pop an notification from our config stack return entry.pop(index if prev_offset == -1 else (index - prev_offset - 1)) # Update our old offset prev_offset = offset # If we reach here, then we indexed out of range raise IndexError('list index out of range') def pop(self, index=-1): """ Removes an indexed Apprise Configuration from the stack and returns it. By default, the last element is removed from the list """ # Remove our entry return self.configs.pop(index) def __getitem__(self, index): """ Returns the indexed config entry of a loaded apprise configuration """ return self.configs[index] def __bool__(self): """ Allows the Apprise object to be wrapped in an 'if statement'. True is returned if at least one service has been loaded. """ return True if self.configs else False def __iter__(self): """ Returns an iterator to our config list """ return iter(self.configs) def __len__(self): """ Returns the number of config entries loaded """ return len(self.configs) apprise-1.9.3/apprise/apprise_config.pyi000066400000000000000000000030401477231770000203520ustar00rootroot00000000000000from typing import Any, Iterable, Iterator, List, Optional, Union from . import AppriseAsset, NotifyBase from .config import ConfigBase _Configs = Union[ConfigBase, str, Iterable[str]] class AppriseConfig: def __init__( self, paths: Optional[_Configs] = ..., asset: Optional[AppriseAsset] = ..., cache: bool = ..., recursion: int = ..., insecure_includes: bool = ..., **kwargs: Any ) -> None: ... def add( self, configs: _Configs, asset: Optional[AppriseAsset] = ..., cache: bool = ..., recursion: Optional[bool] = ..., insecure_includes: Optional[bool] = ... ) -> bool: ... def add_config( self, content: str, asset: Optional[AppriseAsset] = ..., tag: Optional[str] = ..., format: Optional[str] = ..., recursion: Optional[int] = ..., insecure_includes: Optional[bool] = ... ) -> bool: ... def servers(self, tag: str = ..., *args: Any, **kwargs: Any) -> List[ConfigBase]: ... def instantiate( url: str, asset: Optional[AppriseAsset] = ..., tag: Optional[str] = ..., cache: Optional[bool] = ... ) -> NotifyBase: ... def clear(self) -> None: ... def server_pop(self, index: int) -> ConfigBase: ... def pop(self, index: int = ...) -> ConfigBase: ... def __getitem__(self, index: int) -> ConfigBase: ... def __bool__(self) -> bool: ... def __iter__(self) -> Iterator[ConfigBase]: ... def __len__(self) -> int: ...apprise-1.9.3/apprise/asset.py000066400000000000000000000353771477231770000163520ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. 
Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import re from uuid import uuid4 from os.path import join from os.path import dirname from os.path import isfile from os.path import abspath from .common import NotifyType from .common import PersistentStoreMode from .manager_plugins import NotificationManager # Grant access to our Notification Manager Singleton N_MGR = NotificationManager() class AppriseAsset: """ Provides a supplimentary class that can be used to provide extra information and details that can be used by Apprise such as providing an alternate location to where images/icons can be found and the URL masks. Any variable that starts with an underscore (_) can only be initialized by this class manually and will/can not be parsed from a configuration file. """ # Application Identifier app_id = 'Apprise' # Application Description app_desc = 'Apprise Notifications' # Provider URL app_url = 'https://github.com/caronc/apprise' # A Simple Mapping of Colors; For every NOTIFY_TYPE identified, # there should be a mapping to it's color here: html_notify_map = { NotifyType.INFO: '#3AA3E3', NotifyType.SUCCESS: '#3AA337', NotifyType.FAILURE: '#A32037', NotifyType.WARNING: '#CACF29', } # The default color to return if a mapping isn't found in our table above default_html_color = '#888888' # Ascii Notification ascii_notify_map = { NotifyType.INFO: '[i]', NotifyType.SUCCESS: '[+]', NotifyType.FAILURE: '[!]', NotifyType.WARNING: '[~]', } # The default ascii to return if a mapping isn't found in our table above default_ascii_chars = '[?]' # The default image extension to use default_extension = '.png' # The default theme theme = 'default' # Image URL Mask image_url_mask = \ 'https://github.com/caronc/apprise/raw/master/apprise/assets/' \ 'themes/{THEME}/apprise-{TYPE}-{XY}{EXTENSION}' # Application Logo image_url_logo = \ 'https://github.com/caronc/apprise/raw/master/apprise/assets/' \ 'themes/{THEME}/apprise-logo.png' # Image Path Mask image_path_mask = abspath(join( dirname(__file__), 'assets', 'themes', '{THEME}', 'apprise-{TYPE}-{XY}{EXTENSION}', )) # This value can also be set on calls to Apprise.notify(). This allows # you to let Apprise upfront the type of data being passed in. This # must be of type NotifyFormat. Possible values could be: # - NotifyFormat.TEXT # - NotifyFormat.MARKDOWN # - NotifyFormat.HTML # - None # # If no format is specified (hence None), then no special pre-formatting # actions will take place during a notification. 
This has been and always # will be the default. body_format = None # Always attempt to send notifications asynchronous (as the same time # if possible) # This is a Python 3 supported option only. If set to False, then # notifications are sent sequentially (one after another) async_mode = True # Support :smile:, and other alike keywords swapping them for their # unicode value. A value of None leaves the interpretation up to the # end user to control (allowing them to specify emojis=yes on the # URL) interpret_emojis = None # Whether or not to interpret escapes found within the input text prior # to passing it upstream. Such as converting \t to an actual tab and \n # to a new line. interpret_escapes = False # Defines the encoding of the content passed into Apprise encoding = 'utf-8' # Automatically generate our Pretty Good Privacy (PGP) keys if one isn't # present and our environment configuration allows for it. # For example, a case where the environment wouldn't allow for it would be # if Persistent Storage was set to `memory` pgp_autogen = True # For more detail see CWE-312 @ # https://cwe.mitre.org/data/definitions/312.html # # By enabling this, the logging output has additional overhead applied to # it preventing secure password and secret information from being # displayed in the logging. Since there is overhead involved in performing # this cleanup; system owners who run in a very isolated environment may # choose to disable this for a slight performance bump. It is recommended # that you leave this option as is otherwise. secure_logging = True # Optionally specify one or more path to attempt to scan for Python modules # By default, no paths are scanned. __plugin_paths = [] # Optionally set the location of the persistent storage # By default there is no path and thus persistent storage is not used __storage_path = None # Optionally define the default salt to apply to all persistent storage # namespace generation (unless over-ridden) __storage_salt = b'' # Optionally define the namespace length of the directories created by # the storage. If this is set to zero, then the length is pre-determined # by the generator (sha1, md5, sha256, etc) __storage_idlen = 8 # Set storage to auto __storage_mode = PersistentStoreMode.AUTO # All internal/system flags are prefixed with an underscore (_) # These can only be initialized using Python libraries and are not picked # up from (yaml) configuration files (if set) # An internal counter that is used by AppriseAPI # (https://github.com/caronc/apprise-api). The idea is to allow one # instance of AppriseAPI to call another, but to track how many times # this occurs. 
Its intent is to prevent a loop where an AppriseAPI # Server calls itself (or loops indefinitely) _recursion = 0 # A unique identifier we can use to associate our calling source _uid = str(uuid4()) def __init__(self, plugin_paths=None, storage_path=None, storage_mode=None, storage_salt=None, storage_idlen=None, **kwargs): """ Asset Initialization """ # Assign default arguments if specified for key, value in kwargs.items(): if not hasattr(AppriseAsset, key): raise AttributeError( 'AppriseAsset init(): ' 'An invalid key {} was specified.'.format(key)) setattr(self, key, value) if plugin_paths: # Load any decorated modules if defined self.__plugin_paths = plugin_paths N_MGR.module_detection(plugin_paths) if storage_path: # Define our persistent storage path self.__storage_path = storage_path if storage_mode: # Define how our persistent storage behaves self.__storage_mode = storage_mode if isinstance(storage_idlen, int): # Define the number of characters utilized for our namespace length if storage_idlen < 0: # Unsupported value raise ValueError( 'AppriseAsset storage_idlen(): Value must ' 'be an integer and >= 0') # Store value self.__storage_idlen = storage_idlen if storage_salt is not None: # Define the salt to apply to our persistent storage namespace generation if isinstance(storage_salt, bytes): self.__storage_salt = storage_salt elif isinstance(storage_salt, str): try: self.__storage_salt = storage_salt.encode(self.encoding) except UnicodeEncodeError: # Bad data; don't pass it along raise ValueError( 'AppriseAsset storage_salt(): ' 'Value provided could not be encoded') else: # Unsupported raise ValueError( 'AppriseAsset storage_salt(): Value provided must be ' 'a string or bytes object') def color(self, notify_type, color_type=None): """ Returns an HTML mapped color based on the passed in notify type if color_type is: None then a standard hex string is returned as a string format ('#000000').
int then the integer representation is returned tuple then the red, green, blue values are returned in a tuple """ # Attempt to get the type, otherwise return a default grey # if we couldn't look up the entry color = self.html_notify_map.get(notify_type, self.default_html_color) if color_type is None: # This is the default return type return color elif color_type is int: # Convert the color to integer return AppriseAsset.hex_to_int(color) # The only other type is tuple elif color_type is tuple: return AppriseAsset.hex_to_rgb(color) # Unsupported type raise ValueError( 'AppriseAsset html_color(): An invalid color_type was specified.') def ascii(self, notify_type): """ Returns an ascii representation based on the passed in notify type """ # look our response up return self.ascii_notify_map.get(notify_type, self.default_ascii_chars) def image_url(self, notify_type, image_size, logo=False, extension=None): """ Apply our mask to our image URL if logo is set to True, then the logo_url is used instead """ url_mask = self.image_url_logo if logo else self.image_url_mask if not url_mask: # No image to return return None if extension is None: extension = self.default_extension re_map = { '{THEME}': self.theme if self.theme else '', '{TYPE}': notify_type, '{XY}': image_size, '{EXTENSION}': extension, } # Iterate over above list and store content accordingly re_table = re.compile( r'(' + '|'.join(re_map.keys()) + r')', re.IGNORECASE, ) return re_table.sub(lambda x: re_map[x.group()], url_mask) def image_path(self, notify_type, image_size, must_exist=True, extension=None): """ Apply our mask to our image file path """ if not self.image_path_mask: # No image to return return None if extension is None: extension = self.default_extension re_map = { '{THEME}': self.theme if self.theme else '', '{TYPE}': notify_type, '{XY}': image_size, '{EXTENSION}': extension, } # Iterate over above list and store content accordingly re_table = re.compile( r'(' + '|'.join(re_map.keys()) + r')', re.IGNORECASE, ) # Acquire our path path = re_table.sub(lambda x: re_map[x.group()], self.image_path_mask) if must_exist and not isfile(path): return None # Return what we parsed return path def image_raw(self, notify_type, image_size, extension=None): """ Returns the raw image if it can be read (otherwise the function returns None) """ path = self.image_path( notify_type=notify_type, image_size=image_size, extension=extension, ) if path: try: with open(path, 'rb') as fd: return fd.read() except (OSError, IOError): # We can't access the file return None return None def details(self): """ Returns the details associated with the AppriseAsset object """ return { 'app_id': self.app_id, 'app_desc': self.app_desc, 'default_extension': self.default_extension, 'theme': self.theme, 'image_path_mask': self.image_path_mask, 'image_url_mask': self.image_url_mask, 'image_url_logo': self.image_url_logo, } @staticmethod def hex_to_rgb(value): """ Takes a hex string (such as #00ff00) and returns a tuple in the form of (red, green, blue) eg: #00ff00 becomes : (0, 255, 0) """ value = value.lstrip('#') lv = len(value) return tuple(int(value[i:i + lv // 3], 16) for i in range(0, lv, lv // 3)) @staticmethod def hex_to_int(value): """ Takes a hex string (such as #00ff00) and returns its integer equivalent eg: #00000f becomes : 15 """ return int(value.lstrip('#'), 16) @property def plugin_paths(self): """ Return the plugin paths defined """ return self.__plugin_paths @property def storage_path(self): """ Return the persistent storage path defined """ return
self.__storage_path @property def storage_mode(self): """ Return the persistent storage mode defined """ return self.__storage_mode @property def storage_salt(self): """ Return the provided namespace salt; this is always of type bytes """ return self.__storage_salt @property def storage_idlen(self): """ Return the persistent storage id length """ return self.__storage_idlen apprise-1.9.3/apprise/asset.pyi000066400000000000000000000017231477231770000165070ustar00rootroot00000000000000from typing import Dict, Optional from . import NotifyFormat, NotifyType class AppriseAsset: app_id: str app_desc: str app_url: str html_notify_map: Dict[NotifyType, str] default_html_color: str default_extension: str theme: Optional[str] image_url_mask: str image_url_logo: str image_path_mask: Optional[str] body_format: Optional[NotifyFormat] async_mode: bool interpret_escapes: bool def __init__( self, app_id: str = ..., app_desc: str = ..., app_url: str = ..., html_notify_map: Dict[NotifyType, str] = ..., default_html_color: str = ..., default_extension: str = ..., theme: Optional[str] = ..., image_url_mask: str = ..., image_url_logo: str = ..., image_path_mask: Optional[str] = ..., body_format: Optional[NotifyFormat] = ..., async_mode: bool = ..., interpret_escapes: bool = ... ) -> None: ...apprise-1.9.3/apprise/assets/000077500000000000000000000000001477231770000161445ustar00rootroot00000000000000apprise-1.9.3/apprise/assets/NotifyXML-1.0.xsd000066400000000000000000000017321477231770000210140ustar00rootroot00000000000000 apprise-1.9.3/apprise/assets/NotifyXML-1.1.xsd000066400000000000000000000033361477231770000210170ustar00rootroot00000000000000 apprise-1.9.3/apprise/assets/themes/000077500000000000000000000000001477231770000174315ustar00rootroot00000000000000apprise-1.9.3/apprise/assets/themes/default/000077500000000000000000000000001477231770000210555ustar00rootroot00000000000000apprise-1.9.3/apprise/assets/themes/default/apprise-failure-128x128.ico000066400000000000000000002040761477231770000256050ustar00rootroot00000000000000 (( !8KVdpwwpdVK8! 
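The theme image files introduced above (and summarised in the note that follows) are the assets that AppriseAsset resolves at runtime through its image_url() and image_path() helpers. A hedged, illustrative sketch; the exact URL and path printed depend on your installation and selected theme, and this snippet is not part of the packaged sources:

    from apprise import AppriseAsset, NotifyImageSize, NotifyType

    asset = AppriseAsset(theme='default')

    # Remote URL of the 128x128 'info' icon, built from image_url_mask
    print(asset.image_url(NotifyType.INFO, NotifyImageSize.XY_128))

    # Local path to the same icon; None is returned if the file is missing
    print(asset.image_path(NotifyType.INFO, NotifyImageSize.XY_128))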
[Binary image data omitted. The remaining entries under apprise-1.9.3/apprise/assets/themes/default/ are the packaged notification icons (apprise-failure-128x128.ico, apprise-failure-128x128.png, apprise-failure-256x256.png, apprise-failure-72x72.png, apprise-info-128x128.ico and apprise-info-128x128.png); their raw ICO/PNG byte streams are not reproducible as text.]
q`Ni'k(m)n)q*r+u,v-x.z/|1~346889;<=>AAYɦEA6!Ƽٳذʰ`ǬWǬWǬWǬWǬWǬWǬWǬWǬWǬWǬWǬWǬWǬVƬVƬUƬUƬUƫTŪSŪRŪRŪRŪRĩQĩOèNèNèNèNèMæKæKæK¦I¦I¥H¥HHFECCAA@????>>p,E2)}g&j'l(m)p)q*s,u,v-y/z0=S]aa]N<==@ABñxΪ}zt_ڳڳӿ~ǭXǮYȮYȮYȮYȮYȮYȮYȮYǬWǭXǭXǭXǬWǬWǬWǬWǬVǬVƬUƬUƪSƫSŪRŪRŪRŪRĩPèNèNèNèNèLæKæKæKæK¦H¥H¥H¥GFECCAA@???>>=>4h cRi'j'l(n)p)q*t,u-x._öʾʾʿáĠ̾Q?AABWϫЬѬnhVٳڴѡɰ^ȮYȮYȮYȮYȮYȮYȮYȮYȮYȮYȮYȮYȮYǭXǭXǬWǬWǬWǬWǬWƬUƬUƫTƫTŪSŪRŪRŪRŪQèNèNèNèNèMçLæKæKæJ¦I¥H¥H¥GFDCCA@@???>>}101(~h&i'k(l(n)p*r+v/Vɽ{oiWFC70-%*(!41)UPCɧ}BABCFq̦ЬѭGC7 ڴڳѼvȮYȮYȮYȮYȮZȮYȮYȮZȮYȮYȮYȮYȮYȮYȮYȮYȮYǭXǭXǬWǬWǬWǬVƬUƫTƫTƫSŪRŪRŪRŪQèNèNèNèNçMçLæKæK¦I¦I¥H¥HFFDCBA@????>>B8PgU~h&i'k(m(o)p*>}ǼpjX.,$ ʧƷCAR`UHR̻Χӯԯ{@<2RM@ڴڴ͚ɯ]ȯ[ȯ[ȯ\ȯ\ȯ\ȯ\ȯ\ȯ[ȯ[ȯ[ȯ[Ȯ[ȮZȮZȮYȮYȮYȮYȮYǭXǬWǬWǬWǬWƬWƬUƫTƫSŪRŪRŪRŪQèOèNèNèNçMçLæKæK¦I¦I¥H¥HFEDCA@@???>>8'-%}g&h'j'k(n(o)Lµ(%xdɧʨj@^ǞͨƜƴ|LLb԰հȦu ڵٳйqɰ\ɰ]ɰ]ɰ^ɰ]ɰ^ɰ^ɰ^ɰ]ɰ]ɰ]ɰ]ɰ]ɯ\ɯ[ȯZȯZȮYȮYȮYȮYȮYǭXǭXǬWǬWǬWƬUƬUƫTŪSŪRŪRŪRĩPèNèNèNèLæKæKæK¦I¥H¥H¥GFECCA@@???>>XK9o[!}g&h'j'l(n)CŹ|&$ȧʧ˽HBȡrnhVʨЬɟ¬jORĮhϤձײϫmhV }vbٳ۵˖ɰ^ɰ^ɰ^ɰ^ɰ^ɰ^ɰ^ɰ^ɰ^ɰ^ɰ^ɰ^ɰ^ɰ^ɰ^ɰ^ɰ]ɯ\ȯ\ȯ[ȯYȮYȮYȮYȮYǭXǬWǬWǬWƬVƬUƫTƫSŪRŪRŪRèOèNèNèNçMæKæKæK¦I¥H¥H¥GFDCBA@???>>7+# |f%~g&i'k'm(t2ɧȥhAz2/'pţү‘ZQSѿկײٴӯtâ۶ڳɱ`ɰ^ɰ^ɰ^ɰ^ɰ^ʱ_ʱ_ɰ_ʱ_ɰ_ʱ_ɰ^ɰ^ɰ^ɰ^ɰ^ɰ^ɰ^ɰ]ɯ\ȯ[Ȯ[ȮYȮYȮYȮYǭXǭXǬWǬWƬVƬUƫTŪSŪRŪRŪRèPèNèNèMçLæKæKæJ¥H¥H¥HFFCCB@????>>s- v`#}f&~g&j'k(s1÷gaQȧɧMAƶ >;1ձΣϽ©XīZʵk…Ѣӧnjʲaʲaʲaʲaʲaʲaʲaʲaʲaʲaʲaʲaʲaʲaʲaʲaʲaʲaʱ`ʱ_ʱ_ɰ^ɰ^ɰ^ɰ]ɰ]ȯ[ȯZȮYȮYǭXǭXǬWǬWǬVƬUƫTƫRŪRŪRŪQĩNèNèNèMçKæKæJ¦I¥H¥HGFCCAA@???>9)# .H;zd$}f&h&j'k(b _ZKȧȧt@]ʧhbRztaϬײҧ̶qƭ\ȯ^˳d˳dʲbʲaʲaʲaʲaʲa˲b˲b˲b˲b˲b˲b˲b˲bʲaʲaʲaʲaʲaʲaʲaʱ`ʱ`ʱ^ɰ^ɰ^ɰ^ɰ\ȯ[ȯZȮYȮYȮYǭXǬWǬWƬUƬUƫTƫRŪRŪRĩQĩNèNèNçMçKæKæJ¦I¥HGFECBA@@???>YK^3 aP{e%}g&h&j'l(ĹrlYĺȧDFȹ(& άٵɓ˴gʲbʲaʲaʲaʲaʲb˲b˲b˲c˳c˳c˳c˳c˳c˳c˳c˲b˲b˲b˲b˲bʲaʲaʲaʲaʱ`ɰ^ɰ^ɰ^ɰ^ɯ]ȯ[ȯ[ȮYȮYȮYǭXǬWǬWƬUƬUƫSŪRŪRŪRĩPèNèNèNçLæKæK¦I¥H¥HFFDCAA@????~1 e! u_#|e%~g&i'k'~CǽA=3lǧȧf@d̪20(HE9ڶٲʒѼw̵g˲b˲c˳c˳d˳d˳d̴e̴e̴e̴e̴e̴e˳d˳d˳e˳d˲c˲c˲b˲bʲaʲaʲaʲaʱ`ɰ^ɰ^ɰ^ɰ^ɯ\ȮYȮYȮYǭXǭXǬWǬWƬUƬUƫSŪRŪRŪQèNèNèNèMçLæKæJ¦I¥H¥HFECCA@???>>  <1zd%|f%h&i'l(b*(!;7.ǦǧǹDDƢ}h >;1ܷڵӦɑҽx̴e̴e̴e̴e̴e̴e̴e̴e̴e̴e̴e̴e̴e̴e̴e̴e˳d˳c˲b˲bʲaʲaʲaʲ`ɰ_ɰ^ɰ^ɰ^ȯ\ȯZȮYȮYǭXǭXǬWǬWƬUƫTŪSŪRŪRĩPèNèNèNçLæKæK¦I¥H¥H¥GFCCBA????>`Q (]Lzd%}f%h&j'l(vǦƤ`@³¸,*##!rҰڶ۵د͘Կ~̴g̴g̴g̴g̴g̴g̴g̴g̴g̴e̴e̴e̴e̴e̴e˳d˳c˲cʲaʲaʲaʲaʱ_ɰ_ѼwćĈҽyȮYȮYǭXǭXǬWǬVƬUƫTŪRŪRŪQĩOèNèNçMçLæKæK¥I¥H¥HFECBA@???>6w# zd%{d%}f&h&j'l(|a\MäǦĶ>Xǥ|ubSOB}¹̬ݺݺ̘֫ćҼwϸnηmηlηlͶj͵i͵i͵i͵h͵hͶi̵h̵i͵jϹp‚˔ӧ۶ͪģڵ׮ҽyȮYǭXǬWǬWƬVƬUƫSŪRŪRĩPèNèNèNçLæKæK¦I¥H¥H¥GECCA@???>>n)! zd%{e%~g&h&j'l)|ǦƤH@Ŷ¸<:0b]NkƼճݺݺ۵֭ԧҤѢϟРРҢӥ׬ٲܹڶohbRA=3%$1/'ojXΫڴ~ǭXǭXǬWǬWƬUƫTŪSŪRŪRĩOèNèNçLçLæK¦J¥H¥H¥GFDCBA@???>SFL?zd%|e%g&i&k(m)x'%keUťǦj=`ȧTPB ! 0.&GD9a]Nto]~jrxxwslxr`d_OPL?20($" ]YIٳ֬ʱ_ȮYǬWǬWǬVƬUƫSŪRŪRŪPèNèNèMçLæKæJ¦I¥H¥HFDCBA@???>t-lX!zd%|e%h&i&k(m)iƻ<8/ãƦƹBCɻ***ڴӾzȮYǭXǬWǬWƬUƫTŪRŪRŪRĩOèNèNèMçLæK¦I¦I¥HFEDCA@@??>7Qzd%zd%~f%h&i'l(m*NɿfaQsťƦ^=gɩ&$333lllJJJ ڴʼnȮYǭXǬWǬWƬUƬUŪSŪRŪRĩPèNèNèMçLæKæJ¦I¥HGFDCB@????: Q zd%zd%~f%h&j(l(n*q.Ⱦ C@5Ĥť>Eǥys`ڵʼnȮYȮYǭXǬWǬWƬUƫTŪRŪRŪQèNèNèNçLæKæK¦I¥H¥HFECBA@???<>47-zd%zd%~g%h&j(l(o*p,rUQD ĤģU<³%#_[K۶ڵҽyȮYȮYǭXǭXǬWƬVƫTŪSŪRŪRĩOèNèNçLæKæK¦J¥H¥HFECBA@???=ZLL>zd%|d%~g&h&k(l(o*p,{;mgV¢ĥ:[árlZ;;;***GGG444&&&Ƥ۶թʲbȯ[ȮYȮYǭXǬWǬWƫUƫSŪRŪRĩPèNèNèMæKæKæJ¦I¥HFFDCAA@??>r`%`Nzd%|d%~g&h&k(l)o+q,t0o0-&Ĥâ==Ķ jjjqqq ``` |g۷۶ɯ\ȯZȮYȮYǭXǬWǬWƬUƫTŪRŪRĩQèNèNèNçLæKæJ¦I¥H¥GFDCAA@??>m+kX zd%|e%g&h&k(m)o+q,s.~@ö¢ãĤc:_ǦGD97778880.&ܷ۷ѣ˳eɰ]ȯ[ȮYȮYǭXǬWǬWƬVƫTŪRŪRŪRèNèNèNçLæKæK¦I¥H¥GFECAA@???v. p\"zd%|e%g&j'k(m*o+q,s.u0Pãã<=ɼx!!! ,,,ܷ۶Կ}ɰ^ɰ^ɰ[ȯZȮYȮYǭXǬWƬVƬUƫSŪRŪRĩOèNèNçMæKæK¦J¥H¥HFECBA@???19 r]"{d%|e%h&j'l(m*o+r-s.u1v2CrjA9lǧlllAAA+++ 555a]NֳܷҢʱ`ɰ^ɰ^ɯ\ȯ[ȮYȮYǭXǬWƬVƬUƫSŪRŪRĩOèNèNçMæKæKæJ¥H¥HFECBA@???4Q+s^"zd%}e%h&j(l(m*o,r-t.u0x2y4z5|589ģp yyy ܸܷͷlʱ`ɰ^ɰ^ɰ]ȯ[ȮYȮYǭXǬWƬVƬUƫTŪRŪRĩPèNèNçNæKæKæJ¥H¥HFECBA@???6! 
d9s^"{d%~g%h&j(l(n*q,r.t/u1x2y4{5|589ŸƦyD@6TTTkfUݹܸnjʲaʲaɰ_ɰ^ɰ]ȯ[ȮYȮYȮYǬWǬWƬUƫTŪRŪRŪQèNèNèMçKæKæJ¥H¥H¥GFDCA@???7% rEt_#{d%~g&h&j(l(o*q,r.t.u1x2y4{5}789gǧȨʿu```Юݹժ̴gʲaʲaɰ_ɰ^ɰ^ɯ\ȮZȮYȮYǬWǬWƬUƫTŪRŪRŪQèNèNèMçLæKæJ¦I¥H¥GFDCA@???8)" zJ t_#|d%~g&h&k(l(n*q,s.t0v1x2y4{5}79:;PȻɩʩȨhcS@@@RRRwݹݹ˲bʲaʲaʱ_ɰ^ɰ^ɯ\ȮZȮYȮYǭY˖˖~ūTŪQèOèNèMçLæKæK¥I¥H¥GFDCBA???:)# vHt_#|e%~g&i'k(m)o*q,s.t/v1x2z4{5}79::>?lƢ˩̬A>3 iii85-ݺݹң̵h˲bʲaʲaʱ_ɰ^ɰ^ɰ\ȮZȮY̴fթޟװԧʲcĩOèNèMçLæKæJ¦I¥H¥HFDCAA???8& k>s^"|e%g&i&k(m)o*r,s.u0v1y2z4{5}79:;>?@Kǣͬάv:7. !!!111AAA888ݺܷ~˳d˲bʲaʲaʱ`ɰ^ɰ^ɰ\ȮZȮYң42) $"vذժȯ[èNèMçLæKæJ¦H¥H¥HFDCBA???7# Y1 s]"{e%g&i&k(m)o*r,s.t/v1y3z4|5}79:;>@@CE[ƶάϭ¢sEEEmhWڷݺϟ̴e˳d˲bʲaʲaʱ`ɰ^ɰ^ɰ\ȮZкqױHE8ذ͛éPèNçLæKæK¦I¥H¥GFDCAA???5 F$q]"}e%h&i&k(m)o+r,s.u0w2y3z4|5}79:;>@@CDGJ`ƝЮѯƦTPDuuueee ¹ݺݺͶk̴e˳dƋ֫دңѼvɰ^ɰ\ȮZͩ75+ԭ׮èNçLæKæK¦I¥H¥HFDCAA@??3$mY!}e%h&j(l(m)p+r,t.u0w2y3{4|5~79:;?@ACDGHIKĴ}ͨҰӱoCCC///iiito^޻ݺŊ̴eϸnԨͫЭ۶̘ɰ\ȮZ~ڴc^MSN@دҤɱ_çLæKæK¦I¥H¥HFDCBA???z0 fS}f%h&i'l(m)p+q,t.u/w2y3{4|5~79:;>@ACDGHJKL]ξѮԲ96- "!ӳݺժ̵i̴fɐϭ>;1LI<ӯ۶ʼnȮZ̴eڴ̧/-%ׯƒçMæKæJ¦I¥H¥GFECAA???r-TE}f&h&j'l(n)p+r-t.v0w2y3{5|6~69:;=@ACDGHJLMORðsϩճťJF;JJJ___޻ܸƒ̴fͶi׮|hJG;۶دϸnȮYƌڳʿD@4Ѫ֫ƬVæKæJ¦I¥HGFDCAA???zg)?4|f&h&j'l(n)p+s-t/u0w2z3{5}6~69:;=@ABEGHJLLOQTaΧִִNK? IF<޼ݻң̴h̴fζk۶lgVnڵ̘ȯ[ɰ^Ϟٲq ׮æKæJ¥H¥HGFDBA@??>dT!(! }f&i&j'l(n)o+r-t.v/w2z3{5}6~79:;=@ACEGHJLMNQTUWʞ״ص]YJlll---@@@III æ޻۶Կ}̴g̴eͶjٳ0.&ɧڴͶjǭXͶl֬ѫ-+#oiUԫիæKæJ¥H¥H¥GFDCAA??=J>f }f&i&j'm)n)p+s-t/v/x2y3{4}679;;=@ACDGIJKMNQTUVYœ׵ٶe`Q---IIIup_ܺ޻Μ͵h̴f̴e̵gΛܸmzڴ͙ǭXǬWŊذl#!̥׮ηmæJ¥H¥HGECBA@??<,%f2xb%h&j'm)n)p+r-u/v0x1y3|4}678;;=@ACDGIIKMNQTUVX\ĐضԱTPCqqqOOO%#æ޼۵ηl͵h̴f̴e̴eѽwٲ԰C?5\XHԯڳȯ]ǬWɰ_׭!֭ɒ¦I¥H¥HFECBA@??: 2[Kh'k'm)o)p*r-u.v0x1y3|4}679;;=?ABDGIJKMNPTUWYZ[Ȗڷڷ0.&uuuyg޼ݻƒ͵h̴g̴e̴e˳d˲cȎ۶y$#ΪڳѻvǬWƬVɒٱFB6}v`֭ҥ¦I¥H¥HFECBA@??48.h'k'l(o)p*s-t.w0x1y3|4~679:<p\"k'l(o*p*s,t.w0x1z3{4~678:<=>BBCFHJKLNORTVWZ\ë]ĭ^ʴlժܹv444 \XK޻ݻϟ͵i̴g̴f̴e̴e˲c˲bʲaʲaƋ̩63*ٳ̘ǬVƫTŪRΛ ֭իлt¥HGEDBA@??=FM@k'm(o*q+s,u.w/x1z3|4~578:;=>ABCFGIKLNOPSVWX[ë]ĭ^Ʈ_ϻw۶۷SOB!!!WWW ɪݺܷѼv̴h̴f̴e̴e˳c˲bʲaʲaʱ`ŊƤ/-%żٳ̘ƬUƫUŪRňǢ ϧ֫ҽz¥GFECBA@@?|i) ,# k(m(n)q*r,u.v/y1z2|4}578:<=>@CCDGIKLMOPSUWX[ë]ŭ^Ư_ɱbƋܹɨ{h۸ݺ̗͵h̴f̴e̴e̴e˲cʲaʲaʲaʱ_ΝǤٳʔƬUƫSŪRЩ Ȣ֫Ӿ{GFDCBA@??8/ sq]"m(n)q*r+u-v/y1z2|4~568:;=>>CDDGHKLMOPRTWXYë\ŭ^Ư_Ȱ`˲bϝݹ&&&+)#ǧݺٲ̴h̴f̴e̴e̴e˲cʲaʲaʲaʱ`˳eرw)' ͩٳćƫTŪRŪR~ˤ ˤիӾ{GFDCAA@?: >YJm(o)q+s+u-w/y0z2|4~5689;=>>BDDFHJLMNPQSWXYª[Ŭ]ǯ_ɱ`˲b͵j֬ݹ`[L***HHHvvvqݺݺ~̴f̴e̴e˳d˲c˲bʲaʲaʱ`ɰ_Ȑֱ<9/c^MԯٲϹqƫSŪRŪRĆĺ ֬իҾzFECBA@??r`&>3)i'o)p*s,t,w/x0{2|3~5689:<>??DEEHIKMNOQRUXYêZŬ[Ǯ_ɱ`ʲaʲaܷ˪!!!SSS.,%ݺݺϟ͵h̴e̴e˳e˲c˲bʲaʲaʱ`ʱ_ڵnٳٲƫTƫSŪRŪR̗  ֬իѻuFDCB@@?>;1xc%o)p*s+t,w/x0{2|3~5679:<>?@BEEGIKMNOQRTVYê[Ŭ[Ǯ]ɱ_ʲaʲaʲbϝ۸lݺڴҽx̴e̴e˳c˳d˲cʲbʲbʲb˳dƌ۶64*ȥٲȏƫTŪRŪRŪQժxb63)֬թηkECCB@??3:K>o)q*s,u,v.y0{1|3~4679:;iY#: n)q*s,u,v-y/{1}3~3678:;=?@@CFFGJKNOQQSTWêYŬ\Ǯ]ȯ]ʱ_ʱ`ʲaʲaɑܷ}wdɨݹڴθo˳dŊذǥάģe`O$"[VGٳٲȏūUŪRŪRĩQŊȽ(&֬ҥ¦JDCAA??< YO@q*r+u,v-x/{1}2~4578:;=>@ABFGGIKLOPRRTUéWĬYǮ]ɰ^ɰ^ɰ^ʱ`ʲaʲb۵ֲp۸ܹƋ˲bϺqر| <9/ٳٲժ̵hŪRŪRŪRƬUԧuƟ֬˖ECBA@??m+Y o*r+u,w-x.{1}144789;<>?AADGGHKLNPQSSUéVĬYǭ[ɰ]ɰ^ɰ^ɰ^ʱ_ʲaĆܷzt`52*ɩܸ֭˲b˲bʒϭ(& D@5ӮٲذŪRŪRŪRĩPϦD@4YSCЧ֬ϹrDBBA@?? 
^N@s+t+w-x.{0|124689;<>>ABBFHHILMOQRSUéVūWƭYɯ[ɰ]ɰ^ɰ^ɰ^ɰ^˳dدش yܸܸѼvʲaʲaΛͪ pjWЫٲٲǎŪSŪRŪRŪQɰ`Ҥ{x֬իçKCBAA@?ta&_~g't,v,y.z/|1~24589:<=>@BBCGHIJMNPRSUêVūWƬXȮYȯ[ɯ]ɰ^ɰ^ɰ^ɰ^ʼn۷wq^>;1ܷܷ̗ʲbʱ`ʱ`͙ԱPL?]XIزٳٲƒūUŪRŪRŪRèO˕˥2/&ȡ֬ćDCBA@?;jQCs+v,x.z/}1~245799<=>?ABCEHIJLNOQST©UĪVǭXȮYȮYȯZȯ\ɰ]ɰ^ɰ^ϸp׮ǥâܷׯкrʱ`ʱ_ʱ_ڴǥ_[K =:0lȤְٳٲ׭кsŪSŪRŪRŪRèOǍׯ52)h֬իƬUCBA@@?]Oj k(v,x.z/|0~235789:=>>ACCDFIJKMNPRTèUūWƬWȮYȮYȮYȮZȯ[ɯ\ɰ]ɰ^ΛԱSOBsm[۶۶ʒɰ^ɰ^ɰ^ɰ^ɰ^ԧ۶ڵڵڴհڴڴڳڳٳٲ׮ʓȰ]ŪSŪRŪRŪRĩPǎխa[J0-$֬֬ćECAA@?4u9.v,x-z/|013468::;>>@ACDEGJKLNOQSèUŪVƬWǭXȮYȮYȮYȮYȯ[ȯ[ɰ\~ڳz$#ά۶׭̴gɰ^ɰ^ɰ^ɰ^ɰ]̴e˖֪ڳڴڴڴڳٳժϟƋ˴eƫTŪSŪRŪRŪRȯ]ɒԬjeR ֬ӥɯ\CBA@?>REudRx-z/|0~034679:;=??ABDEFHKLMNPR©SĪVǬWǬWǭXǭXȮYȮYȮYȮYȮZɰ]׮¸!t۶۶ɰ]ɰ]ɯ]ȯ\ȯ\ȯ[ȮZȮYͶiлsҽyҽzҽzѻwйr͵jɯ]ƬUƫTŪSŪRŪRŪRѽyѡ̦HD7w֬իƒCCAA@?y/4 o*z.|/~114468:;<=?@BDEFGILMNOPéSĪTǬWǬWǬWǬWǭXǭXȮYȮYȮYȮYʒٳFB7:7.Ϋڵѡɯ\ɯ\ȯ[ȯ[ȯZȮYȮYȮYȮYȮYȮYǭXǬWǬWǬWǬVƬUƬUƫTŪSŪRƬVӾ|Ҥ֭xq[өիРçKCAA@?>43+x-{.~0134688;<=@@ACDFGHIMNOPèRĪSƬUǬUǬWǬWǬWǬWǭXǭXȮYȮYϹpڴxr^|ڵڴ͵iȯZȮZȮYȮYȮYȮYȮYȮYǭXǭXȮ\Ӿ}ƍŊҽyͶkʲb˲cθnΛذ̦KG:ZUDѨլիȮZCBA@@?=4 \L{/~01255699<=>@ABDDGHHJMOO§PĪRƫTƬTƬVǬVǬWǬWǬWǭXǭXǭXȮYٲ RM@ڴڴǍȮZȮYȮYȮYȮYȮYǭXǭXǭXǭXϹqժǤɾϪٱٰ֮ʤ~zt^?;0e_LԪ֬ԩͶjCCAA@?o^% K {f'}/1235689:==?ABCDFHIIKMP§PĩQŪRŪSƫTƫTƬUƬVǬVǬWǬWǬWǬWÅڳFC7 mhUͨڳժ̴gǭXǭXǭXǭXǭXǬWǬWǬWǬWȮ\ժ&$$"0-%74*96,52),*"  pԪ֬ԩθnCCBA??}1 K y.0233579:<=>@@CDEFIIJKM¨PĩQŪRŪRŪRƫSƫTƫTƬUƬUǬVǬWǬWɯ]ӧٳԯѬҭװٳٳćǬWǬWǬWǬWǬWǬWǬWǬWǬWǬVӿ}Ԯ0.%ի֬ժͶkCCBA@?<  }0123577:;>>?ABDDEGJJKLNèPŪRŪRŪRŪRŪRƫTƫTƬUƬUƬUǬVǬV˳eРذٲٲرׯĈǬWǬWǬWǬWǬWǬWǬWǬVƬVƬUƬUnjù x`֬֬֬Ҥɰ`CCBA@?>4+"8.}/234679;<>?@ABDEFIJKLM§MĩOĩQŪRŪRŪRŪRŪRŪSƫTƫTƬTƬUƬUƬUѻuňŊʱaǬVǬVǬVƬVǬVƬUƬUƬUƬUƬUƫTƫSňׯ*(! @<0ybʢ֬֬թĈĨOCCBAA@=G<"?G;124568:;=>@ABCDFGHJKM§MèNèNèOĩPŪRŪRŪRŪRŪRŪRŪRŪSŪSƫTƫTƫTƫTƫTƫTƫTƫTƫTƫTƫTƫTƫTƫSŪSŪSŪRŪRηmذRM?'%UPAlʣ֬֬֬ɐʱbFDCCA@@>TG?aNA234669:<=?AABEEGHHJKèMèNèNèNèNèOĩPŪQŪRŪRŪRŪRŪRŪRŪRŪRŪRŪRŪRŪSŪRŪRŪRŪRŪRŪRŪRŪRŪRŪRŪRŪRƬVΚذӫЩʤƼͦШҪլ֬֬֬ʔȯ]¥IFDCCA@@?cT!aL?34568::=>?@BCDFHII¦KçLèMèNèNèNèNèNèNĩOĩPŪRŪRŪRŪRŪRŪRŪRŪRŪRŪRŪRŪRŪRŪRŪRŪRŪRŪRŪRŪRŪQĩPĩPǮ[̘֫׭ׯׯ׮׮׮֭֭֭ժԧѣƒǭYFFFCCBAA@>kZ#D915679:;?ACCEGG¥H¦I¦JæJæKæKæKæKæKçLçLçLçLèMèNèMèNèNèNèNèNèNèNèNèNèNèMèMçLçMçLçLæKæKæKæKæKæK¦I¦I¦I¥H¥H¥HGFFECCBAA@?2 [L59:;;=??BCDFG¥H¥H¥H¦I¦J¦JæJæKæKæKæKçLçLçLçLçLçLçKçLçLèMèMçLçLçLçKçLçLçLæKæKæKæKæKæKæJ¦H¦I¦I¥H¥H¥GGFFEDCCAAA@?@BCDFF¥H¥H¥H¥H¥H¦I¦I¦I¦J¦JæJæKæKæKæKæKæKæKæKæKæKæKæKæKæKæKæKæKæJæJ¦J¦J¦I¦I¥H¥H¥H¥H¥GGFFEDCCBAA@?44,U1 \M:;;=>?AACDFFFG¥H¥H¥H¥H¥H¥H¦I¦I¦I¦J¦JæJ¦JæJæJæKæKæJæKæJæJ¦J¦I¦J¦I¦I¦I¦I¥H¥H¥H¥H¥HGFFFEDCCCAAA@?dT!1'" {0:<>>?@BCDEFFFF¥H¥H¥H¥H¥H¥H¥H¥H¦I¦I¦I¦I¦I¦I¦I¦I¦I¦I¦I¦I¦I¦I¥H¥H¥H¥H¥H¥H¥HGFFFEDCCCBAA@>3(" |LAz0=>?@@BCCDDEEF¥GG¥G¥H¥H¥H¥H¥H¥H¥H¥H¥H¥H¥H¥H¥H¥H¥H¥H¥H¥H¥H¥H¥G¥G¥GFFFEDDCCCBAA@@3SF|5H<1=??AABCCCCDEEFFFFGGGG¥G¥G¥GGG¥GGGGGGFFFEEEDDCCCBBAA@@3K?5c8/2?@@@AAACCCCDDDEEFFFFFFFFFFFFFEEEDDCCCCBBAAA@@4=3c0(v.<>AAAABBCCCCCCCDDDDDDDDDCDDCCCCCCBAAAAA>@@@@@@A@A@@@AA@@@>3n\$?5 -0(K?dT!wd'q,|1464|1q,wd'dT!L@0(VVSS 88 ,nn,?????????????apprise-1.9.3/apprise/assets/themes/default/apprise-info-128x128.png000066400000000000000000000404371477231770000251220ustar00rootroot00000000000000PNG  IHDR>a pHYs   IDATxw|ߙ͒,ے-Wm1565Bz˅vp@@H$'@6:EdɲVZmݝdc/Ei;3}y?:;hxxH1 . xx>7op:0ߋہZ f3}xF 88 xv/+3\G qglzڸOȌ"޳:|U+.(,DQ\nda!}D{;l'rP޳F_ 6 _dK7Kq(>B@wE8<_@b}E~Rc$0J`Nwa %VQۅP"M|1|R) r &FВIF&ܼ3 |ݰ -XjS;T_.M3]A(:ӌ B(+Q :f7PXftؖ!M㥤ϿOE9t@rD~EL;SfW3D>۳߳62g&x@i&6U`g){m l}=JǣE(? 'K?>E6!B:#e>?}ٽ@ڏ3!Ʃ! clS\pBD/2>?IO +;5.Љ."L?/M'x!RZUA>@3 $IhH) b$8adIi n0n}5TMwC *N|! 
o"z{f6ڈ,ĶJCvar7R"),gMS:2~ۿV)).2-(聱G/8 2ܘwٟ6.0c#" KߐB1 nP2fODu>:260 vcT~P(_qўN"qaKm0{N |־r:jϼϟ5L.PTTE(aUA¤"U 7w5!e/^7nEA٫iR4FIIz#1}6Mibc{nڟ-܀ų\sP{3zNW  m*Fɺbn]GLt8GWW dHHIdJoh=f [RX{۹ {E?9?\j]q00.@>z hF\6đq *CK&I%D" vݾރ"5 EQRC.E*T̯(`4N4ա(Μr1-ڀd"לATլ_*(f5 $bP ~<&LBÐ4D-𳶾{7՗:W͚XpjϞǔi hOx͝L&GKhOi~ODx(3o,]tu}5u ~~ Bd EAUIt0oEA]l#K78h"e#)+kJSABEZe')\L` r{#ɔ:H%&^x~>Xf =;35Ә>Ρ(:c D%)`S,(XSWNϝ9NJ ,K-%87.<{qn7ťqΧ?뛻?0A'Ra`[jX|8;nζg.u ( xiӴ\7tO#$M}#\DT/-{3DS(=q LU9vg^Iڴ-zXr)TA1HOAFۛtw/GV`3>|)חq0AlȊv Bb)y?yOäRI{{8x< .WFC}cq88e `4"҈4±$]#QG3V[Ig˼+D#9Ao3{rPV`w)(4?_Ŋ81R1%V'tipO\S?)Sfpz}f$i/ucO} @?6-Eq /U5tkѻIxhgڬ4̛OЭM&G6x>7~~/~~E^7! Iݵ7L̵8ǭC1yLP:ZNZ¥PH'[Bi#I qR20=8[nCo 5$D@o]s9\sfp3~'ﻇ>T "^9(Z&"϶oxK}-[sOvnc`VIb LjZr1&ީ(2¼Ď5gz} Vlb97xj@ >3v` ]짷Sj:]̌OZ~[/>7ioN^p1.UA#cW^HPhzwyflmS<1CiU%啸Ah!78Je: Gll sg -CG74m 5fb.+O%4cv֐C12G`$@Kd! 3("gkB?zTSR>K">Ÿd칔lUPcvl)z*)L&5h.LO'}IX{~q=S20[6}޶D<#w0o) y\+#v->" ˕(j~a\OgyeT3p{9`"o*3xX `8Q}J;3Ex)TsAW3̚!.iIYu~яY\U"9q?f`^yp,ɮV=_<"rKܵUpg/ej,knzn;RSV`6DJcPƅK>{n n܊IJ,#P3}\:N89Syrݍ ^EP;k/"e=&q$ SWH 0RM|=۶4~3lߊ0 6K/̛up=Վ~tN||ʟ=w6ӂB&Hhƅsz[_4(B63BܥxK+̧ zNZT'7: h'sfӪt4ݴ wpD碋.q/FƳSBM[3$5ɖa^S1皻7o!%!+H9p6L`{FE\jN:zBg 37̳?BpYfz3JIFIӳ)Az2)E t[~sg|{͛7#￟/am<{- PZ$\2I+y`0k/ūOA|Y~hکUOܪșpLQ)[ aL2^!SiSTlIW9]>ivzi^cK30yH\&kJ(ۆ'=VzkضmO<_=Yp+ٳg;WuwɆIaPTs=D<ChRȦḘ)_vYyi>rYpl -ssKu]3A0cf)@N߼֫.yB/_>qHTU;Jgs,Rw"@zc^(s/Xñ#X%CFD NVIUG<4=y zCbŗRGW~!%"]V6,F1݊a7[B}Rē}3/d JWs@ْ?}D u3h ,T&g@zs͒rH 6.AWX;盧)7o^^)޲?Szx `ny!nE޼SgH,sB6!&;},1gG}04XM AJ!+k˙Q$qwoOӗ.]o˖-\veyϽk@*?U2HϾY%!(CuYy\.H]*~ڛ8f /=0 I!se2e\BqZX) `rDW ɪ  A]7 ?/X ン;w#qƼow׶'QTzGU } xGI * Q`qrNmLʫkGrq'Mpj=<UX ^i.)-ޞէ6V\ԑUOkJ}i+xhaޓO>q{*]/|5אL:/ 8묳[5Κ?, =+rm\85hJB$bQ׹Q3Ҡy`4gSrPAEzZ^;8[VePZ~;/B2grQ,9Rd$KT| ῾e7~'p nk#}W_}z( +z\:h]wUEaC$ec.Ù|’Rڇ# xTe%xT{m@A!K?UU3h7ٕ⯬BT8iLDO:V/ F^Z纯>(2?xj$ᮻbxx˗я~q }ן{ȹ$LQZQsΪSqG>A\F|7rΙX-!%a{ﰳAN7 {K,=X{i_]{6n9BkXS_߭oN~/_mkAAߟ?8/q+Q!2 )M6ᴋ.a隓s֍z#1Bs _4w|3wDsDq !Ԙw9`JilZ\- //BUy+?)2:=ؼZ[[ْ:/ǯZ*/D|`_ vW z:sŏ-i=_x}~چ"l1DyuHhڒyYh &RdX'W&ʁT_#5vSRfsLo-5M8Tp,NRdƲegD"-;شi?^n  9_淋i7KQ-] 4Υq.^jF4IYRQW )X|BL3vHRح٭:kQea3d/9qTY8b!^x1r~`d`dd}9o;G2G0[nqф IDAT#)C|Ϊaɣ;M爵(P,/i|\ߠ\Eb9Ye%klKŝVabpL=Nuo>53r1 4V/go37}Q._DGFFhjjX%%%׳tR/_g>q_<L)eDDs$p"_20a_hxJcc[^0 G,+Kg2Ǥe.t鱓#*1PB ]ՍɖW_&r$ƌ3xYgEuu5uuuP[[Kuuu^C \}՜s99A".704O<&;oeE\~ )Hs 4F"ǓcU"7mwit/d m s}=4g* C:ӤR[x?H3fP\\oʔ)tMú'̹˃> /76<8"*>C~Z"mǓ<s a hb{<ݩ8fc(A;$U̙ *Eg\8S5 `,aۛm1P尉O?`n_\w >pB#IihRg78"Ic]vRWF I'F07ef!d+/q5Wp7|Ԉz6k׮eݺuz{(i, 9yX8qK%:F 0eV 4Kͥ(q=^]^ݸ9|^, 6pwrWiS|͎CRJ~Eݪ%[$גO qq JYiۢFQS^&s$!AXzm[_Qez6mDAAAξ/^b p+V`֭oyZ)%/jTHg[V-kd6+MX"35ʢv+M($x٬0r՝]\z,!2ŘjJ S]NJKJs;;8 PYYH|_|p8l):4bUW][a fJf`S{XTt,r Y&U0d~#P3j( hoˏ.&zBsW?|H>c5o!_oӦM<^!s=}Ǎ7c=vo~܅mqD~~^V6=56j/%9[0%{b2ò=v7P/<|⳨º3yrj; frg/wQa|HWOd/\uUq1<< CCCyac'?ijϣ兴Exr|_dF<$:w僚{R}]?љ=vM&x|27oMhUo2"֛?4#g4ciS8 OfcѱHf+B|F  ?w?8'!֜~6g5R\VNaIiFEl2 3LYf&s-m]f B?Hd%g܌ 1r)"xq/аVqn@5^]NrO{3'ղz @ 1ֹx2Ɲnd희=TNb wUkOfGSg˾Ig}χxJk4^F'N}v sLLh$OwuD'G&h޺C#B &F~uõrfQ].Um 9Krb X$X{GM<{:yGh8.NooOK#Q g8涫z v{A˗h:ŧȣe8zc +u@K$,w%RJ+&$W^䥧E!*JHjv.EEէɕ^ZCSzxM]C2u@fWӌOF4oRo+mOg$\?F̍s42pp"x3%{]#A%S^tRI䇌-+gT?L[0_C"s̢cg5uv ݑ>o4cM&̌~$liCѲMʼM' `-"֦*! o7.  \LI޲)x˫3meUUeղ%lC$&HQRVǥ"#%,5^ _LmE=F_p_O;^N6CR%o<@>NޠUl# d!{BЌPfL(hXb,1J EJvƄ3wˏvw;4gʕ<ģ } ]͕S2lŶ^"~7!U1$ϭf/aÝc[:2.>=ef Hof$12Lc#og|wMҁDrdLDBR79cZJNF WA( Avc(㘺)O[aU 7 s{Qq%L+ pU*^FIy2ͮU>HLs4e@L@S6 I`7mH-ewn  d k8ybBFǘVYVK wVVr7D0^%:Hqy/|i?!)%jX32K!RP\B T@uF9o.SS5&1{i#$sxh8f1ג < ;'v `"=? 
-$>CܥzWp͠7aYYͯHt,2i^PB7}CdF-E͜ͷos Ϸt *pQXt{ooxG(b(B=4;oBZc!Cev1hPL1Bׁg$/<1sYy‰ݶ@^k '0=4ʱ˖I53{:ŨX>c*"0iҿb߲4lޚz]C,zgjdBZ<(w#}pav{d,B%؏.0W^6n8̞Vē-Amfij'f}o5r%fHۇ#`d)teqXA MhK`n"1;7otD2kGR"I hںfnvVPa1SB>He [8}Gen}Zff3lH4h=v?a8JfW\-YHIS_?0#idެ |SmND'ALSWL-M0i=Jgq:c`2`[+'s>B.w'g ;n&975NFYv5meϯsza: p/s$EA9#[2.=ij>NJ@p4fn'tXRc_3geFcqCrqל/Cǻiz&q8^2^؋7tYIfd.BSgD@ɸ*f,VP3~&s_~1a}s71f .uwƶp. ƺڝn߀N&8t^ό.%8!SbNa(b zI:)ILEQ9w4} v޼ Q}^"/#="z.<ؓR2mvZ_8 `HuԜaׯ7\7UD:! PtFЏ׏/-σbۆ" [TGptޟ;ZR"S):_ {)Z<'܁?Ǭ洋pBtbFFQ*VtX*ŀ5V?C>-'Amۆ|;| xHI= ׀O3Aݛ͘+4e [+Va С]ǐb#xN/\Htc݁&z5;fnHh3dU}o<_5ըfr3΀5-YGBQBɇ(+p' \HctWr|zVG@G@_ 0 g޻{q))3_3ssB(Nl /;>&+D}>O4voaooc㽻 - T<w'P+>ЮMD{OecŔ`fD5&r&s lyz6?A~Nw3[+%9.Fת([.\zpeaVB;|}XևM!Ld;]=5,1Bͺ ([GQ]Y0.e0.7a(=nIF&̿njcZ֚;TDP4g1=$(*¥ 8K`B0GAK񕆑mo˫t$ Łϣv& y\L@[B٢UxJq^o.n)s{l7^``˫[H Ot$ҭbo{\ {Ê>Φbٖhf :O},<][/㌴6E49g=|%(_1I)_qEs"\.523@%yt7:d0CM[}YZ  1@z 臆8))'T7@ 5xJqB._ә15ǔHMCK&qXTlddddP?cDcXGy$8 *~76!?qBQ^` z !%TÆHwJ7wL7KszxdYa:5 eQ_^ +=w{7EHMIENDB`apprise-1.9.3/apprise/assets/themes/default/apprise-info-256x256.png000066400000000000000000001245031477231770000251230ustar00rootroot00000000000000PNG  IHDR\rf pHYs   IDATxw|l/ZnYr1Ձ&@s/$oHiBM^ HnƸ.ZJcgg6`=|ٙg99sct1:Fct1:Fct1:>CނܨZ  WN >WS mV`-U$8xEl|MK ҪȨ 6:Gͨ?|_GTa?%yIRGhTWYPK몵SFǨǪB*5w `}Fǿ8Wb/3ރ==9yJaAύ=+UuN>Dφ') FJxB<#<d:L $LN''3#q3xxEuFǨ8h #dCnPABu Տ%2PM#`o( j PAQ٨PRP+3u{dRC ҉Jb6INfxݼ5`5p'jUUpM6Qit*wUN(I3(2HB5 *jBm'(2C$;H"}^w[CgGx7TS1T}_!gA)i7}r?PZf y]oIgh͛ dH o[=(>Gfhp/ln 8(r`ξA(4E'P9(|h~_=Xa{_VviU$Bϕm]O:9L"xjQ0E \,׃K+L|<?`epȷ,Ô~()%t4]oHM uW1Q:|e_8×R6uf<&s2<(=?o BJwI1JWi{q?ӹ=+>F#YDTn_PybIxBgÏ)):!x]$@W.߷ m}]} g4hTW#]AOk҄~$f~+|ݱ7t\W__]]L{oI ̌ZV ? GT]H4jBiVVE`\ axh3(unRژ.R:6 $wx!c\sĽ*Q| H;c>B+k/@ ԥ\dnmnQ>(kU,۬pW$N MxC1EtvoFuF|3F%_Y%Ns('+T"[O.^˪_PmVΑ(:]Mh}&Ҥ cetjL 'WCHxY{#{[G^*w&KYܗJhXO5UAW;(B(8j]+QKbY9)Ƣ(^RBzhmݏ?p_H.clB/r-)RK:l1 , 5U^5HBu'F؅9/lIXH}3ERJ{ؾNW?poH.c+qeFU(.&SԨ^p,uKOdܔʞ t澺]y0P/6Bfbvͮn@)R7 M.)RһigɔwȒ*Co~MFaϦt0L}_s@͗;aQ%9I ]- mbdRnx=Gro8DM>?N?>*^TE˻8fvTB`aK;@ ZAZY feOeV 6֧&(v瀇F}lQ#6qc>ؤY9V5\Vr 5Ufv_ >+Pr [UP"g ̶ֽoWFrE851lu^c?rU ዔhBR,&{t 0b;'`J':hϭBSAVeZR\HxͿ>/ ~~Ay֑lQpY9#Moʇ}hA+H!z@~u2h@ng:( E 8A$v .4*,$[^b=^*[Dg $CP,>[ dRPrBq 21>";l?Ioo \鲏±s d&.Ute}~P!."K*1?IӇϯ :>)zs_h#?c՟0 thsB Ot 8B[U<;^"(ý-KWҿm#DA0|X!!>"J&`/Q>X;E>cҏF Vc b,EXYt4ЧP;';EL%@Ȅ=[W %-):- vc|mq&G{3.{k*OMƊ֫>=o'BW](К  KجpPc) yJƒ͡i/t"* U dY9/H{۲x=Ux82ḏϢx*<*o2B׎7yKh"leXC`ǬLA8[(v"Q^9֪IrB<0=o^P!u!QIข~I9-Wsyϐm$3qXRi>P(BoHՕr 0XtAr|Oဘ,Ne҆VlաP+z&@Vxף(x$3ttLdvvssMLWtQOG*p3B;!N8^+ǶdWQԗS}hX!F+C \P1I)2,u n9WTTTD|^J>!iS$L ;4L`CA Vw1̊ cC[nV2fnc7s{n$84Nu|O7J޴7x)&л #] 84M@Gd-.$Ĵu >EtU Jѕfg_wzdsmA CEag dR)v>r7V[rQoU}Gi8Toƕ:U?]ћꪮ7ѡhQL9YA,FDe;0)c*9/e(ܿ(D^LQ3b0qtl Yo) fJҷBj1UgZK΋.0 ǟjYȾc?hȽTQLAN=jpp4s_oE*y{-ljݘUSܺ21$N!'EO7}]{{$JEDK(%\RB0!-!-! 
ok<ɚ]]OY&n|z\L7n+*dխcTXlj#,嚣NOct]9֒YvZ]1 s̥8BSn LHk;4b/'%?ȃܲζ=t(!V^AuC# cBɴLIS\$7@*mpl#!;?\-= ȏid'Zv*y8>%Cv>.G3BL("bCZ"SmYM$`2Vڡ5AQ9O!-.5̫+3l'㉿;"/fA"R*j똽p15sܿ}` }k ڹ!,[]h[浬Uٲg&N8H$&};0X6*%'Utq||߸[&Й& $ d,^:?x><ްӭ\?&5<`f/|ˎ؀}OnK`>2` Bq]u>=_hZGS* dE'2i}y3\ `^Xl3MYY HP$<)93RquQ4+|9@8 ?9*0Wp륿mjk噕ѻoϺ$DK KeZUCk<A?bi:C]{]qlWVZ>E|R 7}EK5kj/:ޘ͇_{s Pt@SnE2(4FC̩)cfM)3kbL,aRE -e*CG 1sRtCS$zFdq"17t¦ȥ=v+07zW#o[+0 1UP|Q1&by "պ4̨.<#S|G!P34B]4HFBw)n.5mTz(LiʓD}{ˬy|]L;ϧ]Hali%-No(By>w-` \Ը9ͧIZE{f[~ ;hX@zWOIU}v=BP\fg_d*㊞=__I)f:^~N\5Bcv?cT;:o 0ę!ZPy(j)-Xvu9E",޹u,SQ Ѿgv F" 02]k4c捼,d"A˔B^/"ҦI+N`t^uB%PA&AW_CaK;fX1;b b f|P+dxtAHH  |q$"ͥaԕ鬗ÝW]ΝWr#x^^tehTˇٗ JEg_ OɆ^}N5EǶP8FK y"t'v8Y{fhE|vk~ÀG%OtS||=ɇ6 ` ̵ v%VP"uTڶA.9РxcIUxtd<| W @YU5 hhOU} "%1CIz;;dǦlߴ]&+o'~ 2޾: Y/= R * lW0ĸOMdJm>>Z.9_1uﯨ<^C1",L*t0yfGzd1%!^+=VYzuU+m/b޾]ʬqj<74}](B!RV`R)%Ɂ~e_lz^؏u6=?wS)vi!t{(}c6 #gOHGz虭4S_#z̥c zg.dxh`"i*`SW?LaHJmR GOr1>STEQB!3p+8S4KyG!{{L&+hkl̅}ڻU Б2pg_iGx.P{.=Ji^3$0 C Dt|Annf%Hi$y !Og0t&jfe8-S ٳRCn^{pI6Txƒ;$:=(7lM|&7|2٪-'KȮ{l`O !%!yǛ3aÏ9L&o eٶEl| K:2MKwseN9%~?HgmbCk,=YN+8r_gm9m0jUٗBS(6:ew[=T7m'O?ìGiooKVPHAr^i aXes}p"زM~om[to|WW{OBm$5 m21nO wH~ p7:ӏXHMC>p++ pmL͢]|LH=&AVl=̹zn]Śݗ`!-LǠ90ҁPżD̾te~Cam<`p L5 .})["XA$hE190@pK~/M0u !p>sM TwTMoV5f2i:cOhĠ%XԮiDz7w0B6f0lɼ>~jx9"\l)$4#0Dž_܀-ӳ)Iƃal,<ވk+b 7iXp/rD8ܿ=6BPZы%~=t5A3Є )fbE԰U+36dL&d@&!Ji۹/c^uoP4J0VCI10 ;iFJ.4X~yb-Rw.}#yh21|~z7 !x)8Sx}4z&@d =-Feӻ 9#|0 i$VH1Jz DBLr?{귾CEMOZ rUBP~1.LJɎyz_ynJ:ZFkGS*cćRY $X X˪kihuv4a2-SkaK`L 4)*|9҉[YPR^f9Ph9Ӎ9M+Wl*GyG" 7`A0X1a! ZihJh:?u/Jqz<̭cɁOٹeLz{yYL}ђX$XWaW_Ucba-`ZUc [ |v^٧ j >voOQZUbiR xJv9N9> ;Pq>)e0έYcHH9 DĻ"%|vqY(7K|hBsOf{ՉJYo=o7i6nӼuaS2;@55&LV!+4-mza]˥EiI:5Ρ1'lw.=aCx"TTYl7_ ޫⓏ#jīV vE@H]ヷe˺7 EM$1xQ wwѕ6Ï9)s57ғ\;As,`-߻|_m/+/Q'M/Vi.QH_qUcJi~ R[qk:Mʩt_PFjwGm)ѽx8ŸӭͿ:4b!661ԍK)zE~/[R'VD\-B{r[1rU^q$Fjx5?b6&fMkJ4n*(yw#m(E J{Ddq\#M[gv+ԃG^KI;RƋ7\2 )TF1/xSc׾.0!#_Sҝp".Ї/ ܻ{wU8@kJmjIe*kNc3g>,<oI`3&L-vps+4zᗎdz,Z>yV[6?wk_\# fj:[iꛙ0}&h׋#R:*) _lkG4ڽ(2.q>F X)Oݺ}٢=G0$dh `1+/l|$vV@V_֟lzsc ~0w1Ԯ>οZr~X: Ċ1W27]  ?*HIFJnm z==Ϡ6Sü̓ںK}S3U5#<^C${i۹6x땗[#+n ZGԷtyr[:RfjRrlK ͥyN>t>Tyyۙ!$<ò뙃l-)߶7.!mN eOgN(~S̎7_?E=!V_ۦJ?,BǐW1(櫘 t v~`qSa_vvp|sػ>Zw`YB :^UX*#IҌ+ƖL;ɳ<JJˈ+q Ng%y܉,U!HR2p&C:fw*jK~I1Hpտ%9"AjAXm2R?a>n|"-}ݚV=pXS.Pe=qpiտelj;}\[oߜd+Z 㯢}lL"jץH1%iUWDŽ-eù~}޹lGZwcyi]1tĜzE60zSm K78ow]$]ao0j")gfs u6&̘EQ ;{;"'L5yoy; sLZ {mȡ7g>abT`oW2k+=ؑgvRWJiR)Mlzt-2 قfUڢ!؟{g\d"LBhxyip?7 ׍qyꃻ T`sՒ0JӾ6Hy*ftN#Ts pXmy(E]|?cg6!8\351+>鞖Om*A)5hz ζV.oGa|yԑ/WڪS@ :Fzߛ'v5Gt=-^vT|َCɄ.E_:g9 \'`ݥ3ɤMowB"l]z/̊@/yiw^(&Θ]\mx ]Rr+:e{$--\ip9|xع:g'

 x :d\Q<(Jq NbJp/5<f(~UVtMB[8O~ö !@;K ĠbYE}]tyt#y׭DaBmCzLƚ+,vղwCN EL2) DlJ~DŽp hE<.$͸PiDŽ9e Sbz_s=ە*>ZJÆRX7M$Pq QJIOqzl  1<4T#F1-Dѱ Vչl7k}_vDƌGx$fsYaI56DHFTM8H{? eOzU[T__[k_ZS?B%4 *WO2"Y>w\డn@Uuővh!:!܉Y=BϤ$5<\EK,swmz$ msؘAUurd"Oa!Sn܊nn#@s'@,HacgזiXuwCJ]_IwGvʂ~b!cl) f VVE8Sx@H42Bۨ^p>(J#}}2|:K lVf]W]K:%!ycHeיR*jKҏŴU9沈{^yk|P-k &xLxHJE3Kisl}{3 3_E P:c9[ҝ7h8nr}EbG? o֜6_09{avu+YkitQy-5=Q oꐶ!wǗGе.}?k`|sÌ꘻Uez yhD0Mj2 hd*,iq?Ct Ūp$5c,CL@0Ҋ Gin/tYI ]J~W@!J^{>?!R4st, _cʍE#|{0 Y8@-Gi^puh0و; 4fKpD" m G(ЀSE!T74f޼F̰}rxl`Ku!tXty!%Bd[F1k>|ן7_|Gƥ9Q__^Dp䪰 J;QVQ:ߕij>Ezغ~]Q7mᭈ(Ȅ v7=،'vڡ\VuZb@3icVki#=H:U:# mLGq=SEϘyd2&:nfiDŽee<aʣ  k_\]ag(\;S֪! a)1#( $Fedw$ GߠAe)3օPA~FKL1bpL,! J ܋Q2ZbxKNCq477 X+<_a7l}- P;%=zEͫJql΄ #ݹm<(5Ad*MpwRv6mr"c5B*#{:%Wo"˿=([( 1v {{>Dַ?y]_; SrZ\]4†d">RN;`:K(ʟ-PizvϯzsL3N30Dއ!<"-$* }ɍ`ox !\X$AJQ_s @~*5+v3 ^z)rJwkSJ\w]4]EeO !h2P8Sqqe|77E݇I30l]#;;(٫"n]? d7퓍 PCU _ -< IDAT"Da0M`IQ \6U3P.Y8?>oz+|k_cyKm<[4B^Z]oV;0dJ 0iEV'pt05jA4IAqyN]mmx{pR&5/ u4YF=[na͚5TTTpW2i$ \g:!4=uDL̕UU1i>.# gQ~7uڦf'pI[ol#Oo| &sIY9nm7hұ3 !x(zm= .(NaQ@q& ծWT[G"Zva 6|$݅l#w/ 9z|Qc]?~}wx֛yS:444000 `>huQl߾iӲoȽwQc\&:Qs-@Ths hG}CFQ8wlȚWDYrG  =n8 GATCMP!O*&]EZYD2ˡ.z<`1T~`2?CzD"wqs=r 7~O?;3{X,E=̟?;gy…ջ۵&\vl(}:7x#>( ,+3䓟$<?&JO|%KyGu\s k֬OgtSf[ДW>Vř^_:%{i472J}4d8Ϟm[+2]m^ӭ],W.:cƝ3TY?.hrTh^HwA4t/a6L&S,_ ;tU["mV$aj]%s*-SޱeW쇼ӇO6?O̞=?/mlk֬[nᬳ /dڵ~D".R.2vMڦ~#sxz{\ђ;'9򸓴3RA%ͭk Յt_E,966isNkYn&l@J|%xa҉h `:`{}25uى.߮B҉0je`h $c٤1i4}i5|K;֞Ͳ{h 5M˧Mö}Y2Q ﶙ3نjG0*#V1G!ZJì3tn=MNYo;bHW4(L)U}bƷ9^8 ^xnƢCƥ,_͛77ZXf*qS*HiE%{-V)yfGa)%?v?+M}_ԏm3b{π/f.I%%P!6R p4 )mL' yJĊY+ s2sKdÛj8vt-CȳK/뮻Ғ}ڊ>N*wTUUfi;ٱeS zTHv;Ns?͵DcٶոeEdu;a=\zmTsi2do8e+_!rW(v!$FN*TW`>^XW=naKjXB8Ӽjп㥩ٍpm~oxCW>P(ĪU~%,YcDza 8dΝK:駟{a͚5vm|+_aŊ|W .9-y9[VXwzֵ0aU `I'3nt|vm݌cY'|[o°>Lo0K/,XsG48Z؁w..^`D >?HrYBQ4!JdQ | TT GEysDfqB( 3}qrR6̟GQj!S^̆*J "4m-*nͯ'?aӦMuQ$ pBl·m$w~:&L^`Ŋ~z***%\b/_Yvbܸq  1nJ>kuSW?/6M]$*gNgAhC! ^LPՎ.:)b1Ŋj6ō ?O?]tDq?z|;L0mƺuشiL pusoؘ ,]'|+W244=R\wό^yRs$pw Myz^rݽFRn{чGt_v 7nVNh}L;RBpgap6T`@YxQmF[VaJnuדVaemV[a C﫟MPtmx> Oo22YNYU+]i,_/˜}٬XEʮ]Z?˗xbny6oLyy9ƫ]w5\å^o[nv8c?~u~g+͓<)_HSY A`q"~oRćҬkm=*6^lykmQoK4öG6!J+L}#gC&PNڊu./VNÉ_^AEфE3ٳ ܭ‹@QBSPt}t&N((W09cjE@ yqύ7gO&NH8côpSReٶmmرc]]]+{o6|7k</`G}z.ŸVZöiUwv$xջ:y~g'/nVWi:S αyT>{ =8kglg b;5 'OUd8c9|;k7D7,{n-_# ؇m0`l" VJsttJYz~Uo ֏R|ԕ;E%{[X6x1YZB:&YgIӁ k-K2*K8ף8{o|~~ͮ*>-c``Ib?W_Rug}vS r'@ < W}m`ӏ+~>A.oG@.zl=H\W3EeA>?@ir˟∣[:z%zHA%{5痹* VVy~$YXW&BуeJ<=mJqmY1VQ[Q>!kcN^]/>^i'NdΜ9[osWr׳l2 5x>E5qO<9NJ+8${\tE<3<#vo? '}>Ι T^H`T۩4g]|nf1U?oN 6m`qS2ea佽nd6& ²@ [ /FTX֧bop`*OX4a,*Y!N9t< gL=+_ fNb5n) ҩTT>$ir2@!qHؼ/ХƤ.+fָj&>L$I47ֳr]wcǎ?!X Oʊ+g֬Yq^yլz<\y3sLb#G>.kwHW-㟥kOqEJ]KW(&Bdٹ+sCiY ! 
BJ|]6|79 8PΟй]UR@,- ʌZת Sf^:,I7ug̚ʤzG8=__y߹yyN<8sy>`Yd mmmh" 1k,n)))3'o^{G}ٳgs 78묳زe gy&D;3^jjjM8&yϲ$3*Kl %h gA ltB ;5򭑓mod>.[44ai;CYcs}} *[v:>bnF8<ǮJy䑼̞={'Q83;Yt)9sXv:{M7QZj^Y_`ݻ{Mug?ʿ/9ضm vmر￟}kwm<ĢY AFg ߄da<8s!nK@\{L¸mISr&M-(xȪe*WBIhs3x);ɔjx?FOgxyun/l۶;w= ꪫ۹7oSLaܹ\veAx ;}]|A6n~mȼ'2jLm"9d37#H\gUNib{Yb=V#v@,Itig *C`UeU\"[/<ˏ/=Dkii:uGg|_eժUK/3g2w\^Kmm-O>$,s饗2yd];`̘1,[zH%es^s5jTk8c9sXf q-7꬀1%T\anpLrlΙ/- ͜+ [M˴kL ֲ:JzGlH_wא&YWWW^l8Q^^{~9[TTć~?>}:O=7ofڵ\tE,[ u]ǯk8C WB]]W\q/g}G饗x7r-YjՐ@׳dN` O^\ 7I!Z@͢L [6/\,@F*\s ֭cɒ%aZDX*2\d0a6lΣ| ,ॗ^Rrrӧo~3 /'o>V٭ꫯ2n8o[[*UFN9.u&:u:l|O?47nTi};ncɒ%\r%\6Cn:^{gگ?&ނQ,1|e !Cp!r +OcP"d Z)͙ǢPjL!<(#ԓfϨi4]7=:85᧦-FDI[Q^' ծ .ӗ{7L:ӧ۞ H$T>>H8餓89󨫫_:sv2*v͕W^ɸqۃ$궪 ;H,_A,>fIB!#gfH鯷Ħ>ƤD=jcQ,2L}E:!w`d`i.[മD:l,e.V68LIȁfh5AnIcܠi:qe:z3K;7mԯ\ tDUxhJ->pe]v3ORSSÜ9slSeYO˗ljD"a9Xr+͍72yd?X4`?ǜH F'Tmp&/+ǘ+#^{ <4<_ `7@_n:]v2nURĤ:aݺ=敿=#g:"lۦ;)]Dwa~[Zk?ʝyywꫯVvc,Z:uֽ"kI+M}TYH% E}=V;&ҲL Gr%}t} PR>+) t+꽷w&,{i[ճ|^TS66ԋ ҽJE̮ -#F^;oL<EQO~W^ɜ9sb8>}O5d# ,Xd SPi哿oVa++p>Lr `3C%EF1lq^*2{%۳wdUs^TRQRV[-8~tU&USSeoUU)W\Agg'_k)v7|I&;) 'CpkZ7^U2 sXsvXud"oFkXZV6.v[jGB] EPC?˪W_Fɻ_vw/0*_ X]H<ԥ){?QY]RveI|QQQAEEx fyKi3 60|^:::8c˾n㏷NFtyv]ʳsɳ ЕXڳDڛM*tuEP%nٛ{AW,z_ 6H_ݶc)$Œ +c:B!n6V\g]wd1vo 0ԣSccbK8L$IY| j QO4ٚ X(47:m: _B,J9ZD-5T傒rꕶՂMMMX֚& 3gW\qwcs=ǬYXlwBJ2wo'Vu`q>!n7ZSmNb=Ca7 eˇh3{P5Bg77枕a/<.0oh YL{ ̺ә>k3DvD"-[+9s5XWW 'FYj<{*g`х>r75 B02c=z$n@݇vߞMUvt;}ͼ^P ٤ǑƨKN_8ϯq||~͛60Z_|1>,W_}5{lbb7BE̘@<[%GW#lz +'_QBl+nZܼlAkLR/˙Tf`I(w,{$4-M]at0ohGOg^RG}545X﷾-.֭[w@ eHD " B~|^Ϗ/eE()t]($I$$qpXh$B,ap63q?_WMjj,⭤.OXXzrP'PE%c2`s첿 ]Kк)2!-LĽ$SIаs^>Eu! 2sloKyć{V]Iq(3.+H\ 75B~__>R{{;˖-Ą1'RU;ʪFÈQU**PQQZtf$X4+H@?vuAw{T=t:䅜y7-{H*!4`3w?g׆%ҏ݄-DN ` L_r$LiAF<#d2AOVX497ܕ%Z= {[i@"X3fNQݴb.?[|G}L9#ׇ?Fl6s,CCWdxX&n;uDY͓C}GFoeނ9|lLرd236eGuXw 8C̣ٵ$?7"ՎlDnyA\ʑ2uǜrߎXRּ];Ix}^͚W|caή;3sQ;֕xVž@`߿RHE=ܲxoאd7"Uツ[R2mVMbL ͯOEi,6(@Zp|X߬빬tŞ TXYW|->ԅ3`u衇~*>*`ԘZ?vM@21Sv0`4iڞ@W[+.ffGk'$f0Z[ps?IOyfK0n8 U=&L;cNYԙs|TXxXhHR!()($A"-_:rd9 $)# |^ʂ> }^^M>#L8TSJB@5DZaYj̬: Yˆ~z>y*ՇNbcrZfZ+B-BV.:C]^'f欯Uf$a.\hin;-[4rr%3fUZڸ26Gi0OYs@Qf*.CJRQA ,4d5$0:& JJ][@XdYv?ּmw8d@xJ& P@M5A$HDŨQ4?crYR;q x<;JWi#(/ +)_ןy[S;;)G✯]żO?A-$I$i6%>D5F˹*6gckod (6d&0zN_7eyOp#9ҝ6)L[iI OdLACO>`nr'T3DN!omn|~.՘w׌Ÿ曙:usFeh'7gdK(vk?Mx'/lI;vo3Y[+9J&##_ z;,CU+@`?I DyMױٿ궝:cN÷#ksp8LWW*s=ɓOJ^cnW};~6kZg ^ʆvwkބ)o*1.uY_";u+qԹPp`[crGviYu5ydz@?4w: ~ô1AUN>cX'_^DQq {<7e,ܫUu{4LoD"_6.~N9b?NM̍+(;iꏤ]ONJFe6 Ya*6pt=22w0Gd_01`zM&k`"Nu數џUW# R)%WnJgENoZn*oR~jۆE(XNBvw'%)0De23ƺmʕ+YxmСqr~ko#4 #RfMSۻ%,&db~cՓՑ&Bz6+Rϵb\qfdfY"B: J8)+oѾ<ޥ%1h!POQ[XBpdJz)^0GO^Rl^f؄u_#Gy e@*%󱫽[* [Q.#NGҿ-+m9>q'V؁P:mA`P(Ӎ,-(5}e0G,'E#atJRVwH) A Ș>ڱzaϽv>4Ҥ"Z c$4PCa?im^24p+n l#ݖp?-gEFXZ`y4JGKāi%5x! 
ƌ gt!Ǝnf0}Mػkxqb Gp rܴ$} Lw-S?4Ęu$I,\p9xUYv $UI+/豕L,%+%Pcz})1$ D<icxCgﮝR)XCɨ1ҡ&tFSVLqЯ5iع}1H7ws93V$W3OffoqoڕoYtn]8}MTV|л$C~+L%|v5G?3N)<}j|}?( dr8 IDATábʃ4D?@VYt }.A94i58߱a*κ⛪} SԿ=^/յ8ȣĈԪ|A*}I\U=<2S*K9uJ~ ##&I%ɶ?k{#VYvwt0bTfnY`k{H ?@eo yf&'Y)faBVE;] < `)3}>C/B* ufQ_^D<c϶-$EQ$?Zǖ] 'SGB^1CLj%e;UGRxqj&W r:OZzAPoeE|^uK4 =%HQ~^ &fJBdjY_S}YG{عi=_>bBH# h|T -iI21_+~YC r^ݔ+.2$N!e"gFm[瓊} L0~# VT+5 -j532s6'}>xŷ5eT@ГTvw`,Nz14M$uabe gL?t^CUBgn{؃t}pGŪ,P3@, j"n7ck)1%25BѼb)_ǁGpAy@"D^bu@œg'LjIdB}{a*BZי@(Ĭc .eĨ*%$(d 'p0d`(Ǜ-fwGOȘi$O\Y--U^SȗIbߞ]?r}wly'|u[YG[;\D i wR7V8cCiq}G.v`_l'f%H ğvc"pU'Kԙ$ )S $[ʇI2%!bZ~9-6fAGk3-CQ$I&**fDU 5Ԍ;y:X$9z E .*B̩Ÿbdϐ.^S/]pI3<vv^v3p)Њύ#IRo$:7  v/3X[ogazaO X3lǠt` 3f̴9QVT˜ sLU F.x:jC=>d7 F'tiTk׫ACWkZc#9B^VdqMM}y;hvDhif 'SXRW2#TAԭ.F.>>OwM3Y $/0za#oZ:.]m(ԆI \j@B"uMFbĒT+ |b }bT_w0c G [/IDw%{ͧNGq3;Zy~6hlD%=2u "i&08fw`{(@+SRQm~rVekwb6^p p~s?ζŷ.k%*MG#ȲGh9] d9#|nj] JB  >>@AǓ dhHn|9(C23EXt9V؂/f^{+f&N#,3 9.q|W^dJ}߂VTƮ<{ ]q#.LbOҥ`_.\ ,v%ν2I#fCw:i9sQ!ELpPjC oa U @_9&?@'a,k\t_}?=(ӫ*9 x}>֮xQ[׭eŗMfx=ذ~Y".= Dy~T_;OƗBd> jnZ? R %#\})6 9CbRhF+hufnƇׂwH^*3y%!?# BY3m`(U{zT2Q&\~:+gOϥM~U_81׳9%{$v Ĝ&piG Cb%] *̲9߀d`nf]X쯱.2Q΁SGM'C x׆ߚǝ8yl}FyVݚf/ŮG}Q1''O{ov}_ݾִ)A*7QAbOZ-2&Hj6F/D&VJ|,_U6-tHIJ@mYq*;y [?((.#AJg+ D=, LLg%WdQ\e maVhok&a*m,g3].| ̶!JaD kYu9{IǴu=PIIYY7AHMP[ZD߫n0}ooCTܟc:tYU@*p =C3k|sw%XbbvrM=TEzmȹ9q%:%>8@Cd9+Fɐq'2de!(Te^ad$ $oBI-/VҊ$ 6y+d2AKC='szeA?mQzq]ђ/?#y0;΂C29.ߝ$#LQj'Naݪtt%TC&0~tu VޗBGbxAVKf$ vmD&V;eגŴo)R?sV`0jƝF V`Ӻ);_2y&?9HzAҹzls0¡UCB!>\'n[E_ϟJW2B!Ak }>ol2:1߅ҵvXnga5T ()oE !??Wva`Ҿ\_ld<>58% ,.HO Ӻ+hx!t"`kG>uy { ,^]s&;֙6Kq0 Ǯ4y)CEvR4a:ǫ6gE54"Fa6& i}}m/kmX~rEJY1}L@$$ ť~Ճ !ho˼_BO4Awdj6ѸHC*5mY'GYl7WI}x`wl&W8xhre}.$ 78Moow-P9T_JѹR^f)eJA}vHмYfw(Hr^Vn?s\Cެۧ[JJ9kWꃮU"`9Xҭmݙ} ?VۅMֵiҞc~Aű, UjA;$KH5<|6iFb|خ;ÜO䈣;y#tф2lnLsNEO@51fp܊>AnOÜL~0NŢl{tov7eTTœް/e@渁j eyjvC} B}tdr~3l׮Њ\\Bo}Qq (\vpbxHםNӘ| @)2ѳH0G[$c'Yfb}Z3{aB,tcc$[IE!I $jҶo/u6Egr֕WISǒ TfAr}tRl;"rp#!5qov@=߾7NqNJa( R BFJMxCls^_[!_߮ A5r%LڜIS]RHqG&i3Y³D\GCo-pw$Ψ!z cdz}ZZ69z ~΍ȉ6]e ޡ83 <4sE2C>XX5߼?|'" T_Pi6+*%8jƄ /EN9s#gn ACS|}aH8>?%eyu̢2^r9WuiG}$Y]a xD>y=F6%(>/Ho vZof|eG6w cGR.h pHU VJD,K*+vFMÎ4>tﴳO~Ή]DyH)v64@[ z.n`3)Yh_IG :׿{~f_E&lQ K/U'EZP08$PA0a'ʮ݋eX}D)I3WSYRDsn~ 3'yB Ds FJW/zIgi8o׵d%yxUN=p i2ϧXO#mW귓):dZ Ӣf~Z`t*C.] и&j;=)„b0)*-gl\skQ,|{0}F6eNV"T5Z!14N㊃r0k$jխ5&J+2`pC 8iO^pS=&J6 3 δ7IG-8P 8# CU=Î=m0(g7~t#'bG՘kuASQw&0 P=|W9΅ M~Gܜ}1T '`i+,64sH#P#Fb t2!?Ϝ#Qdp}ظzvCQ?0͞K)[zL$?67%j$vŸ7'(|HmC+ܮpi@p-01}FզHA% dӖFn򑬉>nęȁ6*^]CHC ȔK9.KCng8ƪZp jXRXȣv@e# 4Ա0аcj ExY*gHO'vLuӿ{+R#4佹9cHZRԸWc "O "H,Ȳl6a]s%,Yu2/>Mګv KP?AIH._[c_nv%px~RT N BU3;s_j4bwCO&vuFPQ@l° Diꏸ@{@rLuF&_e{/IYi3}J) o6O4Wv<|Wѻ+ϒ@I|6G!p?pgK͉g RG7q!ô@Pm2l, ƝJ~b :7#1(i w1[ ?/9V{_k?c|nZ[Q}c1%sIi 2Bܚqh=dk?B[/SJSy( >O*uwyp.vmA(k#p;IDAT9 DY`g Ж~Ire`Ua\n||~DŽ1 fL{t|U ?yϣ&t%Ypdj*5ِ%D !@9>Te]Xg<A0dm.|}.P2 cAGp=_9w @;T}#ȣN찹 #*7(M s|* 3 ~VPMjSUy.aE3?s5o֋l׎7GRe2oMRx(ȘER4n%h6PM9T֠W23 OpBAdr\ݹKL_x^wwz2;_(N[̘KIYpд6 0LPOzxYmqX% (ix ׼=#oGT"AYHḶ)y &oE?:4N:MHZ@i-RZ*=/ !7ĉN!NPh+C-Q>4ΫW0]Ҧ#ֱٙof~**[T%.wh:ξ8 tZ$]!hg͟B&EbSǙfQCr6.[ 4C`Qz?HxAOƷ5Gw5i+y6 z u7J y4(gO2335;jy,UV`=yx5K` Ӈ5OWלD@Ӕ>X9/\*D'u78qgDI\q *40|$UR~.':V0n#kV0협GEW&7 l SHDܺ9rsf Nϡ& ߤ3JJmzz ?Ӆr^_e"=uui') E>8މei`RFċ(|f:tO mނ1{1! ,EMS {9G1IO%/%qErO nҵEee cWR\WzFQ뙍IaʪԉǣH 篠;rwDZ|Ճ1ԡ&LL7g~;djl6ּ(ʽhC^FCg U@@p.#Vd"0P .Qʯ/Zo}w]ۃ`:&4σ)nPeq'Fg,A79EGRsʪj]HBlț>ol^l\n7n`c:ah:^7RIX@4r4{t*NaD+"; Mh5V:Z,x9XheK͛FaRJ].CwwVUvgܧ_›¾uo3u l?O^/ mRi[VnTҳ+YJ%It7+}X)ښWT:ξC :ہ0r`d t K2܃4S%( EOT|v5+IqB9T+J&w|.F/;a! 
GaxS0)nݛGGhLEiPOl²aey2t1}|4[#L.1 !p{0in r!t%%J)RB0fOnw>j7-t>/gRpOipßu 0S#cIIRRL>V?k6\Vvo^wJf%5mfbҦ'G)yNM4 , M4&dmMIi`+/0< zѸ@9a#a [^iz14T ~KlO*w?yƀiE%r`6P)-"@B]i ionXS#-73rEee%1q%>~iC0}~{9vmxrY)h)Fhg@([~{CgGbow=;9̹! $\&,:@?AeHsmHtkJfFmϱFa/!wX!n6#6*籥oq̹+E/N,al,&a$lI,a2 .CMG)EgGzj%"! fz"F$tE98qp['TlQ 訍VmtAۆUr-O8lr*it;̋O_o3UIܖN ɖRaXΎJlӺZ$t]c=9D,Eg4NB6$#{>+ރrӷ IK%K:gRX6VsfWZu(@Yt|x*R%PXEBJ,[bI%,̮Z8ȅlm5K6EZUrVw00 аFΈѤӏ,xyNЛQ$9+  JrfW|;v0)]Il `yYy٥1SR\ntd|~֮(qUHFh:RJXm+Ow[.k0q RGyݍ&{R ォ$ŀZ*%A*~oi> pK0z4+]Qlo~:zBq`\1Rl/cM#԰}kxp@x5(WW*sr +Ve8w7rT\ ; dA.!bSc&{IENDB`apprise-1.9.3/apprise/assets/themes/default/apprise-info-72x72.png000066400000000000000000000173031477231770000247520ustar00rootroot00000000000000PNG  IHDRHHUG pHYs  uIDATxwx?̜stz-˒dl\۲ՔdY@Hvs yH \&ln 1&\@h)qْ%Y^fcʙ#ɴM>w̙~}.ot_(<:F2@H1 _Hji@LY$FN`; e+T,S,0\OQ>A( Б,z:Janh^^+4Voq9Z *-**D Q=>IDGH#CP?T ) pJ+(nGaL5((W\+pxe^R}Ļ9=u2~t+ Ծnq&KϧjBB@J0)!!H^x};ܧ  Sb3i5aup\HJ9G9@0oM+|2h"XAETEq{y T_}z>o++U-QEETEEQUTS|< ҵ&GeK~%^F(` TD[b-f g\EUPUQLEbp}@tp`'”LG%+k޷#,^8PX 4tE MX"(@b!@(ug#N*\d@ټ @嗽}*h!(yu~ UCTUCUT/EլX7.ԙfnB(j* 0ud 2̠KDbH2({u޴/ G'P8p&BP:g_EQQ5UL@4[A2UN2n/&ᤋgi(Ч9\dxڐ %RtDK0Dl*{M_O\=\P|.oa :ਚbWl)xH!*HzxT",ɬGHsͫUa믒ǐRyTNHӼ|yLi>xVX4ɻ$R[t?w 𩿔~ | 8y/oE1UKQI+,r̪,⤚C4@"q8GA˽}~hGسu3n [^{IS@m؏GtR @ѼHtj95H5O+V-.3 &` EQQj&a !,L R-i+sby-/>G]L>]$I@6H%o) ]GTYXK@fgd 2I @`b=}vLw r*?-2 x7O#`[MEjAjW5< ̯.~Kk`K_KJkO/I) j"B$f=vyoL߹ډl`{Tl~f[N+} 8-Sz)fȠEera%GyH1SyjjL {?7wvR=ӛIduFY1Q5$f$=R}$IW$IwZ2(%xλ{U: LumT&UO1"jH3Һ Sa_i%u\kȩ AB0MA8&j0P}ĆgJ%$bQN> ʊ´ ǭ~ZYX[bfW14DCJ׉uS:E3Ob9yw!:ByZj$ woAf+t R6oC"LplV*\hB1SsLBbNXyt~uz{ʊ2cI }:7Oo;6ηd믲sӛ6%%̞1XdV'[\I}m[,90H‘vE`8l?2i_T!Z v8ַb.BҔUU!TD,:صx%!s\߱N6"? UԄ XP["A75ܹ;aJqeQmu"TpxWE9\ZғR1a^N fgM[RO2cL+@)ളᴋ.˯ێ"khffS C U4mN ⤥sx8N4uo"=g'Asf35 [`JҔOq%,鲥I`kBUżCcY{DGYs/4SKÌ U OIy=үd" `:sB sJ^Ǫچc+aՅXe)HAy5n`("OE|Ŋh8H-;3{={pwxG9]T DV'D3m,r'M7ekL))hBTM3kER-USδo;vQ5BPP^PKmr* m)2GZ7NC ՕS;7\x1Pzaa!W_}5555ߵzHfu.tcmAWQQTeTt8G5BP,M5^+&WRmJWZA&8ҥڬd#Ś~wBUUUc>\r g7`o'V]U>ϘRbCdi3v*,B񴞳bBZ P= M$/#d(̯-EwqoX̙2݆agќ׿ɓ qoGS*g߻yU# Fɸb0M`2m z~JU)i8P fxBp –.8*!FY8kի̭.B7|s9gct]'w~?䓹KPM>϶7S_"C0ϭ~\McĭbCI~N ,LQ6QpCUL'T7_R|KʂTi:vWر1 І XjJ䭷RWW@2g~a̩*F`˟y}۷*.+fW~/>v ]i qKrDiAUp7#\$-rv}.pl%) h?\{nyk̙huRsNv̓>SYYɍ7lߵg5~¤Ws;)O8&=&\x jڇV$ G{R*׋Z̓ 9~5OJx~҆Z6rhu9 2˝X,Fgg't5kp!뮻 :RSkh$|z&ul UZ^xt3~ПH;y1 Gm *PTk1X)a \:g*U?!wW9nL(r}7G7ndժU5RU%E;x<33<2m}DY2IOi%V>BIul!`Fޖ؏-U !Ψ'lyV,qkMMy qe1|Y`A^yz\s5^mJ~)g˔ 8MbHdp)lf,2HeƩ2OARsuPV)]ꔻHaYꊂٷsQCؚ \9眃ih(coG%p y/<ƒݑd% U䖻{^>:H,#FinGUT\4HCf\払$Ry-k]'ʼgÆ \믿|ͤR{<ZM@< 38H@S1nØ][H]Ȥr:_Ӛ9gT;q:TK }·< vaiiiaŊd3-ӟv3d2AVX᜻iKl~m~a6}Z9n߈2Jkdb{CqadR$5XUJ/]pؾ}qd2q]+Wr뭷O窻~L,2´!⃘8Գi{&,idRDg3R2g]Ŗqlgqo깋8cf6mMY|=sG/r7ne(qz8\eɜG[R#2ѓsPVv&RJ]T88㲯$|~\<qk1mOBcIJ%vLq6`Y2I[N;^c;ru3lXzŬ_O<?L& E!.PQ^Jte1@77#% $2&Ns# Iw#6@ `= hoQ)Ґ=P_~Ѫ~'ddlé\ΖF߿SJ"AN8d.na\ꦐB#1 fF"TFbc*9X)JyU5Kin9]{3:tGۣ,(Gx:M$x/E<w,V]A8P?c_nzwHKzF$XicwE^)OݼEM]GqԪ 8s9kJkO^exRkІ˘{)ǐ햦QvD np9.ǜ\rxWMDI)%J*z;;Xr gZY[7;:PrEW|| Xd P7F HpP#=RAK?32#`Ng .g78O(d~j sy ̚5H TG+],8Lpŗ+'bjY5Ԇ LdHpsYp;zґ.[l^JbG[9C'ǫK7jbXFgzS3c RJ􂋨)-f۱AT'\^"%%3L( 2B[7rLih@x$deSp EAyrW@ yBRFM,8ij}ˍ޷L:M6axS<H!tIBKd4<@ƂcGӺ^"3_~ɗ}_q9Bm/P0RbIP|^M#w{w?ΆyiU?7_m\YmitG7kU|PrT&+fM[TVg g$F_4I41n( }^+y5zν**Zbd 9~{TGn`l29rT˹-xml~3(n?x˫T vWE sam hF乳ٺ :2g)ݱK&-];%leƀc!UKJɱk\24sG(j P(NS_Zjѻ:r{Ͳ?ECM5{"dtR "qcx^!%C>YƜ@|p6A]5{n7\n`~:'|)~{,| ֞5 +~r$p-'q\p l<ѵk;hc|70nšٓwrAͶ]k0)>9%E`mH)gd +H0mPܳ wtl\g ƏUHJ H]GJUm5ryVc8ʤ%gQ7uqopޕc(a.ꍚc$%59HD7һU7Nפ;W̉ DQ WZg:BA EX%$ ٲ ku +ng\[dž&qq-Y>YQqʙ/8O0'} R ( Á(TvSILٝc6s3~kǀk`􃖏3g@ ]?#~PF Mk%Ӈ'D!5*Id$H9dwZ8` "r/2c0zsv ?{=Z#P5ՁR=!WZ"/,u<7H듿fp3ZG_:o~D$\?>EjBDY@[aO\x1y=9]myavnFO[sz5ţeiBJf/"cBUԆz`]jn_::Bгq-w`;YŊR:T-'PY 
ǔ>1c=}Ix]SOZyYwVXq.)7{- CCM߾|~1_AeUVEgS4}6&Փ V@X %RѓqGо5~`,LOj 7P'a9=:Q|%xBEad80>lG/Y+|2-]a R iMfIZ!6*mjyV믲-ނbN\Y"̙~ͤar]oC/zKYU/<(/(sIENDB`apprise-1.9.3/apprise/assets/themes/default/apprise-logo.png000066400000000000000000004722131477231770000241750ustar00rootroot00000000000000PNG  IHDR?GriCCPICC profile(}=H@_JE*R ␡:YU(BP+`r4iHR\ׂUg]\AIEJ_Zhq?{ܽFiV`tL'b6*_02$)u_b<ܟ_[ ij0m mp'tAG+-~\tY3'.VLx8j: 8kkߓ0WNsI,b D( lhIKRU# ~?ݭUl%@ϋ|]YwcigJ `zGuGS`ɐMٕ4Bx?o@Z>N u"e{ϴTrbKGDPPV pHYs  tIME9z IDATxygyygzIт]BlB l0X ,9v'>I,N|8xcb;`vd abB  AY%|~y033333G$߄i3766Q$.> L fR_Ӗ3E 4T+jِhj}CBT e_<T< e"! }黐w #]133333$j AJAGH ڀIuD )ALJ(TB)J`۪6pR}J@(ULJ}dԝ"K JQ[u'B^(ח&_p8H2;=GWԀE;ԐF0jS4bpfHБIt3R H}4'}-k?WyVLU%DZK*8)E I|з! ڭ2f8H2;E?CG٠`JuvK+88 qs5I]km >Eݾ #4UՆIm+\{g;uOnipA 2\L(C 6;neBzi9X233333sdf,3_ pi4OVGQ* T*VT$1G r?'>rpB3箶2~SDm˳oW[Bbi7]ۀ[oj!)/;lu#tiv%6F.mʥ-bL}5ݷ1ESgEE_Tvʗt o Jfffffvpdfk+\Ĉ!t !M$IJ db0;J rG @uMipVi9UwPw7{[oB]+ͥmfffffIf?\tf#|4plY #AUpG˲AJjL}.14nJ! 誑&uaʜPiK#`p'p'ff|=}L(AA=8P5,l.Iڊ8 R ¢_ݶMWei0M4mduwR?l6@*k+$V9GJ_mjTu8# "ho;inx~gdF@wȁ=$-AVfg8Fy<=s1mVuH?D3sZ--L%*K V$ ƳѶ'lD|-͉|g0Qw733333{ 5>fvP;d3< `7ps\Cn2A\ q,.ЉZ_@ԞԅJjzQP2KiDh1_%ibN";/&ֶkAsTf^ρgGY/-΅84M2Zbvh$i0oItS9U;A@ZDڀR mmpVu#>*$ZԈ6'S{v<ǟ33333ۧ$١C?0͹Á-9B}bpj$#j08 Jjlv6$7mRs5 $4jWtmK" ޟ33333'$CmE@m8'SnmG!`^ F mkzJ:kU$!@D#4:BMFIRE&n3`fffff?IfvQJiGץH"mi8,&u+T!m4lXV$~APAҦ.H)9I@$D#)P3q_M$zWkq Þ7IfvgL׮{offfff{AY1"$`#p!ecIA Jayjrm;{!1ZJ}v3M>;zCJr; H vFRS)QiyS"$R_v)e)/{{33333+nm33k}b m4J;BoP*r7 ( C$џp* |*ϞS4|,I%S\*W O73333{$3ÆQ(?1}>y]mH6)+6ՌxG"aĜ*2HiD3hskDWH$=)}n#&;Xf=s9?V t 4uE3ꍹõskBʵNtmi6Hן댤ų]l:Cda]fiGW$F*osR7#I$`$Ѩ!5H*'!RZN͉10Gg?ﻙ63{so>F)jvo n"$JDۺ[V.}+Ziim)u*K"rvP293`􂄞.8RO֯yMqdf7S.o^)> Д,G3^rA>w 44]NDPN)!+ T: xBK!$33333wosE419B,(Ph*JVB mmkT9ԮV+ѴshPڦDj+קr| rE~scK?k{:MfN! VQ/6X_EuHJ]o>CK9/s!J>%b&HJ$ !9%HI.AR;|집jTt PVT<1~cnm33}ؾP*2p')//EOT%p$4R]{ZEVT%Emmkg&i]ʨ>JDDЯMuEA7^۰<xDogIs#fffff)W$Kyz$Dz:e;"ijjҝl:CKyІ=!ږ*1R$mk1h_K$V+I)uLmER+;[OHvx=G~#H23ۗd-w񿀿7h'Be5 %ukmgou%t"D^BElrO+~#H23_>36.xp|T$Hų残7kqkOC4*G%A4AER02#]RP._iRas3333C+o O | 4y]P?)or]nD{c`Ѹ^cP4ԆHg!6#=;`3牘jV[q*÷Q{M Hr{q:s:،x{nffffvsdf@%w3HsƱncV(H߲fgM]Sͳ+)Dq' qbۮ  rGfL䫉n|iHf fFoҰm[.S+}l.VtK)6 tK=߸=73333;8H23{]&(݊?x׀岲fahf7\n^-*7qժkåHI. B8x󩤹_g$y޳G;.ǹ _+ԋٸeܓ-IlTb$5eE6R"QBr{ InmwIAڶ E;+&Iu^R7 @܋.[_^ @dfvWO_@?t,p$ƂHKjsX#wݓw%bRIDXI@ĊQ*~4W&=<#̬*m.G;3bxL;~i_>I4;/^*9unQtE{[r)Gےv,R*c DFxS\#"K"țsΗIZ7^كAp̏=w–- dQ`(~!UϟWؾ~=O" ֥<&1o[DLFѵFa^-&DEX 5 IDAT$|[NoIK)(-rl#do o\A {'+ӨfWF(gSL&׼}_{ؓHs<| f&R?v}2DvjVRʒR[fڤKk[ N7VF#䆹OKv˞AJ_K+o a~Kb.c| n *bP%UCNb*M1\e85?AyAw%3IfvY^ޒ'4F;hoDx|0ߤZO n)ImYQ"ۦ&I2;<:#i_}T!^WcӁC,;n?fffff$3;?/FdGm]bYD,S=8Rb SPFD%Q.zm ڊ&͔{ #uG} T}<xl1wb[ns33333o$a!/|y1۶)^m3ۃcͥ'K'6I>|!xF0ޚHOnv ҿ“6x胡nVROmV[\S/ѰiLX!<$xrj}Eҧ@׿meRU ElvPvKbW?udmu?PQf+#>CBOƜjnglsdf_O~ի9ڸ'x K0ChzlOQ%ar;G3W-<zl}6ȟpL!)IeU5uiyF^ƧوMD iJguڕ$ҴFTG0!atгD~fffff9H2CIy/i^?SMrԶpa`To*Z+ ZfXm#oEDFD[IZzJ4u#c/r]{Uڲ&E,q17𮛿_3333}ANzDΛb2Ʉñ.MjmO_Zb0;Dyy嬋> $)$IX[#T$-uv\! 3j4xgD&J>ydƧջ03333$!׾5~9r>"//:XX"wAPmGSiD_>n&L$"'!#b֏V$EeQR "rr[YMPpts sZ)8n$3333}ArN~~W<ꡜeiE% 0ZC5 &ix T(E,1~ ED.M~%I 'RPD(+ՏRR@ KZ] &FR Fz`j-VB9"AMeVwTW e9…M }~q^wEQ5ٓɄ3~v .']L*MHVRWuQ5fHRqցp<w"#m9"Ê4=h;(%J,]A[[3D }'J$XxJ]Z[7Ifv(oz9yi'˧NJ+["gLfSjX XhT*6GQ%( rW JV&ډApx))[E/uBdT}[j[۠. 
o#AQBe/v_o z\ھF1K4P*$нC1J\DLH-g]Ύ&*)=e\xg^YUZIՌ")%uwhW;[JƠ!=3L☙7ޱb<>g2}m&ݭ3Zq_1~ُRO >)6pl Jf4\MD6D*"AW%ʁ 9hڬ*r9~y }fffffi3;h?.c&2X]B^1ŧKF:9@{Z' ߃~ny$ڈq%D3R*qTR BRՖ5PnNsITjWĈ!ihjefffff{A4θ⍛&Kc1U/ՅN2}m }S\U랦$rց>|_cYG_#rDLA@ѷX]GOD)AVk!Im⤮ᶪJeZƒ|epw-xzN瞗dffffWC`fS^4ٵiyiSby\al,#E<˸ioOqOetmƔ7n{{:ߛ}._R.)cE[VßT(,TnH@jRT&w6)l#ꎣ>PJRJA*tIfv~7IJOqmC V10{k0Y%kirNʳ.bp(R0RRD= EW}DۗVg$;hn){stt~YA,!> qТ-Ұ,̃Vb[9ҊUcߚ!=R^OxG-nfffffkqdfs韧_ 8//9'$>`TAjDfB M1IPq-3g^X ӱJY1uXA"i3D/Q;{끋vpIKAPK"[c299/-lHa:V|j1^0Hz|^Zjg^D"q]| 4dO Ꭹbwv^R[ٔ^R4 c7!4ñ ~nq33333[Aν ?+&UҪʠXc]ϚiM_f$Znzۀ^u| WKnkjWuzzs9Gikn+ ki\g$3jtѮgfffff=If;?|՚4?8"ʢZ{W9RL}Z,i ꢮHTcۺۺ}6lϜ]E9"&kq n޵.`ҠI3owq5VB~ۀ&4߾ֿfffffAu_yk4愙ru=G{8uaTBB+ފG^kl@*öt6L ;j웲rxxhmwR]Mv섫A1XT7}z1jffff?>f`89^}([Ktm 0,VhJSu`*ESPg5hJK4u(-ni!r.yUw[䈮I<}@W~}3]-$L~"p{ِ$3{PLOF]"r!*%A1D.+M70 5 2@Xk̞Xy5w8{Oi^w/;*)IKtp=ml О>DiŚwJ{G{0t[Bݟ.=i.IМ/-nfffff$3{@zg_޶MhL<0^Z&/-,fӄ{Quկa^rcW~XW R:2ǣ3Ӗ |-V{JPWb%XBO3D$rg#kڤd\mp<+EoY XKK,p‡F6dimw`bPW2D2hf~Q-.>m{Z.0q&tbju3ܟ\B53:h rڒ'GD.Q #rXm*TA 4 w$W\r/jYvݐ"QKXW *LZk0b.ܠ0V^*%g*ϴ4p{}p0o<Rc&fs1$vSU))ӒRn;> 2 BGB 6n9ʿffffvrdf^8Ou O,/ͬSWN E;_U4#qFgk7p;VL /=?zP~S&wD|6wkqې* rDXB(@)k}{hRu ƶ1.o/}Iف^9Ow/$~u^pxK3?hnn 9)!% aڞLZfWVM7ijދUٗ`Iͨy},tqJ5\uu=t}Wޖ,EWqZY ô#wkjE\lr[bG09Gz~mffff%If_oK֭{HRCR_qԵM2c eN? g۶[nPto+M5LiJmDiװ08"QsJㅃж.ѽs>02J;uZ m*ܣ6V̴neb*$1H"o^-4/{6333Ò$3oNm~/`: G)ALv$//IWhEuUjAԧQjYcmQ[cihE5BLs+xv5 1C33 4$E1 jxeʻi[v͈j,) mK]}iXR[KoʿffffvXqdf1{޺3aii4_2]uOy5i8!K5j us(a4 꿷+MU' u Wi[mOjض 4su^r7%$b00;wl7p==iYB )JkjL0ޚb{_єϹx+syeȇ`ffff IfOl>w-y-,jSOvbyH@+#D+zPuRՉ jVX f6-AR0}l|<=U-Ű Qs"o<>7s)'_Q]-L]@r9|&ܮ6f+ǽYjcOgO$40/Offffvpdf?S^μʏu .@jE,/3޵osKCB+1肞3lŬTSKL07usbܰ- 5I i F7|6uсJ}쥤HJ]莀&I$D d) !ֶZkV盝կJ4NFDR7RܦH3qF䃷]6$vxܦ l?Yc>O,mVVk̷ZΆ>``۱r崩]j  nÇڧ6|]?yJ(4P>~{>JoR;s뎻U1򗁑 ?b*n*[Qd{n.!RyG#A5@@|o`ffffIfv?-faݩ`UH݌wL+{}trH>pV"c IDAT>57)ۺWVӚ&i@pgyT>P-i鏀;v~{ |ѽ%\ @yO`d8LP(oltڈbffff?}8+~7_fOܑ3~bm,oimO WncED5,IpoϫMDihⅩF};[iAS] ~jU#`՗0gLerXp#OVo ] Mi`(̒<\R{TKj-YW~}=[D #괮 Lؾc>t>A!AW/W?}c]5̕o]ocfx36 /7c@ &BtU]U?2#yKЭרֽy####Ns~?0I#$]47UTw}fXQKevPSOs"eTۮS+D ȸ wg6 jBB¼u Ls>1DӐEίUqgMțQ3D68N!>f\JuFH6r n>qpku\e.%A^LEwO~=2 2yF# Yg$6@ۻb"AD=<!ToSۥ$s{jBBBBBBBB HJHHhg?3absU6>~6sf`fԦHDaK@Z"4~UMIU>S-+2M7jQðoLe'fŸ&[W$0u7* }:R%A 3{uJkLiBBB1;nM?|b"$"Pb0=HO0#D[E)l 2k)`*mx,\S mş"V; /N[y"UbDEDh$*e 4gp#x{+e:_yIC0#HHڲI-PEˆ'IDLS_ꨚcq"rLBY[e`sQ*nxI~{HNBBBBBBBB D$%$,3M0yS ]jF:4čM5~פSƏÓYGSFVUkJ[tŶPT7GP9BzLsYj˕wQISYYΔ-M5$A+%Juw h 1 HJHX&u㍫wp u>9YA0-̾}(}l5D ۶¹*fuuaѤۈTc*̅dS#q+QDIHňp6 5Ir!n&m;'z-UI@!2S9gT(FuVIڂ74"kc*ڸV=DL|N(m '8v~?t:+:gA(}klTuqB^+ - j.R?ZTV\*J\+k:#Iz(lV+UV}ZZva ?T ED9L62e|ly Cø:.Icˌ1w'.޸' F '")![nYW̼tWtG$t_?+P%ۇb8gG+46t0vm $hԚe0c:ANTۛ$$"ڃ5 "Iw?0yneˡ߰l 0[ (TMhª2o>*$FWD58EVzi JHHHHHHHH8I ' v|*d٘gGQ[Q Pa0= EE$Q-ݏѶ)CkB:N+JC)-b[R! 뜡@T ȩXlˆZqc(КML|mHO^)wH( CE޾BĜS\/XK>Ve-Nwl,|Y.iz)m5YLԕ).:zQ$wg.o]Ol +!!!!!!!!a#)JzWw|𼕧lOK"ՐCB ve l XETJa *5Y( TJ9¡bg *]\8NWjixsX&dZHDf&+SA ֭DxRf)Ul;6UGB@ >9 R&y]keyCwg4yM\q_۸6ϷkY}ʈWY gfV#x5GZۈЂ+VK%<i:Ʃj5Q(^Du`RHut6U2f JS::_E~bv lYȌnpזI!wmj^Q=֍ $chk*>$I8j\oBBBBBBBBBqD"ML/|tk,AQeIR3uILFM}LPVfhݱ샵-w~gqԖ&T55=i0VNQ/ֶz;qcѴR q?I<$=[ӅpObi)P43WI0.^;WBH`J5UQQ .4(vm?h KHJHX׼=g]n|CGDZ^J1;aUqJT^^s{* \Q'j>U7.l(,V(6ˠt%nAQIK9Q 4QwHήt=|듟v x-{1$b}j mHm /)J"R.S@sOHHHHHHHHHxf n=}v֮vk閻ǎDȘ _l&h }\q4bTVPf9: sbL AqA@bP* ?@g*%hWF99AS̲t| HQE# %%K$RuIq$UmqBtEM "&?.PrSHP*ʑQ ڂd*{(V~Ӏ䐬m vU+-y3|秚<\F{6(Ν.U3 ҈-w[H띂F&Z_ӯLD-K\WpT<&֙,ۺ\WR dAuaWLQDXV3#)`hQ*oLighM%%pMҸ,4:&$$$$$$$$,5$")!b7*Z"dٓ,[,l,^GY6IKjBF ¤#(tZFB]{J*I\ABU$zœZ失%]TNN,[\?og^5JԮJ!o$D7nl(-GRYZry_mDQ @ 9quŮoNnwG0b"Qt{7S0ڛ]q)lp. 
K HJH8Jظua0fɲKl|8 $D:bnÇAkYVd?Dڿ &* qzQ%hK{"aPX.'#?E9 SWoQX{鮳w}0u*kVÒsD2+Ƞ'1DXV+%N IuGj Nw3!!!!!!!!a)!I ?%V>fSde/a=dcmFc~bH (H3XN/0jHuEm[BcK+T&Sɭ+"bcM N{#- aZrxHW+`E.ר=IoDU; AvHA|:At?IH!( $:mNDNIHİֱQo׿&ڜq-^|.\~gLb뽎9smsrm_)DRƏla͚iudE0D@ZMZ>/@osFLVeXf$o2[Cj dph;$$;ҍHEQ8_C֤65-V$(D5f;>~*ҕ)_λ O4A,L@24+Nz,MV9!LP B2<"m:m KHJHxuM@0,ڱ,1Ik%HD:N6 00LH1Ace󗑑-L&^'fQ/ ?p[f%O}avgZR0JRXsqloTUn[<'Q}ZfvY^Oų lkpO*ʟ˺`: V bUIm h$V:Iiv`w5!!!!!!!!a IUf/ IDAT o^S u?pPx׾ .L|5V_\ T۫$w;U߻MR&&X 嫖u[ _p> ú F( ?6YY].].ս )s!iMHHHHHHHHXHDRB_6F@tnlfل >0v gg1wR#)2]IX;F짌*jDj#-qTIYCh+{`z{o&8\t t7YS Ba#ùt\TV%Qe$ h-h̆t)f4n' 6Z1aYUOHo/ >Ü B'%p2 <\BBBBBBBBR@"=%[HB$Zsvj{Luum;xJuxB C(Z}r.X?EҜaKB>p0U SWhZA7IU$7)""MRQ"v|I%E̹foXl۶5{ٿܯq;1 -@&C@{Ot3LRcӖEe]&ܻI߇e%YO%7 KHJXv| wXvc9&ϟa ldY"\,[,&KD0lmyQ#-Uَ-MGvڮ%*-#-0Z o+C ) J\l-ΨrfMjGM4vUJ9(Q2Άgc~γ𮻾eO4dع<]W}TZPPIi{nu MP-"CV<{?xiNHHHHHHHHXHDR²>k&Ƭ$)69ps&&?A0J<_lvZq9ORnJiKf٘wdjJk&&!I*13b}Uʐ }Ԭ݅m M͒b$`qŀܚN[BBBBBBBBbG"NLr֮v;Wcvlvf.8$5lFh~*I__+^;uHPͭYSx-m\!6ϼ)n]wrEkN ?yWxg"rt{ JIҪk YFE;kuI*SUFuJQeڲF(2` FUL;aﯠ:s Op O\3ĄED$%,iU&5,;Mxwfcq_baѩV1FſNCVj]HբRԱ7*+1Hrg@8uG p0NHRmr.޻q%$$$$$$$$$")ay`O2rCk 1,!An=6;t;YJeYk1&ccE7;򛨵6Wmm5q?5f4eKE µyҦ,yL 6$L tZE&*2𩫜 LMg#6GQLz[,Ogmp ɸ~pjXNu:'PD4iM<ߕF!^?p @DX D&2t5kW*ޝ/A)xW]EgMHHHHHHHHHHDRѢBV_t,}nd+i*ciT2϶Nw6`ec-15U5*օ [#r85 Z?1`z+R.,[T.)&p^n9بƠYwͦԩ3tV1DmnWt)ŷlƦ?xFF{o %ݙme̲ixC4(:ݑF4V94f:^k$cH\BBBBBBBBB",a|ƍivYjη]old674L5F)EӁ0"B 8FgZ ¡=F{$ŵ j$cm:$Eqv2F*犫0W>*m$S?$d|3\낳.* [ItQdׅvMJpc|EYJa`PvA c/C_:G}BJT߸:mtVJ$"N sO#ͅ3ϹNbBBBBBBBBB"%Q$k.Y'M<';Vcf٦ymv8Uә0Zf% Mf kcE*OLo "ZTk8ꌀs#V2+Q%R 1Be{v~bRuӂ(IWQl>ha'I$\͒taSII$ڵiX(0[`00(0%**bKVHVM8E 8(huzO#M8SNRBBBBBBBBB"<\zuK/=\J0回Gټv:k&::(h֥āE,b.%(=̺ 5n'd92QGYT$RՖÙfQY= +PC}/^Cc3'aϣ#XJT{)?tss{>Wb8u5/Ti1Tgp$ћS9ZI;&CXW+Z տ(J]|qwή>{Ӟ2s_+^ʫ$]k741c3Kc lKZ7kg]<)%*܌@/A*6$[U H$>\l֡a?ԕs0*Re_ӼJk2b,}·>E1P圇m ýl"U_G4IV3^kfگvaٔON=$" ;k}!cFy]t#j[]RYתR*t77"Ļ6=d!OX*+I{ ^BBBBb=Vƚ4ZyEرE̲v%3d ?OWZOuZ"@^ "2b8]{Ç?K_o|sou3()!!ƞpnu;vxpt׮];u]V|~ǝɗØ ,_w;e9LZ[(2zh 9>mŌR5Є|Г a_h#:}&t@sH:kERQߏ}*u}ސ7 (:%afQxn%%>lzm3`v߾?}[_͊'=5_V.rtҠiKJk)!Iq T|8CkJLBҘJU;,i{_O,iui8`}s#iY0){Ҟ8ǂƏ=о!HJsW_WngqCX[$NPÚ֩+)\@ ssfvx>UE0%RB1BR$-bI ,9 V^l7d;M3a,3$A&T/.// dP=IZiz錘eE-yaamb'{be|䫬z̲.KcHgCuZ6U%(.- tQ)E*x ,L6BR(vL$;S/5BԙoXH7e Qa%3M"!|DuBb8=-{O|~?ac[U4t_BȤui=!>f{[?E~ߑ\rJZ{Lph:($ #aĔ4SS&1eskdy6NK1 XEQE1keD++NJW wm:{9guicl9j,/R$wx>|Zc`zt@ddjaH#>LuV$yD;s.Q DA?ɗ>LGj]UpACH!b/E1@13;u7cbʍ~ l'}&N n!\#pts鄛2y Gh;( Ma1[C^}o}[?ûo{[CN_o~n_yY7vN&3Y^c%aҥ Q>quqgjE]",R@C  ٙ_>2Nj?',/;^:"Ě/~m]XSNh!)A&#V{M69[KS,кkّn1Y~cVc1eضB eS,6dmj{ķ}K4?]u3 A!0@/aOM?2ʆWYv E& ꁥGƹ[6Mgt??z>pRiǵW\ylwccgr km-z߬s>Rd }O9ó]2pfF<$ssw:a׫n\c*ۇ`T&[MJ5b6n;*r ƍOpmE ae]b55ZGJۺUFL\8Aևwmfrڶ=m0]i2k8RdG)E)j/feJ.$\ԁRsB "jNzqL5Fc*`8;41?wR15VƖ̟#H'd|b !@Y<QR[ 9I^"om@ ZXcjXue17'fPV(&Hۥ.NѿWU_֘Ir 8᭵^K48팗̪s9xv_`__ZZaŊbucN9i=登O'}m㳞u\> Z(dd6sd0w=~Hw_&i-vv7[o⎋[CÉ)5lȬtrBЅH쁱rs0!P*f;KOl+x.y?ӘOJLE7}Eݙ Yo_1[<9dY-o,!i`ҒxծH8n#]Jڸ8'GAh?}_K{x17(v乩'˴p#8lLl1A "w:h_drIHh,$ZOkE@ڽ|֥6]+|t,#J-']6v9KqL]ҭS|)[$}RZ8쭕ߗbn/(;H]7xeQ:&/6y6NX4cvȇC:f%&1ZYrAi$SAZff?"!KeMlm/r. gdk娛b$cp]Hx WQW6FVT%{MoTOIv{^"eX/y z~w-K6ۃEm? 
㇠d;͖8L.mSV?׿%4ZP wΈTauv<.NƇp-rE"0VP`7|_J_y+siz(@ލO< ښZЫEKAָ$ID&JMBl!jgQöM$E&"iu7d8@\DÂ`kG$z\{{L*=O%$H۴??7?D2H`0*cyAv+ Ԫ֎#9%Pt~[w0C }ޣH!Le- ( i|yaFhё,fQA6)gPDp"J L=똍e+>Ǝ@"ܱhL%Zf^^Z=QxS3yU:(& |Šs 믹{2c?[^B^LRHi<{G!Ʃ;HLŒ@ua#ɓhTDZ|UD@/bMe7O8}з1 Yf-2"Z9GgsXЁ[QZVm,NF}pQQq"AkM[`|m/yA~{˝oy; -Ѽb/;:}Qa.:Q`{cm^"pGmFq>vQevD.Ytyaom+mjUW= T ["͎ݲ^\s֗=6J oR6۔enΠ舢ūE*al{Z5Oo#:Tc+H>(R,>r8u]6oj._ ;6t8~,t$Kpk(vU-/a$|Qoh5ⅾ4) 'H-cTRUh-_*1c6R~e>: $-$(3jPّQjnͶ0j@;pW>{HpX>{C&͏JZ0TZQ๙^Dk-YUQTWD`hPa#f%:eksXAX5,a{㱑GT5tP4XO<~嗿d~yY}%=amj;mP م 3‘HRսSiULFP5 N7t{;e8\}ֳu8?~_ܷHxQZ*3-te-,ctd $)GnQDpxE-xt֮Y5s$Q`2rVnۀƬ^ Jo߲#Zvt#j|U|7Dir1iҺp0!m@`{uO_кg᪫~osv}l]RҨ^*,樣. 5)ŵ_Εlj.:Vpk}>seac~Sa$ͳx.ba 7e^[Wiښ)FZ%xKPM0Tw=kp+SUI#Zym l3MNvdz| `%zAܣZ'Hp4-,G8gҜ;q; 5? t{'?Woކ ۸ΪUc1Ɛܱ)ym87 aX,pT=A]/ g iEI#y9!r'GeeoG2OlA"E0w~ki6k(/Db*ђQ$'~ݟ>Tes9mWH1(-M̕7Ԟr%m]a^Y(<:Vc֍}SGoLY{{p{`,(MvoiYVV53r$Ĭ;rQiTB^0#z6<{\~;\tk8#r{E:uh߉3sQ _pG4 )}7rvMAT쉶煮%:h ;k_QWEdfDΩTJ [G,xIx-(0U4]*UU.6UC6)CDWfW2:~Gq˻/~C}v~ko=RF&ĭOfyHq]6a%h`wPh1 G2JpyJ;7ne;~x[#~3![gҖ"g;RLz C?ײc}k:=g@Ҟ{îO{~avk s``e.)sգ{9ͺ#] t$ZTjʨ!mGob>O!_HLCc66Ғԫ.w$`{p2@#3S3 @ƍoSFrޟ_=vCr둌@gg}}k^{0qbT1n{O e(wnnzQmu",Om"B3_صΔz3t/G6r`9tqG] /=7e1sʁ/u3d!O }agKnsoIrIAU٠HB0 *w{p#I: i~]2{9TCFQ"I?{fڀ3I~58L$kLp:"Gˢ@01@t]MhM@(r㱝A#u1Rϙ7]BFUq'>AX9r'nWƛ䮮#NXXMD J*{ئgUL轴%bp*u=΄MR@%VFӾ}x8|ۊx} $)3{\5XX;?oY 1(Vcm^ЀMִˊ3?s\I;ZyOS`hXjНS^ xQ97]~^C['YH0N؟tS򈰠b< ڊv7U|J;,AcL GL]HܔzBakcɵ5hl3-vT5i s'&ilSܧw-4ܻetlJJQRz{PL򞺮TLÑ6%>Tg& qmN0 JJW:[s5xҢp4XۄʔT:?v35':GaN>]+ё/[;ȧ)1` aH0:DHӛ|۰ʲ@ٻhxG|2C4%h֘=l׎NDx&Xpẑ֍M c٦i415hLvEo^S}d׳hDŽ'4@E'td5R1 y,`/z95`F@[Cc IB \(ߟd⃿{{^^n$4E66S 4sfј=[EeK̭)Gg}L0v~$= &lضVoc~bK^{^ǟi ] 5~)ivz_Q7kӋ^.ذ;T:mC^tFr9`Ђ~MzG@ cv IDAT A7`'0~/Gxi+یsڞ5S\ʅõ}Mm:tIOѣdA4PH0.дM0" 6kƈ@33Yu-[m],`)&S3AD:LB@wDH3%}T}7LRaƋ96eks~4fVbq<:P 4/1vN%=,A*| 1wc7|v+Tݸ>TBM$|hƥAfsq/Ph n- hhM R[MX~4 ǜDa:AB  ]|G'wʌb%r͇[Lq2/ fǒHwv/Y/~I )WΉlu,B4^ܮ.f_ջgv.4t7.I<Л={ٟV@żeoFJ>3x@xE$:F jXmjDmbCkШek.&L8Scj!' ҄dRq-N(LlN|Y= -]6V5IOn_] 뱢ı݃Pc|d)4ڵJ[ITg L#<>mt"P@PtnT8am@* ml }Ae ֶƒ{.D.F­(gZӅ2aj$0 Cg)!W '`xw ;?pS |(*SR%{1G*ڠQwv?9W ~qfϞyfm堯N|_EQ3rop<:ºvikvL(0i"ik)JX[ |Oվhj$G\I2SX2cѓtUb`|⯓ѷ=-"xqwA11t9u[}3YU PLzac.?vtLA.&&(]23t.Xu۰:u@A٢w ˼yC1 "`֍Sk0:nExsz#赥1{O-\s[Ed_"옽p<;z J-AP;:/0 on*UA%Iӄ nj S%Hq۶.Ў'?-8Da8D^dh;5$'B۶qOϧ:V/69rY`Xi*$$ B# ҨvdHDz۶m;poϓӂ̫֕ߣ KF>`]r[gvzYWyPB#Hc_&YcZ$ƿgڝ0KDCLuYJr14tsF/.tGFϪGN~D)Cl8TfQ-BTH 3fj"FvS#S{d}m#IMe<ؕK ik{w;ٻb']®.ʲD|kz綊qu hY9Hd'Z} Anj!a=Ŋ,E:?$%WjqQd<ΈHqwhYjܤ?(Wy`lGcJvWg>[jWU>L}Pȸhh5-HDR|,~ǣeKPHag>`1ES9(m25 , HqZD5FpYǾxcƴ|^܂yg0`e!!KӤeС7|3I8%ZK˰1$[[3g"6 &T:2-ԙ3"l30a\O9c6z3+'>@ҎU V2k5fLM$>~]eMX C7w.]I?z;x,"3[h[ZLv8*LOO@lPWz"ư Lt> [z`q'lxi0A+{{,oPʙK5 :G#hEg $0|Եf`bM']eC]mJTR_'I:H9mNպɃ]\m 6OPfU^5;.m@̼r{{77'RLݺQ@YtuZ=ʺJtw;ݢh1pԐZDlizԚktb Z6HzpQGj<Б6wk~i OD|N5h $i# X\-..Ǟ7k+mǑ뫫GNKf3Q@iMb5ch7!(ʹr{V~찘U H]D@CFZhsPc4#9ảx)NJHP[tc-.&MZ#cr.w^s/iU}l8I+8 G<ԙń,CthI;v j]M#/qK]kJ~K`VD(N&ZhH1\G;`nUW}nN)^d' 5eV1udJqiZF/:z_nkK>D7I5c¼{4(eСe׬-*y0+:2܌Bhٌfs\Ftښi4q$;qp)'EI76*va-"szg>?Hyz5) ˪3J%h9=j:EttЌKe.yFSra%{υ[=JZ!ړ3{/a~.t N 0xV|IYBDdbTˋc;$!KyaNYn O cӢDk?2w𢟼e/[s@#kRMDeuFn ;/za ( wotǁ4Yuͳqp f0ˢOGuQsojec7x6a5+&pwnV[5Œ0] H ]YC`S&cɭ}YtF6R1`l;g\3SlqٻE"-(#jKGQHQsRh|k]F"2752x͹.G+09h {tgi& ֔@vO*IdFCbh3Aa!pdGv 3u{h_9#j|$K'>n%VT*|?s`+zn= M۝m ;JZOȾ5L6fMgavp<Ƣ„bvQ@R`ӈHI$pl YSf^S;}ֿ:MPzlItҌZK=M$:Y+7 '-@ObMcc܂VcPŒ-y1 ~X𜌬>g{SnK ՚gGJH2e wq/f`+!ϟzMu؀y0ePXv-lF K_3[$T8cǞ4RPgm$hs5߶>㽨8F.oV2E P F7ز6PNH:f)@Uqَ2"flye f-o4fQ|Z|pq>cB+~8b7f ӾǦ}jz$PY+јZs=ظT LgkxA#ߛ}iasY95! 
Oŷ~eyNHU$- I4r3+4dJxL:1̶`wW AH3 [7 wuv|_KT)Nk]SNF( :"Fg!hۯ=s7Xw*x^*K( LդS[owzY4.ޞͺEms@j-} V0BUgal_iPQ8H"B19jA; u_ vz֫OK0L򉐔vzCbAj4+ew$25$*XkjP5h2ՇJ~WwN_I[w&ӊȔ~=#bzL&aD˙}_iXxR "X7),vd'urrfDb۳{/BA#ʼn`"43g*~ٟ/y6GEQfgJzZ+_:VnOX9k}ߞR E0s8G53avLye0IVk/q7zwҘ+G|q|777QuEq Ȅ5+,F5po e;Yy jaT ?0t%1*njqAR_{֩8A 9$}v.pG33.4W)w#% sgQcU4 Ɗ&`ے}8i욃X{rkD( ~&NcZ>쐥Z?|p̮tŠYRD޻bk>-,tNg9wE_,3y_z\*!KV E+1 2#$: Dx`.h\AQܮM P&;J, Xݬ nK%^12m,AJdpJJ BGҁx7ݫ`; {4޿?tiht\du &>i3B'h0ەJ.9 it qk 5{ @QƮCj #"De!."r45<:(-LKJWq),:f;ґ38'YU 5< `ll-5v5vz|Տ4y{]2^HZR:S.z˯(] mF KT4oGǶ/4z`?F]m,zZP)>)]323w(%(Hg*!nYO~$3 xOm"պv )qrbwo'$erR54 ob@nzZ3Zsꪹٷ;z33kQs!N^ )ݳ淼]Zz=!55ѡ/NBnm@ D!Q1mBQ%`d ږU1O& gXhŻV.AAfd(/7U2etŏbz`>x<^7R/-UR'1lv{(ӱUlguю"TLb+iKP&/*|&=5:tT >3xs"(G,#rX+ !M-l g+$q2MMnӆ(fڸA x]"d*zJI@2D;+ (5MO195(-AqP=֬@VHL6fC21Nq i=zw>)"N5h+@~ #雠-ՍA,&8 QfY4v0r]ayuտ*h&XX-{0D^UM;xL|2w G0a|m)el[C/6Vۂ=#/LY3lag1v`mBhI(d# ӡZcg~GdSڢ]e4#EmN U{< TI4ZeriϵgҞ`;|v=9O?{}ofhݮ ':1a:^({ˋe[=}odUú&Aiw uo]6-;?yYU|/g>p}iC]Es1vhӻA!*Ɲ `A۴)1&kj$V`Ut|` &]NŤfոJfߙ܅ $ݚNQ74>YK tjsuʢ$v=lz-=6BX=ODI4(ӕЫ /AW;1kE~v6b̍XS gHP03s5}{p@QgKwm^ {DY΢Fo(ZYht3Nî )6tn3t@6O!H"9U*Y4us)o<t]"xXFĎe1LQnք4Ŋ#l߿ qV#65?rdí4־:&8aC()K$iT`@< \3Md&e#߷]O Bi f\~r=4Oy7II6&IW ިœ4qFirS-PQ DF7U]2`CRPj{Ɓ%#ܴL(XF&v6b=4IeM?a%"N=fg40.X%AOZ"aeВ%w32k!P (&MTʐ'go*3z8}13?_yS5Н6Wy)3Y)S?=2o(HIޥ hcEȲ tޒKDK-}:*Q$v/g X-on^|EMO5u;Y/)v婒JfȠut7UGS+@ɂ NB뜙e V 09 $y?П77[J^c!"٢R6A,/*fW>ƪP7_R1iiRҞM554~J+Ω"pjJa . 5QU--):  :ǽl:6J5c|LdP.[O?:aͺrI^+ E*0q=ƻڱ4*Eqf" $*Kز,܌.VVma48(wA"ڰt x0t FcK޺Le!;&F_@M#JE=HY7LeU f@j J8~LڀeőzT 9%NP]10ӼMС֨jhٱ T!; Z}ы'><5a|qcpBlm2FL[azպEi ͐g{Z+~*x\WT2T4IMd9>^=sF[ap"86ZDy :!OZۻҲ[ƢbqhI8"m֭8N4PLX✮Qo W'/mZOqX921Ε}w6ET^5!"a3sZ#;4HMv&vvamC^EWL.Ig9y`~̓=̌+aY+HIZӉ)E;SWwԣZihei:#wstJ 9S'p,%1[ ,`ńIY^SP bl5-ɬfY Y2 'JI\?_~U-7l@WYu19XlƦQιv;Bt(>xBޔS |f0dlP¡ymKX 9L'toW&GYRoZb.cC6}$3X5sA=0xVǯK@ fjF4=w tIV ie#p7%gF16:P*@K]QođQ\I?_8y4Z2w% HQmX}W#q 7NPT;6޵,ȠF-Y-$Բ&d Վ4\'ћ߱ E""yN[KV @Xڕz7_ 2:0.vER MZ2ʭUM;Dl͹F dz(߮oT~ ~Օ򋛞km!dkZx~IL&cQɨڛ"_>kIZg34f[A zO) -3$ID"t)ŀvbR\ۯ~ҁΝbgGO:ZqA-P\"^9Y!k]x| )_qĔ(iQa"{ 3ahu@*j%$~{\򲰪LDnZ##ɔ Hjcq GG?/=3 d]E8Ȳ[}@D pZ64IC&d){m RξF$OY޻@am=>1Rx[XT6A6sC;v0TS1Fzf@}BEzP1%P9/|ؽ7N%,?1 s='y]?g&(L0o:=%Uj PTbI>MŎvL@v|Aݫ8)r/x# {#IjޤՓHc-*נtN ą%}>-V#;/4$KKHFed?/ ?˙vMJIT΄lF߼yO n{W7g0wmƙN>;` YN=r~wݗL LCiҁ`-i3^Y}Rcڝx]wJU}䭠&P.{(F嬪$ژNxEKbVF>?"?}l71vGKM5u˞2f3ƕ寥Π.V:0%>5sFRSRucz"7V|bB9]N5{vxr9Th+؎*QENJ&.-uR cW{GW񫣭^?mcJcQlmڋ G,Q .k>(q9 щn9tϽ-EJst莎 ȋ,ny]cAԫ+"2>N-5ժ kM&!"%p$fkSҙ@XvӼ  "1GW㥤!`_7VFDžŅ`?VcY󺑽ܬa ߭edS6N*_=z_ȏLJ@?H Wq tˆ"Ȓ|t"Ƚ-ܝAsYv 5f\lF)W"G@fҚO~FF- O DʞU=U1YI%:Ew8?Rk; "qݑQwt̀/َA#_JmOH}GQdY hjYRnGiv*7P񉏿rv~FH,r(3Hl'Qalbd۳Gp4:^W33OkH]W"2&9L֫Z^9zttU_¿+ʲ'eٓBJa͡}RW+P/Z d9)zo{>e<퐭kY B5ڝF[ܰ˫]3'iHP魋$ؽ JoJ#v^sՃ]ioR(- jQ3Twp+]vhͭaAŤ;m2ZZ= P57yfY9 dTلS:N 97|$x,(:2'JwLS%Z3ԯ`:^"RWO,|'g㽮ZZ۳25ݛ'ٔ.4@TJAm}FN*V)w͠R.tHfQja~/걈, }B_()zwRIO;f=1#*dY t b0G@p 5i'>;XRցx* Ȩ,b`:[jz 5R9&N܉(#uUr^ȝ\?")EQEb"ltU`އvTlY Z4]]R Y tb1lmvIIl-9et0qrfVq]ܪOD' RIȷǬu}wUP|"'((JY.s<^Er!@EE^9C4L‰0:uG4vͤm%Z !({?]޷N@d<˂sb+(GO}ϼ<;v̰dha4q7l\tlf~3iBÂ*4죪щߨ-_{[=|JDV6ߜݻjW3hE~QSuqyvr<oY©FƄqCشUToF0" ;úխ)qM z$#Y?'MU DNlD7ॳHw>zg]5s^: Vb;\M=i/6YxY@rU|cĭ×6V͑PE(i4$;)Sly15X)3=qMBnhZ xAN%Qh}LidnkIf.N^]=r7pO|yp8eY.Oا((ke5x\VWUUQϤVZF;>vWEb >g>1sMS8h=f{?_:i0`Fh|ɝ0YndX[w {z$5G\a]IzU*\;z/e$^vi9|#{@b[QsRc %9+@_9RY|vFY} e\xz@/J^PLx4Ro^{+7;c rg #1 m>}nާZ\]ͪZY e\^Za]/\WKǏX>vZ&è=W{}zsEYl#!z>zu5(DPBu]({((8Ki]~?z7d'~$:i\ڿرADNoFۊ;[BjZ :3@(gg!u5s 0l _Lj9T18 @0jḒ?BwVr_-fd nAT^-e]_."?S BSͮRlG7;_4L8+l4a[z]㗎d ];](aGw-=8ۀ`|,vv:,0K#YəA,Ky0_g54wHoq~a }i@3; hK5]Rʹ"򍭾;몢'k)$ӷvpA$iL¨[B֡ ĐXsܑ9 +3էKOEۄ=CBJdLdj$/Xvf\13`qE"K] LK-fzǞS&7yw|G?SYKZ-rkE?|/#e +_WCQ:[u~`:=*'H6/3IHi 
z~nf&D40Fr5cc0-vЬ+~}l׿^W"ѵqѫ_qR;Q}!)35F`+t̻ʼn`Ի$5vӎD%jr]Pi$jqqmi3̔!m,Ϊ#FΘɽX\O ~wxٔ|;L}ӛ> CspR=dD+)ť?ߺQ :ZT;y;"(кRWZE>W++7$8$1Fh藞e2JN..iY9uVIw|g>??mQ(ZՉ ?8!`F, bH3RVGd=Y`|IhcAe"hf~:F2ېr֭0lIY>)>)v6DlFoyCLrB]nZ;"T՝G&*_?24ZukK׬R"JEP%K6'fiMqg P?lCQcH$r7"rLDZo]PxFBHJA+ge˓.9'?bsXWiT023az^X6[A]=mpG?Q$I@ -j|ӫ`REI%T:iXzIDY~Dm@>uĞ^w'`0"kX^gw\ҿÿ_o0ζKuul|dJɱs)*91nc[~a!"r5X`]d9{,zEw`Р7;,sjJ*Ih>[R33^W>淿OOW=wjz:gPiwwץ?#$)@5XwG l z`ie?~hq_W~Ӵto{E""{ӛ$sR ױiF z/lߞak6%rH]ځ;M@AmlZXwcrp@H93|yǎ+0 Pӯ@59@a'J)ep/K)O}$!"JEUu&n%Tsk ίgc Lo3Z)ҒTRE*m`%akbREe?ڂ +D cRy}gmG#~-q}-$uD)n, _ IDAT̀n1nև@E!Egv VӸPAV4qf("h-Ej ԬM9`d1ڱc.}Co2?lsvw)v5) 5Fmi;k6i۶ZěE-^L +*"iLE-tp {ct#3VeuE B5g2zN.Õ4oh-~SO.q|f] Esr&RąpgoM"rx1`G!͹R v0x?'RQy&P5N YG?!"$Qi9/|F).{3!)/3WOzRj6Neлnxqqq}{ǹBmo|j_>EfUu}N+q9V^?[̢lpVF_iZʵzxpm]m i3Žfgg ֕4m7D}s'{Alb8xF Fz(U!+ؑ!s׏>EA A teqTM6Rc2yoȺ#G6.VE:] ap3"$1uI5ڬz<_:dWE;\_tjQxF;ik0'VvlTt!W[Q0t t_5XIZʚݒ h-/P8Qg*8ڱ辭F ?z#/?qe,ꦿ!9@#VMҮvNGn׿P j IJPه<)^Wxё3$3g/[u_\Z?&";Wx#Eb0ؕϡa8EzA@0B"F߾7$g靣)MKZIxȡKYL8gϊgzc r8|#"T%iG$"y%}!ʢ}lq@4= 4nei;Z^.\]Wq6_Y4]u6LBj!Fel@CY^O۱7oM;Ti ?5ж=v~ ݒrA<}6hHP:K|ޡH64՘A7֛xtQM9p#9-m%v%|6#p!?ɉ`^ZRqΆsO9/Q6 Gr6Thm_FL9~!cNf FyB+"X^\:HuG_ M:3G}_rz+,ZY#MqmcfBJPٛP7 dѠْH@-U۹dnJb~BEژ\ҷw\xiaݷ1X8WK)Ҍ[I!dvZk(SJ$M_+mpkd mBWQ\hܐ 8$&E'Լ #YZEeR|s "gէG2ů}w?W++?2Z\+\/f#M MK)%,[yp8۾ͧ~ i"sqPR[%M:'iMf\9pHݛU%k]֯گh?%J2z\a6HڟH\@.\4ԛ}CI8u9 2g~97{`O.VI-Rn #s&UD7k0Bu$ʸET2^YQtQ 7Kb,$(c{PwkJ'o\Ǭr~I )%M Dd, Wk1Sm&eAsu.$F2%6#1pղp36pȚ7ӭ Rvekm&:~J {wP:C={ qLX`)QmC۸Poĺ'E6\&8:oU7T+,uvׅ> ;kZ'=OyKN>gv~{ 0[G:۹L:vƋBʯG=~F 鰨x^ eoީ9XxM':/RN%?>ůy`sSr>vn'H4zT_u=__w~7sK"hwbzgj^Fې!)XDuI;K{۷E,`I (cZƁBBZ}aUUU1v(<3z+n 0YvA-0A3TsW7klcǔ6B(.=ȏZ{L[Bwn`p\Yߵ/?V.8lBNv-zS5h00ljp!Rb{~hVJFFφ!nРYzHOniM^mUK/!1ƿQEX.PG61 ФCvu'?R z@&D )S2hn`?[`^CJUp=W~t2%3cpqJ7WWl(3jl`Q-T̏>c׷^{n{hi"5 "yOqȅ Īr`8 sCo4]KBkړ [9lQf)gдٷ>X$`葏W/˛^e8ڀ_Ĵ\@.\,ܶ+y&քƄ6*&US:B|PHގ3f^V>#@h-~U$ٍ=(jDJ)׃`v !e6RW GmZQd#luִi ǵ/zQk*m[@֟& qb'1pC69FQEշ-EFxĶ]]3t<پp-"HI)5&{cڀ.7Y{c؋_p|8~F^e'gůC"CDj^q-2Na!.t#mЕ4k6dːnAV՗ _њ@7%O7HQZHaxLA~L/0uG??UMmHcU"J`;3)]`>׀u}j Yg@>谓ڬn27?ى**^4aS"JHO5Ӷ'8;l\Bم/ itDnHHND Y$=۵CT~u`LBEFE!EY@Y^u 1KB3õ$H^#ƄL?hV$E5 1{Q&pj!~Xs7[=v|D2PfMLr & ZW{^pU'YWkdx|uq]"z= Vp<k)Zrn Iqaaz :=!?7H:QqX--aEL)5 i$B ]~Z J9yQbͅY9{~G8QMUβc488m20yK]U)#}~LE0t~_dZ9Y{ED߸7.ud8=2y9Ȏ< :@E=ZP XK<\KC\o˗ \8?;?X1qċ@ ]u~Z^#xbvߋ_|iJ](1 n:F"ˏ6x ˲˥Ifp4FdIF20"ExT>倬@f%rA&uIO=)`);;s7ڭ\,VAkCŀĽxt{5am]$B+5sPhؘ LM@"O11 V= j;w^Yr[)%N $?/tkӥcsbL8 /;Rfٍ+X|ɺ^iV 1X?uM ^CGyWaԊ~H:bʉ1ڴ&pE6èՉn/Ai*?]d̲o^WD}ھ͠za溶cV('bx`bk}@}yyze9G q5Zqa-F"sYjuV6߱Qb/(`sAWpYڲ=Lj\*n/""7R_uߘ1xkK Rh-CHfM)Aǎ?mvd8fK[O1hQ(Y-+z8Ռg1Bp:yv^~FjvKS72\np@R9<ݹ3i0uB44Lٟy8x-w GlVZQB64T9ݍDBޅI>՛u慜KoG#Y=vL$٠fP9LɯbQzW itգGq}9gNXMN4Ih=q(gDkj;E^`.&u*No ; H eEBUzmѮL8i!rGaΨ]G`ޅdÍbl EsȚpzh+bQlD ?["![4jsʙLmxI.G8+'4ˀ&DlMV|{}ϥIQ) XZi6,[:Vt<2>n (f´) "QjLV:-)m;s|T߫ǣHOy.4 D;0MI-<$e xn"E{97:g>*TCց_jѵDZw$I{ EK䴘y>S)rJNI M:֥MLS]*҅A9;{]onv(E f>)P-uۿ$aEEn_v=κHr6Q'wYy/hHPA@),w:}v>xu-ʊWT*?(6"6裥Ǩ<ӧ3n>Vz}z8ήP%$hzuZg?{.IzEI:A?uݾl|"bE%h~c2-ٶ96LHw t2p{y\1EV:^њ pj.a=\3HrS|Yj<=ںaiYNM Ε nQ(z%&cg n, FgZVK66Tހ0q~$5W8a\FL~밉 6u12쟊x 0aYMаdFLqҚ_KWkC 8nhE!(6j FLxX@ldZڇ9+[`AzX*Vm7](Rx`̔9jv@>hւa  _Oz%+h#a FVW w3Ѯ Z(K?g],˪2}ΉGwQDTD߭b_ *%O g=:znu W<,ZJP*/@. 
o WVU""#✽wךsω̌̌=TfFĉs^k9;iD%NQ;\I%tggJbrJ9DaAS!-O?f4Ғe>>]%&|TR" iKH4O߃-3:M#gSU_3u>t1C75ldb>@$˵,p+uZ7m xXWR[_v 9A Wa<ކ)nhm1)`B/{EZ3+]<Ƿ-=ꒋCqAסvi4>#\Y19U9@6LƞəM3V:+;^e &:,|:Т9/1=s,/욤δݡ`m'#+$:ƺg,ϖUBŲ4fD&7 Zr ,G& Z-!La#I%4ɏi9FkH *JfaT~0.mRbm yw: 9 1imRuNI;0(d [`Fu^FUDHocힰ9#t;nZ*JǍ$I,Vz1Elڄ]@Ȣ]!-4RInݫe3!R'C3mC,`J) իkK#;o?YW_ L445V,HG.{bO< )286u@*΁jX)}%"#OONC+Ă'[D64T7 ɴ^ٵ;*b 㕠~RƒiOX2Mr57Y7 bb Zz$ԎRLN4I(N4W #HT7V`ҤYUE_f"0oL+sF/D;aTmVikjmFLLΗCeec (foo6 v ^NK*:A"0fTm_F9qH[qv|;'$M}:v tO1n3s HZX_\$BGoc:j`fa04eNkw[3kfը"i2F;=h;-D ZNt]"ёӊ$׉zʣ:GexV>s̰ψv3az{ZRgy.kƱ~nu@a,I0$C?i:z@fG%0趨.g5[17ɊKpcJJ8PKYɄ*|X,5$vžGDb<^sUý 70ޭJsNЬ,/*gx A墠Mʣ IDAT F6Tj-Ukϵx!bg%9Kre4lM3]yɧ5Ŕ36LucfTݦt P$VsRm22yRzȱ*"g Yɐ&ߙ-6Nq*98ioҟ2)~ dILRʫ].?WoROR+1LNhsS錧k:W/QQiqJ7K>6L eolEL!bI/(^æ9tFڪX1[f<%?'tm^9@I:’;rlw;f}9AσH(d sjiCG#'<*IKR _ Pi].uSp>jn yC"> iy/RH7KˀbpAd Tځ,ޥyr`tu C\mL&UTYBH'scE3=bx M3yP_Mo}ܷl&O܀y g$ Aan_fQȑl#rJ>!,NNɸ9E$ߔHI9Rh&.->+1RTdyhN8Iɀ/!;AvnJo0iۖpȝtQ2QPcEPNVo"64:4 Zȩz6Mhh`FP"M:4 8-gV52س2lI?K~'H66 ovZ(mnm_\XҝW UP -Rј m6E )AkVQaaCLBXllq3DGoDaLGu7$5&(DnU×b#(Qb2u0AFu*ݾ 7XRG(ij"nZYG0;k3hBsO&L-]ofGG"\" fy?XpTK $&$85 ܐSRr螻Qf [r}{vub zԙbL6©E$U+"1 y h 2KU p*p#a guFfx)qK;@Qq-9S&^i6L 3jT iǂxG8JjQꬡRb(g F`RRʧ|}" 4˃I_èĉf+6'|s SNŶs HB1{e,Jq 36Gh:ajjiQWJҭzyht'ZBZ_T6we.e>YPp ŠLdC0@G|@rOꪒD& 5$=I\RCD^#:9L4Z\{n@y@ک6=y0E4wp\ #dM?{A 0vJ2 HEzrIJF(H-f-qFUdx%]\OGi 9J2aG lŬyX&J*]-5=ؗŬ0Jm i/q\̀]`i 4 jvF΀>tU2,Ԋr8J(dV7sQGԹzƉVfp 9 kظu:\ ^# ,(3mFh'>:˺i4˦$Ld@*ed0  b0=|3t*4ɓc@x^]2AL @uIWe,2J4&h6 #^[Zů(7:rWn9ΪuyV!DE*)BD.MjW7 c H|6Z޳bׅU +PHwC-#t-w\ )kbl"ڭV/<$9 .d+(EPAqlr"A@_%A++VOȝZ @?jUZIǏry4j'7"lH؛)6)-[]uS&չو PW&H!}>SsNN~5Sٽۊ7VvT"aR1[ L-v"Bö\4@ks>c xO3јz0֠q$CLS1-3F9bhfP, M-b}2k99]bɜ3FP6qHLYf;RU9@Xv61{îy!7vF) .sort> e:a"+b8=KT^* hmRunI{ˋ"ꌘx)XuBn`T@T( [ہ5_t 6`: (},Hg!Ma`fmŬA9@ N&]8h7aMeHNV'V+cZǛ IZ~Z/A9 Fؑ _dXuhm(b]kqO 'ҿ8$kceMMCCs:$`&Uiī:LE`m_$;] ;CxFAUa=C ؚ}HQhy3q>\F%-g㉍Ii)/CΦtJ|i_8Z: f>UFؑlD4$6OqۇM#H7lueZ͉xΖVkv֌s[OWHoV RJ" Pp,TUS]]ND9̠fjQ6HQ}"MgmR7t6 X = l6 =&vZPH ky8=Zibw ZJ"05A E}3Z\1MMw=|4$m?;i$8QJ~ F_l|nLQ-kUBӼCNAdBO8 %ob+AI^TtP~9uDqF*W^@fCkNu4UK+ ގ3CGCzbk?yW߽f-52v YNV1cJU^=W_mЛ)Αry@wd(<) $S`uK$*=LKТÌz8g$H7瞑qT Jލ18C4}6 TB竾0>b#94D22胰"R^F(*g72Q0I#% r5RXvGG+?eSWw'^"dfO4KG@!)]h9;Õ*bxy; !d3g#v@ EDp`J*j}SCQf`}!dM#$J Vᅔg(&#i!h#{ڀҴ x'oX=@*! )B>x6{70qE082AkYd tOV(}5FZLMoŮ,L}{/,~2 M-雷IOql ,ގ=W]ۗEO~v9S*b<:a_D]Ulm_g9T EwROA$P&kqmf-S90 ޚ܆Ӣכ+柸~`}굵otSR\8sPG٨IOŒ(IAk~(&g4UDQqQ_@AqT~NcS`_Y߬W>Ճ%5 8'Uo!Ag?s: _HB -gNPRz6$Ĩ$VC)HL0%ͨ }O#!ki$!Gӌ'EM*aEW ٠<( : {kz!sP20p Ȟ8GY7tKfv J#czZ.1N0#d5jjyԳ.VS B TaҮd6Fb`/ٵ%ևpNDplM#t{" erҚ8%=KhahJ(fH5f읍\p4kkўZѥ,vdQw$1ׅeo ᆭ޾ZY>w}חvI}ʠt=AYܸԏB\P魕#پH:ve,uՖtMv,MiG 6tb(r/|gKK04b2&zЅdX֍ y*`1aE.MVH ggf8vjTO''mgNLNbEdp`26SgVYkFwX7!x8Ql:Jfa׵f'azئ|w&{. ŠssSZ6D6J<0 UˁL"Pl|șfyNbrLʁFG@dq [ĊSiɚ ZYFSvJ&sL"8NOq 2vTk_c!P [qS8q5X1?lSI^ R,`Lfb-DrNp ŕ `RŨ4C3l3qsC. &v{Yy7\4>^Ӭd)'P29K ֡.9s3ǞyJ Z]]G_ Ϊ @Ym.G}fL ŋ.zUye]$2S;xݵXZR+Hھv IDex_RHC)\`..d <Iq6עS Ǟ Wu-lqRpRL\2&a`4Y <{Qq_AZ%c("[#f N^zwsT A eC;fm`ɦȖt.B2Te"I|?ш&KutŅqQ.nf~K)F>QUD9ZP#PL|&L#66̩O}BDS¡hË--ZZ)=Ȍފ=մq Iy m >$d#M=-e åA^]}:gV'Zd`k HhT]it9A)gn=%=d:D R'R$dWMwN^/. f]5ƽMg0_f5 CF93 g0gRu] l uȆ܂7pqrnaj4GnG'yD|ZTf> V!8~n% V5`7:AScu&wguI$+9oے4y]2FNѹ:xIG"ZE4e(,ڡC{Tf4j#=?#*{HqǑ(H29Wh[2yP(93AW?꿧VXsZ.(vMZT~*(X«̡. ?pWQO8j8ZFJ9Q? 
VH4*mF=t<hƉf=Wl 0:-U xo`q)Ш< <53Q8h2ƺEFa&o JMܞv5^;~ w=Z]lԐMf]9e[_ّ!x"h0&Z# ɞe瀇xo|_,bM@ْs22sǘ{SE e1 ѭzE- YPP Fo$i@+zz .eYJ Ümsi-fh]Yx<\ݼYөe(ƩvDDO1ڮ۸kX&j & IDATE5kM%(`}Rs{vfg@`dqR }8 ؈aPh;*Nж-^VԴMfD1n p=(5A!716@:'Bx˗][W>2}1opaT: ǟ}mC+HzAȵֳLV[+QLP aPd̷厢rtUT{;%j=z4A=lA(g)Ιr2 6G5VD\>fE.Cw0n<9{ GIc^yNf(pBm#jbK AkC{WT_|o[%f{ɰ  :(WVv?~ի_}_RY$HZ9Ml{* 0G2%ZuVIE<ړ,棹RBnG0IHf`jKx3K'P@ENźz.^`: ぀` xD5l$[Ku]7Ѥ3>`mEkA1uB*ax j&jDr0x&Gf|RҘhB3q-5jOzK''"2m ^yv#O! SI3H7 Hi].*^[@I}caYW)&QTc7,"GfȂ9¯}6B1P 0<,Fϧ(ZUќ Bؓѹi,HUj@Ă 1L Ͷ]l . Zecyנ3NKa#UŦxʇ;ώj-kTgUYFJrtؚ׋yFP 34XR * kus 1Drqu`Ϟ^W\Wru_%?a~nL>GđILLtT`uI"lݑDgm98-&ԝ'm%r( 5?7z4G࡚jLIڅM1)&-+"ԙLT#9~ (d$ \VXB12` 6M k <|T9 ŀѨ˗4 \9Fcn1MrsSazj5X}vR(zjMrIMcw R%U$N|2P`1aJ.6Ǒ#G>pY&ƵGڦuLh1ùN{61N~1 "hݦ@%ra࢛ݢu ^8W=bTB4 º:^ʥļ X8m|:&T&zyiEU OS`aSG32sՌFtܦ#'$h㓺zOwQT,#3$]x){!E7pxSTZ"O=I( ="p((`rLC8@ j'T2E:tnY?$d2sãGׇjGC}Y$\ZUBK Z@3Xb!@! dүnڍA&^ul30oZ @kږFq1wҁ'"$|ӺB\P,R2(Vo`hQ+q-=/Y^_{FG㼭×L{ @2I#{TdU]we$΍K  ٕ3f hM5(=&~ر*#JF}H9E߼ͼ觯}q,l\skC(ь6il΁ÛsIej0 YM,ý0Yy*ozSsͽ^Y6M1fР,]D4Eq1.ѭՖ ї;/e]_Z}^~GoMK(HQ ߏXqݨq~*;z;eW!ӚMM ˌLݧqu}%!G@@Ju|延a:mo_kz;gxtn2|eΌbsz>&jZV7N>Ti3(i"l8 T' #3lNaTwj)>:6F 3uQ ~At0w;DS2}3q^PpV-'ur p">$`|ħܕ3tƋ7A߭CW9e T'`!+]dH" u2 L[uI4IGȐKڻQH|q?(頻F(xyL $lbXUٟݲx|/榱U}8MC̉(Q]B2VyBe׿@Sտ/$C1%nPݎÇom6pR7ipicdIp”ѕy^Ÿo?O\_ڒH6Vkp: ,sDxb]sP|>i 0lM1at:Ǧl@iה^;i"xLXf4sȡ "65 )rN(SׇZrCAc)0e*-aIw:JU%B'~l -OJ͓ '06mmFZdgWBݐMEr4[]1DҹL:b6Z?fSUT'XqԕTٵ q|2_t,R&e׾VEO}3t{7r xs 3v o?/ߐ+{ bgMf= |N3KEz$+4Wvu}Mo{޿5"; f&}2.rJEϸjeJ|fs":fNwbfqYru-kM 0s+Lğ3P`] 3P֭woCOqHȬӜ =.^\ I4Ǜ*"5RA],A9C^)P$.7EoΧoViޖR3~=NL$0z˜^r' BtB|~V_yN5h ɳ`c8<ȗ7=&wEM+TNafL4 V~gWu أuC$/Mױ(F`0{| V C.ˀȃ,McH`}k7 R6.Æ%ΊFϔ3ϒj%ipec]97nOV3"U_nt|~$qҨDq18:~gU( ݳwO?_nE/z%/x%&,SpDQl"[ o)/Z ThfV:sm=sڦݝ1gLiyi[:$YmYIVrSgw?^_1Dzi }|Xq&˻Bͭ'F6g35 jNzSЊg>sw^EZB5i&ΏBQ*~N_Enï}-oyϦ﨣Ǭ9\~o,."hm+b9qF`7^LkWv-鉰9{RIKd'2K֡RS&UF=Y}RR'0;?ah*n;の7Q Л'?NhN%vdlU;,вS.g;h 9߼k+_ꎷwi݃ڵ$ Q%:g}|hiñT*;̬kmFQiR&=S.xֳ.`U}w3=IPp BQ_ 7>[H)v] )ӏ)Ri-mX@y'.DOU_Lw>b7#cݓZ$'{bԻ@)N. bS .or~~EBDdS袰͓,Vh73b0zJS,Kvn[Vp7N^wLY(Q9dcbZy'_#חs{"l"z8QEd͙ {҈- )~0aF `_VV]gss۳BSSY;3]@oE2sP]"w]ݐ fGvd*M+ҡDFnIg3$UuFBηFrYdf?h;@(?3q뚺:ZW;wI i"-@fzҷ. '% naJ$̔5㿗&&u ;!Pgj~t!Ưެ=pxvj$ƺ!/B'v1N1 Ygј-G{RcJԟCv\MI}2 f)qңލ0 A7?.x6- x@rֈU^gH5\/?xߎcG_}}9VwAT KCAT<>]J4 ҝ. 
apprise-1.9.3/apprise/assets/themes/default/apprise-success-128x128.ico [binary ICO image data omitted]
apprise-1.9.3/apprise/assets/themes/default/apprise-success-128x128.png [binary PNG image data omitted]
apprise-1.9.3/apprise/assets/themes/default/apprise-success-256x256.png [binary PNG image data omitted]
apprise-1.9.3/apprise/assets/themes/default/apprise-warning-128x128.ico [binary ICO image data omitted]
Kf-GD&і•䳀 _TGXsQ1ϻnV.7wO<%\n[M:+@]ERkaESUvZOI( юa3)!LLۜ^B. -~`̰n 2>\5#5s g*bm~M,\R~X~ʇI R6~tuXs97x)% .`˶{}#nnwP VGJOUL?RRY IDATC B颞ìZ2N4H|ۜ]F{,A)^>AUE&D"Xz6ۄ ukb M8EOe.Lx [~'T8[JG{̚5wy;3Jo1n8^x>7/,~|+E(!6D H_dQ)`Qsl. |Ků./5cL#ճKUtL v+j%~(^j+Cҁ~[+ƛѼW&zk2M3:nh=-r©1p3 .g^9h1k,-[Fee%ӟ;v,]w1?.owiĆAdg|;r ɮ0V&Mw>Z5F43qlzE~Ő٫VHQzmceɵFwZd<TPM3?z!9rK=|ㇿُ1jkky97IKKqW㦻V>N@|,ioSyoGs5|v(ܻ[0zGy,sM׷p8Fgg'{,,^K~llKp8+@- 4ɹ޾DQ\Q; LG$|Di,};zP 7wG6N Wh^VpkA=eXUhZX;f p>_# :T~r 7op?R+ٳ9c<3g6mbĉ9+>KG(9_KB+=~s͕2ѹtC1B9LYdVgWpƧQ,]+<&L86 $,S3)/(kl񍔀|C]5WKT jǜD0RW>>Kwbرos=38{wgyRկ B(b֬Ys=KA8nOmuBcB'Hw"]w1e 4Z*?0Jas1PSeVy&w)bo&As|T%;p@@DXPP"Ңz_VԐVXϧy]>`aIݸO }jfwK*}V'M-¢E={6]wwx'sz饗J8393g7p˖-SN/稣[M_m5l<7d*;[+QJ oEyEvJ(sem\SvTg[+bWg Yvw[\iJ/L(dSfn!44$= Ed /eR"|j'RV?nCff.o?qDnM_ε^K[[ɤ3kqٲeq\y,_38X,W_~;mF:LѽE|9ia9AsBf;WҒ(~qp rZuҵ%R3Ei\1N~WTz1CPN%"=Uuy+b)/0l $d4Hktk52NT]evM(YԎ<~K^]ߺP(%\´iӸ ~Ʈ]\nff͚ŗ%-[ѣ ټygO3ݹg)^[EymL<~t+ L_zIdȺx^yKEE:KY=z[VA&Y;@(PSy,[xҫR|!,"?N>>7?= -[Л9̻_ R>'*s|_XNkjjr>%Kp-u/$;v`]cWğDjLq{no*+J-`m/+`g#s~S5t}$:\g/uml @h,pjS`$, 4*ir,jG(;uw;?ȳoaUwĈ477}T*GZ[[?qǦS ~ɤItnS8g͘m]tn}9]#5jlŪ +Fc}5_W_(ٻ6@ O!|AbeèhB0Zx 6mw;>4RH$O?\GMSSWvG}43f N/rl2q`׿:^+  rff ~z 2DFS_ mk gG1e(x9֬ߊcƔuƉF^}mYEc*U_Qϝ6)G&\&CϮwI7|)j ͫZ1x$_MG rwzΚ5k3g==='oW^s𗖖RQQϧYv-G䪫#GsE1zhٸq#+V`͚5d2fϞ͍7Ȟ={/ =bp8̱?… I$\|466HH|Ww/?˒<\6 _0Sɮmtn]\YlҒ1~=_"d'=޶MRU>Yp1+Rg/8*hϴcUsQGqw+vZ***R̛o} 7W_g͛ǨQX|9?gر%W⪫BW^Ʌ^c=׵2q&1 i|J-"Ih[ mIk2N/RK9 ! Jӏzw[WԳdI.үݾiIKG`KS w>LRu㤄cum "YXy#ՍS'w.{_֬Y7aDjr9y饗hjjf{,X*bL.Z[[s0[ncܹ|+_n HJ())?)wu6䷚F6 @u ?u7AoZKqyP%XsRLd.[ei(s%=\~NMw*z\2444 <ӏzD>K:zxm]O ,{m}8Ds4i6}1ǥki7q9nrV._o_q<[oeҥ̝;|1cF9H&1/x\77n]wE2SO޽7yf:?:UOzYd pE1d⊂zn_&"t_ ӽ})]Ow{PSYj&2# qùG?HѳmlhPdTy3tv@F>C؟XxyVE Zއԣ[]G0l+nTo!nۣr@ hkkge8ʳ9SvvW=511%&D^"! Ho.{^qΜ6Ͳ̙3gyv}W_Ç#O?ܹs,8 L:UK/rnf3\CCB|G__|1#R|W!K :OJخRՊTBD ]+AOўv{w,M߁Twը.AzV s&#I+K!*X2NI ~(| dw4@Y/Te|t8SO=]WZի9o .^3gN3)))BSxO>-[z;ka*`KGSu Q&~.EfF*)vV8R4n>]O%_C"! :EB&ՀnG);4j, 7))-je7hYrp+5Twf_~ 5>#O >!~РA;w.f͚Áe˖߆nǂ b8(m6~ᅬk"==rOk-\]v>,Z~kb^:98^~SM|x(p _<V,}aq5Tu׸#eMQ_bD 1.]X|@X=U=r1Rc RLFIdYH+? ?h81ӱuV:t}VXf̞=ݍSO=gVެ4鮻˲;w.&L`m- rrrp 77p-[n >_UBF"Q^{`shZ@jHw B]v,L{(d @Vަ{9zZ ^z^KW< ': zbhH$_\M= J Cf`0{v|ט=6<ʿQWbx9s&nӝhƏŋc۷/֮]+WiiiX|9/=p:jnۍ뮻ǖ-[ /?oߎw}SOף hii믿n,X^x!z7u^ɓO8 %+C6lAH{=ؾݵAوJ5E\{*N.% QR^ nqPIsK>fI/CN1Zg,Ceunݟ@m})ew Gn?.SN/m۶aƌxqꩧb̙={6.Rر /`ʕD~`W6{lxefؼy3/^oY0^~ebҤIQVVUVa޽?<~_bx'w& aIlYZE^ (_F4Ə!]%7mǓ}J-j!HV#j}b>Z+/Jo"3BA#[Q BP+1qҲ{+BLiڢOMmSɓe]G0,X1c ?zՊ_^{-1zha梻݈F8[s(>|Ϡ 9@ID677ЅXhltwT=nSRʬ]E%|ioy;֪u;W d4,MlIWfSuBP4d DKp˟3/oJ '`ƍ8."[L"/^ns9no͆1cCkk+q>@FF~^Ǝ 6 >8;h< < q7#M/}Ad*DܚvċaO5B]w׀xuu3nqO]o6Lc,Ri؏w&>'(D^hUtqٵw.j4{ィ9S0i$|g;vQo<{xgrJ꫸ w w22ԛ4yێ:,\ 7x<<Ø5k:lţ>r˸koZڢK(}au;U&'.RtFң&]Ļ "mÈ!Z@^ϗBחB '0:J=(pHq`Гot>ĉxef3<Ç~. 7p|I,X&LСC1~x\yp8x1g" ᫯#އ{}[̛7C A4իUÇg!##cǎf7|z {Ad?v?3gL@|^, aoCA C!#DI0Ou=5eAATK5~gwVԠ3e(3<>Ѣܢ&~T/E<a/1X߯6M=p+--ᅬo9]qشi^z%dggc=z4Ə+P\\> `2d,COk֬ղ|>a8筷*VEEnVL6 ^x!n/"$`K-͕/Fd-9oHX+4#/ӦCD(S" NR3 k&H G;4<655۷o{goǰabpea͚5X,㏣'|2Ν QQQ˗/\ ї/_k"%%>5lڴǿX{BW- IjOMv&H(4(~]Ӡy "cu'qd2B!.Gܰ^x *1iW~ ڀm6yw}a|y^ru;w믿JyweYL86M[o;wŋpmCHAݍ{@ôip"W Xp5!G|4ѧGOB@@Cwt@R@($QH"REBd!Hq WZi@uײs%!Ж'-[`Ϟ= wj뮻B(Œ3pUWnƲ2< >ov܉ >wy'f̘!W/sC4.\}*~Y=c9v-wcY#q&Vj=>DT/ )zYdQ͎JY}((: #ȩW`/qOz  IDAT@׎Ζ p`佀h4hzqW\7tP={'1訡pLGAGG^x, Պŋ ZK߈M# 3g'R76F1nU<'D!A/JEQp8Xmbz/53F~xeΑ"4B4"z8 ouҏ?Ğ={0gg̘˗ 9/Æ _ܹs1h [N,Y"tY~ZjJJJywx[KdCAmB&9%A"o|| ߗ(n\ ,I{PapE0j^^jd`t)  ? A[t2R\yZ%;~pg%cs|۷ӦM33˖-äIޫ>73|MTVV⮻mo OǬY !RX'Z|=m!%pb7 g(*!0yɶ6֔g=ХCQ0n+a3WjUuBPP@Rٍmڎ v&u#[ZZ4nkkkqYgaʕ,--J 磏>޽{qZyfX6lySSS1sLDQR81rH̙3 /Rµ^=:=^Bp1xpխ]Vt 3zU. I! 
JN:U5; za.5Æ7[[[ڪPaZV\~Xx322#{Qt/\հZXbf̘/B|W/Y)))o^];,cÕ?M@f\w*dRhVHV ¨ZuCMYSOb&F+ѡ3OY{Eb*Z>+j"_+;GNL\3\LwttS[bAqqEKK/ X,<x֞<#rɓ'{V‚ z+!=/%EPA'˾qg.zŚ[ow߭k#}]aƫ7"''7x[w9眃ݻw㧺V}6IA GV&O568TxٚS@S{QttG@UO@ 4Gj& F?vGŰ,$ÇFbsân/>|L\&@K ggg#++K=eѧOS s!CeYs=kƍxyxOʅ՝kJ%L ǿy&uH_CŖ(ìj-p2ohkGTO]^Qe+6e/Ýxn5kGGaH밆47]7chhhtU9jc|చ8%ˌ-V, cCJz`icZUy*5yU ~Db 󲜴1Hkg~5CSX !^,_1Np2[ 9}pƪ`c tusз 1TVVj_dgg#;vݘ1cn݊EizF{ĉpBaʔ)+n랇_23F3Ir%yZ61m b,'@?яTxøb{ǮCؽO6 gKn8{6Aod5;+军L9KJJp7ud =]9JKK3YMVl޶O i AZ`RlJIX _4yX>`2lFhV4!gEhBkTsTE"Q|y ZVn^?tx>S'a C=>.rSw55!tG_s=>ϒw &3jzZj=-#6h./`/QBn˾o=*,q"hL\'#WJW-Ѐ.U k7njr)MDK.Ř1cfTUU,Ih4bzJjQ}sY?W_ BKGG2lFz#fj,Q@Y ;W^؊fjKen[~qz4y3OĒ=eF8pj=ZP i&L>]tяfxRhqQNO k64 qQD# ^QUmpd،֍=j} RH?Ѽq?_}gYO?[#b@~񲾃22'C ٵm6q0a0`CY\~Xxzjx㍘6mZby%R]6I؈3E?C;tFV\ڮ(j(`Kݑ˰P=sFdsD~#u24ɓjVDzpsXB;^@;:ː[2/6*? ŅgI8S45nvsdH+P^%1\C)c=zݦVY{897yHKEZ i.trbWMP e  # C{]hi@M}3*ԣ=>_kΗZHߩ#D7Rk1JudlD)v{dbU{3Lʽ!·R1$!yh m]=P{U."+(}/C?Qܸ$vq'1p/FcİA ˁ 6vg'ڂ0 \.\N3.X HpPm]<Ҁc}ز} \4[ #٬8vnB\pT'C`u9Te$fM5!?H]n2 +)߈(j;Lz#;$ZNS&ő؝7-5߁E} 4X@5bK`jp' t_Q cYS0dP?s$+aW=FȤlQy @s{|8 ? XXHm\7cGؑCqC8ۋTY/~EaOYw0MD0pI+kջب=@0 êRn'Xq }DSK;^}k +uo4<="!/Zjv d Gc/" "ysulR^tj>) BƆ KŋpH!ƣ7YK.8**h$=M `R? Ww`jD Ǵ\0ƍ.^3gLP݂DB][ r;KS5 tkWꆄ A dܛ8- Am]CO$6g'_#W`l֪f86;yB*S4V~o8&@=}#-ls֕o[[ S|ta(--{**]E {/?  m oFx6!в_(6a!˓I=S1}^HM'R=-ᇪQkt*u>,=UA TEiWY ˂J$D,&QLkc!|x;tYb MdS']]Xt) x^(;׭>~ ~&zX^xV[aO5ѕN4h$I4_-7~ӊ e%Њ{jBuu'*uߵ!? x-X@uر0./9|p S/O)-V; MeT O"zsp9ؽ{7ࢋ.:f?FbʗݿFA^X,H%0"tm3:F 4 " ۊIKo! WUbwA0?( Sͺ"6 ?Q[u1yyY5,I<(`9W0&ʆ|\ i!Ƿ8)ؼmZZ;zeBa̘6a >n?"1PH*QVQ*_7uҐW㩇 EBtB`#^l;!aOȅU f$c)BU$5; ?jj&~zXvI_xX(hp,2Ӭ(s-W C\A0AɐIⱄAǛ@q4&FnNV]mGL@jV1v b4} =Yy9Ypߝ7S˄߲ }xkAc!rʏHE$BO$Zr6cz¦sTq%y/ J{Ok|eGX)rhP O.I ?>J"nM;OGN=e3Oqf iF$]eb_>[ջ@߼ij=驢2~޺\:F{bH, -J.uv>1'jVTq^V6Zz&ӡ0R"cOU<XVC5` uG{%7Zo r|IYZBT:?ƍ9W?AYyD,) b 4] tʮ0ur Gcw`Ѐ"|xO4Ўk!I0J@;-'1,Ƅn3鱉Ԕ]@IҤKitAG i cͶfo*5Kܓ`- ttw5ULY\6,.m"eb_>trJ?^-_Ŕ#${<@rb(X\ %@ YuVԅZ0Hy T(RxF?SxP^~UDs_Y&fo7g"yzeQ: )6@D$@ r9C׶05uM8Xv@k['3Ԭb7b D_foהi?7]7Xܵp:{<YTJ L@-"LnddJ^3r>blѮ0HOsvG:'bщՔ&xaGm&0 IXz(Kj c R} ݆"8ݙ[6b΃0a0-ᮍa`;V@4+Tg!sdy `Xi ho>tTP^ cQipd !V ~4tq#!Oh,"^%إRߟTjPRd5SO<:ɇagU}zx%à,Oq ,B$yM9^  Wx™b"n'# Evס`x 'D}C W8`t}jF6F֧?Oi<X/W_> &\6LX-Y4삧a;B ސ#?2NO)k$*"¦ر / \>ƌ>B&"]Mי@5\b~4}=1١n߀h@Fe-z*l?ة'/+RmT'a60idFyb !@,AׁRX݆GW:& H4‛3]-ef!@9\TC8\U_Lg\6L]u:MqF IDAT^jr J bs!t Ş&|Nzo֬ߎ% wDk3rz?R\ ҭ{&?f{WcO9`%rl+\Yج `*DcG!cYL4RIRt4 ]cư`#sa[KDC]qdzYipeGϿ;y Mb1 8T<cp3z79KfI9qV@P 1̢);>ۯ٩j~pvij-x(z G] })Ѫ.B崠%})HnF ]m5HMυ/&Pu=&OV< ŕOa"v!d]{^-Gq~Z!'!=,6$ Ͻ1}5۴ZQ C ao=(Rps%ԩǴAO^S>v5Ԅ?&{Yzt~o} ͣy^;5 *PɃR "FB MI)Te8L=e=fXf8ɂt7I҂,MpgbsHAӀ86->,VrLGF߱;2 Gǫq` k׾r\yBV06P^&j 4>MB¯~5T E$ 'l57Q?OBGؿ} `0բy?>8DrZRt4WXY A ]K/<h˪kfU~Eyw3 oGpH]M y!݈mj NYQPzlJUW ]ޤ`l>yaM)y>޼J-'j*DWS{ԇr/5x{VQv> 1h ,D(@Ŵ䘮|p`OqPsӁSʹ ?laba:ct`QX,B5`wG:6!mF,=I9%?aNTNjo.m}Gv.& L@JaLҘŷ}RP&/bh}P D!KPQh5~=|nQ=0Ii>ӵlf % 0RA煌$B #ste x!"O54mhhjEW^1C8vP C£MT_]Zw ܙ DZ {{᪺^ƌ?MlV5|PWA>NJ *t> 4!^6S)EJqN'nDCe B!7I*W#~ <>eШ X bN-qE,Rj`ߢ~?1=d , lV`p>zX 2K4;%.qBJOO>WD=*p  3wݵu$E/ m*LYDBGtB 58N+O5 ?Zi?ى m˾n/k$c 5rT}R/UtV8cDi=ݰpi,iffՆZq:kV+Tϟ@3E׷w?>Z: O Mm8ps{0D| QRC<мЫDiXX}wA;}~{M=-x; ׽,'^N*IC*pDLȔ͵ x@cQP6X"= dA;vUYJ?I3`KI@Cs;|3qۙVK['r2PB &ƫ:%DyK 새{j2Π \XڏR]TQ2IHc[wϨrUEbpk \6 "h:# nC3B #6D pn.E  zAiDUı %0x5Xmrxτ"kefb'Oc`I_O< _V]Z[պj >UEWGtzJ+ͰoF <*Uw-k _-1؏0GKFECJ)1jkVQqSBM@hw(PpɜC ,!@mCq~G^ǿ^[x͞92B H" p!/DÂR{z#I0+~H)/[c<`Y|pT7UO ]2jiimlbJ ?̂PVR껳OuͼY_\c-]k'S4-֢:[Y|/ tjҢi\ҡiT':֟Pyj0$/[,`/ P;AF 򝒲Xr2\I7a`(X 16VA2ENĿJK;%&&7FzPt,X~8k͂9v6F4jВ= i)yv^_N_>>(58O`Ŧ&Ԁi>10]K=V0aD&vHH?p5<1VO aybnR2eȆ6G:R\@XTm;{ L4RY{j@q rxk?NEu&{e&iԭ>+znNa/1=4:V{3X<-1 _Q `Y)#2eqB>%WWbؠ zQ'B ې3 1\)XvKJ:jhNj N";O ^؈ &rқ6ij i9R¯~L 
ScݦZޛ'j4x\ @ E(D,UpďST6DP:1Q$^:v@UMqWHOscڤcKE 6GkN,z1ubSt VnnƳ0sTb9@cnQSkCJRɛ--Q =D-D$PQ&q^j@(QA@,+P->.YNţY*EĔkaD X~ocő4P! V.*_@~5WlG{U ~NúաoxXq؏_ f0bT"RW"SCH7{ 4ዉba2!';h{@4ÑF̛s ֔LD-Q8ݜI"Bi&H#T z)u<#vYw 4+`~J)~T5VDW~M+-Cpχ@ e'aZU6vc#"E#o;?vpahiW|/ Hɹ{&\Noch t֢v0W7igGi<xBu7&})]ޅ')D.qcx#h R*.ѪW=\* n)k=Qv!.>Ǟt5n("}/(*u{5JJMy x?[ 3ʫ}ΰ8!n"@&2BPb?SQ=(-M^bIYd% Y) 蓝_l9&7s/Fa;!O{רqr=*^17~bBIjՋnر&fNH\1@U7dQˏc|Pa bHb/d/@Z+ cv,*.g`I!:ޣz+?r<- oFD2{67 \4<#2KkRe= $)ҁײzADb~ \/~Vk4XbwvVW6lTG%S~M܃+u.A2:(i'C͆t|[D"G5YiYxq)P|w"~8<>3qPO'ѝ' kTzV~ӄ^9Ȱp}?+8{ʻnE# .@{ag!H (-xkz|̞W\rrC"Bc=eS(dU hI6EҩމDW[_ނ=ی&l?D  Ǽ\UX/MDܺ$ @ IzEC]pfjOlV F</[`(M*)óQ7 #NK,"FxEWoj'P=D28ȞxY4KC/AT˚|,Ňp?M=UvQ~ >0@3J/(FN\ A P GeH BlzhA\׳20j lf7>㡯YUSQ'Lp#V׸ 4r3I$0"&TIGOFcB}/9 y_DY;.D=iZFl$)f>(}|)O~ q j < 0_Cq͹()tF:IX%W"b鿙 T66Dr| !{"¤dL8%d uװQs/l=|^$&S#w@SJqH7^p75'PYN ; 201&Vb$`# ~ϲ?Ujs0|).&AVZwWl{X]TPdO(3Wec.A>k//EE)vm1V72((kJCC+)| >y( (қ4B a_#!99oQ3M߲_]yBSͪX(&'>q4 ^XV{_ُ.Gx" ^pOygi|.,`T)0qf*i(Ƭ583A)BDBRwBkOB≬O{ͺ8{<,`]pE>MNT aq؀T̛YD ?F"1z`b|BKjx/!  D"ZB`Fڨ/%4.}, R|4@ɿzc R><:S~Gv" ЉlxY\z"{{P!+9u@ 2IDATZ_d$>BS#r>?8hFޅT/t^<3;?c7_Uo6pOǴÆX+#Պ?\9%n DkP/'fDng l4&ޞB u=$lb9#5aہN~ԵeMُAh~&f=#8(3ӿ6kgS\Dҕ0W+imw_~2U>eeob0sNg:NWXQ BK/&(>A%Ĥ.QYZllKW`trtzgk;C[J{N'|翝Oj/o -~2EN2C_nq:6ʆ%5_ki[^c7ݜ:Dڂ\񿕯Qt/ēVfVp9ۍy㐍N =X7[*0b.eY&C?ߋ}xwOCc*vvTOH{ lr^goOM942C|0xdMx\0;90I*􋆁/XP|\[Pbho*vvP5D0;rxN.nj=eԗxBG9H&æFTL@13sA~<5Ů۳eB4Z΃_AݡQ^d#5L#.YGĮ9ަ]-8ibnZ)sæ+hf:N?nFX; H$A`B{C@Ƣls;mu5vH#-Vq9d<Ȭb`e@fP.>W=HD shNjqQwPC]LxzUlZm~6{ ɔ%PsԵ!n`&}ƹԨUP_]BkmM^4R1$SS&w~wbIno;$g ̿anN0x#ʭBg$"1D%f!FRh@5 ݁өEkUun4y˪z,'4OEɲݤiK@$o8BW_0CCĈ',IdҺS?>baD`$У"7uDiBqéQWUBmJAנEϠ)u]N QLpaVw64/hь\\n1]&n&M309X( 5ggXqէ C8<ڀϭtsk@FWϿ$D5\.Cwa&n 4af]v)C$f4sg3iX3uǻ֗TSpx.řۯi^k: %tA耎+!oTS#^y oB$xLM/ţ1l{u9'poMϯo)bx-Kt@לAa̾*@ R: %4(`v3M~M@)2>5u4eSK?l&her^:76ps(t ['9`d Hv4t]Ör&7]\\wl}Kcs,OtGo'jGbHr{|dxKS]i$Jnz1R'pqoնX22gEvQ2F/p2MBQK"D?ԜƑ߅6D / uBd_B& '!U<8(pq}><8A~Ҏ&B8dPX+&OBёCkq{%rv4 b1 9Ll$F %73ׅB?y>r)A"-!;An]IkҪwZ>RrfL-[治E^NqBv:DL7Z62z )-9$(ޯm#hֈMwsL+PtLnZ<`1 2b(BJ+Bq,P2>敢6zYEN;b3dzx{Xx5N.;5m<{jOgn7,>DijNPq٠;M5q赣'@WMb+.n{8dppF (i{eBI%mCF//l}; ^Se% CHxwt&.9+%:GT}M~RԽ^"oUڞ>B2uI %c,Dxߪnf冦t JVHݺYejt,HszqBaIƊ'nh`vnSrBIrw3YyDzU)¤|(*Q pFf֢l:i0hp!^Xg(E87V*~m[yp$ jbEES3p3[ } ɴI$0 [,NvmeMm8vf_)FܸCˀ;&&M2zDAN8|j6/:PG,p4bqLO&5#tt[Mg;;ʺunwQW@-mH{y1`DO& IENDB`apprise-1.9.3/apprise/assets/themes/default/apprise-warning-72x72.png000066400000000000000000000173511477231770000254670ustar00rootroot00000000000000PNG  IHDRHHUG pHYs  IDATxy|ՕV*X%˲UB8!l&!K0 yla%1m,Hd[t%&yO]]uO9{n!N E@<@H ҁ`4Cl|G@lV;gU'_6CJ(.R`L$6' S=ohV/ۯ|X NeY5A*K * PV0 hhR`D ޕPg$MLwˀdꁫ ?3:LmEʲAq( "28x8[za7k@G%uOvt 91Z8I## QRc ZM-ule+wynnGxa` SEg%EhAɿ}.}_Gı^^)ʇL)d*˛-|bkg-"f&AHi4l&\Qn急gB&ݱzCh%u$p },>!?)й=i(~BJ 4M"́#!d\.e_%Y4wHlL,%c^FF]h>OJ4M =%]M 8 yͷ ,~kQ˺vDٱ'u;T < ̞Te猠,O4..i&5&q:0m3 zhW>Di1R]L&4Wݹ?owN8hHX0YOY$`D#-t.5t@ @YnHvc j@[ e&&nB)ÍvTso8?Z | 4z 8e|+#(- ӥm9z,Hk]5ifʦc@ H=d7r҇V[=X<JVrq,>x&$ٱtl( Xoi忕|KIIIH!t YIai:HM% TM#P\Ox8I6yQK=?^wcoAVϽ+og9_A B[Q~)"4k0'h?KhVtR"4+Ҥ IB+JlϾ.71f(db(#W̠5<;d  Vq+Uf-:ݲhfIAjAtC)'`'b640>Oאv+R[insqupT~r+<+|e3T3 &BC ;ҶݤRHI[XH_ѝquՌkc=$.@LaF6Lo+A ?)…_;;w'LўV4dHc`:liA)rρ 3)MZ [9<#mqI¼?iㅗVM$RzpŃHGJ!_XCa\ $\>p$"L#F6NX'tuh݂Rs~2(3ɣ"Xxu`<AEi*lKѤD: I\p4)4=ɚ!\?Q\{si>w}mT2m$@M#0z,Y.zc-+Wof=CaXnL!% M:!sgMd20=( l>L&|_h]NWfVZ˵pNJ\ nu$ ?2{#Ā<|Y$P2#~om珯撫nǖ+&T5o&W flؼv(L^@Z[6ٓ P), qY.[VdّMJʃ.{;zyk#:ZTJN9igD.<~7d-[OCNt>#YMIQaϟ8T^T׭VxkC;,h 0`~**RU$TR J#`WATXOA$x~<~% o&KFWLfư(sނ"^yNƓ1 Pd@TS)`-Kc#8/:s:m +:CŠ5ᩩ(˥BSH* 'B!^(ƪxu > /o_|۷sxWزm0PL#Ɛ&n1Gp0?'@)WP&k  PRs)/;X|#*UWHI?_IGa@ dKB#J4_L/iAKO;ŋihh/v/T`D |L.o `e ^}sm%%(Hǧ}e^P(ejn*Q)f&BxhBJuy߹RQ JJ 
r0DN.V)!-NSin~u?s˔)Sۉ\Տ+aÆY]Q}#T14!z;\YTR )NoJ'&6[nu\"L0$ǧ!py5sM^8Gj Q+lܒ_,O2ōXarϚ538M\jW; GH`~5ΝI`첀?itL/f&FXg" +2: 5AI/ȕB ~' {B!I*sMzsshժUi瞱TL3rT"m(p `@0[~3}N# ~zzցǏGR-[m6.]SYYUW]N~2@ јf]ͮquRHf6U~ǖRS]!kF`$C+C޿xAT}x`˞³Jȷ@󅨨CE\\;,pDyy4O?4vr⋙9skEO< M(W]8Z"&X6qʘ?ؾUJefI_B Ώ  *vơ}d@drT^Kj5cmz/54gʹi7n3f[3gK,G/??G,Y喣L/?2m;s0Rns$)U,\tYQ| 3tZ6o˽>[ lƍ%C/r':#p7OdhS/Τ# ԓX1n#KZXzB/H T㭤 AaiͭLO^c/ޕUw%\wL׻)P0~8n_] 7 jf|J:5ltOf6-"GrKIO? asQi=0'ʼUVq=pe1o<\}դR|>.F @O=]HBil0 ‘%ܶÎd E*m#fư󨟰O"NpڂyÇϋ`{졼yq饗rwjSz477ۅh.R/W|-X&-ߊ:jS41W/=k3&T5{yX*>lr $=mNIc'PXʶ}$ͷ׳vVΟNw[#Uf3O[?B3<ã>J&! RT2J-Pa:x$ uXJ7էaM[YeMw8jzJΞ4]JnYʬ2r6/ŏh! gL9S>y$)lMTLe7:H Gl5[ \{Յ$7Mtq/I]l T.+o]~ߝk] %2dh}r<s)@W|4irfN٧/ ǓO&֊I]`HyeJ7L.nrIH_{%ley|bY٧Cs3Hx:;ԑ`Tw0jX޻9:tu|4 8uUv |k'1Rj$g?75?,NQ8f6f<ܣO*ﵵmn0դ4%ڛTt2FMEA}¡/o}PTG&|W w[D!?d~s?OBE}+U'9TI+c@prXʳ:7F?waph_ƲQuEEhRP0Q*}dd}-'L kgpdgs_⤅3gəӨwb$ j*jNl%״<}9 0MFyuI*^J|Χwy[N`nT#Kti3a4M<C)-DK{}+O\y|˳C WS48Vƭߧ=EњyV&+QnKsM?d@kL?Ҷ+FQIa 3%5vm!Ճi8wΩ'et5h*TMk7=KE3vdF:W5V[̳B }Y)\ζ6vw4_NʒBu5VRt'LD0 @O0̬iPv=͈tPjqI3d2YwœN݇;Il59̼GxV" _?!{F<{;} i7ʲ (ati1ww񯿸h!4ٟj0m dfc\X`Yڎ==\Nk Ts3YUJLAI 4$bmz1ӽV'%R\*}o;: *Hu"[%K 0=g9 [x,ϱQ'_ q{nmKʚ Uv n{yŒ&QF/'dkx`,Ca@q7rslPjBvgENfoU#gy!˹.笓Ǔj[Eq [6AےQ"Ur$kWߵۨg[? S fjG1i Y[d. @*T`0miDon'Vڊ.l`x˅ fdDa|~[-q"'P8 8Թ9u{IBP0%յjfr:X@]Њ<(_ \,D \I`oLvEV薋9V$(#NlC$Y兑e9b΀:W2t%R4q<ը:q'Ixt 76%ë. r@ai_yLJ1ίȹʿ*~Vpc;Yjm2ѿu0HXcb?aCy(t\sU>bGcnzh>{w܇}ZV 4J }zw qK-mI{yb û+I}ǹ^,䙕Xv<[Yބ׷[W{kd> ڼ033s|)GEQS@AHws(U0M0LP& 5[~GKD7cd&Y߁ d`R01xP҈O&Ag4)qcsge;PlOu3^X/g5 # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # Used for testing from .base import AttachBase from ..manager_attachment import AttachmentManager # Initalize our Attachment Manager Singleton A_MGR = AttachmentManager() __all__ = [ # Reference 'AttachBase', 'AttachmentManager', ] apprise-1.9.3/apprise/attachment/base.py000066400000000000000000000401751477231770000202650ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. 
Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import os import time import mimetypes import base64 from .. import exception from ..url import URLBase from ..utils.parse import parse_bool from ..common import ContentLocation from ..locale import gettext_lazy as _ class AttachBase(URLBase): """ This is the base class for all supported attachment types """ # For attachment type detection; this amount of data is read into memory # 128KB (131072B) max_detect_buffer_size = 131072 # Unknown mimetype unknown_mimetype = 'application/octet-stream' # Our filename when we can't otherwise determine one unknown_filename = 'apprise-attachment' # Our filename extension when we can't otherwise determine one unknown_filename_extension = '.obj' # The strict argument is a flag specifying whether the list of known MIME # types is limited to only the official types registered with IANA. When # strict is True, only the IANA types are supported; when strict is False # (the default), some additional non-standard but commonly used MIME types # are also recognized. strict = False # The maximum file-size we will accept for an attachment size. If this is # set to zero (0), then no check is performed # 1 MB = 1048576 bytes # 5 MB = 5242880 bytes # 1 GB = 1048576000 bytes max_file_size = 1048576000 # By default all attachments types are inaccessible. # Developers of items identified in the attachment plugin directory # are requried to set a location location = ContentLocation.INACCESSIBLE # Here is where we define all of the arguments we accept on the url # such as: schema://whatever/?overflow=upstream&format=text # These act the same way as tokens except they are optional and/or # have default values set if mandatory. This rule must be followed template_args = { 'cache': { 'name': _('Cache Age'), 'type': 'int', # We default to (600) which means we cache for 10 minutes 'default': 600, }, 'mime': { 'name': _('Forced Mime Type'), 'type': 'string', }, 'name': { 'name': _('Forced File Name'), 'type': 'string', }, 'verify': { 'name': _('Verify SSL'), # SSL Certificate Authority Verification 'type': 'bool', # Provide a default 'default': True, }, } def __init__(self, name=None, mimetype=None, cache=None, **kwargs): """ Initialize some general logging and common server arguments that will keep things consistent when working with the configurations that inherit this class. Optionally provide a filename to over-ride name associated with the actual file retrieved (from where-ever). The mime-type is automatically detected, but you can over-ride this by explicitly stating what it should be. 
By default we cache our responses so that subsiquent calls does not cause the content to be retrieved again. For local file references this makes no difference at all. But for remote content, this does mean more then one call can be made to retrieve the (same) data. This method can be somewhat inefficient if disabled. Only disable caching if you understand the consequences. You can alternatively set the cache value to an int identifying the number of seconds the previously retrieved can exist for before it should be considered expired. """ super().__init__(**kwargs) if not mimetypes.inited: # Ensure mimetypes has been initialized mimetypes.init() # Attach Filename (does not have to be the same as path) self._name = name # The mime type of the attached content. This is detected if not # otherwise specified. self._mimetype = mimetype # The detected_mimetype, this is only used as a fallback if the # mimetype wasn't forced by the user self.detected_mimetype = None # The detected filename by calling child class. A detected filename # is always used if no force naming was specified. self.detected_name = None # Absolute path to attachment self.download_path = None # Track open file pointers self.__pointers = set() # Set our cache flag; it can be True, False, None, or a (positive) # integer... nothing else if cache is not None: try: self.cache = cache if isinstance(cache, bool) else int(cache) except (TypeError, ValueError): err = 'An invalid cache value ({}) was specified.'.format( cache) self.logger.warning(err) raise TypeError(err) # Some simple error checking if self.cache < 0: err = 'A negative cache value ({}) was specified.'.format( cache) self.logger.warning(err) raise TypeError(err) else: self.cache = None # Validate mimetype if specified if self._mimetype: if next((t for t in mimetypes.types_map.values() if self._mimetype == t), None) is None: err = 'An invalid mime-type ({}) was specified.'.format( mimetype) self.logger.warning(err) raise TypeError(err) return @property def path(self): """ Returns the absolute path to the filename. If this is not known or is know but has been considered expired (due to cache setting), then content is re-retrieved prior to returning. """ if not self.exists(): # we could not obtain our path return None return self.download_path @property def name(self): """ Returns the filename """ if self._name: # return our fixed content return self._name if not self.exists(): # we could not obtain our name return None if not self.detected_name: # If we get here, our download was successful but we don't have a # filename based on our content. extension = mimetypes.guess_extension(self.mimetype) self.detected_name = '{}{}'.format( self.unknown_filename, extension if extension else self.unknown_filename_extension) return self.detected_name @property def mimetype(self): """ Returns mime type (if one is present). Content is cached once determied to prevent overhead of future calls. """ if not self.exists(): # we could not obtain our attachment return None if self._mimetype: # return our pre-calculated cached content return self._mimetype if not self.detected_mimetype: # guess_type() returns: (type, encoding) and sets type to None # if it can't otherwise determine it. 
try: # Directly reference _name and detected_name to prevent # recursion loop (as self.name calls this function) self.detected_mimetype, _ = mimetypes.guess_type( self._name if self._name else self.detected_name, strict=self.strict) except TypeError: # Thrown if None was specified in filename section pass # Return our mime type return self.detected_mimetype \ if self.detected_mimetype else self.unknown_mimetype def exists(self, retrieve_if_missing=True): """ Simply returns true if the object has downloaded and stored the attachment AND the attachment has not expired. """ if self.location == ContentLocation.INACCESSIBLE: # our content is inaccessible return False cache = self.template_args['cache']['default'] \ if self.cache is None else self.cache try: if self.download_path and os.path.isfile(self.download_path) \ and cache: # We have enough reason to look further into our cached content # and verify it has not expired. if cache is True: # return our fixed content as is; we will always cache it return True # Verify our cache time to determine whether we will get our # content again. age_in_sec = \ time.time() - os.stat(self.download_path).st_mtime if age_in_sec <= cache: return True except (OSError, IOError): # The file is not present pass return False if not retrieve_if_missing else self.download() def base64(self, encoding='ascii'): """ Returns the attachment object as a base64 string otherwise None is returned if an error occurs. If encoding is set to None, then it is not encoded when returned """ if not self: # We could not access the attachment self.logger.error( 'Could not access attachment {}.'.format( self.url(privacy=True))) raise exception.AppriseFileNotFound("Attachment Missing") try: with self.open() as f: # Prepare our Attachment in Base64 return base64.b64encode(f.read()).decode(encoding) \ if encoding else base64.b64encode(f.read()) except (TypeError, FileNotFoundError): # We no longer have a path to open raise exception.AppriseFileNotFound("Attachment Missing") except (TypeError, OSError, IOError) as e: self.logger.warning( 'An I/O error occurred while reading {}.'.format( self.name if self else 'attachment')) self.logger.debug('I/O Exception: %s' % str(e)) raise exception.AppriseDiskIOError("Attachment Access Error") def invalidate(self): """ Release any temporary data that may be open by child classes. Externally fetched content should be automatically cleaned up when this function is called. This function should also reset the following entries to None: - detected_name : Should identify a human readable filename - download_path: Must contain a absolute path to content - detected_mimetype: Should identify mimetype of content """ # Remove all open pointers while self.__pointers: self.__pointers.pop().close() self.detected_name = None self.download_path = None self.detected_mimetype = None return def download(self): """ This function must be over-ridden by inheriting classes. Inherited classes MUST populate: - detected_name: Should identify a human readable filename - download_path: Must contain a absolute path to content - detected_mimetype: Should identify mimetype of content If a download fails, you should ensure these values are set to None. 
""" raise NotImplementedError( "download() is implimented by the child class.") def open(self, mode='rb'): """ return our file pointer and track it (we'll auto close later) """ pointer = open(self.path, mode=mode) self.__pointers.add(pointer) return pointer def chunk(self, size=5242880): """ A Generator that yield chunks of a file with the specified size. By default the chunk size is set to 5MB (5242880 bytes) """ with self.open() as file: while True: chunk = file.read(size) if not chunk: break yield chunk def __enter__(self): """ support with keyword """ return self.open() def __exit__(self, value_type, value, traceback): """ stub to do nothing; but support exit of with statement gracefully """ return @staticmethod def parse_url(url, verify_host=True, mimetype_db=None, sanitize=True): """Parses the URL and returns it broken apart into a dictionary. This is very specific and customized for Apprise. Args: url (str): The URL you want to fully parse. verify_host (:obj:`bool`, optional): a flag kept with the parsed URL which some child classes will later use to verify SSL keys (if SSL transactions take place). Unless under very specific circumstances, it is strongly recomended that you leave this default value set to True. Returns: A dictionary is returned containing the URL fully parsed if successful, otherwise None is returned. """ results = URLBase.parse_url( url, verify_host=verify_host, sanitize=sanitize) if not results: # We're done; we failed to parse our url return results # Allow overriding the default config mime type if 'mime' in results['qsd']: results['mimetype'] = results['qsd'].get('mime', '') \ .strip().lower() # Allow overriding the default file name if 'name' in results['qsd']: results['name'] = results['qsd'].get('name', '') \ .strip().lower() # Our cache value if 'cache' in results['qsd']: # First try to get it's integer value try: results['cache'] = int(results['qsd']['cache']) except (ValueError, TypeError): # No problem, it just isn't an integer; now treat it as a bool # instead: results['cache'] = parse_bool(results['qsd']['cache']) return results def __len__(self): """ Returns the filesize of the attachment. """ if not self: return 0 try: return os.path.getsize(self.path) if self.path else 0 except OSError: # OSError can occur if the file is inaccessible return 0 def __bool__(self): """ Allows the Apprise object to be wrapped in an based 'if statement'. True is returned if our content was downloaded correctly. """ return True if self.path else False def __del__(self): """ Perform any house cleaning """ self.invalidate() apprise-1.9.3/apprise/attachment/base.pyi000066400000000000000000000017011477231770000204260ustar00rootroot00000000000000from typing import Any, Dict, Optional from .. import ContentLocation class AttachBase: max_detect_buffer_size: int unknown_mimetype: str unknown_filename: str unknown_filename_extension: str strict: bool max_file_size: int location: ContentLocation template_args: Dict[str, Any] def __init__( self, name: Optional[str] = ..., mimetype: Optional[str] = ..., cache: Optional[bool] = ..., **kwargs: Any ) -> None: ... @property def path(self) -> Optional[str]: ... @property def name(self) -> Optional[str]: ... @property def mimetype(self) -> Optional[str]: ... def exists(self) -> bool: ... def invalidate(self) -> None: ... def download(self) -> bool: ... @staticmethod def parse_url( url: str, verify_host: bool = ... ) -> Dict[str, Any]: ... def __len__(self) -> int: ... def __bool__(self) -> bool: ... 
apprise-1.9.3/apprise/attachment/file.py000066400000000000000000000115601477231770000202660ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import re import os from .base import AttachBase from ..utils.disk import path_decode from ..common import ContentLocation from ..locale import gettext_lazy as _ class AttachFile(AttachBase): """ A wrapper for File based attachment sources """ # The default descriptive name associated with the service service_name = _('Local File') # The default protocol protocol = 'file' # Content is local to the same location as the apprise instance # being called (server-side) location = ContentLocation.LOCAL def __init__(self, path, **kwargs): """ Initialize Local File Attachment Object """ super().__init__(**kwargs) # Store path but mark it dirty since we have not performed any # verification at this point. self.dirty_path = path_decode(path) # Track our file as it was saved self.__original_path = os.path.normpath(path) return def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = {} if self._mimetype: # A mime-type was enforced params['mime'] = self._mimetype if self._name: # A name was enforced params['name'] = self._name return 'file://{path}{params}'.format( path=self.quote(self.__original_path), params='?{}'.format(self.urlencode(params, safe='/')) if params else '', ) def download(self, **kwargs): """ Perform retrieval of our data. For file base attachments, our data already exists, so we only need to validate it. 
""" if self.location == ContentLocation.INACCESSIBLE: # our content is inaccessible return False # Ensure any existing content set has been invalidated self.invalidate() try: if not os.path.isfile(self.dirty_path): return False except OSError: return False if self.max_file_size > 0 and \ os.path.getsize(self.dirty_path) > self.max_file_size: # The content to attach is to large self.logger.error( 'Content exceeds allowable maximum file length ' '({}KB): {}'.format( int(self.max_file_size / 1024), self.url(privacy=True))) # Return False (signifying a failure) return False # We're good to go if we get here. Set our minimum requirements of # a call do download() before returning a success self.download_path = self.dirty_path self.detected_name = os.path.basename(self.download_path) # We don't need to set our self.detected_mimetype as it can be # pulled at the time it's needed based on the detected_name return True @staticmethod def parse_url(url): """ Parses the URL so that we can handle all different file paths and return it as our path object """ results = AttachBase.parse_url(url, verify_host=False) if not results: # We're done early; it's not a good URL return results match = re.match(r'file://(?P[^?]+)(\?.*)?', url, re.I) if not match: return None results['path'] = AttachFile.unquote(match.group('path')) return results apprise-1.9.3/apprise/attachment/http.py000066400000000000000000000326761477231770000203410ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
import re import os import requests import threading from tempfile import NamedTemporaryFile from .base import AttachBase from ..common import ContentLocation from ..url import PrivacyMode from ..locale import gettext_lazy as _ class AttachHTTP(AttachBase): """ A wrapper for HTTP based attachment sources """ # The default descriptive name associated with the service service_name = _('Web Based') # The default protocol protocol = 'http' # The default secure protocol secure_protocol = 'https' # The number of bytes in memory to read from the remote source at a time chunk_size = 8192 # Web based requests are remote/external to our current location location = ContentLocation.HOSTED # thread safe loading _lock = threading.Lock() def __init__(self, headers=None, **kwargs): """ Initialize HTTP Object headers can be a dictionary of key/value pairs that you want to additionally include as part of the server headers to post with """ super().__init__(**kwargs) self.schema = 'https' if self.secure else 'http' self.fullpath = kwargs.get('fullpath') if not isinstance(self.fullpath, str): self.fullpath = '/' self.headers = {} if headers: # Store our extra headers self.headers.update(headers) # Where our content is written to upon a call to download. self._temp_file = None # Our Query String Dictionary; we use this to track arguments # specified that aren't otherwise part of this class self.qsd = {k: v for k, v in kwargs.get('qsd', {}).items() if k not in self.template_args} return def download(self, **kwargs): """ Perform retrieval of the configuration based on the specified request """ if self.location == ContentLocation.INACCESSIBLE: # our content is inaccessible return False # prepare header headers = { 'User-Agent': self.app_id, } # Apply any/all header over-rides defined headers.update(self.headers) auth = None if self.user: auth = (self.user, self.password) url = '%s://%s' % (self.schema, self.host) if isinstance(self.port, int): url += ':%d' % self.port url += self.fullpath # Where our request object will temporarily live. 
r = None # Always call throttle before any remote server i/o is made self.throttle() with self._lock: if self.exists(retrieve_if_missing=False): # Due to locking; it's possible a concurrent thread already # handled the retrieval in which case we can safely move on self.logger.trace( 'HTTP Attachment %s already retrieved', self._temp_file.name) return True # Ensure any existing content set has been invalidated self.invalidate() self.logger.debug( 'HTTP Attachment Fetch URL: %s (cert_verify=%r)' % ( url, self.verify_certificate)) try: # Make our request with requests.get( url, headers=headers, auth=auth, params=self.qsd, verify=self.verify_certificate, timeout=self.request_timeout, stream=True) as r: # Handle Errors r.raise_for_status() # Get our file-size (if known) try: file_size = int(r.headers.get('Content-Length', '0')) except (TypeError, ValueError): # Handle edge case where Content-Length is a bad value file_size = 0 # Perform a little Q/A on file limitations and restrictions if self.max_file_size > 0 and \ file_size > self.max_file_size: # The content retrieved is to large self.logger.error( 'HTTP response exceeds allowable maximum file ' 'length ({}KB): {}'.format( int(self.max_file_size / 1024), self.url(privacy=True))) # Return False (signifying a failure) return False # Detect config format based on mime if the format isn't # already enforced self.detected_mimetype = r.headers.get('Content-Type') d = r.headers.get('Content-Disposition', '') result = re.search( "filename=['\"]?(?P[^'\"]+)['\"]?", d, re.I) if result: self.detected_name = result.group('name').strip() # Create a temporary file to work with; delete must be set # to False or it isn't compatible with Microsoft Windows # instances. In lieu of this, __del__ will clean up the # file for us. self._temp_file = NamedTemporaryFile(delete=False) # Get our chunk size chunk_size = self.chunk_size # Track all bytes written to disk bytes_written = 0 # If we get here, we can now safely write our content to # disk for chunk in r.iter_content(chunk_size=chunk_size): # filter out keep-alive chunks if chunk: self._temp_file.write(chunk) bytes_written = self._temp_file.tell() # Prevent a case where Content-Length isn't # provided. In this case we don't want to fetch # beyond our limits if self.max_file_size > 0: if bytes_written > self.max_file_size: # The content retrieved is to large self.logger.error( 'HTTP response exceeds allowable ' 'maximum file length ' '({}KB): {}'.format( int(self.max_file_size / 1024), self.url(privacy=True))) # Invalidate any variables previously set self.invalidate() # Return False (signifying a failure) return False elif bytes_written + chunk_size \ > self.max_file_size: # Adjust out next read to accomodate up to # our limit +1. This will prevent us from # reading to much into our memory buffer self.max_file_size - bytes_written + 1 # Ensure our content is flushed to disk for post-processing self._temp_file.flush() # Set our minimum requirements for a successful download() # call self.download_path = self._temp_file.name if not self.detected_name: self.detected_name = os.path.basename(self.fullpath) except requests.RequestException as e: self.logger.error( 'A Connection error occurred retrieving HTTP ' 'configuration from %s.' 
% self.host) self.logger.debug('Socket Exception: %s' % str(e)) # Invalidate any variables previously set self.invalidate() # Return False (signifying a failure) return False except (IOError, OSError): # IOError is present for backwards compatibility with Python # versions older then 3.3. >= 3.3 throw OSError now. # Could not open and/or write the temporary file self.logger.error( 'Could not write attachment to disk: {}'.format( self.url(privacy=True))) # Invalidate any variables previously set self.invalidate() # Return False (signifying a failure) return False # Return our success return True def invalidate(self): """ Close our temporary file """ if self._temp_file: self.logger.trace( 'Attachment cleanup of %s', self._temp_file.name) self._temp_file.close() try: # Ensure our file is removed (if it exists) os.unlink(self._temp_file.name) except OSError: pass # Reset our temporary file to prevent from entering # this block again self._temp_file = None super().invalidate() def __del__(self): """ Tidy memory if open """ self.invalidate() def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Our URL parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) # Prepare our cache value if self.cache is not None: if isinstance(self.cache, bool) or not self.cache: cache = 'yes' if self.cache else 'no' else: cache = int(self.cache) # Set our cache value params['cache'] = cache if self._mimetype: # A format was enforced params['mime'] = self._mimetype if self._name: # A name was enforced params['name'] = self._name # Append our headers into our parameters params.update({'+{}'.format(k): v for k, v in self.headers.items()}) # Apply any remaining entries to our URL params.update(self.qsd) # Determine Authentication auth = '' if self.user and self.password: auth = '{user}:{password}@'.format( user=self.quote(self.user, safe=''), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), ) elif self.user: auth = '{user}@'.format( user=self.quote(self.user, safe=''), ) default_port = 443 if self.secure else 80 return '{schema}://{auth}{hostname}{port}{fullpath}?{params}'.format( schema=self.secure_protocol if self.secure else self.protocol, auth=auth, hostname=self.quote(self.host, safe=''), port='' if self.port is None or self.port == default_port else ':{}'.format(self.port), fullpath=self.quote(self.fullpath, safe='/'), params=self.urlencode(params, safe='/'), ) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = AttachBase.parse_url(url, sanitize=False) if not results: # We're done early as we couldn't load the results return results # Add our headers that the user can potentially over-ride if they wish # to to our returned result set results['headers'] = results['qsd-'] results['headers'].update(results['qsd+']) return results apprise-1.9.3/apprise/attachment/memory.py000066400000000000000000000155271477231770000206660ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. 
Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import re import os import io import base64 from .base import AttachBase from .. import exception from ..common import ContentLocation from ..locale import gettext_lazy as _ import uuid class AttachMemory(AttachBase): """ A wrapper for Memory based attachment sources """ # The default descriptive name associated with the service service_name = _('Memory') # The default protocol protocol = 'memory' # Content is local to the same location as the apprise instance # being called (server-side) location = ContentLocation.LOCAL def __init__(self, content=None, name=None, mimetype=None, encoding='utf-8', **kwargs): """ Initialize Memory Based Attachment Object """ # Create our BytesIO object self._data = io.BytesIO() if content is None: # Empty; do nothing pass elif isinstance(content, str): content = content.encode(encoding) if mimetype is None: mimetype = 'text/plain' if not name: # Generate a unique filename name = str(uuid.uuid4()) + '.txt' elif not isinstance(content, bytes): raise TypeError( 'Provided content for memory attachment is invalid') # Store our content if content: self._data.write(content) if mimetype is None: # Default mimetype mimetype = 'application/octet-stream' if not name: # Generate a unique filename name = str(uuid.uuid4()) + '.dat' # Initialize our base object super().__init__(name=name, mimetype=mimetype, **kwargs) return def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'mime': self._mimetype, } return 'memory://{name}?{params}'.format( name=self.quote(self._name), params=self.urlencode(params, safe='/') ) def open(self, *args, **kwargs): """ return our memory object """ # Return our object self._data.seek(0, 0) return self._data def __enter__(self): """ support with clause """ # Return our object self._data.seek(0, 0) return self._data def download(self, **kwargs): """ Handle memory download() call """ if self.location == ContentLocation.INACCESSIBLE: # our content is inaccessible return False if self.max_file_size > 0 and len(self) > self.max_file_size: # The content to attach is to large self.logger.error( 'Content exceeds allowable maximum memory size ' '({}KB): {}'.format( int(self.max_file_size / 1024), self.url(privacy=True))) # Return False (signifying a failure) return False return True def base64(self, encoding='ascii'): """ We need to over-ride this since the base64 sub-library seems to close our file descriptor making it no longer referencable. 
""" if not self: # We could not access the attachment self.logger.error( 'Could not access attachment {}.'.format( self.url(privacy=True))) raise exception.AppriseFileNotFound("Attachment Missing") self._data.seek(0, 0) return base64.b64encode(self._data.read()).decode(encoding) \ if encoding else base64.b64encode(self._data.read()) def invalidate(self): """ Removes data """ self._data.truncate(0) return def exists(self): """ over-ride exists() call """ size = len(self) return True if self.location != ContentLocation.INACCESSIBLE \ and size > 0 and ( self.max_file_size <= 0 or (self.max_file_size > 0 and size <= self.max_file_size)) \ else False @staticmethod def parse_url(url): """ Parses the URL so that we can handle all different file paths and return it as our path object """ results = AttachBase.parse_url(url, verify_host=False) if not results: # We're done early; it's not a good URL return results if 'name' not in results: # Allow fall-back to be from URL match = re.match(r'memory://(?P[^?]+)(\?.*)?', url, re.I) if match: # Store our filename only (ignore any defined paths) results['name'] = \ os.path.basename(AttachMemory.unquote(match.group('path'))) return results @property def path(self): """ return the filename """ if not self.exists(): # we could not obtain our path return None return self._name def __len__(self): """ Returns the size of he memory attachment """ return self._data.getbuffer().nbytes def __bool__(self): """ Allows the Apprise object to be wrapped in an based 'if statement'. True is returned if our content was downloaded correctly. """ return self.exists() apprise-1.9.3/apprise/cli.py000066400000000000000000001047771477231770000160030ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import click import textwrap import logging import platform import sys import os import shutil import re from os.path import isfile from os.path import exists from . import Apprise from . import AppriseAsset from . import AppriseConfig from . 
import PersistentStore from .utils.parse import parse_list from .utils.disk import dir_size, bytes_to_str, path_decode from .common import NOTIFY_TYPES from .common import NOTIFY_FORMATS from .common import PERSISTENT_STORE_MODES from .common import PersistentStoreState from .common import ContentLocation from .logger import logger from . import __title__ from . import __version__ from . import __license__ from . import __copywrite__ # By default we allow looking 1 level down recursivly in Apprise configuration # files. DEFAULT_RECURSION_DEPTH = 1 # Default number of days to prune persistent storage DEFAULT_STORAGE_PRUNE_DAYS = \ int(os.environ.get('APPRISE_STORAGE_PRUNE_DAYS', 30)) # The default URL ID Length DEFAULT_STORAGE_UID_LENGTH = \ int(os.environ.get('APPRISE_STORAGE_UID_LENGTH', 8)) # Defines the envrionment variable to parse if defined. This is ONLY # Referenced if: # - No Configuration Files were found/loaded/specified # - No URLs were provided directly into the CLI Call DEFAULT_ENV_APPRISE_URLS = 'APPRISE_URLS' # Defines the over-ride path for the configuration files read DEFAULT_ENV_APPRISE_CONFIG_PATH = 'APPRISE_CONFIG_PATH' # Defines the over-ride path for the plugins to load DEFAULT_ENV_APPRISE_PLUGIN_PATH = 'APPRISE_PLUGIN_PATH' # Defines the over-ride path for the persistent storage DEFAULT_ENV_APPRISE_STORAGE_PATH = 'APPRISE_STORAGE_PATH' # Defines our click context settings adding -h to the additional options that # can be specified to get the help menu to come up CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help']) # Define our default configuration we use if nothing is otherwise specified DEFAULT_CONFIG_PATHS = ( # Legacy Path Support '~/.apprise', '~/.apprise.conf', '~/.apprise.yml', '~/.apprise.yaml', '~/.config/apprise', '~/.config/apprise.conf', '~/.config/apprise.yml', '~/.config/apprise.yaml', # Plugin Support Extended Directory Search Paths '~/.apprise/apprise', '~/.apprise/apprise.conf', '~/.apprise/apprise.yml', '~/.apprise/apprise.yaml', '~/.config/apprise/apprise', '~/.config/apprise/apprise.conf', '~/.config/apprise/apprise.yml', '~/.config/apprise/apprise.yaml', # Global Configuration File Support '/etc/apprise', '/etc/apprise.yml', '/etc/apprise.yaml', '/etc/apprise/apprise', '/etc/apprise/apprise.conf', '/etc/apprise/apprise.yml', '/etc/apprise/apprise.yaml', ) # Define our paths to search for plugins DEFAULT_PLUGIN_PATHS = ( '~/.apprise/plugins', '~/.config/apprise/plugins', # Global Plugin Support '/var/lib/apprise/plugins', ) # # Persistent Storage # DEFAULT_STORAGE_PATH = '~/.local/share/apprise/cache' # Detect Windows if platform.system() == 'Windows': # Default Config Search Path for Windows Users DEFAULT_CONFIG_PATHS = ( '%APPDATA%\\Apprise\\apprise', '%APPDATA%\\Apprise\\apprise.conf', '%APPDATA%\\Apprise\\apprise.yml', '%APPDATA%\\Apprise\\apprise.yaml', '%LOCALAPPDATA%\\Apprise\\apprise', '%LOCALAPPDATA%\\Apprise\\apprise.conf', '%LOCALAPPDATA%\\Apprise\\apprise.yml', '%LOCALAPPDATA%\\Apprise\\apprise.yaml', # # Global Support # # C:\ProgramData\Apprise '%ALLUSERSPROFILE%\\Apprise\\apprise', '%ALLUSERSPROFILE%\\Apprise\\apprise.conf', '%ALLUSERSPROFILE%\\Apprise\\apprise.yml', '%ALLUSERSPROFILE%\\Apprise\\apprise.yaml', # C:\Program Files\Apprise '%PROGRAMFILES%\\Apprise\\apprise', '%PROGRAMFILES%\\Apprise\\apprise.conf', '%PROGRAMFILES%\\Apprise\\apprise.yml', '%PROGRAMFILES%\\Apprise\\apprise.yaml', # C:\Program Files\Common Files '%COMMONPROGRAMFILES%\\Apprise\\apprise', '%COMMONPROGRAMFILES%\\Apprise\\apprise.conf', 
'%COMMONPROGRAMFILES%\\Apprise\\apprise.yml', '%COMMONPROGRAMFILES%\\Apprise\\apprise.yaml', ) # Default Plugin Search Path for Windows Users DEFAULT_PLUGIN_PATHS = ( '%APPDATA%\\Apprise\\plugins', '%LOCALAPPDATA%\\Apprise\\plugins', # # Global Support # # C:\ProgramData\Apprise\plugins '%ALLUSERSPROFILE%\\Apprise\\plugins', # C:\Program Files\Apprise\plugins '%PROGRAMFILES%\\Apprise\\plugins', # C:\Program Files\Common Files '%COMMONPROGRAMFILES%\\Apprise\\plugins', ) # # Persistent Storage # DEFAULT_STORAGE_PATH = '%APPDATA%/Apprise/cache' class PersistentStorageMode: """ Persistent Storage Modes """ # List all detected configuration loaded LIST = 'list' # Prune persistent storage based on age PRUNE = 'prune' # Reset all (reguardless of age) CLEAR = 'clear' # Define the types in a list for validation purposes PERSISTENT_STORAGE_MODES = ( PersistentStorageMode.LIST, PersistentStorageMode.PRUNE, PersistentStorageMode.CLEAR, ) if os.environ.get('APPRISE_STORAGE_PATH', '').strip(): # Over-ride Default Storage Path DEFAULT_STORAGE_PATH = os.environ.get('APPRISE_STORAGE_PATH') def print_version_msg(): """ Prints version message when -V or --version is specified. """ result = list() result.append('{} v{}'.format(__title__, __version__)) result.append(__copywrite__) result.append( 'This code is licensed under the {} License.'.format(__license__)) click.echo('\n'.join(result)) class CustomHelpCommand(click.Command): def format_help(self, ctx, formatter): formatter.write_text('Usage:') formatter.write_text( ' apprise [OPTIONS] [APPRISE_URL [APPRISE_URL2 [APPRISE_URL3]]]') formatter.write_text( ' apprise storage [OPTIONS] [ACTION] [UID1 [UID2 [UID3]]]') # Custom help message formatter.write_text('') content = ( 'Send a notification to all of the specified servers ' 'identified by their URLs', 'the content provided within the title, body and ' 'notification-type.', '', 'For a list of all of the supported services and information on ' 'how to use ', 'them, check out at https://github.com/caronc/apprise') for line in content: formatter.write_text(line) # Display options and arguments in the default format self.format_options(ctx, formatter) self.format_epilog(ctx, formatter) # Custom 'Actions:' section after the 'Options:' formatter.write_text('') formatter.write_text('Actions:') actions = [( 'storage', 'Access the persistent storage disk administration', [( 'list', 'List all URL IDs associated with detected URL(s). ' 'This is also the default action ran if nothing is provided', ), ( 'prune', 'Eliminates stale entries found based on ' '--storage-prune-days (-SPD)', ), ( 'clean', 'Removes any persistent data created by Apprise', )], )] # # Some variables # # actions are indented this many spaces # sub actions double this value action_indent = 2 # label padding (for alignment) action_label_width = 10 space = ' ' space_re = re.compile(r'\r*\n') cols = 80 indent = 10 # Format each action and its subactions for action, description, sub_actions in actions: # Our action indent ai = ' ' * action_indent # Format the main action description formatted_description = space_re.split(textwrap.fill( description, width=(cols - indent - action_indent), initial_indent=space * indent, subsequent_indent=space * indent)) for no, line in enumerate(formatted_description): if not no: formatter.write_text( f'{ai}{action:<{action_label_width}}{line}') else: # pragma: no cover # Note: no branch is set intentionally since this is not # tested since in 2025.08.13 when this was set up # it never entered this area of the code. 
But we # know it works because we repeat this process with # our sub-options below formatter.write_text( f'{ai}{space:<{action_label_width}}{line}') # Format each subaction ai = ' ' * (action_indent * 2) for action, description in sub_actions: formatted_description = space_re.split(textwrap.fill( description, width=(cols - indent - (action_indent * 3)), initial_indent=space * (indent - action_indent), subsequent_indent=space * (indent - action_indent))) for no, line in enumerate(formatted_description): if not no: formatter.write_text( f'{ai}{action:<{action_label_width}}{line}') else: formatter.write_text( f'{ai}{space:<{action_label_width}}{line}') # Include any epilog or additional text self.format_epilog(ctx, formatter) @click.command(context_settings=CONTEXT_SETTINGS, cls=CustomHelpCommand) @click.option('--body', '-b', default=None, type=str, help='Specify the message body. If no body is specified then ' 'content is read from .') @click.option('--title', '-t', default=None, type=str, help='Specify the message title. This field is complete ' 'optional.') @click.option('--plugin-path', '-P', default=None, type=str, multiple=True, metavar='PATH', help='Specify one or more plugin paths to scan.') @click.option('--storage-path', '-S', default=DEFAULT_STORAGE_PATH, type=str, metavar='PATH', help='Specify the path to the persistent storage location ' '(default={}).'.format(DEFAULT_STORAGE_PATH)) @click.option('--storage-prune-days', '-SPD', default=DEFAULT_STORAGE_PRUNE_DAYS, type=int, help='Define the number of days the storage prune ' 'should run using. Setting this to zero (0) will eliminate ' 'all accumulated content. By default this value is {} days.' .format(DEFAULT_STORAGE_PRUNE_DAYS)) @click.option('--storage-uid-length', '-SUL', default=DEFAULT_STORAGE_UID_LENGTH, type=int, help='Define the number of unique characters to store persistent' 'cache in. By default this value is {} characters.' .format(DEFAULT_STORAGE_UID_LENGTH)) @click.option('--storage-mode', '-SM', default=PERSISTENT_STORE_MODES[0], type=str, metavar='MODE', help='Specify the persistent storage operational mode ' '(default={}). Possible values are "{}", and "{}".'.format( PERSISTENT_STORE_MODES[0], '", "'.join( PERSISTENT_STORE_MODES[:-1]), PERSISTENT_STORE_MODES[-1])) @click.option('--config', '-c', default=None, type=str, multiple=True, metavar='CONFIG_URL', help='Specify one or more configuration locations.') @click.option('--attach', '-a', default=None, type=str, multiple=True, metavar='ATTACHMENT_URL', help='Specify one or more attachment.') @click.option('--notification-type', '-n', default=NOTIFY_TYPES[0], type=str, metavar='TYPE', help='Specify the message type (default={}). ' 'Possible values are "{}", and "{}".'.format( NOTIFY_TYPES[0], '", "'.join(NOTIFY_TYPES[:-1]), NOTIFY_TYPES[-1])) @click.option('--input-format', '-i', default=NOTIFY_FORMATS[0], type=str, metavar='FORMAT', help='Specify the message input format (default={}). ' 'Possible values are "{}", and "{}".'.format( NOTIFY_FORMATS[0], '", "'.join(NOTIFY_FORMATS[:-1]), NOTIFY_FORMATS[-1])) @click.option('--theme', '-T', default='default', type=str, metavar='THEME', help='Specify the default theme.') @click.option('--tag', '-g', default=None, type=str, multiple=True, metavar='TAG', help='Specify one or more tags to filter ' 'which services to notify. Use multiple --tag (-g) entries to ' '"OR" the tags together and comma separated to "AND" them. 
' 'If no tags are specified then all services are notified.') @click.option('--disable-async', '-Da', is_flag=True, help='Send all notifications sequentially') @click.option('--dry-run', '-d', is_flag=True, help='Perform a trial run but only prints the notification ' 'services to-be triggered to stdout. Notifications are never ' 'sent using this mode.') @click.option('--details', '-l', is_flag=True, help='Prints details about the current services supported by ' 'Apprise.') @click.option('--recursion-depth', '-R', default=DEFAULT_RECURSION_DEPTH, type=int, help='The number of recursive import entries that can be ' 'loaded from within Apprise configuration. By default ' 'this is set to {}.'.format(DEFAULT_RECURSION_DEPTH)) @click.option('--verbose', '-v', count=True, help='Makes the operation more talkative. Use multiple v to ' 'increase the verbosity. I.e.: -vvvv') @click.option('--interpret-escapes', '-e', is_flag=True, help='Enable interpretation of backslash escapes') @click.option('--interpret-emojis', '-j', is_flag=True, help='Enable interpretation of :emoji: definitions') @click.option('--debug', '-D', is_flag=True, help='Debug mode') @click.option('--version', '-V', is_flag=True, help='Display the apprise version and exit.') @click.argument('urls', nargs=-1, metavar='SERVER_URL [SERVER_URL2 [SERVER_URL3]]',) @click.pass_context def main(ctx, body, title, config, attach, urls, notification_type, theme, tag, input_format, dry_run, recursion_depth, verbose, disable_async, details, interpret_escapes, interpret_emojis, plugin_path, storage_path, storage_mode, storage_prune_days, storage_uid_length, debug, version): """ Send a notification to all of the specified servers identified by their URLs the content provided within the title, body and notification-type. For a list of all of the supported services and information on how to use them, check out at https://github.com/caronc/apprise """ # Note: Click ignores the return values of functions it wraps, If you # want to return a specific error code, you must call ctx.exit() # as you will see below. debug = True if debug else False if debug: # Verbosity must be a minimum of 3 verbose = 3 if verbose < 3 else verbose # Logging ch = logging.StreamHandler(sys.stdout) if verbose > 3: # -vvvv: Most Verbose Debug Logging logger.setLevel(logging.TRACE) elif verbose > 2: # -vvv: Debug Logging logger.setLevel(logging.DEBUG) elif verbose > 1: # -vv: INFO Messages logger.setLevel(logging.INFO) elif verbose > 0: # -v: WARNING Messages logger.setLevel(logging.WARNING) else: # No verbosity means we display ERRORS only AND any deprecation # warnings logger.setLevel(logging.ERROR) # Format our logger formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s') ch.setFormatter(formatter) logger.addHandler(ch) # Update our asyncio logger asyncio_logger = logging.getLogger('asyncio') for handler in logger.handlers: asyncio_logger.addHandler(handler) asyncio_logger.setLevel(logger.level) if version: print_version_msg() ctx.exit(0) # Simple Error Checking notification_type = notification_type.strip().lower() if notification_type not in NOTIFY_TYPES: click.echo( 'The --notification-type (-n) value of {} is not supported.' .format(notification_type)) click.echo("Try 'apprise --help' for more information.") # 2 is the same exit code returned by Click if there is a parameter # issue. 
For consistency, we also return a 2 ctx.exit(2) input_format = input_format.strip().lower() if input_format not in NOTIFY_FORMATS: click.echo( 'The --input-format (-i) value of {} is not supported.' .format(input_format)) click.echo("Try 'apprise --help' for more information.") # 2 is the same exit code returned by Click if there is a parameter # issue. For consistency, we also return a 2 ctx.exit(2) storage_mode = storage_mode.strip().lower() if storage_mode not in PERSISTENT_STORE_MODES: click.echo( 'The --storage-mode (-SM) value of {} is not supported.' .format(storage_mode)) click.echo("Try 'apprise --help' for more information.") # 2 is the same exit code returned by Click if there is a parameter # issue. For consistency, we also return a 2 ctx.exit(2) # # Apply Environment Over-rides if defined # _config_paths = DEFAULT_CONFIG_PATHS if 'APPRISE_CONFIG' in os.environ: # Deprecate (this was from previous versions of Apprise <= 1.9.1) logger.deprecate( 'APPRISE_CONFIG environment variable has been changed to ' f'{DEFAULT_ENV_APPRISE_CONFIG_PATH}') logger.debug( 'Loading provided APPRISE_CONFIG (deprecated) environment ' 'variable') _config_paths = (os.environ.get('APPRISE_CONFIG', '').strip(), ) elif DEFAULT_ENV_APPRISE_CONFIG_PATH in os.environ: logger.debug( f'Loading provided {DEFAULT_ENV_APPRISE_CONFIG_PATH} ' 'environment variable') _config_paths = re.split( r'[\r\n;]+', os.environ.get( DEFAULT_ENV_APPRISE_CONFIG_PATH).strip()) _plugin_paths = DEFAULT_PLUGIN_PATHS if DEFAULT_ENV_APPRISE_PLUGIN_PATH in os.environ: logger.debug( f'Loading provided {DEFAULT_ENV_APPRISE_PLUGIN_PATH} environment ' 'variable') _plugin_paths = re.split( r'[\r\n;]+', os.environ.get( DEFAULT_ENV_APPRISE_PLUGIN_PATH).strip()) if DEFAULT_ENV_APPRISE_STORAGE_PATH in os.environ: logger.debug( f'Loading provided {DEFAULT_ENV_APPRISE_STORAGE_PATH} environment ' 'variable') storage_path = \ os.environ.get(DEFAULT_ENV_APPRISE_STORAGE_PATH).strip() # # Continue with initialization process # # Prepare a default set of plugin paths to scan; anything specified # on the CLI always trumps plugin_paths = \ [path for path in _plugin_paths if exists(path_decode(path))] \ if not plugin_path else plugin_path if storage_uid_length < 2: click.echo( 'The --storage-uid-length (-SUL) value can not be lower ' 'then two (2).') click.echo("Try 'apprise --help' for more information.") # 2 is the same exit code returned by Click if there is a # parameter issue. For consistency, we also return a 2 ctx.exit(2) # Prepare our asset asset = AppriseAsset( # Our body format body_format=input_format, # Interpret Escapes interpret_escapes=interpret_escapes, # Interpret Emojis interpret_emojis=None if not interpret_emojis else True, # Set the theme theme=theme, # Async mode allows a user to send all of their notifications # asynchronously. This was made an option incase there are problems # in the future where it is better that everything runs sequentially/ # synchronously instead. 
async_mode=disable_async is not True, # Load our plugins plugin_paths=plugin_paths, # Load our persistent storage path storage_path=path_decode(storage_path), # Our storage URL ID Length storage_idlen=storage_uid_length, # Define if we flush to disk as soon as possible or not when required storage_mode=storage_mode ) # Create our Apprise object a = Apprise(asset=asset, debug=debug, location=ContentLocation.LOCAL) # Track if we are performing a storage action storage_action = True if urls and 'storage'.startswith(urls[0]) else False if details: # Print details and exit results = a.details(show_requirements=True, show_disabled=True) # Sort our results: plugins = sorted( results['schemas'], key=lambda i: str(i['service_name'])) for entry in plugins: protocols = [] if not entry['protocols'] else \ [p for p in entry['protocols'] if isinstance(p, str)] protocols.extend( [] if not entry['secure_protocols'] else [p for p in entry['secure_protocols'] if isinstance(p, str)]) if len(protocols) == 1: # Simplify view by swapping {schema} with the single # protocol value # Convert tuple to list entry['details']['templates'] = \ list(entry['details']['templates']) for x in range(len(entry['details']['templates'])): entry['details']['templates'][x] = \ re.sub( r'^[^}]+}://', '{}://'.format(protocols[0]), entry['details']['templates'][x]) fg = "green" if entry['enabled'] else "red" if entry['category'] == 'custom': # Identify these differently fg = "cyan" # Flip the enable switch so it forces the requirements # to be displayed entry['enabled'] = False click.echo(click.style( '{} {:<30} '.format( '+' if entry['enabled'] else '-', str(entry['service_name'])), fg=fg, bold=True), nl=(not entry['enabled'] or len(protocols) == 1)) if not entry['enabled']: if entry['requirements']['details']: click.echo( ' ' + str(entry['requirements']['details'])) if entry['requirements']['packages_required']: click.echo(' Python Packages Required:') for req in entry['requirements']['packages_required']: click.echo(' - ' + req) if entry['requirements']['packages_recommended']: click.echo(' Python Packages Recommended:') for req in entry['requirements']['packages_recommended']: click.echo(' - ' + req) # new line padding between entries if entry['category'] == 'native': click.echo() continue if len(protocols) > 1: click.echo('| Schema(s): {}'.format( ', '.join(protocols), )) prefix = ' - ' click.echo('{}{}'.format( prefix, '\n{}'.format(prefix).join(entry['details']['templates']))) # new line padding between entries click.echo() ctx.exit(0) # end if details() # The priorities of what is accepted are parsed in order below: # 1. URLs by command line # 2. Configuration by command line # 3. URLs by environment variable: APPRISE_URLS # 4. 
Default Configuration File(s) # elif urls and not storage_action: if tag: # Ignore any tags specified logger.warning( '--tag (-g) entries are ignored when using specified URLs') tag = None # Load our URLs (if any defined) for url in urls: a.add(url) if config: # Provide a warning to the end user if they specified both logger.warning( 'You defined both URLs and a --config (-c) entry; ' 'Only the URLs will be referenced.') elif config: # We load our configuration file(s) now only if no URLs were specified # Specified config entries trump all a.add(AppriseConfig( paths=config, asset=asset, recursion=recursion_depth)) elif os.environ.get(DEFAULT_ENV_APPRISE_URLS, '').strip(): logger.debug( f'Loading provided {DEFAULT_ENV_APPRISE_URLS} environment ' 'variable') if tag: # Ignore any tags specified logger.warning( '--tag (-g) entries are ignored when using specified URLs') tag = None # Attempt to use our APPRISE_URLS environment variable (if populated) a.add(os.environ[DEFAULT_ENV_APPRISE_URLS].strip()) else: # Load default configuration a.add(AppriseConfig( paths=[f for f in _config_paths if isfile(path_decode(f))], asset=asset, recursion=recursion_depth)) if not dry_run and not (a or storage_action): click.echo( 'You must specify at least one server URL or populated ' 'configuration file.') click.echo("Try 'apprise --help' for more information.") ctx.exit(1) # each --tag entry comprises of a comma separated 'and' list # we or each of of the --tag and sets specified. tags = None if not tag else [parse_list(t) for t in tag] # Determine if we're dealing with URLs or url_ids based on the first # entry provided. if storage_action: # # Storage Mode # - urls are now to be interpreted as best matching namespaces # if storage_prune_days < 0: click.echo( 'The --storage-prune-days (-SPD) value can not be lower ' 'then zero (0).') click.echo("Try 'apprise --help' for more information.") # 2 is the same exit code returned by Click if there is a # parameter issue. For consistency, we also return a 2 ctx.exit(2) # Number of columns to assume in the terminal. In future, maybe this # can be detected and made dynamic. 
The actual column count is 80, but # 5 characters are already reserved for the counter on the left (columns, _) = shutil.get_terminal_size(fallback=(80, 24)) # Pop 'storage' off of the head of our list filter_uids = urls[1:] action = PERSISTENT_STORAGE_MODES[0] if filter_uids: _action = next( # pragma: no branch (a for a in PERSISTENT_STORAGE_MODES if a.startswith(filter_uids[0])), None) if _action: # pop 'action' off the head of our list filter_uids = filter_uids[1:] action = _action # Get our detected URL IDs uids = {} for plugin in (a if not tags else a.find(tag=tags)): _id = plugin.url_id() if not _id: continue if filter_uids and next( (False for n in filter_uids if _id.startswith(n)), True): continue if _id not in uids: uids[_id] = { 'plugins': [plugin], 'state': PersistentStoreState.UNUSED, 'size': 0, } else: # It's possible to have more then one URL point to the same # location (thus match against the same url id more then once uids[_id]['plugins'].append(plugin) if action == PersistentStorageMode.LIST: detected_uid = PersistentStore.disk_scan( # Use our asset path as it has already been properly parsed path=asset.storage_path, # Provide filter if specified namespace=filter_uids, ) for _id in detected_uid: size, _ = dir_size(os.path.join(asset.storage_path, _id)) if _id in uids: uids[_id]['state'] = PersistentStoreState.ACTIVE uids[_id]['size'] = size elif not tags: uids[_id] = { 'plugins': [], # No cross reference (wasted space?) 'state': PersistentStoreState.STALE, # Acquire disk space 'size': size, } for idx, (uid, meta) in enumerate(uids.items()): fg = "green" \ if meta['state'] == PersistentStoreState.ACTIVE else ( "red" if meta['state'] == PersistentStoreState.STALE else "white") if idx > 0: # New line click.echo() click.echo("{: 4d}. ".format(idx + 1), nl=False) click.echo(click.style("{:<52} {:<8} {}".format( uid, bytes_to_str(meta['size']), meta['state']), fg=fg, bold=True)) for entry in meta['plugins']: url = entry.url(privacy=True) click.echo("{:>7} {}".format( '-', url if len(url) <= (columns - 8) else '{}...'.format( url[:columns - 11]))) if entry.tags: click.echo("{:>10}: {}".format( 'tags', ', '.join(entry.tags))) else: # PersistentStorageMode.PRUNE or PersistentStorageMode.CLEAR if action == PersistentStorageMode.CLEAR: storage_prune_days = 0 # clean up storage results = PersistentStore.disk_prune( # Use our asset path as it has already been properly parsed path=asset.storage_path, # Provide our namespaces if they exist namespace=None if not filter_uids else filter_uids, # Convert expiry from days to seconds expires=storage_prune_days * 60 * 60 * 24, action=not dry_run) ctx.exit(0) # end if disk_prune() ctx.exit(0) # end if storage() if not dry_run: if body is None: logger.trace('No --body (-b) specified; reading from stdin') # if no body was specified, then read from STDIN body = click.get_text_stream('stdin').read() # now print it out result = a.notify( body=body, title=title, notify_type=notification_type, tag=tags, attach=attach) else: # Number of columns to assume in the terminal. In future, maybe this # can be detected and made dynamic. The actual column count is 80, but # 5 characters are already reserved for the counter on the left (columns, _) = shutil.get_terminal_size(fallback=(80, 24)) # Initialize our URL response; This is populated within the for/loop # below; but plays a factor at the end when we need to determine if # we iterated at least once in the loop. 
url = None for idx, server in enumerate(a.find(tag=tags)): url = server.url(privacy=True) click.echo("{: 4d}. {}".format( idx + 1, url if len(url) <= (columns - 8) else '{}...'.format( url[:columns - 9]))) # Share our URL ID click.echo("{:>10}: {}".format( 'uid', '- n/a -' if not server.url_id() else server.url_id())) if server.tags: click.echo("{:>10}: {}".format('tags', ', '.join(server.tags))) # Initialize a default response of nothing matched, otherwise # if we matched at least one entry, we can return True result = None if url is None else True if result is None: # There were no notifications set. This is a result of just having # empty configuration files and/or being to restrictive when filtering # by specific tag(s) # Exit code 3 is used since Click uses exit code 2 if there is an # error with the parameters specified ctx.exit(3) elif result is False: # At least 1 notification service failed to send ctx.exit(1) # else: We're good! ctx.exit(0) apprise-1.9.3/apprise/common.py000066400000000000000000000145741477231770000165170ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. class NotifyType: """ A simple mapping of notification types most commonly used with all types of logging and notification services. """ INFO = 'info' SUCCESS = 'success' WARNING = 'warning' FAILURE = 'failure' NOTIFY_TYPES = ( NotifyType.INFO, NotifyType.SUCCESS, NotifyType.WARNING, NotifyType.FAILURE, ) class NotifyImageSize: """ A list of pre-defined image sizes to make it easier to work with defined plugins. """ XY_32 = '32x32' XY_72 = '72x72' XY_128 = '128x128' XY_256 = '256x256' NOTIFY_IMAGE_SIZES = ( NotifyImageSize.XY_32, NotifyImageSize.XY_72, NotifyImageSize.XY_128, NotifyImageSize.XY_256, ) class NotifyFormat: """ A list of pre-defined text message formats that can be passed via the apprise library. """ TEXT = 'text' HTML = 'html' MARKDOWN = 'markdown' NOTIFY_FORMATS = ( NotifyFormat.TEXT, NotifyFormat.HTML, NotifyFormat.MARKDOWN, ) class OverflowMode: """ A list of pre-defined modes of how to handle the text when it exceeds the defined maximum message size. """ # Send the data as is; untouched. 
Let the upstream server decide how the # content is handled. Some upstream services might gracefully handle this # with expected intentions; others might not. UPSTREAM = 'upstream' # Always truncate the text when it exceeds the maximum message size and # send it anyway TRUNCATE = 'truncate' # Split the message into multiple smaller messages that fit within the # limits of what is expected. The smaller messages are sent SPLIT = 'split' # Define our modes so we can verify if we need to OVERFLOW_MODES = ( OverflowMode.UPSTREAM, OverflowMode.TRUNCATE, OverflowMode.SPLIT, ) class ConfigFormat: """ A list of pre-defined config formats that can be passed via the apprise library. """ # A text based configuration. This consists of a list of URLs delimited by # a new line. pound/hashtag (#) or semi-colon (;) can be used as comment # characters. TEXT = 'text' # YAML files allow a more rich of an experience when settig up your # apprise configuration files. YAML = 'yaml' # Define our configuration formats mostly used for verification CONFIG_FORMATS = ( ConfigFormat.TEXT, ConfigFormat.YAML, ) class ContentIncludeMode: """ The different Content inclusion modes. All content based plugins will have one of these associated with it. """ # - Content inclusion of same type only; hence a file:// can include # a file:// # - Cross file inclusion is not allowed unless insecure_includes (a flag) # is set to True. In these cases STRICT acts as type ALWAYS STRICT = 'strict' # This content type can never be included NEVER = 'never' # This content can always be included ALWAYS = 'always' CONTENT_INCLUDE_MODES = ( ContentIncludeMode.STRICT, ContentIncludeMode.NEVER, ContentIncludeMode.ALWAYS, ) class ContentLocation: """ This is primarily used for handling file attachments. The idea is to track the source of the attachment itself. We don't want remote calls to a server to access local attachments for example. By knowing the attachment type and cross-associating it with how we plan on accessing the content, we can make a judgement call (for security reasons) if we will allow it. Obviously local uses of apprise can access both local and remote type files. """ # Content is located locally (on the same server as apprise) LOCAL = 'local' # Content is located in a remote location HOSTED = 'hosted' # Content is inaccessible INACCESSIBLE = 'n/a' CONTENT_LOCATIONS = ( ContentLocation.LOCAL, ContentLocation.HOSTED, ContentLocation.INACCESSIBLE, ) class PersistentStoreMode: # Allow persistent storage; write on demand AUTO = 'auto' # Always flush every change to disk after it's saved. This has higher i/o # but enforces disk reflects what was set immediately FLUSH = 'flush' # memory based store only MEMORY = 'memory' PERSISTENT_STORE_MODES = ( PersistentStoreMode.AUTO, PersistentStoreMode.FLUSH, PersistentStoreMode.MEMORY, ) class PersistentStoreState: """ Defines the persistent states describing what has been cached """ # Persistent Directory is actively cross-referenced against a matching URL ACTIVE = 'active' # Persistent Directory is no longer being used or has no cross-reference STALE = 'stale' # Persistent Directory is not utilizing any disk space at all, however # it potentially could if the plugin it successfully cross-references # is utilized UNUSED = 'unused' # This is a reserved tag that is automatically assigned to every # Notification Plugin MATCH_ALL_TAG = 'all' # Will cause notification to trigger under any circumstance even if an # exclusive tagging was provided. 
MATCH_ALWAYS_TAG = 'always' apprise-1.9.3/apprise/common.pyi000066400000000000000000000006771477231770000166670ustar00rootroot00000000000000import types import typing as t class NotifyType: INFO: NotifyType SUCCESS: NotifyType WARNING: NotifyType FAILURE: NotifyType class NotifyFormat: TEXT: NotifyFormat HTML: NotifyFormat MARKDOWN: NotifyFormat class ContentLocation: LOCAL: ContentLocation HOSTED: ContentLocation INACCESSIBLE: ContentLocation NOTIFY_MODULE_MAP: t.Dict[str, t.Dict[str, t.Union[t.Type["NotifyBase"], types.ModuleType]]] apprise-1.9.3/apprise/config/000077500000000000000000000000001477231770000161075ustar00rootroot00000000000000apprise-1.9.3/apprise/config/__init__.py000066400000000000000000000032171477231770000202230ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # Used for testing from .base import ConfigBase from ..manager_config import ConfigurationManager # Initalize our Config Manager Singleton C_MGR = ConfigurationManager() __all__ = [ # Reference 'ConfigBase', 'ConfigurationManager', ] apprise-1.9.3/apprise/config/base.py000066400000000000000000001476111477231770000174050ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import os import re import yaml import time from .. import plugins from .. import common from ..asset import AppriseAsset from ..url import URLBase from ..utils.parse import GET_SCHEMA_RE, parse_list, parse_bool, parse_urls from ..utils.cwe312 import cwe312_url from ..manager_config import ConfigurationManager from ..manager_plugins import NotificationManager # Test whether token is valid or not VALID_TOKEN = re.compile( r'(?P<token>[a-z0-9][a-z0-9_]+)', re.I) # Grant access to our Notification Manager Singleton N_MGR = NotificationManager() # Grant access to our Configuration Manager Singleton C_MGR = ConfigurationManager() class ConfigBase(URLBase): """ This is the base class for all supported configuration sources """ # The Default Encoding to use if not otherwise detected encoding = 'utf-8' # The default expected configuration format unless otherwise # detected by the sub-modules default_config_format = common.ConfigFormat.TEXT # This is only set if the user overrides the config format on the URL # this should always initialize itself as None config_format = None # Don't read any more of this amount of data into memory as there is no # reason we should be reading in more. This is more of a safeguard than # anything else. 128KB (131072B) max_buffer_size = 131072 # By default all configuration is not includable using the 'include' # line found in configuration files. allow_cross_includes = common.ContentIncludeMode.NEVER # the config path manages the handling of relative include config_path = os.getcwd() def __init__(self, cache=True, recursion=0, insecure_includes=False, **kwargs): """ Initialize some general logging and common server arguments that will keep things consistent when working with the configurations that inherit this class. By default we cache our responses so that subsequent calls do not cause the content to be retrieved again. For local file references this makes no difference at all. But for remote content, this does mean more than one call can be made to retrieve the (same) data. This method can be somewhat inefficient if disabled. Only disable caching if you understand the consequences. You can alternatively set the cache value to an int identifying the number of seconds the previously retrieved content can exist for before it should be considered expired. recursion defines how deep we recursively handle entries that use the `include` keyword. This keyword requires us to fetch more configuration from another source and add it to our existing compilation. If the file we remotely retrieve also has an `include` reference, we will only advance through it if recursion is set to 2 deep. If set to zero it is off. There is no limit to how high you set this value. It would be recommended to keep it low if you do intend to use it. insecure_includes is disabled by default. When set to True, all Apprise Config files marked to be in STRICT mode are treated as being in ALWAYS mode. 
Take a file:// based configuration for example, only a file:// based configuration can include another file:// based one. because it is set to STRICT mode. If an http:// based configuration file attempted to include a file:// one it woul fail. However this include would be possible if insecure_includes is set to True. There are cases where a self hosting apprise developer may wish to load configuration from memory (in a string format) that contains 'include' entries (even file:// based ones). In these circumstances if you want these 'include' entries to be honored, this value must be set to True. """ super().__init__(**kwargs) # Tracks the time the content was last retrieved on. This place a role # for cases where we are not caching our response and are required to # re-retrieve our settings. self._cached_time = None # Tracks previously loaded content for speed self._cached_servers = None # Initialize our recursion value self.recursion = recursion # Initialize our insecure_includes flag self.insecure_includes = insecure_includes if 'encoding' in kwargs: # Store the encoding self.encoding = kwargs.get('encoding') if 'format' in kwargs \ and isinstance(kwargs['format'], str): # Store the enforced config format self.config_format = kwargs.get('format').lower() if self.config_format not in common.CONFIG_FORMATS: # Simple error checking err = 'An invalid config format ({}) was specified.'.format( self.config_format) self.logger.warning(err) raise TypeError(err) # Set our cache flag; it can be True or a (positive) integer try: self.cache = cache if isinstance(cache, bool) else int(cache) if self.cache < 0: err = 'A negative cache value ({}) was specified.'.format( cache) self.logger.warning(err) raise TypeError(err) except (ValueError, TypeError): err = 'An invalid cache value ({}) was specified.'.format(cache) self.logger.warning(err) raise TypeError(err) return def servers(self, asset=None, **kwargs): """ Performs reads loaded configuration and returns all of the services that could be parsed and loaded. """ if not self.expired(): # We already have cached results to return; use them return self._cached_servers # Our cached response object self._cached_servers = list() # read() causes the child class to do whatever it takes for the # config plugin to load the data source and return unparsed content # None is returned if there was an error or simply no data content = self.read(**kwargs) if not isinstance(content, str): # Set the time our content was cached at self._cached_time = time.time() # Nothing more to do; return our empty cache list return self._cached_servers # Our Configuration format uses a default if one wasn't one detected # or enfored. config_format = \ self.default_config_format \ if self.config_format is None else self.config_format # Dynamically load our parse_ function based on our config format fn = getattr(ConfigBase, 'config_parse_{}'.format(config_format)) # Initialize our asset object asset = asset if isinstance(asset, AppriseAsset) else self.asset # Execute our config parse function which always returns a tuple # of our servers and our configuration servers, configs = fn(content=content, asset=asset) self._cached_servers.extend(servers) # Configuration files were detected; recursively populate them # If we have been configured to do so for url in configs: if self.recursion > 0: # Attempt to acquire the schema at the very least to allow # our configuration based urls. 
schema = GET_SCHEMA_RE.match(url) if schema is None: # Plan B is to assume we're dealing with a file schema = 'file' if not os.path.isabs(url): # We're dealing with a relative path; prepend # our current config path url = os.path.join(self.config_path, url) url = '{}://{}'.format(schema, URLBase.quote(url)) else: # Ensure our schema is always in lower case schema = schema.group('schema').lower() # Some basic validation if schema not in C_MGR: ConfigBase.logger.warning( 'Unsupported include schema {}.'.format(schema)) continue # CWE-312 (Secure Logging) Handling loggable_url = url if not asset.secure_logging \ else cwe312_url(url) # Parse our url details of the server object as dictionary # containing all of the information parsed from our URL results = C_MGR[schema].parse_url(url) if not results: # Failed to parse the server URL self.logger.warning( 'Unparseable include URL {}'.format(loggable_url)) continue # Handle cross inclusion based on allow_cross_includes rules if (C_MGR[schema].allow_cross_includes == common.ContentIncludeMode.STRICT and schema not in self.schemas() and not self.insecure_includes) or C_MGR[schema] \ .allow_cross_includes == \ common.ContentIncludeMode.NEVER: # Prevent the loading if insecure base protocols ConfigBase.logger.warning( 'Including {}:// based configuration is prohibited. ' 'Ignoring URL {}'.format(schema, loggable_url)) continue # Prepare our Asset Object results['asset'] = asset # No cache is required because we're just lumping this in # and associating it with the cache value we've already # declared (prior to our recursion) results['cache'] = False # Recursion can never be parsed from the URL; we decrement # it one level results['recursion'] = self.recursion - 1 # Insecure Includes flag can never be parsed from the URL results['insecure_includes'] = self.insecure_includes try: # Attempt to create an instance of our plugin using the # parsed URL information cfg_plugin = C_MGR[results['schema']](**results) except Exception as e: # the arguments are invalid or can not be used. self.logger.warning( 'Could not load include URL: {}'.format(loggable_url)) self.logger.debug('Loading Exception: {}'.format(str(e))) continue # if we reach here, we can now add this servers found # in this configuration file to our list self._cached_servers.extend( cfg_plugin.servers(asset=asset)) # We no longer need our configuration object del cfg_plugin else: # CWE-312 (Secure Logging) Handling loggable_url = url if not asset.secure_logging \ else cwe312_url(url) self.logger.debug( 'Recursion limit reached; ignoring Include URL: %s', loggable_url) if self._cached_servers: self.logger.info( 'Loaded {} entries from {}'.format( len(self._cached_servers), self.url(privacy=asset.secure_logging))) else: self.logger.warning( 'Failed to load Apprise configuration from {}'.format( self.url(privacy=asset.secure_logging))) # Set the time our content was cached at self._cached_time = time.time() return self._cached_servers def read(self): """ This object should be implimented by the child classes """ return None def expired(self): """ Simply returns True if the configuration should be considered as expired or False if content should be retrieved. """ if isinstance(self._cached_servers, list) and self.cache: # We have enough reason to look further into our cached content # and verify it has not expired. if self.cache is True: # we have not expired, return False return False # Verify our cache time to determine whether we will get our # content again. 
age_in_sec = time.time() - self._cached_time if age_in_sec <= self.cache: # We have not expired; return False return False # If we reach here our configuration should be considered # missing and/or expired. return True @staticmethod def __normalize_tag_groups(group_tags): """ Used to normalize a tag assign map which looks like: { 'group': set('{tag1}', '{group1}', '{tag2}'), 'group1': set('{tag2}','{tag3}'), } Then normalized it (merging groups); with respect to the above, the output would be: { 'group': set('{tag1}', '{tag2}', '{tag3}), 'group1': set('{tag2}','{tag3}'), } """ # Prepare a key set list we can use tag_groups = set([str(x) for x in group_tags.keys()]) def _expand(tags, ignore=None): """ Expands based on tag provided and returns a set this also updates the group_tags while it goes """ # Prepare ourselves a return set results = set() ignore = set() if ignore is None else ignore # track groups groups = set() for tag in tags: if tag in ignore: continue # Track our groups groups.add(tag) # Store what we know is worth keeping if tag not in group_tags: # pragma: no cover # handle cases where the tag doesn't exist group_tags[tag] = set() results |= group_tags[tag] - tag_groups # Get simple tag assignments found = group_tags[tag] & tag_groups if not found: continue for gtag in found: if gtag in ignore: continue # Go deeper (recursion) ignore.add(tag) group_tags[gtag] = _expand(set([gtag]), ignore=ignore) results |= group_tags[gtag] # Pop ignore ignore.remove(tag) return results for tag in tag_groups: # Get our tags group_tags[tag] |= _expand(set([tag])) if not group_tags[tag]: ConfigBase.logger.warning( 'The group {} has no tags assigned to it'.format(tag)) del group_tags[tag] @staticmethod def parse_url(url, verify_host=True): """Parses the URL and returns it broken apart into a dictionary. This is very specific and customized for Apprise. Args: url (str): The URL you want to fully parse. verify_host (:obj:`bool`, optional): a flag kept with the parsed URL which some child classes will later use to verify SSL keys (if SSL transactions take place). Unless under very specific circumstances, it is strongly recomended that you leave this default value set to True. Returns: A dictionary is returned containing the URL fully parsed if successful, otherwise None is returned. """ results = URLBase.parse_url(url, verify_host=verify_host) if not results: # We're done; we failed to parse our url return results # Allow overriding the default config format if 'format' in results['qsd']: results['format'] = results['qsd'].get('format') if results['format'] not in common.CONFIG_FORMATS: URLBase.logger.warning( 'Unsupported format specified {}'.format( results['format'])) del results['format'] # Defines the encoding of the payload if 'encoding' in results['qsd']: results['encoding'] = results['qsd'].get('encoding') # Our cache value if 'cache' in results['qsd']: # First try to get it's integer value try: results['cache'] = int(results['qsd']['cache']) except (ValueError, TypeError): # No problem, it just isn't an integer; now treat it as a bool # instead: results['cache'] = parse_bool(results['qsd']['cache']) return results @staticmethod def detect_config_format(content, **kwargs): """ Takes the specified content and attempts to detect the format type The function returns the actual format type if detected, otherwise it returns None """ # Detect Format Logic: # - A pound/hashtag (#) is alawys a comment character so we skip over # lines matched here. 
# - Detection begins on the first non-comment and non-blank line # matched. # - If we find a string followed by a colon, we know we're dealing # with a YAML file. # - If we find a string that starts with a URL, or our tag # definitions (accepting commas) followed by an equal sign we know # we're dealing with a TEXT format. # Define what a valid line should look like valid_line_re = re.compile( r'^\s*(?P<line>([;#]+(?P<comment>.*))|' r'(?P<text>((?P<tag>[ \t,a-z0-9_-]+)=)?[a-z0-9]+://.*)|' r'((?P<yaml>[a-z0-9]+):.*))?$', re.I) try: # split our content up to read line by line content = re.split(r'\r*\n', content) except TypeError: # content was not expected string type ConfigBase.logger.error( 'Invalid Apprise configuration specified.') return None # By default set our return value to None since we don't know # what the format is yet config_format = None # iterate over each line of the file to attempt to detect it # stop the moment the type has been determined for line, entry in enumerate(content, start=1): result = valid_line_re.match(entry) if not result: # Invalid syntax ConfigBase.logger.error( 'Undetectable Apprise configuration found ' 'based on line {}.'.format(line)) # Take an early exit return None # Attempt to detect configuration if result.group('yaml'): config_format = common.ConfigFormat.YAML ConfigBase.logger.debug( 'Detected YAML configuration ' 'based on line {}.'.format(line)) break elif result.group('text'): config_format = common.ConfigFormat.TEXT ConfigBase.logger.debug( 'Detected TEXT configuration ' 'based on line {}.'.format(line)) break # If we reach here, we have a comment entry # Adjust default format to TEXT config_format = common.ConfigFormat.TEXT return config_format @staticmethod def config_parse(content, asset=None, config_format=None, **kwargs): """ Takes the specified config content and loads it based on the specified config_format. If a format isn't specified, then it is auto detected. """ if config_format is None: # Detect the format config_format = ConfigBase.detect_config_format(content) if not config_format: # We couldn't detect configuration ConfigBase.logger.error('Could not detect configuration') return (list(), list()) if config_format not in common.CONFIG_FORMATS: # Invalid configuration type specified ConfigBase.logger.error( 'An invalid configuration format ({}) was specified'.format( config_format)) return (list(), list()) # Dynamically load our parse_ function based on our config format fn = getattr(ConfigBase, 'config_parse_{}'.format(config_format)) # Execute our config parse function which always returns a list return fn(content=content, asset=asset) @staticmethod def config_parse_text(content, asset=None): """ Parse the specified content as though it were a simple text file only containing a list of URLs. Return a tuple that looks like (servers, configs) where: - servers contains a list of loaded notification plugins - configs contains a list of additional configuration files referenced. You may also optionally associate an asset with the notification. The file syntax is: # # pound/hashtag allow for line comments # # One or more tags can be identified using commas (,) to separate # them. <Tag(s)>=<URL> # Or you can use this format (no tags associated) <URL> # you can also use the keyword 'include' and identify a # configuration location (like this file) which will be included # as additional configuration entries when loaded. include <ConfigURL> # Assign tag contents to a group identifier <Group(s)>=<Tag(s)> """ # A list of loaded Notification Services servers = list() # A list of additional configuration files referenced using # the include keyword configs = list() # Track all of the tags we want to assign later on group_tags = {} # Track our entries to preload preloaded = [] # Prepare our Asset Object asset = asset if isinstance(asset, AppriseAsset) else AppriseAsset() # Define what a valid line should look like valid_line_re = re.compile( r'^\s*(?P<line>([;#]+(?P<comment>.*))|' r'(\s*(?P<tags>[a-z0-9, \t_-]+)\s*=|=)?\s*' r'((?P<url>[a-z0-9]{1,12}://.*)|(?P<assign>[a-z0-9, \t_-]+))|' r'include\s+(?P<config>.+))?\s*$', re.I) try: # split our content up to read line by line content = re.split(r'\r*\n', content) except TypeError: # content was not expected string type ConfigBase.logger.error( 'Invalid Apprise TEXT based configuration specified.') return (list(), list()) for line, entry in enumerate(content, start=1): result = valid_line_re.match(entry) if not result: # Invalid syntax ConfigBase.logger.error( 'Invalid Apprise TEXT configuration format found ' '{} on line {}.'.format(entry, line)) # Assume this is a file we shouldn't be parsing. Its owner # can read the error printed to screen and take action # otherwise. return (list(), list()) # Retrieve our line url, assign, config = \ result.group('url'), \ result.group('assign'), \ result.group('config') if not (url or config or assign): # Comment/empty line; do nothing continue if config: # CWE-312 (Secure Logging) Handling loggable_url = config if not asset.secure_logging \ else cwe312_url(config) ConfigBase.logger.debug( 'Include URL: {}'.format(loggable_url)) # Store our include line configs.append(config.strip()) continue # CWE-312 (Secure Logging) Handling loggable_url = url if not asset.secure_logging \ else cwe312_url(url) if assign: groups = set(parse_list(result.group('tags'), cast=str)) if not groups: # no tags were assigned ConfigBase.logger.warning( 'Unparseable tag assignment - no group(s) ' 'on line {}'.format(line)) continue # Get our tags tags = set(parse_list(assign, cast=str)) if not tags: # no tags were assigned ConfigBase.logger.warning( 'Unparseable tag assignment - no tag(s) to assign ' 'on line {}'.format(line)) continue # Update our tag group map for tag_group in groups: if tag_group not in group_tags: group_tags[tag_group] = set() # ensure our tag group is never included in the assignment group_tags[tag_group] |= tags - set([tag_group]) continue # Acquire our url tokens results = plugins.url_to_dict( url, secure_logging=asset.secure_logging) if results is None: # Failed to parse the server URL ConfigBase.logger.warning( 'Unparseable URL {} on line {}.'.format( loggable_url, line)) continue # Build a list of tags to associate with the newly added # notifications if any were set results['tag'] = set(parse_list(result.group('tags'), cast=str)) # Set our Asset Object results['asset'] = asset # Store our preloaded entries preloaded.append({ 'results': results, 'line': line, 'loggable_url': loggable_url, }) # # Normalize Tag Groups # - Expand Groups of Groups so that they don't exist # ConfigBase.__normalize_tag_groups(group_tags) # # URL Processing # for entry in preloaded: # Point to our results entry for easier reference below results = entry['results'] # # Apply our tag groups if they're defined # for group, tags in group_tags.items(): # Detect if anything assigned to this tag also maps back to a # group. 
If so we want to add the group to our list if next((True for tag in results['tag'] if tag in tags), False): results['tag'].add(group) try: # Attempt to create an instance of our plugin using the # parsed URL information plugin = N_MGR[results['schema']](**results) # Create log entry of loaded URL ConfigBase.logger.debug( 'Loaded URL: %s', plugin.url( privacy=results['asset'].secure_logging)) except Exception as e: # the arguments are invalid or can not be used. ConfigBase.logger.warning( 'Could not load URL {} on line {}.'.format( entry['loggable_url'], entry['line'])) ConfigBase.logger.debug('Loading Exception: %s' % str(e)) continue # if we reach here, we successfully loaded our data servers.append(plugin) # Return what was loaded return (servers, configs) @staticmethod def config_parse_yaml(content, asset=None): """ Parse the specified content as though it were a yaml file specifically formatted for Apprise. Return a tuple that looks like (servers, configs) where: - servers contains a list of loaded notification plugins - configs contains a list of additional configuration files referenced. You may optionally associate an asset with the notification. """ # A list of loaded Notification Services servers = list() # A list of additional configuration files referenced using # the include keyword configs = list() # Group Assignments group_tags = {} # Track our entries to preload preloaded = [] try: # Load our data (safely) result = yaml.load(content, Loader=yaml.SafeLoader) except (AttributeError, yaml.parser.ParserError, yaml.error.MarkedYAMLError) as e: # Invalid content ConfigBase.logger.error( 'Invalid Apprise YAML data specified.') ConfigBase.logger.debug( 'YAML Exception:{}{}'.format(os.linesep, e)) return (list(), list()) if not isinstance(result, dict): # Invalid content ConfigBase.logger.error( 'Invalid Apprise YAML based configuration specified.') return (list(), list()) # YAML Version version = result.get('version', 1) if version != 1: # Invalid syntax ConfigBase.logger.error( 'Invalid Apprise YAML version specified {}.'.format(version)) return (list(), list()) # # global asset object # asset = asset if isinstance(asset, AppriseAsset) else AppriseAsset() tokens = result.get('asset', None) if tokens and isinstance(tokens, dict): for k, v in tokens.items(): if k.startswith('_') or k.endswith('_'): # Entries are considered reserved if they start or end # with an underscore ConfigBase.logger.warning( 'Ignored asset key "{}".'.format(k)) continue if not (hasattr(asset, k) and isinstance(getattr(asset, k), (bool, str))): # We can't set a function or non-string set value ConfigBase.logger.warning( 'Invalid asset key "{}".'.format(k)) continue if v is None: # Convert to an empty string v = '' if (isinstance(v, (bool, str)) and isinstance(getattr(asset, k), bool)): # If the object in the Asset is a boolean, then # we want to convert the specified string to # match that. 
setattr(asset, k, parse_bool(v)) elif isinstance(v, str): # Set our asset object with the new value setattr(asset, k, v.strip()) else: # we must set strings with a string ConfigBase.logger.warning( 'Invalid asset value to "{}".'.format(k)) continue # # global tag root directive # global_tags = set() tags = result.get('tag', None) if tags and isinstance(tags, (list, tuple, str)): # Store any preset tags global_tags = set(parse_list(tags, cast=str)) # # groups root directive # groups = result.get('groups', None) if isinstance(groups, dict): # # Dictionary # for _groups, tags in groups.items(): for group in parse_list(_groups, cast=str): if isinstance(tags, (list, tuple)): _tags = set() for e in tags: if isinstance(e, dict): _tags |= set(e.keys()) else: _tags |= set(parse_list(e, cast=str)) # Final assignment tags = _tags else: tags = set(parse_list(tags, cast=str)) if group not in group_tags: group_tags[group] = tags else: group_tags[group] |= tags elif isinstance(groups, (list, tuple)): # # List of Dictionaries # # Iterate over each group defined and store it for no, entry in enumerate(groups): if not isinstance(entry, dict): ConfigBase.logger.warning( 'No assignment for group {}, entry #{}'.format( entry, no + 1)) continue for _groups, tags in entry.items(): for group in parse_list(_groups, cast=str): if isinstance(tags, (list, tuple)): _tags = set() for e in tags: if isinstance(e, dict): _tags |= set(e.keys()) else: _tags |= set(parse_list(e, cast=str)) # Final assignment tags = _tags else: tags = set(parse_list(tags, cast=str)) if group not in group_tags: group_tags[group] = tags else: group_tags[group] |= tags # include root directive # includes = result.get('include', None) if isinstance(includes, str): # Support a single inline string or multiple ones separated by a # comma and/or space includes = parse_urls(includes) elif not isinstance(includes, (list, tuple)): # Not a problem; we simply have no includes includes = list() # Iterate over each config URL for no, url in enumerate(includes): if isinstance(url, str): # Support a single inline string or multiple ones separated by # a comma and/or space configs.extend(parse_urls(url)) elif isinstance(url, dict): # Store the url and ignore arguments associated configs.extend(u for u in url.keys()) # # urls root directive # urls = result.get('urls', None) if not isinstance(urls, (list, tuple)): # Not a problem; we simply have no urls urls = list() # Iterate over each URL for no, url in enumerate(urls): # Our results object is what we use to instantiate our object if # we can. Reset it to None on each iteration results = list() # CWE-312 (Secure Logging) Handling loggable_url = url if not asset.secure_logging \ else cwe312_url(url) if isinstance(url, str): # We're just a simple URL string... schema = GET_SCHEMA_RE.match(url) if schema is None: # Log invalid entries so that maintainer of config # config file at least has something to take action # with. ConfigBase.logger.warning( 'Invalid URL {}, entry #{}'.format( loggable_url, no + 1)) continue # We found a valid schema worthy of tracking; store it's # details: _results = plugins.url_to_dict( url, secure_logging=asset.secure_logging) if _results is None: ConfigBase.logger.warning( 'Unparseable URL {}, entry #{}'.format( loggable_url, no + 1)) continue # add our results to our global set results.append(_results) elif isinstance(url, dict): # We are a url string with additional unescaped options. 
In # this case we want to iterate over all of our options so we # can at least tell the end user what entries were ignored # due to errors it = iter(url.items()) # Track the URL to-load _url = None # Track last acquired schema schema = None for key, tokens in it: # Test our schema _schema = GET_SCHEMA_RE.match(key) if _schema is None: # Log invalid entries so that maintainer of config # config file at least has something to take action # with. ConfigBase.logger.warning( 'Ignored entry {} found under urls, entry #{}' .format(key, no + 1)) continue # Store our schema schema = _schema.group('schema').lower() # Store our URL and Schema Regex _url = key if _url is None: # the loop above failed to match anything ConfigBase.logger.warning( 'Unsupported URL, entry #{}'.format(no + 1)) continue _results = plugins.url_to_dict( _url, secure_logging=asset.secure_logging) if _results is None: # Setup dictionary _results = { # Minimum requirements 'schema': schema, } if isinstance(tokens, (list, tuple, set)): # populate and/or override any results populated by # parse_url() for entries in tokens: # Copy ourselves a template of our parsed URL as a base # to work with r = _results.copy() # We are a url string with additional unescaped options if isinstance(entries, dict): _url, tokens = next(iter(url.items())) # Tags you just can't over-ride if 'schema' in entries: del entries['schema'] # support our special tokens (if they're present) if schema in N_MGR: entries = ConfigBase._special_token_handler( schema, entries) # Extend our dictionary with our new entries r.update(entries) # add our results to our global set results.append(r) elif isinstance(tokens, dict): # support our special tokens (if they're present) if schema in N_MGR: tokens = ConfigBase._special_token_handler( schema, tokens) # Copy ourselves a template of our parsed URL as a base to # work with r = _results.copy() # add our result set r.update(tokens) # add our results to our global set results.append(r) else: # add our results to our global set results.append(_results) else: # Unsupported ConfigBase.logger.warning( 'Unsupported Apprise YAML entry #{}'.format(no + 1)) continue # Track our entries entry = 0 while len(results): # Increment our entry count entry += 1 # Grab our first item _results = results.pop(0) if _results['schema'] not in N_MGR: # the arguments are invalid or can not be used. ConfigBase.logger.warning( 'An invalid Apprise schema ({}) in YAML configuration ' 'entry #{}, item #{}' .format(_results['schema'], no + 1, entry)) continue # tag is a special keyword that is managed by Apprise object. 
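            # (Illustrative) The merge below is a plain set union; for
            # example, an entry level `tag: alerts, devops` combined with a
            # global `tag: admin` resolves to {'alerts', 'devops', 'admin'}.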
# The below ensures our tags are set correctly if 'tag' in _results: # Tidy our list up _results['tag'] = set( parse_list(_results['tag'], cast=str)) | global_tags else: # Just use the global settings _results['tag'] = global_tags for key in list(_results.keys()): # Strip out any tokens we know that we can't accept and # warn the user match = VALID_TOKEN.match(key) if not match: ConfigBase.logger.warning( 'Ignoring invalid token ({}) found in YAML ' 'configuration entry #{}, item #{}' .format(key, no + 1, entry)) del _results[key] ConfigBase.logger.trace( 'URL #{}: {} unpacked as:{}{}' .format(no + 1, url, os.linesep, os.linesep.join( ['{}="{}"'.format(k, a) for k, a in _results.items()]))) # Prepare our Asset Object _results['asset'] = asset # Handle post processing of result set _results = URLBase.post_process_parse_url_results(_results) # Store our preloaded entries preloaded.append({ 'results': _results, 'entry': no + 1, 'item': entry, }) # # Normalize Tag Groups # - Expand Groups of Groups so that they don't exist # ConfigBase.__normalize_tag_groups(group_tags) # # URL Processing # for entry in preloaded: # Point to our results entry for easier reference below results = entry['results'] # # Apply our tag groups if they're defined # for group, tags in group_tags.items(): # Detect if anything assigned to this tag also maps back to a # group. If so we want to add the group to our list if next((True for tag in results['tag'] if tag in tags), False): results['tag'].add(group) # Now we generate our plugin try: # Attempt to create an instance of our plugin using the # parsed URL information plugin = N_MGR[results['schema']](**results) # Create log entry of loaded URL ConfigBase.logger.debug( 'Loaded URL: %s', plugin.url( privacy=results['asset'].secure_logging)) except Exception as e: # the arguments are invalid or can not be used. ConfigBase.logger.warning( 'Could not load Apprise YAML configuration ' 'entry #{}, item #{}' .format(entry['entry'], entry['item'])) ConfigBase.logger.debug('Loading Exception: %s' % str(e)) continue # if we reach here, we successfully loaded our data servers.append(plugin) return (servers, configs) def pop(self, index=-1): """ Removes an indexed Notification Service from the stack and returns it. By default, the last element of the list is removed. """ if not isinstance(self._cached_servers, list): # Generate ourselves a list of content we can pull from self.servers() # Pop the element off of the stack return self._cached_servers.pop(index) @staticmethod def _special_token_handler(schema, tokens): """ This function takes a list of tokens and updates them to no longer include any special tokens such as +,-, and : - schema must be a valid schema of a supported plugin type - tokens must be a dictionary containing the yaml entries parsed. The idea here is we can post process a set of tokens provided in a YAML file where the user provided some of the special keywords. 
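
        For example (illustrative only), a YAML entry such as:

            urls:
              - json://localhost:
                  - +X-My-Header: value

        carries the special '+' prefix; the logic below folds it into the
        plugin's `headers` keyword as {'X-My-Header': 'value'} before the
        plugin is instantiated.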
We effectivley look up what these keywords map to their appropriate value they're expected """ # Create a copy of our dictionary tokens = tokens.copy() for kw, meta in N_MGR[schema].template_kwargs.items(): # Determine our prefix: prefix = meta.get('prefix', '+') # Detect any matches matches = \ {k[1:]: str(v) for k, v in tokens.items() if k.startswith(prefix)} if not matches: # we're done with this entry continue if not isinstance(tokens.get(kw), dict): # Invalid; correct it tokens[kw] = dict() # strip out processed tokens tokens = {k: v for k, v in tokens.items() if not k.startswith(prefix)} # Update our entries tokens[kw].update(matches) # Now map our tokens accordingly to the class templates defined by # each service. # # This is specifically used for YAML file parsing. It allows a user to # define an entry such as: # # urls: # - mailto://user:pass@domain: # - to: user1@hotmail.com # - to: user2@hotmail.com # # Under the hood, the NotifyEmail() class does not parse the `to` # argument. It's contents needs to be mapped to `targets`. This is # defined in the class via the `template_args` and template_tokens` # section. # # This function here allows these mappings to take place within the # YAML file as independant arguments. class_templates = plugins.details(N_MGR[schema]) for key in list(tokens.keys()): if key not in class_templates['args']: # No need to handle non-arg entries continue # get our `map_to` and/or 'alias_of' value (if it exists) map_to = class_templates['args'][key].get( 'alias_of', class_templates['args'][key].get('map_to', '')) if map_to == key: # We're already good as we are now continue if map_to in class_templates['tokens']: meta = class_templates['tokens'][map_to] else: meta = class_templates['args'].get( map_to, class_templates['args'][key]) # Perform a translation/mapping if our code reaches here value = tokens[key] del tokens[key] # Detect if we're dealign with a list or not is_list = re.search( r'^list:.*', meta.get('type'), re.IGNORECASE) if map_to not in tokens: tokens[map_to] = [] if is_list \ else meta.get('default') elif is_list and not isinstance(tokens.get(map_to), list): # Convert ourselves to a list if we aren't already tokens[map_to] = [tokens[map_to]] # Type Conversion if re.search( r'^(choice:)?string', meta.get('type'), re.IGNORECASE) \ and not isinstance(value, str): # Ensure our format is as expected value = str(value) # Apply any further translations if required (absolute map) # This is the case when an arg maps to a token which further # maps to a different function arg on the class constructor abs_map = meta.get('map_to', map_to) # Set our token as how it was provided by the configuration if isinstance(tokens.get(map_to), list): tokens[abs_map].append(value) else: tokens[abs_map] = value # Return our tokens return tokens def __getitem__(self, index): """ Returns the indexed server entry associated with the loaded notification servers """ if not isinstance(self._cached_servers, list): # Generate ourselves a list of content we can pull from self.servers() return self._cached_servers[index] def __iter__(self): """ Returns an iterator to our server list """ if not isinstance(self._cached_servers, list): # Generate ourselves a list of content we can pull from self.servers() return iter(self._cached_servers) def __len__(self): """ Returns the total number of servers loaded """ if not isinstance(self._cached_servers, list): # Generate ourselves a list of content we can pull from self.servers() return len(self._cached_servers) def __bool__(self): """ 
Allows the Apprise object to be wrapped in an 'if statement'. True is returned if our content was downloaded correctly. """ if not isinstance(self._cached_servers, list): # Generate ourselves a list of content we can pull from self.servers() return True if self._cached_servers else False apprise-1.9.3/apprise/config/base.pyi000066400000000000000000000000661477231770000175460ustar00rootroot00000000000000from .. import URLBase class ConfigBase(URLBase): ...apprise-1.9.3/apprise/config/file.py000066400000000000000000000141451477231770000174050ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import re import os from .base import ConfigBase from ..utils.disk import path_decode from ..common import ConfigFormat from ..common import ContentIncludeMode from ..locale import gettext_lazy as _ class ConfigFile(ConfigBase): """ A wrapper for File based configuration sources """ # The default descriptive name associated with the service service_name = _('Local File') # The default protocol protocol = 'file' # Configuration file inclusion can only be of the same type allow_cross_includes = ContentIncludeMode.STRICT def __init__(self, path, **kwargs): """ Initialize File Object headers can be a dictionary of key/value pairs that you want to additionally include as part of the server headers to post with """ super().__init__(**kwargs) # Store our file path as it was set self.path = path_decode(path) # Track the file as it was saved self.__original_path = os.path.normpath(path) # Update the config path to be relative to our file we just loaded self.config_path = os.path.dirname(self.path) return def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
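
        For example (illustrative), a configuration file loaded from
        /etc/apprise.yml is re-assembled into something like:

            file:///etc/apprise.yml?encoding=utf-8&cache=yes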
""" # Prepare our cache value if isinstance(self.cache, bool) or not self.cache: cache = 'yes' if self.cache else 'no' else: cache = int(self.cache) # Define any URL parameters params = { 'encoding': self.encoding, 'cache': cache, } if self.config_format: # A format was enforced; make sure it's passed back with the url params['format'] = self.config_format return 'file://{path}{params}'.format( path=self.quote(self.__original_path), params='?{}'.format(self.urlencode(params)) if params else '', ) def read(self, **kwargs): """ Perform retrieval of the configuration based on the specified request """ response = None try: if self.max_buffer_size > 0 and \ os.path.getsize(self.path) > self.max_buffer_size: # Content exceeds maximum buffer size self.logger.error( 'File size exceeds maximum allowable buffer length' ' ({}KB).'.format(int(self.max_buffer_size / 1024))) return None except OSError: # getsize() can throw this acception if the file is missing # and or simply isn't accessible self.logger.error( 'File is not accessible: {}'.format(self.path)) return None # Always call throttle before any server i/o is made self.throttle() try: with open(self.path, "rt", encoding=self.encoding) as f: # Store our content for parsing response = f.read() except (ValueError, UnicodeDecodeError): # A result of our strict encoding check; if we receive this # then the file we're opening is not something we can # understand the encoding of.. self.logger.error( 'File not using expected encoding ({}) : {}'.format( self.encoding, self.path)) return None except (IOError, OSError): # IOError is present for backwards compatibility with Python # versions older then 3.3. >= 3.3 throw OSError now. # Could not open and/or read the file; this is not a problem since # we scan a lot of default paths. self.logger.error( 'File can not be opened for read: {}'.format(self.path)) return None # Detect config format based on file extension if it isn't already # enforced if self.config_format is None and \ re.match(r'^.*\.ya?ml\s*$', self.path, re.I) is not None: # YAML Filename Detected self.default_config_format = ConfigFormat.YAML # Return our response object return response @staticmethod def parse_url(url): """ Parses the URL so that we can handle all different file paths and return it as our path object """ results = ConfigBase.parse_url(url, verify_host=False) if not results: # We're done early; it's not a good URL return results match = re.match(r'[a-z0-9]+://(?P[^?]+)(\?.*)?', url, re.I) if not match: return None results['path'] = ConfigFile.unquote(match.group('path')) return results apprise-1.9.3/apprise/config/http.py000066400000000000000000000223401477231770000174410ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. 
# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import re import requests from .base import ConfigBase from ..common import ConfigFormat from ..common import ContentIncludeMode from ..url import PrivacyMode from ..locale import gettext_lazy as _ # Support YAML formats # text/yaml # text/x-yaml # application/yaml # application/x-yaml MIME_IS_YAML = re.compile('(text|application)/(x-)?yaml', re.I) # Support TEXT formats # text/plain # text/html MIME_IS_TEXT = re.compile('text/(plain|html)', re.I) class ConfigHTTP(ConfigBase): """ A wrapper for HTTP based configuration sources """ # The default descriptive name associated with the service service_name = _('Web Based') # The default protocol protocol = 'http' # The default secure protocol secure_protocol = 'https' # If an HTTP error occurs, define the number of characters you still want # to read back. This is useful for debugging purposes, but nothing else. # The idea behind enforcing this kind of restriction is to prevent abuse # from queries to services that may be untrusted. max_error_buffer_size = 2048 # Configuration file inclusion can always include this type allow_cross_includes = ContentIncludeMode.ALWAYS def __init__(self, headers=None, **kwargs): """ Initialize HTTP Object headers can be a dictionary of key/value pairs that you want to additionally include as part of the server headers to post with """ super().__init__(**kwargs) self.schema = 'https' if self.secure else 'http' self.fullpath = kwargs.get('fullpath') if not isinstance(self.fullpath, str): self.fullpath = '/' self.headers = {} if headers: # Store our extra headers self.headers.update(headers) return def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
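
        For example (illustrative), a remote source fetched from
        https://config.example.com/apprise.yml is re-assembled into roughly:

            https://config.example.com/apprise.yml/?encoding=utf-8&cache=yes

        (additional query parameters, such as verify=, may also be appended)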
""" # Prepare our cache value if isinstance(self.cache, bool) or not self.cache: cache = 'yes' if self.cache else 'no' else: cache = int(self.cache) # Define any arguments set params = { 'encoding': self.encoding, 'cache': cache, } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) if self.config_format: # A format was enforced; make sure it's passed back with the url params['format'] = self.config_format # Append our headers into our args params.update({'+{}'.format(k): v for k, v in self.headers.items()}) # Determine Authentication auth = '' if self.user and self.password: auth = '{user}:{password}@'.format( user=self.quote(self.user, safe=''), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), ) elif self.user: auth = '{user}@'.format( user=self.quote(self.user, safe=''), ) default_port = 443 if self.secure else 80 return '{schema}://{auth}{hostname}{port}{fullpath}/?{params}'.format( schema=self.secure_protocol if self.secure else self.protocol, auth=auth, hostname=self.quote(self.host, safe=''), port='' if self.port is None or self.port == default_port else ':{}'.format(self.port), fullpath=self.quote(self.fullpath, safe='/'), params=self.urlencode(params), ) def read(self, **kwargs): """ Perform retrieval of the configuration based on the specified request """ # prepare XML Object headers = { 'User-Agent': self.app_id, } # Apply any/all header over-rides defined headers.update(self.headers) auth = None if self.user: auth = (self.user, self.password) url = '%s://%s' % (self.schema, self.host) if isinstance(self.port, int): url += ':%d' % self.port url += self.fullpath self.logger.debug('HTTP POST URL: %s (cert_verify=%r)' % ( url, self.verify_certificate, )) # Prepare our response object response = None # Where our request object will temporarily live. 
r = None # Always call throttle before any remote server i/o is made self.throttle() try: # Make our request with requests.post( url, headers=headers, auth=auth, verify=self.verify_certificate, timeout=self.request_timeout, stream=True) as r: # Handle Errors r.raise_for_status() # Get our file-size (if known) try: file_size = int(r.headers.get('Content-Length', '0')) except (TypeError, ValueError): # Handle edge case where Content-Length is a bad value file_size = 0 # Store our response if self.max_buffer_size > 0 \ and file_size > self.max_buffer_size: # Provide warning of data truncation self.logger.error( 'HTTP config response exceeds maximum buffer length ' '({}KB);'.format(int(self.max_buffer_size / 1024))) # Return None - buffer execeeded return None # Store our result (but no more than our buffer length) response = r.text[:self.max_buffer_size + 1] # Verify that our content did not exceed the buffer size: if len(response) > self.max_buffer_size: # Provide warning of data truncation self.logger.error( 'HTTP config response exceeds maximum buffer length ' '({}KB);'.format(int(self.max_buffer_size / 1024))) # Return None - buffer execeeded return None # Detect config format based on mime if the format isn't # already enforced content_type = r.headers.get( 'Content-Type', 'application/octet-stream') if self.config_format is None and content_type: if MIME_IS_YAML.match(content_type) is not None: # YAML data detected based on header content self.default_config_format = ConfigFormat.YAML elif MIME_IS_TEXT.match(content_type) is not None: # TEXT data detected based on header content self.default_config_format = ConfigFormat.TEXT except requests.RequestException as e: self.logger.error( 'A Connection error occurred retrieving HTTP ' 'configuration from %s.' % self.host) self.logger.debug('Socket Exception: %s' % str(e)) # Return None (signifying a failure) return None # Return our response object return response @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = ConfigBase.parse_url(url) if not results: # We're done early as we couldn't load the results return results # Add our headers that the user can potentially over-ride if they wish # to to our returned result set results['headers'] = results['qsd-'] results['headers'].update(results['qsd+']) return results apprise-1.9.3/apprise/config/memory.py000066400000000000000000000054001477231770000177700ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. from .base import ConfigBase from ..locale import gettext_lazy as _ class ConfigMemory(ConfigBase): """ For information that was loaded from memory and does not persist anywhere. """ # The default descriptive name associated with the service service_name = _('Memory') # The default protocol protocol = 'memory' def __init__(self, content, **kwargs): """ Initialize Memory Object Memory objects just store the raw configuration in memory. There is no external reference point. It's always considered cached. """ super().__init__(**kwargs) # Store our raw config into memory self.content = content if self.config_format is None: # Detect our format if possible self.config_format = \ ConfigMemory.detect_config_format(self.content) return def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ return 'memory://' def read(self, **kwargs): """ Simply return content stored into memory """ return self.content @staticmethod def parse_url(url): """ Memory objects have no parseable URL """ # These URLs can not be parsed return None apprise-1.9.3/apprise/conversion.py000066400000000000000000000143161477231770000174060ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import re from markdown import markdown from .common import NotifyFormat from .url import URLBase from html.parser import HTMLParser def convert_between(from_format, to_format, content): """ Converts between different suported formats. If no conversion exists, or the selected one fails, the original text will be returned. 
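
    For example (illustrative):

        convert_between(
            NotifyFormat.MARKDOWN, NotifyFormat.HTML, '# Hello')

    is expected to yield '<h1>Hello</h1>', while an unsupported pairing
    simply returns the content unchanged.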
This function returns the content translated (if required) """ converters = { (NotifyFormat.MARKDOWN, NotifyFormat.HTML): markdown_to_html, (NotifyFormat.TEXT, NotifyFormat.HTML): text_to_html, (NotifyFormat.HTML, NotifyFormat.TEXT): html_to_text, # For now; use same converter for Markdown support (NotifyFormat.HTML, NotifyFormat.MARKDOWN): html_to_text, } convert = converters.get((from_format, to_format)) return convert(content) if convert else content def markdown_to_html(content): """ Converts specified content from markdown to HTML. """ return markdown(content, extensions=[ 'markdown.extensions.nl2br', 'markdown.extensions.tables']) def text_to_html(content): """ Converts specified content from plain text to HTML. """ # First eliminate any carriage returns return URLBase.escape_html(content, convert_new_lines=True) def html_to_text(content): """ Converts a content from HTML to plain text. """ parser = HTMLConverter() parser.feed(content) parser.close() return parser.converted class HTMLConverter(HTMLParser, object): """An HTML to plain text converter tuned for email messages.""" # The following tags must start on a new line BLOCK_TAGS = ('p', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'div', 'td', 'th', 'code', 'pre', 'label', 'li',) # the folowing tags ignore any internal text IGNORE_TAGS = ( 'form', 'input', 'textarea', 'select', 'ul', 'ol', 'style', 'link', 'meta', 'title', 'html', 'head', 'script') # Condense Whitespace WS_TRIM = re.compile(r'[\s]+', re.DOTALL | re.MULTILINE) # Sentinel value for block tag boundaries, which may be consolidated into a # single line break. BLOCK_END = {} def __init__(self, **kwargs): super().__init__(**kwargs) # Shoudl we store the text content or not? self._do_store = True # Initialize internal result list self._result = [] # Initialize public result field (not populated until close() is # called) self.converted = "" def close(self): string = ''.join(self._finalize(self._result)) self.converted = string.strip() def _finalize(self, result): """ Combines and strips consecutive strings, then converts consecutive block ends into singleton newlines. [ {be} " Hello " {be} {be} " World!" ] -> "\nHello\nWorld!" """ # None means the last visited item was a block end. accum = None for item in result: if item == self.BLOCK_END: # Multiple consecutive block ends; do nothing. if accum is None: continue # First block end; yield the current string, plus a newline. yield accum.strip() + '\n' accum = None # Multiple consecutive strings; combine them. elif accum is not None: accum += item # First consecutive string; store it. else: accum = item # Yield the last string if we have not already done so. 
if accum is not None: yield accum.strip() def handle_data(self, data, *args, **kwargs): """ Store our data if it is not on the ignore list """ # initialize our previous flag if self._do_store: # Tidy our whitespace content = self.WS_TRIM.sub(' ', data) self._result.append(content) def handle_starttag(self, tag, attrs): """ Process our starting HTML Tag """ # Toggle initial states self._do_store = tag not in self.IGNORE_TAGS if tag in self.BLOCK_TAGS: self._result.append(self.BLOCK_END) if tag == 'li': self._result.append('- ') elif tag == 'br': self._result.append('\n') elif tag == 'hr': if self._result and isinstance(self._result[-1], str): self._result[-1] = self._result[-1].rstrip(' ') else: pass self._result.append('\n---\n') elif tag == 'blockquote': self._result.append(' >') def handle_endtag(self, tag): """ Edge case handling of open/close tags """ self._do_store = True if tag in self.BLOCK_TAGS: self._result.append(self.BLOCK_END) apprise-1.9.3/apprise/decorators/000077500000000000000000000000001477231770000170075ustar00rootroot00000000000000apprise-1.9.3/apprise/decorators/__init__.py000066400000000000000000000027171477231770000211270ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. from .notify import notify __all__ = [ 'notify' ] apprise-1.9.3/apprise/decorators/base.py000066400000000000000000000176241477231770000203050ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. 
# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE.USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. from ..plugins.base import NotifyBase from ..manager_plugins import NotificationManager from ..utils.parse import URL_DETAILS_RE, parse_url, url_assembly from ..utils.logic import dict_full_update from .. import common from ..logger import logger import inspect # Grant access to our Notification Manager Singleton N_MGR = NotificationManager() class CustomNotifyPlugin(NotifyBase): """ Apprise Custom Plugin Hook This gets initialized based on @notify decorator definitions """ # Our Custom notification service_url = 'https://github.com/caronc/apprise/wiki/Custom_Notification' # Over-ride our category since this inheritance of the NotifyBase class # should be treated differently. category = 'custom' # Support Attachments attachment_support = True # Allow persistent storage support storage_mode = common.PersistentStoreMode.AUTO # Define object templates templates = ( '{schema}://', ) @staticmethod def parse_url(url): """ Parses the URL and returns arguments retrieved """ return parse_url(url, verify_host=False, simple=True) def url(self, privacy=False, *args, **kwargs): """ General URL assembly """ return '{schema}://'.format(schema=self.secure_protocol) @staticmethod def instantiate_plugin(url, send_func, name=None): """ The function used to add a new notification plugin based on the schema parsed from the provided URL into our supported matrix structure. """ if not isinstance(url, str): msg = 'An invalid custom notify url/schema ({}) provided in ' \ 'function {}.'.format(url, send_func.__name__) logger.warning(msg) return None # Validate that our schema is okay re_match = URL_DETAILS_RE.match(url) if not re_match: msg = 'An invalid custom notify url/schema ({}) provided in ' \ 'function {}.'.format(url, send_func.__name__) logger.warning(msg) return None # Acquire our schema schema = re_match.group('schema').lower() if not re_match.group('base'): url = '{}://'.format(schema) # Keep a default set of arguments to apply to all called references base_args = parse_url( url, default_schema=schema, verify_host=False, simple=True) if schema in N_MGR: # we're already handling this object msg = 'The schema ({}) is already defined and could not be ' \ 'loaded from custom notify function {}.' 
\ .format(url, send_func.__name__) logger.warning(msg) return None # We define our own custom wrapper class so that we can initialize # some key default configuration values allowing calls to our # `Apprise.details()` to correctly differentiate one custom plugin # that was loaded from another class CustomNotifyPluginWrapper(CustomNotifyPlugin): # Our Service Name service_name = name if isinstance(name, str) \ and name else 'Custom - {}'.format(schema) # Store our matched schema secure_protocol = schema requirements = { # Define our required packaging in order to work 'details': "Source: {}".format(inspect.getfile(send_func)) } # Assign our send() function __send = staticmethod(send_func) # Update our default arguments _base_args = base_args def __init__(self, **kwargs): """ Our initialization """ # init parent super().__init__(**kwargs) self._default_args = {} # Some variables do not need to be set if 'secure' in kwargs: del kwargs['secure'] # Apply our updates based on what was parsed dict_full_update(self._default_args, self._base_args) dict_full_update(self._default_args, kwargs) # Update our arguments (applying them to what we originally) # initialized as self._default_args['url'] = url_assembly(**self._default_args) def send(self, body, title='', notify_type=common.NotifyType.INFO, *args, **kwargs): """ Our send() call which triggers our hook """ response = False try: # Enforce a boolean response result = self.__send( body, title, notify_type, *args, meta=self._default_args, **kwargs) if result is None: # The wrapper did not define a return (or returned # None) # this is treated as a successful return as it is # assumed the developer did not care about the result # of the call. response = True else: # Perform boolean check (allowing obects to also be # returned and check against the __bool__ call response = True if result else False except Exception as e: # Unhandled Exception self.logger.warning( 'An exception occured sending a %s notification.', N_MGR[self.secure_protocol].service_name) self.logger.debug( '%s Exception: %s', N_MGR[self.secure_protocol], str(e)) return False if response: self.logger.info( 'Sent %s notification.', N_MGR[self.secure_protocol].service_name) else: self.logger.warning( 'Failed to send %s notification.', N_MGR[self.secure_protocol].service_name) return response # Store our plugin into our core map file return N_MGR.add( plugin=CustomNotifyPluginWrapper, schemas=schema, send_func=send_func, url=url, ) apprise-1.9.3/apprise/decorators/notify.py000066400000000000000000000117511477231770000206760ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. from .base import CustomNotifyPlugin def notify(on, name=None): """ @notify decorator allows you to map functions you've defined to be loaded as a regular notify by Apprise. You must identify a protocol that users will trigger your call by. @notify(on="foobar") def your_declaration(body, title, notify_type, meta, *args, **kwargs): ... You can optionally provide the name to associate with the plugin which is what calling functions via the API will receive. @notify(on="foobar", name="My Foobar Process") def your_action(body, title, notify_type, meta, *args, **kwargs): ... The meta variable is actually the processed URL contents found in configuration files that landed you in this function you wrote in the first place. It's very easily tokenized already for you so that you can bend the notification logic to your hearts content. @notify(on="foobar", name="My Foobar Process") def your_action(body, title, notify_type, body_format, meta, attach, *args, **kwargs): ... Arguments break down as follows: body: The message body associated with the notification title: The message title associated with the notification notify_type: The message type (info, success, warning, and failure) body_format: The format of the incoming notification body. This is either text, html, or markdown. meta: Combines the URL arguments specified on the `on` call with the ones loaded from a users configuration. This is a dictionary that presents itself like this: { 'schema': 'http', 'url': 'http://hostname', 'host': 'hostname', 'user': 'john', 'password': 'doe', 'port': 80, 'path': '/', 'fullpath': '/test.php', 'query': 'test.php', 'qsd': {'key': 'value', 'key2': 'value2'}, 'asset': , 'tag': set(), } Meta entries are ONLY present if found. A simple URL such as foobar:// would only produce the following: { 'schema': 'foobar', 'url': 'foobar://', 'asset': , 'tag': set(), } attach: An array AppriseAttachment objects (if any were provided) body_format: Defaults to the expected format output; By default this will be TEXT unless over-ridden in the Apprise URL If you don't intend on using all of the parameters, your @notify() call # can be greatly simplified to just: @notify(on="foobar", name="My Foobar Process") def your_action(body, title, *args, **kwargs) Always end your wrappers declaration with *args and **kwargs to be future proof with newer versions of Apprise. Your wrapper should return True if processed the send() function as you expected and return False if not. If nothing is returned, then this is treated as as success (True). """ def wrapper(func): """ Instantiate our custom (notification) plugin """ # Generate CustomNotifyPlugin.instantiate_plugin( url=on, send_func=func, name=name) return func return wrapper apprise-1.9.3/apprise/emojis.py000066400000000000000000002532721477231770000165150ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. 
# Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import re import time from .logger import logger # All Emoji's are wrapped in this character DELIM = ':' # the map simply contains the emoji that should be mapped to the regular # expression it should be swapped on. # This list was based on: https://github.com/ikatyang/emoji-cheat-sheet EMOJI_MAP = { # # Face Smiling # DELIM + r'grinning' + DELIM: '😄', DELIM + r'smile' + DELIM: '😄', DELIM + r'(laughing|satisfied)' + DELIM: '😆', DELIM + r'rofl' + DELIM: '🤣', DELIM + r'slightly_smiling_face' + DELIM: '🙂', DELIM + r'wink' + DELIM: '😉', DELIM + r'innocent' + DELIM: '😇', DELIM + r'smiley' + DELIM: '😃', DELIM + r'grin' + DELIM: '😃', DELIM + r'sweat_smile' + DELIM: '😅', DELIM + r'joy' + DELIM: '😂', DELIM + r'upside_down_face' + DELIM: '🙃', DELIM + r'blush' + DELIM: '😊', # # Face Affection # DELIM + r'smiling_face_with_three_hearts' + DELIM: '🥰', DELIM + r'star_struck' + DELIM: '🤩', DELIM + r'kissing' + DELIM: '😗', DELIM + r'kissing_closed_eyes' + DELIM: '😚', DELIM + r'smiling_face_with_tear' + DELIM: '🥲', DELIM + r'heart_eyes' + DELIM: '😍', DELIM + r'kissing_heart' + DELIM: '😘', DELIM + r'relaxed' + DELIM: '☺️', DELIM + r'kissing_smiling_eyes' + DELIM: '😙', # # Face Tongue # DELIM + r'yum' + DELIM: '😋', DELIM + r'stuck_out_tongue_winking_eye' + DELIM: '😜', DELIM + r'stuck_out_tongue_closed_eyes' + DELIM: '😝', DELIM + r'stuck_out_tongue' + DELIM: '😛', DELIM + r'zany_face' + DELIM: '🤪', DELIM + r'money_mouth_face' + DELIM: '🤑', # # Face Hand # DELIM + r'hugs' + DELIM: '🤗', DELIM + r'shushing_face' + DELIM: '🤫', DELIM + r'hand_over_mouth' + DELIM: '🤭', DELIM + r'thinking' + DELIM: '🤔', # # Face Neutral Skeptical # DELIM + r'zipper_mouth_face' + DELIM: '🤐', DELIM + r'neutral_face' + DELIM: '😐', DELIM + r'no_mouth' + DELIM: '😶', DELIM + r'smirk' + DELIM: '😏', DELIM + r'roll_eyes' + DELIM: '🙄', DELIM + r'face_exhaling' + DELIM: '😮‍💨', DELIM + r'raised_eyebrow' + DELIM: '🤨', DELIM + r'expressionless' + DELIM: '😑', DELIM + r'face_in_clouds' + DELIM: '😶‍🌫️', DELIM + r'unamused' + DELIM: '😒', DELIM + r'grimacing' + DELIM: '😬', DELIM + r'lying_face' + DELIM: '🤥', # # Face Sleepy # DELIM + r'relieved' + DELIM: '😌', DELIM + r'sleepy' + DELIM: '😪', DELIM + r'sleeping' + DELIM: '😴', DELIM + r'pensive' + DELIM: '😔', DELIM + 
r'drooling_face' + DELIM: '🤤', # # Face Unwell # DELIM + r'mask' + DELIM: '😷', DELIM + r'face_with_head_bandage' + DELIM: '🤕', DELIM + r'vomiting_face' + DELIM: '🤮', DELIM + r'hot_face' + DELIM: '🥵', DELIM + r'woozy_face' + DELIM: '🥴', DELIM + r'face_with_spiral_eyes' + DELIM: '😵‍💫', DELIM + r'face_with_thermometer' + DELIM: '🤒', DELIM + r'nauseated_face' + DELIM: '🤢', DELIM + r'sneezing_face' + DELIM: '🤧', DELIM + r'cold_face' + DELIM: '🥶', DELIM + r'dizzy_face' + DELIM: '😵', DELIM + r'exploding_head' + DELIM: '🤯', # # Face Hat # DELIM + r'cowboy_hat_face' + DELIM: '🤠', DELIM + r'disguised_face' + DELIM: '🥸', DELIM + r'partying_face' + DELIM: '🥳', # # Face Glasses # DELIM + r'sunglasses' + DELIM: '😎', DELIM + r'monocle_face' + DELIM: '🧐', DELIM + r'nerd_face' + DELIM: '🤓', # # Face Concerned # DELIM + r'confused' + DELIM: '😕', DELIM + r'slightly_frowning_face' + DELIM: '🙁', DELIM + r'open_mouth' + DELIM: '😮', DELIM + r'astonished' + DELIM: '😲', DELIM + r'pleading_face' + DELIM: '🥺', DELIM + r'anguished' + DELIM: '😧', DELIM + r'cold_sweat' + DELIM: '😰', DELIM + r'cry' + DELIM: '😢', DELIM + r'scream' + DELIM: '😱', DELIM + r'persevere' + DELIM: '😣', DELIM + r'sweat' + DELIM: '😓', DELIM + r'tired_face' + DELIM: '😫', DELIM + r'worried' + DELIM: '😟', DELIM + r'frowning_face' + DELIM: '☹️', DELIM + r'hushed' + DELIM: '😯', DELIM + r'flushed' + DELIM: '😳', DELIM + r'frowning' + DELIM: '😦', DELIM + r'fearful' + DELIM: '😨', DELIM + r'disappointed_relieved' + DELIM: '😥', DELIM + r'sob' + DELIM: '😭', DELIM + r'confounded' + DELIM: '😖', DELIM + r'disappointed' + DELIM: '😞', DELIM + r'weary' + DELIM: '😩', DELIM + r'yawning_face' + DELIM: '🥱', # # Face Negative # DELIM + r'triumph' + DELIM: '😤', DELIM + r'angry' + DELIM: '😠', DELIM + r'smiling_imp' + DELIM: '😈', DELIM + r'skull' + DELIM: '💀', DELIM + r'(pout|rage)' + DELIM: '😡', DELIM + r'cursing_face' + DELIM: '🤬', DELIM + r'imp' + DELIM: '👿', DELIM + r'skull_and_crossbones' + DELIM: '☠️', # # Face Costume # DELIM + r'(hankey|poop|shit)' + DELIM: '💩', DELIM + r'japanese_ogre' + DELIM: '👹', DELIM + r'ghost' + DELIM: '👻', DELIM + r'space_invader' + DELIM: '👾', DELIM + r'clown_face' + DELIM: '🤡', DELIM + r'japanese_goblin' + DELIM: '👺', DELIM + r'alien' + DELIM: '👽', DELIM + r'robot' + DELIM: '🤖', # # Cat Face # DELIM + r'smiley_cat' + DELIM: '😺', DELIM + r'joy_cat' + DELIM: '😹', DELIM + r'smirk_cat' + DELIM: '😼', DELIM + r'scream_cat' + DELIM: '🙀', DELIM + r'pouting_cat' + DELIM: '😾', DELIM + r'smile_cat' + DELIM: '😸', DELIM + r'heart_eyes_cat' + DELIM: '😻', DELIM + r'kissing_cat' + DELIM: '😽', DELIM + r'crying_cat_face' + DELIM: '😿', # # Monkey Face # DELIM + r'see_no_evil' + DELIM: '🙈', DELIM + r'speak_no_evil' + DELIM: '🙊', DELIM + r'hear_no_evil' + DELIM: '🙉', # # Heart # DELIM + r'love_letter' + DELIM: '💌', DELIM + r'gift_heart' + DELIM: '💝', DELIM + r'heartpulse' + DELIM: '💗', DELIM + r'revolving_hearts' + DELIM: '💞', DELIM + r'heart_decoration' + DELIM: '💟', DELIM + r'broken_heart' + DELIM: '💔', DELIM + r'mending_heart' + DELIM: '❤️‍🩹', DELIM + r'orange_heart' + DELIM: '🧡', DELIM + r'green_heart' + DELIM: '💚', DELIM + r'purple_heart' + DELIM: '💜', DELIM + r'black_heart' + DELIM: '🖤', DELIM + r'cupid' + DELIM: '💘', DELIM + r'sparkling_heart' + DELIM: '💖', DELIM + r'heartbeat' + DELIM: '💓', DELIM + r'two_hearts' + DELIM: '💕', DELIM + r'heavy_heart_exclamation' + DELIM: '❣️', DELIM + r'heart_on_fire' + DELIM: '❤️‍🔥', DELIM + r'heart' + DELIM: '❤️', DELIM + r'yellow_heart' + DELIM: '💛', DELIM + r'blue_heart' + DELIM: '💙', DELIM + r'brown_heart' + 
DELIM: '🤎', DELIM + r'white_heart' + DELIM: '🤍', # # Emotion # DELIM + r'kiss' + DELIM: '💋', DELIM + r'anger' + DELIM: '💢', DELIM + r'dizzy' + DELIM: '💫', DELIM + r'dash' + DELIM: '💨', DELIM + r'speech_balloon' + DELIM: '💬', DELIM + r'left_speech_bubble' + DELIM: '🗨️', DELIM + r'thought_balloon' + DELIM: '💭', DELIM + r'100' + DELIM: '💯', DELIM + r'(boom|collision)' + DELIM: '💥', DELIM + r'sweat_drops' + DELIM: '💦', DELIM + r'hole' + DELIM: '🕳️', DELIM + r'eye_speech_bubble' + DELIM: '👁️‍🗨️', DELIM + r'right_anger_bubble' + DELIM: '🗯️', DELIM + r'zzz' + DELIM: '💤', # # Hand Fingers Open # DELIM + r'wave' + DELIM: '👋', DELIM + r'raised_hand_with_fingers_splayed' + DELIM: '🖐️', DELIM + r'vulcan_salute' + DELIM: '🖖', DELIM + r'raised_back_of_hand' + DELIM: '🤚', DELIM + r'(raised_)?hand' + DELIM: '✋', # # Hand Fingers Partial # DELIM + r'ok_hand' + DELIM: '👌', DELIM + r'pinched_fingers' + DELIM: '🤌', DELIM + r'pinching_hand' + DELIM: '🤏', DELIM + r'v' + DELIM: '✌️', DELIM + r'crossed_fingers' + DELIM: '🤞', DELIM + r'love_you_gesture' + DELIM: '🤟', DELIM + r'metal' + DELIM: '🤘', DELIM + r'call_me_hand' + DELIM: '🤙', # # Hand Single Finger # DELIM + r'point_left' + DELIM: '👈', DELIM + r'point_right' + DELIM: '👉', DELIM + r'point_up_2' + DELIM: '👆', DELIM + r'(fu|middle_finger)' + DELIM: '🖕', DELIM + r'point_down' + DELIM: '👇', DELIM + r'point_up' + DELIM: '☝️', # # Hand Fingers Closed # DELIM + r'(\+1|thumbsup)' + DELIM: '👍', DELIM + r'(-1|thumbsdown)' + DELIM: '👎', DELIM + r'fist' + DELIM: '✊', DELIM + r'(fist_(raised|oncoming)|(face)?punch)' + DELIM: '👊', DELIM + r'fist_left' + DELIM: '🤛', DELIM + r'fist_right' + DELIM: '🤜', # # Hands # DELIM + r'clap' + DELIM: '👏', DELIM + r'raised_hands' + DELIM: '🙌', DELIM + r'open_hands' + DELIM: '👐', DELIM + r'palms_up_together' + DELIM: '🤲', DELIM + r'handshake' + DELIM: '🤝', DELIM + r'pray' + DELIM: '🙏', # # Hand Prop # DELIM + r'writing_hand' + DELIM: '✍️', DELIM + r'nail_care' + DELIM: '💅', DELIM + r'selfie' + DELIM: '🤳', # # Body Parts # DELIM + r'muscle' + DELIM: '💪', DELIM + r'mechanical_arm' + DELIM: '🦾', DELIM + r'mechanical_leg' + DELIM: '🦿', DELIM + r'leg' + DELIM: '🦵', DELIM + r'foot' + DELIM: '🦶', DELIM + r'ear' + DELIM: '👂', DELIM + r'ear_with_hearing_aid' + DELIM: '🦻', DELIM + r'nose' + DELIM: '👃', DELIM + r'brain' + DELIM: '🧠', DELIM + r'anatomical_heart' + DELIM: '🫀', DELIM + r'lungs' + DELIM: '🫁', DELIM + r'tooth' + DELIM: '🦷', DELIM + r'bone' + DELIM: '🦴', DELIM + r'eyes' + DELIM: '👀', DELIM + r'eye' + DELIM: '👁️', DELIM + r'tongue' + DELIM: '👅', DELIM + r'lips' + DELIM: '👄', # # Person # DELIM + r'baby' + DELIM: '👶', DELIM + r'child' + DELIM: '🧒', DELIM + r'boy' + DELIM: '👦', DELIM + r'girl' + DELIM: '👧', DELIM + r'adult' + DELIM: '🧑', DELIM + r'blond_haired_person' + DELIM: '👱', DELIM + r'man' + DELIM: '👨', DELIM + r'bearded_person' + DELIM: '🧔', DELIM + r'man_beard' + DELIM: '🧔‍♂️', DELIM + r'woman_beard' + DELIM: '🧔‍♀️', DELIM + r'red_haired_man' + DELIM: '👨‍🦰', DELIM + r'curly_haired_man' + DELIM: '👨‍🦱', DELIM + r'white_haired_man' + DELIM: '👨‍🦳', DELIM + r'bald_man' + DELIM: '👨‍🦲', DELIM + r'woman' + DELIM: '👩', DELIM + r'red_haired_woman' + DELIM: '👩‍🦰', DELIM + r'person_red_hair' + DELIM: '🧑‍🦰', DELIM + r'curly_haired_woman' + DELIM: '👩‍🦱', DELIM + r'person_curly_hair' + DELIM: '🧑‍🦱', DELIM + r'white_haired_woman' + DELIM: '👩‍🦳', DELIM + r'person_white_hair' + DELIM: '🧑‍🦳', DELIM + r'bald_woman' + DELIM: '👩‍🦲', DELIM + r'person_bald' + DELIM: '🧑‍🦲', DELIM + r'blond_(haired_)?woman' + DELIM: '👱‍♀️', DELIM + r'blond_haired_man' + 
DELIM: '👱‍♂️', DELIM + r'older_adult' + DELIM: '🧓', DELIM + r'older_man' + DELIM: '👴', DELIM + r'older_woman' + DELIM: '👵', # # Person Gesture # DELIM + r'frowning_person' + DELIM: '🙍', DELIM + r'frowning_man' + DELIM: '🙍‍♂️', DELIM + r'frowning_woman' + DELIM: '🙍‍♀️', DELIM + r'pouting_face' + DELIM: '🙎', DELIM + r'pouting_man' + DELIM: '🙎‍♂️', DELIM + r'pouting_woman' + DELIM: '🙎‍♀️', DELIM + r'no_good' + DELIM: '🙅', DELIM + r'(ng|no_good)_man' + DELIM: '🙅‍♂️', DELIM + r'(ng_woman|no_good_woman)' + DELIM: '🙅‍♀️', DELIM + r'ok_person' + DELIM: '🙆', DELIM + r'ok_man' + DELIM: '🙆‍♂️', DELIM + r'ok_woman' + DELIM: '🙆‍♀️', DELIM + r'(information_desk|tipping_hand_)person' + DELIM: '💁', DELIM + r'(sassy_man|tipping_hand_man)' + DELIM: '💁‍♂️', DELIM + r'(sassy_woman|tipping_hand_woman)' + DELIM: '💁‍♀️', DELIM + r'raising_hand' + DELIM: '🙋', DELIM + r'raising_hand_man' + DELIM: '🙋‍♂️', DELIM + r'raising_hand_woman' + DELIM: '🙋‍♀️', DELIM + r'deaf_person' + DELIM: '🧏', DELIM + r'deaf_man' + DELIM: '🧏‍♂️', DELIM + r'deaf_woman' + DELIM: '🧏‍♀️', DELIM + r'bow' + DELIM: '🙇', DELIM + r'bowing_man' + DELIM: '🙇‍♂️', DELIM + r'bowing_woman' + DELIM: '🙇‍♀️', DELIM + r'facepalm' + DELIM: '🤦', DELIM + r'man_facepalming' + DELIM: '🤦‍♂️', DELIM + r'woman_facepalming' + DELIM: '🤦‍♀️', DELIM + r'shrug' + DELIM: '🤷', DELIM + r'man_shrugging' + DELIM: '🤷‍♂️', DELIM + r'woman_shrugging' + DELIM: '🤷‍♀️', # # Person Role # DELIM + r'health_worker' + DELIM: '🧑‍⚕️', DELIM + r'man_health_worker' + DELIM: '👨‍⚕️', DELIM + r'woman_health_worker' + DELIM: '👩‍⚕️', DELIM + r'student' + DELIM: '🧑‍🎓', DELIM + r'man_student' + DELIM: '👨‍🎓', DELIM + r'woman_student' + DELIM: '👩‍🎓', DELIM + r'teacher' + DELIM: '🧑‍🏫', DELIM + r'man_teacher' + DELIM: '👨‍🏫', DELIM + r'woman_teacher' + DELIM: '👩‍🏫', DELIM + r'judge' + DELIM: '🧑‍⚖️', DELIM + r'man_judge' + DELIM: '👨‍⚖️', DELIM + r'woman_judge' + DELIM: '👩‍⚖️', DELIM + r'farmer' + DELIM: '🧑‍🌾', DELIM + r'man_farmer' + DELIM: '👨‍🌾', DELIM + r'woman_farmer' + DELIM: '👩‍🌾', DELIM + r'cook' + DELIM: '🧑‍🍳', DELIM + r'man_cook' + DELIM: '👨‍🍳', DELIM + r'woman_cook' + DELIM: '👩‍🍳', DELIM + r'mechanic' + DELIM: '🧑‍🔧', DELIM + r'man_mechanic' + DELIM: '👨‍🔧', DELIM + r'woman_mechanic' + DELIM: '👩‍🔧', DELIM + r'factory_worker' + DELIM: '🧑‍🏭', DELIM + r'man_factory_worker' + DELIM: '👨‍🏭', DELIM + r'woman_factory_worker' + DELIM: '👩‍🏭', DELIM + r'office_worker' + DELIM: '🧑‍💼', DELIM + r'man_office_worker' + DELIM: '👨‍💼', DELIM + r'woman_office_worker' + DELIM: '👩‍💼', DELIM + r'scientist' + DELIM: '🧑‍🔬', DELIM + r'man_scientist' + DELIM: '👨‍🔬', DELIM + r'woman_scientist' + DELIM: '👩‍🔬', DELIM + r'technologist' + DELIM: '🧑‍💻', DELIM + r'man_technologist' + DELIM: '👨‍💻', DELIM + r'woman_technologist' + DELIM: '👩‍💻', DELIM + r'singer' + DELIM: '🧑‍🎤', DELIM + r'man_singer' + DELIM: '👨‍🎤', DELIM + r'woman_singer' + DELIM: '👩‍🎤', DELIM + r'artist' + DELIM: '🧑‍🎨', DELIM + r'man_artist' + DELIM: '👨‍🎨', DELIM + r'woman_artist' + DELIM: '👩‍🎨', DELIM + r'pilot' + DELIM: '🧑‍✈️', DELIM + r'man_pilot' + DELIM: '👨‍✈️', DELIM + r'woman_pilot' + DELIM: '👩‍✈️', DELIM + r'astronaut' + DELIM: '🧑‍🚀', DELIM + r'man_astronaut' + DELIM: '👨‍🚀', DELIM + r'woman_astronaut' + DELIM: '👩‍🚀', DELIM + r'firefighter' + DELIM: '🧑‍🚒', DELIM + r'man_firefighter' + DELIM: '👨‍🚒', DELIM + r'woman_firefighter' + DELIM: '👩‍🚒', DELIM + r'cop' + DELIM: '👮', DELIM + r'police(_officer|man)' + DELIM: '👮‍♂️', DELIM + r'policewoman' + DELIM: '👮‍♀️', DELIM + r'detective' + DELIM: '🕵️', DELIM + r'male_detective' + DELIM: '🕵️‍♂️', DELIM + 
r'female_detective' + DELIM: '🕵️‍♀️', DELIM + r'guard' + DELIM: '💂', DELIM + r'guardsman' + DELIM: '💂‍♂️', DELIM + r'guardswoman' + DELIM: '💂‍♀️', DELIM + r'ninja' + DELIM: '🥷', DELIM + r'construction_worker' + DELIM: '👷', DELIM + r'construction_worker_man' + DELIM: '👷‍♂️', DELIM + r'construction_worker_woman' + DELIM: '👷‍♀️', DELIM + r'prince' + DELIM: '🤴', DELIM + r'princess' + DELIM: '👸', DELIM + r'person_with_turban' + DELIM: '👳', DELIM + r'man_with_turban' + DELIM: '👳‍♂️', DELIM + r'woman_with_turban' + DELIM: '👳‍♀️', DELIM + r'man_with_gua_pi_mao' + DELIM: '👲', DELIM + r'woman_with_headscarf' + DELIM: '🧕', DELIM + r'person_in_tuxedo' + DELIM: '🤵', DELIM + r'man_in_tuxedo' + DELIM: '🤵‍♂️', DELIM + r'woman_in_tuxedo' + DELIM: '🤵‍♀️', DELIM + r'person_with_veil' + DELIM: '👰', DELIM + r'man_with_veil' + DELIM: '👰‍♂️', DELIM + r'(bride|woman)_with_veil' + DELIM: '👰‍♀️', DELIM + r'pregnant_woman' + DELIM: '🤰', DELIM + r'breast_feeding' + DELIM: '🤱', DELIM + r'woman_feeding_baby' + DELIM: '👩‍🍼', DELIM + r'man_feeding_baby' + DELIM: '👨‍🍼', DELIM + r'person_feeding_baby' + DELIM: '🧑‍🍼', # # Person Fantasy # DELIM + r'angel' + DELIM: '👼', DELIM + r'santa' + DELIM: '🎅', DELIM + r'mrs_claus' + DELIM: '🤶', DELIM + r'mx_claus' + DELIM: '🧑‍🎄', DELIM + r'superhero' + DELIM: '🦸', DELIM + r'superhero_man' + DELIM: '🦸‍♂️', DELIM + r'superhero_woman' + DELIM: '🦸‍♀️', DELIM + r'supervillain' + DELIM: '🦹', DELIM + r'supervillain_man' + DELIM: '🦹‍♂️', DELIM + r'supervillain_woman' + DELIM: '🦹‍♀️', DELIM + r'mage' + DELIM: '🧙', DELIM + r'mage_man' + DELIM: '🧙‍♂️', DELIM + r'mage_woman' + DELIM: '🧙‍♀️', DELIM + r'fairy' + DELIM: '🧚', DELIM + r'fairy_man' + DELIM: '🧚‍♂️', DELIM + r'fairy_woman' + DELIM: '🧚‍♀️', DELIM + r'vampire' + DELIM: '🧛', DELIM + r'vampire_man' + DELIM: '🧛‍♂️', DELIM + r'vampire_woman' + DELIM: '🧛‍♀️', DELIM + r'merperson' + DELIM: '🧜', DELIM + r'merman' + DELIM: '🧜‍♂️', DELIM + r'mermaid' + DELIM: '🧜‍♀️', DELIM + r'elf' + DELIM: '🧝', DELIM + r'elf_man' + DELIM: '🧝‍♂️', DELIM + r'elf_woman' + DELIM: '🧝‍♀️', DELIM + r'genie' + DELIM: '🧞', DELIM + r'genie_man' + DELIM: '🧞‍♂️', DELIM + r'genie_woman' + DELIM: '🧞‍♀️', DELIM + r'zombie' + DELIM: '🧟', DELIM + r'zombie_man' + DELIM: '🧟‍♂️', DELIM + r'zombie_woman' + DELIM: '🧟‍♀️', # # Person Activity # DELIM + r'massage' + DELIM: '💆', DELIM + r'massage_man' + DELIM: '💆‍♂️', DELIM + r'massage_woman' + DELIM: '💆‍♀️', DELIM + r'haircut' + DELIM: '💇', DELIM + r'haircut_man' + DELIM: '💇‍♂️', DELIM + r'haircut_woman' + DELIM: '💇‍♀️', DELIM + r'walking' + DELIM: '🚶', DELIM + r'walking_man' + DELIM: '🚶‍♂️', DELIM + r'walking_woman' + DELIM: '🚶‍♀️', DELIM + r'standing_person' + DELIM: '🧍', DELIM + r'standing_man' + DELIM: '🧍‍♂️', DELIM + r'standing_woman' + DELIM: '🧍‍♀️', DELIM + r'kneeling_person' + DELIM: '🧎', DELIM + r'kneeling_man' + DELIM: '🧎‍♂️', DELIM + r'kneeling_woman' + DELIM: '🧎‍♀️', DELIM + r'person_with_probing_cane' + DELIM: '🧑‍🦯', DELIM + r'man_with_probing_cane' + DELIM: '👨‍🦯', DELIM + r'woman_with_probing_cane' + DELIM: '👩‍🦯', DELIM + r'person_in_motorized_wheelchair' + DELIM: '🧑‍🦼', DELIM + r'man_in_motorized_wheelchair' + DELIM: '👨‍🦼', DELIM + r'woman_in_motorized_wheelchair' + DELIM: '👩‍🦼', DELIM + r'person_in_manual_wheelchair' + DELIM: '🧑‍🦽', DELIM + r'man_in_manual_wheelchair' + DELIM: '👨‍🦽', DELIM + r'woman_in_manual_wheelchair' + DELIM: '👩‍🦽', DELIM + r'runn(er|ing)' + DELIM: '🏃', DELIM + r'running_man' + DELIM: '🏃‍♂️', DELIM + r'running_woman' + DELIM: '🏃‍♀️', DELIM + r'(dancer|woman_dancing)' + DELIM: '💃', DELIM + 
r'man_dancing' + DELIM: '🕺', DELIM + r'business_suit_levitating' + DELIM: '🕴️', DELIM + r'dancers' + DELIM: '👯', DELIM + r'dancing_men' + DELIM: '👯‍♂️', DELIM + r'dancing_women' + DELIM: '👯‍♀️', DELIM + r'sauna_person' + DELIM: '🧖', DELIM + r'sauna_man' + DELIM: '🧖‍♂️', DELIM + r'sauna_woman' + DELIM: '🧖‍♀️', DELIM + r'climbing' + DELIM: '🧗', DELIM + r'climbing_man' + DELIM: '🧗‍♂️', DELIM + r'climbing_woman' + DELIM: '🧗‍♀️', # # Person Sport # DELIM + r'person_fencing' + DELIM: '🤺', DELIM + r'horse_racing' + DELIM: '🏇', DELIM + r'skier' + DELIM: '⛷️', DELIM + r'snowboarder' + DELIM: '🏂', DELIM + r'golfing' + DELIM: '🏌️', DELIM + r'golfing_man' + DELIM: '🏌️‍♂️', DELIM + r'golfing_woman' + DELIM: '🏌️‍♀️', DELIM + r'surfer' + DELIM: '🏄', DELIM + r'surfing_man' + DELIM: '🏄‍♂️', DELIM + r'surfing_woman' + DELIM: '🏄‍♀️', DELIM + r'rowboat' + DELIM: '🚣', DELIM + r'rowing_man' + DELIM: '🚣‍♂️', DELIM + r'rowing_woman' + DELIM: '🚣‍♀️', DELIM + r'swimmer' + DELIM: '🏊', DELIM + r'swimming_man' + DELIM: '🏊‍♂️', DELIM + r'swimming_woman' + DELIM: '🏊‍♀️', DELIM + r'bouncing_ball_person' + DELIM: '⛹️', DELIM + r'(basketball|bouncing_ball)_man' + DELIM: '⛹️‍♂️', DELIM + r'(basketball|bouncing_ball)_woman' + DELIM: '⛹️‍♀️', DELIM + r'weight_lifting' + DELIM: '🏋️', DELIM + r'weight_lifting_man' + DELIM: '🏋️‍♂️', DELIM + r'weight_lifting_woman' + DELIM: '🏋️‍♀️', DELIM + r'bicyclist' + DELIM: '🚴', DELIM + r'biking_man' + DELIM: '🚴‍♂️', DELIM + r'biking_woman' + DELIM: '🚴‍♀️', DELIM + r'mountain_bicyclist' + DELIM: '🚵', DELIM + r'mountain_biking_man' + DELIM: '🚵‍♂️', DELIM + r'mountain_biking_woman' + DELIM: '🚵‍♀️', DELIM + r'cartwheeling' + DELIM: '🤸', DELIM + r'man_cartwheeling' + DELIM: '🤸‍♂️', DELIM + r'woman_cartwheeling' + DELIM: '🤸‍♀️', DELIM + r'wrestling' + DELIM: '🤼', DELIM + r'men_wrestling' + DELIM: '🤼‍♂️', DELIM + r'women_wrestling' + DELIM: '🤼‍♀️', DELIM + r'water_polo' + DELIM: '🤽', DELIM + r'man_playing_water_polo' + DELIM: '🤽‍♂️', DELIM + r'woman_playing_water_polo' + DELIM: '🤽‍♀️', DELIM + r'handball_person' + DELIM: '🤾', DELIM + r'man_playing_handball' + DELIM: '🤾‍♂️', DELIM + r'woman_playing_handball' + DELIM: '🤾‍♀️', DELIM + r'juggling_person' + DELIM: '🤹', DELIM + r'man_juggling' + DELIM: '🤹‍♂️', DELIM + r'woman_juggling' + DELIM: '🤹‍♀️', # # Person Resting # DELIM + r'lotus_position' + DELIM: '🧘', DELIM + r'lotus_position_man' + DELIM: '🧘‍♂️', DELIM + r'lotus_position_woman' + DELIM: '🧘‍♀️', DELIM + r'bath' + DELIM: '🛀', DELIM + r'sleeping_bed' + DELIM: '🛌', # # Family # DELIM + r'people_holding_hands' + DELIM: '🧑‍🤝‍🧑', DELIM + r'two_women_holding_hands' + DELIM: '👭', DELIM + r'couple' + DELIM: '👫', DELIM + r'two_men_holding_hands' + DELIM: '👬', DELIM + r'couplekiss' + DELIM: '💏', DELIM + r'couplekiss_man_woman' + DELIM: '👩‍❤️‍💋‍👨', DELIM + r'couplekiss_man_man' + DELIM: '👨‍❤️‍💋‍👨', DELIM + r'couplekiss_woman_woman' + DELIM: '👩‍❤️‍💋‍👩', DELIM + r'couple_with_heart' + DELIM: '💑', DELIM + r'couple_with_heart_woman_man' + DELIM: '👩‍❤️‍👨', DELIM + r'couple_with_heart_man_man' + DELIM: '👨‍❤️‍👨', DELIM + r'couple_with_heart_woman_woman' + DELIM: '👩‍❤️‍👩', DELIM + r'family_man_woman_boy' + DELIM: '👨‍👩‍👦', DELIM + r'family_man_woman_girl' + DELIM: '👨‍👩‍👧', DELIM + r'family_man_woman_girl_boy' + DELIM: '👨‍👩‍👧‍👦', DELIM + r'family_man_woman_boy_boy' + DELIM: '👨‍👩‍👦‍👦', DELIM + r'family_man_woman_girl_girl' + DELIM: '👨‍👩‍👧‍👧', DELIM + r'family_man_man_boy' + DELIM: '👨‍👨‍👦', DELIM + r'family_man_man_girl' + DELIM: '👨‍👨‍👧', DELIM + r'family_man_man_girl_boy' + DELIM: '👨‍👨‍👧‍👦', DELIM + 
r'family_man_man_boy_boy' + DELIM: '👨‍👨‍👦‍👦', DELIM + r'family_man_man_girl_girl' + DELIM: '👨‍👨‍👧‍👧', DELIM + r'family_woman_woman_boy' + DELIM: '👩‍👩‍👦', DELIM + r'family_woman_woman_girl' + DELIM: '👩‍👩‍👧', DELIM + r'family_woman_woman_girl_boy' + DELIM: '👩‍👩‍👧‍👦', DELIM + r'family_woman_woman_boy_boy' + DELIM: '👩‍👩‍👦‍👦', DELIM + r'family_woman_woman_girl_girl' + DELIM: '👩‍👩‍👧‍👧', DELIM + r'family_man_boy' + DELIM: '👨‍👦', DELIM + r'family_man_boy_boy' + DELIM: '👨‍👦‍👦', DELIM + r'family_man_girl' + DELIM: '👨‍👧', DELIM + r'family_man_girl_boy' + DELIM: '👨‍👧‍👦', DELIM + r'family_man_girl_girl' + DELIM: '👨‍👧‍👧', DELIM + r'family_woman_boy' + DELIM: '👩‍👦', DELIM + r'family_woman_boy_boy' + DELIM: '👩‍👦‍👦', DELIM + r'family_woman_girl' + DELIM: '👩‍👧', DELIM + r'family_woman_girl_boy' + DELIM: '👩‍👧‍👦', DELIM + r'family_woman_girl_girl' + DELIM: '👩‍👧‍👧', # # Person Symbol # DELIM + r'speaking_head' + DELIM: '🗣️', DELIM + r'bust_in_silhouette' + DELIM: '👤', DELIM + r'busts_in_silhouette' + DELIM: '👥', DELIM + r'people_hugging' + DELIM: '🫂', DELIM + r'family' + DELIM: '👪', DELIM + r'footprints' + DELIM: '👣', # # Animal Mammal # DELIM + r'monkey_face' + DELIM: '🐵', DELIM + r'monkey' + DELIM: '🐒', DELIM + r'gorilla' + DELIM: '🦍', DELIM + r'orangutan' + DELIM: '🦧', DELIM + r'dog' + DELIM: '🐶', DELIM + r'dog2' + DELIM: '🐕', DELIM + r'guide_dog' + DELIM: '🦮', DELIM + r'service_dog' + DELIM: '🐕‍🦺', DELIM + r'poodle' + DELIM: '🐩', DELIM + r'wolf' + DELIM: '🐺', DELIM + r'fox_face' + DELIM: '🦊', DELIM + r'raccoon' + DELIM: '🦝', DELIM + r'cat' + DELIM: '🐱', DELIM + r'cat2' + DELIM: '🐈', DELIM + r'black_cat' + DELIM: '🐈‍⬛', DELIM + r'lion' + DELIM: '🦁', DELIM + r'tiger' + DELIM: '🐯', DELIM + r'tiger2' + DELIM: '🐅', DELIM + r'leopard' + DELIM: '🐆', DELIM + r'horse' + DELIM: '🐴', DELIM + r'racehorse' + DELIM: '🐎', DELIM + r'unicorn' + DELIM: '🦄', DELIM + r'zebra' + DELIM: '🦓', DELIM + r'deer' + DELIM: '🦌', DELIM + r'bison' + DELIM: '🦬', DELIM + r'cow' + DELIM: '🐮', DELIM + r'ox' + DELIM: '🐂', DELIM + r'water_buffalo' + DELIM: '🐃', DELIM + r'cow2' + DELIM: '🐄', DELIM + r'pig' + DELIM: '🐷', DELIM + r'pig2' + DELIM: '🐖', DELIM + r'boar' + DELIM: '🐗', DELIM + r'pig_nose' + DELIM: '🐽', DELIM + r'ram' + DELIM: '🐏', DELIM + r'sheep' + DELIM: '🐑', DELIM + r'goat' + DELIM: '🐐', DELIM + r'dromedary_camel' + DELIM: '🐪', DELIM + r'camel' + DELIM: '🐫', DELIM + r'llama' + DELIM: '🦙', DELIM + r'giraffe' + DELIM: '🦒', DELIM + r'elephant' + DELIM: '🐘', DELIM + r'mammoth' + DELIM: '🦣', DELIM + r'rhinoceros' + DELIM: '🦏', DELIM + r'hippopotamus' + DELIM: '🦛', DELIM + r'mouse' + DELIM: '🐭', DELIM + r'mouse2' + DELIM: '🐁', DELIM + r'rat' + DELIM: '🐀', DELIM + r'hamster' + DELIM: '🐹', DELIM + r'rabbit' + DELIM: '🐰', DELIM + r'rabbit2' + DELIM: '🐇', DELIM + r'chipmunk' + DELIM: '🐿️', DELIM + r'beaver' + DELIM: '🦫', DELIM + r'hedgehog' + DELIM: '🦔', DELIM + r'bat' + DELIM: '🦇', DELIM + r'bear' + DELIM: '🐻', DELIM + r'polar_bear' + DELIM: '🐻‍❄️', DELIM + r'koala' + DELIM: '🐨', DELIM + r'panda_face' + DELIM: '🐼', DELIM + r'sloth' + DELIM: '🦥', DELIM + r'otter' + DELIM: '🦦', DELIM + r'skunk' + DELIM: '🦨', DELIM + r'kangaroo' + DELIM: '🦘', DELIM + r'badger' + DELIM: '🦡', DELIM + r'(feet|paw_prints)' + DELIM: '🐾', # # Animal Bird # DELIM + r'turkey' + DELIM: '🦃', DELIM + r'chicken' + DELIM: '🐔', DELIM + r'rooster' + DELIM: '🐓', DELIM + r'hatching_chick' + DELIM: '🐣', DELIM + r'baby_chick' + DELIM: '🐤', DELIM + r'hatched_chick' + DELIM: '🐥', DELIM + r'bird' + DELIM: '🐦', DELIM + r'penguin' + DELIM: '🐧', DELIM + r'dove' + DELIM: '🕊️', DELIM + 
r'eagle' + DELIM: '🦅', DELIM + r'duck' + DELIM: '🦆', DELIM + r'swan' + DELIM: '🦢', DELIM + r'owl' + DELIM: '🦉', DELIM + r'dodo' + DELIM: '🦤', DELIM + r'feather' + DELIM: '🪶', DELIM + r'flamingo' + DELIM: '🦩', DELIM + r'peacock' + DELIM: '🦚', DELIM + r'parrot' + DELIM: '🦜', # # Animal Amphibian # DELIM + r'frog' + DELIM: '🐸', # # Animal Reptile # DELIM + r'crocodile' + DELIM: '🐊', DELIM + r'turtle' + DELIM: '🐢', DELIM + r'lizard' + DELIM: '🦎', DELIM + r'snake' + DELIM: '🐍', DELIM + r'dragon_face' + DELIM: '🐲', DELIM + r'dragon' + DELIM: '🐉', DELIM + r'sauropod' + DELIM: '🦕', DELIM + r't-rex' + DELIM: '🦖', # # Animal Marine # DELIM + r'whale' + DELIM: '🐳', DELIM + r'whale2' + DELIM: '🐋', DELIM + r'dolphin' + DELIM: '🐬', DELIM + r'(seal|flipper)' + DELIM: '🦭', DELIM + r'fish' + DELIM: '🐟', DELIM + r'tropical_fish' + DELIM: '🐠', DELIM + r'blowfish' + DELIM: '🐡', DELIM + r'shark' + DELIM: '🦈', DELIM + r'octopus' + DELIM: '🐙', DELIM + r'shell' + DELIM: '🐚', # # Animal Bug # DELIM + r'snail' + DELIM: '🐌', DELIM + r'butterfly' + DELIM: '🦋', DELIM + r'bug' + DELIM: '🐛', DELIM + r'ant' + DELIM: '🐜', DELIM + r'bee' + DELIM: '🐝', DELIM + r'honeybee' + DELIM: '🪲', DELIM + r'(lady_)?beetle' + DELIM: '🐞', DELIM + r'cricket' + DELIM: '🦗', DELIM + r'cockroach' + DELIM: '🪳', DELIM + r'spider' + DELIM: '🕷️', DELIM + r'spider_web' + DELIM: '🕸️', DELIM + r'scorpion' + DELIM: '🦂', DELIM + r'mosquito' + DELIM: '🦟', DELIM + r'fly' + DELIM: '🪰', DELIM + r'worm' + DELIM: '🪱', DELIM + r'microbe' + DELIM: '🦠', # # Plant Flower # DELIM + r'bouquet' + DELIM: '💐', DELIM + r'cherry_blossom' + DELIM: '🌸', DELIM + r'white_flower' + DELIM: '💮', DELIM + r'rosette' + DELIM: '🏵️', DELIM + r'rose' + DELIM: '🌹', DELIM + r'wilted_flower' + DELIM: '🥀', DELIM + r'hibiscus' + DELIM: '🌺', DELIM + r'sunflower' + DELIM: '🌻', DELIM + r'blossom' + DELIM: '🌼', DELIM + r'tulip' + DELIM: '🌷', # # Plant Other # DELIM + r'seedling' + DELIM: '🌱', DELIM + r'potted_plant' + DELIM: '🪴', DELIM + r'evergreen_tree' + DELIM: '🌲', DELIM + r'deciduous_tree' + DELIM: '🌳', DELIM + r'palm_tree' + DELIM: '🌴', DELIM + r'cactus' + DELIM: '🌵', DELIM + r'ear_of_rice' + DELIM: '🌾', DELIM + r'herb' + DELIM: '🌿', DELIM + r'shamrock' + DELIM: '☘️', DELIM + r'four_leaf_clover' + DELIM: '🍀', DELIM + r'maple_leaf' + DELIM: '🍁', DELIM + r'fallen_leaf' + DELIM: '🍂', DELIM + r'leaves' + DELIM: '🍃', DELIM + r'mushroom' + DELIM: '🍄', # # Food Fruit # DELIM + r'grapes' + DELIM: '🍇', DELIM + r'melon' + DELIM: '🍈', DELIM + r'watermelon' + DELIM: '🍉', DELIM + r'(orange|mandarin|tangerine)' + DELIM: '🍊', DELIM + r'lemon' + DELIM: '🍋', DELIM + r'banana' + DELIM: '🍌', DELIM + r'pineapple' + DELIM: '🍍', DELIM + r'mango' + DELIM: '🥭', DELIM + r'apple' + DELIM: '🍎', DELIM + r'green_apple' + DELIM: '🍏', DELIM + r'pear' + DELIM: '🍐', DELIM + r'peach' + DELIM: '🍑', DELIM + r'cherries' + DELIM: '🍒', DELIM + r'strawberry' + DELIM: '🍓', DELIM + r'blueberries' + DELIM: '🫐', DELIM + r'kiwi_fruit' + DELIM: '🥝', DELIM + r'tomato' + DELIM: '🍅', DELIM + r'olive' + DELIM: '🫒', DELIM + r'coconut' + DELIM: '🥥', # # Food Vegetable # DELIM + r'avocado' + DELIM: '🥑', DELIM + r'eggplant' + DELIM: '🍆', DELIM + r'potato' + DELIM: '🥔', DELIM + r'carrot' + DELIM: '🥕', DELIM + r'corn' + DELIM: '🌽', DELIM + r'hot_pepper' + DELIM: '🌶️', DELIM + r'bell_pepper' + DELIM: '🫑', DELIM + r'cucumber' + DELIM: '🥒', DELIM + r'leafy_green' + DELIM: '🥬', DELIM + r'broccoli' + DELIM: '🥦', DELIM + r'garlic' + DELIM: '🧄', DELIM + r'onion' + DELIM: '🧅', DELIM + r'peanuts' + DELIM: '🥜', DELIM + r'chestnut' + DELIM: '🌰', # # 
Food Prepared # DELIM + r'bread' + DELIM: '🍞', DELIM + r'croissant' + DELIM: '🥐', DELIM + r'baguette_bread' + DELIM: '🥖', DELIM + r'flatbread' + DELIM: '🫓', DELIM + r'pretzel' + DELIM: '🥨', DELIM + r'bagel' + DELIM: '🥯', DELIM + r'pancakes' + DELIM: '🥞', DELIM + r'waffle' + DELIM: '🧇', DELIM + r'cheese' + DELIM: '🧀', DELIM + r'meat_on_bone' + DELIM: '🍖', DELIM + r'poultry_leg' + DELIM: '🍗', DELIM + r'cut_of_meat' + DELIM: '🥩', DELIM + r'bacon' + DELIM: '🥓', DELIM + r'hamburger' + DELIM: '🍔', DELIM + r'fries' + DELIM: '🍟', DELIM + r'pizza' + DELIM: '🍕', DELIM + r'hotdog' + DELIM: '🌭', DELIM + r'sandwich' + DELIM: '🥪', DELIM + r'taco' + DELIM: '🌮', DELIM + r'burrito' + DELIM: '🌯', DELIM + r'tamale' + DELIM: '🫔', DELIM + r'stuffed_flatbread' + DELIM: '🥙', DELIM + r'falafel' + DELIM: '🧆', DELIM + r'egg' + DELIM: '🥚', DELIM + r'fried_egg' + DELIM: '🍳', DELIM + r'shallow_pan_of_food' + DELIM: '🥘', DELIM + r'stew' + DELIM: '🍲', DELIM + r'fondue' + DELIM: '🫕', DELIM + r'bowl_with_spoon' + DELIM: '🥣', DELIM + r'green_salad' + DELIM: '🥗', DELIM + r'popcorn' + DELIM: '🍿', DELIM + r'butter' + DELIM: '🧈', DELIM + r'salt' + DELIM: '🧂', DELIM + r'canned_food' + DELIM: '🥫', # # Food Asian # DELIM + r'bento' + DELIM: '🍱', DELIM + r'rice_cracker' + DELIM: '🍘', DELIM + r'rice_ball' + DELIM: '🍙', DELIM + r'rice' + DELIM: '🍚', DELIM + r'curry' + DELIM: '🍛', DELIM + r'ramen' + DELIM: '🍜', DELIM + r'spaghetti' + DELIM: '🍝', DELIM + r'sweet_potato' + DELIM: '🍠', DELIM + r'oden' + DELIM: '🍢', DELIM + r'sushi' + DELIM: '🍣', DELIM + r'fried_shrimp' + DELIM: '🍤', DELIM + r'fish_cake' + DELIM: '🍥', DELIM + r'moon_cake' + DELIM: '🥮', DELIM + r'dango' + DELIM: '🍡', DELIM + r'dumpling' + DELIM: '🥟', DELIM + r'fortune_cookie' + DELIM: '🥠', DELIM + r'takeout_box' + DELIM: '🥡', # # Food Marine # DELIM + r'crab' + DELIM: '🦀', DELIM + r'lobster' + DELIM: '🦞', DELIM + r'shrimp' + DELIM: '🦐', DELIM + r'squid' + DELIM: '🦑', DELIM + r'oyster' + DELIM: '🦪', # # Food Sweet # DELIM + r'icecream' + DELIM: '🍦', DELIM + r'shaved_ice' + DELIM: '🍧', DELIM + r'ice_cream' + DELIM: '🍨', DELIM + r'doughnut' + DELIM: '🍩', DELIM + r'cookie' + DELIM: '🍪', DELIM + r'birthday' + DELIM: '🎂', DELIM + r'cake' + DELIM: '🍰', DELIM + r'cupcake' + DELIM: '🧁', DELIM + r'pie' + DELIM: '🥧', DELIM + r'chocolate_bar' + DELIM: '🍫', DELIM + r'candy' + DELIM: '🍬', DELIM + r'lollipop' + DELIM: '🍭', DELIM + r'custard' + DELIM: '🍮', DELIM + r'honey_pot' + DELIM: '🍯', # # Drink # DELIM + r'baby_bottle' + DELIM: '🍼', DELIM + r'milk_glass' + DELIM: '🥛', DELIM + r'coffee' + DELIM: '☕', DELIM + r'teapot' + DELIM: '🫖', DELIM + r'tea' + DELIM: '🍵', DELIM + r'sake' + DELIM: '🍶', DELIM + r'champagne' + DELIM: '🍾', DELIM + r'wine_glass' + DELIM: '🍷', DELIM + r'cocktail' + DELIM: '🍸', DELIM + r'tropical_drink' + DELIM: '🍹', DELIM + r'beer' + DELIM: '🍺', DELIM + r'beers' + DELIM: '🍻', DELIM + r'clinking_glasses' + DELIM: '🥂', DELIM + r'tumbler_glass' + DELIM: '🥃', DELIM + r'cup_with_straw' + DELIM: '🥤', DELIM + r'bubble_tea' + DELIM: '🧋', DELIM + r'beverage_box' + DELIM: '🧃', DELIM + r'mate' + DELIM: '🧉', DELIM + r'ice_cube' + DELIM: '🧊', # # Dishware # DELIM + r'chopsticks' + DELIM: '🥢', DELIM + r'plate_with_cutlery' + DELIM: '🍽️', DELIM + r'fork_and_knife' + DELIM: '🍴', DELIM + r'spoon' + DELIM: '🥄', DELIM + r'(hocho|knife)' + DELIM: '🔪', DELIM + r'amphora' + DELIM: '🏺', # # Place Map # DELIM + r'earth_africa' + DELIM: '🌍', DELIM + r'earth_americas' + DELIM: '🌎', DELIM + r'earth_asia' + DELIM: '🌏', DELIM + r'globe_with_meridians' + DELIM: '🌐', DELIM + r'world_map' + DELIM: 
'🗺️', DELIM + r'japan' + DELIM: '🗾', DELIM + r'compass' + DELIM: '🧭', # # Place Geographic # DELIM + r'mountain_snow' + DELIM: '🏔️', DELIM + r'mountain' + DELIM: '⛰️', DELIM + r'volcano' + DELIM: '🌋', DELIM + r'mount_fuji' + DELIM: '🗻', DELIM + r'camping' + DELIM: '🏕️', DELIM + r'beach_umbrella' + DELIM: '🏖️', DELIM + r'desert' + DELIM: '🏜️', DELIM + r'desert_island' + DELIM: '🏝️', DELIM + r'national_park' + DELIM: '🏞️', # # Place Building # DELIM + r'stadium' + DELIM: '🏟️', DELIM + r'classical_building' + DELIM: '🏛️', DELIM + r'building_construction' + DELIM: '🏗️', DELIM + r'bricks' + DELIM: '🧱', DELIM + r'rock' + DELIM: '🪨', DELIM + r'wood' + DELIM: '🪵', DELIM + r'hut' + DELIM: '🛖', DELIM + r'houses' + DELIM: '🏘️', DELIM + r'derelict_house' + DELIM: '🏚️', DELIM + r'house' + DELIM: '🏠', DELIM + r'house_with_garden' + DELIM: '🏡', DELIM + r'office' + DELIM: '🏢', DELIM + r'post_office' + DELIM: '🏣', DELIM + r'european_post_office' + DELIM: '🏤', DELIM + r'hospital' + DELIM: '🏥', DELIM + r'bank' + DELIM: '🏦', DELIM + r'hotel' + DELIM: '🏨', DELIM + r'love_hotel' + DELIM: '🏩', DELIM + r'convenience_store' + DELIM: '🏪', DELIM + r'school' + DELIM: '🏫', DELIM + r'department_store' + DELIM: '🏬', DELIM + r'factory' + DELIM: '🏭', DELIM + r'japanese_castle' + DELIM: '🏯', DELIM + r'european_castle' + DELIM: '🏰', DELIM + r'wedding' + DELIM: '💒', DELIM + r'tokyo_tower' + DELIM: '🗼', DELIM + r'statue_of_liberty' + DELIM: '🗽', # # Place Religious # DELIM + r'church' + DELIM: '⛪', DELIM + r'mosque' + DELIM: '🕌', DELIM + r'hindu_temple' + DELIM: '🛕', DELIM + r'synagogue' + DELIM: '🕍', DELIM + r'shinto_shrine' + DELIM: '⛩️', DELIM + r'kaaba' + DELIM: '🕋', # # Place Other # DELIM + r'fountain' + DELIM: '⛲', DELIM + r'tent' + DELIM: '⛺', DELIM + r'foggy' + DELIM: '🌁', DELIM + r'night_with_stars' + DELIM: '🌃', DELIM + r'cityscape' + DELIM: '🏙️', DELIM + r'sunrise_over_mountains' + DELIM: '🌄', DELIM + r'sunrise' + DELIM: '🌅', DELIM + r'city_sunset' + DELIM: '🌆', DELIM + r'city_sunrise' + DELIM: '🌇', DELIM + r'bridge_at_night' + DELIM: '🌉', DELIM + r'hotsprings' + DELIM: '♨️', DELIM + r'carousel_horse' + DELIM: '🎠', DELIM + r'ferris_wheel' + DELIM: '🎡', DELIM + r'roller_coaster' + DELIM: '🎢', DELIM + r'barber' + DELIM: '💈', DELIM + r'circus_tent' + DELIM: '🎪', # # Transport Ground # DELIM + r'steam_locomotive' + DELIM: '🚂', DELIM + r'railway_car' + DELIM: '🚃', DELIM + r'bullettrain_side' + DELIM: '🚄', DELIM + r'bullettrain_front' + DELIM: '🚅', DELIM + r'train2' + DELIM: '🚆', DELIM + r'metro' + DELIM: '🚇', DELIM + r'light_rail' + DELIM: '🚈', DELIM + r'station' + DELIM: '🚉', DELIM + r'tram' + DELIM: '🚊', DELIM + r'monorail' + DELIM: '🚝', DELIM + r'mountain_railway' + DELIM: '🚞', DELIM + r'train' + DELIM: '🚋', DELIM + r'bus' + DELIM: '🚌', DELIM + r'oncoming_bus' + DELIM: '🚍', DELIM + r'trolleybus' + DELIM: '🚎', DELIM + r'minibus' + DELIM: '🚐', DELIM + r'ambulance' + DELIM: '🚑', DELIM + r'fire_engine' + DELIM: '🚒', DELIM + r'police_car' + DELIM: '🚓', DELIM + r'oncoming_police_car' + DELIM: '🚔', DELIM + r'taxi' + DELIM: '🚕', DELIM + r'oncoming_taxi' + DELIM: '🚖', DELIM + r'car' + DELIM: '🚗', DELIM + r'(red_car|oncoming_automobile)' + DELIM: '🚘', DELIM + r'blue_car' + DELIM: '🚙', DELIM + r'pickup_truck' + DELIM: '🛻', DELIM + r'truck' + DELIM: '🚚', DELIM + r'articulated_lorry' + DELIM: '🚛', DELIM + r'tractor' + DELIM: '🚜', DELIM + r'racing_car' + DELIM: '🏎️', DELIM + r'motorcycle' + DELIM: '🏍️', DELIM + r'motor_scooter' + DELIM: '🛵', DELIM + r'manual_wheelchair' + DELIM: '🦽', DELIM + r'motorized_wheelchair' + DELIM: '🦼', 
DELIM + r'auto_rickshaw' + DELIM: '🛺', DELIM + r'bike' + DELIM: '🚲', DELIM + r'kick_scooter' + DELIM: '🛴', DELIM + r'skateboard' + DELIM: '🛹', DELIM + r'roller_skate' + DELIM: '🛼', DELIM + r'busstop' + DELIM: '🚏', DELIM + r'motorway' + DELIM: '🛣️', DELIM + r'railway_track' + DELIM: '🛤️', DELIM + r'oil_drum' + DELIM: '🛢️', DELIM + r'fuelpump' + DELIM: '⛽', DELIM + r'rotating_light' + DELIM: '🚨', DELIM + r'traffic_light' + DELIM: '🚥', DELIM + r'vertical_traffic_light' + DELIM: '🚦', DELIM + r'stop_sign' + DELIM: '🛑', DELIM + r'construction' + DELIM: '🚧', # # Transport Water # DELIM + r'anchor' + DELIM: '⚓', DELIM + r'(sailboat|boat)' + DELIM: '⛵', DELIM + r'canoe' + DELIM: '🛶', DELIM + r'speedboat' + DELIM: '🚤', DELIM + r'passenger_ship' + DELIM: '🛳️', DELIM + r'ferry' + DELIM: '⛴️', DELIM + r'motor_boat' + DELIM: '🛥️', DELIM + r'ship' + DELIM: '🚢', # # Transport Air # DELIM + r'airplane' + DELIM: '✈️', DELIM + r'small_airplane' + DELIM: '🛩️', DELIM + r'flight_departure' + DELIM: '🛫', DELIM + r'flight_arrival' + DELIM: '🛬', DELIM + r'parachute' + DELIM: '🪂', DELIM + r'seat' + DELIM: '💺', DELIM + r'helicopter' + DELIM: '🚁', DELIM + r'suspension_railway' + DELIM: '🚟', DELIM + r'mountain_cableway' + DELIM: '🚠', DELIM + r'aerial_tramway' + DELIM: '🚡', DELIM + r'artificial_satellite' + DELIM: '🛰️', DELIM + r'rocket' + DELIM: '🚀', DELIM + r'flying_saucer' + DELIM: '🛸', # # Hotel # DELIM + r'bellhop_bell' + DELIM: '🛎️', DELIM + r'luggage' + DELIM: '🧳', # # Time # DELIM + r'hourglass' + DELIM: '⌛', DELIM + r'hourglass_flowing_sand' + DELIM: '⏳', DELIM + r'watch' + DELIM: '⌚', DELIM + r'alarm_clock' + DELIM: '⏰', DELIM + r'stopwatch' + DELIM: '⏱️', DELIM + r'timer_clock' + DELIM: '⏲️', DELIM + r'mantelpiece_clock' + DELIM: '🕰️', DELIM + r'clock12' + DELIM: '🕛', DELIM + r'clock1230' + DELIM: '🕧', DELIM + r'clock1' + DELIM: '🕐', DELIM + r'clock130' + DELIM: '🕜', DELIM + r'clock2' + DELIM: '🕑', DELIM + r'clock230' + DELIM: '🕝', DELIM + r'clock3' + DELIM: '🕒', DELIM + r'clock330' + DELIM: '🕞', DELIM + r'clock4' + DELIM: '🕓', DELIM + r'clock430' + DELIM: '🕟', DELIM + r'clock5' + DELIM: '🕔', DELIM + r'clock530' + DELIM: '🕠', DELIM + r'clock6' + DELIM: '🕕', DELIM + r'clock630' + DELIM: '🕡', DELIM + r'clock7' + DELIM: '🕖', DELIM + r'clock730' + DELIM: '🕢', DELIM + r'clock8' + DELIM: '🕗', DELIM + r'clock830' + DELIM: '🕣', DELIM + r'clock9' + DELIM: '🕘', DELIM + r'clock930' + DELIM: '🕤', DELIM + r'clock10' + DELIM: '🕙', DELIM + r'clock1030' + DELIM: '🕥', DELIM + r'clock11' + DELIM: '🕚', DELIM + r'clock1130' + DELIM: '🕦', # Sky & Weather DELIM + r'new_moon' + DELIM: '🌑', DELIM + r'waxing_crescent_moon' + DELIM: '🌒', DELIM + r'first_quarter_moon' + DELIM: '🌓', DELIM + r'moon' + DELIM: '🌔', DELIM + r'(waxing_gibbous_moon|full_moon)' + DELIM: '🌕', DELIM + r'waning_gibbous_moon' + DELIM: '🌖', DELIM + r'last_quarter_moon' + DELIM: '🌗', DELIM + r'waning_crescent_moon' + DELIM: '🌘', DELIM + r'crescent_moon' + DELIM: '🌙', DELIM + r'new_moon_with_face' + DELIM: '🌚', DELIM + r'first_quarter_moon_with_face' + DELIM: '🌛', DELIM + r'last_quarter_moon_with_face' + DELIM: '🌜', DELIM + r'thermometer' + DELIM: '🌡️', DELIM + r'sunny' + DELIM: '☀️', DELIM + r'full_moon_with_face' + DELIM: '🌝', DELIM + r'sun_with_face' + DELIM: '🌞', DELIM + r'ringed_planet' + DELIM: '🪐', DELIM + r'star' + DELIM: '⭐', DELIM + r'star2' + DELIM: '🌟', DELIM + r'stars' + DELIM: '🌠', DELIM + r'milky_way' + DELIM: '🌌', DELIM + r'cloud' + DELIM: '☁️', DELIM + r'partly_sunny' + DELIM: '⛅', DELIM + r'cloud_with_lightning_and_rain' + DELIM: '⛈️', DELIM + 
r'sun_behind_small_cloud' + DELIM: '🌤️', DELIM + r'sun_behind_large_cloud' + DELIM: '🌥️', DELIM + r'sun_behind_rain_cloud' + DELIM: '🌦️', DELIM + r'cloud_with_rain' + DELIM: '🌧️', DELIM + r'cloud_with_snow' + DELIM: '🌨️', DELIM + r'cloud_with_lightning' + DELIM: '🌩️', DELIM + r'tornado' + DELIM: '🌪️', DELIM + r'fog' + DELIM: '🌫️', DELIM + r'wind_face' + DELIM: '🌬️', DELIM + r'cyclone' + DELIM: '🌀', DELIM + r'rainbow' + DELIM: '🌈', DELIM + r'closed_umbrella' + DELIM: '🌂', DELIM + r'open_umbrella' + DELIM: '☂️', DELIM + r'umbrella' + DELIM: '☔', DELIM + r'parasol_on_ground' + DELIM: '⛱️', DELIM + r'zap' + DELIM: '⚡', DELIM + r'snowflake' + DELIM: '❄️', DELIM + r'snowman_with_snow' + DELIM: '☃️', DELIM + r'snowman' + DELIM: '⛄', DELIM + r'comet' + DELIM: '☄️', DELIM + r'fire' + DELIM: '🔥', DELIM + r'droplet' + DELIM: '💧', DELIM + r'ocean' + DELIM: '🌊', # # Event # DELIM + r'jack_o_lantern' + DELIM: '🎃', DELIM + r'christmas_tree' + DELIM: '🎄', DELIM + r'fireworks' + DELIM: '🎆', DELIM + r'sparkler' + DELIM: '🎇', DELIM + r'firecracker' + DELIM: '🧨', DELIM + r'sparkles' + DELIM: '✨', DELIM + r'balloon' + DELIM: '🎈', DELIM + r'tada' + DELIM: '🎉', DELIM + r'confetti_ball' + DELIM: '🎊', DELIM + r'tanabata_tree' + DELIM: '🎋', DELIM + r'bamboo' + DELIM: '🎍', DELIM + r'dolls' + DELIM: '🎎', DELIM + r'flags' + DELIM: '🎏', DELIM + r'wind_chime' + DELIM: '🎐', DELIM + r'rice_scene' + DELIM: '🎑', DELIM + r'red_envelope' + DELIM: '🧧', DELIM + r'ribbon' + DELIM: '🎀', DELIM + r'gift' + DELIM: '🎁', DELIM + r'reminder_ribbon' + DELIM: '🎗️', DELIM + r'tickets' + DELIM: '🎟️', DELIM + r'ticket' + DELIM: '🎫', # # Award Medal # DELIM + r'medal_military' + DELIM: '🎖️', DELIM + r'trophy' + DELIM: '🏆', DELIM + r'medal_sports' + DELIM: '🏅', DELIM + r'1st_place_medal' + DELIM: '🥇', DELIM + r'2nd_place_medal' + DELIM: '🥈', DELIM + r'3rd_place_medal' + DELIM: '🥉', # # Sport # DELIM + r'soccer' + DELIM: '⚽', DELIM + r'baseball' + DELIM: '⚾', DELIM + r'softball' + DELIM: '🥎', DELIM + r'basketball' + DELIM: '🏀', DELIM + r'volleyball' + DELIM: '🏐', DELIM + r'football' + DELIM: '🏈', DELIM + r'rugby_football' + DELIM: '🏉', DELIM + r'tennis' + DELIM: '🎾', DELIM + r'flying_disc' + DELIM: '🥏', DELIM + r'bowling' + DELIM: '🎳', DELIM + r'cricket_game' + DELIM: '🏏', DELIM + r'field_hockey' + DELIM: '🏑', DELIM + r'ice_hockey' + DELIM: '🏒', DELIM + r'lacrosse' + DELIM: '🥍', DELIM + r'ping_pong' + DELIM: '🏓', DELIM + r'badminton' + DELIM: '🏸', DELIM + r'boxing_glove' + DELIM: '🥊', DELIM + r'martial_arts_uniform' + DELIM: '🥋', DELIM + r'goal_net' + DELIM: '🥅', DELIM + r'golf' + DELIM: '⛳', DELIM + r'ice_skate' + DELIM: '⛸️', DELIM + r'fishing_pole_and_fish' + DELIM: '🎣', DELIM + r'diving_mask' + DELIM: '🤿', DELIM + r'running_shirt_with_sash' + DELIM: '🎽', DELIM + r'ski' + DELIM: '🎿', DELIM + r'sled' + DELIM: '🛷', DELIM + r'curling_stone' + DELIM: '🥌', # # Game # DELIM + r'dart' + DELIM: '🎯', DELIM + r'yo_yo' + DELIM: '🪀', DELIM + r'kite' + DELIM: '🪁', DELIM + r'gun' + DELIM: '🔫', DELIM + r'8ball' + DELIM: '🎱', DELIM + r'crystal_ball' + DELIM: '🔮', DELIM + r'magic_wand' + DELIM: '🪄', DELIM + r'video_game' + DELIM: '🎮', DELIM + r'joystick' + DELIM: '🕹️', DELIM + r'slot_machine' + DELIM: '🎰', DELIM + r'game_die' + DELIM: '🎲', DELIM + r'jigsaw' + DELIM: '🧩', DELIM + r'teddy_bear' + DELIM: '🧸', DELIM + r'pinata' + DELIM: '🪅', DELIM + r'nesting_dolls' + DELIM: '🪆', DELIM + r'spades' + DELIM: '♠️', DELIM + r'hearts' + DELIM: '♥️', DELIM + r'diamonds' + DELIM: '♦️', DELIM + r'clubs' + DELIM: '♣️', DELIM + r'chess_pawn' + DELIM: '♟️', DELIM + 
r'black_joker' + DELIM: '🃏', DELIM + r'mahjong' + DELIM: '🀄', DELIM + r'flower_playing_cards' + DELIM: '🎴', # # Arts & Crafts # DELIM + r'performing_arts' + DELIM: '🎭', DELIM + r'framed_picture' + DELIM: '🖼️', DELIM + r'art' + DELIM: '🎨', DELIM + r'thread' + DELIM: '🧵', DELIM + r'sewing_needle' + DELIM: '🪡', DELIM + r'yarn' + DELIM: '🧶', DELIM + r'knot' + DELIM: '🪢', # # Clothing # DELIM + r'eyeglasses' + DELIM: '👓', DELIM + r'dark_sunglasses' + DELIM: '🕶️', DELIM + r'goggles' + DELIM: '🥽', DELIM + r'lab_coat' + DELIM: '🥼', DELIM + r'safety_vest' + DELIM: '🦺', DELIM + r'necktie' + DELIM: '👔', DELIM + r't?shirt' + DELIM: '👕', DELIM + r'jeans' + DELIM: '👖', DELIM + r'scarf' + DELIM: '🧣', DELIM + r'gloves' + DELIM: '🧤', DELIM + r'coat' + DELIM: '🧥', DELIM + r'socks' + DELIM: '🧦', DELIM + r'dress' + DELIM: '👗', DELIM + r'kimono' + DELIM: '👘', DELIM + r'sari' + DELIM: '🥻', DELIM + r'one_piece_swimsuit' + DELIM: '🩱', DELIM + r'swim_brief' + DELIM: '🩲', DELIM + r'shorts' + DELIM: '🩳', DELIM + r'bikini' + DELIM: '👙', DELIM + r'womans_clothes' + DELIM: '👚', DELIM + r'purse' + DELIM: '👛', DELIM + r'handbag' + DELIM: '👜', DELIM + r'pouch' + DELIM: '👝', DELIM + r'shopping' + DELIM: '🛍️', DELIM + r'school_satchel' + DELIM: '🎒', DELIM + r'thong_sandal' + DELIM: '🩴', DELIM + r'(mans_)?shoe' + DELIM: '👞', DELIM + r'athletic_shoe' + DELIM: '👟', DELIM + r'hiking_boot' + DELIM: '🥾', DELIM + r'flat_shoe' + DELIM: '🥿', DELIM + r'high_heel' + DELIM: '👠', DELIM + r'sandal' + DELIM: '👡', DELIM + r'ballet_shoes' + DELIM: '🩰', DELIM + r'boot' + DELIM: '👢', DELIM + r'crown' + DELIM: '👑', DELIM + r'womans_hat' + DELIM: '👒', DELIM + r'tophat' + DELIM: '🎩', DELIM + r'mortar_board' + DELIM: '🎓', DELIM + r'billed_cap' + DELIM: '🧢', DELIM + r'military_helmet' + DELIM: '🪖', DELIM + r'rescue_worker_helmet' + DELIM: '⛑️', DELIM + r'prayer_beads' + DELIM: '📿', DELIM + r'lipstick' + DELIM: '💄', DELIM + r'ring' + DELIM: '💍', DELIM + r'gem' + DELIM: '💎', # # Sound # DELIM + r'mute' + DELIM: '🔇', DELIM + r'speaker' + DELIM: '🔈', DELIM + r'sound' + DELIM: '🔉', DELIM + r'loud_sound' + DELIM: '🔊', DELIM + r'loudspeaker' + DELIM: '📢', DELIM + r'mega' + DELIM: '📣', DELIM + r'postal_horn' + DELIM: '📯', DELIM + r'bell' + DELIM: '🔔', DELIM + r'no_bell' + DELIM: '🔕', # # Music # DELIM + r'musical_score' + DELIM: '🎼', DELIM + r'musical_note' + DELIM: '🎵', DELIM + r'notes' + DELIM: '🎶', DELIM + r'studio_microphone' + DELIM: '🎙️', DELIM + r'level_slider' + DELIM: '🎚️', DELIM + r'control_knobs' + DELIM: '🎛️', DELIM + r'microphone' + DELIM: '🎤', DELIM + r'headphones' + DELIM: '🎧', DELIM + r'radio' + DELIM: '📻', # # Musical Instrument # DELIM + r'saxophone' + DELIM: '🎷', DELIM + r'accordion' + DELIM: '🪗', DELIM + r'guitar' + DELIM: '🎸', DELIM + r'musical_keyboard' + DELIM: '🎹', DELIM + r'trumpet' + DELIM: '🎺', DELIM + r'violin' + DELIM: '🎻', DELIM + r'banjo' + DELIM: '🪕', DELIM + r'drum' + DELIM: '🥁', DELIM + r'long_drum' + DELIM: '🪘', # # Phone # DELIM + r'iphone' + DELIM: '📱', DELIM + r'calling' + DELIM: '📲', DELIM + r'phone' + DELIM: '☎️', DELIM + r'telephone(_receiver)?' 
+ DELIM: '📞', DELIM + r'pager' + DELIM: '📟', DELIM + r'fax' + DELIM: '📠', # # Computer # DELIM + r'battery' + DELIM: '🔋', DELIM + r'electric_plug' + DELIM: '🔌', DELIM + r'computer' + DELIM: '💻', DELIM + r'desktop_computer' + DELIM: '🖥️', DELIM + r'printer' + DELIM: '🖨️', DELIM + r'keyboard' + DELIM: '⌨️', DELIM + r'computer_mouse' + DELIM: '🖱️', DELIM + r'trackball' + DELIM: '🖲️', DELIM + r'minidisc' + DELIM: '💽', DELIM + r'floppy_disk' + DELIM: '💾', DELIM + r'cd' + DELIM: '💿', DELIM + r'dvd' + DELIM: '📀', DELIM + r'abacus' + DELIM: '🧮', # # Light & Video # DELIM + r'movie_camera' + DELIM: '🎥', DELIM + r'film_strip' + DELIM: '🎞️', DELIM + r'film_projector' + DELIM: '📽️', DELIM + r'clapper' + DELIM: '🎬', DELIM + r'tv' + DELIM: '📺', DELIM + r'camera' + DELIM: '📷', DELIM + r'camera_flash' + DELIM: '📸', DELIM + r'video_camera' + DELIM: '📹', DELIM + r'vhs' + DELIM: '📼', DELIM + r'mag' + DELIM: '🔍', DELIM + r'mag_right' + DELIM: '🔎', DELIM + r'candle' + DELIM: '🕯️', DELIM + r'bulb' + DELIM: '💡', DELIM + r'flashlight' + DELIM: '🔦', DELIM + r'(izakaya_)?lantern' + DELIM: '🏮', DELIM + r'diya_lamp' + DELIM: '🪔', # # Book Paper # DELIM + r'notebook_with_decorative_cover' + DELIM: '📔', DELIM + r'closed_book' + DELIM: '📕', DELIM + r'(open_)?book' + DELIM: '📖', DELIM + r'green_book' + DELIM: '📗', DELIM + r'blue_book' + DELIM: '📘', DELIM + r'orange_book' + DELIM: '📙', DELIM + r'books' + DELIM: '📚', DELIM + r'notebook' + DELIM: '📓', DELIM + r'ledger' + DELIM: '📒', DELIM + r'page_with_curl' + DELIM: '📃', DELIM + r'scroll' + DELIM: '📜', DELIM + r'page_facing_up' + DELIM: '📄', DELIM + r'newspaper' + DELIM: '📰', DELIM + r'newspaper_roll' + DELIM: '🗞️', DELIM + r'bookmark_tabs' + DELIM: '📑', DELIM + r'bookmark' + DELIM: '🔖', DELIM + r'label' + DELIM: '🏷️', # # Money # DELIM + r'moneybag' + DELIM: '💰', DELIM + r'coin' + DELIM: '🪙', DELIM + r'yen' + DELIM: '💴', DELIM + r'dollar' + DELIM: '💵', DELIM + r'euro' + DELIM: '💶', DELIM + r'pound' + DELIM: '💷', DELIM + r'money_with_wings' + DELIM: '💸', DELIM + r'credit_card' + DELIM: '💳', DELIM + r'receipt' + DELIM: '🧾', DELIM + r'chart' + DELIM: '💹', # # Mail # DELIM + r'envelope' + DELIM: '✉️', DELIM + r'e-?mail' + DELIM: '📧', DELIM + r'incoming_envelope' + DELIM: '📨', DELIM + r'envelope_with_arrow' + DELIM: '📩', DELIM + r'outbox_tray' + DELIM: '📤', DELIM + r'inbox_tray' + DELIM: '📥', DELIM + r'package' + DELIM: '📦', DELIM + r'mailbox' + DELIM: '📫', DELIM + r'mailbox_closed' + DELIM: '📪', DELIM + r'mailbox_with_mail' + DELIM: '📬', DELIM + r'mailbox_with_no_mail' + DELIM: '📭', DELIM + r'postbox' + DELIM: '📮', DELIM + r'ballot_box' + DELIM: '🗳️', # # Writing # DELIM + r'pencil2' + DELIM: '✏️', DELIM + r'black_nib' + DELIM: '✒️', DELIM + r'fountain_pen' + DELIM: '🖋️', DELIM + r'pen' + DELIM: '🖊️', DELIM + r'paintbrush' + DELIM: '🖌️', DELIM + r'crayon' + DELIM: '🖍️', DELIM + r'(memo|pencil)' + DELIM: '📝', # # Office # DELIM + r'briefcase' + DELIM: '💼', DELIM + r'file_folder' + DELIM: '📁', DELIM + r'open_file_folder' + DELIM: '📂', DELIM + r'card_index_dividers' + DELIM: '🗂️', DELIM + r'date' + DELIM: '📅', DELIM + r'calendar' + DELIM: '📆', DELIM + r'spiral_notepad' + DELIM: '🗒️', DELIM + r'spiral_calendar' + DELIM: '🗓️', DELIM + r'card_index' + DELIM: '📇', DELIM + r'chart_with_upwards_trend' + DELIM: '📈', DELIM + r'chart_with_downwards_trend' + DELIM: '📉', DELIM + r'bar_chart' + DELIM: '📊', DELIM + r'clipboard' + DELIM: '📋', DELIM + r'pushpin' + DELIM: '📌', DELIM + r'round_pushpin' + DELIM: '📍', DELIM + r'paperclip' + DELIM: '📎', DELIM + r'paperclips' + DELIM: '🖇️', DELIM + 
r'straight_ruler' + DELIM: '📏', DELIM + r'triangular_ruler' + DELIM: '📐', DELIM + r'scissors' + DELIM: '✂️', DELIM + r'card_file_box' + DELIM: '🗃️', DELIM + r'file_cabinet' + DELIM: '🗄️', DELIM + r'wastebasket' + DELIM: '🗑️', # # Lock # DELIM + r'lock' + DELIM: '🔒', DELIM + r'unlock' + DELIM: '🔓', DELIM + r'lock_with_ink_pen' + DELIM: '🔏', DELIM + r'closed_lock_with_key' + DELIM: '🔐', DELIM + r'key' + DELIM: '🔑', DELIM + r'old_key' + DELIM: '🗝️', # # Tool # DELIM + r'hammer' + DELIM: '🔨', DELIM + r'axe' + DELIM: '🪓', DELIM + r'pick' + DELIM: '⛏️', DELIM + r'hammer_and_pick' + DELIM: '⚒️', DELIM + r'hammer_and_wrench' + DELIM: '🛠️', DELIM + r'dagger' + DELIM: '🗡️', DELIM + r'crossed_swords' + DELIM: '⚔️', DELIM + r'bomb' + DELIM: '💣', DELIM + r'boomerang' + DELIM: '🪃', DELIM + r'bow_and_arrow' + DELIM: '🏹', DELIM + r'shield' + DELIM: '🛡️', DELIM + r'carpentry_saw' + DELIM: '🪚', DELIM + r'wrench' + DELIM: '🔧', DELIM + r'screwdriver' + DELIM: '🪛', DELIM + r'nut_and_bolt' + DELIM: '🔩', DELIM + r'gear' + DELIM: '⚙️', DELIM + r'clamp' + DELIM: '🗜️', DELIM + r'balance_scale' + DELIM: '⚖️', DELIM + r'probing_cane' + DELIM: '🦯', DELIM + r'link' + DELIM: '🔗', DELIM + r'chains' + DELIM: '⛓️', DELIM + r'hook' + DELIM: '🪝', DELIM + r'toolbox' + DELIM: '🧰', DELIM + r'magnet' + DELIM: '🧲', DELIM + r'ladder' + DELIM: '🪜', # # Science # DELIM + r'alembic' + DELIM: '⚗️', DELIM + r'test_tube' + DELIM: '🧪', DELIM + r'petri_dish' + DELIM: '🧫', DELIM + r'dna' + DELIM: '🧬', DELIM + r'microscope' + DELIM: '🔬', DELIM + r'telescope' + DELIM: '🔭', DELIM + r'satellite' + DELIM: '📡', # # Medical # DELIM + r'syringe' + DELIM: '💉', DELIM + r'drop_of_blood' + DELIM: '🩸', DELIM + r'pill' + DELIM: '💊', DELIM + r'adhesive_bandage' + DELIM: '🩹', DELIM + r'stethoscope' + DELIM: '🩺', # # Household # DELIM + r'door' + DELIM: '🚪', DELIM + r'elevator' + DELIM: '🛗', DELIM + r'mirror' + DELIM: '🪞', DELIM + r'window' + DELIM: '🪟', DELIM + r'bed' + DELIM: '🛏️', DELIM + r'couch_and_lamp' + DELIM: '🛋️', DELIM + r'chair' + DELIM: '🪑', DELIM + r'toilet' + DELIM: '🚽', DELIM + r'plunger' + DELIM: '🪠', DELIM + r'shower' + DELIM: '🚿', DELIM + r'bathtub' + DELIM: '🛁', DELIM + r'mouse_trap' + DELIM: '🪤', DELIM + r'razor' + DELIM: '🪒', DELIM + r'lotion_bottle' + DELIM: '🧴', DELIM + r'safety_pin' + DELIM: '🧷', DELIM + r'broom' + DELIM: '🧹', DELIM + r'basket' + DELIM: '🧺', DELIM + r'roll_of_paper' + DELIM: '🧻', DELIM + r'bucket' + DELIM: '🪣', DELIM + r'soap' + DELIM: '🧼', DELIM + r'toothbrush' + DELIM: '🪥', DELIM + r'sponge' + DELIM: '🧽', DELIM + r'fire_extinguisher' + DELIM: '🧯', DELIM + r'shopping_cart' + DELIM: '🛒', # # Other Object # DELIM + r'smoking' + DELIM: '🚬', DELIM + r'coffin' + DELIM: '⚰️', DELIM + r'headstone' + DELIM: '🪦', DELIM + r'funeral_urn' + DELIM: '⚱️', DELIM + r'nazar_amulet' + DELIM: '🧿', DELIM + r'moyai' + DELIM: '🗿', DELIM + r'placard' + DELIM: '🪧', # # Transport Sign # DELIM + r'atm' + DELIM: '🏧', DELIM + r'put_litter_in_its_place' + DELIM: '🚮', DELIM + r'potable_water' + DELIM: '🚰', DELIM + r'wheelchair' + DELIM: '♿', DELIM + r'mens' + DELIM: '🚹', DELIM + r'womens' + DELIM: '🚺', DELIM + r'restroom' + DELIM: '🚻', DELIM + r'baby_symbol' + DELIM: '🚼', DELIM + r'wc' + DELIM: '🚾', DELIM + r'passport_control' + DELIM: '🛂', DELIM + r'customs' + DELIM: '🛃', DELIM + r'baggage_claim' + DELIM: '🛄', DELIM + r'left_luggage' + DELIM: '🛅', # # Warning # DELIM + r'warning' + DELIM: '⚠️', DELIM + r'children_crossing' + DELIM: '🚸', DELIM + r'no_entry' + DELIM: '⛔', DELIM + r'no_entry_sign' + DELIM: '🚫', DELIM + r'no_bicycles' + DELIM: 
'🚳', DELIM + r'no_smoking' + DELIM: '🚭', DELIM + r'do_not_litter' + DELIM: '🚯', DELIM + r'non-potable_water' + DELIM: '🚱', DELIM + r'no_pedestrians' + DELIM: '🚷', DELIM + r'no_mobile_phones' + DELIM: '📵', DELIM + r'underage' + DELIM: '🔞', DELIM + r'radioactive' + DELIM: '☢️', DELIM + r'biohazard' + DELIM: '☣️', # # Arrow # DELIM + r'arrow_up' + DELIM: '⬆️', DELIM + r'arrow_upper_right' + DELIM: '↗️', DELIM + r'arrow_right' + DELIM: '➡️', DELIM + r'arrow_lower_right' + DELIM: '↘️', DELIM + r'arrow_down' + DELIM: '⬇️', DELIM + r'arrow_lower_left' + DELIM: '↙️', DELIM + r'arrow_left' + DELIM: '⬅️', DELIM + r'arrow_upper_left' + DELIM: '↖️', DELIM + r'arrow_up_down' + DELIM: '↕️', DELIM + r'left_right_arrow' + DELIM: '↔️', DELIM + r'leftwards_arrow_with_hook' + DELIM: '↩️', DELIM + r'arrow_right_hook' + DELIM: '↪️', DELIM + r'arrow_heading_up' + DELIM: '⤴️', DELIM + r'arrow_heading_down' + DELIM: '⤵️', DELIM + r'arrows_clockwise' + DELIM: '🔃', DELIM + r'arrows_counterclockwise' + DELIM: '🔄', DELIM + r'back' + DELIM: '🔙', DELIM + r'end' + DELIM: '🔚', DELIM + r'on' + DELIM: '🔛', DELIM + r'soon' + DELIM: '🔜', DELIM + r'top' + DELIM: '🔝', # # Religion # DELIM + r'place_of_worship' + DELIM: '🛐', DELIM + r'atom_symbol' + DELIM: '⚛️', DELIM + r'om' + DELIM: '🕉️', DELIM + r'star_of_david' + DELIM: '✡️', DELIM + r'wheel_of_dharma' + DELIM: '☸️', DELIM + r'yin_yang' + DELIM: '☯️', DELIM + r'latin_cross' + DELIM: '✝️', DELIM + r'orthodox_cross' + DELIM: '☦️', DELIM + r'star_and_crescent' + DELIM: '☪️', DELIM + r'peace_symbol' + DELIM: '☮️', DELIM + r'menorah' + DELIM: '🕎', DELIM + r'six_pointed_star' + DELIM: '🔯', # # Zodiac # DELIM + r'aries' + DELIM: '♈', DELIM + r'taurus' + DELIM: '♉', DELIM + r'gemini' + DELIM: '♊', DELIM + r'cancer' + DELIM: '♋', DELIM + r'leo' + DELIM: '♌', DELIM + r'virgo' + DELIM: '♍', DELIM + r'libra' + DELIM: '♎', DELIM + r'scorpius' + DELIM: '♏', DELIM + r'sagittarius' + DELIM: '♐', DELIM + r'capricorn' + DELIM: '♑', DELIM + r'aquarius' + DELIM: '♒', DELIM + r'pisces' + DELIM: '♓', DELIM + r'ophiuchus' + DELIM: '⛎', # # Av Symbol # DELIM + r'twisted_rightwards_arrows' + DELIM: '🔀', DELIM + r'repeat' + DELIM: '🔁', DELIM + r'repeat_one' + DELIM: '🔂', DELIM + r'arrow_forward' + DELIM: '▶️', DELIM + r'fast_forward' + DELIM: '⏩', DELIM + r'next_track_button' + DELIM: '⏭️', DELIM + r'play_or_pause_button' + DELIM: '⏯️', DELIM + r'arrow_backward' + DELIM: '◀️', DELIM + r'rewind' + DELIM: '⏪', DELIM + r'previous_track_button' + DELIM: '⏮️', DELIM + r'arrow_up_small' + DELIM: '🔼', DELIM + r'arrow_double_up' + DELIM: '⏫', DELIM + r'arrow_down_small' + DELIM: '🔽', DELIM + r'arrow_double_down' + DELIM: '⏬', DELIM + r'pause_button' + DELIM: '⏸️', DELIM + r'stop_button' + DELIM: '⏹️', DELIM + r'record_button' + DELIM: '⏺️', DELIM + r'eject_button' + DELIM: '⏏️', DELIM + r'cinema' + DELIM: '🎦', DELIM + r'low_brightness' + DELIM: '🔅', DELIM + r'high_brightness' + DELIM: '🔆', DELIM + r'signal_strength' + DELIM: '📶', DELIM + r'vibration_mode' + DELIM: '📳', DELIM + r'mobile_phone_off' + DELIM: '📴', # # Gender # DELIM + r'female_sign' + DELIM: '♀️', DELIM + r'male_sign' + DELIM: '♂️', DELIM + r'transgender_symbol' + DELIM: '⚧️', # # Math # DELIM + r'heavy_multiplication_x' + DELIM: '✖️', DELIM + r'heavy_plus_sign' + DELIM: '➕', DELIM + r'heavy_minus_sign' + DELIM: '➖', DELIM + r'heavy_division_sign' + DELIM: '➗', DELIM + r'infinity' + DELIM: '♾️', # # Punctuation # DELIM + r'bangbang' + DELIM: '‼️', DELIM + r'interrobang' + DELIM: '⁉️', DELIM + r'question' + DELIM: '❓', DELIM + r'grey_question' + 
DELIM: '❔', DELIM + r'grey_exclamation' + DELIM: '❕', DELIM + r'(heavy_exclamation_mark|exclamation)' + DELIM: '❗', DELIM + r'wavy_dash' + DELIM: '〰️', # # Currency # DELIM + r'currency_exchange' + DELIM: '💱', DELIM + r'heavy_dollar_sign' + DELIM: '💲', # # Other Symbol # DELIM + r'medical_symbol' + DELIM: '⚕️', DELIM + r'recycle' + DELIM: '♻️', DELIM + r'fleur_de_lis' + DELIM: '⚜️', DELIM + r'trident' + DELIM: '🔱', DELIM + r'name_badge' + DELIM: '📛', DELIM + r'beginner' + DELIM: '🔰', DELIM + r'o' + DELIM: '⭕', DELIM + r'white_check_mark' + DELIM: '✅', DELIM + r'ballot_box_with_check' + DELIM: '☑️', DELIM + r'heavy_check_mark' + DELIM: '✔️', DELIM + r'x' + DELIM: '❌', DELIM + r'negative_squared_cross_mark' + DELIM: '❎', DELIM + r'curly_loop' + DELIM: '➰', DELIM + r'loop' + DELIM: '➿', DELIM + r'part_alternation_mark' + DELIM: '〽️', DELIM + r'eight_spoked_asterisk' + DELIM: '✳️', DELIM + r'eight_pointed_black_star' + DELIM: '✴️', DELIM + r'sparkle' + DELIM: '❇️', DELIM + r'copyright' + DELIM: '©️', DELIM + r'registered' + DELIM: '®️', DELIM + r'tm' + DELIM: '™️', # # Keycap # DELIM + r'hash' + DELIM: '#️⃣', DELIM + r'asterisk' + DELIM: '*️⃣', DELIM + r'zero' + DELIM: '0️⃣', DELIM + r'one' + DELIM: '1️⃣', DELIM + r'two' + DELIM: '2️⃣', DELIM + r'three' + DELIM: '3️⃣', DELIM + r'four' + DELIM: '4️⃣', DELIM + r'five' + DELIM: '5️⃣', DELIM + r'six' + DELIM: '6️⃣', DELIM + r'seven' + DELIM: '7️⃣', DELIM + r'eight' + DELIM: '8️⃣', DELIM + r'nine' + DELIM: '9️⃣', DELIM + r'keycap_ten' + DELIM: '🔟', # # Alphanum # DELIM + r'capital_abcd' + DELIM: '🔠', DELIM + r'abcd' + DELIM: '🔡', DELIM + r'1234' + DELIM: '🔢', DELIM + r'symbols' + DELIM: '🔣', DELIM + r'abc' + DELIM: '🔤', DELIM + r'a' + DELIM: '🅰️', DELIM + r'ab' + DELIM: '🆎', DELIM + r'b' + DELIM: '🅱️', DELIM + r'cl' + DELIM: '🆑', DELIM + r'cool' + DELIM: '🆒', DELIM + r'free' + DELIM: '🆓', DELIM + r'information_source' + DELIM: 'ℹ️', DELIM + r'id' + DELIM: '🆔', DELIM + r'm' + DELIM: 'Ⓜ️', DELIM + r'new' + DELIM: '🆕', DELIM + r'ng' + DELIM: '🆖', DELIM + r'o2' + DELIM: '🅾️', DELIM + r'ok' + DELIM: '🆗', DELIM + r'parking' + DELIM: '🅿️', DELIM + r'sos' + DELIM: '🆘', DELIM + r'up' + DELIM: '🆙', DELIM + r'vs' + DELIM: '🆚', DELIM + r'koko' + DELIM: '🈁', DELIM + r'sa' + DELIM: '🈂️', DELIM + r'u6708' + DELIM: '🈷️', DELIM + r'u6709' + DELIM: '🈶', DELIM + r'u6307' + DELIM: '🈯', DELIM + r'ideograph_advantage' + DELIM: '🉐', DELIM + r'u5272' + DELIM: '🈹', DELIM + r'u7121' + DELIM: '🈚', DELIM + r'u7981' + DELIM: '🈲', DELIM + r'accept' + DELIM: '🉑', DELIM + r'u7533' + DELIM: '🈸', DELIM + r'u5408' + DELIM: '🈴', DELIM + r'u7a7a' + DELIM: '🈳', DELIM + r'congratulations' + DELIM: '㊗️', DELIM + r'secret' + DELIM: '㊙️', DELIM + r'u55b6' + DELIM: '🈺', DELIM + r'u6e80' + DELIM: '🈵', # # Geometric # DELIM + r'red_circle' + DELIM: '🔴', DELIM + r'orange_circle' + DELIM: '🟠', DELIM + r'yellow_circle' + DELIM: '🟡', DELIM + r'green_circle' + DELIM: '🟢', DELIM + r'large_blue_circle' + DELIM: '🔵', DELIM + r'purple_circle' + DELIM: '🟣', DELIM + r'brown_circle' + DELIM: '🟤', DELIM + r'black_circle' + DELIM: '⚫', DELIM + r'white_circle' + DELIM: '⚪', DELIM + r'red_square' + DELIM: '🟥', DELIM + r'orange_square' + DELIM: '🟧', DELIM + r'yellow_square' + DELIM: '🟨', DELIM + r'green_square' + DELIM: '🟩', DELIM + r'blue_square' + DELIM: '🟦', DELIM + r'purple_square' + DELIM: '🟪', DELIM + r'brown_square' + DELIM: '🟫', DELIM + r'black_large_square' + DELIM: '⬛', DELIM + r'white_large_square' + DELIM: '⬜', DELIM + r'black_medium_square' + DELIM: '◼️', DELIM + r'white_medium_square' + DELIM: 
'◻️', DELIM + r'black_medium_small_square' + DELIM: '◾', DELIM + r'white_medium_small_square' + DELIM: '◽', DELIM + r'black_small_square' + DELIM: '▪️', DELIM + r'white_small_square' + DELIM: '▫️', DELIM + r'large_orange_diamond' + DELIM: '🔶', DELIM + r'large_blue_diamond' + DELIM: '🔷', DELIM + r'small_orange_diamond' + DELIM: '🔸', DELIM + r'small_blue_diamond' + DELIM: '🔹', DELIM + r'small_red_triangle' + DELIM: '🔺', DELIM + r'small_red_triangle_down' + DELIM: '🔻', DELIM + r'diamond_shape_with_a_dot_inside' + DELIM: '💠', DELIM + r'radio_button' + DELIM: '🔘', DELIM + r'white_square_button' + DELIM: '🔳', DELIM + r'black_square_button' + DELIM: '🔲', # # Flag # DELIM + r'checkered_flag' + DELIM: '🏁', DELIM + r'triangular_flag_on_post' + DELIM: '🚩', DELIM + r'crossed_flags' + DELIM: '🎌', DELIM + r'black_flag' + DELIM: '🏴', DELIM + r'white_flag' + DELIM: '🏳️', DELIM + r'rainbow_flag' + DELIM: '🏳️‍🌈', DELIM + r'transgender_flag' + DELIM: '🏳️‍⚧️', DELIM + r'pirate_flag' + DELIM: '🏴‍☠️', # # Country Flag # DELIM + r'ascension_island' + DELIM: '🇦🇨', DELIM + r'andorra' + DELIM: '🇦🇩', DELIM + r'united_arab_emirates' + DELIM: '🇦🇪', DELIM + r'afghanistan' + DELIM: '🇦🇫', DELIM + r'antigua_barbuda' + DELIM: '🇦🇬', DELIM + r'anguilla' + DELIM: '🇦🇮', DELIM + r'albania' + DELIM: '🇦🇱', DELIM + r'armenia' + DELIM: '🇦🇲', DELIM + r'angola' + DELIM: '🇦🇴', DELIM + r'antarctica' + DELIM: '🇦🇶', DELIM + r'argentina' + DELIM: '🇦🇷', DELIM + r'american_samoa' + DELIM: '🇦🇸', DELIM + r'austria' + DELIM: '🇦🇹', DELIM + r'australia' + DELIM: '🇦🇺', DELIM + r'aruba' + DELIM: '🇦🇼', DELIM + r'aland_islands' + DELIM: '🇦🇽', DELIM + r'azerbaijan' + DELIM: '🇦🇿', DELIM + r'bosnia_herzegovina' + DELIM: '🇧🇦', DELIM + r'barbados' + DELIM: '🇧🇧', DELIM + r'bangladesh' + DELIM: '🇧🇩', DELIM + r'belgium' + DELIM: '🇧🇪', DELIM + r'burkina_faso' + DELIM: '🇧🇫', DELIM + r'bulgaria' + DELIM: '🇧🇬', DELIM + r'bahrain' + DELIM: '🇧🇭', DELIM + r'burundi' + DELIM: '🇧🇮', DELIM + r'benin' + DELIM: '🇧🇯', DELIM + r'st_barthelemy' + DELIM: '🇧🇱', DELIM + r'bermuda' + DELIM: '🇧🇲', DELIM + r'brunei' + DELIM: '🇧🇳', DELIM + r'bolivia' + DELIM: '🇧🇴', DELIM + r'caribbean_netherlands' + DELIM: '🇧🇶', DELIM + r'brazil' + DELIM: '🇧🇷', DELIM + r'bahamas' + DELIM: '🇧🇸', DELIM + r'bhutan' + DELIM: '🇧🇹', DELIM + r'bouvet_island' + DELIM: '🇧🇻', DELIM + r'botswana' + DELIM: '🇧🇼', DELIM + r'belarus' + DELIM: '🇧🇾', DELIM + r'belize' + DELIM: '🇧🇿', DELIM + r'canada' + DELIM: '🇨🇦', DELIM + r'cocos_islands' + DELIM: '🇨🇨', DELIM + r'congo_kinshasa' + DELIM: '🇨🇩', DELIM + r'central_african_republic' + DELIM: '🇨🇫', DELIM + r'congo_brazzaville' + DELIM: '🇨🇬', DELIM + r'switzerland' + DELIM: '🇨🇭', DELIM + r'cote_divoire' + DELIM: '🇨🇮', DELIM + r'cook_islands' + DELIM: '🇨🇰', DELIM + r'chile' + DELIM: '🇨🇱', DELIM + r'cameroon' + DELIM: '🇨🇲', DELIM + r'cn' + DELIM: '🇨🇳', DELIM + r'colombia' + DELIM: '🇨🇴', DELIM + r'clipperton_island' + DELIM: '🇨🇵', DELIM + r'costa_rica' + DELIM: '🇨🇷', DELIM + r'cuba' + DELIM: '🇨🇺', DELIM + r'cape_verde' + DELIM: '🇨🇻', DELIM + r'curacao' + DELIM: '🇨🇼', DELIM + r'christmas_island' + DELIM: '🇨🇽', DELIM + r'cyprus' + DELIM: '🇨🇾', DELIM + r'czech_republic' + DELIM: '🇨🇿', DELIM + r'de' + DELIM: '🇩🇪', DELIM + r'diego_garcia' + DELIM: '🇩🇬', DELIM + r'djibouti' + DELIM: '🇩🇯', DELIM + r'denmark' + DELIM: '🇩🇰', DELIM + r'dominica' + DELIM: '🇩🇲', DELIM + r'dominican_republic' + DELIM: '🇩🇴', DELIM + r'algeria' + DELIM: '🇩🇿', DELIM + r'ceuta_melilla' + DELIM: '🇪🇦', DELIM + r'ecuador' + DELIM: '🇪🇨', DELIM + r'estonia' + DELIM: '🇪🇪', DELIM + r'egypt' + DELIM: '🇪🇬', 
DELIM + r'western_sahara' + DELIM: '🇪🇭', DELIM + r'eritrea' + DELIM: '🇪🇷', DELIM + r'es' + DELIM: '🇪🇸', DELIM + r'ethiopia' + DELIM: '🇪🇹', DELIM + r'(eu|european_union)' + DELIM: '🇪🇺', DELIM + r'finland' + DELIM: '🇫🇮', DELIM + r'fiji' + DELIM: '🇫🇯', DELIM + r'falkland_islands' + DELIM: '🇫🇰', DELIM + r'micronesia' + DELIM: '🇫🇲', DELIM + r'faroe_islands' + DELIM: '🇫🇴', DELIM + r'fr' + DELIM: '🇫🇷', DELIM + r'gabon' + DELIM: '🇬🇦', DELIM + r'(uk|gb)' + DELIM: '🇬🇧', DELIM + r'grenada' + DELIM: '🇬🇩', DELIM + r'georgia' + DELIM: '🇬🇪', DELIM + r'french_guiana' + DELIM: '🇬🇫', DELIM + r'guernsey' + DELIM: '🇬🇬', DELIM + r'ghana' + DELIM: '🇬🇭', DELIM + r'gibraltar' + DELIM: '🇬🇮', DELIM + r'greenland' + DELIM: '🇬🇱', DELIM + r'gambia' + DELIM: '🇬🇲', DELIM + r'guinea' + DELIM: '🇬🇳', DELIM + r'guadeloupe' + DELIM: '🇬🇵', DELIM + r'equatorial_guinea' + DELIM: '🇬🇶', DELIM + r'greece' + DELIM: '🇬🇷', DELIM + r'south_georgia_south_sandwich_islands' + DELIM: '🇬🇸', DELIM + r'guatemala' + DELIM: '🇬🇹', DELIM + r'guam' + DELIM: '🇬🇺', DELIM + r'guinea_bissau' + DELIM: '🇬🇼', DELIM + r'guyana' + DELIM: '🇬🇾', DELIM + r'hong_kong' + DELIM: '🇭🇰', DELIM + r'heard_mcdonald_islands' + DELIM: '🇭🇲', DELIM + r'honduras' + DELIM: '🇭🇳', DELIM + r'croatia' + DELIM: '🇭🇷', DELIM + r'haiti' + DELIM: '🇭🇹', DELIM + r'hungary' + DELIM: '🇭🇺', DELIM + r'canary_islands' + DELIM: '🇮🇨', DELIM + r'indonesia' + DELIM: '🇮🇩', DELIM + r'ireland' + DELIM: '🇮🇪', DELIM + r'israel' + DELIM: '🇮🇱', DELIM + r'isle_of_man' + DELIM: '🇮🇲', DELIM + r'india' + DELIM: '🇮🇳', DELIM + r'british_indian_ocean_territory' + DELIM: '🇮🇴', DELIM + r'iraq' + DELIM: '🇮🇶', DELIM + r'iran' + DELIM: '🇮🇷', DELIM + r'iceland' + DELIM: '🇮🇸', DELIM + r'it' + DELIM: '🇮🇹', DELIM + r'jersey' + DELIM: '🇯🇪', DELIM + r'jamaica' + DELIM: '🇯🇲', DELIM + r'jordan' + DELIM: '🇯🇴', DELIM + r'jp' + DELIM: '🇯🇵', DELIM + r'kenya' + DELIM: '🇰🇪', DELIM + r'kyrgyzstan' + DELIM: '🇰🇬', DELIM + r'cambodia' + DELIM: '🇰🇭', DELIM + r'kiribati' + DELIM: '🇰🇮', DELIM + r'comoros' + DELIM: '🇰🇲', DELIM + r'st_kitts_nevis' + DELIM: '🇰🇳', DELIM + r'north_korea' + DELIM: '🇰🇵', DELIM + r'kr' + DELIM: '🇰🇷', DELIM + r'kuwait' + DELIM: '🇰🇼', DELIM + r'cayman_islands' + DELIM: '🇰🇾', DELIM + r'kazakhstan' + DELIM: '🇰🇿', DELIM + r'laos' + DELIM: '🇱🇦', DELIM + r'lebanon' + DELIM: '🇱🇧', DELIM + r'st_lucia' + DELIM: '🇱🇨', DELIM + r'liechtenstein' + DELIM: '🇱🇮', DELIM + r'sri_lanka' + DELIM: '🇱🇰', DELIM + r'liberia' + DELIM: '🇱🇷', DELIM + r'lesotho' + DELIM: '🇱🇸', DELIM + r'lithuania' + DELIM: '🇱🇹', DELIM + r'luxembourg' + DELIM: '🇱🇺', DELIM + r'latvia' + DELIM: '🇱🇻', DELIM + r'libya' + DELIM: '🇱🇾', DELIM + r'morocco' + DELIM: '🇲🇦', DELIM + r'monaco' + DELIM: '🇲🇨', DELIM + r'moldova' + DELIM: '🇲🇩', DELIM + r'montenegro' + DELIM: '🇲🇪', DELIM + r'st_martin' + DELIM: '🇲🇫', DELIM + r'madagascar' + DELIM: '🇲🇬', DELIM + r'marshall_islands' + DELIM: '🇲🇭', DELIM + r'macedonia' + DELIM: '🇲🇰', DELIM + r'mali' + DELIM: '🇲🇱', DELIM + r'myanmar' + DELIM: '🇲🇲', DELIM + r'mongolia' + DELIM: '🇲🇳', DELIM + r'macau' + DELIM: '🇲🇴', DELIM + r'northern_mariana_islands' + DELIM: '🇲🇵', DELIM + r'martinique' + DELIM: '🇲🇶', DELIM + r'mauritania' + DELIM: '🇲🇷', DELIM + r'montserrat' + DELIM: '🇲🇸', DELIM + r'malta' + DELIM: '🇲🇹', DELIM + r'mauritius' + DELIM: '🇲🇺', DELIM + r'maldives' + DELIM: '🇲🇻', DELIM + r'malawi' + DELIM: '🇲🇼', DELIM + r'mexico' + DELIM: '🇲🇽', DELIM + r'malaysia' + DELIM: '🇲🇾', DELIM + r'mozambique' + DELIM: '🇲🇿', DELIM + r'namibia' + DELIM: '🇳🇦', DELIM + r'new_caledonia' + DELIM: '🇳🇨', DELIM + r'niger' + DELIM: '🇳🇪', DELIM + 
r'norfolk_island' + DELIM: '🇳🇫', DELIM + r'nigeria' + DELIM: '🇳🇬', DELIM + r'nicaragua' + DELIM: '🇳🇮', DELIM + r'netherlands' + DELIM: '🇳🇱', DELIM + r'norway' + DELIM: '🇳🇴', DELIM + r'nepal' + DELIM: '🇳🇵', DELIM + r'nauru' + DELIM: '🇳🇷', DELIM + r'niue' + DELIM: '🇳🇺', DELIM + r'new_zealand' + DELIM: '🇳🇿', DELIM + r'oman' + DELIM: '🇴🇲', DELIM + r'panama' + DELIM: '🇵🇦', DELIM + r'peru' + DELIM: '🇵🇪', DELIM + r'french_polynesia' + DELIM: '🇵🇫', DELIM + r'papua_new_guinea' + DELIM: '🇵🇬', DELIM + r'philippines' + DELIM: '🇵🇭', DELIM + r'pakistan' + DELIM: '🇵🇰', DELIM + r'poland' + DELIM: '🇵🇱', DELIM + r'st_pierre_miquelon' + DELIM: '🇵🇲', DELIM + r'pitcairn_islands' + DELIM: '🇵🇳', DELIM + r'puerto_rico' + DELIM: '🇵🇷', DELIM + r'palestinian_territories' + DELIM: '🇵🇸', DELIM + r'portugal' + DELIM: '🇵🇹', DELIM + r'palau' + DELIM: '🇵🇼', DELIM + r'paraguay' + DELIM: '🇵🇾', DELIM + r'qatar' + DELIM: '🇶🇦', DELIM + r'reunion' + DELIM: '🇷🇪', DELIM + r'romania' + DELIM: '🇷🇴', DELIM + r'serbia' + DELIM: '🇷🇸', DELIM + r'ru' + DELIM: '🇷🇺', DELIM + r'rwanda' + DELIM: '🇷🇼', DELIM + r'saudi_arabia' + DELIM: '🇸🇦', DELIM + r'solomon_islands' + DELIM: '🇸🇧', DELIM + r'seychelles' + DELIM: '🇸🇨', DELIM + r'sudan' + DELIM: '🇸🇩', DELIM + r'sweden' + DELIM: '🇸🇪', DELIM + r'singapore' + DELIM: '🇸🇬', DELIM + r'st_helena' + DELIM: '🇸🇭', DELIM + r'slovenia' + DELIM: '🇸🇮', DELIM + r'svalbard_jan_mayen' + DELIM: '🇸🇯', DELIM + r'slovakia' + DELIM: '🇸🇰', DELIM + r'sierra_leone' + DELIM: '🇸🇱', DELIM + r'san_marino' + DELIM: '🇸🇲', DELIM + r'senegal' + DELIM: '🇸🇳', DELIM + r'somalia' + DELIM: '🇸🇴', DELIM + r'suriname' + DELIM: '🇸🇷', DELIM + r'south_sudan' + DELIM: '🇸🇸', DELIM + r'sao_tome_principe' + DELIM: '🇸🇹', DELIM + r'el_salvador' + DELIM: '🇸🇻', DELIM + r'sint_maarten' + DELIM: '🇸🇽', DELIM + r'syria' + DELIM: '🇸🇾', DELIM + r'swaziland' + DELIM: '🇸🇿', DELIM + r'tristan_da_cunha' + DELIM: '🇹🇦', DELIM + r'turks_caicos_islands' + DELIM: '🇹🇨', DELIM + r'chad' + DELIM: '🇹🇩', DELIM + r'french_southern_territories' + DELIM: '🇹🇫', DELIM + r'togo' + DELIM: '🇹🇬', DELIM + r'thailand' + DELIM: '🇹🇭', DELIM + r'tajikistan' + DELIM: '🇹🇯', DELIM + r'tokelau' + DELIM: '🇹🇰', DELIM + r'timor_leste' + DELIM: '🇹🇱', DELIM + r'turkmenistan' + DELIM: '🇹🇲', DELIM + r'tunisia' + DELIM: '🇹🇳', DELIM + r'tonga' + DELIM: '🇹🇴', DELIM + r'tr' + DELIM: '🇹🇷', DELIM + r'trinidad_tobago' + DELIM: '🇹🇹', DELIM + r'tuvalu' + DELIM: '🇹🇻', DELIM + r'taiwan' + DELIM: '🇹🇼', DELIM + r'tanzania' + DELIM: '🇹🇿', DELIM + r'ukraine' + DELIM: '🇺🇦', DELIM + r'uganda' + DELIM: '🇺🇬', DELIM + r'us_outlying_islands' + DELIM: '🇺🇲', DELIM + r'united_nations' + DELIM: '🇺🇳', DELIM + r'us' + DELIM: '🇺🇸', DELIM + r'uruguay' + DELIM: '🇺🇾', DELIM + r'uzbekistan' + DELIM: '🇺🇿', DELIM + r'vatican_city' + DELIM: '🇻🇦', DELIM + r'st_vincent_grenadines' + DELIM: '🇻🇨', DELIM + r'venezuela' + DELIM: '🇻🇪', DELIM + r'british_virgin_islands' + DELIM: '🇻🇬', DELIM + r'us_virgin_islands' + DELIM: '🇻🇮', DELIM + r'vietnam' + DELIM: '🇻🇳', DELIM + r'vanuatu' + DELIM: '🇻🇺', DELIM + r'wallis_futuna' + DELIM: '🇼🇫', DELIM + r'samoa' + DELIM: '🇼🇸', DELIM + r'kosovo' + DELIM: '🇽🇰', DELIM + r'yemen' + DELIM: '🇾🇪', DELIM + r'mayotte' + DELIM: '🇾🇹', DELIM + r'south_africa' + DELIM: '🇿🇦', DELIM + r'zambia' + DELIM: '🇿🇲', DELIM + r'zimbabwe' + DELIM: '🇿🇼', # # Subdivision Flag # DELIM + r'england' + DELIM: '🏴󠁧󠁢󠁥󠁮󠁧󠁿', DELIM + r'scotland' + DELIM: '🏴󠁧󠁢󠁳󠁣󠁴󠁿', DELIM + r'wales' + DELIM: '🏴󠁧󠁢󠁷󠁬󠁳󠁿', } # Define our singlton EMOJI_COMPILED_MAP = None def apply_emojis(content): """ Takes the content and swaps any matched emoji's 
found with their utf-8 encoded mapping
    """

    global EMOJI_COMPILED_MAP

    if EMOJI_COMPILED_MAP is None:
        t_start = time.time()

        # Perform our compilation
        EMOJI_COMPILED_MAP = re.compile(
            r'(' + '|'.join(EMOJI_MAP.keys()) + r')',
            re.IGNORECASE)

        logger.trace(
            'Emoji engine loaded in {:.4f}s'.format(
                (time.time() - t_start)))

    try:
        return EMOJI_COMPILED_MAP.sub(
            lambda x: EMOJI_MAP[x.group()], content)

    except TypeError:
        # No change; but force string return (e.g. content was None)
        return ''
apprise-1.9.3/apprise/exception.py000066400000000000000000000044411477231770000172150ustar00rootroot00000000000000
# -*- coding: utf-8 -*-
# BSD 2-Clause License
#
# Apprise - Push Notification Library.
# Copyright (c) 2025, Chris Caron
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
#    this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
#    this list of conditions and the following disclaimer in the documentation
#    and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

import errno


class AppriseException(Exception):
    """
    Base Apprise Exception Class
    """
    def __init__(self, message, error_code=0):
        super().__init__(message)
        self.error_code = error_code


class ApprisePluginException(AppriseException):
    """
    Class object for handling exceptions raised from within a plugin
    """
    def __init__(self, message, error_code=600):
        super().__init__(message, error_code=error_code)


class AppriseDiskIOError(AppriseException):
    """
    Thrown when a disk I/O error occurs
    """
    def __init__(self, message, error_code=errno.EIO):
        super().__init__(message, error_code=error_code)


class AppriseFileNotFound(AppriseDiskIOError, FileNotFoundError):
    """
    Thrown when a persistent write occurred in MEMORY mode
    """
    def __init__(self, message):
        super().__init__(message, error_code=errno.ENOENT)
apprise-1.9.3/apprise/i18n/000077500000000000000000000000001477231770000154215ustar00rootroot00000000000000
apprise-1.9.3/apprise/i18n/__init__.py000066400000000000000000000000001477231770000175200ustar00rootroot00000000000000
apprise-1.9.3/apprise/i18n/apprise.pot000066400000000000000000000250631477231770000176160ustar00rootroot00000000000000
# Translations template for apprise.
# Copyright (C) 2025 Chris Caron
# This file is distributed under the same license as the apprise project.
# FIRST AUTHOR , 2025.
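As a quick illustration of the emoji engine defined above (this snippet is not part of the archived sources): apply_emojis() lazily compiles every EMOJI_MAP pattern into a single case-insensitive regular expression on first use, then substitutes each matched shortcode with its unicode character. The import path `apprise.emojis` and the ':' value of DELIM are assumptions for this sketch, not something stated in this excerpt.

# Hypothetical usage sketch -- module path and DELIM == ':' are assumptions.
from apprise.emojis import apply_emojis

# Shortcodes present in EMOJI_MAP are swapped for their unicode equivalents;
# unrecognized text is passed through untouched.
print(apply_emojis('Build finished :white_check_mark: :tada:'))
# -> 'Build finished ✅ 🎉'

# A non-string payload (e.g. None) trips the TypeError handler and yields ''.
assert apply_emojis(None) == ''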
# #, fuzzy msgid "" msgstr "" "Project-Id-Version: apprise 1.9.3\n" "Report-Msgid-Bugs-To: lead2gold@gmail.com\n" "POT-Creation-Date: 2025-03-30 15:32-0400\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=utf-8\n" "Content-Transfer-Encoding: 8bit\n" "Generated-By: Babel 2.17.0\n" msgid "A local Gnome environment is required." msgstr "" msgid "A local Microsoft Windows environment is required." msgstr "" msgid "API Access Token" msgstr "" msgid "API Key" msgstr "" msgid "API Secret" msgstr "" msgid "API Token" msgstr "" msgid "API Version" msgstr "" msgid "Access Key" msgstr "" msgid "Access Key ID" msgstr "" msgid "Access Secret" msgstr "" msgid "Access Token" msgstr "" msgid "Account Email or Object ID" msgstr "" msgid "Account SID" msgstr "" msgid "Action" msgstr "" msgid "Action Mapping" msgstr "" msgid "Add Tokens" msgstr "" msgid "Alert Type" msgstr "" msgid "Alias" msgstr "" msgid "Amount" msgstr "" msgid "App Access Token" msgstr "" msgid "App ID" msgstr "" msgid "App Token" msgstr "" msgid "App User Name" msgstr "" msgid "App Version" msgstr "" msgid "Application ID" msgstr "" msgid "Application Key" msgstr "" msgid "Application Secret" msgstr "" msgid "Attach" msgstr "" msgid "Attach File As" msgstr "" msgid "Attach Filename" msgstr "" msgid "Auth ID" msgstr "" msgid "Auth Token" msgstr "" msgid "Authentication Key" msgstr "" msgid "Authentication Type" msgstr "" msgid "Avatar Image" msgstr "" msgid "Avatar URL" msgstr "" msgid "Badge" msgstr "" msgid "Batch Mode" msgstr "" msgid "Blind Carbon Copy" msgstr "" msgid "Bot Name" msgstr "" msgid "Bot Token" msgstr "" msgid "Bot Webhook Key" msgstr "" msgid "Cache Age" msgstr "" msgid "Cache Results" msgstr "" msgid "Call" msgstr "" msgid "Carbon Copy" msgstr "" msgid "Category" msgstr "" msgid "Chanify" msgstr "" msgid "Channel ID" msgstr "" msgid "Channels" msgstr "" msgid "Class" msgstr "" msgid "Click" msgstr "" msgid "Client ID" msgstr "" msgid "Client Secret" msgstr "" msgid "Component" msgstr "" msgid "Consumer Key" msgstr "" msgid "Consumer Secret" msgstr "" msgid "Content Placement" msgstr "" msgid "Country" msgstr "" msgid "Currency" msgstr "" msgid "Custom Data" msgstr "" msgid "Custom Details" msgstr "" msgid "Custom Icon" msgstr "" msgid "Custom Image URL" msgstr "" msgid "Cycles" msgstr "" msgid "DBus Notification" msgstr "" msgid "Data Entries" msgstr "" msgid "Decode Template Args" msgstr "" msgid "Delay" msgstr "" msgid "Details" msgstr "" msgid "Detect Bot Owner" msgstr "" msgid "Device" msgstr "" msgid "Device API Key" msgstr "" msgid "Device ID" msgstr "" msgid "Device Name" msgstr "" msgid "Discord Event ID" msgstr "" msgid "Display Footer" msgstr "" msgid "Domain" msgstr "" msgid "Duration" msgstr "" msgid "Email" msgstr "" msgid "Email Header" msgstr "" msgid "Embed URL" msgstr "" msgid "Enable Contents" msgstr "" msgid "Entity" msgstr "" msgid "Entity ID" msgstr "" msgid "Event" msgstr "" msgid "Events" msgstr "" msgid "Expire" msgstr "" msgid "Facility" msgstr "" msgid "Feishu" msgstr "" msgid "Fetch Method" msgstr "" msgid "Flair ID" msgstr "" msgid "Flair Text" msgstr "" msgid "Flash" msgstr "" msgid "Footer Logo" msgstr "" msgid "Forced File Name" msgstr "" msgid "Forced Mime Type" msgstr "" msgid "Free-Mobile" msgstr "" msgid "From" msgstr "" msgid "From Email" msgstr "" msgid "From Name" msgstr "" msgid "From Phone ID" msgstr "" msgid "From Phone No" msgstr "" msgid "GET Params" msgstr "" 
msgid "Gateway" msgstr "" msgid "Gateway ID" msgstr "" msgid "Gnome Notification" msgstr "" msgid "Group" msgstr "" msgid "HTTP Header" msgstr "" msgid "Hostname" msgstr "" msgid "IRC Colors" msgstr "" msgid "Icon Type" msgstr "" msgid "Icon URL" msgstr "" msgid "Idempotency-Key" msgstr "" msgid "Identifier" msgstr "" msgid "Image Link" msgstr "" msgid "Include Footer" msgstr "" msgid "Include Image" msgstr "" msgid "Include Segment" msgstr "" msgid "Integration ID" msgstr "" msgid "Integration Key" msgstr "" msgid "Interpret Emojis" msgstr "" msgid "Is Ad?" msgstr "" msgid "Is Spoiler" msgstr "" msgid "Kind" msgstr "" msgid "Label" msgstr "" msgid "Language" msgstr "" msgid "Language Code" msgstr "" msgid "Level" msgstr "" msgid "Local File" msgstr "" msgid "Locale" msgstr "" msgid "Log PID" msgstr "" msgid "Log to STDERR" msgstr "" msgid "Long-Lived Access Token" msgstr "" msgid "MacOSX Notification" msgstr "" msgid "Markdown Version" msgstr "" msgid "Master Key" msgstr "" msgid "Matrix API Verion" msgstr "" msgid "Media Type" msgstr "" msgid "Memory" msgstr "" msgid "Message Hook" msgstr "" msgid "Message Mode" msgstr "" msgid "Message Type" msgstr "" msgid "Meta Extras" msgstr "" msgid "Modal" msgstr "" msgid "Mode" msgstr "" msgid "NSFW" msgstr "" msgid "Name" msgstr "" msgid "Nextcloud Talk" msgstr "" msgid "No dependencies." msgstr "" msgid "Notification Color" msgstr "" msgid "Notification ID" msgstr "" msgid "Notify Format" msgstr "" msgid "OAuth Access Token" msgstr "" msgid "OAuth2 KeyFile" msgstr "" msgid "" "Only works with Mac OS X 10.8 and higher. Additionally requires that " "/usr/local/bin/terminal-notifier is locally accessible." msgstr "" msgid "Open/Click URL" msgstr "" msgid "Organization" msgstr "" msgid "Originating Address" msgstr "" msgid "Overflow Mode" msgstr "" msgid "PGP Encryption" msgstr "" msgid "PGP Public Key Path" msgstr "" msgid "Packages are recommended to improve functionality." msgstr "" msgid "Packages are required to function." 
msgstr "" msgid "Password" msgstr "" msgid "Path" msgstr "" msgid "Payload Extras" msgstr "" msgid "Persistent Storage" msgstr "" msgid "Port" msgstr "" msgid "Postback Data" msgstr "" msgid "Prefix" msgstr "" msgid "Priority" msgstr "" msgid "Private Key" msgstr "" msgid "Project ID" msgstr "" msgid "Provider Key" msgstr "" msgid "Pushkey" msgstr "" msgid "QOS" msgstr "" msgid "Query Method" msgstr "" msgid "Recipient Phone Number" msgstr "" msgid "Region" msgstr "" msgid "Region Name" msgstr "" msgid "Remove Tokens" msgstr "" msgid "Reply To" msgstr "" msgid "Reply To Email" msgstr "" msgid "Resend Delay" msgstr "" msgid "Resubmit Flag" msgstr "" msgid "Retain Messages" msgstr "" msgid "Retry" msgstr "" msgid "Room ID" msgstr "" msgid "Route Group" msgstr "" msgid "SMS Mode" msgstr "" msgid "SMTP Server" msgstr "" msgid "Salt" msgstr "" msgid "Schema" msgstr "" msgid "Secret" msgstr "" msgid "Secret API Key" msgstr "" msgid "Secret Access Key" msgstr "" msgid "Secret Key" msgstr "" msgid "Secure Mode" msgstr "" msgid "Send Replies" msgstr "" msgid "Sender ID" msgstr "" msgid "Sender Name" msgstr "" msgid "Sensitive Attachments" msgstr "" msgid "Server Discovery" msgstr "" msgid "Server Key" msgstr "" msgid "Server Timeout" msgstr "" msgid "Service ID" msgstr "" msgid "Service Password" msgstr "" msgid "Severity" msgstr "" msgid "Short URL" msgstr "" msgid "Show Status" msgstr "" msgid "Signature" msgstr "" msgid "Silent Notification" msgstr "" msgid "Société Française du Radiotéléphone" msgstr "" msgid "Socket Connect Timeout" msgstr "" msgid "Socket Read Timeout" msgstr "" msgid "Sound" msgstr "" msgid "Sound Link" msgstr "" msgid "Source" msgstr "" msgid "Source Email" msgstr "" msgid "Source Phone No" msgstr "" msgid "Space ID" msgstr "" msgid "Special Text Color" msgstr "" msgid "Splunk On-Call" msgstr "" msgid "Spoiler Text" msgstr "" msgid "Sticky" msgstr "" msgid "Subtitle" msgstr "" msgid "TTS Voice" msgstr "" msgid "Tags" msgstr "" msgid "Target Callsign" msgstr "" msgid "Target Channel" msgstr "" msgid "Target Channel ID" msgstr "" msgid "Target Chat ID" msgstr "" msgid "Target Contact" msgstr "" msgid "Target Device" msgstr "" msgid "Target Email" msgstr "" msgid "Target Emails" msgstr "" msgid "Target Encoded ID" msgstr "" msgid "Target Escalation" msgstr "" msgid "Target Group" msgstr "" msgid "Target Group ID" msgstr "" msgid "Target Phone" msgstr "" msgid "Target Phone No" msgstr "" msgid "Target Player ID" msgstr "" msgid "Target Queue" msgstr "" msgid "Target Room Alias" msgstr "" msgid "Target Room ID" msgstr "" msgid "Target Routing Key" msgstr "" msgid "Target Schedule" msgstr "" msgid "Target Short Code" msgstr "" msgid "Target Stream" msgstr "" msgid "Target Subreddit" msgstr "" msgid "Target Team" msgstr "" msgid "Target Threema ID" msgstr "" msgid "Target Topic" msgstr "" msgid "Target User" msgstr "" msgid "Target User ID" msgstr "" msgid "Targets" msgstr "" msgid "Targets " msgstr "" msgid "Team Name" msgstr "" msgid "Template" msgstr "" msgid "Template Data" msgstr "" msgid "Template ID" msgstr "" msgid "Template Mapping" msgstr "" msgid "Template Name" msgstr "" msgid "Template Path" msgstr "" msgid "Template Tokens" msgstr "" msgid "Tenant Domain" msgstr "" msgid "Test Only" msgstr "" msgid "Text To Speech" msgstr "" msgid "Third Party ID" msgstr "" msgid "Thread ID" msgstr "" msgid "Thread Key" msgstr "" msgid "Timeout" msgstr "" msgid "To Channel ID" msgstr "" msgid "To Email" msgstr "" msgid "To User ID" msgstr "" msgid "Token" msgstr "" msgid "Token A" 
msgstr "" msgid "Token B" msgstr "" msgid "Token C" msgstr "" msgid "Token D" msgstr "" msgid "Topic" msgstr "" msgid "Topic Thread ID" msgstr "" msgid "Transmitter Groups" msgstr "" msgid "URL" msgstr "" msgid "URL Prefix" msgstr "" msgid "URL Title" msgstr "" msgid "Unicode Characters" msgstr "" msgid "Upload" msgstr "" msgid "Urgency" msgstr "" msgid "Use Avatar" msgstr "" msgid "Use Blocks" msgstr "" msgid "Use Fields" msgstr "" msgid "Use Session" msgstr "" msgid "User Email" msgstr "" msgid "User Key" msgstr "" msgid "User Name" msgstr "" msgid "Username" msgstr "" msgid "Verify SSL" msgstr "" msgid "Version" msgstr "" msgid "Vibration" msgstr "" msgid "Visibility" msgstr "" msgid "Volume" msgstr "" msgid "Web Based" msgstr "" msgid "Web Page Preview" msgstr "" msgid "Webhook" msgstr "" msgid "Webhook ID" msgstr "" msgid "Webhook Key" msgstr "" msgid "Webhook Mode" msgstr "" msgid "Webhook Token" msgstr "" msgid "Workflow ID" msgstr "" msgid "Workspace" msgstr "" msgid "Wrap Text" msgstr "" msgid "X-Axis" msgstr "" msgid "Y-Axis" msgstr "" msgid "libdbus-1.so.x must be installed." msgstr "" msgid "ttl" msgstr "" msgid "validity" msgstr "" apprise-1.9.3/apprise/i18n/en/000077500000000000000000000000001477231770000160235ustar00rootroot00000000000000apprise-1.9.3/apprise/i18n/en/LC_MESSAGES/000077500000000000000000000000001477231770000176105ustar00rootroot00000000000000apprise-1.9.3/apprise/i18n/en/LC_MESSAGES/apprise.po000066400000000000000000000105261477231770000216170ustar00rootroot00000000000000# English translations for apprise. # Copyright (C) 2019 Chris Caron # This file is distributed under the same license as the apprise project. # Chris Caron , 2019. # msgid "" msgstr "" "Project-Id-Version: apprise 1.4.5\n" "Report-Msgid-Bugs-To: lead2gold@gmail.com\n" "POT-Creation-Date: 2019-05-28 16:56-0400\n" "PO-Revision-Date: 2019-05-24 20:00-0400\n" "Last-Translator: Chris Caron \n" "Language: en\n" "Language-Team: en \n" "Plural-Forms: nplurals=2; plural=(n != 1)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=utf-8\n" "Content-Transfer-Encoding: 8bit\n" "Generated-By: Babel 2.6.0\n" msgid "API Key" msgstr "API Key" msgid "Access Key" msgstr "Access Key" msgid "Access Key ID" msgstr "Access Key ID" msgid "Access Secret" msgstr "Access Secret" msgid "Access Token" msgstr "Access Token" msgid "Account SID" msgstr "Account SID" msgid "Add Tokens" msgstr "Add Tokens" msgid "Application Key" msgstr "Application Key" msgid "Application Secret" msgstr "Application Secret" msgid "Auth Token" msgstr "Auth Token" msgid "Authorization Token" msgstr "Authorization Token" msgid "Avatar Image" msgstr "Avatar Image" msgid "Bot Name" msgstr "Bot Name" msgid "Bot Token" msgstr "Bot Token" msgid "Channels" msgstr "Channels" msgid "Consumer Key" msgstr "Consumer Key" msgid "Consumer Secret" msgstr "Consumer Secret" msgid "Detect Bot Owner" msgstr "Detect Bot Owner" msgid "Device ID" msgstr "Device ID" msgid "Display Footer" msgstr "Display Footer" msgid "Domain" msgstr "Domain" msgid "Duration" msgstr "Duration" msgid "Events" msgstr "Events" msgid "Footer Logo" msgstr "Footer Logo" msgid "From Email" msgstr "From Email" msgid "From Name" msgstr "From Name" msgid "From Phone No" msgstr "From Phone No" msgid "Group" msgstr "Group" msgid "HTTP Header" msgstr "HTTP Header" msgid "Hostname" msgstr "Hostname" msgid "Include Image" msgstr "Include Image" msgid "Modal" msgstr "Modal" msgid "Notify Format" msgstr "Notify Format" msgid "Organization" msgstr "Organization" msgid "Overflow Mode" msgstr 
"Overflow Mode" msgid "Password" msgstr "Password" msgid "Port" msgstr "Port" msgid "Priority" msgstr "Priority" msgid "Provider Key" msgstr "Provider Key" msgid "Region" msgstr "Region" msgid "Region Name" msgstr "Region Name" msgid "Remove Tokens" msgstr "Remove Tokens" msgid "Rooms" msgstr "Rooms" msgid "SMTP Server" msgstr "SMTP Server" msgid "Schema" msgstr "Schema" msgid "Secret Access Key" msgstr "Secret Access Key" msgid "Secret Key" msgstr "Secret Key" msgid "Secure Mode" msgstr "Secure Mode" msgid "Server Timeout" msgstr "Server Timeout" msgid "Sound" msgstr "Sound" msgid "Source JID" msgstr "Source JID" msgid "Target Channel" msgstr "Target Channel" msgid "Target Chat ID" msgstr "Target Chat ID" msgid "Target Device" msgstr "Target Device" msgid "Target Device ID" msgstr "Target Device ID" msgid "Target Email" msgstr "Target Email" msgid "Target Emails" msgstr "Target Emails" msgid "Target Encoded ID" msgstr "Target Encoded ID" msgid "Target JID" msgstr "Target JID" msgid "Target Phone No" msgstr "Target Phone No" msgid "Target Room Alias" msgstr "Target Room Alias" msgid "Target Room ID" msgstr "Target Room ID" msgid "Target Short Code" msgstr "Target Short Code" msgid "Target Tag ID" msgstr "Target Tag ID" msgid "Target Topic" msgstr "Target Topic" msgid "Target User" msgstr "Target User" msgid "Targets" msgstr "Targets" msgid "Text To Speech" msgstr "Text To Speech" msgid "To Channel ID" msgstr "To Channel ID" msgid "To Email" msgstr "To Email" msgid "To User ID" msgstr "To User ID" msgid "Token" msgstr "Token" msgid "Token A" msgstr "Token A" msgid "Token B" msgstr "Token B" msgid "Token C" msgstr "Token C" msgid "Urgency" msgstr "Urgency" msgid "Use Avatar" msgstr "Use Avatar" msgid "User" msgstr "User" msgid "User Key" msgstr "User Key" msgid "User Name" msgstr "User Name" msgid "Username" msgstr "Username" msgid "Verify SSL" msgstr "Verify SSL" msgid "Version" msgstr "Version" msgid "Webhook" msgstr "Webhook" msgid "Webhook ID" msgstr "Webhook ID" msgid "Webhook Mode" msgstr "Webhook Mode" msgid "Webhook Token" msgstr "Webhook Token" msgid "X-Axis" msgstr "X-Axis" msgid "XEP" msgstr "XEP" msgid "Y-Axis" msgstr "Y-Axis" apprise-1.9.3/apprise/locale.py000066400000000000000000000214421477231770000164560ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import ctypes import locale import contextlib import os import re from os.path import join from os.path import dirname from os.path import abspath from .logger import logger # This gets toggled to True if we succeed GETTEXT_LOADED = False try: # Initialize gettext import gettext # Toggle our flag GETTEXT_LOADED = True except ImportError: # gettext isn't available; no problem; Use the library features without # multi-language support. pass class AppriseLocale: """ A wrapper class to gettext so that we can manipulate multiple lanaguages on the fly if required. """ # Define our translation domain _domain = 'apprise' # The path to our translations _locale_dir = abspath(join(dirname(__file__), 'i18n')) # Locale regular expression _local_re = re.compile( r'^((?PC)|(?P([a-z]{2}))([_:](?P[a-z]{2}))?)' r'(\.(?P[a-z0-9-]+))?$', re.IGNORECASE) # Define our default encoding _default_encoding = 'utf-8' # The function to assign `_` by default _fn = 'gettext' # The language we should fall back to if all else fails _default_language = 'en' def __init__(self, language=None): """ Initializes our object, if a language is specified, then we initialize ourselves to that, otherwise we use whatever we detect from the local operating system. If all else fails, we resort to the defined default_language. """ # Cache previously loaded translations self._gtobjs = {} # Get our language self.lang = AppriseLocale.detect_language(language) # Our mapping to our _fn self.__fn_map = None if GETTEXT_LOADED is False: # We're done return # Add language self.add(self.lang) def add(self, lang=None, set_default=True): """ Add a language to our list """ lang = lang if lang else self._default_language if lang not in self._gtobjs: # Load our gettext object and install our language try: self._gtobjs[lang] = gettext.translation( self._domain, localedir=self._locale_dir, languages=[lang], fallback=False) # The non-intrusive method of applying the gettext change to # the global namespace only self.__fn_map = getattr(self._gtobjs[lang], self._fn) except FileNotFoundError: # The translation directory does not exist logger.debug( 'Could not load translation path: %s', join(self._locale_dir, lang)) # Fallback (handle case where self.lang does not exist) if self.lang not in self._gtobjs: self._gtobjs[self.lang] = gettext self.__fn_map = getattr(self._gtobjs[self.lang], self._fn) return False logger.trace('Loaded language %s', lang) if set_default: logger.debug('Language set to %s', lang) self.lang = lang return True @contextlib.contextmanager def lang_at(self, lang, mapto=_fn): """ The syntax works as: with at.lang_at('fr'): # apprise works as though the french language has been # defined. afterwards, the language falls back to whatever # it was. 
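        A short sketch of using the yielded translation function directly;
        this is illustrative only and assumes gettext support is loaded and
        that a 'fr' catalogue is available (otherwise the fallback simply
        returns the original text):

            with at.lang_at('fr') as _:
                # '_' is the gettext() callable bound to 'fr' for the
                # duration of the block
                label = _('Password')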
""" if GETTEXT_LOADED is False: # Do nothing yield None # we're done return # Tidy the language lang = AppriseLocale.detect_language(lang, detect_fallback=False) if lang not in self._gtobjs and not self.add(lang, set_default=False): # Do Nothing yield getattr(self._gtobjs[self.lang], mapto) else: # Yield yield getattr(self._gtobjs[lang], mapto) return @property def gettext(self): """ Return the current language gettext() function Useful for assigning to `_` """ return self._gtobjs[self.lang].gettext @staticmethod def detect_language(lang=None, detect_fallback=True): """ Returns the language (if it's retrievable) """ # We want to only use the 2 character version of this language # hence en_CA becomes en, en_US becomes en. if not isinstance(lang, str): if detect_fallback is False: # no detection enabled; we're done return None # Posix lookup lookup = os.environ.get localename = None for variable in ('LC_ALL', 'LC_CTYPE', 'LANG', 'LANGUAGE'): localename = lookup(variable, None) if localename: result = AppriseLocale._local_re.match(localename) if result and result.group('lang'): return result.group('lang').lower() # Windows handling if hasattr(ctypes, 'windll'): windll = ctypes.windll.kernel32 try: lang = locale.windows_locale[ windll.GetUserDefaultUILanguage()] # Our detected windows language return lang[0:2].lower() except (TypeError, KeyError): # Fallback to posix detection pass # Built in locale library check try: # Acquire our locale lang = locale.getlocale()[0] # Compatibility for Python >= 3.12 if lang == 'C': lang = AppriseLocale._default_language except (ValueError, TypeError) as e: # This occurs when an invalid locale was parsed from the # environment variable. While we still return None in this # case, we want to better notify the end user of this. Users # receiving this error should check their environment # variables. logger.warning( 'Language detection failure / {}'.format(str(e))) return None return None if not lang else lang[0:2].lower() def __getstate__(self): """ Pickle Support dumps() """ state = self.__dict__.copy() # Remove the unpicklable entries. del state['_gtobjs'] del state['_AppriseLocale__fn_map'] return state def __setstate__(self, state): """ Pickle Support loads() """ self.__dict__.update(state) # Our mapping to our _fn self.__fn_map = None self._gtobjs = {} self.add(state['lang'], set_default=True) # # Prepare our default LOCALE Singleton # LOCALE = AppriseLocale() class LazyTranslation: """ Doesn't translate anything until str() or unicode() references are made. """ def __init__(self, text, *args, **kwargs): """ Store our text """ self.text = text super().__init__(*args, **kwargs) def __str__(self): return LOCALE.gettext(self.text) if GETTEXT_LOADED else self.text # Lazy translation handling def gettext_lazy(text): """ A dummy function that can be referenced """ return LazyTranslation(text=text) apprise-1.9.3/apprise/logger.py000066400000000000000000000154111477231770000164750ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. 
Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import os import logging from io import StringIO # The root identifier needed to monitor 'apprise' logging LOGGER_NAME = 'apprise' # Define a verbosity level that is a noisier then debug mode logging.TRACE = logging.DEBUG - 1 # Define a verbosity level that is always used even when no verbosity is set # from the command line. The idea here is to allow for deprecation notices logging.DEPRECATE = logging.ERROR + 1 # Assign our Levels into our logging object logging.addLevelName(logging.DEPRECATE, "DEPRECATION WARNING") logging.addLevelName(logging.TRACE, "TRACE") def trace(self, message, *args, **kwargs): """ Verbose Debug Logging - Trace """ if self.isEnabledFor(logging.TRACE): self._log(logging.TRACE, message, args, **kwargs) def deprecate(self, message, *args, **kwargs): """ Deprication Warning Logging """ if self.isEnabledFor(logging.DEPRECATE): self._log(logging.DEPRECATE, message, args, **kwargs) # Assign our Loggers for use in Apprise logging.Logger.trace = trace logging.Logger.deprecate = deprecate # Create ourselve a generic (singleton) logging reference logger = logging.getLogger(LOGGER_NAME) class LogCapture: """ A class used to allow one to instantiate loggers that write to memory for temporary purposes. e.g.: 1. with LogCapture() as captured: 2. 3. # Send our notification(s) 4. aobj.notify("hello world") 5. 6. # retrieve our logs produced by the above call via our 7. # `captured` StringIO object we have access to within the `with` 8. # block here: 9. print(captured.getvalue()) """ def __init__(self, path=None, level=None, name=LOGGER_NAME, delete=True, fmt='%(asctime)s - %(levelname)s - %(message)s'): """ Instantiate a temporary log capture object If a path is specified, then log content is sent to that file instead of a StringIO object. You can optionally specify a logging level such as logging.INFO if you wish, otherwise by default the script uses whatever logging has been set globally. If you set delete to `False` then when using log files, they are not automatically cleaned up afterwards. Optionally over-ride the fmt as well if you wish. 
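        A minimal sketch; as with the class level example above, `aobj` is
        assumed to be an already instantiated Apprise object:

            with LogCapture(level=logging.DEBUG) as captured:
                # Send our notification(s) with debug verbosity captured
                aobj.notify("hello world")

                # Retrieve everything logged so far
                print(captured.getvalue())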
""" # Our memory buffer placeholder self.__buffer_ptr = StringIO() # Store our file path as it will determine whether or not we write to # memory and a file self.__path = path self.__delete = delete # Our logging level tracking self.__level = level self.__restore_level = None # Acquire a pointer to our logger self.__logger = logging.getLogger(name) # Prepare our handler self.__handler = logging.StreamHandler(self.__buffer_ptr) \ if not self.__path else logging.FileHandler( self.__path, mode='a', encoding='utf-8') # Use the specified level, otherwise take on the already # effective level of our logger self.__handler.setLevel( self.__level if self.__level is not None else self.__logger.getEffectiveLevel()) # Prepare our formatter self.__handler.setFormatter(logging.Formatter(fmt)) def __enter__(self): """ Allows logger manipulation within a 'with' block """ if self.__level is not None: # Temporary adjust our log level if required self.__restore_level = self.__logger.getEffectiveLevel() if self.__restore_level > self.__level: # Bump our log level up for the duration of our `with` self.__logger.setLevel(self.__level) else: # No restoration required self.__restore_level = None else: # Do nothing but enforce that we have nothing to restore to self.__restore_level = None if self.__path: # If a path has been identified, ensure we can write to the path # and that the file exists with open(self.__path, 'a'): os.utime(self.__path, None) # Update our buffer pointer self.__buffer_ptr = open(self.__path, 'r') # Add our handler self.__logger.addHandler(self.__handler) # return our memory pointer return self.__buffer_ptr def __exit__(self, exc_type, exc_value, tb): """ removes the handler gracefully when the with block has completed """ # Flush our content self.__handler.flush() self.__buffer_ptr.flush() # Drop our handler self.__logger.removeHandler(self.__handler) if self.__restore_level is not None: # Restore level self.__logger.setLevel(self.__restore_level) if self.__path: # Close our file pointer self.__buffer_ptr.close() self.__handler.close() if self.__delete: try: # Always remove file afterwards os.unlink(self.__path) except OSError: # It's okay if the file does not exist pass if exc_type is not None: # pass exception on if one was generated return False return True apprise-1.9.3/apprise/manager.py000066400000000000000000000653301477231770000166350ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import os import re import sys import time import hashlib import inspect import threading from .utils.module import import_module from .utils.singleton import Singleton from .utils.parse import parse_list from .utils.disk import path_decode from os.path import dirname from os.path import abspath from os.path import join from .logger import logger class PluginManager(metaclass=Singleton): """ Designed to be a singleton object to maintain all initialized loading of modules in memory. """ # Description (used for logging) name = 'Singleton Plugin' # Memory Space _id = 'undefined' # Our Module Python path name module_name_prefix = f'apprise.{_id}' # The module path to scan module_path = join(abspath(dirname(__file__)), _id) # For filtering our result when scanning a module module_filter_re = re.compile(r'^(?P((?!_)[A-Za-z0-9]+))$') # thread safe loading _lock = threading.Lock() def __init__(self, *args, **kwargs): """ Over-ride our class instantiation to provide a singleton """ self._module_map = None self._schema_map = None # This contains a mapping of all plugins dynamicaly loaded at runtime # from external modules such as the @notify decorator # # The elements here will be additionally added to the _schema_map if # there is no conflict otherwise. # The structure looks like the following: # Module path, e.g. 
/usr/share/apprise/plugins/my_notify_hook.py # { # 'path': path, # # 'notify': { # 'schema': { # 'name': 'Custom schema name', # 'fn_name': 'name_of_function_decorator_was_found_on', # 'url': 'schema://any/additional/info/found/on/url' # 'plugin': # }, # 'schema2': { # 'name': 'Custom schema name', # 'fn_name': 'name_of_function_decorator_was_found_on', # 'url': 'schema://any/additional/info/found/on/url' # 'plugin': # } # } # Note: that the inherits from # NotifyBase self._custom_module_map = {} # Track manually disabled modules (by their schema) self._disabled = set() # Hash of all paths previously scanned so we don't waste # effort/overhead doing it again self._paths_previously_scanned = set() # Track loaded module paths to prevent from loading them again self._loaded = set() def unload_modules(self, disable_native=False): """ Reset our object and unload all modules """ with self._lock: if self._custom_module_map: # Handle Custom Module Assignments for meta in self._custom_module_map.values(): if meta['name'] not in self._module_map: # Nothing to remove continue # For the purpose of tidying up un-used modules in memory loaded = [m for m in sys.modules.keys() if m.startswith( self._module_map[meta['name']]['path'])] for module_path in loaded: del sys.modules[module_path] # Reset disabled plugins (if any) for schema in self._disabled: self._schema_map[schema].enabled = True self._disabled.clear() # Reset our variables self._schema_map = {} self._custom_module_map = {} if disable_native: self._module_map = {} else: self._module_map = None self._loaded = set() # Reset our path cache self._paths_previously_scanned = set() def load_modules(self, path=None, name=None, force=False): """ Load our modules into memory """ # Default value module_name_prefix = self.module_name_prefix if name is None else name module_path = self.module_path if path is None else path with self._lock: if not force and module_path in self._loaded: # We're done return # Our base reference module_count = len(self._module_map) if self._module_map else 0 schema_count = len(self._schema_map) if self._schema_map else 0 if not self: # Initialize our maps self._module_map = {} self._schema_map = {} self._custom_module_map = {} # Used for the detection of additional Notify Services objects # The .py extension is optional as we support loading directories # too module_re = re.compile( r'^(?P(?!base|_)[a-z0-9_]+)(\.py)?$', re.I) t_start = time.time() for f in os.listdir(module_path): tl_start = time.time() match = module_re.match(f) if not match: # keep going continue # Store our notification/plugin name: module_name = match.group('name') module_pyname = '{}.{}'.format(module_name_prefix, module_name) if module_name in self._module_map: logger.warning( "%s(s) (%s) already loaded; ignoring %s", self.name, module_name, os.path.join(module_path, f)) continue try: module = __import__( module_pyname, globals(), locals(), fromlist=[module_name]) except ImportError: # No problem, we can try again another way... 
module = import_module( os.path.join(module_path, f), module_pyname) if not module: # logging found in import_module and not needed here continue module_class = None for m_class in [obj for obj in dir(module) if self.module_filter_re.match(obj)]: # Get our plugin plugin = getattr(module, m_class) if not hasattr(plugin, 'app_id'): # Filter out non-notification modules logger.trace( "(%s.%s) import failed; no app_id defined in %s", self.name, m_class, os.path.join(module_path, f)) continue # Add our plugin name to our module map self._module_map[module_name] = { 'plugin': set([plugin]), 'module': module, 'path': '{}.{}'.format( module_name_prefix, module_name), 'native': True, } fn = getattr(plugin, 'schemas', None) schemas = set([]) if not callable(fn) else fn(plugin) # map our schema to our plugin for schema in schemas: if schema in self._schema_map: logger.error( "{} schema ({}) mismatch detected -" ' {} already maps to {}' .format(self.name, schema, self._schema_map[schema], plugin)) continue # Assign plugin self._schema_map[schema] = plugin # Store our class module_class = m_class break if not module_class: # Not a library we can load as it doesn't follow the simple # rule that the class must bear the same name as the # notification file itself. logger.trace( "%s (%s) import failed; no filename/Class " "match found in %s", self.name, module_name, os.path.join(module_path, f)) continue logger.trace( '{} {} loaded in {:.6f}s'.format( self.name, module_name, (time.time() - tl_start))) # Track the directory loaded so we never load it again self._loaded.add(module_path) logger.debug( '{} {}(s) and {} Schema(s) loaded in {:.4f}s' .format( self.name, len(self._module_map) - module_count, len(self._schema_map) - schema_count, (time.time() - t_start))) def module_detection(self, paths, cache=True): """ Leverage the @notify decorator and load all objects found matching this. """ # A simple restriction that we don't allow periods in the filename at # all so it can't be hidden (Linux OS's) and it won't conflict with # Python path naming. 
This also prevents us from loading any python # file that starts with an underscore or dash # We allow for __init__.py as well module_re = re.compile( r'^(?P[_a-z0-9][a-z0-9._-]+)?(\.py)?$', re.I) # Validate if we're a loadable Python file or not valid_python_file_re = re.compile(r'.+\.py(o|c)?$', re.IGNORECASE) if isinstance(paths, str): paths = [paths, ] if not paths or not isinstance(paths, (tuple, list)): # We're done return def _import_module(path): # Since our plugin name can conflict (as a module) with another # we want to generate random strings to avoid steping on # another's namespace if not (path and valid_python_file_re.match(path)): # Ignore file/module type logger.trace('Plugin Scan: Skipping %s', path) return t_start = time.time() module_name = hashlib.sha1(path.encode('utf-8')).hexdigest() module_pyname = "{prefix}.{name}".format( prefix='apprise.custom.module', name=module_name) if module_pyname in self._custom_module_map: # First clear out existing entries for schema in \ self._custom_module_map[module_pyname]['notify']\ .keys(): # Remove any mapped modules to this file del self._schema_map[schema] # Reset del self._custom_module_map[module_pyname] # Load our module module = import_module(path, module_pyname) if not module: # No problem, we can't use this object logger.warning('Failed to load custom module: %s', _path) return # Print our loaded modules if any if module_pyname in self._custom_module_map: logger.debug( 'Custom module %s - %d schema(s) (name=%s) ' 'loaded in %.6fs', _path, len(self._custom_module_map[module_pyname]['notify']), module_name, (time.time() - t_start)) # Add our plugin name to our module map self._module_map[module_name] = { 'plugin': set(), 'module': module, 'path': module_pyname, 'native': False, } for schema, meta in\ self._custom_module_map[module_pyname]['notify']\ .items(): # For mapping purposes; map our element in our main list self._module_map[module_name]['plugin'].add( self._schema_map[schema]) # Log our success logger.info('Loaded custom notification: %s://', schema) else: # The code reaches here if we successfully loaded the Python # module but no hooks/triggers were found. 
So we can safely # just remove/ignore this entry del sys.modules[module_pyname] return # end of _import_module() return for _path in paths: path = path_decode(_path) if (cache and path in self._paths_previously_scanned) \ or not os.path.exists(path): # We're done as we've already scanned this continue # Store our path as a way of hashing it has been handled self._paths_previously_scanned.add(path) if os.path.isdir(path) and not \ os.path.isfile(os.path.join(path, '__init__.py')): logger.debug('Scanning for custom plugins in: %s', path) for entry in os.listdir(path): re_match = module_re.match(entry) if not re_match: # keep going logger.trace('Plugin Scan: Ignoring %s', entry) continue new_path = os.path.join(path, entry) if os.path.isdir(new_path): # Update our path new_path = os.path.join(path, entry, '__init__.py') if not os.path.isfile(new_path): logger.trace( 'Plugin Scan: Ignoring %s', os.path.join(path, entry)) continue if not cache or \ (cache and new_path not in self._paths_previously_scanned): # Load our module _import_module(new_path) # Add our subdir path self._paths_previously_scanned.add(new_path) else: if os.path.isdir(path): # This logic is safe to apply because we already # validated the directories state above; update our # path path = os.path.join(path, '__init__.py') if cache and path in self._paths_previously_scanned: continue self._paths_previously_scanned.add(path) # directly load as is re_match = module_re.match(os.path.basename(path)) # must be a match and must have a .py extension if not re_match or not re_match.group(1): # keep going logger.trace('Plugin Scan: Ignoring %s', path) continue # Load our module _import_module(path) return None def add(self, plugin, schemas=None, url=None, send_func=None): """ Ability to manually add Notification services to our stack """ if not self: # Lazy load self.load_modules() # Acquire a list of schemas p_schemas = parse_list(plugin.secure_protocol, plugin.protocol) if isinstance(schemas, str): schemas = [schemas, ] elif schemas is None: # Default schemas = p_schemas if not schemas or not isinstance(schemas, (set, tuple, list)): # We're done logger.error( 'The schemas provided (type %s) is unsupported; ' 'loaded from %s.', type(schemas), send_func.__name__ if send_func else plugin.__class__.__name__) return False # Convert our schemas into a set schemas = set([s.lower() for s in schemas]) | set(p_schemas) # Valdation conflict = [s for s in schemas if s in self] if conflict: # we're already handling this schema logger.warning( 'The schema(s) (%s) are already defined and could not be ' 'loaded from %s%s.', ', '.join(conflict), 'custom notify function ' if send_func else '', send_func.__name__ if send_func else plugin.__class__.__name__) return False if send_func: # Acquire the function name fn_name = send_func.__name__ # Acquire the python filename path path = inspect.getfile(send_func) # Acquire our path to our module module_name = str(send_func.__module__) if module_name not in self._custom_module_map: # Support non-dynamic includes as well... 
self._custom_module_map[module_name] = { # Name can be useful for indexing back into the # _module_map object; this is the key to do it with: 'name': module_name.split('.')[-1], # The path to the module loaded 'path': path, # Initialize our template 'notify': {}, } for schema in schemas: self._custom_module_map[module_name]['notify'][schema] = { # The name of the send function the @notify decorator # wrapped 'fn_name': fn_name, # The URL that was provided in the @notify decorator call # associated with the 'on=' 'url': url, } else: module_name = hashlib.sha1( ''.join(schemas).encode('utf-8')).hexdigest() module_pyname = "{prefix}.{name}".format( prefix='apprise.adhoc.module', name=module_name) # Add our plugin name to our module map self._module_map[module_name] = { 'plugin': set([plugin]), 'module': None, 'path': module_pyname, 'native': False, } for schema in schemas: # Assign our mapping self._schema_map[schema] = plugin return True def remove(self, *schemas): """ Removes a loaded element (if defined) """ if not self: # Lazy load self.load_modules() for schema in schemas: try: del self[schema] except KeyError: pass def plugins(self, include_disabled=True): """ Return all of our loaded plugins """ if not self: # Lazy load self.load_modules() for module in self._module_map.values(): for plugin in module['plugin']: if not include_disabled and not plugin.enabled: continue yield plugin def schemas(self, include_disabled=True): """ Return all of our loaded schemas if include_disabled == True, then even disabled notifications are returned """ if not self: # Lazy load self.load_modules() # Return our list return list(self._schema_map.keys()) if include_disabled else \ [s for s in self._schema_map.keys() if self._schema_map[s].enabled] def disable(self, *schemas): """ Disables the modules associated with the specified schemas """ if not self: # Lazy load self.load_modules() for schema in schemas: if schema not in self._schema_map: continue if not self._schema_map[schema].enabled: continue # Disable self._schema_map[schema].enabled = False self._disabled.add(schema) def enable_only(self, *schemas): """ Disables the modules associated with the specified schemas """ if not self: # Lazy load self.load_modules() # convert to set for faster indexing schemas = set(schemas) for plugin in self.plugins(): # Get our plugin's schema list p_schemas = set( parse_list(plugin.secure_protocol, plugin.protocol)) if not schemas & p_schemas: if plugin.enabled: # Disable it (only if previously enabled); this prevents us # from adjusting schemas that were disabled due to missing # libraries or other environment reasons plugin.enabled = False self._disabled |= p_schemas continue # If we reach here, our schema was flagged to be enabled if p_schemas & self._disabled: # Previously disabled; no worries, let's clear this up self._disabled -= p_schemas plugin.enabled = True def __contains__(self, schema): """ Checks if a schema exists """ if not self: # Lazy load self.load_modules() return schema in self._schema_map def __delitem__(self, schema): if not self: # Lazy load self.load_modules() # Get our plugin (otherwise we throw a KeyError) which is # intended on del action that doesn't align plugin = self._schema_map[schema] # Our list of all schema entries p_schemas = set([schema]) for key in list(self._module_map.keys()): if plugin in self._module_map[key]['plugin']: # Remove our plugin self._module_map[key]['plugin'].remove(plugin) # Custom Plugin Entry; Clean up cross reference module_pyname = 
self._module_map[key]['path'] if not self._module_map[key]['native'] and \ module_pyname in self._custom_module_map: del self.\ _custom_module_map[module_pyname]['notify'][schema] if not self._custom_module_map[module_pyname]['notify']: # # Last custom loaded element # # Free up custom object entry del self._custom_module_map[module_pyname] if not self._module_map[key]['plugin']: # # Last element # if self._module_map[key]['native']: # Get our plugin's schema list p_schemas = \ set([s for s in parse_list( plugin.secure_protocol, plugin.protocol) if s in self._schema_map]) # free system memory if self._module_map[key]['module']: del sys.modules[self._module_map[key]['path']] # free last remaining pointer in module map del self._module_map[key] for schema in p_schemas: # Final Tidy del self._schema_map[schema] def __setitem__(self, schema, plugin): """ Support fast assigning of Plugin/Notification Objects """ if not self: # Lazy load self.load_modules() # Set default values if not otherwise set if not plugin.service_name: # Assign service name if one doesn't exist plugin.service_name = f'{schema}://' p_schemas = set( parse_list(plugin.secure_protocol, plugin.protocol)) if not p_schemas: # Assign our protocol plugin.secure_protocol = schema p_schemas.add(schema) elif schema not in p_schemas: # Add our others (if defined) plugin.secure_protocol = \ set([schema] + parse_list(plugin.secure_protocol)) p_schemas.add(schema) if not self.add(plugin, schemas=p_schemas): raise KeyError('Conflicting Assignment') def __getitem__(self, schema): """ Returns the indexed plugin identified by the schema specified """ if not self: # Lazy load self.load_modules() return self._schema_map[schema] def __iter__(self): """ Returns an iterator so we can iterate over our loaded modules """ if not self: # Lazy load self.load_modules() return iter(self._module_map.values()) def __len__(self): """ Returns the number of modules/plugins loaded """ if not self: # Lazy load self.load_modules() return len(self._module_map) def __bool__(self): """ Determines if object has loaded or not """ return True if self._loaded and self._module_map is not None else False apprise-1.9.3/apprise/manager_attachment.py000066400000000000000000000042301477231770000210350ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import re from os.path import dirname from os.path import abspath from os.path import join from .manager import PluginManager class AttachmentManager(PluginManager): """ Designed to be a singleton object to maintain all initialized attachment plugins/modules in memory. """ # Description (used for logging) name = 'Attachment Plugin' # Filename Prefix to filter on fname_prefix = 'Attach' # Memory Space _id = 'attachment' # Our Module Python path name module_name_prefix = f'apprise.{_id}' # The module path to scan module_path = join(abspath(dirname(__file__)), _id) # For filtering our result set module_filter_re = re.compile( r'^(?P' + fname_prefix + r'(?!Base)[A-Za-z0-9]+)$') apprise-1.9.3/apprise/manager_config.py000066400000000000000000000042351477231770000201570ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import re from os.path import dirname from os.path import abspath from os.path import join from .manager import PluginManager class ConfigurationManager(PluginManager): """ Designed to be a singleton object to maintain all initialized configuration plugins/modules in memory. 
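    A brief usage sketch (illustrative only; 'file' is simply an example of
    a schema that one of the loaded configuration modules may provide):

        C_MGR = ConfigurationManager()

        # Modules are lazily loaded on first access
        if 'file' in C_MGR:
            plugin = C_MGR['file']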
""" # Description (used for logging) name = 'Configuration Plugin' # Filename Prefix to filter on fname_prefix = 'Config' # Memory Space _id = 'config' # Our Module Python path name module_name_prefix = f'apprise.{_id}' # The module path to scan module_path = join(abspath(dirname(__file__)), _id) # For filtering our result set module_filter_re = re.compile( r'^(?P' + fname_prefix + r'(?!Base)[A-Za-z0-9]+)$') apprise-1.9.3/apprise/manager_plugins.py000066400000000000000000000042431477231770000203720ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import re from os.path import dirname from os.path import abspath from os.path import join from .manager import PluginManager class NotificationManager(PluginManager): """ Designed to be a singleton object to maintain all initialized notifications in memory. """ # Description (used for logging) name = 'Notification Plugin' # Filename Prefix to filter on fname_prefix = 'Notify' # Memory Space _id = 'plugins' # Our Module Python path name module_name_prefix = f'apprise.{_id}' # The module path to scan module_path = join(abspath(dirname(__file__)), _id) # For filtering our result set module_filter_re = re.compile( r'^(?P' + fname_prefix + r'(?!Base|ImageSize|Type)[A-Za-z0-9]+)$') apprise-1.9.3/apprise/persistent_store.py000066400000000000000000001561761477231770000206500ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Copyright (C) 2025 Chris Caron # All rights reserved. # # This code is licensed under the MIT License. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files(the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and / or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions : # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. 
# # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. import os import re import gzip import zlib import base64 import glob import tempfile import json import binascii from . import exception from itertools import chain from datetime import datetime, timezone, timedelta import time import hashlib from .common import PersistentStoreMode, PERSISTENT_STORE_MODES from .utils.disk import path_decode from .logger import logger # Used for writing/reading time stored in cache file EPOCH = datetime(1970, 1, 1, tzinfo=timezone.utc) # isoformat is spelled out for compatibility with Python v3.6 AWARE_DATE_ISO_FORMAT = '%Y-%m-%dT%H:%M:%S.%f%z' NAIVE_DATE_ISO_FORMAT = '%Y-%m-%dT%H:%M:%S.%f' def _ntf_tidy(ntf): """ Reusable NamedTemporaryFile cleanup """ if ntf: # Cleanup try: ntf.close() except OSError: # Already closed pass try: os.unlink(ntf.name) logger.trace( 'Persistent temporary file removed: %s', ntf.name) except (FileNotFoundError, AttributeError): # AttributeError: something weird was passed in, no action required # FileNotFound: no worries; we were removing it anyway pass except (OSError, IOError) as e: logger.error( 'Persistent temporary file removal failed: %s', ntf.name) logger.debug( 'Persistent Storage Exception: %s', str(e)) class CacheObject: hash_engine = hashlib.sha256 hash_length = 6 def __init__(self, value=None, expires=False, persistent=True): """ Tracks our objects and associates a time limit with them """ self.__value = value self.__class_name = value.__class__.__name__ self.__expires = None if expires: self.set_expiry(expires) # Whether or not we persist this object to disk or not self.__persistent = True if persistent else False def set(self, value, expires=None, persistent=None): """ Sets fields on demand, if set to none, then they are left as is The intent of set is that it allows you to set a new a value and optionally alter meta information against it. If expires or persistent isn't specified then their previous values are used. 
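        A small sketch (the values used are arbitrary examples):

            co = CacheObject(123)

            # Update our value and have it expire 30 seconds from now;
            # the persistent flag is left as it was
            co.set(124, expires=30.0)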
""" self.__value = value self.__class_name = value.__class__.__name__ if expires is not None: self.set_expiry(expires) if persistent is not None: self.__persistent = True if persistent else False def set_expiry(self, expires=None): """ Sets a new expiry """ if isinstance(expires, datetime): self.__expires = expires.astimezone(timezone.utc) elif expires in (None, False): # Accepted - no expiry self.__expires = None elif expires is True: # Force expiry to now self.__expires = datetime.now(tz=timezone.utc) elif isinstance(expires, (float, int)): self.__expires = \ datetime.now(tz=timezone.utc) + timedelta(seconds=expires) else: # Unsupported raise AttributeError( f"An invalid expiry time ({expires} was specified") def hash(self): """ Our checksum to track the validity of our data """ try: return self.hash_engine( str(self).encode('utf-8'), usedforsecurity=False).hexdigest() except TypeError: # Python <= v3.8 - usedforsecurity flag does not work return self.hash_engine(str(self).encode('utf-8')).hexdigest() def json(self): """ Returns our preparable json object """ return { 'v': self.__value, 'x': (self.__expires - EPOCH).total_seconds() if self.__expires else None, 'c': self.__class_name if not isinstance(self.__value, datetime) else ( 'aware_datetime' if self.__value.tzinfo else 'naive_datetime'), '!': self.hash()[:self.hash_length], } @staticmethod def instantiate(content, persistent=True, verify=True): """ Loads back data read in and returns a CacheObject or None if it could not be loaded. You can pass in the contents of CacheObject.json() and you'll receive a copy assuming the hash checks okay """ try: value = content['v'] expires = content['x'] if expires is not None: expires = datetime.fromtimestamp(expires, timezone.utc) # Acquire some useful integrity objects class_name = content.get('c', '') if not isinstance(class_name, str): raise TypeError('Class name not expected string') hashsum = content.get('!', '') if not isinstance(hashsum, str): raise TypeError('SHA1SUM not expected string') except (TypeError, KeyError) as e: logger.trace(f'CacheObject could not be parsed from {content}') logger.trace('CacheObject exception: %s', str(e)) return None if class_name in ('aware_datetime', 'naive_datetime', 'datetime'): # If datetime is detected, it will fall under the naive category iso_format = AWARE_DATE_ISO_FORMAT \ if class_name[0] == 'a' else NAIVE_DATE_ISO_FORMAT try: # Python v3.6 Support value = datetime.strptime(value, iso_format) except (TypeError, ValueError): # TypeError is thrown if content is not string # ValueError is thrown if the string is not a valid format logger.trace( f'CacheObject (dt) corrupted loading from {content}') return None elif class_name == 'bytes': try: # Convert our object back to a bytes value = base64.b64decode(value) except binascii.Error: logger.trace( f'CacheObject (bin) corrupted loading from {content}') return None # Initialize our object co = CacheObject(value, expires, persistent=persistent) if verify and co.hash()[:co.hash_length] != hashsum: # Our object was tampered with logger.debug(f'Tampering detected with cache entry {co}') del co return None return co @property def value(self): """ Returns our value """ return self.__value @property def persistent(self): """ Returns our persistent value """ return self.__persistent @property def expires(self): """ Returns the datetime the object will expire """ return self.__expires @property def expires_sec(self): """ Returns the number of seconds from now the object will expire """ return None if 
self.__expires is None else max( 0.0, (self.__expires - datetime.now(tz=timezone.utc)) .total_seconds()) def __bool__(self): """ Returns True it the object hasn't expired, and False if it has """ if self.__expires is None: # No Expiry return True # Calculate if we've expired or not return self.__expires > datetime.now(tz=timezone.utc) def __eq__(self, other): """ Handles equality == flag """ if isinstance(other, CacheObject): return str(self) == str(other) return self.__value == other def __str__(self): """ string output of our data """ persistent = '+' if self.persistent else '-' return f'{self.__class_name}:{persistent}:{self.__value} expires: ' +\ ('never' if self.__expires is None else self.__expires.strftime(NAIVE_DATE_ISO_FORMAT)) class CacheJSONEncoder(json.JSONEncoder): """ A JSON Encoder for handling each of our cache objects """ def default(self, entry): if isinstance(entry, datetime): return entry.strftime( AWARE_DATE_ISO_FORMAT if entry.tzinfo is not None else NAIVE_DATE_ISO_FORMAT) elif isinstance(entry, CacheObject): return entry.json() elif isinstance(entry, bytes): return base64.b64encode(entry).decode('utf-8') return super().default(entry) class PersistentStore: """ An object to make working with persistent storage easier read() and write() are used for direct file i/o set(), get() are used for caching """ # The maximum file-size we will allow the persistent store to grow to # 1 MB = 1048576 bytes max_file_size = 1048576 # 30 days in seconds default_file_expiry = 2678400 # File encoding to use encoding = 'utf-8' # Default data set base_key = 'default' # Directory to store cache __cache_key = 'cache' # Our Temporary working directory temp_dir = 'tmp' # The directory our persistent store content gets placed in data_dir = 'var' # Our Persistent Store File Extension __extension = '.psdata' # Identify our backup file extension __backup_extension = '._psbak' # Used to verify the key specified is valid # - must start with an alpha_numeric # - following optional characters can include period, underscore and # equal __valid_key = re.compile(r'[a-z0-9][a-z0-9._-]*', re.I) # Reference only __not_found_ref = (None, None) def __init__(self, path=None, namespace='default', mode=None): """ Provide the namespace to work within. namespaces can only contain alpha-numeric characters with the exception of '-' (dash), '_' (underscore), and '.' (period). The namespace must be be relative to the current URL being controlled. 
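
        A minimal construction sketch (the path and namespace used here are
        hypothetical and shown only for illustration; when a path is given
        and no mode is specified, the first entry of PERSISTENT_STORE_MODES
        is used):

            store = PersistentStore(
                path='/tmp/apprise-cache', namespace='abc123')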
""" # Initalize our mode so __del__() calls don't go bad on the # error checking below self.__mode = None # Populated only once and after size() is called self.__exclude_list = None # Files to renew on calls to flush self.__renew = set() if not isinstance(namespace, str) \ or not self.__valid_key.match(namespace): raise AttributeError( f"Persistent Storage namespace ({namespace}) provided is" " invalid") if isinstance(path, str): # A storage path has been defined if mode is None: # Store Default if no mode was provided along side of it mode = PERSISTENT_STORE_MODES[0] # Store our information self.__base_path = os.path.join(path_decode(path), namespace) self.__temp_path = os.path.join(self.__base_path, self.temp_dir) self.__data_path = os.path.join(self.__base_path, self.data_dir) else: # If no storage path is provide we set our mode to MEMORY mode = PersistentStoreMode.MEMORY self.__base_path = None self.__temp_path = None self.__data_path = None if mode not in PERSISTENT_STORE_MODES: raise AttributeError( f"Persistent Storage mode ({mode}) provided is invalid") # Store our mode self.__mode = mode # Tracks when we have content to flush self.__dirty = False # A caching value to track persistent storage disk size self.__cache_size = None self.__cache_files = {} # Internal Cache self._cache = None # Prepare our environment self.__prepare() def read(self, key=None, compress=True, expires=False): """ Returns the content of the persistent store object if refresh is set to True, then the file's modify time is updated preventing it from getting caught in prune calls. It's a means of allowing it to persist and not get cleaned up in later prune calls. Content is always returned as a byte object """ try: with self.open(key, mode="rb", compress=compress) as fd: results = fd.read(self.max_file_size) if expires is False: self.__renew.add(os.path.join( self.__data_path, f"{key}{self.__extension}")) return results except (FileNotFoundError, exception.AppriseDiskIOError): # FileNotFoundError: No problem # exception.AppriseDiskIOError: # - Logging of error already occurred inside self.open() pass except (OSError, zlib.error, EOFError, UnicodeDecodeError, IOError) as e: # We can't access the file or it does not exist logger.warning('Could not read with persistent key: %s', key) logger.debug('Persistent Storage Exception: %s', str(e)) # return none return None def write(self, data, key=None, compress=True, _recovery=False): """ Writes the content to the persistent store if it doesn't exceed our filesize limit. 
Content is always written as a byte object _recovery is reserved for internal usage and should not be changed """ if key is None: key = self.base_key elif not isinstance(key, str) or not self.__valid_key.match(key): raise AttributeError( f"Persistent Storage key ({key} provided is invalid") if not isinstance(data, (bytes, str)): # One last check, we will accept read() objets with the expectation # it will return a binary dataset if not (hasattr(data, 'read') and callable(getattr(data, 'read'))): raise AttributeError( "Invalid data type {} provided to Persistent Storage" .format(type(data))) try: # Read in our data data = data.read() if not isinstance(data, (bytes, str)): raise AttributeError( "Invalid data type {} provided to Persistent Storage" .format(type(data))) except Exception as e: logger.warning( 'Could read() from potential iostream with persistent ' 'key: %s', key) logger.debug('Persistent Storage Exception: %s', str(e)) raise exception.AppriseDiskIOError( "Invalid data type {} provided to Persistent Storage" .format(type(data))) if self.__mode == PersistentStoreMode.MEMORY: # Nothing further can be done return False if _recovery: # Attempt to recover from a bad directory structure or setup self.__prepare() # generate our filename based on the key provided io_file = os.path.join(self.__data_path, f"{key}{self.__extension}") # Calculate the files current filesize try: prev_size = os.stat(io_file).st_size except FileNotFoundError: # No worries, no size to accomodate prev_size = 0 except (OSError, IOError) as e: # Permission error of some kind or disk problem... # There is nothing we can do at this point logger.warning('Could not write with persistent key: %s', key) logger.debug('Persistent Storage Exception: %s', str(e)) return False # Create a temporary file to write our content into # ntf = NamedTemporaryFile ntf = None new_file_size = 0 try: if isinstance(data, str): data = data.encode(self.encoding) ntf = tempfile.NamedTemporaryFile( mode="wb", dir=self.__temp_path, delete=False) # Close our file ntf.close() # Pointer to our open call _open = open if not compress else gzip.open with _open(ntf.name, mode='wb') as fd: # Write our content fd.write(data) # Get our file size new_file_size = os.stat(ntf.name).st_size # Log our progress logger.trace( 'Wrote %d bytes of data to persistent key: %s', new_file_size, key) except FileNotFoundError: # This happens if the directory path is gone preventing the file # from being created... if not _recovery: return self.write( data=data, key=key, compress=compress, _recovery=True) # We've already made our best effort to recover if we are here in # our code base... 
we're going to have to exit # Tidy our Named Temporary File _ntf_tidy(ntf) # Early Exit return False except (OSError, UnicodeEncodeError, IOError, zlib.error) as e: # We can't access the file or it does not exist logger.warning('Could not write to persistent key: %s', key) logger.debug('Persistent Storage Exception: %s', str(e)) # Tidy our Named Temporary File _ntf_tidy(ntf) return False if self.max_file_size > 0 and ( new_file_size + self.size() - prev_size) > self.max_file_size: # The content to store is to large logger.warning( 'Persistent content exceeds allowable maximum file length ' '({}KB); provide {}KB'.format( int(self.max_file_size / 1024), int(new_file_size / 1024))) return False # Return our final move if not self.__move(ntf.name, io_file): # Attempt to restore things as they were # Tidy our Named Temporary File _ntf_tidy(ntf) return False # Resetour reference variables self.__cache_size = None self.__cache_files.clear() # Content installed return True def __move(self, src, dst): """ Moves the new file in place and handles the old if it exists already If the transaction fails in any way, the old file is swapped back. Function returns True if successful and False if not. """ # A temporary backup of the file we want to move in place dst_backup = dst[:-len(self.__backup_extension)] + \ self.__backup_extension # # Backup the old file (if it exists) allowing us to have a restore # point in the event of a failure # try: # make sure the file isn't already present; if it is; remove it os.unlink(dst_backup) logger.trace( 'Removed previous persistent backup file: %s', dst_backup) except FileNotFoundError: # no worries; we were removing it anyway pass except (OSError, IOError) as e: # Permission error of some kind or disk problem... # There is nothing we can do at this point logger.warning( 'Could not previous persistent data backup: %s', dst_backup) logger.debug('Persistent Storage Exception: %s', str(e)) return False try: # Back our file up so we have a fallback os.rename(dst, dst_backup) logger.trace( 'Persistent storage backup file created: %s', dst_backup) except FileNotFoundError: # Not a problem; this is a brand new file we're writing # There is nothing to backup pass except (OSError, IOError) as e: # This isn't good... we couldn't put our new file in place logger.warning( 'Could not install persistent content %s -> %s', dst, os.path.basename(dst_backup)) logger.debug('Persistent Storage Exception: %s', str(e)) return False # # Now place the new file # try: os.rename(src, dst) logger.trace('Persistent file installed: %s', dst) except (OSError, IOError) as e: # This isn't good... we couldn't put our new file in place # Begin fall-back process before leaving the funtion logger.warning( 'Could not install persistent content %s -> %s', src, os.path.basename(dst)) logger.debug('Persistent Storage Exception: %s', str(e)) try: # Restore our old backup (if it exists) os.rename(dst_backup, dst) logger.trace( 'Restoring original persistent content: %s', dst) except FileNotFoundError: # Not a problem pass except (OSError, IOError) as e: # Permission error of some kind or disk problem... 
# There is nothing we can do at this point logger.warning( 'Failed to restore original persistent file: %s', dst) logger.debug('Persistent Storage Exception: %s', str(e)) return False return True def open(self, key=None, mode='r', buffering=-1, encoding=None, errors=None, newline=None, closefd=True, opener=None, compress=False, compresslevel=9): """ Returns an iterator to our our file within our namespace identified by the key provided. If no key is provided, then the default is used """ if key is None: key = self.base_key elif not isinstance(key, str) or not self.__valid_key.match(key): raise AttributeError( f"Persistent Storage key ({key} provided is invalid") if self.__mode == PersistentStoreMode.MEMORY: # Nothing further can be done raise FileNotFoundError() io_file = os.path.join(self.__data_path, f"{key}{self.__extension}") try: return open( io_file, mode=mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline, closefd=closefd, opener=opener) \ if not compress else gzip.open( io_file, compresslevel=compresslevel, encoding=encoding, errors=errors, newline=newline) except FileNotFoundError: # pass along (but wrap with Apprise exception) raise exception.AppriseFileNotFound( f"No such file or directory: '{io_file}'") except (OSError, IOError, zlib.error) as e: # We can't access the file or it does not exist logger.warning('Could not read with persistent key: %s', key) logger.debug('Persistent Storage Exception: %s', str(e)) raise exception.AppriseDiskIOError(str(e)) def get(self, key, default=None, lazy=True): """ Fetches from cache """ if self._cache is None and not self.__load_cache(): return default if key in self._cache and \ not self.__mode == PersistentStoreMode.MEMORY and \ not self.__dirty: # ensure we renew our content self.__renew.add(self.cache_file) return self._cache[key].value \ if key in self._cache and self._cache[key] else default def set(self, key, value, expires=None, persistent=True, lazy=True): """ Cache reference """ if self._cache is None and not self.__load_cache(): return False cache = CacheObject(value, expires, persistent=persistent) # Fetch our cache value try: if lazy and cache == self._cache[key]: # We're done; nothing further to do return True except KeyError: pass # Store our new cache self._cache[key] = CacheObject(value, expires, persistent=persistent) # Set our dirty flag self.__dirty = persistent if self.__dirty and self.__mode == PersistentStoreMode.FLUSH: # Flush changes to disk return self.flush() return True def clear(self, *args): """ Remove one or more cache entry by it's key e.g: clear('key') clear('key1', 'key2', key-12') Or clear everything: clear() """ if self._cache is None and not self.__load_cache(): return False if args: for arg in args: try: del self._cache[arg] # Set our dirty flag (if not set already) self.__dirty = True except KeyError: pass elif self._cache: # Request to remove everything and there is something to remove # Set our dirty flag (if not set already) self.__dirty = True # Reset our object self._cache.clear() if self.__dirty and self.__mode == PersistentStoreMode.FLUSH: # Flush changes to disk return self.flush() def prune(self): """ Eliminates expired cache entries """ if self._cache is None and not self.__load_cache(): return False change = False for key in list(self._cache.keys()): if key not in self: # It's identified as being expired if not change and self._cache[key].persistent: # track change only if content was persistent change = True # Set our dirty flag self.__dirty = True del 
self._cache[key] if self.__dirty and self.__mode == PersistentStoreMode.FLUSH: # Flush changes to disk return self.flush() return change def __load_cache(self, _recovery=False): """ Loads our cache _recovery is reserved for internal usage and should not be changed """ # Prepare our dirty flag self.__dirty = False if self.__mode == PersistentStoreMode.MEMORY: # Nothing further to do self._cache = {} return True # Prepare our cache file cache_file = self.cache_file try: with gzip.open(cache_file, 'rb') as f: # Read our ontent from disk self._cache = {} for k, v in json.loads(f.read().decode(self.encoding)).items(): co = CacheObject.instantiate(v) if co: # Verify our object before assigning it self._cache[k] = co elif not self.__dirty: # Track changes from our loadset self.__dirty = True except (UnicodeDecodeError, json.decoder.JSONDecodeError, zlib.error, TypeError, AttributeError, EOFError): # Let users known there was a problem logger.warning( 'Corrupted access persistent cache content: %s', cache_file) if not _recovery: try: os.unlink(cache_file) logger.trace( 'Removed previous persistent cache content: %s', cache_file) except FileNotFoundError: # no worries; we were removing it anyway pass except (OSError, IOError) as e: # Permission error of some kind or disk problem... # There is nothing we can do at this point logger.warning( 'Could not remove persistent cache content: %s', cache_file) logger.debug('Persistent Storage Exception: %s', str(e)) return False return self.__load_cache(_recovery=True) return False except FileNotFoundError: # No problem; no cache to load self._cache = {} except (OSError, IOError) as e: # Permission error of some kind or disk problem... # There is nothing we can do at this point logger.warning( 'Could not load persistent cache for namespace %s', os.path.basename(self.__base_path)) logger.debug('Persistent Storage Exception: %s', str(e)) return False # Ensure our dirty flag is set to False return True def __prepare(self, flush=True): """ Prepares a working environment """ if self.__mode != PersistentStoreMode.MEMORY: # Ensure our path exists try: os.makedirs(self.__base_path, mode=0o770, exist_ok=True) except (OSError, IOError) as e: # Permission error logger.debug( 'Could not create persistent store directory %s', self.__base_path) logger.debug('Persistent Storage Exception: %s', str(e)) # Mode changed back to MEMORY self.__mode = PersistentStoreMode.MEMORY # Ensure our path exists try: os.makedirs(self.__temp_path, mode=0o770, exist_ok=True) except (OSError, IOError) as e: # Permission error logger.debug( 'Could not create persistent store directory %s', self.__temp_path) logger.debug('Persistent Storage Exception: %s', str(e)) # Mode changed back to MEMORY self.__mode = PersistentStoreMode.MEMORY try: os.makedirs(self.__data_path, mode=0o770, exist_ok=True) except (OSError, IOError) as e: # Permission error logger.debug( 'Could not create persistent store directory %s', self.__data_path) logger.debug('Persistent Storage Exception: %s', str(e)) # Mode changed back to MEMORY self.__mode = PersistentStoreMode.MEMORY if self.__mode is PersistentStoreMode.MEMORY: logger.warning( 'The persistent storage could not be fully initialized; ' 'operating in MEMORY mode') else: if self._cache: # Recovery taking place self.__dirty = True logger.warning( 'The persistent storage environment was disrupted') if self.__mode is PersistentStoreMode.FLUSH and flush: # Flush changes to disk return self.flush(_recovery=True) def flush(self, force=False, _recovery=False): """ Save's 
our cache to disk """ if self._cache is None or self.__mode == PersistentStoreMode.MEMORY: # nothing to do return True while self.__renew: # update our files path = self.__renew.pop() ftime = time.time() try: # (access_time, modify_time) os.utime(path, (ftime, ftime)) logger.trace('file timestamp updated: %s', path) except FileNotFoundError: # No worries... move along pass except (OSError, IOError) as e: # We can't access the file or it does not exist logger.debug('Could not update file timestamp: %s', path) logger.debug('Persistent Storage Exception: %s', str(e)) if not force and self.__dirty is False: # Nothing further to do logger.trace('Persistent cache is consistent with memory map') return True if _recovery: # Attempt to recover from a bad directory structure or setup self.__prepare(flush=False) # Unset our size lazy setting self.__cache_size = None self.__cache_files.clear() # Prepare our cache file cache_file = self.cache_file if not self._cache: # # We're deleting the cache file s there are no entries left in it # backup_file = cache_file[:-len(self.__backup_extension)] + \ self.__backup_extension try: os.unlink(backup_file) logger.trace( 'Removed previous persistent cache backup: %s', backup_file) except FileNotFoundError: # no worries; we were removing it anyway pass except (OSError, IOError) as e: # Permission error of some kind or disk problem... # There is nothing we can do at this point logger.warning( 'Could not remove persistent cache backup: %s', backup_file) logger.debug('Persistent Storage Exception: %s', str(e)) return False try: os.rename(cache_file, backup_file) logger.trace( 'Persistent cache backup file created: %s', backup_file) except FileNotFoundError: # Not a problem; do not create a log entry pass except (OSError, IOError) as e: # This isn't good... we couldn't put our new file in place logger.warning( 'Could not remove stale persistent cache file: %s', cache_file) logger.debug('Persistent Storage Exception: %s', str(e)) return False return True # # If we get here, we need to update our file based cache # # ntf = NamedTemporaryFile ntf = None try: ntf = tempfile.NamedTemporaryFile( mode="w+", encoding=self.encoding, dir=self.__temp_path, delete=False) ntf.close() except FileNotFoundError: # This happens if the directory path is gone preventing the file # from being created... if not _recovery: return self.flush(force=True, _recovery=True) # We've already made our best effort to recover if we are here in # our code base... 
we're going to have to exit # Tidy our Named Temporary File _ntf_tidy(ntf) # Early Exit return False except OSError as e: logger.error( 'Persistent temporary directory inaccessible: %s', self.__temp_path) logger.debug('Persistent Storage Exception: %s', str(e)) # Tidy our Named Temporary File _ntf_tidy(ntf) # Early Exit return False try: # write our content currently saved to disk to our temporary file with gzip.open(ntf.name, 'wb') as f: # Write our content to disk f.write(json.dumps( {k: v for k, v in self._cache.items() if v and v.persistent}, separators=(',', ':'), cls=CacheJSONEncoder).encode(self.encoding)) except TypeError as e: # JSON object contains content that can not be encoded to disk logger.error( 'Persistent temporary file can not be written to ' 'due to bad input data: %s', ntf.name) logger.debug('Persistent Storage Exception: %s', str(e)) # Tidy our Named Temporary File _ntf_tidy(ntf) # Early Exit return False except (OSError, EOFError, zlib.error) as e: logger.error( 'Persistent temporary file inaccessible: %s', ntf.name) logger.debug('Persistent Storage Exception: %s', str(e)) # Tidy our Named Temporary File _ntf_tidy(ntf) # Early Exit return False if not self.__move(ntf.name, cache_file): # Attempt to restore things as they were # Tidy our Named Temporary File _ntf_tidy(ntf) return False # Ensure our dirty flag is set to False self.__dirty = False return True def files(self, exclude=True, lazy=True): """ Returns the total files """ if lazy and exclude in self.__cache_files: # Take an early exit with our cached results return self.__cache_files[exclude] elif self.__mode == PersistentStoreMode.MEMORY: # Take an early exit # exclude is our cache switch and can be either True or False. # For the below, we just set both cases and set them up as an # empty record self.__cache_files.update({True: [], False: []}) return [] if not lazy or self.__exclude_list is None: # A list of criteria that should be excluded from the size count self.__exclude_list = ( # Exclude backup cache file from count re.compile(re.escape(os.path.join( self.__base_path, f'{self.__cache_key}{self.__backup_extension}'))), # Exclude temporary files re.compile(re.escape(self.__temp_path) + r'[/\\].+'), # Exclude custom backup persistent files re.compile( re.escape(self.__data_path) + r'[/\\].+' + re.escape( self.__backup_extension)), ) try: if exclude: self.__cache_files[exclude] = \ [path for path in filter(os.path.isfile, glob.glob( os.path.join(self.__base_path, '**', '*'), recursive=True)) if next((False for p in self.__exclude_list if p.match(path)), True)] else: # No exclusion list applied self.__cache_files[exclude] = \ [path for path in filter(os.path.isfile, glob.glob( os.path.join(self.__base_path, '**', '*'), recursive=True))] except (OSError, IOError): # We can't access the directory or it does not exist self.__cache_files[exclude] = [] return self.__cache_files[exclude] @staticmethod def disk_scan(path, namespace=None, closest=True): """ Scansk a path provided and returns namespaces detected """ logger.trace('Persistent path can of: %s', path) def is_namespace(x): """ Validate what was detected is a valid namespace """ return os.path.isdir(os.path.join(path, x)) \ and PersistentStore.__valid_key.match(x) # Handle our namespace searching if namespace: if isinstance(namespace, str): namespace = [namespace] elif not isinstance(namespace, (tuple, set, list)): raise AttributeError( "namespace must be None, a string, or a tuple/set/list " "of strings") try: # Acquire all of the files in question 
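            # Note: what gets collected here are the namespace directories
            # themselves.  When 'closest' is True, a directory only needs to
            # start with one of the provided namespace strings; otherwise an
            # exact match is required.  (Illustrative example:
            # namespace=['abc'] matches an on-disk 'abc123' entry in closest
            # mode, but not in exact mode.)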
namespaces = \ [ns for ns in filter(is_namespace, os.listdir(path)) if not namespace or next( (True for n in namespace if ns.startswith(n)), False)] \ if closest else \ [ns for ns in filter(is_namespace, os.listdir(path)) if not namespace or ns in namespace] except FileNotFoundError: # no worries; Nothing to do logger.debug('Disk Prune path not found; nothing to clean.') return [] except (OSError, IOError) as e: # Permission error of some kind or disk problem... # There is nothing we can do at this point logger.error( 'Disk Scan detetcted inaccessible path: %s', path) logger.debug( 'Persistent Storage Exception: %s', str(e)) return [] return namespaces @staticmethod def disk_prune(path, namespace=None, expires=None, action=False): """ Prune persistent disk storage entries that are old and/or unreferenced you must specify a path to perform the prune within if one or more namespaces are provided, then pruning focuses ONLY on those entries (if matched). if action is not set to False, directories to be removed are returned only """ # Prepare our File Expiry expires = datetime.now() - timedelta(seconds=expires) \ if isinstance(expires, (float, int)) and expires >= 0 \ else PersistentStore.default_file_expiry # Get our namespaces namespaces = PersistentStore.disk_scan(path, namespace) # Track matches _map = {} for namespace in namespaces: # Prepare our map _map[namespace] = [] # Reference Directories base_dir = os.path.join(path, namespace) data_dir = os.path.join(base_dir, PersistentStore.data_dir) temp_dir = os.path.join(base_dir, PersistentStore.temp_dir) # Careful to only focus on files created by this Persistent Store # object files = [ os.path.join(base_dir, f'{PersistentStore.__cache_key}' f'{PersistentStore.__extension}'), os.path.join(base_dir, f'{PersistentStore.__cache_key}' f'{PersistentStore.__backup_extension}'), ] # Update our files (applying what was defined above too) valid_data_re = re.compile( r'.*(' + re.escape(PersistentStore.__extension) + r'|' + re.escape(PersistentStore.__backup_extension) + r')$') files = [path for path in filter( os.path.isfile, chain(glob.glob( os.path.join(data_dir, '*'), recursive=False), files)) if valid_data_re.match(path)] # Now all temporary files files.extend([path for path in filter( os.path.isfile, glob.glob( os.path.join(temp_dir, '*'), recursive=False))]) # Track if we should do a directory sweep later on dir_sweep = True # Scan our files for file in files: try: mtime = datetime.fromtimestamp(os.path.getmtime(file)) except FileNotFoundError: # no worries; we were removing it anyway continue except (OSError, IOError) as e: # Permission error of some kind or disk problem... # There is nothing we can do at this point logger.error( 'Disk Prune (ns=%s, clean=%s) detetcted inaccessible ' 'file: %s', namespace, 'yes' if action else 'no', file) logger.debug( 'Persistent Storage Exception: %s', str(e)) # No longer worth doing a directory sweep dir_sweep = False continue if expires < mtime: continue # # Handle Removing # record = { 'path': file, 'removed': False, } if action: try: os.unlink(file) # Update our record record['removed'] = True logger.info( 'Disk Prune (ns=%s, clean=%s) removed persistent ' 'file: %s', namespace, 'yes' if action else 'no', file) except FileNotFoundError: # no longer worth doing a directory sweep dir_sweep = False # otherwise, no worries; we were removing the file # anyway except (OSError, IOError) as e: # Permission error of some kind or disk problem... 
                        # There is nothing we can do at this point
                        logger.error(
                            'Disk Prune (ns=%s, clean=%s) failed to remove '
                            'persistent file: %s', namespace,
                            'yes' if action else 'no', file)
                        logger.debug(
                            'Persistent Storage Exception: %s', str(e))

                        # No longer worth doing a directory sweep
                        dir_sweep = False

                # Store our record
                _map[namespace].append(record)

            # Memory tidy
            del files

            if dir_sweep:
                # Gracefully clean up our namespace directory. It's okay if
                # we fail; this just means there were files in the directory.
                for dirpath in (temp_dir, data_dir, base_dir):

                    if action:
                        try:
                            os.rmdir(dirpath)
                            logger.info(
                                'Disk Prune (ns=%s, clean=%s) removed '
                                'persistent dir: %s', namespace,
                                'yes' if action else 'no', dirpath)

                        except OSError:
                            # do nothing
                            pass

        return _map

    def size(self, exclude=True, lazy=True):
        """
        Returns the total size of the persistent storage in bytes
        """
        if lazy and self.__cache_size is not None:
            # Take an early exit
            return self.__cache_size

        elif self.__mode == PersistentStoreMode.MEMORY:
            # Take an early exit
            self.__cache_size = 0
            return self.__cache_size

        # Get a list of files (file paths) in the given directory
        try:
            self.__cache_size = sum(
                [os.stat(path).st_size for path in self.files(
                    exclude=exclude, lazy=lazy)])

        except (OSError, IOError):
            # We can't access the directory or it does not exist
            self.__cache_size = 0

        return self.__cache_size

    def __del__(self):
        """
        Destructor for our object
        """
        if self.__mode == PersistentStoreMode.AUTO:
            # Flush changes to disk
            self.flush()

    def __delitem__(self, key):
        """
        Remove a cache entry by its key
        """
        if self._cache is None and not self.__load_cache():
            raise KeyError("Could not initialize cache")

        try:
            if self._cache[key].persistent:
                # Set our dirty flag in advance
                self.__dirty = True

            # Remove our cache entry
            del self._cache[key]

        except KeyError:
            # Nothing to do
            raise

        if self.__dirty and self.__mode == PersistentStoreMode.FLUSH:
            # Flush changes to disk
            self.flush()

        return

    def __contains__(self, key):
        """
        Verify if our storage contains the key specified or not.  In addition
        to this, if the content is expired, it is considered to be not
        contained in the storage.
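
        Illustrative sketch (the key name below is hypothetical):

            store.set('mykey', 'value', expires=1.0)
            'mykey' in store    # True until roughly one second passes;
                                # afterwards the entry is treated as expired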
""" if self._cache is None and not self.__load_cache(): return False return key in self._cache and self._cache[key] def __setitem__(self, key, value): """ Sets a cache value without disrupting existing settings in place """ if self._cache is None and not self.__load_cache(): raise KeyError("Could not initialize cache") if key not in self._cache and not self.set(key, value): raise KeyError("Could not set cache") else: # Update our value self._cache[key].set(value) if self._cache[key].persistent: # Set our dirty flag in advance self.__dirty = True if self.__dirty and self.__mode == PersistentStoreMode.FLUSH: # Flush changes to disk self.flush() return def __getitem__(self, key): """ Returns the indexed value """ if self._cache is None and not self.__load_cache(): raise KeyError("Could not initialize cache") result = self.get(key, default=self.__not_found_ref, lazy=False) if result is self.__not_found_ref: raise KeyError(f" {key} not found in cache") return result def keys(self): """ Returns our keys """ if self._cache is None and not self.__load_cache(): # There are no keys to return return {}.keys() return self._cache.keys() def delete(self, *args, all=None, temp=None, cache=None, validate=True): """ Manages our file space and tidys it up delete('key', 'key2') delete(all=True) delete(temp=True, cache=True) """ # Our failure flag has_error = False valid_key_re = re.compile( r'^(?P.+)(' + re.escape(self.__backup_extension) + r'|' + re.escape(self.__extension) + r')$', re.I) # Default asignments if all is None: all = True if not (len(args) or temp or cache) else False if temp is None: temp = True if all else False if cache is None: cache = True if all else False if cache and self._cache: # Reset our object self._cache.clear() # Reset dirt flag self.__dirty = False for path in self.files(exclude=False): # Some information we use to validate the actions of our clean() # call. This is so we don't remove anything we shouldn't base = os.path.dirname(path) fname = os.path.basename(path) # Clean printable path details ppath = os.path.join(os.path.dirname(base), fname) if base == self.__base_path and cache: # We're handling a cache file (hopefully) result = valid_key_re.match(fname) key = None if not result else ( result['key'] if self.__valid_key.match(result['key']) else None) if validate and key != self.__cache_key: # We're not dealing with a cache key logger.debug( 'Persistent File cleanup ignoring file: %s', path) continue # # We should proceed with removing the file if we get here # elif base == self.__data_path and (args or all): # We're handling a file found in our custom data path result = valid_key_re.match(fname) key = None if not result else ( result['key'] if self.__valid_key.match(result['key']) else None) if validate and key is None: # we're set to validate and a non-valid file was found logger.debug( 'Persistent File cleanup ignoring file: %s', path) continue elif not all and (key is None or key not in args): # no match found logger.debug( 'Persistent File cleanup ignoring file: %s', path) continue # # We should proceed with removing the file if we get here # elif base == self.__temp_path and temp: # # This directory is a temporary path and nothing in here needs # to be further verified. 
Proceed with the removing of the file # pass else: # No match; move on logger.debug('Persistent File cleanup ignoring file: %s', path) continue try: os.unlink(path) logger.info('Removed persistent file: %s', ppath) except FileNotFoundError: # no worries; we were removing it anyway pass except (OSError, IOError) as e: # Permission error of some kind or disk problem... # There is nothing we can do at this point has_error = True logger.error( 'Failed to remove persistent file: %s', ppath) logger.debug('Persistent Storage Exception: %s', str(e)) # Reset our reference variables self.__cache_size = None self.__cache_files.clear() return not has_error @property def cache_file(self): """ Returns the full path to the namespace directory """ return os.path.join( self.__base_path, f'{self.__cache_key}{self.__extension}', ) @property def path(self): """ Returns the full path to the namespace directory """ return self.__base_path @property def mode(self): """ Returns the full path to the namespace directory """ return self.__mode apprise-1.9.3/apprise/plugins/000077500000000000000000000000001477231770000163235ustar00rootroot00000000000000apprise-1.9.3/apprise/plugins/__init__.py000066400000000000000000000444071477231770000204450ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import os import copy # Used for testing from .base import NotifyBase from ..common import NotifyImageSize from ..common import NOTIFY_IMAGE_SIZES from ..common import NotifyType from ..common import NOTIFY_TYPES from ..utils.cwe312 import cwe312_url from ..utils.parse import parse_list, GET_SCHEMA_RE from ..logger import logger from ..locale import gettext_lazy as _ from ..locale import LazyTranslation from ..manager_plugins import NotificationManager # Grant access to our Notification Manager Singleton N_MGR = NotificationManager() __all__ = [ # Reference 'NotifyImageSize', 'NOTIFY_IMAGE_SIZES', 'NotifyType', 'NOTIFY_TYPES', 'NotifyBase', # Tokenizer 'url_to_dict', ] def _sanitize_token(tokens, default_delimiter): """ This is called by the details() function and santizes the output by populating expected and consistent arguments if they weren't otherwise specified. 
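
    For example (a hypothetical token entry, shown only to illustrate the
    defaults applied below):

        tokens = {'apikey': {'type': 'string'}}
        _sanitize_token(tokens, default_delimiter=(',', ' '))
        # tokens['apikey'] now additionally carries:
        #   'name': 'apikey', 'map_to': 'apikey',
        #   'required': False, 'private': False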
""" # Used for tracking groups group_map = {} # Iterate over our tokens for key in tokens.keys(): for element in tokens[key].keys(): # Perform translations (if detected to do so) if isinstance(tokens[key][element], LazyTranslation): tokens[key][element] = str(tokens[key][element]) if 'alias_of' in tokens[key]: # Do not touch this field continue elif 'name' not in tokens[key]: # Default to key tokens[key]['name'] = key if 'map_to' not in tokens[key]: # Default type to key tokens[key]['map_to'] = key # Track our map_to objects if tokens[key]['map_to'] not in group_map: group_map[tokens[key]['map_to']] = set() group_map[tokens[key]['map_to']].add(key) if 'type' not in tokens[key]: # Default type to string tokens[key]['type'] = 'string' elif tokens[key]['type'].startswith('list'): if 'delim' not in tokens[key]: # Default list delimiter (if not otherwise specified) tokens[key]['delim'] = default_delimiter if key in group_map[tokens[key]['map_to']]: # pragma: no branch # Remove ourselves from the list group_map[tokens[key]['map_to']].remove(key) # Pointing to the set directly so we can dynamically update # ourselves tokens[key]['group'] = group_map[tokens[key]['map_to']] elif tokens[key]['type'].startswith('choice') \ and 'default' not in tokens[key] \ and 'values' in tokens[key] \ and len(tokens[key]['values']) == 1: # If there is only one choice; then make it the default # - support dictionaries too tokens[key]['default'] = tokens[key]['values'][0] \ if not isinstance(tokens[key]['values'], dict) \ else next(iter(tokens[key]['values'])) if 'values' in tokens[key] and isinstance(tokens[key]['values'], dict): # Convert values into a list if it was defined as a dictionary tokens[key]['values'] = [k for k in tokens[key]['values'].keys()] if 'regex' in tokens[key]: # Verify that we are a tuple; convert strings to tuples if isinstance(tokens[key]['regex'], str): # Default tuple setup tokens[key]['regex'] = \ (tokens[key]['regex'], None) elif not isinstance(tokens[key]['regex'], (list, tuple)): # Invalid regex del tokens[key]['regex'] if 'required' not in tokens[key]: # Default required is False tokens[key]['required'] = False if 'private' not in tokens[key]: # Private flag defaults to False if not set tokens[key]['private'] = False return def details(plugin): """ Provides templates that can be used by developers to build URLs dynamically. If a list of templates is provided, then they will be used over the default value. If a list of tokens are provided, then they will over-ride any additional settings built from this script and/or will be appended to them afterwards. """ # Our unique list of parsing will be based on the provided templates # if none are provided we will use our own templates = tuple(plugin.templates) # The syntax is simple # { # # The token_name must tie back to an entry found in the # # templates list. # 'token_name': { # # # types can be 'string', 'int', 'choice', 'list, 'float' # # both choice and list may additionally have a : identify # # what the list/choice type is comprised of; the default # # is string. 
# 'type': 'choice:string', # # # values will only exist the type must be a fixed # # list of inputs (generated from type choice for example) # # # If this is a choice:bool then you should ALWAYS define # # this list as a (True, False) such as ('Yes, 'No') or # # ('Enabled', 'Disabled'), etc # 'values': [ 'http', 'https' ], # # # Identifies if the entry specified is required or not # 'required': True, # # # Identifies all tokens detected to be associated with the # # list:string # # This is ony present in list:string objects and is only set # # if this element acts as an alias for several other # # kwargs/fields. # 'group': [], # # # Identify a default value # 'default': 'http', # # # Optional Verification Entries min and max are for floats # # and/or integers # 'min': 4, # 'max': 5, # # # A list will always identify a delimiter. If this is # # part of a path, this may be a '/', or it could be a # # comma and/or space. delimiters are always in a list # # eg (if space and/or comma is a delimiter the entry # # would look like: 'delim': [',' , ' ' ] # 'delim': None, # # # Use regex if you want to share the regular expression # # required to validate the field. The regex will never # # accomodate the prefix (if one is specified). That is # # up to the user building the URLs to include the prefix # # on the URL when constructing it. # # The format is ('regex', 'reg options') # 'regex': (r'[A-Z0-9]+', 'i'), # # # A Prefix is always a string, to differentiate between # # multiple arguments, sometimes content is prefixed. # 'prefix': '@', # # # By default the key of this object is to be interpreted # # as the argument to the notification in question. However # # To accomodate cases where there are multiple types that # # all map to the same entry, one can find a map_to value. # 'map_to': 'function_arg', # # # Some arguments act as an alias_of an already defined object # # This plays a role more with configuration file generation # # since yaml files allow you to define different argumuments # # in line to simplify things. If this directive is set, then # # it should be treated exactly the same as the object it is # # an alias of # 'alias_of': 'function_arg', # # # Advise developers to consider the potential sensitivity # # of this field owned by the user. This is for passwords, # # and api keys, etc... # 'private': False, # }, # } # Template tokens identify the arguments required to initialize the # plugin itself. It identifies all of the tokens and provides some # details on their use. Each token defined should in some way map # back to at least one URL {token} defined in the templates # Since we nest a dictionary within a dictionary, a simple copy isn't # enough. a deepcopy allows us to manipulate this object in this # funtion without obstructing the original. template_tokens = copy.deepcopy(plugin.template_tokens) # Arguments and/or Options either have a default value and/or are # optional to be set. # # Since we nest a dictionary within a dictionary, a simple copy isn't # enough. a deepcopy allows us to manipulate this object in this # funtion without obstructing the original. template_args = copy.deepcopy(plugin.template_args) # Our template keyword arguments ?+key=value&-key=value # Basically the user provides both the key and the value. this is only # possibly by identifying the key prefix required for them to be # interpreted hence the +/- keys are built into apprise by default for easy # reference. 
In these cases, entry might look like '+' being the prefix: # { # 'arg_name': { # 'name': 'label', # 'prefix': '+', # } # } # # Since we nest a dictionary within a dictionary, a simple copy isn't # enough. a deepcopy allows us to manipulate this object in this # funtion without obstructing the original. template_kwargs = copy.deepcopy(plugin.template_kwargs) # We automatically create a schema entry template_tokens['schema'] = { 'name': _('Schema'), 'type': 'choice:string', 'required': True, 'values': parse_list(plugin.secure_protocol, plugin.protocol) } # Sanitize our tokens _sanitize_token(template_tokens, default_delimiter=('/', )) # Delimiter(s) are space and/or comma _sanitize_token(template_args, default_delimiter=(',', ' ')) _sanitize_token(template_kwargs, default_delimiter=(',', ' ')) # Argument/Option Handling for key in list(template_args.keys()): if 'alias_of' in template_args[key]: # Check if the mapped reference is a list; if it is, then # we need to store a different delimiter alias_of = template_tokens.get(template_args[key]['alias_of'], {}) if alias_of.get('type', '').startswith('list') \ and 'delim' not in template_args[key]: # Set a default delimiter of a comma and/or space if one # hasn't already been specified template_args[key]['delim'] = (',', ' ') # _lookup_default looks up what the default value if '_lookup_default' in template_args[key]: template_args[key]['default'] = getattr( plugin, template_args[key]['_lookup_default']) # Tidy as we don't want to pass this along in response del template_args[key]['_lookup_default'] # _exists_if causes the argument to only exist IF after checking # the return of an internal variable requiring a check if '_exists_if' in template_args[key]: if not getattr(plugin, template_args[key]['_exists_if']): # Remove entire object del template_args[key] else: # We only nee to remove this key del template_args[key]['_exists_if'] return { 'templates': templates, 'tokens': template_tokens, 'args': template_args, 'kwargs': template_kwargs, } def requirements(plugin): """ Provides a list of packages and its requirement details """ requirements = { # Use the description to provide a human interpretable description of # what is required to make the plugin work. This is only nessisary # if there are package dependencies 'details': '', # Define any required packages needed for the plugin to run. This is # an array of strings that simply look like lines in the # `requirements.txt` file... # # A single string is perfectly acceptable: # 'packages_required' = 'cryptography' # # Multiple entries should look like the following # 'packages_required' = [ # 'cryptography < 3.4`, # ] # 'packages_required': [], # Recommended packages identify packages that are not required to make # your plugin work, but would improve it's use or grant it access to # full functionality (that might otherwise be limited). # Similar to `packages_required`, you would identify each entry in # the array as you would in a `requirements.txt` file. 
# # - Do not re-provide entries already in the `packages_required` 'packages_recommended': [], } # Populate our template differently if we don't find anything above if not (hasattr(plugin, 'requirements') and isinstance(plugin.requirements, dict)): # We're done early return requirements # Get our required packages _req_packages = plugin.requirements.get('packages_required') if isinstance(_req_packages, str): # Convert to list _req_packages = [_req_packages] elif not isinstance(_req_packages, (set, list, tuple)): # Allow one to set the required packages to None (as an example) _req_packages = [] requirements['packages_required'] = [str(p) for p in _req_packages] # Get our recommended packages _opt_packages = plugin.requirements.get('packages_recommended') if isinstance(_opt_packages, str): # Convert to list _opt_packages = [_opt_packages] elif not isinstance(_opt_packages, (set, list, tuple)): # Allow one to set the recommended packages to None (as an example) _opt_packages = [] requirements['packages_recommended'] = [str(p) for p in _opt_packages] # Get our package details _req_details = plugin.requirements.get('details') if not _req_details: if not (_req_packages or _opt_packages): _req_details = _('No dependencies.') elif _req_packages: _req_details = _('Packages are required to function.') else: # opt_packages _req_details = \ _('Packages are recommended to improve functionality.') else: # Store our details if defined requirements['details'] = _req_details # Return our compiled package requirements return requirements def url_to_dict(url, secure_logging=True): """ Takes an apprise URL and returns the tokens associated with it if they can be acquired based on the plugins available. None is returned if the URL could not be parsed, otherwise the tokens are returned. These tokens can be loaded into apprise through it's add() function. """ # swap hash (#) tag values with their html version _url = url.replace('/#', '/%23') # CWE-312 (Secure Logging) Handling loggable_url = url if not secure_logging else cwe312_url(url) # Attempt to acquire the schema at the very least to allow our plugins to # determine if they can make a better interpretation of a URL geared for # them. schema = GET_SCHEMA_RE.match(_url) if schema is None: # Not a valid URL; take an early exit logger.error('Unsupported URL: {}'.format(loggable_url)) return None # Ensure our schema is always in lower case schema = schema.group('schema').lower() if schema not in N_MGR: # Give the user the benefit of the doubt that the user may be using # one of the URLs provided to them by their notification service. 
# Before we fail for good, just scan all the plugins that support the # native_url() parse function results = None for plugin in N_MGR.plugins(): results = plugin.parse_native_url(_url) if results: break if not results: logger.error('Unparseable URL {}'.format(loggable_url)) return None logger.trace('URL {} unpacked as:{}{}'.format( url, os.linesep, os.linesep.join( ['{}="{}"'.format(k, v) for k, v in results.items()]))) else: # Parse our url details of the server object as dictionary # containing all of the information parsed from our URL results = N_MGR[schema].parse_url(_url) if not results: logger.error('Unparseable {} URL {}'.format( N_MGR[schema].service_name, loggable_url)) return None logger.trace('{} URL {} unpacked as:{}{}'.format( N_MGR[schema].service_name, url, os.linesep, os.linesep.join( ['{}="{}"'.format(k, v) for k, v in results.items()]))) # Return our results return results apprise-1.9.3/apprise/plugins/africas_talking.py000066400000000000000000000372341477231770000220270ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # To use this plugin, you must have a Africas Talking Account setup; See here: # https://account.africastalking.com/ # From here... 
acquire your APIKey # # API Details: https://developers.africastalking.com/docs/sms/sending/bulk import requests from .base import NotifyBase from ..common import NotifyType from ..utils.parse import ( is_phone_no, parse_bool, parse_phone_no, validate_regex) from ..locale import gettext_lazy as _ class AfricasTalkingSMSMode: """ Africas Talking SMS Mode """ # BulkSMS Mode BULKSMS = 'bulksms' # Premium Mode PREMIUM = 'premium' # Sandbox Mode SANDBOX = 'sandbox' # Define the types in a list for validation purposes AFRICAS_TALKING_SMS_MODES = ( AfricasTalkingSMSMode.BULKSMS, AfricasTalkingSMSMode.PREMIUM, AfricasTalkingSMSMode.SANDBOX, ) # Extend HTTP Error Messages AFRICAS_TALKING_HTTP_ERROR_MAP = { 100: 'Processed', 101: 'Sent', 102: 'Queued', 401: 'Risk Hold', 402: 'Invalid Sender ID', 403: 'Invalid Phone Number', 404: 'Unsupported Number Type', 405: 'Insufficient Balance', 406: 'User In Blacklist', 407: 'Could Not Route', 409: 'Do Not Disturb Rejection', 500: 'Internal Server Error', 501: 'Gateway Error', 502: 'Rejected By Gateway', } class NotifyAfricasTalking(NotifyBase): """ A wrapper for Africas Talking Notifications """ # The default descriptive name associated with the Notification service_name = 'Africas Talking' # The services URL service_url = 'https://africastalking.com/' # The default secure protocol secure_protocol = 'atalk' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_africas_talking' # Africas Talking API Request URLs notify_url = { AfricasTalkingSMSMode.BULKSMS: 'https://api.africastalking.com/version1/messaging', AfricasTalkingSMSMode.PREMIUM: 'https://content.africastalking.com/version1/messaging', AfricasTalkingSMSMode.SANDBOX: 'https://api.sandbox.africastalking.com/version1/messaging', } # The maximum allowable characters allowed in the title per message title_maxlen = 0 # The maximum allowable characters allowed in the body per message body_maxlen = 160 # The maximum amount of phone numbers that can reside within a single # batch transfer default_batch_size = 50 # Define object templates templates = ( '{schema}://{appuser}@{apikey}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'appuser': { 'name': _('App User Name'), 'type': 'string', 'regex': (r'^[A-Z0-9_-]+$', 'i'), 'required': True, }, 'apikey': { 'name': _('API Key'), 'type': 'string', 'required': True, 'private': True, 'regex': (r'^[A-Z0-9_-]+$', 'i'), }, 'target_phone': { 'name': _('Target Phone'), 'type': 'string', 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'apikey': { 'alias_of': 'apikey', }, 'from': { # Your registered short code or alphanumeric 'name': _('From'), 'type': 'string', 'default': 'AFRICASTKNG', 'map_to': 'sender', }, 'batch': { 'name': _('Batch Mode'), 'type': 'bool', 'default': False, }, 'mode': { 'name': _('SMS Mode'), 'type': 'choice:string', 'values': AFRICAS_TALKING_SMS_MODES, 'default': AFRICAS_TALKING_SMS_MODES[0], }, }) def __init__(self, appuser, apikey, targets=None, sender=None, batch=None, mode=None, **kwargs): """ Initialize Africas Talking Object """ super().__init__(**kwargs) self.appuser = validate_regex( appuser, *self.template_tokens['appuser']['regex']) if not self.appuser: msg = 'The Africas Talking appuser specified ({}) is invalid.'\ .format(appuser) self.logger.warning(msg) raise 
TypeError(msg) self.apikey = validate_regex( apikey, *self.template_tokens['apikey']['regex']) if not self.apikey: msg = 'The Africas Talking apikey specified ({}) is invalid.'\ .format(apikey) self.logger.warning(msg) raise TypeError(msg) # Prepare Sender self.sender = self.template_args['from']['default'] \ if sender is None else sender # Prepare Batch Mode Flag self.batch = self.template_args['batch']['default'] \ if batch is None else batch self.mode = self.template_args['mode']['default'] \ if not isinstance(mode, str) else mode.lower() if isinstance(mode, str) and mode: self.mode = next( (a for a in AFRICAS_TALKING_SMS_MODES if a.startswith( mode.lower())), None) if self.mode not in AFRICAS_TALKING_SMS_MODES: msg = 'The Africas Talking mode specified ({}) is invalid.'\ .format(mode) self.logger.warning(msg) raise TypeError(msg) else: self.mode = self.template_args['mode']['default'] # Parse our targets self.targets = list() for target in parse_phone_no(targets): # Validate targets and drop bad ones: result = is_phone_no(target) if not result: self.logger.warning( 'Dropped invalid phone # ' '({}) specified.'.format(target), ) continue # store valid phone number # Carry forward '+' if defined, otherwise do not... self.targets.append( ('+' + result['full']) if target.lstrip()[0] == '+' else result['full']) def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Africas Talking Notification """ if not self.targets: # There is no one to email; we're done self.logger.warning( 'There are no Africas Talking recipients to notify') return False headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/x-www-form-urlencoded', 'Accept': 'application/json', 'apiKey': self.apikey, } # error tracking (used for function return) has_error = False # Send in batches if identified to do so batch_size = 1 if not self.batch else self.default_batch_size # Create a copy of the target list for index in range(0, len(self.targets), batch_size): # Prepare our payload payload = { 'username': self.appuser, 'to': ','.join(self.targets[index:index + batch_size]), 'from': self.sender, 'message': body, } # Acquire our URL notify_url = self.notify_url[self.mode] self.logger.debug( 'Africas Talking POST URL: %s (cert_verify=%r)' % ( notify_url, self.verify_certificate)) self.logger.debug('Africas Talking Payload: %s' % str(payload)) # Printable target detail p_target = self.targets[index] if batch_size == 1 \ else '{} target(s)'.format( len(self.targets[index:index + batch_size])) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( notify_url, data=payload, headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) # Sample response # { # "SMSMessageData": { # "Message": "Sent to 1/1 Total Cost: KES 0.8000", # "Recipients": [{ # "statusCode": 101, # "number": "+254711XXXYYY", # "status": "Success", # "cost": "KES 0.8000", # "messageId": "ATPid_SampleTxnId123" # }] # } # } if r.status_code not in (100, 101, 102, requests.codes.ok): # We had a problem status_str = \ NotifyAfricasTalking.http_response_code_lookup( r.status_code, AFRICAS_TALKING_HTTP_ERROR_MAP) self.logger.warning( 'Failed to send Africas Talking notification to {}: ' '{}{}error={}.'.format( p_target, status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure has_error = True continue else: self.logger.info( 'Sent Africas Talking notification to {}.' 
.format(p_target)) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Africas Talking ' 'notification to {}.'.format(p_target)) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure has_error = True continue return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.appuser, self.apikey) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'batch': 'yes' if self.batch else 'no', } if self.sender != self.template_args['from']['default']: # Set our sender if it was set params['from'] = self.sender if self.mode != self.template_args['mode']['default']: # Set our mode params['mode'] = self.mode # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) return '{schema}://{appuser}@{apikey}/{targets}?{params}'.format( schema=self.secure_protocol, appuser=NotifyAfricasTalking.quote(self.appuser, safe=''), apikey=self.pprint(self.apikey, privacy, safe=''), targets='/'.join( [NotifyAfricasTalking.quote(x, safe='+') for x in self.targets]), params=NotifyAfricasTalking.urlencode(params), ) def __len__(self): """ Returns the number of targets associated with this notification """ # # Factor batch into calculation # batch_size = 1 if not self.batch else self.default_batch_size targets = len(self.targets) if batch_size > 1: targets = int(targets / batch_size) + \ (1 if targets % batch_size else 0) return targets if targets > 0 else 1 @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. 
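        An illustrative example (the app user, API key and phone number used
        below are placeholders, not real credentials):

            atalk://AppUser@apikey123/+12345678900?mode=sandbox&batch=yes

        would yield an 'appuser' of AppUser, an 'apikey' of apikey123 and a
        single target phone number, with sandbox mode and batch delivery
        enabled.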
""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # The Application User ID results['appuser'] = NotifyAfricasTalking.unquote(results['user']) # Prepare our targets results['targets'] = [] # Our Application APIKey if 'apikey' in results['qsd'] and len(results['qsd']['apikey']): # Store our apikey if specified as keyword results['apikey'] = \ NotifyAfricasTalking.unquote(results['qsd']['apikey']) # This means our host is actually a phone number (target) results['targets'].append( NotifyAfricasTalking.unquote(results['host'])) else: # First item is our apikey results['apikey'] = NotifyAfricasTalking.unquote(results['host']) # Store our remaining targets found on path results['targets'].extend( NotifyAfricasTalking.split_path(results['fullpath'])) # The 'from' makes it easier to use yaml configuration if 'from' in results['qsd'] and len(results['qsd']['from']): results['sender'] = \ NotifyAfricasTalking.unquote(results['qsd']['from']) # Support the 'to' variable so that we can support targets this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyAfricasTalking.parse_phone_no(results['qsd']['to']) # Get our Mode if 'mode' in results['qsd'] and len(results['qsd']['mode']): results['mode'] = \ NotifyAfricasTalking.unquote(results['qsd']['mode']) # Get Batch Mode Flag results['batch'] = \ parse_bool(results['qsd'].get( 'batch', NotifyAfricasTalking.template_args['batch']['default'])) return results apprise-1.9.3/apprise/plugins/apprise_api.py000066400000000000000000000406421477231770000211770ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import re import requests from json import dumps from .. 
import exception from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyType from ..utils.parse import parse_list, validate_regex from ..locale import gettext_lazy as _ class AppriseAPIMethod: """ Defines the method to post data tot he remote server """ JSON = 'json' FORM = 'form' APPRISE_API_METHODS = ( AppriseAPIMethod.FORM, AppriseAPIMethod.JSON, ) class NotifyAppriseAPI(NotifyBase): """ A wrapper for Apprise (Persistent) API Notifications """ # The default descriptive name associated with the Notification service_name = 'Apprise API' # The services URL service_url = 'https://github.com/caronc/apprise-api' # The default protocol protocol = 'apprise' # The default secure protocol secure_protocol = 'apprises' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_apprise_api' # Support attachments attachment_support = True # Depending on the number of transactions/notifications taking place, this # could take a while. 30 seconds should be enough to perform the task socket_read_timeout = 30.0 # Disable throttle rate for Apprise API requests since they are normally # local anyway request_rate_per_sec = 0.0 # Define object templates templates = ( '{schema}://{host}/{token}', '{schema}://{host}:{port}/{token}', '{schema}://{user}@{host}/{token}', '{schema}://{user}@{host}:{port}/{token}', '{schema}://{user}:{password}@{host}/{token}', '{schema}://{user}:{password}@{host}:{port}/{token}', ) # Define our tokens; these are the minimum tokens required required to # be passed into this function (as arguments). The syntax appends any # previously defined in the base package and builds onto them template_tokens = dict(NotifyBase.template_tokens, **{ 'host': { 'name': _('Hostname'), 'type': 'string', 'required': True, }, 'port': { 'name': _('Port'), 'type': 'int', 'min': 1, 'max': 65535, }, 'user': { 'name': _('Username'), 'type': 'string', }, 'password': { 'name': _('Password'), 'type': 'string', 'private': True, }, 'token': { 'name': _('Token'), 'type': 'string', 'required': True, 'private': True, 'regex': (r'^[A-Z0-9_-]{1,128}$', 'i'), }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'tags': { 'name': _('Tags'), 'type': 'string', }, 'method': { 'name': _('Query Method'), 'type': 'choice:string', 'values': APPRISE_API_METHODS, 'default': APPRISE_API_METHODS[0], }, 'to': { 'alias_of': 'token', }, }) # Define any kwargs we're using template_kwargs = { 'headers': { 'name': _('HTTP Header'), 'prefix': '+', }, } def __init__(self, token=None, tags=None, method=None, headers=None, **kwargs): """ Initialize Apprise API Object headers can be a dictionary of key/value pairs that you want to additionally include as part of the server headers to post with """ super().__init__(**kwargs) self.token = validate_regex( token, *self.template_tokens['token']['regex']) if not self.token: msg = 'The Apprise API token specified ({}) is invalid.'\ .format(token) self.logger.warning(msg) raise TypeError(msg) self.method = self.template_args['method']['default'] \ if not isinstance(method, str) else method.lower() if self.method not in APPRISE_API_METHODS: msg = 'The method specified ({}) is invalid.'.format(method) self.logger.warning(msg) raise TypeError(msg) # Build list of tags self.__tags = parse_list(tags) self.headers = {} if headers: # Store our extra headers self.headers.update(headers) return def url(self, privacy=False, *args, **kwargs): """ Returns the URL built 
dynamically based on specified arguments. """ # Define any URL parameters params = { 'method': self.method, } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Append our headers into our parameters params.update({'+{}'.format(k): v for k, v in self.headers.items()}) if self.__tags: params['tags'] = ','.join([x for x in self.__tags]) # Determine Authentication auth = '' if self.user and self.password: auth = '{user}:{password}@'.format( user=NotifyAppriseAPI.quote(self.user, safe=''), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), ) elif self.user: auth = '{user}@'.format( user=NotifyAppriseAPI.quote(self.user, safe=''), ) default_port = 443 if self.secure else 80 fullpath = self.fullpath.strip('/') return '{schema}://{auth}{hostname}{port}{fullpath}{token}' \ '/?{params}'.format( schema=self.secure_protocol if self.secure else self.protocol, auth=auth, # never encode hostname since we're expecting it to be a # valid one hostname=self.host, port='' if self.port is None or self.port == default_port else ':{}'.format(self.port), fullpath='/{}/'.format(NotifyAppriseAPI.quote( fullpath, safe='/')) if fullpath else '/', token=self.pprint(self.token, privacy, safe=''), params=NotifyAppriseAPI.urlencode(params)) def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, **kwargs): """ Perform Apprise API Notification """ # Prepare HTTP Headers headers = { 'User-Agent': self.app_id, } # Apply any/all header over-rides defined headers.update(self.headers) attachments = [] files = [] if attach and self.attachment_support: for no, attachment in enumerate(attach, start=1): # Perform some simple error checking if not attachment: # We could not access the attachment self.logger.error( 'Could not access Apprise API attachment {}.'.format( attachment.url(privacy=True))) return False try: # Our Attachment filename filename = attachment.name \ if attachment.name else f'file{no:03}.dat' if self.method == AppriseAPIMethod.JSON: # Output must be in a DataURL format (that's what # PushSafer calls it): attachments.append({ "filename": filename, 'base64': attachment.base64(), 'mimetype': attachment.mimetype, }) else: # AppriseAPIMethod.FORM files.append(( 'file{:02d}'.format(no), ( filename, open(attachment.path, 'rb'), attachment.mimetype, ) )) except (TypeError, OSError, exception.AppriseException): # We could not access the attachment self.logger.error( 'Could not access AppriseAPI attachment {}.'.format( attachment.url(privacy=True))) return False self.logger.debug( 'Appending AppriseAPI attachment {}'.format( attachment.url(privacy=True))) # prepare Apprise API Object payload = { # Apprise API Payload 'title': title, 'body': body, 'type': notify_type, 'format': self.notify_format, } if self.method == AppriseAPIMethod.JSON: headers['Content-Type'] = 'application/json' if attachments: payload['attachments'] = attachments payload = dumps(payload) if self.__tags: payload['tag'] = self.__tags auth = None if self.user: auth = (self.user, self.password) # Set our schema schema = 'https' if self.secure else 'http' url = '%s://%s' % (schema, self.host) if isinstance(self.port, int): url += ':%d' % self.port fullpath = self.fullpath.strip('/') url += '{}'.format('/' + fullpath) if fullpath else '' url += '/notify/{}'.format(self.token) # Some entries can not be over-ridden headers.update({ # Our response to be in JSON format always 'Accept': 'application/json', # Pass our Source UUID4 Identifier 'X-Apprise-ID': 
self.asset._uid, # Pass our current recursion count to our upstream server 'X-Apprise-Recursion-Count': str(self.asset._recursion + 1), }) self.logger.debug('Apprise API POST URL: %s (cert_verify=%r)' % ( url, self.verify_certificate, )) self.logger.debug('Apprise API Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( url, data=payload, headers=headers, auth=auth, files=files if files else None, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyAppriseAPI.http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send Apprise API notification: ' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.info( 'Sent Apprise API notification; method=%s.', self.method) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Apprise API ' 'notification to %s.' % self.host) self.logger.debug('Socket Exception: %s' % str(e)) # Return; we're done return False except (OSError, IOError) as e: self.logger.warning( 'An I/O error occurred while reading one of the ' 'attached files.') self.logger.debug('I/O Exception: %s' % str(e)) return False finally: for file in files: # Ensure all files are closed file[1][1].close() return True @staticmethod def parse_native_url(url): """ Support http://hostname/notify/token and http://hostname/path/notify/token """ result = re.match( r'^http(?Ps?)://(?P[A-Z0-9._-]+)' r'(:(?P[0-9]+))?' r'(?P/[^?]+?)?/notify/(?P[A-Z0-9_-]{1,32})/?' r'(?P\?.+)?$', url, re.I) if result: return NotifyAppriseAPI.parse_url( '{schema}://{hostname}{port}{path}/{token}/{params}'.format( schema=NotifyAppriseAPI.secure_protocol if result.group('secure') else NotifyAppriseAPI.protocol, hostname=result.group('hostname'), port='' if not result.group('port') else ':{}'.format(result.group('port')), path='' if not result.group('path') else result.group('path'), token=result.group('token'), params='' if not result.group('params') else '?{}'.format(result.group('params')))) return None @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url) if not results: # We're done early as we couldn't load the results return results # Add our headers that the user can potentially over-ride if they wish # to to our returned result set and tidy entries by unquoting them results['headers'] = \ {NotifyAppriseAPI.unquote(x): NotifyAppriseAPI.unquote(y) for x, y in results['qsd+'].items()} # Support the passing of tags in the URL if 'tags' in results['qsd'] and len(results['qsd']['tags']): results['tags'] = \ NotifyAppriseAPI.parse_list(results['qsd']['tags']) # Support the 'to' & 'token' variable so that we can support rooms # this way too. 
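        # For illustration only: assuming a host of 'localhost' and a token
        # of 'abc123' (both placeholders), the token may be supplied in any
        # of the following ways:
        #
        #   apprise://localhost/abc123          - token taken from the path
        #   apprise://localhost/?token=abc123   - token taken from ?token=
        #   apprise://localhost/?to=abc123      - token taken from ?to=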
if 'token' in results['qsd'] and len(results['qsd']['token']): results['token'] = \ NotifyAppriseAPI.unquote(results['qsd']['token']) elif 'to' in results['qsd'] and len(results['qsd']['to']): results['token'] = NotifyAppriseAPI.unquote(results['qsd']['to']) else: # Start with a list of path entries to work with entries = NotifyAppriseAPI.split_path(results['fullpath']) if entries: # use our last entry found results['token'] = entries[-1] # pop our last entry off entries = entries[:-1] # re-assemble our full path results['fullpath'] = '/'.join(entries) # Set method if specified if 'method' in results['qsd'] and len(results['qsd']['method']): results['method'] = \ NotifyAppriseAPI.unquote(results['qsd']['method']) return results apprise-1.9.3/apprise/plugins/aprs.py000066400000000000000000000622631477231770000176530ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # # To use this plugin, you need to be a licensed ham radio operator # # Plugin constraints: # # - message length = 67 chars max. # - message content = ASCII 7 bit # - APRS messages will be sent without msg ID, meaning that # ham radio operators cannot acknowledge them # - Bring your own APRS-IS passcode. If you don't know what # this is or how to get it, then this plugin is not for you # - Do NOT change the Device/ToCall ID setting UNLESS this # module is used outside of Apprise. This identifier helps # the ham radio community with determining the software behind # a given APRS message. # - With great (ham radio) power comes great responsibility; do # not use this plugin for spamming other ham radio operators # # In order to digest text input which is not in plain English, # users can install the optional 'unidecode' package as part # of their venv environment. Details: see plugin description # # # You're done at this point, you only need to know your user/pass that # you signed up with. 
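#
# Note: the {user} below is your own (sending) call sign, while the
# {password} is your numeric APRS-IS passcode; read-only passcodes
# (value -1) are rejected by this plugin.
#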
# The following URLs would be accepted by Apprise: # - aprs://{user}:{password}@{callsign} # - aprs://{user}:{password}@{callsign1}/{callsign2} # Optional parameters: # - locale --> APRS-IS target server to connect with # Default: EURO --> 'euro.aprs2.net' # Details: https://www.aprs2.net/ # # APRS message format specification: # http://www.aprs.org/doc/APRS101.PDF # import socket import sys from itertools import chain from .base import NotifyBase from ..locale import gettext_lazy as _ from ..url import PrivacyMode from ..common import NotifyType from ..utils.parse import is_call_sign, parse_call_sign from .. import __version__ import re # Fixed APRS-IS server locales # Default is 'EURO' # See https://www.aprs2.net/ for details # Select the rotating server in case you # don"t care about a specific locale APRS_LOCALES = { "NOAM": "noam.aprs2.net", "SOAM": "soam.aprs2.net", "EURO": "euro.aprs2.net", "ASIA": "asia.aprs2.net", "AUNZ": "aunz.aprs2.net", "ROTA": "rotate.aprs2.net", } # Identify all unsupported characters APRS_BAD_CHARMAP = { r"Ä": "Ae", r"Ö": "Oe", r"Ü": "Ue", r"ä": "ae", r"ö": "oe", r"ü": "ue", r"ß": "ss", } # Our compiled mapping of bad characters APRS_COMPILED_MAP = re.compile( r'(' + '|'.join(APRS_BAD_CHARMAP.keys()) + r')') class NotifyAprs(NotifyBase): """ A wrapper for APRS Notifications via APRS-IS """ # The default descriptive name associated with the Notification service_name = "Aprs" # The services URL service_url = "https://www.aprs2.net/" # The default secure protocol secure_protocol = "aprs" # A URL that takes you to the setup/help of the specific protocol setup_url = "https://github.com/caronc/apprise/wiki/Notify_aprs" # APRS default port, supported by all core servers # Details: https://www.aprs-is.net/Connecting.aspx notify_port = 10152 # The maximum length of the APRS message body body_maxlen = 67 # Apprise APRS Device ID / TOCALL ID # This is a FIXED value which is associated with this plugin. # Its value MUST NOT be changed. If you use this APRS plugin # code OUTSIDE of Apprise, please request your own TOCALL ID. # Details: see https://github.com/aprsorg/aprs-deviceid # # Do NOT use the generic "APRS" TOCALL ID !!!!! # device_id = "APPRIS" # A title can not be used for APRS Messages. Setting this to zero will # cause any title (if defined) to get placed into the message body. title_maxlen = 0 # Helps to reduce the number of login-related errors where the # APRS-IS server "isn't ready yet". If we try to receive the rx buffer # without this grace perid in place, we may receive "incomplete" responses # where the login response lacks information. 
In case you receive too many # "Rx: APRS-IS msg is too short - needs to have at least two lines" error # messages, you might want to increase this value to a larger time span # Per previous experience, do not use values lower than 0.5 (seconds) request_rate_per_sec = 0.8 # Encoding of retrieved content aprs_encoding = 'latin-1' # Define object templates templates = ("{schema}://{user}:{password}@{targets}",) # Define our template tokens template_tokens = dict( NotifyBase.template_tokens, **{ "user": { "name": _("User Name"), "type": "string", "required": True, }, "password": { "name": _("Password"), "type": "string", "private": True, "required": True, }, "target_callsign": { "name": _("Target Callsign"), "type": "string", "regex": ( r"^[a-z0-9]{2,5}(-[a-z0-9]{1,2})?$", "i", ), "map_to": "targets", }, "targets": { "name": _("Targets"), "type": "list:string", "required": True, }, } ) # Define our template arguments template_args = dict( NotifyBase.template_args, **{ "to": { "name": _("Target Callsign"), "type": "string", "map_to": "targets", }, "delay": { "name": _("Resend Delay"), "type": "float", "min": 0.0, "max": 5.0, "default": 0.0, }, "locale": { "name": _("Locale"), "type": "choice:string", "values": APRS_LOCALES, "default": "EURO", }, } ) def __init__(self, targets=None, locale=None, delay=None, **kwargs): """ Initialize APRS Object """ super().__init__(**kwargs) # Our (future) socket sobject self.sock = None # Parse our targets self.targets = list() """ Check if the user has provided credentials """ if not (self.user and self.password): msg = "An APRS user/pass was not provided." self.logger.warning(msg) raise TypeError(msg) """ Check if the user tries to use a read-only access to APRS-IS. We need to send content, meaning that read-only access will not work """ if self.password == "-1": msg = "APRS read-only passwords are not supported." self.logger.warning(msg) raise TypeError(msg) """ Check if the password is numeric """ if not self.password.isnumeric(): msg = "Invalid APRS-IS password" self.logger.warning(msg) raise TypeError(msg) """ Convert given user name (FROM callsign) and device ID to to uppercase """ self.user = self.user.upper() self.device_id = self.device_id.upper() """ Check if the user has provided a locale for the APRS-IS-server and validate it, if necessary """ if locale: if locale.upper() not in APRS_LOCALES: msg = ( "Unsupported APRS-IS server locale. " "Received: {}. Valid: {}".format( locale, ", ".join(str(x) for x in APRS_LOCALES.keys()) ) ) self.logger.warning(msg) raise TypeError(msg) # Update our delay if delay is None: self.delay = NotifyAprs.template_args["delay"]["default"] else: try: self.delay = float(delay) if self.delay < NotifyAprs.template_args["delay"]["min"]: raise ValueError() elif self.delay >= NotifyAprs.template_args["delay"]["max"]: raise ValueError() except (TypeError, ValueError): msg = "Unsupported APRS-IS delay ({}) specified. 
".format( delay) self.logger.warning(msg) raise TypeError(msg) # Bump up our request_rate self.request_rate_per_sec += self.delay # Set the transmitter group self.locale = \ NotifyAprs.template_args["locale"]["default"] \ if not locale else locale.upper() # Used for URL generation afterwards only self.invalid_targets = list() for target in parse_call_sign(targets): # Validate targets and drop bad ones # We just need to know if the call sign (including SSID, if # provided) is valid and can then process the input as is result = is_call_sign(target) if not result: self.logger.warning( "Dropping invalid Amateur radio call sign ({}).".format( target ), ) self.invalid_targets.append(target.upper()) continue # Store entry self.targets.append(target.upper()) return def socket_close(self): """ Closes the socket connection whereas present """ if self.sock: try: self.sock.close() except Exception: # No worries if socket exception thrown on close() pass self.sock = None def socket_open(self): """ Establishes the connection to the APRS-IS socket server """ self.logger.debug( "Creating socket connection with APRS-IS {}:{}".format( APRS_LOCALES[self.locale], self.notify_port ) ) try: self.sock = socket.create_connection( (APRS_LOCALES[self.locale], self.notify_port), self.socket_connect_timeout, ) except ConnectionError as e: self.logger.debug("Socket Exception socket_open: %s", str(e)) self.sock = None return False except socket.gaierror as e: self.logger.debug("Socket Exception socket_open: %s", str(e)) self.sock = None return False except socket.timeout as e: self.logger.debug( "Socket Timeout Exception socket_open: %s", str(e)) self.sock = None return False except Exception as e: self.logger.debug("General Exception socket_open: %s", str(e)) self.sock = None return False # We are connected. # getpeername() is not supported by every OS. Therefore, # we MAY receive an exception even though we are # connected successfully. 
try: # Get the physical host/port of the server host, port = self.sock.getpeername() # and create debug info self.logger.debug("Connected to {}:{}".format(host, port)) except ValueError: # Seens as if we are running on an operating # system that does not support getpeername() # Create a minimal log file entry self.logger.debug("Connected to APRS-IS") # Return success return True def aprsis_login(self): """ Generate the APRS-IS login string, send it to the server and parse the response Returns True/False wrt whether the login was successful """ self.logger.debug("socket_login: init") # Check if we are connected if not self.sock: self.logger.warning("socket_login: Not connected to APRS-IS") return False # APRS-IS login string, see https://www.aprs-is.net/Connecting.aspx login_str = "user {0} pass {1} vers apprise {2}\r\n".format( self.user, self.password, __version__ ) # Send the data & abort in case of error if not self.socket_send(login_str): self.logger.warning( "socket_login: Login to APRS-IS unsuccessful," " exception occurred" ) self.socket_close() return False rx_buf = self.socket_receive(len(login_str) + 100) # Abort the remaining process in case an error has occurred if not rx_buf: self.logger.warning( "socket_login: Login to APRS-IS " "unsuccessful, exception occurred" ) self.socket_close() return False # APRS-IS sends at least two lines of data # The data that we need is in line #2 so # let's split the content and see what we have rx_lines = rx_buf.splitlines() if len(rx_lines) < 2: self.logger.warning( "socket_login: APRS-IS msg is too short" " - needs to have at least two lines" ) self.socket_close() return False # Now split the 2nd line's content and extract # both call sign and login status try: _, _, callsign, status, _ = rx_lines[1].split(" ", 4) except ValueError: # ValueError is returned if there were not enough elements to # populate the response self.logger.warning( "socket_login: " "received invalid response from APRS-IS" ) self.socket_close() return False if callsign != self.user: self.logger.warning( "socket_login: " "call signs differ: %s" % callsign ) self.socket_close() return False if status.startswith("unverified"): self.logger.warning( "socket_login: " "invalid APRS-IS password for given call sign" ) self.socket_close() return False # all validations are successful; we are connected return True def socket_send(self, tx_data): """ Generic "Send data to a socket" """ self.logger.debug("socket_send: init") # Check if we are connected if not self.sock: self.logger.warning("socket_send: Not connected to APRS-IS") return False # Encode our data if we are on Python3 or later payload = ( tx_data.encode("utf-8") if sys.version_info[0] >= 3 else tx_data ) # Always call throttle before any remote server i/o is made self.throttle() # Try to open the socket # Send the content to APRS-IS try: self.sock.setblocking(True) self.sock.settimeout(self.socket_connect_timeout) self.sock.sendall(payload) except socket.gaierror as e: self.logger.warning("Socket Exception socket_send: %s" % str(e)) self.sock = None return False except socket.timeout as e: self.logger.warning( "Socket Timeout Exception " "socket_send: %s" % str(e) ) self.sock = None return False except Exception as e: self.logger.warning( "General Exception " "socket_send: %s" % str(e) ) self.sock = None return False self.logger.debug("socket_send: successful") # mandatory on several APRS-IS servers # helps to reduce the number of errors where # the server only returns an abbreviated message return True def 
socket_reset(self): """ Resets the socket's buffer """ self.logger.debug("socket_reset: init") _ = self.socket_receive(0) self.logger.debug("socket_reset: successful") return True def socket_receive(self, rx_len): """ Generic "Receive data from a socket" """ self.logger.debug("socket_receive: init") # Check if we are connected if not self.sock: self.logger.warning("socket_receive: not connected to APRS-IS") return False # len is zero in case we intend to # reset the socket if rx_len > 0: self.logger.debug("socket_receive: Receiving data from APRS-IS") # Receive content from the socket try: self.sock.setblocking(False) self.sock.settimeout(self.socket_connect_timeout) rx_buf = self.sock.recv(rx_len) except socket.gaierror as e: self.logger.warning( "Socket Exception socket_receive: %s" % str(e) ) self.sock = None return False except socket.timeout as e: self.logger.warning( "Socket Timeout Exception " "socket_receive: %s" % str(e) ) self.sock = None return False except Exception as e: self.logger.warning( "General Exception " "socket_receive: %s" % str(e) ) self.sock = None return False rx_buf = ( rx_buf.decode(self.aprs_encoding) if sys.version_info[0] >= 3 else rx_buf ) # There will be no data in case we reset the socket if rx_len > 0: self.logger.debug("Received content: {}".format(rx_buf)) self.logger.debug("socket_receive: successful") return rx_buf.rstrip() def send(self, body, title="", notify_type=NotifyType.INFO, **kwargs): """ Perform APRS Notification """ if not self.targets: # There is no one to notify; we're done self.logger.warning( "There are no amateur radio call signs to notify" ) return False # prepare payload payload = body # sock object is "None" if we were unable to establish a connection # In case of errors, the error message has already been sent # to the logger object if not self.socket_open(): return False # We have established a successful connection # to the socket server. Now send the login information if not self.aprsis_login(): return False # Login & authorization confirmed # reset what is in our buffer self.socket_reset() # error tracking (used for function return) has_error = False # Create a copy of the targets list targets = list(self.targets) self.logger.debug("Starting Payload setup") # Prepare the outgoing message # Due to APRS's contraints, we need to do # a lot of filtering before we can send # the actual message # # First remove all characters from the # payload that would break APRS # see https://www.aprs.org/doc/APRS101.PDF pg. 
71 payload = re.sub("[{}|~]+", "", payload) payload = ( # pragma: no branch APRS_COMPILED_MAP.sub( lambda x: APRS_BAD_CHARMAP[x.group()], payload) ) # Finally, constrain output string to 67 characters as # APRS messages are limited in length payload = payload[:67] # Our outgoing message MUST end with a CRLF so # let's amend our payload respectively payload = payload.rstrip("\r\n") + "\r\n" self.logger.debug("Payload setup complete: {}".format(payload)) # send the message to our target call sign(s) for index in range(0, len(targets)): # prepare the output string # Format: # Device ID/TOCALL - our call sign - target call sign - body buffer = "{}>{}::{:9}:{}".format( self.user, self.device_id, targets[index], payload ) # and send the content to the socket # Note that there will be no response from APRS and # that all exceptions are handled within the 'send' method self.logger.debug("Sending APRS message: {}".format(buffer)) # send the content if not self.socket_send(buffer): has_error = True break # Finally, reset our socket buffer # we DO NOT read from the socket as we # would simply listen to the default APRS-IS stream self.socket_reset() self.logger.debug("Closing socket.") self.socket_close() self.logger.info( "Sent %d/%d APRS-IS notification(s)", index + 1, len(targets)) return not has_error def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = {} if self.locale != NotifyAprs.template_args["locale"]["default"]: # Store our locale if not default params['locale'] = self.locale if self.delay != NotifyAprs.template_args["delay"]["default"]: # Store our locale if not default params['delay'] = "{:.2f}".format(self.delay) # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Setup Authentication auth = "{user}:{password}@".format( user=NotifyAprs.quote(self.user, safe=""), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe="" ), ) return "{schema}://{auth}{targets}?{params}".format( schema=self.secure_protocol, auth=auth, targets="/".join(chain( [self.pprint(x, privacy, safe="") for x in self.targets], [self.pprint(x, privacy, safe="") for x in self.invalid_targets], )), params=NotifyAprs.urlencode(params), ) @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.user, self.password, self.locale) def __len__(self): """ Returns the number of targets associated with this notification """ targets = len(self.targets) return targets if targets > 0 else 1 def __del__(self): """ Ensure we close any lingering connections """ self.socket_close() @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. 
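        An illustrative example (all call signs and the passcode below are
        placeholders):

            aprs://DF1AB:12345@DF1CD/DF1EF-1?locale=NOAM&delay=1.0

        logs in as DF1AB with passcode 12345 and notifies the target call
        signs DF1CD and DF1EF-1 via the North American APRS-IS servers.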
""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # All elements are targets results["targets"] = [NotifyAprs.unquote(results["host"])] # All entries after the hostname are additional targets results["targets"].extend(NotifyAprs.split_path(results["fullpath"])) # Get Delay (if set) if 'delay' in results['qsd'] and len(results['qsd']['delay']): results['delay'] = NotifyAprs.unquote(results['qsd']['delay']) # Support the 'to' variable so that we can support rooms this way too # The 'to' makes it easier to use yaml configuration if "to" in results["qsd"] and len(results["qsd"]["to"]): results["targets"] += NotifyAprs.parse_list(results["qsd"]["to"]) # Set our APRS-IS server locale's key value and convert it to uppercase if "locale" in results["qsd"] and len(results["qsd"]["locale"]): results["locale"] = NotifyAprs.unquote( results["qsd"]["locale"] ).upper() return results apprise-1.9.3/apprise/plugins/bark.py000066400000000000000000000412671477231770000176260ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
# # API: https://github.com/Finb/bark-server/blob/master/docs/API_V2.md#python # import requests import json from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyImageSize from ..common import NotifyType from ..utils.parse import parse_list, parse_bool from ..locale import gettext_lazy as _ # Sounds generated off of: https://github.com/Finb/Bark/tree/master/Sounds BARK_SOUNDS = ( "alarm.caf", "anticipate.caf", "bell.caf", "birdsong.caf", "bloom.caf", "calypso.caf", "chime.caf", "choo.caf", "descent.caf", "electronic.caf", "fanfare.caf", "glass.caf", "gotosleep.caf", "healthnotification.caf", "horn.caf", "ladder.caf", "mailsent.caf", "minuet.caf", "multiwayinvitation.caf", "newmail.caf", "newsflash.caf", "noir.caf", "paymentsuccess.caf", "shake.caf", "sherwoodforest.caf", "silence.caf", "spell.caf", "suspense.caf", "telegraph.caf", "tiptoes.caf", "typewriters.caf", "update.caf", ) # Supported Level Entries class NotifyBarkLevel: """ Defines the Bark Level options """ ACTIVE = 'active' TIME_SENSITIVE = 'timeSensitive' PASSIVE = 'passive' CRITICAL = 'critical' BARK_LEVELS = ( NotifyBarkLevel.ACTIVE, NotifyBarkLevel.TIME_SENSITIVE, NotifyBarkLevel.PASSIVE, NotifyBarkLevel.CRITICAL, ) class NotifyBark(NotifyBase): """ A wrapper for Notify Bark Notifications """ # The default descriptive name associated with the Notification service_name = 'Bark' # The services URL service_url = 'https://github.com/Finb/Bark' # The default protocol protocol = 'bark' # The default secure protocol secure_protocol = 'barks' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_bark' # Allows the user to specify the NotifyImageSize object; this is supported # through the webhook image_size = NotifyImageSize.XY_128 # Define object templates templates = ( '{schema}://{host}/{targets}', '{schema}://{host}:{port}/{targets}', '{schema}://{user}:{password}@{host}/{targets}', '{schema}://{user}:{password}@{host}:{port}/{targets}', ) # Define our template arguments template_tokens = dict(NotifyBase.template_tokens, **{ 'host': { 'name': _('Hostname'), 'type': 'string', 'required': True, }, 'port': { 'name': _('Port'), 'type': 'int', 'min': 1, 'max': 65535, }, 'user': { 'name': _('Username'), 'type': 'string', }, 'password': { 'name': _('Password'), 'type': 'string', 'private': True, }, 'target_device': { 'name': _('Target Device'), 'type': 'string', 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', 'required': True, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'sound': { 'name': _('Sound'), 'type': 'choice:string', 'values': BARK_SOUNDS, }, 'level': { 'name': _('Level'), 'type': 'choice:string', 'values': BARK_LEVELS, }, 'volume': { 'name': _('Volume'), 'type': 'int', 'min': 0, 'max': 10, }, 'click': { 'name': _('Click'), 'type': 'string', }, 'badge': { 'name': _('Badge'), 'type': 'int', 'min': 0, }, 'category': { 'name': _('Category'), 'type': 'string', }, 'group': { 'name': _('Group'), 'type': 'string', }, 'image': { 'name': _('Include Image'), 'type': 'bool', 'default': True, 'map_to': 'include_image', }, }) def __init__(self, targets=None, include_image=True, sound=None, category=None, group=None, level=None, click=None, badge=None, volume=None, **kwargs): """ Initialize Notify Bark Object """ super().__init__(**kwargs) # Prepare our URL self.notify_url = '%s://%s%s/push' % ( 'https' if self.secure else 
'http', self.host, ':{}'.format(self.port) if (self.port and isinstance(self.port, int)) else '', ) # Assign our category self.category = \ category if isinstance(category, str) else None # Assign our group self.group = group if isinstance(group, str) else None # Initialize device list self.targets = parse_list(targets) # Place an image inline with the message body self.include_image = include_image # A clickthrough option for notifications self.click = click # Badge try: # Acquire our badge count if we can: # - We accept both the integer form as well as a string # representation self.badge = int(badge) if self.badge < 0: raise ValueError() except TypeError: # NoneType means use Default; this is an okay exception self.badge = None except ValueError: self.badge = None self.logger.warning( 'The specified Bark badge ({}) is not valid ', badge) # Sound (easy-lookup) self.sound = None if not sound else next( (f for f in BARK_SOUNDS if f.startswith(sound.lower())), None) if sound and not self.sound: self.logger.warning( 'The specified Bark sound ({}) was not found ', sound) # Volume self.volume = None if volume is not None: try: self.volume = int(volume) if volume is not None else None if self.volume is not None and not (0 <= self.volume <= 10): raise ValueError() except (TypeError, ValueError): self.logger.warning( 'The specified Bark volume ({}) is not valid. ' 'Must be between 0 and 10', volume) # Level self.level = None if not level else next( (f for f in BARK_LEVELS if f[0] == level[0]), None) if level and not self.level: self.logger.warning( 'The specified Bark level ({}) is not valid ', level) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Bark Notification """ # error tracking (used for function return) has_error = False if not self.targets: # We have nothing to notify; we're done self.logger.warning('There are no Bark devices to notify') return False # Prepare our headers headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json; charset=utf-8', } # Prepare our payload (sample below) # { # "body": "Test Bark Server", # "device_key": "nysrshcqielvoxsa", # "title": "bleem", # "category": "category", # "sound": "minuet.caf", # "badge": 1, # "icon": "https://day.app/assets/images/avatar.jpg", # "group": "test", # "url": "https://mritd.com" # } payload = { 'title': title if title else self.app_desc, 'body': body, } # Acquire our image url if configured to do so image_url = None if not self.include_image else \ self.image_url(notify_type) if image_url: payload['icon'] = image_url if self.sound: payload['sound'] = self.sound if self.click: payload['url'] = self.click if self.badge: payload['badge'] = self.badge if self.level: payload['level'] = self.level if self.category: payload['category'] = self.category if self.group: payload['group'] = self.group if self.volume: payload['volume'] = self.volume auth = None if self.user: auth = (self.user, self.password) # Create a copy of the targets targets = list(self.targets) while len(targets) > 0: # Retrieve our device key target = targets.pop() payload['device_key'] = target self.logger.debug('Bark POST URL: %s (cert_verify=%r)' % ( self.notify_url, self.verify_certificate, )) self.logger.debug('Bark Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( self.notify_url, data=json.dumps(payload), headers=headers, auth=auth, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != 
requests.codes.ok: # We had a problem status_str = \ NotifyBark.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to send Bark notification to {}: ' '{}{}error={}.'.format( target, status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure has_error = True continue else: self.logger.info( 'Sent Bark notification to {}.'.format(target)) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Bark ' 'notification to {}.'.format(target)) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure has_error = True continue return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol if self.secure else self.protocol, self.user, self.password, self.host, self.port, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'image': 'yes' if self.include_image else 'no', } if self.sound: params['sound'] = self.sound if self.click: params['click'] = self.click if self.badge: params['badge'] = str(self.badge) if self.level: params['level'] = self.level if self.volume: params['volume'] = str(self.volume) if self.category: params['category'] = self.category if self.group: params['group'] = self.group # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Determine Authentication auth = '' if self.user and self.password: auth = '{user}:{password}@'.format( user=NotifyBark.quote(self.user, safe=''), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), ) elif self.user: auth = '{user}@'.format( user=NotifyBark.quote(self.user, safe=''), ) default_port = 443 if self.secure else 80 return '{schema}://{auth}{hostname}{port}/{targets}?{params}'.format( schema=self.secure_protocol if self.secure else self.protocol, auth=auth, # never encode hostname since we're expecting it to be a valid one hostname=self.host, port='' if self.port is None or self.port == default_port else ':{}'.format(self.port), targets='/'.join( [NotifyBark.quote('{}'.format(x)) for x in self.targets]), params=NotifyBark.urlencode(params), ) def __len__(self): """ Returns the number of targets associated with this notification """ return len(self.targets) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. 
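        For example (host name and device key below are placeholders):

            barks://bark.example.com/devicekey?sound=minuet.caf&badge=1&group=home

        yields a single target device key with the matching 'sound', 'badge'
        and 'group' entries populated in the returned results.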
""" results = NotifyBase.parse_url(url) if not results: # We're done early as we couldn't load the results return results # Apply our targets results['targets'] = NotifyBark.split_path(results['fullpath']) # Category if 'category' in results['qsd'] and results['qsd']['category']: results['category'] = NotifyBark.unquote( results['qsd']['category'].strip()) # Group if 'group' in results['qsd'] and results['qsd']['group']: results['group'] = NotifyBark.unquote( results['qsd']['group'].strip()) # Badge if 'badge' in results['qsd'] and results['qsd']['badge']: results['badge'] = NotifyBark.unquote( results['qsd']['badge'].strip()) # Volume if 'volume' in results['qsd'] and results['qsd']['volume']: results['volume'] = NotifyBark.unquote( results['qsd']['volume'].strip()) # Level if 'level' in results['qsd'] and results['qsd']['level']: results['level'] = NotifyBark.unquote( results['qsd']['level'].strip()) # Click (URL) if 'click' in results['qsd'] and results['qsd']['click']: results['click'] = NotifyBark.unquote( results['qsd']['click'].strip()) # Sound if 'sound' in results['qsd'] and results['qsd']['sound']: results['sound'] = NotifyBark.unquote( results['qsd']['sound'].strip()) # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyBark.parse_list(results['qsd']['to']) # use image= for consistency with the other plugins results['include_image'] = \ parse_bool(results['qsd'].get('image', True)) return results apprise-1.9.3/apprise/plugins/base.py000066400000000000000000001000201477231770000176000ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
import asyncio import re from functools import partial from ..url import URLBase from ..common import NotifyType from ..utils.parse import parse_bool from ..common import NOTIFY_TYPES from ..common import NotifyFormat from ..common import NOTIFY_FORMATS from ..common import OverflowMode from ..common import OVERFLOW_MODES from ..common import PersistentStoreMode from ..locale import gettext_lazy as _ from ..persistent_store import PersistentStore from ..apprise_attachment import AppriseAttachment class NotifyBase(URLBase): """ This is the base class for all notification services """ # An internal flag used to test the state of the plugin. If set to # False, then the plugin is not used. Plugins can disable themselves # due to enviroment issues (such as missing libraries, or platform # dependencies that are not present). By default all plugins are # enabled. enabled = True # The category allows for parent inheritance of this object to alter # this when it's function/use is intended to behave differently. The # following category types exist: # # native: Is a native plugin written/stored in `apprise/plugins/Notify*` # custom: Is a custom plugin written/stored in a users plugin directory # that they loaded at execution time. category = 'native' # Some plugins may require additional packages above what is provided # already by Apprise. # # Use this section to relay this information to the users of the script to # help guide them with what they need to know if they plan on using your # plugin. The below configuration should otherwise accomodate all normal # situations and will not requrie any updating: requirements = { # Use the description to provide a human interpretable description of # what is required to make the plugin work. This is only nessisary # if there are package dependencies. Setting this to default will # cause a general response to be returned. Only set this if you plan # on over-riding the default. Always consider language support here. # So before providing a value do the following in your code base: # # from apprise.AppriseLocale import gettext_lazy as _ # # 'details': _('My detailed requirements') 'details': None, # Define any required packages needed for the plugin to run. This is # an array of strings that simply look like lines residing in a # `requirements.txt` file... # # As an example, an entry may look like: # 'packages_required': [ # 'cryptography < 3.4`, # ] 'packages_required': [], # Recommended packages identify packages that are not required to make # your plugin work, but would improve it's use or grant it access to # full functionality (that might otherwise be limited). # Similar to `packages_required`, you would identify each entry in # the array as you would in a `requirements.txt` file. # # - Do not re-provide entries already in the `packages_required` 'packages_recommended': [], } # The services URL service_url = None # A URL that takes you to the setup/help of the specific protocol setup_url = None # Most Servers do not like more then 1 request per 5 seconds, so 5.5 gives # us a safe play range. Override the one defined already in the URLBase request_rate_per_sec = 5.5 # Allows the user to specify the NotifyImageSize object image_size = None # The maximum allowable characters allowed in the body per message body_maxlen = 32768 # Defines the maximum allowable characters in the title; set this to zero # if a title can't be used. 
Titles that are not used but are defined are # automatically placed into the body title_maxlen = 250 # Set the maximum line count; if this is set to anything larger then zero # the message (prior to it being sent) will be truncated to this number # of lines. Setting this to zero disables this feature. body_max_line_count = 0 # Persistent storage default settings persistent_storage = True # Default Notify Format notify_format = NotifyFormat.TEXT # Default Overflow Mode overflow_mode = OverflowMode.UPSTREAM # Our default is to no not use persistent storage beyond in-memory # reference storage_mode = PersistentStoreMode.MEMORY # Default Emoji Interpretation interpret_emojis = False # Support Attachments; this defaults to being disabled. # Since apprise allows you to send attachments without a body or title # defined, by letting Apprise know the plugin won't support attachments # up front, it can quickly pass over and ignore calls to these end points. # You must set this to true if your application can handle attachments. # You must also consider a flow change to your notification if this is set # to True as well as now there will be cases where both the body and title # may not be set. There will never be a case where a body, or attachment # isn't set in the same call to your notify() function. attachment_support = False # Default Title HTML Tagging # When a title is specified for a notification service that doesn't accept # titles, by default apprise tries to give a plesant view and convert the # title so that it can be placed into the body. The default is to just # use a tag. The below causes the title to get generated: default_html_tag_id = 'b' # Here is where we define all of the arguments we accept on the url # such as: schema://whatever/?overflow=upstream&format=text # These act the same way as tokens except they are optional and/or # have default values set if mandatory. This rule must be followed template_args = dict(URLBase.template_args, **{ 'overflow': { 'name': _('Overflow Mode'), 'type': 'choice:string', 'values': OVERFLOW_MODES, # Provide a default 'default': overflow_mode, # look up default using the following parent class value at # runtime. The variable name identified here (in this case # overflow_mode) is checked and it's result is placed over-top of # the 'default'. This is done because once a parent class inherits # this one, the overflow_mode already set as a default 'could' be # potentially over-ridden and changed to a different value. '_lookup_default': 'overflow_mode', }, 'format': { 'name': _('Notify Format'), 'type': 'choice:string', 'values': NOTIFY_FORMATS, # Provide a default 'default': notify_format, # look up default using the following parent class value at # runtime. '_lookup_default': 'notify_format', }, 'emojis': { 'name': _('Interpret Emojis'), # SSL Certificate Authority Verification 'type': 'bool', # Provide a default 'default': interpret_emojis, # look up default using the following parent class value at # runtime. '_lookup_default': 'interpret_emojis', }, 'store': { 'name': _('Persistent Storage'), # Use Persistent Storage 'type': 'bool', # Provide a default 'default': persistent_storage, # look up default using the following parent class value at # runtime. 
'_lookup_default': 'persistent_storage', }, }) # # Overflow Defaults / Configuration applicable to SPLIT mode only # # Display Count [X/X] # ^^^^^^ # \\\\\\ # 6 characters (space + count) # Display Count [XX/XX] # ^^^^^^^^ # \\\\\\\\ # 8 characters (space + count) # Display Count [XXX/XXX] # ^^^^^^^^^^ # \\\\\\\\\\ # 10 characters (space + count) # Display Count [XXXX/XXXX] # ^^^^^^^^^^^^ # \\\\\\\\\\\\ # 12 characters (space + count) # # Given the above + some buffer we come up with the following: # If this value is exceeded, display counts automatically shut off overflow_max_display_count_width = 12 # The number of characters to reserver for whitespace buffering # This is detected automatically, but you can enforce a value if # you desire: overflow_buffer = 0 # the min accepted length of a title to allow for a counter display overflow_display_count_threshold = 130 # Whether or not when over-flow occurs, if the title should be repeated # each time the message is split up # - None: Detect # - True: Always display title once # - False: Display the title for each occurance overflow_display_title_once = None # If this is set to to True: # The title_maxlen should be considered as a subset of the body_maxlen # Hence: len(title) + len(body) should never be greater then body_maxlen # # If set to False, then there is no corrorlation between title_maxlen # restrictions and that of body_maxlen overflow_amalgamate_title = False def __init__(self, **kwargs): """ Initialize some general configuration that will keep things consistent when working with the notifiers that will inherit this class. """ super().__init__(**kwargs) # Store our interpret_emoji's setting # If asset emoji value is set to a default of True and the user # specifies it to be false, this is accepted and False over-rides. # # If asset emoji value is set to a default of None, a user may # optionally over-ride this and set it to True from the Apprise # URL. 
?emojis=yes # # If asset emoji value is set to a default of False, then all emoji's # are turned off (no user over-rides allowed) # # Our Persistent Storage object is initialized on demand self.__store = None # Take a default self.interpret_emojis = self.asset.interpret_emojis if 'emojis' in kwargs: # possibly over-ride default self.interpret_emojis = True if self.interpret_emojis \ in (None, True) and \ parse_bool( kwargs.get('emojis', False), default=NotifyBase.template_args['emojis']['default']) \ else False if 'format' in kwargs: # Store the specified format if specified notify_format = kwargs.get('format', '') if notify_format.lower() not in NOTIFY_FORMATS: msg = 'Invalid notification format {}'.format(notify_format) self.logger.error(msg) raise TypeError(msg) # Provide override self.notify_format = notify_format if 'overflow' in kwargs: # Store the specified format if specified overflow = kwargs.get('overflow', '') if overflow.lower() not in OVERFLOW_MODES: msg = 'Invalid overflow method {}'.format(overflow) self.logger.error(msg) raise TypeError(msg) # Provide override self.overflow_mode = overflow # Prepare our Persistent Storage switch self.persistent_storage = parse_bool( kwargs.get('store', NotifyBase.persistent_storage)) if not self.persistent_storage: # Enforce the disabling of cache (ortherwise defaults are use) self.url_identifier = False self.__cached_url_identifier = None def image_url(self, notify_type, logo=False, extension=None, image_size=None): """ Returns Image URL if possible """ if not self.image_size: return None if notify_type not in NOTIFY_TYPES: return None return self.asset.image_url( notify_type=notify_type, image_size=self.image_size if image_size is None else image_size, logo=logo, extension=extension, ) def image_path(self, notify_type, extension=None): """ Returns the path of the image if it can """ if not self.image_size: return None if notify_type not in NOTIFY_TYPES: return None return self.asset.image_path( notify_type=notify_type, image_size=self.image_size, extension=extension, ) def image_raw(self, notify_type, extension=None): """ Returns the raw image if it can """ if not self.image_size: return None if notify_type not in NOTIFY_TYPES: return None return self.asset.image_raw( notify_type=notify_type, image_size=self.image_size, extension=extension, ) def color(self, notify_type, color_type=None): """ Returns the html color (hex code) associated with the notify_type """ if notify_type not in NOTIFY_TYPES: return None return self.asset.color( notify_type=notify_type, color_type=color_type, ) def ascii(self, notify_type): """ Returns the ascii characters associated with the notify_type """ if notify_type not in NOTIFY_TYPES: return None return self.asset.ascii( notify_type=notify_type, ) def notify(self, *args, **kwargs): """ Performs notification """ try: # Build a list of dictionaries that can be used to call send(). send_calls = list(self._build_send_calls(*args, **kwargs)) except TypeError: # Internal error return False else: # Loop through each call, one at a time. (Use a list rather than a # generator to call all the partials, even in case of a failure.) the_calls = [self.send(**kwargs2) for kwargs2 in send_calls] return all(the_calls) async def async_notify(self, *args, **kwargs): """ Performs notification for asynchronous callers """ try: # Build a list of dictionaries that can be used to call send(). 
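            # As a rough sketch (the field values here are illustrative only),
            # each dictionary produced by _build_send_calls() takes the form:
            #
            #   {
            #       'body': 'a chunk of the (possibly split) message body',
            #       'title': 'the title (may be an empty string)',
            #       'notify_type': NotifyType.INFO,
            #       'attach': None,        # or an AppriseAttachment object
            #       'body_format': None,   # or the caller supplied format
            #   }
            #
            # and is handed straight to send() as keyword arguments.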
send_calls = list(self._build_send_calls(*args, **kwargs)) except TypeError: # Internal error return False else: loop = asyncio.get_event_loop() # Wrap each call in a coroutine that uses the default executor. # TODO: In the future, allow plugins to supply a native # async_send() method. async def do_send(**kwargs2): send = partial(self.send, **kwargs2) result = await loop.run_in_executor(None, send) return result # gather() all calls in parallel. the_cors = (do_send(**kwargs2) for kwargs2 in send_calls) return all(await asyncio.gather(*the_cors)) def _build_send_calls(self, body=None, title=None, notify_type=NotifyType.INFO, overflow=None, attach=None, body_format=None, **kwargs): """ Get a list of dictionaries that can be used to call send() or (in the future) async_send(). """ if not self.enabled: # Deny notifications issued to services that are disabled msg = f"{self.service_name} is currently disabled on this system." self.logger.warning(msg) raise TypeError(msg) # Prepare attachments if required if attach is not None and not isinstance(attach, AppriseAttachment): try: attach = AppriseAttachment(attach, asset=self.asset) except TypeError: # bad attachments raise # Handle situations where the body is None body = '' if not body else body elif not (body or attach): # If there is not an attachment at the very least, a body must be # present msg = "No message body or attachment was specified." self.logger.warning(msg) raise TypeError(msg) if not body and not self.attachment_support: # If no body was specified, then we know that an attachment # was. This is logic checked earlier in the code. # # Knowing this, if the plugin itself doesn't support sending # attachments, there is nothing further to do here, just move # along. msg = f"{self.service_name} does not support attachments; " \ " service skipped" self.logger.warning(msg) raise TypeError(msg) # Handle situations where the title is None title = '' if not title else title # Truncate flag set with attachments ensures that only 1 # attachment passes through. In the event there could be many # services specified, we only want to do this logic once. # The logic is only applicable if ther was more then 1 attachment # specified overflow = self.overflow_mode if overflow is None else overflow if attach and len(attach) > 1 and overflow == OverflowMode.TRUNCATE: # Save first attachment _attach = AppriseAttachment(attach[0], asset=self.asset) else: # reference same attachment _attach = attach # Apply our overflow (if defined) for chunk in self._apply_overflow( body=body, title=title, overflow=overflow, body_format=body_format): # Send notification yield dict( body=chunk['body'], title=chunk['title'], notify_type=notify_type, attach=_attach, body_format=body_format ) def _apply_overflow(self, body, title=None, overflow=None, body_format=None): """ Takes the message body and title as input. This function then applies any defined overflow restrictions associated with the notification service and may alter the message if/as required. 
        The function will always return a list object in the following
        structure:
            [
                {
                    title: 'the title goes here',
                    body: 'the message body goes here',
                },
                {
                    title: 'the title goes here',
                    body: 'the continued message body goes here',
                },
            ]

        """

        response = list()

        # tidy
        title = '' if not title else title.strip()
        body = '' if not body else body.rstrip()

        if overflow is None:
            # default
            overflow = self.overflow_mode

        if self.title_maxlen <= 0 and len(title) > 0:

            if self.notify_format == NotifyFormat.HTML:
                # Content is appended to body as html
                body = '<{open_tag}>{title}</{close_tag}>' \
                    '<br />
\r\n{body}'.format( open_tag=self.default_html_tag_id, title=title, close_tag=self.default_html_tag_id, body=body) elif self.notify_format == NotifyFormat.MARKDOWN and \ body_format == NotifyFormat.TEXT: # Content is appended to body as markdown title = title.lstrip('\r\n \t\v\f#-') if title: # Content is appended to body as text body = '# {}\r\n{}'.format(title, body) else: # Content is appended to body as text body = '{}\r\n{}'.format(title, body) title = '' # Enforce the line count first always if self.body_max_line_count > 0: # Limit results to just the first 2 line otherwise # there is just to much content to display body = re.split(r'\r*\n', body) body = '\r\n'.join(body[0:self.body_max_line_count]) if overflow == OverflowMode.UPSTREAM: # Nothing more to do response.append({'body': body, 'title': title}) return response # a value of '2' allows for the \r\n that is applied when # amalgamating the title overflow_buffer = max(2, self.overflow_buffer) \ if (self.title_maxlen == 0 and len(title)) \ else self.overflow_buffer # # If we reach here in our code, then we're using TRUNCATE, or SPLIT # actions which require some math to handle the data # # Handle situations where our body and title are amalamated into one # calculation title_maxlen = self.title_maxlen \ if not self.overflow_amalgamate_title \ else min(len(title) + self.overflow_max_display_count_width, self.title_maxlen, self.body_maxlen) if len(title) > title_maxlen: # Truncate our Title title = title[:title_maxlen].rstrip() if self.overflow_amalgamate_title and ( self.body_maxlen - overflow_buffer) >= title_maxlen: body_maxlen = (self.body_maxlen if not title else ( self.body_maxlen - title_maxlen)) - overflow_buffer else: # status quo body_maxlen = self.body_maxlen \ if not self.overflow_amalgamate_title else \ (self.body_maxlen - overflow_buffer) if body_maxlen > 0 and len(body) <= body_maxlen: response.append({'body': body, 'title': title}) return response if overflow == OverflowMode.TRUNCATE: # Truncate our body and return response.append({ 'body': body[:body_maxlen].lstrip('\r\n\x0b\x0c').rstrip(), 'title': title, }) # For truncate mode, we're done now return response if self.overflow_display_title_once is None: # Detect if we only display our title once or not: overflow_display_title_once = \ True if self.overflow_amalgamate_title and \ body_maxlen < self.overflow_display_count_threshold \ else False else: # Take on defined value overflow_display_title_once = self.overflow_display_title_once # If we reach here, then we are in SPLIT mode. # For here, we want to split the message as many times as we have to # in order to fit it within the designated limits. 
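        # Illustrative sketch of what the code below produces (the numbers are
        # made up for the example): a 250 character body split against a
        # roughly 100 character limit yields 3 chunks and, when the counter
        # can be displayed, each chunk's title is suffixed using the template
        # assembled below:
        #
        #   'title [1/3]' + the first chunk
        #   'title [2/3]' + the second chunk
        #   'title [3/3]' + the final chunk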
if not overflow_display_title_once and not ( # edge case that can occur when overflow_display_title_once is # forced off, but no body exists self.overflow_amalgamate_title and body_maxlen <= 0): show_counter = title and len(body) > body_maxlen and \ ((self.overflow_amalgamate_title and body_maxlen >= self.overflow_display_count_threshold) or (not self.overflow_amalgamate_title and title_maxlen > self.overflow_display_count_threshold)) and ( title_maxlen > (self.overflow_max_display_count_width + overflow_buffer) and self.title_maxlen >= self.overflow_display_count_threshold) count = 0 template = '' if show_counter: # introduce padding body_maxlen -= overflow_buffer count = int(len(body) / body_maxlen) \ + (1 if len(body) % body_maxlen else 0) # Detect padding and prepare template digits = len(str(count)) template = ' [{:0%d}/{:0%d}]' % (digits, digits) # Update our counter overflow_display_count_width = 4 + (digits * 2) if overflow_display_count_width <= \ self.overflow_max_display_count_width: if len(title) > \ title_maxlen - overflow_display_count_width: # Truncate our title further title = title[:title_maxlen - overflow_display_count_width] else: # Way to many messages to display show_counter = False response = [{ 'body': body[i: i + body_maxlen] .lstrip('\r\n\x0b\x0c').rstrip(), 'title': title + ( '' if not show_counter else template.format(idx, count))} for idx, i in enumerate(range(0, len(body), body_maxlen), start=1)] else: # Display title once and move on response = [] try: i = range(0, len(body), body_maxlen)[0] response.append({ 'body': body[i: i + body_maxlen] .lstrip('\r\n\x0b\x0c').rstrip(), 'title': title, }) except (ValueError, IndexError): # IndexError: # - This happens if there simply was no body to display # ValueError: # - This happens when body_maxlen < 0 (due to title being # so large) # No worries; send title along response.append({ 'body': '', 'title': title, }) # Ensure our start is set properly body_maxlen = 0 # Now re-calculate based on the increased length for i in range(body_maxlen, len(body), self.body_maxlen): response.append({ 'body': body[i: i + self.body_maxlen] .lstrip('\r\n\x0b\x0c').rstrip(), 'title': '', }) return response def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Should preform the actual notification itself. """ raise NotImplementedError( "send() is not implimented by the child class.") def url_parameters(self, *args, **kwargs): """ Provides a default set of parameters to work with. This can greatly simplify URL construction in the acommpanied url() function in all defined plugin services. """ params = { 'format': self.notify_format, 'overflow': self.overflow_mode, } # Persistent Storage Setting if self.persistent_storage != NotifyBase.persistent_storage: params['store'] = 'yes' if self.persistent_storage else 'no' params.update(super().url_parameters(*args, **kwargs)) # return default parameters return params @staticmethod def parse_url(url, verify_host=True, plus_to_space=False): """Parses the URL and returns it broken apart into a dictionary. This is very specific and customized for Apprise. Args: url (str): The URL you want to fully parse. verify_host (:obj:`bool`, optional): a flag kept with the parsed URL which some child classes will later use to verify SSL keys (if SSL transactions take place). Unless under very specific circumstances, it is strongly recomended that you leave this default value set to True. 
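            plus_to_space (:obj:`bool`, optional): when set to True, plus (+)
                characters found in the query string are decoded as spaces
                instead of being preserved as literal plus characters.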
Returns: A dictionary is returned containing the URL fully parsed if successful, otherwise None is returned. """ results = URLBase.parse_url( url, verify_host=verify_host, plus_to_space=plus_to_space) if not results: # We're done; we failed to parse our url return results # Allow overriding the default format if 'format' in results['qsd']: results['format'] = results['qsd'].get('format') if results['format'] not in NOTIFY_FORMATS: URLBase.logger.warning( 'Unsupported format specified {}'.format( results['format'])) del results['format'] # Allow overriding the default overflow if 'overflow' in results['qsd']: results['overflow'] = results['qsd'].get('overflow') if results['overflow'] not in OVERFLOW_MODES: URLBase.logger.warning( 'Unsupported overflow specified {}'.format( results['overflow'])) del results['overflow'] # Allow emoji's override if 'emojis' in results['qsd']: results['emojis'] = parse_bool(results['qsd'].get('emojis')) # Store our persistent storage boolean if 'store' in results['qsd']: results['store'] = results['qsd']['store'] return results @staticmethod def parse_native_url(url): """ This is a base class that can be optionally over-ridden by child classes who can build their Apprise URL based on the one provided by the notification service they choose to use. The intent of this is to make Apprise a little more userfriendly to people who aren't familiar with constructing URLs and wish to use the ones that were just provied by their notification serivice that they're using. This function will return None if the passed in URL can't be matched as belonging to the notification service. Otherwise this function should return the same set of results that parse_url() does. """ return None @property def store(self): """ Returns a pointer to our persistent store for use. The best use cases are: self.store.get('key') self.store.set('key', 'value') self.store.delete('key1', 'key2', ...) You can also access the keys this way: self.store['key'] And clear them: del self.store['key'] """ if self.__store is None: # Initialize our persistent store for use self.__store = PersistentStore( namespace=self.url_id(), path=self.asset.storage_path, mode=self.asset.storage_mode) return self.__store apprise-1.9.3/apprise/plugins/base.pyi000066400000000000000000000000251477231770000177550ustar00rootroot00000000000000class NotifyBase: ...apprise-1.9.3/apprise/plugins/bluesky.py000066400000000000000000000473311477231770000203630ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # 1. Create a BlueSky account # 2. Access Settings -> Privacy and Security # 3. Generate an App Password. Optionally grant yourself access to Direct # Messages if you want to be able to send them # 4. Assemble your Apprise URL like: # bluesky://handle@you-token-here # import re import requests import json from datetime import (datetime, timezone, timedelta) from ..attachment.base import AttachBase from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyType from ..locale import gettext_lazy as _ # For parsing handles HANDLE_HOST_PARSE_RE = re.compile(r'(?P[^.]+)\.+(?P.+)$') IS_USER = re.compile(r'^\s*@?(?P[A-Z0-9_]+)(\.+(?P.+))?$', re.I) class NotifyBlueSky(NotifyBase): """ A wrapper for BlueSky Notifications """ # The default descriptive name associated with the Notification service_name = 'BlueSky' # The services URL service_url = 'https://bluesky.us/' # Protocol secure_protocol = ('bsky', 'bluesky') # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_bluesky' # Support attachments attachment_support = True # XRPC Suffix URLs; Structured as: # https://host/{suffix} # Taken right from google.auth.helpers: clock_skew = timedelta(seconds=10) # 1 hour in seconds (the lifetime of our token) access_token_lifetime_sec = timedelta(seconds=3600) # Detect your Decentralized Identitifer (DID), then you can get your Auth # Token. xrpc_suffix_did = "/xrpc/com.atproto.identity.resolveHandle" xrpc_suffix_session = "/xrpc/com.atproto.server.createSession" xrpc_suffix_record = "/xrpc/com.atproto.repo.createRecord" xrpc_suffix_blob = "/xrpc/com.atproto.repo.uploadBlob" # BlueSky is kind enough to return how many more requests we're allowed to # continue to make within it's header response as: # RateLimit-Reset: The epoc time (in seconds) we can expect our # rate-limit to be reset. # RateLimit-Remaining: an integer identifying how many requests we're # still allow to make. request_rate_per_sec = 0 # For Tracking Purposes ratelimit_reset = datetime.now(timezone.utc).replace(tzinfo=None) # Remaining messages ratelimit_remaining = 1 # The default BlueSky host to use if one isn't specified bluesky_default_host = 'bsky.social' # Our message body size body_maxlen = 280 # BlueSky does not support a title title_maxlen = 0 # Define object templates templates = ( '{schema}://{user}@{password}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'user': { 'name': _('Username'), 'type': 'string', 'required': True, }, 'password': { 'name': _('Password'), 'type': 'string', 'private': True, 'required': True, }, }) def __init__(self, **kwargs): """ Initialize BlueSky Object """ super().__init__(**kwargs) # Our access token self.__access_token = self.store.get('access_token') self.__refresh_token = None self.__access_token_expiry = datetime.now(timezone.utc) if not self.user: msg = 'A BlueSky UserID/Handle must be specified.' 
self.logger.warning(msg) raise TypeError(msg) # Set our default host self.host = self.bluesky_default_host # Identify our Handle (if define) results = HANDLE_HOST_PARSE_RE.match(self.user) if results: self.user = results.group('handle').strip() self.host = results.group('host').strip() return def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, **kwargs): """ Perform BlueSky Notification """ if not self.__access_token and not self.login(): # We failed to authenticate - we're done return False # Track our returning blob IDs as they're stored on the BlueSky server blobs = [] if attach and self.attachment_support: url = f'https://{self.host}{self.xrpc_suffix_blob}' # We need to upload our payload first so that we can source it # in remaining messages for no, attachment in enumerate(attach, start=1): # Perform some simple error checking if not attachment: # We could not access the attachment self.logger.error( 'Could not access attachment {}.'.format( attachment.url(privacy=True))) return False if not re.match(r'^image/.*', attachment.mimetype, re.I): # Only support images at this time self.logger.warning( 'Ignoring unsupported BlueSky attachment {}.'.format( attachment.url(privacy=True))) continue self.logger.debug( 'Preparing BlueSky attachment {}'.format( attachment.url(privacy=True))) # Upload our image and get our blob associated with it postokay, response = self._fetch( url, payload=attachment, ) if not postokay: # We can't post our attachment return False # Prepare our filename filename = attachment.name \ if attachment.name else f'file{no:03}.dat' if not (isinstance(response, dict) and response.get('blob')): self.logger.debug( 'Could not attach the file to BlueSky: %s (mime=%s)', filename, attachment.mimetype) continue blobs.append((response.get('blob'), filename)) # Prepare our URL url = f'https://{self.host}{self.xrpc_suffix_record}' # prepare our batch of payloads to create payloads = [] payload = { "collection": "app.bsky.feed.post", "repo": self.get_identifier(), "record": { "text": body, # 'YYYY-mm-ddTHH:MM:SSZ' "createdAt": datetime.now( tz=timezone.utc).strftime('%FT%XZ'), "$type": "app.bsky.feed.post" } } if blobs: for no, blob in enumerate(blobs, start=1): _payload = payload.copy() if no > 1: # # multiple instances # # 1. update createdAt time # 2. Change text to identify image no _payload['record']['createdAt'] = \ datetime.now(tz=timezone.utc).strftime('%FT%XZ') _payload['record']['text'] = \ '{:02d}/{:02d}'.format(no, len(blobs)) _payload['record']['embed'] = { "images": [ { "image": blob[0], "alt": blob[1], } ], "$type": "app.bsky.embed.images" } payloads.append(_payload) else: payloads.append(payload) for payload in payloads: # Send Login Information postokay, response = self._fetch( url, payload=json.dumps(payload), ) if not postokay: # We failed # Bad responses look like: # { # 'error': 'InvalidRequest', # 'message': 'reason' # } return False return True def get_identifier(self, user=None, login=False): """ Performs a Decentralized User Lookup and returns the identifier """ if user is None: user = self.user user = f'{user}.{self.host}' if '.' not in user else f'{user}' key = f'did.{user}' did = self.store.get(key) if did: return did url = f'https://{self.host}{self.xrpc_suffix_did}' params = {'handle': user} # Send Login Information postokay, response = self._fetch( url, params=params, method='GET', # We set this boolean so internal recursion doesn't take place. 
login=login, ) if not postokay or not response or 'did' not in response: # We failed return False # Acquire our Decentralized Identitifer did = response.get('did') self.store.set(key, did) return did def login(self): """ A simple wrapper to authenticate with the BlueSky Server """ # Acquire our Decentralized Identitifer did = self.get_identifier(self.user, login=True) if not did: return False url = f'https://{self.host}{self.xrpc_suffix_session}' payload = { "identifier": did, "password": self.password, } # Send Login Information postokay, response = self._fetch( url, payload=json.dumps(payload), # We set this boolean so internal recursion doesn't take place. login=True, ) # Our response object looks like this (content has been altered for # presentation purposes): # { # 'did': 'did:plc:ruk414jakghak402j1jqekj2', # 'didDoc': { # '@context': [ # 'https://www.w3.org/ns/did/v1', # 'https://w3id.org/security/multikey/v1', # 'https://w3id.org/security/suites/secp256k1-2019/v1' # ], # 'id': 'did:plc:ruk414jakghak402j1jqekj2', # 'alsoKnownAs': ['at://apprise.bsky.social'], # 'verificationMethod': [ # { # 'id': 'did:plc:ruk414jakghak402j1jqekj2#atproto', # 'type': 'Multikey', # 'controller': 'did:plc:ruk414jakghak402j1jqekj2', # 'publicKeyMultibase' 'redacted' # } # ], # 'service': [ # { # 'id': '#atproto_pds', # 'type': 'AtprotoPersonalDataServer', # 'serviceEndpoint': # 'https://woodtuft.us-west.host.bsky.network' # } # ] # }, # 'handle': 'apprise.bsky.social', # 'email': 'whoami@gmail.com', # 'emailConfirmed': True, # 'emailAuthFactor': False, # 'accessJwt': 'redacted', # 'refreshJwt': 'redacted', # 'active': True, # } if not postokay or not response: # We failed return False # Acquire our Token self.__access_token = response.get('accessJwt') # Handle other optional arguments we can use self.__access_token_expiry = self.access_token_lifetime_sec + \ datetime.now(timezone.utc) - self.clock_skew # The Refresh Token self.__refresh_token = response.get('refreshJwt', self.__refresh_token) self.store.set( 'access_token', self.__access_token, self.__access_token_expiry) self.store.set( 'refresh_token', self.__refresh_token, self.__access_token_expiry) self.logger.info('Authenticated to BlueSky as {}.{}'.format( self.user, self.host)) return True def _fetch(self, url, payload=None, params=None, method='POST', content_type=None, login=False): """ Wrapper to BlueSky API requests object """ # use what was specified, otherwise build headers dynamically headers = { 'User-Agent': self.app_id, 'Content-Type': payload.mimetype if isinstance(payload, AttachBase) else ( 'application/x-www-form-urlencoded; charset=utf-8' if method == 'GET' else 'application/json') } if self.__access_token: # Set our token headers['Authorization'] = 'Bearer {}'.format(self.__access_token) # Some Debug Logging self.logger.debug('BlueSky {} URL: {} (cert_verify={})'.format( method, url, self.verify_certificate)) self.logger.debug( 'BlueSky Payload: %s', str(payload) if not isinstance(payload, AttachBase) else 'attach: ' + payload.name) # By default set wait to None wait = None if self.ratelimit_remaining == 0: # Determine how long we should wait for or if we should wait at # all. This isn't fool-proof because we can't be sure the client # time (calling this script) is completely synced up with the # Twitter server. 
One would hope we're on NTP and our clocks are # the same allowing this to role smoothly: now = datetime.now(timezone.utc).replace(tzinfo=None) if now < self.ratelimit_reset: # We need to throttle for the difference in seconds # We add 0.3 seconds to the end just to allow a grace # period. wait = (self.ratelimit_reset - now).total_seconds() + 0.3 # Always call throttle before any remote server i/o is made; self.throttle(wait=wait) # Initialize a default value for our content value content = {} # acquire our request mode fn = requests.post if method == 'POST' else requests.get try: r = fn( url, data=payload if not isinstance(payload, AttachBase) else payload.open(), params=params, headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) # Get our JSON content if it's possible try: content = json.loads(r.content) except (TypeError, ValueError, AttributeError): # TypeError = r.content is not a String # ValueError = r.content is Unparsable # AttributeError = r.content is None content = {} # Rate limit handling... our header objects at this point are: # 'RateLimit-Limit': '10', # Total # of requests per hour # 'RateLimit-Remaining': '9', # Requests remaining # 'RateLimit-Reset': '1741631362', # Epoch Time # 'RateLimit-Policy': '10;w=86400' # NoEntries;w= try: # Capture rate limiting if possible self.ratelimit_remaining = \ int(r.headers.get('ratelimit-remaining')) self.ratelimit_reset = datetime.fromtimestamp( int(r.headers.get('ratelimit-reset')), timezone.utc ).replace(tzinfo=None) except (TypeError, ValueError): # This is returned if we could not retrieve this information # gracefully accept this state and move on pass if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyBlueSky.http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send BlueSky {} to {}: ' '{}error={}.'.format( method, url, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure return (False, content) except requests.RequestException as e: self.logger.warning( 'Exception received when sending BlueSky {} to {}: '. format(method, url)) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure return (False, content) except (OSError, IOError) as e: self.logger.warning( 'An I/O error occurred while handling {}.'.format( payload.name if isinstance(payload, AttachBase) else payload)) self.logger.debug('I/O Exception: %s' % str(e)) return (False, content) return (True, content) @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol[0], self.user, self.password, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Apply our other parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) user = self.user if self.host != self.bluesky_default_host: user += f'.{self.host}' # our URL return '{schema}://{user}@{password}?{params}'.format( schema=self.secure_protocol[0], user=NotifyBlueSky.quote(user, safe=''), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), params=NotifyBlueSky.urlencode(params), ) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. 
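        As a rough illustration only (the handle and app password shown are
        placeholders):

            bsky://myhandle@abcd-efgh-ijkl-mnop

        The handle portion is parsed as the user while the trailing component
        lands in the host field and, as no password was otherwise provided, is
        lifted out below and treated as the app password.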
""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results if not results.get('password') and results['host']: results['password'] = NotifyBlueSky.unquote(results['host']) # Do not use host field results['host'] = None return results apprise-1.9.3/apprise/plugins/bulksms.py000066400000000000000000000377321477231770000203710ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # To use this service you will need a BulkSMS account # You will need credits (new accounts start with a few) # https://www.bulksms.com/account/ # # API is documented here: # - https://www.bulksms.com/developer/json/v1/#tag/Message import re import requests import json from itertools import chain from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyType from ..utils.parse import is_phone_no, parse_phone_no, parse_bool from ..locale import gettext_lazy as _ IS_GROUP_RE = re.compile( r'^(@?(?P[A-Z0-9_-]+))$', re.IGNORECASE, ) class BulkSMSRoutingGroup(object): """ The different categories of routing """ ECONOMY = "ECONOMY" STANDARD = "STANDARD" PREMIUM = "PREMIUM" # Used for verification purposes BULKSMS_ROUTING_GROUPS = ( BulkSMSRoutingGroup.ECONOMY, BulkSMSRoutingGroup.STANDARD, BulkSMSRoutingGroup.PREMIUM, ) class BulkSMSEncoding(object): """ The different categories of routing """ TEXT = "TEXT" UNICODE = "UNICODE" BINARY = "BINARY" class NotifyBulkSMS(NotifyBase): """ A wrapper for BulkSMS Notifications """ # The default descriptive name associated with the Notification service_name = 'BulkSMS' # The services URL service_url = 'https://bulksms.com/' # All notification requests are secure secure_protocol = 'bulksms' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_bulksms' # BulkSMS uses the http protocol with JSON requests notify_url = 'https://api.bulksms.com/v1/messages' # The maximum length of the body body_maxlen = 160 # The maximum amount of texts that can go out in one batch default_batch_size = 4000 # A title can not be used for SMS Messages. 
Setting this to zero will # cause any title (if defined) to get placed into the message body. title_maxlen = 0 # Define object templates templates = ( '{schema}://{user}:{password}@{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'user': { 'name': _('User Name'), 'type': 'string', 'required': True, }, 'password': { 'name': _('Password'), 'type': 'string', 'private': True, 'required': True, }, 'target_phone': { 'name': _('Target Phone No'), 'type': 'string', 'prefix': '+', 'regex': (r'^[0-9\s)(+-]+$', 'i'), 'map_to': 'targets', }, 'target_group': { 'name': _('Target Group'), 'type': 'string', 'prefix': '@', 'regex': (r'^[A-Z0-9 _-]+$', 'i'), 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', 'required': True, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'from': { 'name': _('From Phone No'), 'type': 'string', 'regex': (r'^\+?[0-9\s)(+-]+$', 'i'), 'map_to': 'source', }, 'route': { 'name': _('Route Group'), 'type': 'choice:string', 'values': BULKSMS_ROUTING_GROUPS, 'default': BulkSMSRoutingGroup.STANDARD, }, 'unicode': { # Unicode characters 'name': _('Unicode Characters'), 'type': 'bool', 'default': True, }, 'batch': { 'name': _('Batch Mode'), 'type': 'bool', 'default': False, }, }) def __init__(self, source=None, targets=None, unicode=None, batch=None, route=None, **kwargs): """ Initialize BulkSMS Object """ super(NotifyBulkSMS, self).__init__(**kwargs) self.source = None if source: result = is_phone_no(source) if not result: msg = 'The Account (From) Phone # specified ' \ '({}) is invalid.'.format(source) self.logger.warning(msg) raise TypeError(msg) # Tidy source self.source = '+{}'.format(result['full']) # Setup our route self.route = self.template_args['route']['default'] \ if not isinstance(route, str) else route.upper() if self.route not in BULKSMS_ROUTING_GROUPS: msg = 'The route specified ({}) is invalid.'.format(route) self.logger.warning(msg) raise TypeError(msg) # Define whether or not we should set the unicode flag self.unicode = self.template_args['unicode']['default'] \ if unicode is None else bool(unicode) # Define whether or not we should operate in a batch mode self.batch = self.template_args['batch']['default'] \ if batch is None else bool(batch) # Parse our targets self.targets = list() self.groups = list() for target in parse_phone_no(targets): # Parse each phone number we found result = is_phone_no(target) if result: self.targets.append('+{}'.format(result['full'])) continue group_re = IS_GROUP_RE.match(target) if group_re and not target.isdigit(): # If the target specified is all digits, it MUST have a @ # in front of it to eliminate any ambiguity self.groups.append(group_re.group('group')) continue self.logger.warning( 'Dropped invalid phone # and/or Group ' '({}) specified.'.format(target), ) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform BulkSMS Notification """ if not (self.password and self.user): self.logger.warning( 'There were no valid login credentials provided') return False if not (self.targets or self.groups): # We have nothing to notify self.logger.warning('There are no BulkSMS targets to notify') return False # Send in batches if identified to do so batch_size = 1 if not self.batch else self.default_batch_size # error tracking (used for function return) has_error = False # Prepare our headers headers = { 'User-Agent': self.app_id, 'Content-Type': 
'application/json', } # Prepare our payload payload = { # The To gets populated in the loop below 'to': None, 'body': body, 'routingGroup': self.route, 'encoding': BulkSMSEncoding.UNICODE if self.unicode else BulkSMSEncoding.TEXT, # Options are NONE, ALL and ERRORS 'deliveryReports': "ERRORS" } if self.source: payload.update({ 'from': self.source, }) # Authentication auth = (self.user, self.password) # Prepare our targets targets = list(self.targets) if batch_size == 1 else \ [self.targets[index:index + batch_size] for index in range(0, len(self.targets), batch_size)] targets += [{"type": "GROUP", "name": g} for g in self.groups] while len(targets): # Get our target to notify target = targets.pop(0) # Prepare our user payload['to'] = target # Printable reference if isinstance(target, dict): p_target = target['name'] elif isinstance(target, list): p_target = '{} targets'.format(len(target)) else: p_target = target # Some Debug Logging self.logger.debug('BulkSMS POST URL: {} (cert_verify={})'.format( self.notify_url, self.verify_certificate)) self.logger.debug('BulkSMS Payload: {}' .format(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( self.notify_url, data=json.dumps(payload), headers=headers, auth=auth, verify=self.verify_certificate, timeout=self.request_timeout, ) # The responsne might look like: # [ # { # "id": "string", # "type": "SENT", # "from": "string", # "to": "string", # "body": null, # "encoding": "TEXT", # "protocolId": 0, # "messageClass": 0, # "numberOfParts": 0, # "creditCost": 0, # "submission": {...}, # "status": {...}, # "relatedSentMessageId": "string", # "userSuppliedId": "string" # } # ] if r.status_code not in ( requests.codes.created, requests.codes.ok): # We had a problem status_str = \ NotifyBase.http_response_code_lookup(r.status_code) # set up our status code to use status_code = r.status_code self.logger.warning( 'Failed to send BulkSMS notification to {}: ' '{}{}error={}.'.format( p_target, status_str, ', ' if status_str else '', status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure has_error = True continue else: self.logger.info( 'Sent BulkSMS notification to {}.'.format(p_target)) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending BulkSMS: to %s ', p_target) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure has_error = True continue return not has_error def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'unicode': 'yes' if self.unicode else 'no', 'batch': 'yes' if self.batch else 'no', 'route': self.route, } if self.source: params['from'] = self.source # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) return '{schema}://{user}:{password}@{targets}/?{params}'.format( schema=self.secure_protocol, user=self.pprint(self.user, privacy, safe=''), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), targets='/'.join(chain( [NotifyBulkSMS.quote('{}'.format(x), safe='+') for x in self.targets], [NotifyBulkSMS.quote('@{}'.format(x), safe='@') for x in self.groups])), params=NotifyBulkSMS.urlencode(params)) @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. 
""" return ( self.secure_protocol, self.user if self.user else None, self.password if self.password else None, ) def __len__(self): """ Returns the number of targets associated with this notification """ # # Factor batch into calculation # # Note: Groups always require a separate request (and can not be # included in batch calculations) batch_size = 1 if not self.batch else self.default_batch_size targets = len(self.targets) if batch_size > 1: targets = int(targets / batch_size) + \ (1 if targets % batch_size else 0) return targets + len(self.groups) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Get our entries; split_path() looks after unquoting content for us # by default results['targets'] = [ NotifyBulkSMS.unquote(results['host']), *NotifyBulkSMS.split_path(results['fullpath'])] # Support the 'from' and 'source' variable so that we can support # targets this way too. # The 'from' makes it easier to use yaml configuration if 'from' in results['qsd'] and len(results['qsd']['from']): results['source'] = \ NotifyBulkSMS.unquote(results['qsd']['from']) # Support the 'to' variable so that we can support targets this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyBulkSMS.parse_phone_no(results['qsd']['to']) # Unicode Characters results['unicode'] = \ parse_bool(results['qsd'].get( 'unicode', NotifyBulkSMS.template_args['unicode']['default'])) # Get Batch Mode Flag results['batch'] = \ parse_bool(results['qsd'].get( 'batch', NotifyBulkSMS.template_args['batch']['default'])) # Allow one to define a route group if 'route' in results['qsd'] and len(results['qsd']['route']): results['route'] = \ NotifyBulkSMS.unquote(results['qsd']['route']) return results apprise-1.9.3/apprise/plugins/bulkvs.py000066400000000000000000000323551477231770000202130ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
# To use this service you will need a BulkVS account # You will need credits (new accounts start with a few) # https://www.bulkvs.com/ # API is documented here: # - https://portal.bulkvs.com/api/v1.0/documentation#/\ # Messaging/post_messageSend import requests import json from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyType from ..utils.parse import is_phone_no, parse_phone_no, parse_bool from ..locale import gettext_lazy as _ class NotifyBulkVS(NotifyBase): """ A wrapper for BulkVS Notifications """ # The default descriptive name associated with the Notification service_name = 'BulkVS' # The services URL service_url = 'https://www.bulkvs.com/' # All notification requests are secure secure_protocol = 'bulkvs' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_bulkvs' # BulkVS uses the http protocol with JSON requests notify_url = 'https://portal.bulkvs.com/api/v1.0/messageSend' # The maximum length of the body body_maxlen = 160 # The maximum amount of texts that can go out in one batch default_batch_size = 4000 # A title can not be used for SMS Messages. Setting this to zero will # cause any title (if defined) to get placed into the message body. title_maxlen = 0 # Define object templates templates = ( '{schema}://{user}:{password}@{from_phone}/{targets}', '{schema}://{user}:{password}@{from_phone}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'user': { 'name': _('User Name'), 'type': 'string', 'required': True, }, 'password': { 'name': _('Password'), 'type': 'string', 'private': True, 'required': True, }, 'from_phone': { 'name': _('From Phone No'), 'type': 'string', 'regex': (r'^\+?[0-9\s)(+-]+$', 'i'), 'map_to': 'source', 'required': True, }, 'target_phone': { 'name': _('Target Phone No'), 'type': 'string', 'prefix': '+', 'regex': (r'^[0-9\s)(+-]+$', 'i'), 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', 'required': True, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'from': { 'name': _('From Phone No'), 'type': 'string', 'regex': (r'^\+?[0-9\s)(+-]+$', 'i'), 'map_to': 'source', }, 'batch': { 'name': _('Batch Mode'), 'type': 'bool', 'default': False, }, }) def __init__(self, source=None, targets=None, batch=None, **kwargs): """ Initialize BulkVS Object """ super(NotifyBulkVS, self).__init__(**kwargs) if not (self.user and self.password): msg = 'A BulkVS user/pass was not provided.' 
self.logger.warning(msg) raise TypeError(msg) result = is_phone_no(source) if not result: msg = 'The Account (From) Phone # specified ' \ '({}) is invalid.'.format(source) self.logger.warning(msg) raise TypeError(msg) # Tidy source self.source = result['full'] # Define whether or not we should operate in a batch mode self.batch = self.template_args['batch']['default'] \ if batch is None else bool(batch) # Parse our targets self.targets = list() has_error = False for target in parse_phone_no(targets): # Parse each phone number we found result = is_phone_no(target) if result: self.targets.append(result['full']) continue has_error = True self.logger.warning( 'Dropped invalid phone # ({}) specified.'.format(target), ) if not targets and not has_error: # Default the SMS Message to ourselves self.targets.append(self.source) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform BulkVS Notification """ if not self.targets: # We have nothing to notify self.logger.warning('There are no BulkVS targets to notify') return False # Send in batches if identified to do so batch_size = 1 if not self.batch else self.default_batch_size # error tracking (used for function return) has_error = False # Prepare our headers headers = { 'User-Agent': self.app_id, 'Accept': 'application/json', 'Content-Type': 'application/json', } # Prepare our payload payload = { # The To gets populated in the loop below 'From': self.source, 'To': None, 'Message': body, } # Authentication auth = (self.user, self.password) # Prepare our targets targets = list(self.targets) if batch_size == 1 else \ [self.targets[index:index + batch_size] for index in range(0, len(self.targets), batch_size)] while len(targets): # Get our target to notify target = targets.pop(0) # Prepare our user payload['To'] = target # Printable reference if isinstance(target, list): p_target = '{} targets'.format(len(target)) else: p_target = target # Some Debug Logging self.logger.debug('BulkVS POST URL: {} (cert_verify={})'.format( self.notify_url, self.verify_certificate)) self.logger.debug('BulkVS Payload: {}' .format(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( self.notify_url, data=json.dumps(payload), headers=headers, auth=auth, verify=self.verify_certificate, timeout=self.request_timeout, ) # A Response may look like: # { # "RefId": "5a66dee6-ff7a-40ee-8218-5805c074dc01", # "From": "13109060901", # "MessageType": "SMS|MMS", # "Results": [ # { # "To": "13105551212", # "Status": "SUCCESS" # }, # { # "To": "13105551213", # "Status": "SUCCESS" # } # ] # } if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyBase.http_response_code_lookup(r.status_code) # set up our status code to use status_code = r.status_code self.logger.warning( 'Failed to send BulkVS notification to {}: ' '{}{}error={}.'.format( p_target, status_str, ', ' if status_str else '', status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure has_error = True continue else: self.logger.info( 'Sent BulkVS notification to {}.'.format(p_target)) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending BulkVS: to %s ', p_target) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure has_error = True continue return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. 
Targets or end points should never be identified here. """ return (self.secure_protocol, self.source, self.user, self.password) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'batch': 'yes' if self.batch else 'no', } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # A nice way of cleaning up the URL length a bit targets = [] if len(self.targets) == 1 \ and self.targets[0] == self.source else self.targets return '{schema}://{user}:{password}@{source}/{targets}' \ '?{params}'.format( schema=self.secure_protocol, source=self.source, user=self.pprint(self.user, privacy, safe=''), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), targets='/'.join([ NotifyBulkVS.quote('{}'.format(x), safe='+') for x in targets]), params=NotifyBulkVS.urlencode(params)) def __len__(self): """ Returns the number of targets associated with this notification """ # # Factor batch into calculation # batch_size = 1 if not self.batch else self.default_batch_size targets = len(self.targets) if self.targets else 1 if batch_size > 1: targets = int(targets / batch_size) + \ (1 if targets % batch_size else 0) return targets @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Support the 'from' and 'source' variable so that we can support # targets this way too. # The 'from' makes it easier to use yaml configuration if 'from' in results['qsd'] and len(results['qsd']['from']): results['source'] = \ NotifyBulkVS.unquote(results['qsd']['from']) # hostname will also be a target in this case results['targets'] = [ *NotifyBulkVS.parse_phone_no(results['host']), *NotifyBulkVS.split_path(results['fullpath'])] else: # store our source results['source'] = NotifyBulkVS.unquote(results['host']) # store targets results['targets'] = NotifyBulkVS.split_path(results['fullpath']) # Support the 'to' variable so that we can support targets this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyBulkVS.parse_phone_no(results['qsd']['to']) # Get Batch Mode Flag results['batch'] = \ parse_bool(results['qsd'].get( 'batch', NotifyBulkVS.template_args['batch']['default'])) return results apprise-1.9.3/apprise/plugins/burstsms.py000066400000000000000000000366651477231770000205770ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. 
# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # Sign-up with https://burstsms.com/ # # Define your API Secret here and acquire your API Key # - https://can.transmitsms.com/profile # import requests from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyType from ..utils.parse import ( is_phone_no, parse_phone_no, parse_bool, validate_regex) from ..locale import gettext_lazy as _ class BurstSMSCountryCode: # Australia AU = 'au' # New Zeland NZ = 'nz' # United Kingdom UK = 'gb' # United States US = 'us' BURST_SMS_COUNTRY_CODES = ( BurstSMSCountryCode.AU, BurstSMSCountryCode.NZ, BurstSMSCountryCode.UK, BurstSMSCountryCode.US, ) class NotifyBurstSMS(NotifyBase): """ A wrapper for Burst SMS Notifications """ # The default descriptive name associated with the Notification service_name = 'Burst SMS' # The services URL service_url = 'https://burstsms.com/' # The default protocol secure_protocol = 'burstsms' # The maximum amount of SMS Messages that can reside within a single # batch transfer based on: # https://developer.transmitsms.com/#74911cf8-dec6-4319-a499-7f535a7fd08c default_batch_size = 500 # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_burst_sms' # Burst SMS uses the http protocol with JSON requests notify_url = 'https://api.transmitsms.com/send-sms.json' # The maximum length of the body body_maxlen = 160 # A title can not be used for SMS Messages. Setting this to zero will # cause any title (if defined) to get placed into the message body. 
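    # Illustrative example (plain text case only): a notification issued with
    # title='Alert' and body='disk is full' is delivered as a single SMS body
    # of 'Alert\r\ndisk is full', since the base class folds the title into
    # the body whenever title_maxlen is 0.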
title_maxlen = 0 # Define object templates templates = ( '{schema}://{apikey}:{secret}@{sender_id}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'apikey': { 'name': _('API Key'), 'type': 'string', 'required': True, 'regex': (r'^[a-z0-9]+$', 'i'), 'private': True, }, 'secret': { 'name': _('API Secret'), 'type': 'string', 'private': True, 'required': True, 'regex': (r'^[a-z0-9]+$', 'i'), }, 'sender_id': { 'name': _('Sender ID'), 'type': 'string', 'required': True, 'map_to': 'source', }, 'target_phone': { 'name': _('Target Phone No'), 'type': 'string', 'prefix': '+', 'regex': (r'^[0-9\s)(+-]+$', 'i'), 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', 'required': True, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'from': { 'alias_of': 'sender_id', }, 'key': { 'alias_of': 'apikey', }, 'secret': { 'alias_of': 'secret', }, 'country': { 'name': _('Country'), 'type': 'choice:string', 'values': BURST_SMS_COUNTRY_CODES, 'default': BurstSMSCountryCode.US, }, # Validity # Expire a message send if it is undeliverable (defined in minutes) # If set to Zero (0); this is the default and sets the max validity # period 'validity': { 'name': _('validity'), 'type': 'int', 'default': 0 }, 'batch': { 'name': _('Batch Mode'), 'type': 'bool', 'default': False, }, }) def __init__(self, apikey, secret, source, targets=None, country=None, validity=None, batch=None, **kwargs): """ Initialize Burst SMS Object """ super().__init__(**kwargs) # API Key (associated with project) self.apikey = validate_regex( apikey, *self.template_tokens['apikey']['regex']) if not self.apikey: msg = 'An invalid Burst SMS API Key ' \ '({}) was specified.'.format(apikey) self.logger.warning(msg) raise TypeError(msg) # API Secret (associated with project) self.secret = validate_regex( secret, *self.template_tokens['secret']['regex']) if not self.secret: msg = 'An invalid Burst SMS API Secret ' \ '({}) was specified.'.format(secret) self.logger.warning(msg) raise TypeError(msg) if not country: self.country = self.template_args['country']['default'] else: self.country = country.lower().strip() if country not in BURST_SMS_COUNTRY_CODES: msg = 'An invalid Burst SMS country ' \ '({}) was specified.'.format(country) self.logger.warning(msg) raise TypeError(msg) # Set our Validity self.validity = self.template_args['validity']['default'] if validity: try: self.validity = int(validity) except (ValueError, TypeError): msg = 'The Burst SMS Validity specified ({}) is invalid.'\ .format(validity) self.logger.warning(msg) raise TypeError(msg) # Prepare Batch Mode Flag self.batch = self.template_args['batch']['default'] \ if batch is None else batch # The Sender ID self.source = validate_regex(source) if not self.source: msg = 'The Account Sender ID specified ' \ '({}) is invalid.'.format(source) self.logger.warning(msg) raise TypeError(msg) # Parse our targets self.targets = list() for target in parse_phone_no(targets): # Validate targets and drop bad ones: result = is_phone_no(target) if not result: self.logger.warning( 'Dropped invalid phone # ' '({}) specified.'.format(target), ) continue # store valid phone number self.targets.append(result['full']) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Burst SMS Notification """ if not self.targets: self.logger.warning( 'There are no valid Burst SMS targets to notify.') return False # error tracking (used 
for function return) has_error = False # Prepare our headers headers = { 'User-Agent': self.app_id, 'Accept': 'application/json', } # Prepare our authentication auth = (self.apikey, self.secret) # Prepare our payload payload = { 'countrycode': self.country, 'message': body, # Sender ID 'from': self.source, # The to gets populated in the loop below 'to': None, } # Send in batches if identified to do so batch_size = 1 if not self.batch else self.default_batch_size # Create a copy of the targets list targets = list(self.targets) for index in range(0, len(targets), batch_size): # Prepare our user payload['to'] = ','.join(self.targets[index:index + batch_size]) # Some Debug Logging self.logger.debug('Burst SMS POST URL: {} (cert_verify={})'.format( self.notify_url, self.verify_certificate)) self.logger.debug('Burst SMS Payload: {}' .format(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( self.notify_url, data=payload, headers=headers, auth=auth, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyBurstSMS.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to send Burst SMS notification to {} ' 'target(s): {}{}error={}.'.format( len(self.targets[index:index + batch_size]), status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure has_error = True continue else: self.logger.info( 'Sent Burst SMS notification to %d target(s).' % len(self.targets[index:index + batch_size])) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Burst SMS ' 'notification to %d target(s).' % len(self.targets[index:index + batch_size])) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure has_error = True continue return not has_error def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'country': self.country, 'batch': 'yes' if self.batch else 'no', } if self.validity: params['validity'] = str(self.validity) # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) return '{schema}://{key}:{secret}@{source}/{targets}/?{params}'.format( schema=self.secure_protocol, key=self.pprint(self.apikey, privacy, safe=''), secret=self.pprint( self.secret, privacy, mode=PrivacyMode.Secret, safe=''), source=NotifyBurstSMS.quote(self.source, safe=''), targets='/'.join( [NotifyBurstSMS.quote(x, safe='') for x in self.targets]), params=NotifyBurstSMS.urlencode(params)) @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.apikey, self.secret, self.source) def __len__(self): """ Returns the number of targets associated with this notification """ # # Factor batch into calculation # batch_size = 1 if not self.batch else self.default_batch_size targets = len(self.targets) if batch_size > 1: targets = int(targets / batch_size) + \ (1 if targets % batch_size else 0) return targets if targets > 0 else 1 @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. 
""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # The hostname is our source (Sender ID) results['source'] = NotifyBurstSMS.unquote(results['host']) # Get any remaining targets results['targets'] = NotifyBurstSMS.split_path(results['fullpath']) # Get our account_side and auth_token from the user/pass config results['apikey'] = NotifyBurstSMS.unquote(results['user']) results['secret'] = NotifyBurstSMS.unquote(results['password']) # API Key if 'key' in results['qsd'] and len(results['qsd']['key']): # Extract the API Key from an argument results['apikey'] = \ NotifyBurstSMS.unquote(results['qsd']['key']) # API Secret if 'secret' in results['qsd'] and len(results['qsd']['secret']): # Extract the API Secret from an argument results['secret'] = \ NotifyBurstSMS.unquote(results['qsd']['secret']) # Support the 'from' and 'source' variable so that we can support # targets this way too. # The 'from' makes it easier to use yaml configuration if 'from' in results['qsd'] and len(results['qsd']['from']): results['source'] = \ NotifyBurstSMS.unquote(results['qsd']['from']) if 'source' in results['qsd'] and len(results['qsd']['source']): results['source'] = \ NotifyBurstSMS.unquote(results['qsd']['source']) # Support country if 'country' in results['qsd'] and len(results['qsd']['country']): results['country'] = \ NotifyBurstSMS.unquote(results['qsd']['country']) # Support validity value if 'validity' in results['qsd'] and len(results['qsd']['validity']): results['validity'] = \ NotifyBurstSMS.unquote(results['qsd']['validity']) # Get Batch Mode Flag if 'batch' in results['qsd'] and len(results['qsd']['batch']): results['batch'] = parse_bool(results['qsd']['batch']) # Support the 'to' variable so that we can support rooms this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyBurstSMS.parse_phone_no(results['qsd']['to']) return results apprise-1.9.3/apprise/plugins/chanify.py000066400000000000000000000154221477231770000203220ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # Chanify # 1. Visit https://chanify.net/ # The API URL will look something like this: # https://api.chanify.net/v1/sender/token # import requests from .base import NotifyBase from ..common import NotifyType from ..utils.parse import validate_regex from ..locale import gettext_lazy as _ class NotifyChanify(NotifyBase): """ A wrapper for Chanify Notifications """ # The default descriptive name associated with the Notification service_name = _('Chanify') # The services URL service_url = 'https://chanify.net/' # The default secure protocol secure_protocol = 'chanify' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_chanify' # Notification URL notify_url = 'https://api.chanify.net/v1/sender/{token}/' # Define object templates templates = ( '{schema}://{token}', ) # The title is not used title_maxlen = 0 # Define our tokens; these are the minimum tokens required required to # be passed into this function (as arguments). The syntax appends any # previously defined in the base package and builds onto them template_tokens = dict(NotifyBase.template_tokens, **{ 'token': { 'name': _('Token'), 'type': 'string', 'private': True, 'required': True, 'regex': (r'^[A-Z0-9_-]+$', 'i'), }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'token': { 'alias_of': 'token', }, }) def __init__(self, token, **kwargs): """ Initialize Chanify Object """ super().__init__(**kwargs) self.token = validate_regex( token, *self.template_tokens['token']['regex']) if not self.token: msg = 'The Chanify token specified ({}) is invalid.'\ .format(token) self.logger.warning(msg) raise TypeError(msg) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Send our notification """ # prepare our headers headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/x-www-form-urlencoded', } # Our Message payload = { 'text': body } self.logger.debug('Chanify GET URL: %s (cert_verify=%r)' % ( self.notify_url, self.verify_certificate)) self.logger.debug('Chanify Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( self.notify_url.format(token=self.token), data=payload, headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyChanify.http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send Chanify notification: ' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.info('Sent Chanify notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Chanify ' 'notification.') self.logger.debug('Socket Exception: %s' % str(e)) # Return; we're done return False return True def url(self, 
privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Prepare our parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) return '{schema}://{token}/?{params}'.format( schema=self.secure_protocol, token=self.pprint(self.token, privacy, safe=''), params=NotifyChanify.urlencode(params), ) @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.token) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ # parse_url already handles getting the `user` and `password` fields # populated. results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Allow over-ride if 'token' in results['qsd'] and len(results['qsd']['token']): results['token'] = NotifyChanify.unquote(results['qsd']['token']) else: results['token'] = NotifyChanify.unquote(results['host']) return results apprise-1.9.3/apprise/plugins/clicksend.py000066400000000000000000000266271477231770000206510ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # To use this plugin, simply signup with clicksend: # https://www.clicksend.com/ # # You're done at this point, you only need to know your user/pass that # you signed up with. 
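# Illustrative usage sketch (not part of the original source).  It assumes
# the public Apprise API and uses placeholder credentials and phone numbers
# following one of the URL forms listed below:
#
#     import apprise
#
#     apobj = apprise.Apprise()
#     apobj.add('clicksend://user:apikey@15551234567/15557654321?batch=yes')
#     apobj.notify(body='Server reboot complete')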
# The following URLs would be accepted by Apprise: # - clicksend://{user}:{password}@{phoneno} # - clicksend://{user}:{password}@{phoneno1}/{phoneno2} # The API reference used to build this plugin was documented here: # https://developers.clicksend.com/docs/rest/v3/ # import requests from json import dumps from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyType from ..utils.parse import is_phone_no, parse_phone_no, parse_bool from ..locale import gettext_lazy as _ # Extend HTTP Error Messages CLICKSEND_HTTP_ERROR_MAP = { 401: 'Unauthorized - Invalid Token.', } class NotifyClickSend(NotifyBase): """ A wrapper for ClickSend Notifications """ # The default descriptive name associated with the Notification service_name = 'ClickSend' # The services URL service_url = 'https://clicksend.com/' # The default secure protocol secure_protocol = 'clicksend' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_clicksend' # ClickSend uses the http protocol with JSON requests notify_url = 'https://rest.clicksend.com/v3/sms/send' # The maximum length of the body body_maxlen = 160 # A title can not be used for SMS Messages. Setting this to zero will # cause any title (if defined) to get placed into the message body. title_maxlen = 0 # The maximum SMS batch size accepted by the ClickSend API default_batch_size = 1000 # Define object templates templates = ( '{schema}://{user}:{apikey}@{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'user': { 'name': _('User Name'), 'type': 'string', 'required': True, }, 'apikey': { 'name': _('API Key'), 'type': 'string', 'private': True, 'required': True, 'map_to': 'password', }, 'target_phone': { 'name': _('Target Phone No'), 'type': 'string', 'prefix': '+', 'regex': (r'^[0-9\s)(+-]+$', 'i'), 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', 'required': True, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'key': { 'alias_of': 'apikey', }, 'batch': { 'name': _('Batch Mode'), 'type': 'bool', 'default': False, }, }) def __init__(self, targets=None, batch=False, **kwargs): """ Initialize ClickSend Object """ super().__init__(**kwargs) # Prepare Batch Mode Flag self.batch = batch # Parse our targets self.targets = list() if not (self.user and self.password): msg = 'A ClickSend user/pass was not provided.' 
self.logger.warning(msg) raise TypeError(msg) for target in parse_phone_no(targets): # Validate targets and drop bad ones: result = is_phone_no(target) if not result: self.logger.warning( 'Dropped invalid phone # ' '({}) specified.'.format(target), ) continue # store valid phone number self.targets.append(result['full']) def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform ClickSend Notification """ if len(self.targets) == 0: # There were no services to notify self.logger.warning('There were no ClickSend targets to notify.') return False headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json; charset=utf-8', } # error tracking (used for function return) has_error = False # prepare JSON Object payload = { 'messages': [] } # Send in batches if identified to do so default_batch_size = 1 if not self.batch else self.default_batch_size for index in range(0, len(self.targets), default_batch_size): payload['messages'] = [{ 'source': 'php', 'body': body, 'to': '+{}'.format(to), } for to in self.targets[index:index + default_batch_size]] self.logger.debug('ClickSend POST URL: %s (cert_verify=%r)' % ( self.notify_url, self.verify_certificate, )) self.logger.debug('ClickSend Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( self.notify_url, data=dumps(payload), auth=(self.user, self.password), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyClickSend.http_response_code_lookup( r.status_code, CLICKSEND_HTTP_ERROR_MAP) self.logger.warning( 'Failed to send {} ClickSend notification{}: ' '{}{}error={}.'.format( len(payload['messages']), ' to {}'.format(self.targets[index]) if default_batch_size == 1 else '(s)', status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure has_error = True continue else: self.logger.info( 'Sent {} ClickSend notification{}.' .format( len(payload['messages']), ' to {}'.format(self.targets[index]) if default_batch_size == 1 else '(s)', )) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending {} ClickSend ' 'notification(s).'.format(len(payload['messages']))) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure has_error = True continue return not has_error def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'batch': 'yes' if self.batch else 'no', } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Setup Authentication auth = '{user}:{password}@'.format( user=NotifyClickSend.quote(self.user, safe=''), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), ) return '{schema}://{auth}{targets}?{params}'.format( schema=self.secure_protocol, auth=auth, targets='/'.join( [NotifyClickSend.quote(x, safe='') for x in self.targets]), params=NotifyClickSend.urlencode(params), ) @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. 
""" return (self.secure_protocol, self.user, self.password) def __len__(self): """ Returns the number of targets associated with this notification """ # # Factor batch into calculation # batch_size = 1 if not self.batch else self.default_batch_size targets = len(self.targets) if batch_size > 1: targets = int(targets / batch_size) + \ (1 if targets % batch_size else 0) return targets @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # All elements are targets results['targets'] = [NotifyClickSend.unquote(results['host'])] # All entries after the hostname are additional targets results['targets'].extend( NotifyClickSend.split_path(results['fullpath'])) # Get Batch Mode Flag results['batch'] = \ parse_bool(results['qsd'].get('batch', False)) # API Key if 'key' in results['qsd'] and len(results['qsd']['key']): # Extract the API Key from an argument results['password'] = \ NotifyClickSend.unquote(results['qsd']['key']) # Support the 'to' variable so that we can support rooms this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyClickSend.parse_phone_no(results['qsd']['to']) return results apprise-1.9.3/apprise/plugins/custom_form.py000066400000000000000000000440131477231770000212340ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import re import requests from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyImageSize from ..common import NotifyType from ..locale import gettext_lazy as _ class FORMPayloadField: """ Identifies the fields available in the FORM Payload """ VERSION = 'version' TITLE = 'title' MESSAGE = 'message' MESSAGETYPE = 'type' # Defines the method to send the notification METHODS = ( 'POST', 'GET', 'DELETE', 'PUT', 'HEAD', 'PATCH' ) class NotifyForm(NotifyBase): """ A wrapper for Form Notifications """ # Support # - file* # - file? 
# - file*name # - file?name # - ?file # - *file # - file # The code will convert the ? or * to the digit increments __attach_as_re = re.compile( r'((?P(?P[a-z0-9_-]+)?' r'(?P[*?+$:.%]+)(?P[a-z0-9_-]+))' r'|(?P(?P[a-z0-9_-]+)(?P[*?+$:.%]?)))', re.IGNORECASE) # Our count attach_as_count = '{:02d}' # the default attach_as value attach_as_default = f'file{attach_as_count}' # The default descriptive name associated with the Notification service_name = 'Form' # The default protocol protocol = 'form' # The default secure protocol secure_protocol = 'forms' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_Custom_Form' # Support attachments attachment_support = True # Allows the user to specify the NotifyImageSize object image_size = NotifyImageSize.XY_128 # Disable throttle rate for Form requests since they are normally # local anyway request_rate_per_sec = 0 # Define the FORM version to place in all payloads # Version: Major.Minor, Major is only updated if the entire schema is # changed. If just adding new items (or removing old ones, only increment # the Minor! form_version = '1.0' # Define object templates templates = ( '{schema}://{host}', '{schema}://{host}:{port}', '{schema}://{user}@{host}', '{schema}://{user}@{host}:{port}', '{schema}://{user}:{password}@{host}', '{schema}://{user}:{password}@{host}:{port}', ) # Define our tokens; these are the minimum tokens required required to # be passed into this function (as arguments). The syntax appends any # previously defined in the base package and builds onto them template_tokens = dict(NotifyBase.template_tokens, **{ 'host': { 'name': _('Hostname'), 'type': 'string', 'required': True, }, 'port': { 'name': _('Port'), 'type': 'int', 'min': 1, 'max': 65535, }, 'user': { 'name': _('Username'), 'type': 'string', }, 'password': { 'name': _('Password'), 'type': 'string', 'private': True, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'method': { 'name': _('Fetch Method'), 'type': 'choice:string', 'values': METHODS, 'default': METHODS[0], }, 'attach-as': { 'name': _('Attach File As'), 'type': 'string', 'default': 'file*', 'map_to': 'attach_as', }, }) # Define any kwargs we're using template_kwargs = { 'headers': { 'name': _('HTTP Header'), 'prefix': '+', }, 'payload': { 'name': _('Payload Extras'), 'prefix': ':', }, 'params': { 'name': _('GET Params'), 'prefix': '-', }, } def __init__(self, headers=None, method=None, payload=None, params=None, attach_as=None, **kwargs): """ Initialize Form Object headers can be a dictionary of key/value pairs that you want to additionally include as part of the server headers to post with """ super().__init__(**kwargs) self.fullpath = kwargs.get('fullpath') if not isinstance(self.fullpath, str): self.fullpath = '' self.method = self.template_args['method']['default'] \ if not isinstance(method, str) else method.upper() if self.method not in METHODS: msg = 'The method specified ({}) is invalid.'.format(method) self.logger.warning(msg) raise TypeError(msg) # Custom File Attachment Over-Ride Support if not isinstance(attach_as, str): # Default value self.attach_as = self.attach_as_default self.attach_multi_support = True else: result = self.__attach_as_re.match(attach_as.strip()) if not result: msg = 'The attach-as specified ({}) is invalid.'.format( attach_as) self.logger.warning(msg) raise TypeError(msg) self.attach_as = '' self.attach_multi_support = False if result.group('match1'): if 
result.group('id1a'): self.attach_as += result.group('id1a') self.attach_as += self.attach_as_count self.attach_multi_support = True self.attach_as += result.group('id1b') else: # result.group('match2'): self.attach_as += result.group('id2') if result.group('wc2'): self.attach_as += self.attach_as_count self.attach_multi_support = True # A payload map allows users to over-ride the default mapping if # they're detected with the :overide=value. Normally this would # create a new key and assign it the value specified. However # if the key you specify is actually an internally mapped one, # then a re-mapping takes place using the value self.payload_map = { FORMPayloadField.VERSION: FORMPayloadField.VERSION, FORMPayloadField.TITLE: FORMPayloadField.TITLE, FORMPayloadField.MESSAGE: FORMPayloadField.MESSAGE, FORMPayloadField.MESSAGETYPE: FORMPayloadField.MESSAGETYPE, } self.params = {} if params: # Store our extra headers self.params.update(params) self.headers = {} if headers: # Store our extra headers self.headers.update(headers) self.payload_overrides = {} self.payload_extras = {} if payload: # Store our extra payload entries self.payload_extras.update(payload) for key in list(self.payload_extras.keys()): # Any values set in the payload to alter a system related one # alters the system key. Hence :message=msg maps the 'message' # variable that otherwise already contains the payload to be # 'msg' instead (containing the payload) if key in self.payload_map: self.payload_map[key] = self.payload_extras[key] self.payload_overrides[key] = self.payload_extras[key] del self.payload_extras[key] return def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, **kwargs): """ Perform Form Notification """ # Prepare HTTP Headers headers = { 'User-Agent': self.app_id, } # Apply any/all header over-rides defined headers.update(self.headers) # Track our potential attachments files = [] if attach and self.attachment_support: for no, attachment in enumerate(attach, start=1): # Perform some simple error checking if not attachment: # We could not access the attachment self.logger.error( 'Could not access attachment {}.'.format( attachment.url(privacy=True))) return False try: files.append(( self.attach_as.format(no) if self.attach_multi_support else self.attach_as, ( attachment.name if attachment.name else f'file{no:03}.dat', open(attachment.path, 'rb'), attachment.mimetype) )) except (OSError, IOError) as e: self.logger.warning( 'An I/O error occurred while opening {}.'.format( attachment.name if attachment else 'attachment')) self.logger.debug('I/O Exception: %s' % str(e)) return False if not self.attach_multi_support and no > 1: self.logger.warning( 'Multiple attachments provided while ' 'form:// Multi-Attachment Support not enabled') # prepare Form Object payload = {} for key, value in ( (FORMPayloadField.VERSION, self.form_version), (FORMPayloadField.TITLE, title), (FORMPayloadField.MESSAGE, body), (FORMPayloadField.MESSAGETYPE, notify_type)): if not self.payload_map[key]: # Do not store element in payload response continue payload[self.payload_map[key]] = value # Apply any/all payload over-rides defined payload.update(self.payload_extras) auth = None if self.user: auth = (self.user, self.password) # Set our schema schema = 'https' if self.secure else 'http' url = '%s://%s' % (schema, self.host) if isinstance(self.port, int): url += ':%d' % self.port url += self.fullpath self.logger.debug('Form %s URL: %s (cert_verify=%r)' % ( self.method, url, self.verify_certificate, )) 
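        # --------------------------------------------------------------
        # Illustrative note (not part of the original source): the extra
        # headers, GET params and payload entries used by this request
        # come from the '+', '-' and ':' URL prefixes handled by this
        # plugin.  A placeholder URL demonstrating all three might be:
        #
        #     form://host.example.com/path?method=POST
        #         &+X-Token=abc123    (sent as an additional HTTP header)
        #         &-verbose=1         (appended as a GET parameter)
        #         &:message=msg       (post the body under 'msg' instead
        #                              of 'message')
        # --------------------------------------------------------------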
self.logger.debug('Form Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() if self.method == 'GET': method = requests.get payload.update(self.params) elif self.method == 'PUT': method = requests.put elif self.method == 'PATCH': method = requests.patch elif self.method == 'DELETE': method = requests.delete elif self.method == 'HEAD': method = requests.head else: # POST method = requests.post try: r = method( url, files=None if not files else files, data=payload if self.method != 'GET' else None, params=payload if self.method == 'GET' else self.params, headers=headers, auth=auth, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code < 200 or r.status_code >= 300: # We had a problem status_str = \ NotifyForm.http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send Form %s notification: %s%serror=%s.', self.method, status_str, ', ' if status_str else '', str(r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.info('Sent Form %s notification.', self.method) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Form ' 'notification to %s.' % self.host) self.logger.debug('Socket Exception: %s' % str(e)) # Return; we're done return False except (OSError, IOError) as e: self.logger.warning( 'An I/O error occurred while reading one of the ' 'attached files.') self.logger.debug('I/O Exception: %s' % str(e)) return False finally: for file in files: # Ensure all files are closed file[1][1].close() return True @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol if self.secure else self.protocol, self.user, self.password, self.host, self.port if self.port else (443 if self.secure else 80), self.fullpath.rstrip('/'), ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
""" # Define any URL parameters params = { 'method': self.method, } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Append our headers into our parameters params.update({'+{}'.format(k): v for k, v in self.headers.items()}) # Append our GET params into our parameters params.update({'-{}'.format(k): v for k, v in self.params.items()}) # Append our payload extra's into our parameters params.update( {':{}'.format(k): v for k, v in self.payload_extras.items()}) params.update( {':{}'.format(k): v for k, v in self.payload_overrides.items()}) if self.attach_as != self.attach_as_default: # Provide Attach-As extension details params['attach-as'] = self.attach_as # Determine Authentication auth = '' if self.user and self.password: auth = '{user}:{password}@'.format( user=NotifyForm.quote(self.user, safe=''), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), ) elif self.user: auth = '{user}@'.format( user=NotifyForm.quote(self.user, safe=''), ) default_port = 443 if self.secure else 80 return '{schema}://{auth}{hostname}{port}{fullpath}?{params}'.format( schema=self.secure_protocol if self.secure else self.protocol, auth=auth, # never encode hostname since we're expecting it to be a valid one hostname=self.host, port='' if self.port is None or self.port == default_port else ':{}'.format(self.port), fullpath=NotifyForm.quote(self.fullpath, safe='/') if self.fullpath else '/', params=NotifyForm.urlencode(params), ) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url) if not results: # We're done early as we couldn't load the results return results # store any additional payload extra's defined results['payload'] = {NotifyForm.unquote(x): NotifyForm.unquote(y) for x, y in results['qsd:'].items()} # Add our headers that the user can potentially over-ride if they wish # to to our returned result set and tidy entries by unquoting them results['headers'] = {NotifyForm.unquote(x): NotifyForm.unquote(y) for x, y in results['qsd+'].items()} # Add our GET paramters in the event the user wants to pass these along results['params'] = {NotifyForm.unquote(x): NotifyForm.unquote(y) for x, y in results['qsd-'].items()} # Allow Attach-As Support which over-rides the name of the filename # posted with the form:// # the default is file01, file02, file03, etc if 'attach-as' in results['qsd'] and len(results['qsd']['attach-as']): results['attach_as'] = results['qsd']['attach-as'] # Set method if not otherwise set if 'method' in results['qsd'] and len(results['qsd']['method']): results['method'] = NotifyForm.unquote(results['qsd']['method']) return results apprise-1.9.3/apprise/plugins/custom_json.py000066400000000000000000000337501477231770000212500ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. 
# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import requests from json import dumps from .. import exception from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyImageSize from ..common import NotifyType from ..locale import gettext_lazy as _ class JSONPayloadField: """ Identifies the fields available in the JSON Payload """ VERSION = 'version' TITLE = 'title' MESSAGE = 'message' ATTACHMENTS = 'attachments' MESSAGETYPE = 'type' # Defines the method to send the notification METHODS = ( 'POST', 'GET', 'DELETE', 'PUT', 'HEAD', 'PATCH' ) class NotifyJSON(NotifyBase): """ A wrapper for JSON Notifications """ # The default descriptive name associated with the Notification service_name = 'JSON' # The default protocol protocol = 'json' # The default secure protocol secure_protocol = 'jsons' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_Custom_JSON' # Support attachments attachment_support = True # Allows the user to specify the NotifyImageSize object image_size = NotifyImageSize.XY_128 # Disable throttle rate for JSON requests since they are normally # local anyway request_rate_per_sec = 0 # Define the JSON version to place in all payloads # Version: Major.Minor, Major is only updated if the entire schema is # changed. If just adding new items (or removing old ones, only increment # the Minor! json_version = '1.0' # Define object templates templates = ( '{schema}://{host}', '{schema}://{host}:{port}', '{schema}://{user}@{host}', '{schema}://{user}@{host}:{port}', '{schema}://{user}:{password}@{host}', '{schema}://{user}:{password}@{host}:{port}', ) # Define our tokens; these are the minimum tokens required required to # be passed into this function (as arguments). 
The syntax appends any # previously defined in the base package and builds onto them template_tokens = dict(NotifyBase.template_tokens, **{ 'host': { 'name': _('Hostname'), 'type': 'string', 'required': True, }, 'port': { 'name': _('Port'), 'type': 'int', 'min': 1, 'max': 65535, }, 'user': { 'name': _('Username'), 'type': 'string', }, 'password': { 'name': _('Password'), 'type': 'string', 'private': True, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'method': { 'name': _('Fetch Method'), 'type': 'choice:string', 'values': METHODS, 'default': METHODS[0], }, }) # Define any kwargs we're using template_kwargs = { 'headers': { 'name': _('HTTP Header'), 'prefix': '+', }, 'payload': { 'name': _('Payload Extras'), 'prefix': ':', }, 'params': { 'name': _('GET Params'), 'prefix': '-', }, } def __init__(self, headers=None, method=None, payload=None, params=None, **kwargs): """ Initialize JSON Object headers can be a dictionary of key/value pairs that you want to additionally include as part of the server headers to post with """ super().__init__(**kwargs) self.fullpath = kwargs.get('fullpath') if not isinstance(self.fullpath, str): self.fullpath = '' self.method = self.template_args['method']['default'] \ if not isinstance(method, str) else method.upper() if self.method not in METHODS: msg = 'The method specified ({}) is invalid.'.format(method) self.logger.warning(msg) raise TypeError(msg) self.params = {} if params: # Store our extra headers self.params.update(params) self.headers = {} if headers: # Store our extra headers self.headers.update(headers) self.payload_extras = {} if payload: # Store our extra payload entries self.payload_extras.update(payload) return def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, **kwargs): """ Perform JSON Notification """ # Prepare HTTP Headers headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json' } # Apply any/all header over-rides defined headers.update(self.headers) # Track our potential attachments attachments = [] if attach and self.attachment_support: for no, attachment in enumerate(attach, start=1): # Perform some simple error checking if not attachment: # We could not access the attachment self.logger.error( 'Could not access Custom JSON attachment {}.'.format( attachment.url(privacy=True))) return False try: attachments.append({ "filename": attachment.name if attachment.name else f'file{no:03}.dat', 'base64': attachment.base64(), 'mimetype': attachment.mimetype, }) except exception.AppriseException: # We could not access the attachment self.logger.error( 'Could not access Custom JSON attachment {}.'.format( attachment.url(privacy=True))) return False self.logger.debug( 'Appending Custom JSON attachment {}'.format( attachment.url(privacy=True))) # Prepare JSON Object payload = { JSONPayloadField.VERSION: self.json_version, JSONPayloadField.TITLE: title, JSONPayloadField.MESSAGE: body, JSONPayloadField.ATTACHMENTS: attachments, JSONPayloadField.MESSAGETYPE: notify_type, } for key, value in self.payload_extras.items(): if key in payload: if not value: # Do not store element in payload response del payload[key] else: # Re-map payload[value] = payload[key] del payload[key] else: # Append entry payload[key] = value auth = None if self.user: auth = (self.user, self.password) # Set our schema schema = 'https' if self.secure else 'http' url = '%s://%s' % (schema, self.host) if isinstance(self.port, int): url += ':%d' % self.port url += self.fullpath self.logger.debug('JSON POST 
URL: %s (cert_verify=%r)' % ( url, self.verify_certificate, )) self.logger.debug('JSON Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() if self.method == 'GET': method = requests.get elif self.method == 'PUT': method = requests.put elif self.method == 'PATCH': method = requests.patch elif self.method == 'DELETE': method = requests.delete elif self.method == 'HEAD': method = requests.head else: # POST method = requests.post try: r = method( url, data=dumps(payload), params=self.params, headers=headers, auth=auth, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code < 200 or r.status_code >= 300: # We had a problem status_str = \ NotifyJSON.http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send JSON %s notification: %s%serror=%s.', self.method, status_str, ', ' if status_str else '', str(r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.info('Sent JSON %s notification.', self.method) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending JSON ' 'notification to %s.' % self.host) self.logger.debug('Socket Exception: %s' % str(e)) # Return; we're done return False return True @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol if self.secure else self.protocol, self.user, self.password, self.host, self.port if self.port else (443 if self.secure else 80), self.fullpath.rstrip('/'), ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'method': self.method, } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Append our headers into our parameters params.update({'+{}'.format(k): v for k, v in self.headers.items()}) # Append our GET params into our parameters params.update({'-{}'.format(k): v for k, v in self.params.items()}) # Append our payload extra's into our parameters params.update( {':{}'.format(k): v for k, v in self.payload_extras.items()}) # Determine Authentication auth = '' if self.user and self.password: auth = '{user}:{password}@'.format( user=NotifyJSON.quote(self.user, safe=''), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), ) elif self.user: auth = '{user}@'.format( user=NotifyJSON.quote(self.user, safe=''), ) default_port = 443 if self.secure else 80 return '{schema}://{auth}{hostname}{port}{fullpath}?{params}'.format( schema=self.secure_protocol if self.secure else self.protocol, auth=auth, # never encode hostname since we're expecting it to be a valid one hostname=self.host, port='' if self.port is None or self.port == default_port else ':{}'.format(self.port), fullpath=NotifyJSON.quote(self.fullpath, safe='/') if self.fullpath else '/', params=NotifyJSON.urlencode(params), ) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. 
""" results = NotifyBase.parse_url(url) if not results: # We're done early as we couldn't load the results return results # store any additional payload extra's defined results['payload'] = {NotifyJSON.unquote(x): NotifyJSON.unquote(y) for x, y in results['qsd:'].items()} # Add our headers that the user can potentially over-ride if they wish # to to our returned result set and tidy entries by unquoting them results['headers'] = {NotifyJSON.unquote(x): NotifyJSON.unquote(y) for x, y in results['qsd+'].items()} # Add our GET paramters in the event the user wants to pass these along results['params'] = {NotifyJSON.unquote(x): NotifyJSON.unquote(y) for x, y in results['qsd-'].items()} # Set method if not otherwise set if 'method' in results['qsd'] and len(results['qsd']['method']): results['method'] = NotifyJSON.unquote(results['qsd']['method']) return results apprise-1.9.3/apprise/plugins/custom_xml.py000066400000000000000000000422411477231770000210720ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import re import requests from .. 
import exception from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyImageSize from ..common import NotifyType from ..locale import gettext_lazy as _ class XMLPayloadField: """ Identifies the fields available in the JSON Payload """ VERSION = 'Version' TITLE = 'Subject' MESSAGE = 'Message' MESSAGETYPE = 'MessageType' # Defines the method to send the notification METHODS = ( 'POST', 'GET', 'DELETE', 'PUT', 'HEAD', 'PATCH' ) class NotifyXML(NotifyBase): """ A wrapper for XML Notifications """ # The default descriptive name associated with the Notification service_name = 'XML' # The default protocol protocol = 'xml' # The default secure protocol secure_protocol = 'xmls' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_Custom_XML' # Support attachments attachment_support = True # Allows the user to specify the NotifyImageSize object image_size = NotifyImageSize.XY_128 # Disable throttle rate for JSON requests since they are normally # local anyway request_rate_per_sec = 0 # XSD Information xsd_ver = '1.1' xsd_default_url = \ 'https://raw.githubusercontent.com/caronc/apprise/master' \ '/apprise/assets/NotifyXML-{version}.xsd' # Define object templates templates = ( '{schema}://{host}', '{schema}://{host}:{port}', '{schema}://{user}@{host}', '{schema}://{user}@{host}:{port}', '{schema}://{user}:{password}@{host}', '{schema}://{user}:{password}@{host}:{port}', ) # Define our tokens; these are the minimum tokens required required to # be passed into this function (as arguments). The syntax appends any # previously defined in the base package and builds onto them template_tokens = dict(NotifyBase.template_tokens, **{ 'host': { 'name': _('Hostname'), 'type': 'string', 'required': True, }, 'port': { 'name': _('Port'), 'type': 'int', 'min': 1, 'max': 65535, }, 'user': { 'name': _('Username'), 'type': 'string', }, 'password': { 'name': _('Password'), 'type': 'string', 'private': True, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'method': { 'name': _('Fetch Method'), 'type': 'choice:string', 'values': METHODS, 'default': METHODS[0], }, }) # Define any kwargs we're using template_kwargs = { 'headers': { 'name': _('HTTP Header'), 'prefix': '+', }, 'payload': { 'name': _('Payload Extras'), 'prefix': ':', }, 'params': { 'name': _('GET Params'), 'prefix': '-', }, } def __init__(self, headers=None, method=None, payload=None, params=None, **kwargs): """ Initialize XML Object headers can be a dictionary of key/value pairs that you want to additionally include as part of the server headers to post with """ super().__init__(**kwargs) self.payload = """ {{CORE}} {{ATTACHMENTS}} """ self.fullpath = kwargs.get('fullpath') if not isinstance(self.fullpath, str): self.fullpath = '' self.method = self.template_args['method']['default'] \ if not isinstance(method, str) else method.upper() if self.method not in METHODS: msg = 'The method specified ({}) is invalid.'.format(method) self.logger.warning(msg) raise TypeError(msg) # A payload map allows users to over-ride the default mapping if # they're detected with the :overide=value. Normally this would # create a new key and assign it the value specified. 
However # if the key you specify is actually an internally mapped one, # then a re-mapping takes place using the value self.payload_map = { XMLPayloadField.VERSION: XMLPayloadField.VERSION, XMLPayloadField.TITLE: XMLPayloadField.TITLE, XMLPayloadField.MESSAGE: XMLPayloadField.MESSAGE, XMLPayloadField.MESSAGETYPE: XMLPayloadField.MESSAGETYPE, } self.params = {} if params: # Store our extra headers self.params.update(params) self.headers = {} if headers: # Store our extra headers self.headers.update(headers) self.payload_overrides = {} self.payload_extras = {} if payload: # Store our extra payload entries (but tidy them up since they will # become XML Keys (they can't contain certain characters for k, v in payload.items(): key = re.sub(r'[^A-Za-z0-9_-]*', '', k) if not key: self.logger.warning( 'Ignoring invalid XML Stanza element name({})' .format(k)) continue # Any values set in the payload to alter a system related one # alters the system key. Hence :message=msg maps the 'message' # variable that otherwise already contains the payload to be # 'msg' instead (containing the payload) if key in self.payload_map: self.payload_map[key] = v self.payload_overrides[key] = v else: self.payload_extras[key] = v # Set our xsd url self.xsd_url = None if self.payload_overrides or self.payload_extras \ else self.xsd_default_url.format(version=self.xsd_ver) return def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, **kwargs): """ Perform XML Notification """ # Prepare HTTP Headers headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/xml' } # Apply any/all header over-rides defined headers.update(self.headers) # Our XML Attachmement subsitution xml_attachments = '' payload_base = {} for key, value in ( (XMLPayloadField.VERSION, self.xsd_ver), (XMLPayloadField.TITLE, NotifyXML.escape_html( title, whitespace=False)), (XMLPayloadField.MESSAGE, NotifyXML.escape_html( body, whitespace=False)), (XMLPayloadField.MESSAGETYPE, NotifyXML.escape_html( notify_type, whitespace=False))): if not self.payload_map[key]: # Do not store element in payload response continue payload_base[self.payload_map[key]] = value # Apply our payload extras payload_base.update( {k: NotifyXML.escape_html(v, whitespace=False) for k, v in self.payload_extras.items()}) # Base Entres xml_base = ''.join( ['<{}>{}'.format(k, v, k) for k, v in payload_base.items()]) attachments = [] if attach and self.attachment_support: for no, attachment in enumerate(attach, start=1): # Perform some simple error checking if not attachment: # We could not access the attachment self.logger.error( 'Could not access Custom XML attachment {}.'.format( attachment.url(privacy=True))) return False try: # Prepare our Attachment in Base64 entry = \ ''.format( NotifyXML.escape_html( attachment.name if attachment.name else f'file{no:03}.dat', whitespace=False), NotifyXML.escape_html( attachment.mimetype, whitespace=False)) entry += attachment.base64() entry += '' attachments.append(entry) except exception.AppriseException: # We could not access the attachment self.logger.error( 'Could not access Custom XML attachment {}.'.format( attachment.url(privacy=True))) return False self.logger.debug( 'Appending Custom XML attachment {}'.format( attachment.url(privacy=True))) # Update our xml_attachments record: xml_attachments = \ '' + \ ''.join(attachments) + '' re_map = { '{{XSD_URL}}': f' xmlns:xsi="{self.xsd_url}"' if self.xsd_url else '', '{{ATTACHMENTS}}': xml_attachments, '{{CORE}}': xml_base, } # Iterate over above list and store content 
accordingly re_table = re.compile( r'(' + '|'.join(re_map.keys()) + r')', re.IGNORECASE, ) auth = None if self.user: auth = (self.user, self.password) # Set our schema schema = 'https' if self.secure else 'http' url = '%s://%s' % (schema, self.host) if isinstance(self.port, int): url += ':%d' % self.port url += self.fullpath payload = re_table.sub(lambda x: re_map[x.group()], self.payload) self.logger.debug('XML POST URL: %s (cert_verify=%r)' % ( url, self.verify_certificate, )) self.logger.debug('XML Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() if self.method == 'GET': method = requests.get elif self.method == 'PUT': method = requests.put elif self.method == 'PATCH': method = requests.patch elif self.method == 'DELETE': method = requests.delete elif self.method == 'HEAD': method = requests.head else: # POST method = requests.post try: r = method( url, data=payload, headers=headers, auth=auth, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code < 200 or r.status_code >= 300: # We had a problem status_str = \ NotifyXML.http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send JSON %s notification: %s%serror=%s.', self.method, status_str, ', ' if status_str else '', str(r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.info('Sent XML %s notification.', self.method) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending XML ' 'notification to %s.' % self.host) self.logger.debug('Socket Exception: %s' % str(e)) # Return; we're done return False return True @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol if self.secure else self.protocol, self.user, self.password, self.host, self.port if self.port else (443 if self.secure else 80), self.fullpath.rstrip('/'), ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
""" # Define any URL parameters params = { 'method': self.method, } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Append our headers into our parameters params.update({'+{}'.format(k): v for k, v in self.headers.items()}) # Append our GET params into our parameters params.update({'-{}'.format(k): v for k, v in self.params.items()}) # Append our payload extra's into our parameters params.update( {':{}'.format(k): v for k, v in self.payload_extras.items()}) params.update( {':{}'.format(k): v for k, v in self.payload_overrides.items()}) # Determine Authentication auth = '' if self.user and self.password: auth = '{user}:{password}@'.format( user=NotifyXML.quote(self.user, safe=''), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), ) elif self.user: auth = '{user}@'.format( user=NotifyXML.quote(self.user, safe=''), ) default_port = 443 if self.secure else 80 return '{schema}://{auth}{hostname}{port}{fullpath}?{params}'.format( schema=self.secure_protocol if self.secure else self.protocol, auth=auth, # never encode hostname since we're expecting it to be a valid one hostname=self.host, port='' if self.port is None or self.port == default_port else ':{}'.format(self.port), fullpath=NotifyXML.quote(self.fullpath, safe='/') if self.fullpath else '/', params=NotifyXML.urlencode(params), ) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url) if not results: # We're done early as we couldn't load the results return results # store any additional payload extra's defined results['payload'] = {NotifyXML.unquote(x): NotifyXML.unquote(y) for x, y in results['qsd:'].items()} # Add our headers that the user can potentially over-ride if they wish # to to our returned result set and tidy entries by unquoting them results['headers'] = {NotifyXML.unquote(x): NotifyXML.unquote(y) for x, y in results['qsd+'].items()} # Add our GET paramters in the event the user wants to pass these along results['params'] = {NotifyXML.unquote(x): NotifyXML.unquote(y) for x, y in results['qsd-'].items()} # Set method if not otherwise set if 'method' in results['qsd'] and len(results['qsd']['method']): results['method'] = NotifyXML.unquote(results['qsd']['method']) return results apprise-1.9.3/apprise/plugins/d7networks.py000066400000000000000000000356431477231770000210170ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # To use this service you will need a D7 Networks account from their website # at https://d7networks.com/ # # After you've established your account you can get your api login credentials # (both user and password) from the API Details section from within your # account profile area: https://d7networks.com/accounts/profile/ # # API Reference: https://d7networks.com/docs/Messages/Send_Message/ import requests from json import dumps from json import loads from .base import NotifyBase from ..common import NotifyType from ..utils.parse import ( is_phone_no, parse_phone_no, validate_regex, parse_bool) from ..locale import gettext_lazy as _ # Extend HTTP Error Messages D7NETWORKS_HTTP_ERROR_MAP = { 401: 'Invalid Argument(s) Specified.', 403: 'Unauthorized - Authentication Failure.', 412: 'A Routing Error Occured', 500: 'A Serverside Error Occured Handling the Request.', } class NotifyD7Networks(NotifyBase): """ A wrapper for D7 Networks Notifications """ # The default descriptive name associated with the Notification service_name = 'D7 Networks' # The services URL service_url = 'https://d7networks.com/' # All notification requests are secure secure_protocol = 'd7sms' # Allow 300 requests per minute. # 60/300 = 0.2 request_rate_per_sec = 0.20 # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_d7networks' # D7 Networks single notification URL notify_url = 'https://api.d7networks.com/messages/v1/send' # The maximum length of the body body_maxlen = 160 # A title can not be used for SMS Messages. Setting this to zero will # cause any title (if defined) to get placed into the message body. title_maxlen = 0 # Define object templates templates = ( '{schema}://{token}@{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'token': { 'name': _('API Access Token'), 'type': 'string', 'required': True, 'private': True, }, 'target_phone': { 'name': _('Target Phone No'), 'type': 'string', 'prefix': '+', 'regex': (r'^[0-9\s)(+-]+$', 'i'), 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', 'required': True, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'unicode': { # Unicode characters (default is 'auto') 'name': _('Unicode Characters'), 'type': 'bool', 'default': False, }, 'batch': { 'name': _('Batch Mode'), 'type': 'bool', 'default': False, }, 'to': { 'alias_of': 'targets', }, 'source': { # Originating address,In cases where the rewriting of the sender's # address is supported or permitted by the SMS-C. This is used to # transmit the message, this number is transmitted as the # originating address and is completely optional. 
'name': _('Originating Address'), 'type': 'string', 'map_to': 'source', }, 'from': { 'alias_of': 'source', }, }) def __init__(self, token=None, targets=None, source=None, batch=False, unicode=None, **kwargs): """ Initialize D7 Networks Object """ super().__init__(**kwargs) # Prepare Batch Mode Flag self.batch = batch # Setup our source address (if defined) self.source = None \ if not isinstance(source, str) else source.strip() # Define whether or not we should set the unicode flag self.unicode = self.template_args['unicode']['default'] \ if unicode is None else bool(unicode) # The token associated with the account self.token = validate_regex(token) if not self.token: msg = 'The D7 Networks token specified ({}) is invalid.'\ .format(token) self.logger.warning(msg) raise TypeError(msg) # Parse our targets self.targets = list() for target in parse_phone_no(targets): # Validate targets and drop bad ones: result = result = is_phone_no(target) if not result: self.logger.warning( 'Dropped invalid phone # ' '({}) specified.'.format(target), ) continue # store valid phone number self.targets.append(result['full']) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Depending on whether we are set to batch mode or single mode this redirects to the appropriate handling """ if len(self.targets) == 0: # There were no services to notify self.logger.warning('There were no D7 Networks targets to notify.') return False # error tracking (used for function return) has_error = False # Prepare our headers headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json', 'Accept': 'application/json', 'Authorization': f'Bearer {self.token}', } payload = { 'message_globals': { 'channel': 'sms', }, 'messages': [{ # Populated later on 'recipients': None, 'content': body, 'data_coding': # auto is a better substitute over 'text' as text is easier to # detect from a post than `unicode` is. 'auto' if not self.unicode else 'unicode', }], } # use the list directly targets = list(self.targets) if self.source: payload['message_globals']['originator'] = self.source target = None while len(targets): if self.batch: # Prepare our payload payload['messages'][0]['recipients'] = self.targets # Reset our targets so we don't keep going. This is required # because we're in batch mode; we only need to loop once. targets = [] else: # We're not in a batch mode; so get our next target # Get our target(s) to notify target = targets.pop(0) # Prepare our payload payload['messages'][0]['recipients'] = [target] # Some Debug Logging self.logger.debug( 'D7 Networks POST URL: {} (cert_verify={})'.format( self.notify_url, self.verify_certificate)) self.logger.debug('D7 Networks Payload: {}' .format(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( self.notify_url, data=dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code not in ( requests.codes.created, requests.codes.ok): # We had a problem status_str = \ NotifyBase.http_response_code_lookup( r.status_code, D7NETWORKS_HTTP_ERROR_MAP) try: # Update our status response if we can json_response = loads(r.content) status_str = json_response.get('message', status_str) except (AttributeError, TypeError, ValueError): # ValueError = r.content is Unparsable # TypeError = r.content is None # AttributeError = r is None # We could not parse JSON response. # We will just use the status we already have. 
pass self.logger.warning( 'Failed to send D7 Networks SMS notification to {}: ' '{}{}error={}.'.format( ', '.join(target) if self.batch else target, status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure has_error = True continue else: if self.batch: self.logger.info( 'Sent D7 Networks batch SMS notification to ' '{} target(s).'.format(len(self.targets))) else: self.logger.info( 'Sent D7 Networks SMS notification to {}.'.format( target)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending D7 Networks:%s ' % ( ', '.join(self.targets)) + 'notification.' ) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure has_error = True continue return not has_error def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'batch': 'yes' if self.batch else 'no', 'unicode': 'yes' if self.unicode else 'no', } if self.source: params['from'] = self.source # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) return '{schema}://{token}@{targets}/?{params}'.format( schema=self.secure_protocol, token=self.pprint(self.token, privacy, safe=''), targets='/'.join( [NotifyD7Networks.quote(x, safe='') for x in self.targets]), params=NotifyD7Networks.urlencode(params)) @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.token) def __len__(self): """ Returns the number of targets associated with this notification """ # # Factor batch into calculation # return len(self.targets) if not self.batch else 1 @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. 
""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results if 'token' in results['qsd'] and len(results['qsd']['token']): results['token'] = \ NotifyD7Networks.unquote(results['qsd']['token']) elif results['user']: results['token'] = NotifyD7Networks.unquote(results['user']) if results['password']: # Support token containing a colon (:) results['token'] += \ ':' + NotifyD7Networks.unquote(results['password']) elif results['password']: # Support token starting with a colon (:) results['token'] = \ ':' + NotifyD7Networks.unquote(results['password']) # Initialize our targets results['targets'] = list() # The store our first target stored in the hostname results['targets'].append(NotifyD7Networks.unquote(results['host'])) # Get our entries; split_path() looks after unquoting content for us # by default results['targets'].extend( NotifyD7Networks.split_path(results['fullpath'])) # Get Batch Mode Flag results['batch'] = \ parse_bool(results['qsd'].get('batch', False)) # Get Unicode Flag results['unicode'] = \ parse_bool(results['qsd'].get('unicode', False)) # Support the 'to' variable so that we can support targets this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyD7Networks.parse_phone_no(results['qsd']['to']) # Support the 'from' and source variable if 'from' in results['qsd'] and len(results['qsd']['from']): results['source'] = \ NotifyD7Networks.unquote(results['qsd']['from']) elif 'source' in results['qsd'] and len(results['qsd']['source']): results['source'] = \ NotifyD7Networks.unquote(results['qsd']['source']) return results apprise-1.9.3/apprise/plugins/dapnet.py000066400000000000000000000330421477231770000201520ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # To use this plugin, sign up with Hampager (you need to be a licensed # ham radio operator # http://www.hampager.de/ # # You're done at this point, you only need to know your user/pass that # you signed up with. 
# The following URLs would be accepted by Apprise: # - dapnet://{user}:{password}@{callsign} # - dapnet://{user}:{password}@{callsign1}/{callsign2} # Optional parameters: # - priority (NORMAL or EMERGENCY). Default: NORMAL # - txgroups --> comma-separated list of DAPNET transmitter # groups. Default: 'dl-all' # https://hampager.de/#/transmitters/groups from json import dumps # The API reference used to build this plugin was documented here: # https://hampager.de/dokuwiki/doku.php#dapnet_api # import requests from requests.auth import HTTPBasicAuth from .base import NotifyBase from ..locale import gettext_lazy as _ from ..url import PrivacyMode from ..common import NotifyType from ..utils.parse import ( is_call_sign, parse_call_sign, parse_list, parse_bool) class DapnetPriority: NORMAL = 0 EMERGENCY = 1 DAPNET_PRIORITIES = { DapnetPriority.NORMAL: 'normal', DapnetPriority.EMERGENCY: 'emergency', } DAPNET_PRIORITY_MAP = { # Maps against string 'normal' 'n': DapnetPriority.NORMAL, # Maps against string 'emergency' 'e': DapnetPriority.EMERGENCY, # Entries to additionally support (so more like Dapnet's API) '0': DapnetPriority.NORMAL, '1': DapnetPriority.EMERGENCY, } class NotifyDapnet(NotifyBase): """ A wrapper for DAPNET / Hampager Notifications """ # The default descriptive name associated with the Notification service_name = 'Dapnet' # The services URL service_url = 'https://hampager.de/' # The default secure protocol secure_protocol = 'dapnet' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_dapnet' # Dapnet uses the http protocol with JSON requests notify_url = 'http://www.hampager.de:8080/calls' # The maximum length of the body body_maxlen = 80 # A title can not be used for Dapnet Messages. Setting this to zero will # cause any title (if defined) to get placed into the message body. 
title_maxlen = 0 # The maximum amount of emails that can reside within a single transmission default_batch_size = 50 # Define object templates templates = ('{schema}://{user}:{password}@{targets}',) # Define our template tokens template_tokens = dict( NotifyBase.template_tokens, **{ 'user': { 'name': _('User Name'), 'type': 'string', 'required': True, }, 'password': { 'name': _('Password'), 'type': 'string', 'private': True, 'required': True, }, 'target_callsign': { 'name': _('Target Callsign'), 'type': 'string', 'regex': ( r'^[a-z0-9]{2,5}(-[a-z0-9]{1,2})?$', 'i', ), 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', 'required': True, }, } ) # Define our template arguments template_args = dict( NotifyBase.template_args, **{ 'to': { 'name': _('Target Callsign'), 'type': 'string', 'map_to': 'targets', }, 'priority': { 'name': _('Priority'), 'type': 'choice:int', 'values': DAPNET_PRIORITIES, 'default': DapnetPriority.NORMAL, }, 'txgroups': { 'name': _('Transmitter Groups'), 'type': 'string', 'default': 'dl-all', 'private': True, }, 'batch': { 'name': _('Batch Mode'), 'type': 'bool', 'default': False, }, } ) def __init__(self, targets=None, priority=None, txgroups=None, batch=False, **kwargs): """ Initialize Dapnet Object """ super().__init__(**kwargs) # Parse our targets self.targets = list() # The Priority of the message self.priority = int( NotifyDapnet.template_args['priority']['default'] if priority is None else next(( v for k, v in DAPNET_PRIORITY_MAP.items() if str(priority).lower().startswith(k)), NotifyDapnet.template_args['priority']['default'])) if not (self.user and self.password): msg = 'A Dapnet user/pass was not provided.' self.logger.warning(msg) raise TypeError(msg) # Get the transmitter group self.txgroups = parse_list( NotifyDapnet.template_args['txgroups']['default'] if not txgroups else txgroups) # Prepare Batch Mode Flag self.batch = batch for target in parse_call_sign(targets): # Validate targets and drop bad ones: result = is_call_sign(target) if not result: self.logger.warning( 'Dropping invalid Amateur radio call sign ({}).'.format( target), ) continue # Store callsign without SSID and ignore duplicates if result['callsign'] not in self.targets: self.targets.append(result['callsign']) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Dapnet Notification """ if not self.targets: # There is no one to email; we're done self.logger.warning( 'There are no Amateur radio callsigns to notify') return False # Send in batches if identified to do so batch_size = 1 if not self.batch else self.default_batch_size headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json; charset=utf-8', } # error tracking (used for function return) has_error = False # Create a copy of the targets list targets = list(self.targets) for index in range(0, len(targets), batch_size): # prepare JSON payload payload = { 'text': body, 'callSignNames': targets[index:index + batch_size], 'transmitterGroupNames': self.txgroups, 'emergency': (self.priority == DapnetPriority.EMERGENCY), } self.logger.debug('DAPNET POST URL: %s' % self.notify_url) self.logger.debug('DAPNET Payload: %s' % dumps(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( self.notify_url, data=dumps(payload), headers=headers, auth=HTTPBasicAuth( username=self.user, password=self.password), verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.created: # 
We had a problem self.logger.warning( 'Failed to send DAPNET notification {} to {}: ' 'error={}.'.format( payload['text'], ' to {}'.format(self.targets), r.status_code ) ) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure has_error = True else: self.logger.info( 'Sent \'{}\' DAPNET notification {}'.format( payload['text'], 'to {}'.format(self.targets) ) ) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending DAPNET ' 'notification to {}'.format(self.targets) ) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure has_error = True return not has_error def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'priority': DAPNET_PRIORITIES[self.template_args['priority']['default']] if self.priority not in DAPNET_PRIORITIES else DAPNET_PRIORITIES[self.priority], 'batch': 'yes' if self.batch else 'no', 'txgroups': ','.join(self.txgroups), } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Setup Authentication auth = '{user}:{password}@'.format( user=NotifyDapnet.quote(self.user, safe=""), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe='' ), ) return '{schema}://{auth}{targets}?{params}'.format( schema=self.secure_protocol, auth=auth, targets='/'.join([self.pprint(x, privacy, safe='') for x in self.targets]), params=NotifyDapnet.urlencode(params), ) @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.user, self.password) def __len__(self): """ Returns the number of targets associated with this notification """ # # Factor batch into calculation # batch_size = 1 if not self.batch else self.default_batch_size targets = len(self.targets) if batch_size > 1: targets = int(targets / batch_size) + \ (1 if targets % batch_size else 0) return targets @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. 
""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # All elements are targets results['targets'] = [NotifyDapnet.unquote(results['host'])] # All entries after the hostname are additional targets results['targets'].extend(NotifyDapnet.split_path(results['fullpath'])) # Support the 'to' variable so that we can support rooms this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyDapnet.parse_list(results['qsd']['to']) # Set our priority if 'priority' in results['qsd'] and len(results['qsd']['priority']): results['priority'] = \ NotifyDapnet.unquote(results['qsd']['priority']) # Check for one or multiple transmitter groups (comma separated) # and split them up, when necessary if 'txgroups' in results['qsd']: results['txgroups'] = \ [x.lower() for x in NotifyDapnet.parse_list(results['qsd']['txgroups'])] # Get Batch Mode Flag results['batch'] = \ parse_bool(results['qsd'].get( 'batch', NotifyDapnet.template_args['batch']['default'])) return results apprise-1.9.3/apprise/plugins/dbus.py000066400000000000000000000342331477231770000176370ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
import sys from .base import NotifyBase from ..common import NotifyImageSize from ..common import NotifyType from ..utils.parse import parse_bool from ..locale import gettext_lazy as _ # Default our global support flag NOTIFY_DBUS_SUPPORT_ENABLED = False # Image support is dependant on the GdkPixbuf library being available NOTIFY_DBUS_IMAGE_SUPPORT = False # Initialize our mainloops LOOP_GLIB = None LOOP_QT = None try: # dbus essentials from dbus import SessionBus from dbus import Interface from dbus import Byte from dbus import ByteArray from dbus import DBusException # # now we try to determine which mainloop(s) we can access # # glib try: from dbus.mainloop.glib import DBusGMainLoop LOOP_GLIB = DBusGMainLoop() except ImportError: # pragma: no cover # No problem pass # qt try: from dbus.mainloop.qt import DBusQtMainLoop LOOP_QT = DBusQtMainLoop(set_as_default=True) except ImportError: # No problem pass # We're good as long as at least one NOTIFY_DBUS_SUPPORT_ENABLED = ( LOOP_GLIB is not None or LOOP_QT is not None) # ImportError: When using gi.repository you must not import static modules # like "gobject". Please change all occurrences of "import gobject" to # "from gi.repository import GObject". # See: https://bugzilla.gnome.org/show_bug.cgi?id=709183 if "gobject" in sys.modules: # pragma: no cover del sys.modules["gobject"] try: # The following is required for Image/Icon loading only import gi gi.require_version('GdkPixbuf', '2.0') from gi.repository import GdkPixbuf NOTIFY_DBUS_IMAGE_SUPPORT = True except (ImportError, ValueError, AttributeError): # No problem; this will get caught in outer try/catch # A ValueError will get thrown upon calling gi.require_version() if # GDK/GTK isn't installed on the system but gi is. pass except ImportError: # No problem; we just simply can't support this plugin; we could # be in microsoft windows, or we just don't have the python-gobject # library available to us (or maybe one we don't support)? pass # Define our supported protocols and the loop to assign them. # The key to value pairs are the actual supported schema's matched # up with the Main Loop they should reference when accessed. MAINLOOP_MAP = { 'qt': LOOP_QT, 'kde': LOOP_QT, 'glib': LOOP_GLIB, 'dbus': LOOP_QT if LOOP_QT else LOOP_GLIB, } # Urgencies class DBusUrgency: LOW = 0 NORMAL = 1 HIGH = 2 DBUS_URGENCIES = { # Note: This also acts as a reverse lookup mapping DBusUrgency.LOW: 'low', DBusUrgency.NORMAL: 'normal', DBusUrgency.HIGH: 'high', } DBUS_URGENCY_MAP = { # Maps against string 'low' 'l': DBusUrgency.LOW, # Maps against string 'moderate' 'm': DBusUrgency.LOW, # Maps against string 'normal' 'n': DBusUrgency.NORMAL, # Maps against string 'high' 'h': DBusUrgency.HIGH, # Maps against string 'emergency' 'e': DBusUrgency.HIGH, # Entries to additionally support (so more like DBus's API) '0': DBusUrgency.LOW, '1': DBusUrgency.NORMAL, '2': DBusUrgency.HIGH, } class NotifyDBus(NotifyBase): """ A wrapper for local DBus/Qt Notifications """ # Set our global enabled flag enabled = NOTIFY_DBUS_SUPPORT_ENABLED requirements = { # Define our required packaging in order to work 'details': _('libdbus-1.so.x must be installed.') } # The default descriptive name associated with the Notification service_name = _('DBus Notification') # The services URL service_url = 'http://www.freedesktop.org/Software/dbus/' # The default protocols # Python 3 keys() does not return a list object, it is its own dict_keys() # object if we were to reference, we wouldn't be backwards compatible with # Python v2. 
So converting the result set back into a list makes us # compatible protocol = list(MAINLOOP_MAP.keys()) # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_dbus' # No throttling required for DBus queries request_rate_per_sec = 0 # Allows the user to specify the NotifyImageSize object image_size = NotifyImageSize.XY_128 # The number of milliseconds to keep the message present for message_timeout_ms = 13000 # Limit results to just the first 10 line otherwise there is just to much # content to display body_max_line_count = 10 # The following are required to hook into the notifications: dbus_interface = 'org.freedesktop.Notifications' dbus_setting_location = '/org/freedesktop/Notifications' # No URL Identifier will be defined for this service as there simply isn't # enough details to uniquely identify one dbus:// from another. url_identifier = False # Define object templates templates = ( '{schema}://', ) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'urgency': { 'name': _('Urgency'), 'type': 'choice:int', 'values': DBUS_URGENCIES, 'default': DBusUrgency.NORMAL, }, 'priority': { # Apprise uses 'priority' everywhere; it's just a nice consistent # feel to be able to use it here as well. Just map the # value back to 'priority' 'alias_of': 'urgency', }, 'x': { 'name': _('X-Axis'), 'type': 'int', 'min': 0, 'map_to': 'x_axis', }, 'y': { 'name': _('Y-Axis'), 'type': 'int', 'min': 0, 'map_to': 'y_axis', }, 'image': { 'name': _('Include Image'), 'type': 'bool', 'default': True, 'map_to': 'include_image', }, }) def __init__(self, urgency=None, x_axis=None, y_axis=None, include_image=True, **kwargs): """ Initialize DBus Object """ super().__init__(**kwargs) # Track our notifications self.registry = {} # Store our schema; default to dbus self.schema = kwargs.get('schema', 'dbus') if self.schema not in MAINLOOP_MAP: msg = 'The schema specified ({}) is not supported.' \ .format(self.schema) self.logger.warning(msg) raise TypeError(msg) # The urgency of the message self.urgency = int( NotifyDBus.template_args['urgency']['default'] if urgency is None else next(( v for k, v in DBUS_URGENCY_MAP.items() if str(urgency).lower().startswith(k)), NotifyDBus.template_args['urgency']['default'])) # Our x/y axis settings if x_axis or y_axis: try: self.x_axis = int(x_axis) self.y_axis = int(y_axis) except (TypeError, ValueError): # Invalid x/y values specified msg = 'The x,y coordinates specified ({},{}) are invalid.'\ .format(x_axis, y_axis) self.logger.warning(msg) raise TypeError(msg) else: self.x_axis = None self.y_axis = None # Track whether we want to add an image to the notification. 
self.include_image = include_image def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform DBus Notification """ # Acquire our session try: session = SessionBus(mainloop=MAINLOOP_MAP[self.schema]) except DBusException as e: # Handle exception self.logger.warning('Failed to send DBus notification.') self.logger.debug(f'DBus Exception: {e}') return False # If there is no title, but there is a body, swap the two to get rid # of the weird whitespace if not title: title = body body = '' # acquire our dbus object dbus_obj = session.get_object( self.dbus_interface, self.dbus_setting_location, ) # Acquire our dbus interface dbus_iface = Interface( dbus_obj, dbus_interface=self.dbus_interface, ) # image path icon_path = None if not self.include_image \ else self.image_path(notify_type, extension='.ico') # Our meta payload meta_payload = { "urgency": Byte(self.urgency) } if not (self.x_axis is None and self.y_axis is None): # Set x/y access if these were set meta_payload['x'] = self.x_axis meta_payload['y'] = self.y_axis if NOTIFY_DBUS_IMAGE_SUPPORT and icon_path: try: # Use Pixbuf to create the proper image type image = GdkPixbuf.Pixbuf.new_from_file(icon_path) # Associate our image to our notification meta_payload['icon_data'] = ( image.get_width(), image.get_height(), image.get_rowstride(), image.get_has_alpha(), image.get_bits_per_sample(), image.get_n_channels(), ByteArray(image.get_pixels()) ) except Exception as e: self.logger.warning( "Could not load notification icon (%s).", icon_path) self.logger.debug(f'DBus Exception: {e}') try: # Always call throttle() before any remote execution is made self.throttle() dbus_iface.Notify( # Application Identifier self.app_id, # Message ID (0 = New Message) 0, # Icon (str) - not used '', # Title str(title), # Body str(body), # Actions list(), # Meta meta_payload, # Message Timeout self.message_timeout_ms, ) self.logger.info('Sent DBus notification.') except Exception as e: self.logger.warning('Failed to send DBus notification.') self.logger.debug(f'DBus Exception: {e}') return False return True def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'image': 'yes' if self.include_image else 'no', 'urgency': DBUS_URGENCIES[self.template_args['urgency']['default']] if self.urgency not in DBUS_URGENCIES else DBUS_URGENCIES[self.urgency], } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # x in (x,y) screen coordinates if self.x_axis: params['x'] = str(self.x_axis) # y in (x,y) screen coordinates if self.y_axis: params['y'] = str(self.y_axis) return '{schema}://_/?{params}'.format( schema=self.schema, params=NotifyDBus.urlencode(params), ) @staticmethod def parse_url(url): """ There are no parameters nessisary for this protocol; simply having gnome:// is all you need. This function just makes sure that is in place. 
""" results = NotifyBase.parse_url(url, verify_host=False) # Include images with our message results['include_image'] = \ parse_bool(results['qsd'].get('image', True)) # DBus supports urgency, but we we also support the keyword priority # so that it is consistent with some of the other plugins if 'priority' in results['qsd'] and len(results['qsd']['priority']): # We intentionally store the priority in the urgency section results['urgency'] = \ NotifyDBus.unquote(results['qsd']['priority']) if 'urgency' in results['qsd'] and len(results['qsd']['urgency']): results['urgency'] = \ NotifyDBus.unquote(results['qsd']['urgency']) # handle x,y coordinates if 'x' in results['qsd'] and len(results['qsd']['x']): results['x_axis'] = NotifyDBus.unquote(results['qsd'].get('x')) if 'y' in results['qsd'] and len(results['qsd']['y']): results['y_axis'] = NotifyDBus.unquote(results['qsd'].get('y')) return results apprise-1.9.3/apprise/plugins/dingtalk.py000066400000000000000000000277611477231770000205070ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import re import time import hmac import hashlib import base64 import requests from json import dumps from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyFormat from ..common import NotifyType from ..utils.parse import parse_list, validate_regex from ..locale import gettext_lazy as _ # Register at https://dingtalk.com # - Download their PC based software as it is the only way you can create # a custom robot. You can create a custom robot per group. You will # be provided an access_token that Apprise will need. 
# Syntax: # dingtalk://{access_token}/ # dingtalk://{access_token}/{optional_phone_no} # dingtalk://{access_token}/{phone_no_1}/{phone_no_2}/{phone_no_N/ # Some Phone Number Detection IS_PHONE_NO = re.compile(r'^\+?(?P[0-9\s)(+-]+)\s*$') class NotifyDingTalk(NotifyBase): """ A wrapper for DingTalk Notifications """ # The default descriptive name associated with the Notification service_name = 'DingTalk' # The services URL service_url = 'https://www.dingtalk.com/' # All notification requests are secure secure_protocol = 'dingtalk' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_dingtalk' # DingTalk API notify_url = 'https://oapi.dingtalk.com/robot/send?access_token={token}' # Do not set title_maxlen as it is set in a property value below # since the length varies depending if we are doing a markdown # based message or a text based one. # title_maxlen = see below @propery defined # Define object templates templates = ( '{schema}://{token}/', '{schema}://{token}/{targets}/', '{schema}://{secret}@{token}/', '{schema}://{secret}@{token}/{targets}/', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'token': { 'name': _('Token'), 'type': 'string', 'private': True, 'required': True, 'regex': (r'^[a-z0-9]+$', 'i'), }, 'secret': { 'name': _('Secret'), 'type': 'string', 'private': True, 'regex': (r'^[a-z0-9]+$', 'i'), }, 'target_phone_no': { 'name': _('Target Phone No'), 'type': 'string', 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'token': { 'alias_of': 'token', }, 'secret': { 'alias_of': 'secret', }, }) def __init__(self, token, targets=None, secret=None, **kwargs): """ Initialize DingTalk Object """ super().__init__(**kwargs) # Secret Key (associated with project) self.token = validate_regex( token, *self.template_tokens['token']['regex']) if not self.token: msg = 'An invalid DingTalk API Token ' \ '({}) was specified.'.format(token) self.logger.warning(msg) raise TypeError(msg) self.secret = None if secret: self.secret = validate_regex( secret, *self.template_tokens['secret']['regex']) if not self.secret: msg = 'An invalid DingTalk Secret ' \ '({}) was specified.'.format(token) self.logger.warning(msg) raise TypeError(msg) # Parse our targets self.targets = list() for target in parse_list(targets): # Validate targets and drop bad ones: result = IS_PHONE_NO.match(target) if result: # Further check our phone # for it's digit count result = ''.join(re.findall(r'\d+', result.group('phone'))) if len(result) < 11 or len(result) > 14: self.logger.warning( 'Dropped invalid phone # ' '({}) specified.'.format(target), ) continue # store valid phone number self.targets.append(result) continue self.logger.warning( 'Dropped invalid phone # ' '({}) specified.'.format(target), ) return def get_signature(self): """ Calculates time-based signature so that we can send arbitrary messages. 
""" timestamp = str(round(time.time() * 1000)) secret_enc = self.secret.encode('utf-8') str_to_sign_enc = \ "{}\n{}".format(timestamp, self.secret).encode('utf-8') hmac_code = hmac.new( secret_enc, str_to_sign_enc, digestmod=hashlib.sha256).digest() signature = NotifyDingTalk.quote(base64.b64encode(hmac_code), safe='') return timestamp, signature def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform DingTalk Notification """ payload = { 'msgtype': 'text', 'at': { 'atMobiles': self.targets, 'isAtAll': False, } } if self.notify_format == NotifyFormat.MARKDOWN: payload['markdown'] = { 'title': title, 'text': body, } else: payload['text'] = { 'content': body, } # Our Notification URL notify_url = self.notify_url.format(token=self.token) params = None if self.secret: timestamp, signature = self.get_signature() params = { 'timestamp': timestamp, 'sign': signature, } # Prepare our headers headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json' } # Some Debug Logging self.logger.debug('DingTalk URL: {} (cert_verify={})'.format( notify_url, self.verify_certificate)) self.logger.debug('DingTalk Payload: {}' .format(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( notify_url, data=dumps(payload), headers=headers, params=params, verify=self.verify_certificate, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyDingTalk.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to send DingTalk notification: ' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) return False else: self.logger.info('Sent DingTalk notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occured sending DingTalk ' 'notification.' ) self.logger.debug('Socket Exception: %s' % str(e)) return False return True @property def title_maxlen(self): """ The title isn't used when not in markdown mode. """ return NotifyBase.title_maxlen \ if self.notify_format == NotifyFormat.MARKDOWN else 0 def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any arguments set args = { 'format': self.notify_format, 'overflow': self.overflow_mode, 'verify': 'yes' if self.verify_certificate else 'no', } return '{schema}://{secret}{token}/{targets}/?{args}'.format( schema=self.secure_protocol, secret='' if not self.secret else '{}@'.format(self.pprint( self.secret, privacy, mode=PrivacyMode.Secret, safe='')), token=self.pprint(self.token, privacy, safe=''), targets='/'.join( [NotifyDingTalk.quote(x, safe='') for x in self.targets]), args=NotifyDingTalk.urlencode(args)) @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.secret, self.token) def __len__(self): """ Returns the number of targets associated with this notification """ targets = len(self.targets) return targets if targets > 0 else 1 @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to substantiate this object. 
""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results results['token'] = NotifyDingTalk.unquote(results['host']) # if a user has been defined, use it's value as the secret if results.get('user'): results['secret'] = results.get('user') # Get our entries; split_path() looks after unquoting content for us # by default results['targets'] = NotifyDingTalk.split_path(results['fullpath']) # Support the use of the `token` keyword argument if 'token' in results['qsd'] and len(results['qsd']['token']): results['token'] = \ NotifyDingTalk.unquote(results['qsd']['token']) # Support the use of the `secret` keyword argument if 'secret' in results['qsd'] and len(results['qsd']['secret']): results['secret'] = \ NotifyDingTalk.unquote(results['qsd']['secret']) # Support the 'to' variable so that we can support targets this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyDingTalk.parse_list(results['qsd']['to']) return results apprise-1.9.3/apprise/plugins/discord.py000066400000000000000000000641161477231770000203340ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # For this to work correctly you need to create a webhook. To do this just # click on the little gear icon next to the channel you're part of. From # here you'll be able to access the Webhooks menu and create a new one. 
# # When you've completed, you'll get a URL that looks a little like this: # https://discord.com/api/webhooks/417429632418316298/\ # JHZ7lQml277CDHmQKMHI8qBe7bk2ZwO5UKjCiOAF7711o33MyqU344Qpgv7YTpadV_js # # Simplified, it looks like this: # https://discord.com/api/webhooks/WEBHOOK_ID/WEBHOOK_TOKEN # # This plugin will simply work using the url of: # discord://WEBHOOK_ID/WEBHOOK_TOKEN # # API Documentation on Webhooks: # - https://discord.com/developers/docs/resources/webhook # import re import requests from json import dumps from datetime import timedelta from datetime import datetime from datetime import timezone from .base import NotifyBase from ..common import NotifyImageSize from ..common import NotifyFormat from ..common import NotifyType from ..utils.parse import parse_bool, validate_regex from ..locale import gettext_lazy as _ from ..attachment.base import AttachBase # Used to detect user/role IDs USER_ROLE_DETECTION_RE = re.compile( r'\s*(?:<@(?P&?)(?P[0-9]+)>|@(?P[a-z0-9]+))', re.I) class NotifyDiscord(NotifyBase): """ A wrapper to Discord Notifications """ # The default descriptive name associated with the Notification service_name = 'Discord' # The services URL service_url = 'https://discord.com/' # The default secure protocol secure_protocol = 'discord' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_discord' # Discord Webhook notify_url = 'https://discord.com/api/webhooks' # Support attachments attachment_support = True # Allows the user to specify the NotifyImageSize object image_size = NotifyImageSize.XY_256 # Discord is kind enough to return how many more requests we're allowed to # continue to make within it's header response as: # X-RateLimit-Reset: The epoc time (in seconds) we can expect our # rate-limit to be reset. # X-RateLimit-Remaining: an integer identifying how many requests we're # still allow to make. request_rate_per_sec = 0 # Taken right from google.auth.helpers: clock_skew = timedelta(seconds=10) # The maximum allowable characters allowed in the body per message body_maxlen = 2000 # The 2000 characters above defined by the body_maxlen include that of the # title. Setting this to True ensures overflow options behave properly overflow_amalgamate_title = True # Discord has a limit of the number of fields you can include in an # embeds message. This value allows the discord message to safely # break into multiple messages to handle these cases. discord_max_fields = 10 # Define object templates templates = ( '{schema}://{webhook_id}/{webhook_token}', '{schema}://{botname}@{webhook_id}/{webhook_token}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'botname': { 'name': _('Bot Name'), 'type': 'string', 'map_to': 'user', }, 'webhook_id': { 'name': _('Webhook ID'), 'type': 'string', 'private': True, 'required': True, }, 'webhook_token': { 'name': _('Webhook Token'), 'type': 'string', 'private': True, 'required': True, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'tts': { 'name': _('Text To Speech'), 'type': 'bool', 'default': False, }, 'avatar': { 'name': _('Avatar Image'), 'type': 'bool', 'default': True, }, 'avatar_url': { 'name': _('Avatar URL'), 'type': 'string', }, 'href': { 'name': _('URL'), 'type': 'string', }, 'url': { 'alias_of': 'href', }, # Send a message to the specified thread within a webhook's channel. # The thread will automatically be unarchived. 
'thread': { 'name': _('Thread ID'), 'type': 'string', }, 'footer': { 'name': _('Display Footer'), 'type': 'bool', 'default': False, }, 'footer_logo': { 'name': _('Footer Logo'), 'type': 'bool', 'default': True, }, 'fields': { 'name': _('Use Fields'), 'type': 'bool', 'default': True, }, 'image': { 'name': _('Include Image'), 'type': 'bool', 'default': False, 'map_to': 'include_image', }, }) def __init__(self, webhook_id, webhook_token, tts=False, avatar=True, footer=False, footer_logo=True, include_image=False, fields=True, avatar_url=None, href=None, thread=None, **kwargs): """ Initialize Discord Object """ super().__init__(**kwargs) # Webhook ID (associated with project) self.webhook_id = validate_regex(webhook_id) if not self.webhook_id: msg = 'An invalid Discord Webhook ID ' \ '({}) was specified.'.format(webhook_id) self.logger.warning(msg) raise TypeError(msg) # Webhook Token (associated with project) self.webhook_token = validate_regex(webhook_token) if not self.webhook_token: msg = 'An invalid Discord Webhook Token ' \ '({}) was specified.'.format(webhook_token) self.logger.warning(msg) raise TypeError(msg) # Text To Speech self.tts = tts # Over-ride Avatar Icon self.avatar = avatar # Place a footer self.footer = footer # include a footer_logo in footer self.footer_logo = footer_logo # Place a thumbnail image inline with the message body self.include_image = include_image # Use Fields self.fields = fields # Specified Thread ID self.thread_id = thread # Avatar URL # This allows a user to provide an over-ride to the otherwise # dynamically generated avatar url images self.avatar_url = avatar_url # A URL to have the title link to self.href = href # For Tracking Purposes self.ratelimit_reset = datetime.now(timezone.utc).replace(tzinfo=None) # Default to 1.0 self.ratelimit_remaining = 1.0 return def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, **kwargs): """ Perform Discord Notification """ payload = { # Text-To-Speech 'tts': self.tts, # If Text-To-Speech is set to True, then we do not want to wait # for the whole message before continuing. 
Otherwise, we wait 'wait': self.tts is False, } # Acquire image_url image_url = self.image_url(notify_type) if self.avatar and (image_url or self.avatar_url): payload['avatar_url'] = \ self.avatar_url if self.avatar_url else image_url if self.user: # Optionally override the default username of the webhook payload['username'] = self.user # Associate our thread_id with our message params = {'thread_id': self.thread_id} if self.thread_id else None if body: # our fields variable fields = [] if self.notify_format == NotifyFormat.MARKDOWN: # Use embeds for payload payload['embeds'] = [{ 'author': { 'name': self.app_id, 'url': self.app_url, }, 'title': title, 'description': body, # Our color associated with our notification 'color': self.color(notify_type, int), }] if self.href: payload['embeds'][0]['url'] = self.href if self.footer: # Acquire logo URL logo_url = self.image_url(notify_type, logo=True) # Set Footer text to our app description payload['embeds'][0]['footer'] = { 'text': self.app_desc, } if self.footer_logo and logo_url: payload['embeds'][0]['footer']['icon_url'] = logo_url if self.include_image and image_url: payload['embeds'][0]['thumbnail'] = { 'url': image_url, 'height': 256, 'width': 256, } if self.fields: # Break titles out so that we can sort them in embeds description, fields = self.extract_markdown_sections(body) # Swap first entry for description payload['embeds'][0]['description'] = description if fields: # Apply our additional parsing for a better # presentation payload['embeds'][0]['fields'] = \ fields[:self.discord_max_fields] # Remove entry from head of fields fields = fields[self.discord_max_fields:] else: # not markdown payload['content'] = \ body if not title else "{}\r\n{}".format(title, body) # parse for user id's <@123> and role IDs <@&456> results = USER_ROLE_DETECTION_RE.findall(body) if results: payload['allow_mentions'] = { 'parse': [], 'users': [], 'roles': [], } _content = [] for (is_role, no, value) in results: if value: payload['allow_mentions']['parse'].append(value) _content.append(f'@{value}') elif is_role: payload['allow_mentions']['roles'].append(no) _content.append(f'<@&{no}>') else: # is_user payload['allow_mentions']['users'].append(no) _content.append(f'<@{no}>') if self.notify_format == NotifyFormat.MARKDOWN: # Add pingable elements to content field payload['content'] = '👉 ' + ' '.join(_content) if not self._send(payload, params=params): # We failed to post our message return False # Process any remaining fields IF set if fields: payload['embeds'][0]['description'] = '' for i in range(0, len(fields), self.discord_max_fields): payload['embeds'][0]['fields'] = \ fields[i:i + self.discord_max_fields] if not self._send(payload): # We failed to post our message return False if attach and self.attachment_support: # Update our payload; the idea is to preserve it's other detected # and assigned values for re-use here too payload.update({ # Text-To-Speech 'tts': False, # Wait until the upload has posted itself before continuing 'wait': True, }) # # Remove our text/title based content for attachment use # if 'embeds' in payload: del payload['embeds'] if 'content' in payload: del payload['content'] if 'allow_mentions' in payload: del payload['allow_mentions'] # # Send our attachments # for attachment in attach: self.logger.info( 'Posting Discord Attachment {}'.format(attachment.name)) if not self._send(payload, params=params, attach=attachment): # We failed to post our message return False # Otherwise return return True def _send(self, payload, 
attach=None, params=None, rate_limit=1, **kwargs): """ Wrapper to the requests (post) object """ # Our headers headers = { 'User-Agent': self.app_id, } # Construct Notify URL notify_url = '{0}/{1}/{2}'.format( self.notify_url, self.webhook_id, self.webhook_token, ) self.logger.debug('Discord POST URL: %s (cert_verify=%r)' % ( notify_url, self.verify_certificate, )) self.logger.debug('Discord Payload: %s' % str(payload)) # By default set wait to None wait = None if self.ratelimit_remaining <= 0.0: # Determine how long we should wait for or if we should wait at # all. This isn't fool-proof because we can't be sure the client # time (calling this script) is completely synced up with the # Discord server. One would hope we're on NTP and our clocks are # the same allowing this to role smoothly: now = datetime.now(timezone.utc).replace(tzinfo=None) if now < self.ratelimit_reset: # We need to throttle for the difference in seconds wait = abs( (self.ratelimit_reset - now + self.clock_skew) .total_seconds()) # Always call throttle before any remote server i/o is made; self.throttle(wait=wait) # Perform some simple error checking if isinstance(attach, AttachBase): if not attach: # We could not access the attachment self.logger.error( 'Could not access attachment {}.'.format( attach.url(privacy=True))) return False self.logger.debug( 'Posting Discord attachment {}'.format( attach.url(privacy=True))) # Our attachment path (if specified) files = None try: # Open our attachment path if required: if attach: files = {'file': (attach.name, open(attach.path, 'rb'))} else: headers['Content-Type'] = 'application/json; charset=utf-8' r = requests.post( notify_url, params=params, data=payload if files else dumps(payload), headers=headers, files=files, verify=self.verify_certificate, timeout=self.request_timeout, ) # Handle rate limiting (if specified) try: # Store our rate limiting (if provided) self.ratelimit_remaining = \ float(r.headers.get( 'X-RateLimit-Remaining')) self.ratelimit_reset = datetime.fromtimestamp( int(r.headers.get('X-RateLimit-Reset')), timezone.utc).replace(tzinfo=None) except (TypeError, ValueError): # This is returned if we could not retrieve this # information gracefully accept this state and move on pass if r.status_code not in ( requests.codes.ok, requests.codes.no_content): # We had a problem status_str = \ NotifyBase.http_response_code_lookup(r.status_code) if r.status_code == requests.codes.too_many_requests \ and rate_limit > 0: # handle rate limiting self.logger.warning( 'Discord rate limiting in effect; ' 'blocking for %.2f second(s)', self.ratelimit_remaining) # Try one more time before failing return self._send( payload=payload, attach=attach, params=params, rate_limit=rate_limit - 1, **kwargs) self.logger.warning( 'Failed to send {}to Discord notification: ' '{}{}error={}.'.format( attach.name if attach else '', status_str, ', ' if status_str else '', r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.info('Sent Discord {}.'.format( 'attachment' if attach else 'notification')) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred posting {}to Discord.'.format( attach.name if attach else '')) self.logger.debug('Socket Exception: %s' % str(e)) return False except (OSError, IOError) as e: self.logger.warning( 'An I/O error occurred while reading {}.'.format( attach.name if attach else 'attachment')) self.logger.debug('I/O Exception: %s' % str(e)) return False 
finally: # Close our file (if it's open) stored in the second element # of our files tuple (index 1) if files: files['file'][1].close() return True def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'tts': 'yes' if self.tts else 'no', 'avatar': 'yes' if self.avatar else 'no', 'footer': 'yes' if self.footer else 'no', 'footer_logo': 'yes' if self.footer_logo else 'no', 'image': 'yes' if self.include_image else 'no', 'fields': 'yes' if self.fields else 'no', } if self.avatar_url: params['avatar_url'] = self.avatar_url if self.href: params['href'] = self.href if self.thread_id: params['thread'] = self.thread_id # Ensure our botname is set botname = f'{self.user}@' if self.user else '' # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) return '{schema}://{botname}{webhook_id}/{webhook_token}/?{params}' \ .format( schema=self.secure_protocol, botname=botname, webhook_id=self.pprint(self.webhook_id, privacy, safe=''), webhook_token=self.pprint( self.webhook_token, privacy, safe=''), params=NotifyDiscord.urlencode(params), ) @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.webhook_id, self.webhook_token) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. Syntax: discord://webhook_id/webhook_token """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Store our webhook ID webhook_id = NotifyDiscord.unquote(results['host']) # Now fetch our tokens try: webhook_token = \ NotifyDiscord.split_path(results['fullpath'])[0] except IndexError: # Force some bad values that will get caught # in parsing later webhook_token = None results['webhook_id'] = webhook_id results['webhook_token'] = webhook_token # Text To Speech results['tts'] = parse_bool(results['qsd'].get('tts', False)) # Use sections # effectively detect multiple fields and break them off # into sections results['fields'] = parse_bool(results['qsd'].get('fields', True)) # Use Footer results['footer'] = parse_bool(results['qsd'].get('footer', False)) # Use Footer Logo results['footer_logo'] = \ parse_bool(results['qsd'].get('footer_logo', True)) # Update Avatar Icon results['avatar'] = parse_bool(results['qsd'].get('avatar', True)) # Boolean to include an image or not results['include_image'] = parse_bool(results['qsd'].get( 'image', NotifyDiscord.template_args['image']['default'])) if 'botname' in results['qsd']: # Alias to User results['user'] = \ NotifyDiscord.unquote(results['qsd']['botname']) # Extract avatar url if it was specified if 'avatar_url' in results['qsd']: results['avatar_url'] = \ NotifyDiscord.unquote(results['qsd']['avatar_url']) # Extract url if it was specified if 'href' in results['qsd']: results['href'] = \ NotifyDiscord.unquote(results['qsd']['href']) elif 'url' in results['qsd']: results['href'] = \ NotifyDiscord.unquote(results['qsd']['url']) # Markdown is implied results['format'] = NotifyFormat.MARKDOWN # Extract thread id if it was specified if 'thread' in results['qsd']: results['thread'] = \ NotifyDiscord.unquote(results['qsd']['thread']) # Markdown is implied results['format'] = NotifyFormat.MARKDOWN return results @staticmethod def 
parse_native_url(url): """ Support https://discord.com/api/webhooks/WEBHOOK_ID/WEBHOOK_TOKEN Support Legacy URL as well: https://discordapp.com/api/webhooks/WEBHOOK_ID/WEBHOOK_TOKEN """ result = re.match( r'^https?://discord(app)?\.com/api/webhooks/' r'(?P[0-9]+)/' r'(?P[A-Z0-9_-]+)/?' r'(?P\?.+)?$', url, re.I) if result: return NotifyDiscord.parse_url( '{schema}://{webhook_id}/{webhook_token}/{params}'.format( schema=NotifyDiscord.secure_protocol, webhook_id=result.group('webhook_id'), webhook_token=result.group('webhook_token'), params='' if not result.group('params') else result.group('params'))) return None @staticmethod def extract_markdown_sections(markdown): """ Takes a string in a markdown type format and extracts the headers and their corresponding sections into individual fields that get passed as an embed entry to Discord. """ # Search for any header information found without it's own section # identifier match = re.match( r'^\s*(?P[^\s#]+.*?)(?=\s*$|[\r\n]+\s*#)', markdown, flags=re.S) description = match.group('desc').strip() if match else '' if description: # Strip description from our string since it has been handled # now. markdown = re.sub(re.escape(description), '', markdown, count=1) regex = re.compile( r'\s*#[# \t\v]*(?P[^\n]+)(\n|\s*$)' r'\s*((?P[^#].+?)(?=\s*$|[\r\n]+\s*#))?', flags=re.S) common = regex.finditer(markdown) fields = list() for el in common: d = el.groupdict() fields.append({ 'name': d.get('name', '').strip('#`* \r\n\t\v'), 'value': '```{}\n{}```'.format( 'md' if d.get('value') else '', d.get('value').strip() + '\n' if d.get('value') else '', ), }) return description, fields apprise-1.9.3/apprise/plugins/email/000077500000000000000000000000001477231770000174125ustar00rootroot00000000000000apprise-1.9.3/apprise/plugins/email/__init__.py000066400000000000000000000040651477231770000215300ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. from email import charset from .base import NotifyEmail from .common import ( AppriseEmailException, EmailMessage, SecureMailMode, SECURE_MODES, WebBaseLogin) from .templates import EMAIL_TEMPLATES # Globally Default encoding mode set to Quoted Printable. 
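# Illustrative effect of the override below (sketch only): once 'utf-8' is
# registered with Quoted-Printable, MIME text parts built from unicode
# content are emitted as quoted-printable instead of base64, e.g.:
#
#     from email.mime.text import MIMEText
#     part = MIMEText('café', 'plain', 'utf-8')
#     # part['Content-Transfer-Encoding'] == 'quoted-printable'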
charset.add_charset('utf-8', charset.QP, charset.QP, 'utf-8') __all__ = [ # Reference 'NotifyEmail', # Pretty Good Privacy 'ApprisePGPController', 'ApprisePGPException', # Other 'AppriseEmailException', 'EmailMessage', 'SecureMailMode', 'SECURE_MODES', 'WebBaseLogin', # Additional entries that may be useful to some developers 'EMAIL_TEMPLATES', 'PGP_SUPPORT', ] apprise-1.9.3/apprise/plugins/email/base.py000066400000000000000000001127451477231770000207100ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import re import smtplib from email.mime.text import MIMEText from email.mime.application import MIMEApplication from email.mime.multipart import MIMEMultipart from email.mime.base import MIMEBase from email.utils import formataddr, make_msgid from email.header import Header from socket import error as SocketError from datetime import datetime from datetime import timezone from ..base import NotifyBase from ...url import PrivacyMode from ...common import NotifyFormat, NotifyType from ...conversion import convert_between from ...utils import pgp as _pgp from ...utils.parse import ( is_ipaddr, is_email, parse_emails, is_hostname, parse_bool) from ...locale import gettext_lazy as _ from ...logger import logger from .common import ( AppriseEmailException, EmailMessage, SecureMailMode, SECURE_MODES, WebBaseLogin) from . 
import templates class NotifyEmail(NotifyBase): """ A wrapper to Email Notifications """ # The default descriptive name associated with the Notification service_name = 'E-Mail' # The default simple (insecure) protocol protocol = 'mailto' # The default secure protocol secure_protocol = 'mailtos' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_email' # Support attachments attachment_support = True # Default Notify Format notify_format = NotifyFormat.HTML # Default SMTP Timeout (in seconds) socket_connect_timeout = 15 # Define object templates templates = ( '{schema}://{host}', '{schema}://{host}:{port}', '{schema}://{host}/{targets}', '{schema}://{host}:{port}/{targets}', '{schema}://{user}@{host}', '{schema}://{user}@{host}:{port}', '{schema}://{user}@{host}/{targets}', '{schema}://{user}@{host}:{port}/{targets}', '{schema}://{user}:{password}@{host}', '{schema}://{user}:{password}@{host}:{port}', '{schema}://{user}:{password}@{host}/{targets}', '{schema}://{user}:{password}@{host}:{port}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'user': { 'name': _('User Name'), 'type': 'string', }, 'password': { 'name': _('Password'), 'type': 'string', 'private': True, }, 'host': { 'name': _('Domain'), 'type': 'string', 'required': True, }, 'port': { 'name': _('Port'), 'type': 'int', 'min': 1, 'max': 65535, }, 'target_email': { 'name': _('Target Email'), 'type': 'string', 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', }, }) template_args = dict(NotifyBase.template_args, **{ 'to': { 'name': _('To Email'), 'type': 'string', 'map_to': 'targets', }, 'from': { 'name': _('From Email'), 'type': 'string', 'map_to': 'from_addr', }, 'name': { 'name': _('From Name'), 'type': 'string', 'map_to': 'from_addr', }, 'cc': { 'name': _('Carbon Copy'), 'type': 'list:string', }, 'bcc': { 'name': _('Blind Carbon Copy'), 'type': 'list:string', }, 'smtp': { 'name': _('SMTP Server'), 'type': 'string', 'map_to': 'smtp_host', }, 'mode': { 'name': _('Secure Mode'), 'type': 'choice:string', 'values': SECURE_MODES, 'default': SecureMailMode.STARTTLS, 'map_to': 'secure_mode', }, 'reply': { 'name': _('Reply To'), 'type': 'list:string', 'map_to': 'reply_to', }, 'pgp': { 'name': _('PGP Encryption'), 'type': 'bool', 'map_to': 'use_pgp', 'default': False, }, 'pgpkey': { 'name': _('PGP Public Key Path'), 'type': 'string', 'private': True, # By default persistent storage is referenced 'default': '', 'map_to': 'pgp_key', }, }) # Define any kwargs we're using template_kwargs = { 'headers': { 'name': _('Email Header'), 'prefix': '+', }, } def __init__(self, smtp_host=None, from_addr=None, secure_mode=None, targets=None, cc=None, bcc=None, reply_to=None, headers=None, use_pgp=None, pgp_key=None, **kwargs): """ Initialize Email Object The smtp_host and secure_mode can be automatically detected depending on how the URL was built """ super().__init__(**kwargs) # Acquire Email 'To' self.targets = list() # Acquire Carbon Copies self.cc = set() # Acquire Blind Carbon Copies self.bcc = set() # Acquire Reply To self.reply_to = set() # For tracking our email -> name lookups self.names = {} self.headers = {} if headers: # Store our extra headers self.headers.update(headers) # Now we want to construct the To and From email # addresses from the URL provided self.from_addr = [False, ''] # Now detect the SMTP Server self.smtp_host = \ smtp_host if isinstance(smtp_host, str) else '' # Now detect 
secure mode if secure_mode: self.secure_mode = None \ if not isinstance(secure_mode, str) \ else secure_mode.lower() else: self.secure_mode = SecureMailMode.INSECURE \ if not self.secure else self.template_args['mode']['default'] if self.secure_mode not in SECURE_MODES: msg = 'The secure mode specified ({}) is invalid.'\ .format(secure_mode) self.logger.warning(msg) raise TypeError(msg) # Validate recipients (cc:) and drop bad ones: for recipient in parse_emails(cc): email = is_email(recipient) if email: self.cc.add(email['full_email']) # Index our name (if one exists) self.names[email['full_email']] = \ email['name'] if email['name'] else False continue self.logger.warning( 'Dropped invalid Carbon Copy email ' '({}) specified.'.format(recipient), ) # Validate recipients (bcc:) and drop bad ones: for recipient in parse_emails(bcc): email = is_email(recipient) if email: self.bcc.add(email['full_email']) # Index our name (if one exists) self.names[email['full_email']] = \ email['name'] if email['name'] else False continue self.logger.warning( 'Dropped invalid Blind Carbon Copy email ' '({}) specified.'.format(recipient), ) # Validate recipients (reply-to:) and drop bad ones: for recipient in parse_emails(reply_to): email = is_email(recipient) if email: self.reply_to.add(email['full_email']) # Index our name (if one exists) self.names[email['full_email']] = \ email['name'] if email['name'] else False continue self.logger.warning( 'Dropped invalid Reply To email ' '({}) specified.'.format(recipient), ) # Apply any defaults based on certain known configurations self.apply_email_defaults(secure_mode=secure_mode, **kwargs) if self.user: if self.host: # Prepare the bases of our email self.from_addr = [self.app_id, '{}@{}'.format( re.split(r'[\s@]+', self.user)[0], self.host, )] else: result = is_email(self.user) if result: # Prepare the bases of our email and include domain self.host = result['domain'] self.from_addr = [self.app_id, self.user] if from_addr: result = is_email(from_addr) if result: self.from_addr = ( result['name'] if result['name'] else False, result['full_email']) else: # Only update the string but use the already detected info self.from_addr[0] = from_addr result = is_email(self.from_addr[1]) if not result: # Parse Source domain based on from_addr msg = 'Invalid ~From~ email specified: {}'.format( '{} <{}>'.format(self.from_addr[0], self.from_addr[1]) if self.from_addr[0] else '{}'.format(self.from_addr[1])) self.logger.warning(msg) raise TypeError(msg) # Store our lookup self.names[self.from_addr[1]] = self.from_addr[0] if targets: # Validate recipients (to:) and drop bad ones: for recipient in parse_emails(targets): result = is_email(recipient) if result: self.targets.append( (result['name'] if result['name'] else False, result['full_email'])) continue self.logger.warning( 'Dropped invalid To email ' '({}) specified.'.format(recipient), ) else: # If our target email list is empty we want to add ourselves to it self.targets.append((False, self.from_addr[1])) if not self.secure and self.secure_mode != SecureMailMode.INSECURE: # Enable Secure mode if not otherwise set self.secure = True if not self.port: # Assign our port based on our secure_mode if not otherwise # detected self.port = SECURE_MODES[self.secure_mode]['default_port'] # if there is still no smtp_host then we fall back to the hostname if not self.smtp_host: self.smtp_host = self.host # Prepare our Pretty Good Privacy Object self.pgp = _pgp.ApprisePGPController( path=self.store.path, pub_keyfile=pgp_key, 
email=self.from_addr[1], asset=self.asset) # We store so we can generate a URL later on self.pgp_key = pgp_key self.use_pgp = use_pgp if not None \ else self.template_args['pgp']['default'] if self.use_pgp and not _pgp.PGP_SUPPORT: self.logger.warning( 'PGP Support is not available on this installation; ' 'ask admin to install PGPy') return def apply_email_defaults(self, secure_mode=None, port=None, **kwargs): """ A function that prefills defaults based on the email it was provided. """ if self.smtp_host or not self.user: # SMTP Server was explicitly specified, therefore it is assumed # the caller knows what he's doing and is intentionally # over-riding any smarts to be applied. We also can not apply # any default if there was no user specified. return # detect our email address using our user/host combo from_addr = '{}@{}'.format( re.split(r'[\s@]+', self.user)[0], self.host, ) for i in range(len(templates.EMAIL_TEMPLATES)): # pragma: no branch self.logger.trace('Scanning %s against %s' % ( from_addr, templates.EMAIL_TEMPLATES[i][0] )) match = templates.EMAIL_TEMPLATES[i][1].match(from_addr) if match: self.logger.info( 'Applying %s Defaults' % templates.EMAIL_TEMPLATES[i][0], ) # the secure flag can not be altered if defined in the template self.secure = templates.EMAIL_TEMPLATES[i][2]\ .get('secure', self.secure) # The SMTP Host check is already done above; if it was # specified we wouldn't even reach this part of the code. self.smtp_host = templates.EMAIL_TEMPLATES[i][2]\ .get('smtp_host', self.smtp_host) # The following can be over-ridden if defined manually in the # Apprise URL. Otherwise they take on the template value if not port: self.port = templates.EMAIL_TEMPLATES[i][2]\ .get('port', self.port) if not secure_mode: self.secure_mode = templates.EMAIL_TEMPLATES[i][2]\ .get('secure_mode', self.secure_mode) # Adjust email login based on the defined usertype. If no entry # was specified, then we default to having them all set (which # basically implies that there are no restrictions and use use # whatever was specified) login_type = \ templates.EMAIL_TEMPLATES[i][2].get('login_type', []) if login_type: # only apply additional logic to our user if a login_type # was specified. 
if is_email(self.user): if WebBaseLogin.EMAIL not in login_type: # Email specified but login type # not supported; switch it to user id self.user = match.group('id') else: # Enforce our host information self.host = self.user.split('@')[1] elif WebBaseLogin.USERID not in login_type: # user specified but login type # not supported; switch it to email self.user = '{}@{}'.format(self.user, self.host) break def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, **kwargs): if not self.targets: # There is no one to email; we're done logger.warning('There are no Email recipients to notify') return False # error tracking (used for function return) has_error = False # bind the socket variable to the current namespace socket = None # Always call throttle before any remote server i/o is made self.throttle() try: self.logger.debug('Connecting to remote SMTP server...') socket_func = smtplib.SMTP if self.secure_mode == SecureMailMode.SSL: self.logger.debug('Securing connection with SSL...') socket_func = smtplib.SMTP_SSL socket = socket_func( self.smtp_host, self.port, None, timeout=self.socket_connect_timeout, ) if self.secure_mode == SecureMailMode.STARTTLS: # Handle Secure Connections self.logger.debug('Securing connection with STARTTLS...') socket.starttls() self.logger.trace('Login ID: {}'.format(self.user)) if self.user and self.password: # Apply Login credetials self.logger.debug('Applying user credentials...') socket.login(self.user, self.password) # Prepare our headers headers = { 'X-Application': self.app_id, } headers.update(self.headers) # Iterate over our email messages we can generate and then # send them off. for message in NotifyEmail.prepare_emails( subject=title, body=body, notify_format=self.notify_format, from_addr=self.from_addr, to=self.targets, cc=self.cc, bcc=self.bcc, reply_to=self.reply_to, smtp_host=self.smtp_host, attach=attach, headers=headers, names=self.names, pgp=self.pgp if self.use_pgp else None): try: socket.sendmail( self.from_addr[1], message.to_addrs, message.body) self.logger.info('Sent Email to %s', message.recipient) except (SocketError, smtplib.SMTPException, RuntimeError) as e: self.logger.warning( 'Sending email to "%s" failed.', message.recipient) self.logger.debug(f'Socket Exception: {e}') # Mark as failure has_error = True except (SocketError, smtplib.SMTPException, RuntimeError) as e: self.logger.warning( 'Connection error while submitting email to "%s"', self.smtp_host) self.logger.debug(f'Socket Exception: {e}') # Mark as failure has_error = True except AppriseEmailException as e: self.logger.debug(f'Socket Exception: {e}') # Mark as failure has_error = True finally: # Gracefully terminate the connection with the server if socket is not None: socket.quit() # Reduce our dictionary (eliminate expired keys if any) self.pgp.prune() return not has_error def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
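        An illustrative result (placeholder values, with privacy masking
        applied) might look something like:

            mailtos://user:****@example.com/?mode=starttls&pgp=no

        The exact output depends entirely on how this object was configured.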
""" # Define an URL parameters params = { 'pgp': 'yes' if self.use_pgp else 'no', } # Store our public key back into your URL if self.pgp_key is not None: params['pgp_key'] = NotifyEmail.quote(self.pgp_key, safe=':\\/') # Append our headers into our parameters params.update({'+{}'.format(k): v for k, v in self.headers.items()}) # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) from_addr = None if len(self.targets) == 1 and self.targets[0][1] != self.from_addr[1]: # A custom email was provided from_addr = self.from_addr[1] if self.smtp_host != self.host: # Apply our SMTP Host only if it differs from the provided hostname params['smtp'] = self.smtp_host if self.secure: # Mode is only requried if we're dealing with a secure connection params['mode'] = self.secure_mode if self.from_addr[0] and self.from_addr[0] != self.app_id: # A custom name was provided params['from'] = self.from_addr[0] if not from_addr else \ formataddr((self.from_addr[0], from_addr), charset='utf-8') elif from_addr: params['from'] = formataddr((False, from_addr), charset='utf-8') elif not self.user: params['from'] = \ formataddr((False, self.from_addr[1]), charset='utf-8') if self.cc: # Handle our Carbon Copy Addresses params['cc'] = ','.join([ formataddr( (self.names[e] if e in self.names else False, e), # Swap comma for it's escaped url code (if detected) since # we're using that as a delimiter charset='utf-8').replace(',', '%2C') for e in self.cc]) if self.bcc: # Handle our Blind Carbon Copy Addresses params['bcc'] = ','.join([ formataddr( (self.names[e] if e in self.names else False, e), # Swap comma for it's escaped url code (if detected) since # we're using that as a delimiter charset='utf-8').replace(',', '%2C') for e in self.bcc]) if self.reply_to: # Handle our Reply-To Addresses params['reply'] = ','.join([ formataddr( (self.names[e] if e in self.names else False, e), # Swap comma for it's escaped url code (if detected) since # we're using that as a delimiter charset='utf-8').replace(',', '%2C') for e in self.reply_to]) # pull email suffix from username (if present) user = None if not self.user else self.user.split('@')[0] # Determine Authentication auth = '' if self.user and self.password: auth = '{user}:{password}@'.format( user=NotifyEmail.quote(user, safe=''), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), ) elif user: # user url auth = '{user}@'.format( user=NotifyEmail.quote(user, safe=''), ) # Default Port setup default_port = SECURE_MODES[self.secure_mode]['default_port'] # a simple boolean check as to whether we display our target emails # or not has_targets = \ not (len(self.targets) == 1 and self.targets[0][1] == self.from_addr[1]) return '{schema}://{auth}{hostname}{port}/{targets}?{params}'.format( schema=self.secure_protocol if self.secure else self.protocol, auth=auth, # never encode hostname since we're expecting it to be a valid one hostname=self.host, port='' if self.port is None or self.port == default_port else ':{}'.format(self.port), targets='' if not has_targets else '/'.join( [NotifyEmail.quote('{}{}'.format( '' if not e[0] else '{}:'.format(e[0]), e[1]), safe='') for e in self.targets]), params=NotifyEmail.urlencode(params), ) @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. 
""" return ( self.secure_protocol if self.secure else self.protocol, self.user, self.password, self.host, self.smtp_host, self.port if self.port else SECURE_MODES[self.secure_mode]['default_port'], ) def __len__(self): """ Returns the number of targets associated with this notification """ return len(self.targets) if self.targets else 1 @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Prepare our target lists results['targets'] = [] if is_ipaddr(results['host']): # Silently move on and do not disrupt any configuration pass elif not is_hostname(results['host'], ipv4=False, ipv6=False, underscore=False): if is_email(NotifyEmail.unquote(results['host'])): # Don't lose defined email addresses results['targets'].append(NotifyEmail.unquote(results['host'])) # Detect if we have a valid hostname or not; be sure to reset it's # value if invalid; we'll attempt to figure this out later on results['host'] = '' # Get PGP Flag results['use_pgp'] = \ parse_bool(results['qsd'].get( 'pgp', NotifyEmail.template_args['pgp']['default'])) # Get PGP Public Key Override if 'pgpkey' in results['qsd'] and results['qsd']['pgpkey']: results['pgp_key'] = \ NotifyEmail.unquote(results['qsd']['pgpkey']) # The From address is a must; either through the use of templates # from= entry and/or merging the user and hostname together, this # must be calculated or parse_url will fail. from_addr = '' # The server we connect to to send our mail to smtp_host = '' # Get our potential email targets; if none our found we'll just # add one to ourselves results['targets'] += NotifyEmail.split_path(results['fullpath']) # Attempt to detect 'to' email address if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'].append(results['qsd']['to']) # Attempt to detect 'from' email address if 'from' in results['qsd'] and len(results['qsd']['from']): from_addr = NotifyEmail.unquote(results['qsd']['from']) if 'name' in results['qsd'] and len(results['qsd']['name']): from_addr = formataddr( (NotifyEmail.unquote(results['qsd']['name']), from_addr), charset='utf-8') elif 'name' in results['qsd'] and len(results['qsd']['name']): # Extract from name to associate with from address from_addr = NotifyEmail.unquote(results['qsd']['name']) # Store SMTP Host if specified if 'smtp' in results['qsd'] and len(results['qsd']['smtp']): # Extract the smtp server smtp_host = NotifyEmail.unquote(results['qsd']['smtp']) if 'mode' in results['qsd'] and len(results['qsd']['mode']): # Extract the secure mode to over-ride the default results['secure_mode'] = results['qsd']['mode'].lower() # Handle Carbon Copy Addresses if 'cc' in results['qsd'] and len(results['qsd']['cc']): results['cc'] = results['qsd']['cc'] # Handle Blind Carbon Copy Addresses if 'bcc' in results['qsd'] and len(results['qsd']['bcc']): results['bcc'] = results['qsd']['bcc'] # Handle Reply To Addresses if 'reply' in results['qsd'] and len(results['qsd']['reply']): results['reply_to'] = results['qsd']['reply'] results['from_addr'] = from_addr results['smtp_host'] = smtp_host # Add our Meta Headers that the user can provide with their outbound # emails results['headers'] = {NotifyBase.unquote(x): NotifyBase.unquote(y) for x, y in results['qsd+'].items()} return results @staticmethod def _get_charset(input_string): """ Get utf-8 charset if non ascii string only 
Encode an ascii string to utf-8 is bad for email deliverability because some anti-spam gives a bad score for that like SUBJ_EXCESS_QP flag on Rspamd """ if not input_string: return None return 'utf-8' if not all(ord(c) < 128 for c in input_string) else None @staticmethod def prepare_emails(subject, body, from_addr, to, cc=set(), bcc=set(), reply_to=set(), # Providing an SMTP Host helps improve Email Message-ID # and avoids getting flagged as spam smtp_host=None, # Can be either 'html' or 'text' notify_format=NotifyFormat.HTML, attach=None, headers=dict(), # Names can be a dictionary names=None, # Pretty Good Privacy Support; Pass in an # ApprisePGPController if you wish to use it pgp=None): """ Generator for emails from_addr: must be in format: (from_name, from_addr) to: must be in the format: [(to_name, to_addr), (to_name, to_addr)), ...] cc: must be a set of email addresses bcc: must be a set of email addresses reply_to: must be either None, or an email address smtp_host: This is used to generate the email's Message-ID. Set this correctly to avoid getting flagged as Spam notify_format: can be either 'text' or 'html' attach: must be of class AppriseAttachment headers: Optionally provide a dictionary of additional headers you would like to include in the email payload names: This is a dictionary of email addresses as keys and the Names to associate with them when sending the email. This is cross referenced for the cc and bcc lists pgp: Encrypting the message using Pretty Good Privacy support This requires that the pgp_path provided exists and keys can be referenced here to perform the encryption with. If a key isn't found, one will be generated. pgp support requires the 'PGPy' Python library to be available. Pass in an ApprisePGPController() if you wish to use this """ if not to: # There is no one to email; we're done msg = 'There are no Email recipients to notify' logger.warning(msg) raise AppriseEmailException(msg) elif pgp and not _pgp.PGP_SUPPORT: msg = 'PGP Support unavailable; install PGPy library' logger.warning(msg) raise AppriseEmailException(msg) if not names: # Prepare a empty dictionary to prevent errors/warnings names = {} if not smtp_host: # Generate a host identifier (used for Message-ID Creation) smtp_host = from_addr[1].split('@')[1] logger.debug('SMTP Host: {smtp_host}') # Create a copy of the targets list emails = list(to) while len(emails): # Get our email to notify to_name, to_addr = emails.pop(0) # Strip target out of cc list if in To or Bcc _cc = (cc - bcc - set([to_addr])) # Strip target out of bcc list if in To _bcc = (bcc - set([to_addr])) # Strip target out of reply_to list if in To _reply_to = (reply_to - set([to_addr])) # Format our cc addresses to support the Name field _cc = [formataddr( (names.get(addr, False), addr), charset='utf-8') for addr in _cc] # Format our bcc addresses to support the Name field _bcc = [formataddr( (names.get(addr, False), addr), charset='utf-8') for addr in _bcc] if reply_to: # Format our reply-to addresses to support the Name field reply_to = [formataddr( (names.get(addr, False), addr), charset='utf-8') for addr in reply_to] logger.debug( 'Email From: {}'.format( formataddr(from_addr, charset='utf-8'))) logger.debug('Email To: {}'.format(to_addr)) if _cc: logger.debug('Email Cc: {}'.format(', '.join(_cc))) if _bcc: logger.debug('Email Bcc: {}'.format(', '.join(_bcc))) if _reply_to: logger.debug( 'Email Reply-To: {}'.format(', '.join(_reply_to)) ) # Prepare Email Message if notify_format == NotifyFormat.HTML: base = 
MIMEMultipart("alternative") base.attach(MIMEText( convert_between( NotifyFormat.HTML, NotifyFormat.TEXT, body), 'plain', 'utf-8') ) base.attach(MIMEText(body, 'html', 'utf-8')) else: base = MIMEText(body, 'plain', 'utf-8') if attach: mixed = MIMEMultipart("mixed") mixed.attach(base) # Now store our attachments for no, attachment in enumerate(attach, start=1): if not attachment: # We could not load the attachment; take an early # exit since this isn't what the end user wanted # We could not access the attachment msg = 'Could not access attachment {}.'.format( attachment.url(privacy=True)) logger.warning(msg) raise AppriseEmailException(msg) logger.debug( 'Preparing Email attachment {}'.format( attachment.url(privacy=True))) with open(attachment.path, "rb") as abody: app = MIMEApplication(abody.read()) app.set_type(attachment.mimetype) # Prepare our attachment name filename = attachment.name \ if attachment.name else f'file{no:03}.dat' app.add_header( 'Content-Disposition', 'attachment; filename="{}"'.format( Header(filename, 'utf-8')), ) mixed.attach(app) base = mixed if pgp: logger.debug("Securing Email with PGP Encryption") # Set our header information to include in the encryption base['From'] = formataddr( (None, from_addr[1]), charset='utf-8') base['To'] = formataddr((None, to_addr), charset='utf-8') base['Subject'] = \ Header(subject, NotifyEmail._get_charset(subject)) # Apply our encryption encrypted_content = \ pgp.encrypt(base.as_string(), to_addr) if not encrypted_content: # Unable to send notification msg = 'Unable to encrypt email via PGP' logger.warning(msg) raise AppriseEmailException(msg) # prepare our messsage base = MIMEMultipart( "encrypted", protocol="application/pgp-encrypted") # Store Autocrypt header (DeltaChat Support) base.add_header( "Autocrypt", "addr=%s; prefer-encrypt=mutual" % formataddr( (False, to_addr), charset='utf-8')) # Set Encryption Info Part enc_payload = MIMEText("Version: 1", "plain") enc_payload.set_type("application/pgp-encrypted") base.attach(enc_payload) enc_payload = MIMEBase("application", "octet-stream") enc_payload.set_payload(encrypted_content) base.attach(enc_payload) # Apply any provided custom headers for k, v in headers.items(): base[k] = Header(v, NotifyEmail._get_charset(v)) base['Subject'] = \ Header(subject, NotifyEmail._get_charset(subject)) base['From'] = formataddr(from_addr, charset='utf-8') base['To'] = formataddr((to_name, to_addr), charset='utf-8') base['Message-ID'] = make_msgid(domain=smtp_host) base['Date'] = \ datetime.now(timezone.utc)\ .strftime("%a, %d %b %Y %H:%M:%S +0000") if cc: base['Cc'] = ','.join(_cc) if reply_to: base['Reply-To'] = ','.join(_reply_to) yield EmailMessage( recipient=to_addr, to_addrs=[to_addr] + list(_cc) + list(_bcc), body=base.as_string()) apprise-1.9.3/apprise/plugins/email/common.py000066400000000000000000000050111477231770000212510ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. 
# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import dataclasses import typing as t from ...exception import ApprisePluginException class AppriseEmailException(ApprisePluginException): """ Thrown when there is an error with the Email Attachment """ def __init__(self, message, error_code=601): super().__init__(message, error_code=error_code) class WebBaseLogin: """ This class is just used in conjunction of the default emailers to best formulate a login to it using the data detected """ # User Login must be Email Based EMAIL = 'Email' # User Login must UserID Based USERID = 'UserID' # Secure Email Modes class SecureMailMode: INSECURE = "insecure" SSL = "ssl" STARTTLS = "starttls" # Define all of the secure modes (used during validation) SECURE_MODES = { SecureMailMode.STARTTLS: { 'default_port': 587, }, SecureMailMode.SSL: { 'default_port': 465, }, SecureMailMode.INSECURE: { 'default_port': 25, }, } @dataclasses.dataclass class EmailMessage: """ Our message structure """ recipient: str to_addrs: t.List[str] body: str apprise-1.9.3/apprise/plugins/email/templates.py000066400000000000000000000222401477231770000217620ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
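# The table defined in this module is consumed by
# NotifyEmail.apply_email_defaults() to prefill provider specific settings.
# As a hedged illustration (the values quoted are the commonly documented
# Gmail ones), a URL as simple as:
#
#     mailtos://myuser:mypass@gmail.com
#
# would typically end up using smtp.gmail.com over port 587 with STARTTLS and
# an email style login, without any of that being spelled out by the user.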
import re from .common import (SecureMailMode, WebBaseLogin) # To attempt to make this script stupid proof, if we detect an email address # that is part of the this table, we can pre-use a lot more defaults if they # aren't otherwise specified on the users input. EMAIL_TEMPLATES = ( # Google GMail ( 'Google Mail', re.compile( r'^((?P


", # "reblog":null, # "application":{ # "name":"Apprise Notifications", # "website":"https://github.com/caronc/apprise" # }, # "account":{ # "id":"109310334138718878", # "username":"caronc", # "acct":"caronc", # "display_name":"Chris", # "locked":false, # "bot":false, # "discoverable":false, # "group":false, # "created_at":"2022-11-08T00:00:00.000Z", # "note":"content", # "url":"https://host/@caronc", # "avatar":"https://host/path/file.png", # "avatar_static":"https://host/path/file.png", # "header":"https://host/headers/original/missing.png", # "header_static":"https://host/path/missing.png", # "followers_count":0, # "following_count":0, # "statuses_count":15, # "last_status_at":"2022-11-09", # "emojis":[ # # ], # "fields":[ # # ] # }, # "media_attachments":[ # { # "id":"109315796405707501", # "type":"image", # "url":"https://host/path/file.jpeg", # "preview_url":"https://host/path/file.jpeg", # "remote_url":null, # "preview_remote_url":null, # "text_url":null, # "meta":{ # "original":{ # "width":640, # "height":640, # "size":"640x640", # "aspect":1.0 # }, # "small":{ # "width":400, # "height":400, # "size":"400x400", # "aspect":1.0 # } # }, # "description":null, # "blurhash":"UmIsdJnT^mX4V@XQofnQ~Ebq%4o3ofnQjZbt" # } # ], # "mentions":[ # # ], # "tags":[ # # ], # "emojis":[ # # ], # "card":null, # "poll":null # } try: url = '{}/web/@{}'.format( self.api_url, response['account']['username']) except (KeyError, TypeError): url = 'unknown' self.logger.debug( 'Mastodon [%.2d/%.2d] (%d attached) delivered to %s', no, len(payloads), len(payload.get('media_ids', [])), url) self.logger.info( 'Sent [%.2d/%.2d] Mastodon notification as public toot.', no, len(payloads)) return not has_error def _whoami(self, lazy=True): """ Looks details of current authenticated user """ if lazy and self._whoami_cache is not None: # Use cached response return self._whoami_cache # Send Mastodon Whoami request postokay, response = self._request( self.mastodon_whoami, method='GET', ) if postokay: # Sample Response: # { # 'id': '12345', # 'username': 'caronc', # 'acct': 'caronc', # 'display_name': 'Chris', # 'locked': False, # 'bot': False, # 'discoverable': False, # 'group': False, # 'created_at': '2022-11-08T00:00:00.000Z', # 'note': 'details', # 'url': 'https://noc.social/@caronc', # 'avatar': 'https://host/path/image.png', # 'avatar_static': 'https://host/path/image.png', # 'header': 'https://host/path/missing.png', # 'header_static': 'https://host/path/missing.png', # 'followers_count': 0, # 'following_count': 0, # 'statuses_count': 2, # 'last_status_at': '2022-11-09', # 'source': { # 'privacy': 'public', # 'sensitive': False, # 'language': None, # 'note': 'details', # 'fields': [], # 'follow_requests_count': 0 # }, # 'emojis': [], # 'fields': [] # } try: # Cache our response for future references self._whoami_cache = { response['username']: response['id']} except (TypeError, KeyError): pass elif response and 'authorized scopes' in response.get('error', ''): self.logger.warning( 'Failed to lookup Mastodon Auth details; ' 'missing scope: read:accounts') return self._whoami_cache if postokay else {} def _request(self, path, payload=None, method='POST'): """ Wrapper to Mastodon API requests object """ headers = { 'User-Agent': self.app_id, 'Authorization': f'Bearer {self.token}', } data = None files = None # Prepare our message url = '{}{}'.format(self.api_url, path) # Some Debug Logging self.logger.debug('Mastodon {} URL: {} (cert_verify={})'.format( method, url, self.verify_certificate)) # Open our attachment path 
if required: if isinstance(payload, AttachBase): # prepare payload files = { 'file': (payload.name, open(payload.path, 'rb'), 'application/octet-stream')} # Provide a description data = { 'description': payload.name, } else: headers['Content-Type'] = 'application/json' data = dumps(payload) self.logger.debug('Mastodon Payload: %s' % str(payload)) # Default content response object content = {} # By default set wait to None wait = None if self.ratelimit_remaining == 0: # Determine how long we should wait for or if we should wait at # all. This isn't fool-proof because we can't be sure the client # time (calling this script) is completely synced up with the # Mastodon server. One would hope we're on NTP and our clocks are # the same allowing this to role smoothly: now = datetime.now(timezone.utc).replace(tzinfo=None) if now < self.ratelimit_reset: # We need to throttle for the difference in seconds # We add 0.5 seconds to the end just to allow a grace # period. wait = (self.ratelimit_reset - now).total_seconds() + 0.5 # Always call throttle before any remote server i/o is made; self.throttle(wait=wait) # acquire our request mode fn = requests.post if method == 'POST' else requests.get try: r = fn( url, data=data, files=files, headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) try: content = loads(r.content) except (AttributeError, TypeError, ValueError): # ValueError = r.content is Unparsable # TypeError = r.content is None # AttributeError = r is None content = {} if r.status_code not in ( requests.codes.ok, requests.codes.created, requests.codes.accepted): # We had a problem status_str = \ NotifyMastodon.http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send Mastodon {} to {}: ' '{}error={}.'.format( method, url, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure return (False, content) try: # Capture rate limiting if possible self.ratelimit_remaining = \ int(r.headers.get('X-RateLimit-Remaining')) self.ratelimit_reset = datetime.fromtimestamp( int(r.headers.get('X-RateLimit-Limit')), timezone.utc ).replace(tzinfo=None) except (TypeError, ValueError): # This is returned if we could not retrieve this information # gracefully accept this state and move on pass except requests.RequestException as e: self.logger.warning( 'Exception received when sending Mastodon {} to {}: '. format(method, url)) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure return (False, content) except (OSError, IOError) as e: self.logger.warning( 'An I/O error occurred while handling {}.'.format( payload.name if isinstance(payload, AttachBase) else payload)) self.logger.debug('I/O Exception: %s' % str(e)) return (False, content) finally: # Close our file (if it's open) stored in the second element # of our files tuple (index 1) if files: files['file'][1].close() return (True, content) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. 
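        An illustrative (placeholder) URL this parser would accept:

            mastodons://access_token@mastodon.example.org/@targetuser

        The host, token and target shown above are examples only.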
""" results = NotifyBase.parse_url(url) if not results: # We're done early as we couldn't load the results return results if 'token' in results['qsd'] and len(results['qsd']['token']): results['token'] = NotifyMastodon.unquote(results['qsd']['token']) elif not results['password'] and results['user']: results['token'] = NotifyMastodon.unquote(results['user']) # Apply our targets results['targets'] = NotifyMastodon.split_path(results['fullpath']) # The defined Mastodon visibility if 'visibility' in results['qsd'] and \ len(results['qsd']['visibility']): # Simplified version results['visibility'] = \ NotifyMastodon.unquote(results['qsd']['visibility']) elif results['schema'].startswith('toot'): results['visibility'] = MastodonMessageVisibility.PUBLIC # Get Idempotency Key (if specified) if 'key' in results['qsd'] and len(results['qsd']['key']): results['key'] = \ NotifyMastodon.unquote(results['qsd']['key']) # Get Spoiler Text if 'spoiler' in results['qsd'] and len(results['qsd']['spoiler']): results['spoiler'] = \ NotifyMastodon.unquote(results['qsd']['spoiler']) # Get Language (if specified) if 'language' in results['qsd'] and len(results['qsd']['language']): results['language'] = \ NotifyMastodon.unquote(results['qsd']['language']) # Get Sensitive Flag (for Attachments) results['sensitive'] = \ parse_bool(results['qsd'].get( 'sensitive', NotifyMastodon.template_args['sensitive']['default'])) # Get Batch Mode Flag results['batch'] = \ parse_bool(results['qsd'].get( 'batch', NotifyMastodon.template_args['batch']['default'])) # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyMastodon.parse_list(results['qsd']['to']) return results apprise-1.9.3/apprise/plugins/matrix.py000066400000000000000000001734541477231770000202170ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
# Great sources # - https://github.com/matrix-org/matrix-python-sdk # - https://github.com/matrix-org/synapse/blob/master/docs/reverse_proxy.rst # import re import requests import uuid from markdown import markdown from json import dumps from json import loads from time import time from .base import NotifyBase from ..url import PrivacyMode from ..exception import AppriseException from ..common import NotifyType from ..common import NotifyImageSize from ..common import NotifyFormat from ..common import PersistentStoreMode from ..utils.parse import ( parse_bool, parse_list, is_hostname, validate_regex) from ..locale import gettext_lazy as _ # Define default path MATRIX_V1_WEBHOOK_PATH = '/api/v1/matrix/hook' MATRIX_V2_API_PATH = '/_matrix/client/r0' MATRIX_V3_API_PATH = '/_matrix/client/v3' MATRIX_V3_MEDIA_PATH = '/_matrix/media/v3' MATRIX_V2_MEDIA_PATH = '/_matrix/media/r0' class MatrixDiscoveryException(AppriseException): """ Apprise Matrix Exception Class """ # Extend HTTP Error Messages MATRIX_HTTP_ERROR_MAP = { 403: 'Unauthorized - Invalid Token.', 429: 'Rate limit imposed; wait 2s and try again', } # Matrix Room Syntax IS_ROOM_ALIAS = re.compile( r'^\s*(#|%23)?(?P[a-z0-9-]+)((:|%3A)' r'(?P[a-z0-9.-]+))?\s*$', re.I) # Room ID MUST start with an exclamation to avoid ambiguity IS_ROOM_ID = re.compile( r'^\s*(!|!|%21)(?P[a-z0-9-]+)((:|%3A)' r'(?P[a-z0-9.-]+))?\s*$', re.I) class MatrixMessageType: """ The Matrix Message types """ TEXT = "text" NOTICE = "notice" # matrix message types are placed into this list for validation purposes MATRIX_MESSAGE_TYPES = ( MatrixMessageType.TEXT, MatrixMessageType.NOTICE, ) class MatrixVersion: # Version 2 V2 = "2" # Version 3 V3 = "3" # webhook modes are placed into this list for validation purposes MATRIX_VERSIONS = ( MatrixVersion.V2, MatrixVersion.V3, ) class MatrixWebhookMode: # Webhook Mode is disabled DISABLED = "off" # The default webhook mode is to just be set to Matrix MATRIX = "matrix" # Support the slack webhook plugin SLACK = "slack" # Support the t2bot webhook plugin T2BOT = "t2bot" # webhook modes are placed into this list for validation purposes MATRIX_WEBHOOK_MODES = ( MatrixWebhookMode.DISABLED, MatrixWebhookMode.MATRIX, MatrixWebhookMode.SLACK, MatrixWebhookMode.T2BOT, ) class NotifyMatrix(NotifyBase): """ A wrapper for Matrix Notifications """ # The default descriptive name associated with the Notification service_name = 'Matrix' # The services URL service_url = 'https://matrix.org/' # The default protocol protocol = 'matrix' # The default secure protocol secure_protocol = 'matrixs' # Support Attachments attachment_support = True # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_matrix' # Allows the user to specify the NotifyImageSize object image_size = NotifyImageSize.XY_32 # The maximum allowable characters allowed in the body per message # https://spec.matrix.org/v1.6/client-server-api/#size-limits # The complete event MUST NOT be larger than 65536 bytes, when formatted # with the federation event format, including any signatures, and encoded # as Canonical JSON. # # To gracefully allow for some overhead' we'll define a max body length # of just slighty lower then the limit of the full message itself. body_maxlen = 65000 # Throttle a wee-bit to avoid thrashing request_rate_per_sec = 0.5 # How many retry attempts we'll make in the event the server asks us to # throttle back. 
default_retries = 2 # The number of micro seconds to wait if we get a 429 error code and # the server doesn't remind us how long we shoul wait for default_wait_ms = 1000 # Our default is to no not use persistent storage beyond in-memory # reference storage_mode = PersistentStoreMode.AUTO # Keep our cache for 20 days default_cache_expiry_sec = 60 * 60 * 24 * 20 # Used for server discovery discovery_base_key = '__discovery_base' discovery_identity_key = '__discovery_identity' # Defines how long we cache our discovery for discovery_cache_length_sec = 86400 # Define object templates templates = ( # Targets are ignored when using t2bot mode; only a token is required '{schema}://{token}', '{schema}://{user}@{token}', # Matrix Server '{schema}://{user}:{password}@{host}/{targets}', '{schema}://{user}:{password}@{host}:{port}/{targets}', '{schema}://{token}@{host}/{targets}', '{schema}://{token}@{host}:{port}/{targets}', # Webhook mode '{schema}://{user}:{token}@{host}/{targets}', '{schema}://{user}:{token}@{host}:{port}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'host': { 'name': _('Hostname'), 'type': 'string', }, 'port': { 'name': _('Port'), 'type': 'int', 'min': 1, 'max': 65535, }, 'user': { 'name': _('Username'), 'type': 'string', }, 'password': { 'name': _('Password'), 'type': 'string', 'private': True, }, 'token': { 'name': _('Access Token'), 'private': True, 'map_to': 'password', }, 'target_user': { 'name': _('Target User'), 'type': 'string', 'prefix': '@', 'map_to': 'targets', }, 'target_room_id': { 'name': _('Target Room ID'), 'type': 'string', 'prefix': '!', 'map_to': 'targets', }, 'target_room_alias': { 'name': _('Target Room Alias'), 'type': 'string', 'prefix': '!', 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'image': { 'name': _('Include Image'), 'type': 'bool', 'default': False, 'map_to': 'include_image', }, 'discovery': { 'name': _('Server Discovery'), 'type': 'bool', 'default': True, }, 'mode': { 'name': _('Webhook Mode'), 'type': 'choice:string', 'values': MATRIX_WEBHOOK_MODES, 'default': MatrixWebhookMode.DISABLED, }, 'version': { 'name': _('Matrix API Verion'), 'type': 'choice:string', 'values': MATRIX_VERSIONS, 'default': MatrixVersion.V3, }, 'msgtype': { 'name': _('Message Type'), 'type': 'choice:string', 'values': MATRIX_MESSAGE_TYPES, 'default': MatrixMessageType.TEXT, }, 'to': { 'alias_of': 'targets', }, 'token': { 'alias_of': 'token', }, }) def __init__(self, targets=None, mode=None, msgtype=None, version=None, include_image=None, discovery=None, **kwargs): """ Initialize Matrix Object """ super().__init__(**kwargs) # Prepare a list of rooms to connect and notify self.rooms = parse_list(targets) # our home server gets populated after a login/registration self.home_server = None # our user_id gets populated after a login/registration self.user_id = None # This gets initialized after a login/registration self.access_token = None # This gets incremented for each request made against the v3 API self.transaction_id = 0 # Place an image inline with the message body self.include_image = self.template_args['image']['default'] \ if include_image is None else include_image # Prepare Delegate Server Lookup Check self.discovery = self.template_args['discovery']['default'] \ if discovery is None else discovery # Setup our mode self.mode = self.template_args['mode']['default'] \ if not isinstance(mode, str) 
else mode.lower() if self.mode and self.mode not in MATRIX_WEBHOOK_MODES: msg = 'The mode specified ({}) is invalid.'.format(mode) self.logger.warning(msg) raise TypeError(msg) # Setup our version self.version = self.template_args['version']['default'] \ if not isinstance(version, str) else version if self.version not in MATRIX_VERSIONS: msg = 'The version specified ({}) is invalid.'.format(version) self.logger.warning(msg) raise TypeError(msg) # Setup our message type self.msgtype = self.template_args['msgtype']['default'] \ if not isinstance(msgtype, str) else msgtype.lower() if self.msgtype and self.msgtype not in MATRIX_MESSAGE_TYPES: msg = 'The msgtype specified ({}) is invalid.'.format(msgtype) self.logger.warning(msg) raise TypeError(msg) if self.mode == MatrixWebhookMode.T2BOT: # t2bot configuration requires that a webhook id is specified self.access_token = validate_regex( self.password, r'^[a-z0-9]{64}$', 'i') if not self.access_token: msg = 'An invalid T2Bot/Matrix Webhook ID ' \ '({}) was specified.'.format(self.password) self.logger.warning(msg) raise TypeError(msg) elif not is_hostname(self.host): msg = 'An invalid Matrix Hostname ({}) was specified'\ .format(self.host) self.logger.warning(msg) raise TypeError(msg) else: # Verify port if specified if self.port is not None and not ( isinstance(self.port, int) and self.port >= self.template_tokens['port']['min'] and self.port <= self.template_tokens['port']['max']): msg = 'An invalid Matrix Port ({}) was specified'\ .format(self.port) self.logger.warning(msg) raise TypeError(msg) if self.mode != MatrixWebhookMode.DISABLED: # Discovery only works when we're not using webhooks self.discovery = False # # Initialize from cache if present # if self.mode != MatrixWebhookMode.T2BOT: # our home server gets populated after a login/registration self.home_server = self.store.get('home_server') # our user_id gets populated after a login/registration self.user_id = self.store.get('user_id') # This gets initialized after a login/registration self.access_token = self.store.get('access_token') # This gets incremented for each request made against the v3 API self.transaction_id = 0 if not self.access_token \ else self.store.get('transaction_id', 0) def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Matrix Notification """ # Call the _send_ function applicable to whatever mode we're in # - calls _send_webhook_notification if the mode variable is set # - calls _send_server_notification if the mode variable is not set return getattr(self, '_send_{}_notification'.format( 'webhook' if self.mode != MatrixWebhookMode.DISABLED else 'server'))( body=body, title=title, notify_type=notify_type, **kwargs) def _send_webhook_notification(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Matrix Notification as a webhook """ headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json', } if self.mode != MatrixWebhookMode.T2BOT: # Acquire our access token from our URL access_token = self.password if self.password else self.user default_port = 443 if self.secure else 80 # Prepare our URL url = '{schema}://{hostname}:{port}{webhook_path}/{token}'.format( schema='https' if self.secure else 'http', hostname=self.host, port='' if self.port is None or self.port == default_port else self.port, webhook_path=MATRIX_V1_WEBHOOK_PATH, token=access_token, ) else: # # t2bot Setup # # Prepare our URL url = 'https://webhooks.t2bot.io/api/v1/matrix/hook/' \ '{token}'.format(token=self.access_token) # Retrieve 
our payload payload = getattr(self, '_{}_webhook_payload'.format(self.mode))( body=body, title=title, notify_type=notify_type, **kwargs) self.logger.debug('Matrix POST URL: %s (cert_verify=%r)' % ( url, self.verify_certificate, )) self.logger.debug('Matrix Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( url, data=dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyMatrix.http_response_code_lookup( r.status_code, MATRIX_HTTP_ERROR_MAP) self.logger.warning( 'Failed to send Matrix notification: ' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.info('Sent Matrix notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Matrix notification.' ) self.logger.debug('Socket Exception: %s' % str(e)) # Return; we're done return False return True def _slack_webhook_payload(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Format the payload for a Slack based message """ if not hasattr(self, '_re_slack_formatting_rules'): # Prepare some one-time slack formatting variables self._re_slack_formatting_map = { # New lines must become the string version r'\r\*\n': '\\n', # Escape other special characters r'&': '&', r'<': '<', r'>': '>', } # Iterate over above list and store content accordingly self._re_slack_formatting_rules = re.compile( r'(' + '|'.join(self._re_slack_formatting_map.keys()) + r')', re.IGNORECASE, ) # Perform Formatting title = self._re_slack_formatting_rules.sub( # pragma: no branch lambda x: self._re_slack_formatting_map[x.group()], title, ) body = self._re_slack_formatting_rules.sub( # pragma: no branch lambda x: self._re_slack_formatting_map[x.group()], body, ) # prepare JSON Object payload = { 'username': self.user if self.user else self.app_id, # Use Markdown language 'mrkdwn': (self.notify_format == NotifyFormat.MARKDOWN), 'attachments': [{ 'title': title, 'text': body, 'color': self.color(notify_type), 'ts': time(), 'footer': self.app_id, }], } return payload def _matrix_webhook_payload(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Format the payload for a Matrix based message """ payload = { 'displayName': self.user if self.user else self.app_id, 'format': 'plain' if self.notify_format == NotifyFormat.TEXT else 'html', 'text': '', } if self.notify_format == NotifyFormat.HTML: payload['text'] = '{title}{body}'.format( title='' if not title else '

<h1>{}</h1>
'.format( NotifyMatrix.escape_html(title)), body=body) elif self.notify_format == NotifyFormat.MARKDOWN: payload['text'] = '{title}{body}'.format( title='' if not title else '

<h1>{}</h1>
'.format( NotifyMatrix.escape_html(title)), body=markdown(body)) else: # NotifyFormat.TEXT payload['text'] = \ body if not title else '{}\r\n{}'.format(title, body) return payload def _t2bot_webhook_payload(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Format the payload for a T2Bot Matrix based messages """ # Retrieve our payload payload = self._matrix_webhook_payload( body=body, title=title, notify_type=notify_type, **kwargs) # Acquire our image url if we're configured to do so image_url = None if not self.include_image else \ self.image_url(notify_type) if image_url: # t2bot can take an avatarUrl Entry payload['avatarUrl'] = image_url return payload def _send_server_notification(self, body, title='', notify_type=NotifyType.INFO, attach=None, **kwargs): """ Perform Direct Matrix Server Notification (no webhook) """ if self.access_token is None and self.password and not self.user: self.access_token = self.password self.transaction_id = uuid.uuid4() if self.access_token is None: # We need to register if not self._login(): if not self._register(): return False if len(self.rooms) == 0: # Attempt to retrieve a list of already joined channels self.rooms = self._joined_rooms() if len(self.rooms) == 0: # Nothing to notify self.logger.warning( 'There were no Matrix rooms specified to notify.') return False # Create a copy of our rooms to join and message rooms = list(self.rooms) # Initiaize our error tracking has_error = False attachments = None if attach and self.attachment_support: attachments = self._send_attachments(attach) if attachments is False: # take an early exit return False while len(rooms) > 0: # Get our room room = rooms.pop(0) # Get our room_id from our response room_id = self._room_join(room) if not room_id: # Notify our user about our failure self.logger.warning( 'Could not join Matrix room {}.'.format((room))) # Mark our failure has_error = True continue # Acquire our image url if we're configured to do so image_url = None if not self.include_image else \ self.image_url(notify_type) # Build our path if self.version == MatrixVersion.V3: path = '/rooms/{}/send/m.room.message/{}'.format( NotifyMatrix.quote(room_id), self.transaction_id, ) else: path = '/rooms/{}/send/m.room.message'.format( NotifyMatrix.quote(room_id)) if self.version == MatrixVersion.V2: # # Attachments don't work beyond V2 at this time # if image_url: # Define our payload image_payload = { 'msgtype': 'm.image', 'url': image_url, 'body': '{}'.format( notify_type if not title else title), } # Post our content postokay, response = self._fetch( path, payload=image_payload) if not postokay: # Mark our failure has_error = True continue if attachments: for attachment in attachments: attachment['room_id'] = room_id attachment['type'] = 'm.room.message' postokay, response = self._fetch( path, payload=attachment) if not postokay: # Mark our failure has_error = True continue # Define our payload payload = { 'msgtype': 'm.{}'.format(self.msgtype), 'body': '{title}{body}'.format( title='' if not title else '# {}\r\n'.format(title), body=body), } # Update our payload advance formatting for the services that # support them. if self.notify_format == NotifyFormat.HTML: payload.update({ 'format': 'org.matrix.custom.html', 'formatted_body': '{title}{body}'.format( title='' if not title else '

<h1>{}</h1>
'.format(title), body=body, ) }) elif self.notify_format == NotifyFormat.MARKDOWN: payload.update({ 'format': 'org.matrix.custom.html', 'formatted_body': '{title}{body}'.format( title='' if not title else '

<h1>{}</h1>
'.format( NotifyMatrix.escape_html(title, whitespace=False)), body=markdown(body), ) }) # Post our content method = 'PUT' if self.version == MatrixVersion.V3 else 'POST' postokay, response = self._fetch( path, payload=payload, method=method) # Increment the transaction ID to avoid future messages being # recognized as retransmissions and ignored if self.version == MatrixVersion.V3 \ and self.access_token != self.password: self.transaction_id += 1 self.store.set( 'transaction_id', self.transaction_id, expires=self.default_cache_expiry_sec) if not postokay: # Notify our user self.logger.warning( 'Could not send notification Matrix room {}.'.format(room)) # Mark our failure has_error = True continue return not has_error def _send_attachments(self, attach): """ Posts all of the provided attachments """ payloads = [] if self.version != MatrixVersion.V2: self.logger.warning( 'Add ?v=2 to Apprise URL to support Attachments') return next((False for a in attach if not a), []) for attachment in attach: if not attachment: # invalid attachment (bad file) return False if not re.match(r'^image/', attachment.mimetype, re.I): # unsuppored at this time continue postokay, response = \ self._fetch('/upload', attachment=attachment) if not (postokay and isinstance(response, dict)): # Failed to perform upload return False # If we get here, we'll have a response that looks like: # { # "content_uri": "mxc://example.com/a-unique-key" # } # FUTURE if self.version == MatrixVersion.V3: # FUTURE # Prepare our payload # FUTURE payloads.append({ # FUTURE "body": attachment.name, # FUTURE "info": { # FUTURE "mimetype": attachment.mimetype, # FUTURE "size": len(attachment), # FUTURE }, # FUTURE "msgtype": "m.image", # FUTURE "url": response.get('content_uri'), # FUTURE }) # FUTURE else: # FUTURE # Prepare our payload # FUTURE payloads.append({ # FUTURE "info": { # FUTURE "mimetype": attachment.mimetype, # FUTURE }, # FUTURE "msgtype": "m.image", # FUTURE "body": "tta.webp", # FUTURE "url": response.get('content_uri'), # FUTURE }) # Prepare our payload payloads.append({ "info": { "mimetype": attachment.mimetype, }, "msgtype": "m.image", "body": "tta.webp", "url": response.get('content_uri'), }) return payloads def _register(self): """ Register with the service if possible. """ # Prepare our Registration Payload. This will only work if registration # is enabled for the public payload = { 'kind': 'user', 'auth': {'type': 'm.login.dummy'}, } # parameters params = { 'kind': 'user', } # If a user is not specified, one will be randomly generated for you. # If you do not specify a password, you will be unable to login to the # account if you forget the access_token. 
if self.user: payload['username'] = self.user if self.password: payload['password'] = self.password # Register postokay, response = \ self._fetch('/register', payload=payload, params=params) if not (postokay and isinstance(response, dict)): # Failed to register return False # Pull the response details self.access_token = response.get('access_token') self.home_server = response.get('home_server') self.user_id = response.get('user_id') self.store.set( 'access_token', self.access_token, expires=self.default_cache_expiry_sec) self.store.set( 'home_server', self.home_server, expires=self.default_cache_expiry_sec) self.store.set( 'user_id', self.user_id, expires=self.default_cache_expiry_sec) if self.access_token is not None: # Store our token into our store self.logger.debug( 'Registered successfully with Matrix server.') return True return False def _login(self): """ Acquires the matrix token required for making future requests. If we fail we return False, otherwise we return True """ if self.access_token: # Login not required; silently skip-over return True if (self.user and self.password): # Prepare our Authentication Payload if self.version == MatrixVersion.V3: payload = { 'type': 'm.login.password', 'identifier': { 'type': 'm.id.user', 'user': self.user, }, 'password': self.password, } else: payload = { 'type': 'm.login.password', 'user': self.user, 'password': self.password, } else: # It's not possible to register since we need these 2 values to # make the action possible. self.logger.warning( 'Failed to login to Matrix server: ' 'token or user/pass combo is missing.') return False # Build our URL postokay, response = self._fetch('/login', payload=payload) if not (postokay and isinstance(response, dict)): # Failed to login return False # Pull the response details self.access_token = response.get('access_token') self.home_server = response.get('home_server') self.user_id = response.get('user_id') if not self.access_token: return False self.logger.debug( 'Authenticated successfully with Matrix server.') # Store our token into our store self.store.set( 'access_token', self.access_token, expires=self.default_cache_expiry_sec) self.store.set( 'home_server', self.home_server, expires=self.default_cache_expiry_sec) self.store.set( 'user_id', self.user_id, expires=self.default_cache_expiry_sec) return True def _logout(self): """ Relinquishes token from remote server """ if not self.access_token: # Login not required; silently skip-over return True # Prepare our Registration Payload payload = {} # Expire our token postokay, response = self._fetch('/logout', payload=payload) if not postokay: # If we get here, the token was declared as having already # been expired. The response looks like this: # { # u'errcode': u'M_UNKNOWN_TOKEN', # u'error': u'Access Token unknown or expired', # } # # In this case it's okay to safely return True because # we're logged out in this case. if response.get('errcode') != u'M_UNKNOWN_TOKEN': return False # else: The response object looks like this if we were successful: # {} # Pull the response details self.access_token = None self.home_server = None self.user_id = None # clear our tokens self.store.clear( 'access_token', 'home_server', 'user_id', 'transaction_id') self.logger.debug( 'Unauthenticated successfully with Matrix server.') return True def _room_join(self, room): """ Joins a matrix room if we're not already in it. 
Otherwise it attempts to create it if it doesn't exist and always returns the room_id if it was successful, otherwise it returns None """ if not self.access_token: # We can't join a room if we're not logged in return None if not isinstance(room, str): # Not a supported string return None # Prepare our Join Payload payload = {} # Check if it's a room id... result = IS_ROOM_ID.match(room) if result: # We detected ourselves the home_server home_server = result.group('home_server') \ if result.group('home_server') else self.home_server # It was a room ID; simple mapping: room_id = "!{}:{}".format( result.group('room'), home_server, ) # Check our cache for speed: try: # We're done as we've already joined the channel return self.store[room_id]['id'] except KeyError: # No worries, we'll try to acquire the info pass # Build our URL path = '/join/{}'.format(NotifyMatrix.quote(room_id)) # Make our query postokay, _ = self._fetch(path, payload=payload) if postokay: # Cache our entry for fast access later self.store.set(room_id, { 'id': room_id, 'home_server': home_server, }) return room_id if postokay else None # Try to see if it's an alias then... result = IS_ROOM_ALIAS.match(room) if not result: # There is nothing else it could be self.logger.warning( 'Ignoring illegally formed room {} ' 'from Matrix server list.'.format(room)) return None # If we reach here, we're dealing with a channel alias home_server = self.home_server \ if not result.group('home_server') \ else result.group('home_server') # tidy our room (alias) identifier room = '#{}:{}'.format(result.group('room'), home_server) # Check our cache for speed: try: # We're done as we've already joined the channel return self.store[room]['id'] except KeyError: # No worries, we'll try to acquire the info pass # If we reach here, we need to join the channel # Build our URL path = '/join/{}'.format(NotifyMatrix.quote(room)) # Attempt to join the channel postokay, response = self._fetch(path, payload=payload) if postokay: # Cache our entry for fast access later self.store.set(room, { 'id': response.get('room_id'), 'home_server': home_server, }) return response.get('room_id') # Try to create the channel return self._room_create(room) def _room_create(self, room): """ Creates a matrix room and return it's room_id if successful otherwise None is returned. """ if not self.access_token: # We can't create a room if we're not logged in return None if not isinstance(room, str): # Not a supported string return None # Build our room if we have to: result = IS_ROOM_ALIAS.match(room) if not result: # Illegally formed room return None # Our home_server home_server = result.group('home_server') \ if result.group('home_server') else self.home_server # update our room details room = '#{}:{}'.format(result.group('room'), home_server) # Prepare our Create Payload payload = { 'room_alias_name': result.group('room'), # Set our channel name 'name': '#{} - {}'.format(result.group('room'), self.app_desc), # hide the room by default; let the user open it up if they wish # to others. 
'visibility': 'private', 'preset': 'trusted_private_chat', } postokay, response = self._fetch('/createRoom', payload=payload) if not postokay: # Failed to create channel # Typical responses: # - {u'errcode': u'M_ROOM_IN_USE', # u'error': u'Room alias already taken'} # - {u'errcode': u'M_UNKNOWN', # u'error': u'Internal server error'} if (response and response.get('errcode') == 'M_ROOM_IN_USE'): return self._room_id(room) return None # Cache our entry for fast access later self.store.set(response.get('room_alias'), { 'id': response.get('room_id'), 'home_server': home_server, }) return response.get('room_id') def _joined_rooms(self): """ Returns a list of the current rooms the logged in user is a part of. """ if not self.access_token: # No list is possible return list() postokay, response = self._fetch( '/joined_rooms', payload=None, method='GET') if not postokay: # Failed to retrieve listings return list() # Return our list of rooms return response.get('joined_rooms', list()) def _room_id(self, room): """Get room id from its alias. Args: room (str): The room alias name. Returns: returns the room id if it can, otherwise it returns None """ if not self.access_token: # We can't get a room id if we're not logged in return None if not isinstance(room, str): # Not a supported string return None # Build our room if we have to: result = IS_ROOM_ALIAS.match(room) if not result: # Illegally formed room return None # Our home_server home_server = result.group('home_server') \ if result.group('home_server') else self.home_server # update our room details room = '#{}:{}'.format(result.group('room'), home_server) # Make our request postokay, response = self._fetch( "/directory/room/{}".format( NotifyMatrix.quote(room)), payload=None, method='GET') if postokay: return response.get("room_id") return None def _fetch(self, path, payload=None, params={}, attachment=None, method='POST', url_override=None): """ Wrapper to request.post() to manage it's response better and make the send() function cleaner and easier to maintain. This function returns True if the _post was successful and False if it wasn't. 
this function returns the status code if url_override is used """ # Define our headers headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json', 'Accept': 'application/json', } if self.access_token is not None: headers["Authorization"] = 'Bearer %s' % self.access_token # Server Discovery / Well-known URI if url_override: url = url_override else: try: url = self.base_url except MatrixDiscoveryException: # Discovery failed; we're done return (False, {}) # Default return status code status_code = requests.codes.internal_server_error if path == '/upload': # FUTURE if self.version == MatrixVersion.V3: # FUTURE url += MATRIX_V3_MEDIA_PATH + path # FUTURE else: # FUTURE url += MATRIX_V2_MEDIA_PATH + path url += MATRIX_V2_MEDIA_PATH + path params.update({'filename': attachment.name}) with open(attachment.path, 'rb') as fp: payload = fp.read() # Update our content type headers['Content-Type'] = attachment.mimetype elif not url_override: if self.version == MatrixVersion.V3: url += MATRIX_V3_API_PATH + path else: url += MATRIX_V2_API_PATH + path # Our response object response = {} # fetch function fn = requests.post if method == 'POST' else ( requests.put if method == 'PUT' else requests.get) # Define how many attempts we'll make if we get caught in a throttle # event retries = self.default_retries if self.default_retries > 0 else 1 while retries > 0: # Decrement our throttle retry count retries -= 1 self.logger.debug('Matrix %s URL: %s (cert_verify=%r)' % ( 'POST' if method == 'POST' else ( requests.put if method == 'PUT' else 'GET'), url, self.verify_certificate, )) self.logger.debug('Matrix Payload: %s' % str(payload)) # Initialize our response object r = None try: r = fn( url, data=dumps(payload) if not attachment else payload, params=None if not params else params, headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) # Store status code status_code = r.status_code self.logger.debug( 'Matrix Response: code=%d, %s' % ( r.status_code, str(r.content))) response = loads(r.content) if r.status_code == requests.codes.too_many_requests: wait = self.default_wait_ms / 1000 try: wait = response['retry_after_ms'] / 1000 except KeyError: try: errordata = response['error'] wait = errordata['retry_after_ms'] / 1000 except KeyError: pass self.logger.warning( 'Matrix server requested we throttle back {}ms; ' 'retries left {}.'.format(wait, retries)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Throttle for specified wait self.throttle(wait=wait) # Try again continue elif r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyMatrix.http_response_code_lookup( r.status_code, MATRIX_HTTP_ERROR_MAP) self.logger.warning( 'Failed to handshake with Matrix server: ' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Return; we're done return ( False if not url_override else status_code, response) except (AttributeError, TypeError, ValueError): # This gets thrown if we can't parse our JSON Response # - ValueError = r.content is Unparsable # - TypeError = r.content is None # - AttributeError = r is None self.logger.warning('Invalid response from Matrix server.') self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) return (False if not url_override else status_code, {}) except (requests.TooManyRedirects, requests.RequestException) as e: self.logger.warning( 'A Connection error occurred while registering with Matrix' ' server.') 
self.logger.debug('Socket Exception: %s', str(e)) # Return; we're done return (False if not url_override else status_code, response) except (OSError, IOError) as e: self.logger.warning( 'An I/O error occurred while reading {}.'.format( attachment.name if attachment else 'unknown file')) self.logger.debug('I/O Exception: %s', str(e)) return (False if not url_override else status_code, {}) return (True if not url_override else status_code, response) # If we get here, we ran out of retries return (False if not url_override else status_code, {}) def __del__(self): """ Ensure we relinquish our token """ if self.mode == MatrixWebhookMode.T2BOT: # nothing to do return if self.store.mode != PersistentStoreMode.MEMORY: # We no longer have to log out as we have persistant storage to # re-use our credentials with return if self.access_token is not None \ and self.access_token == self.password and not self.user: return try: self._logout() except LookupError: # pragma: no cover # Python v3.5 call to requests can sometimes throw the exception # "/usr/lib64/python3.7/socket.py", line 748, in getaddrinfo # LookupError: unknown encoding: idna # # This occurs every time when running unit-tests against Apprise: # LANG=C.UTF-8 PYTHONPATH=$(pwd) py.test-3.7 # # There has been an open issue on this since Jan 2017. # - https://bugs.python.org/issue29288 # # A ~similar~ issue can be identified here in the requests # ticket system as unresolved and has provided workarounds # - https://github.com/kennethreitz/requests/issues/3578 pass except ImportError: # pragma: no cover # The actual exception is `ModuleNotFoundError` however ImportError # grants us backwards compatibility with versions of Python older # than v3.6 # Python code that makes early calls to sys.exit() can cause # the __del__() code to run. However, in some newer versions of # Python, this causes the `sys` library to no longer be # available. The stack overflow also goes on to suggest that # it's not wise to use the __del__() as a destructor # which is the case here. # https://stackoverflow.com/questions/67218341/\ # modulenotfounderror-import-of-time-halted-none-in-sys-\ # modules-occured-when-obj?noredirect=1&lq=1 # # # Also see: https://stackoverflow.com/questions\ # /1481488/what-is-the-del-method-and-how-do-i-call-it # At this time it seems clean to try to log out (if we can) # but not throw any unnecessary exceptions (like this one) to # the end user if we don't have to. pass @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol if self.secure else self.protocol, self.host if self.mode != MatrixWebhookMode.T2BOT else self.access_token, self.port if self.port else (443 if self.secure else 80), self.user if self.mode != MatrixWebhookMode.T2BOT else None, self.password if self.mode != MatrixWebhookMode.T2BOT else None, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
""" # Define any URL parameters params = { 'image': 'yes' if self.include_image else 'no', 'mode': self.mode, 'version': self.version, 'msgtype': self.msgtype, 'discovery': 'yes' if self.discovery else 'no', } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) auth = '' if self.mode != MatrixWebhookMode.T2BOT: # Determine Authentication if self.user and self.password: auth = '{user}:{password}@'.format( user=NotifyMatrix.quote(self.user, safe=''), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), ) elif self.user or self.password: auth = '{value}@'.format( value=NotifyMatrix.quote( self.user if self.user else self.password, safe=''), ) default_port = 443 if self.secure else 80 return '{schema}://{auth}{hostname}{port}/{rooms}?{params}'.format( schema=self.secure_protocol if self.secure else self.protocol, auth=auth, hostname=NotifyMatrix.quote(self.host, safe='') if self.mode != MatrixWebhookMode.T2BOT else self.pprint(self.access_token, privacy, safe=''), port='' if self.port is None or self.port == default_port else ':{}'.format(self.port), rooms=NotifyMatrix.quote('/'.join(self.rooms)), params=NotifyMatrix.urlencode(params), ) def __len__(self): """ Returns the number of targets associated with this notification """ targets = len(self.rooms) return targets if targets > 0 else 1 @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results if not results.get('host'): return None # Get our rooms results['targets'] = NotifyMatrix.split_path(results['fullpath']) # Support the 'to' variable so that we can support rooms this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += NotifyMatrix.parse_list(results['qsd']['to']) # Boolean to include an image or not results['include_image'] = parse_bool(results['qsd'].get( 'image', NotifyMatrix.template_args['image']['default'])) # Boolean to perform a server discovery results['discovery'] = parse_bool(results['qsd'].get( 'discovery', NotifyMatrix.template_args['discovery']['default'])) # Get our mode results['mode'] = results['qsd'].get('mode') # t2bot detection... 
look for just a hostname, and/or just a user/host # if we match this; we can go ahead and set the mode (but only if # it was otherwise not set) if results['mode'] is None \ and not results['password'] \ and not results['targets']: # Default mode to t2bot results['mode'] = MatrixWebhookMode.T2BOT if results['mode'] and \ results['mode'].lower() == MatrixWebhookMode.T2BOT: # unquote our hostname and pass it in as the password/token results['password'] = NotifyMatrix.unquote(results['host']) # Support the message type keyword if 'msgtype' in results['qsd'] and len(results['qsd']['msgtype']): results['msgtype'] = \ NotifyMatrix.unquote(results['qsd']['msgtype']) # Support the use of the token= keyword if 'token' in results['qsd'] and len(results['qsd']['token']): results['password'] = NotifyMatrix.unquote(results['qsd']['token']) elif not results['password'] and results['user']: # swap results['password'] = results['user'] results['user'] = None # Support the use of the version= or v= keyword if 'version' in results['qsd'] and len(results['qsd']['version']): results['version'] = \ NotifyMatrix.unquote(results['qsd']['version']) elif 'v' in results['qsd'] and len(results['qsd']['v']): results['version'] = NotifyMatrix.unquote(results['qsd']['v']) return results @staticmethod def parse_native_url(url): """ Support https://webhooks.t2bot.io/api/v1/matrix/hook/WEBHOOK_TOKEN/ """ result = re.match( r'^https?://webhooks\.t2bot\.io/api/v[0-9]+/matrix/hook/' r'(?P[A-Z0-9_-]+)/?' r'(?P\?.+)?$', url, re.I) if result: mode = 'mode={}'.format(MatrixWebhookMode.T2BOT) return NotifyMatrix.parse_url( '{schema}://{webhook_token}/{params}'.format( schema=NotifyMatrix.secure_protocol, webhook_token=result.group('webhook_token'), params='?{}'.format(mode) if not result.group('params') else '{}&{}'.format(result.group('params'), mode))) return None def server_discovery(self): """ Home Server Discovery as documented here: https://spec.matrix.org/v1.11/client-server-api/#well-known-uri """ if not (self.discovery and self.secure): # Nothing further to do with insecure server setups return '' # Get our content from cache base_url, identity_url = ( self.store.get(self.discovery_base_key), self.store.get(self.discovery_identity_key), ) if not (base_url is None and identity_url is None): # We can use our cached value and return early return base_url # 1. Extract the server name from the user’s Matrix ID by splitting # the Matrix ID at the first colon. 
verify_url = f'https://{self.host}/.well-known/matrix/client' code, wk_response = self._fetch( None, method='GET', url_override=verify_url) # Output may look as follows: # { # "m.homeserver": { # "base_url": "https://matrix.example.com" # }, # "m.identity_server": { # "base_url": "https://nuxref.com" # } # } if code == requests.codes.not_found: # This is an acceptable response; we're done self.logger.debug( 'Matrix Well-Known Base URI not found at %s', verify_url) # Set our keys out for fast recall later on self.store.set( self.discovery_base_key, '', expires=self.discovery_cache_length_sec) self.store.set( self.discovery_identity_key, '', expires=self.discovery_cache_length_sec) return '' elif code != requests.codes.ok: # We're done early as we couldn't load the results msg = 'Matrix Well-Known Base URI Discovery Failed' self.logger.warning( '%s - %s returned error code: %d', msg, verify_url, code) raise MatrixDiscoveryException(msg, error_code=code) if not wk_response: # This is an acceptable response; we simply do nothing self.logger.debug( 'Matrix Well-Known Base URI not defined %s', verify_url) # Set our keys out for fast recall later on self.store.set( self.discovery_base_key, '', expires=self.discovery_cache_length_sec) self.store.set( self.discovery_identity_key, '', expires=self.discovery_cache_length_sec) return '' # # Parse our m.homeserver information # try: base_url = wk_response['m.homeserver']['base_url'].rstrip('/') results = NotifyBase.parse_url(base_url, verify_host=True) except (AttributeError, TypeError, KeyError): # AttributeError: result wasn't a string (rstrip failed) # TypeError : wk_response wasn't a dictionary # KeyError : wk_response not to standards results = None if not results: msg = 'Matrix Well-Known Base URI Discovery Failed' self.logger.warning( '%s - m.homeserver payload is missing or invalid: %s', msg, str(wk_response)) raise MatrixDiscoveryException(msg) # # Our .well-known extraction was successful; now we need to verify # that the version information resolves. 
# verify_url = f'{base_url}/_matrix/client/versions' # Post our content code, response = self._fetch( None, method='GET', url_override=verify_url) if code != requests.codes.ok: # We're done early as we couldn't load the results msg = 'Matrix Well-Known Base URI Discovery Verification Failed' self.logger.warning( '%s - %s returned error code: %d', msg, verify_url, code) raise MatrixDiscoveryException(msg, error_code=code) # # Phase 2: Handle m.identity_server IF defined # if 'm.identity_server' in wk_response: try: identity_url = \ wk_response['m.identity_server']['base_url'].rstrip('/') results = NotifyBase.parse_url(identity_url, verify_host=True) except (AttributeError, TypeError, KeyError): # AttributeError: result wasn't a string (rstrip failed) # TypeError : wk_response wasn't a dictionary # KeyError : wk_response not to standards results = None if not results: msg = 'Matrix Well-Known Identity URI Discovery Failed' self.logger.warning( '%s - m.identity_server payload is missing or invalid: %s', msg, str(wk_response)) raise MatrixDiscoveryException(msg) # # Verify identity server found # verify_url = f'{identity_url}/_matrix/identity/v2' # Post our content code, response = self._fetch( None, method='GET', url_override=verify_url) if code != requests.codes.ok: # We're done early as we couldn't load the results msg = 'Matrix Well-Known Identity URI Discovery Failed' self.logger.warning( '%s - %s returned error code: %d', msg, verify_url, code) raise MatrixDiscoveryException(msg, error_code=code) # Update our cache self.store.set( self.discovery_identity_key, identity_url, # Add 2 seconds to prevent this key from expiring before base expires=self.discovery_cache_length_sec + 2) else: # No identity server self.store.set( self.discovery_identity_key, '', # Add 2 seconds to prevent this key from expiring before base expires=self.discovery_cache_length_sec + 2) # Update our cache self.store.set( self.discovery_base_key, base_url, expires=self.discovery_cache_length_sec) return base_url @property def base_url(self): """ Returns the base_url if known """ try: base_url = self.server_discovery() if base_url: # We can use our cached value and return early return base_url except MatrixDiscoveryException: self.store.clear( self.discovery_base_key, self.discovery_identity_key) raise # If we get hear, we need to build our URL dynamically based on what # was provided to us during the plugins initialization default_port = 443 if self.secure else 80 return '{schema}://{hostname}{port}'.format( schema='https' if self.secure else 'http', hostname=self.host, port='' if self.port is None or self.port == default_port else f':{self.port}') @property def identity_url(self): """ Returns the identity_url if known """ base_url = self.base_url identity_url = self.store.get(self.discovery_identity_key) return base_url if not identity_url else identity_url apprise-1.9.3/apprise/plugins/mattermost.py000066400000000000000000000350671477231770000211070ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. 
Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # Create an incoming webhook; the website will provide you with something like: # http://localhost:8065/hooks/yobjmukpaw3r3urc5h6i369yima # ^^^^^^^^^^^^^^^^^^^^^^^^^^^ # |-- this is the webhook --| # # You can effectively turn the url above to read this: # mmost://localhost:8065/yobjmukpaw3r3urc5h6i369yima # - swap http with mmost # - drop /hooks/ reference import re import requests from json import dumps from .base import NotifyBase from ..common import NotifyImageSize from ..common import NotifyType from ..utils.parse import parse_bool, parse_list, validate_regex from ..locale import gettext_lazy as _ # Some Reference Locations: # - https://docs.mattermost.com/developer/webhooks-incoming.html # - https://docs.mattermost.com/administration/config-settings.html class NotifyMattermost(NotifyBase): """ A wrapper for Mattermost Notifications """ # The default descriptive name associated with the Notification service_name = 'Mattermost' # The services URL service_url = 'https://mattermost.com/' # The default protocol protocol = 'mmost' # The default secure protocol secure_protocol = 'mmosts' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_mattermost' # Allows the user to specify the NotifyImageSize object image_size = NotifyImageSize.XY_72 # The maximum allowable characters allowed in the body per message body_maxlen = 4000 # Mattermost does not have a title title_maxlen = 0 # Define object templates templates = ( '{schema}://{host}/{token}', '{schema}://{host}:{port}/{token}', '{schema}://{host}/{fullpath}/{token}', '{schema}://{host}:{port}/{fullpath}/{token}', '{schema}://{botname}@{host}/{token}', '{schema}://{botname}@{host}:{port}/{token}', '{schema}://{botname}@{host}/{fullpath}/{token}', '{schema}://{botname}@{host}:{port}/{fullpath}/{token}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'host': { 'name': _('Hostname'), 'type': 'string', 'required': True, }, 'token': { 'name': _('Webhook Token'), 'type': 'string', 'private': True, 'required': True, }, 'fullpath': { 'name': _('Path'), 'type': 'string', }, 'botname': { 'name': _('Bot Name'), 'type': 'string', 'map_to': 'user', }, 'port': { 'name': _('Port'), 'type': 'int', 'min': 1, 'max': 65535, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'channels': { 'name': _('Channels'), 'type': 'list:string', }, 'channel': { 'alias_of': 'channels', }, 'image': { 'name': _('Include Image'), 'type': 'bool', 'default': 
True, 'map_to': 'include_image', }, 'to': { 'alias_of': 'channels', }, }) def __init__(self, token, fullpath=None, channels=None, include_image=False, **kwargs): """ Initialize Mattermost Object """ super().__init__(**kwargs) if self.secure: self.schema = 'https' else: self.schema = 'http' # our full path self.fullpath = '' if not isinstance( fullpath, str) else fullpath.strip() # Authorization Token (associated with project) self.token = validate_regex(token) if not self.token: msg = 'An invalid Mattermost Authorization Token ' \ '({}) was specified.'.format(token) self.logger.warning(msg) raise TypeError(msg) # Optional Channels (strip off any channel prefix entries if present) self.channels = [x.lstrip('#') for x in parse_list(channels)] # Place a thumbnail image inline with the message body self.include_image = include_image return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Mattermost Notification """ # Create a copy of our channels, otherwise place a dummy entry channels = list(self.channels) if self.channels else [None, ] headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json' } # prepare JSON Object payload = { 'text': body, 'icon_url': None, } # Acquire our image url if configured to do so image_url = None if not self.include_image \ else self.image_url(notify_type) if image_url: # Set our image configuration if told to do so payload['icon_url'] = image_url # Set our user payload['username'] = self.user if self.user else self.app_id port = '' if self.port is not None: port = ':{}'.format(self.port) # For error tracking has_error = False while len(channels): # Pop a channel off of the list channel = channels.pop(0) if channel: payload['channel'] = channel url = '{}://{}{}{}/hooks/{}'.format( self.schema, self.host, port, self.fullpath.rstrip('/'), self.token) self.logger.debug('Mattermost POST URL: %s (cert_verify=%r)' % ( url, self.verify_certificate, )) self.logger.debug('Mattermost Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( url, data=dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyMattermost.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to send Mattermost notification{}: ' '{}{}error={}.'.format( '' if not channel else ' to channel {}'.format(channel), status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Flag our error has_error = True continue else: self.logger.info( 'Sent Mattermost notification{}.'.format( '' if not channel else ' to channel {}'.format(channel))) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Mattermost ' 'notification{}.'.format( '' if not channel else ' to channel {}'.format(channel))) self.logger.debug('Socket Exception: %s' % str(e)) # Flag our error has_error = True continue # Return our overall status return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol if self.secure else self.protocol, self.token, self.host, self.port, self.fullpath, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
""" # Define any URL parameters params = { 'image': 'yes' if self.include_image else 'no', } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) if self.channels: # historically the value only accepted one channel and is # therefore identified as 'channel'. Channels have always been # optional, so that is why this setting is nested in an if block params['channel'] = ','.join( [NotifyMattermost.quote(x, safe='') for x in self.channels]) default_port = 443 if self.secure else 80 default_schema = self.secure_protocol if self.secure else self.protocol # Determine if there is a botname present botname = '' if self.user: botname = '{botname}@'.format( botname=NotifyMattermost.quote(self.user, safe=''), ) return \ '{schema}://{botname}{hostname}{port}{fullpath}{token}' \ '/?{params}'.format( schema=default_schema, botname=botname, # never encode hostname since we're expecting it to be a valid # one hostname=self.host, port='' if self.port is None or self.port == default_port else ':{}'.format(self.port), fullpath='/' if not self.fullpath else '{}/'.format( NotifyMattermost.quote(self.fullpath, safe='/')), token=self.pprint(self.token, privacy, safe=''), params=NotifyMattermost.urlencode(params), ) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url) if not results: # We're done early as we couldn't load the results return results # Acquire our tokens; the last one will always be our token # all entries before it will be our path tokens = NotifyMattermost.split_path(results['fullpath']) results['token'] = None if not tokens else tokens.pop() # Store our path results['fullpath'] = '' if not tokens \ else '/{}'.format('/'.join(tokens)) # Define our optional list of channels to notify results['channels'] = list() # Support both 'to' (for yaml configuration) and channel= if 'to' in results['qsd'] and len(results['qsd']['to']): # Allow the user to specify the channel to post to results['channels'].extend( NotifyMattermost.parse_list(results['qsd']['to'])) if 'channel' in results['qsd'] and len(results['qsd']['channel']): # Allow the user to specify the channel to post to results['channels'].extend( NotifyMattermost.parse_list(results['qsd']['channel'])) if 'channels' in results['qsd'] and len(results['qsd']['channels']): # Allow the user to specify the channel to post to results['channels'].extend( NotifyMattermost.parse_list(results['qsd']['channels'])) # Image manipulation results['include_image'] = parse_bool(results['qsd'].get( 'image', NotifyMattermost.template_args['image']['default'])) return results @staticmethod def parse_native_url(url): """ Support parsing the webhook straight from URL https://HOST:443/workflows/WORKFLOWID/triggers/manual/paths/invoke https://mattermost.HOST/hooks/TOKEN """ # Match our workflows webhook URL and re-assemble result = re.match( r'^http(?Ps?)://(?Pmattermost\.[A-Z0-9_.-]+)' r'(:(?P[1-9][0-9]{0,5}))?' r'/hooks/' r'(?P[A-Z0-9_-]+)/?' 
r'(?P\?.+)?$', url, re.I) if result: default_port = \ int(result.group('port')) if result.group('port') else ( 443 if result.group('secure') else 80) default_schema = \ NotifyMattermost.secure_protocol \ if result.group('secure') else NotifyMattermost.protocol # Construct our URL return NotifyMattermost.parse_url( '{schema}://{host}{port}/{token}' '/{params}'.format( schema=default_schema, host=result.group('host'), port='' if not result.group('port') or int(result.group('port')) == default_port else f':{default_port}', token=result.group('token'), params='' if not result.group('params') else result.group('params'))) return None apprise-1.9.3/apprise/plugins/messagebird.py000066400000000000000000000303221477231770000211620ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # Create an account https://messagebird.com if you don't already have one # # Get your (apikey) and api example from the dashboard here: # - https://dashboard.messagebird.com/en/user/index # import requests from .base import NotifyBase from ..common import NotifyType from ..utils.parse import is_phone_no, parse_phone_no, validate_regex from ..locale import gettext_lazy as _ class NotifyMessageBird(NotifyBase): """ A wrapper for MessageBird Notifications """ # The default descriptive name associated with the Notification service_name = 'MessageBird' # The services URL service_url = 'https://messagebird.com' # The default protocol secure_protocol = 'msgbird' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_messagebird' # MessageBird uses the http protocol with JSON requests notify_url = 'https://rest.messagebird.com/messages' # The maximum length of the body body_maxlen = 160 # A title can not be used for SMS Messages. Setting this to zero will # cause any title (if defined) to get placed into the message body. 
title_maxlen = 0 # Define object templates templates = ( '{schema}://{apikey}/{source}', '{schema}://{apikey}/{source}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'apikey': { 'name': _('API Key'), 'type': 'string', 'required': True, 'private': True, 'regex': (r'^[a-z0-9]{25}$', 'i'), }, 'source': { 'name': _('Source Phone No'), 'type': 'string', 'prefix': '+', 'required': True, 'regex': (r'^[0-9\s)(+-]+$', 'i'), }, 'target_phone': { 'name': _('Target Phone No'), 'type': 'string', 'prefix': '+', 'regex': (r'^[0-9\s)(+-]+$', 'i'), 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', } }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'from': { 'alias_of': 'source', }, }) def __init__(self, apikey, source, targets=None, **kwargs): """ Initialize MessageBird Object """ super().__init__(**kwargs) # API Key (associated with project) self.apikey = validate_regex( apikey, *self.template_tokens['apikey']['regex']) if not self.apikey: msg = 'An invalid MessageBird API Key ' \ '({}) was specified.'.format(apikey) self.logger.warning(msg) raise TypeError(msg) result = is_phone_no(source) if not result: msg = 'The MessageBird source specified ({}) is invalid.'\ .format(source) self.logger.warning(msg) raise TypeError(msg) # Store our source self.source = result['full'] # Parse our targets self.targets = list() targets = parse_phone_no(targets) if not targets: # No sources specified, use our own phone no self.targets.append(self.source) return # otherwise, store all of our target numbers for target in targets: # Validate targets and drop bad ones: result = is_phone_no(target) if not result: self.logger.warning( 'Dropped invalid phone # ' '({}) specified.'.format(target), ) continue # store valid phone number self.targets.append(result['full']) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform MessageBird Notification """ if len(self.targets) == 0: # There were no services to notify self.logger.warning('There were no MessageBird targets to notify.') return False # error tracking (used for function return) has_error = False # Prepare our headers headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/x-www-form-urlencoded', 'Authorization': 'AccessKey {}'.format(self.apikey), } # Prepare our payload payload = { 'originator': '+{}'.format(self.source), 'recipients': None, 'body': body, } # Create a copy of the targets list targets = list(self.targets) while len(targets): # Get our target to notify target = targets.pop(0) # Prepare our user payload['recipients'] = '+{}'.format(target) # Some Debug Logging self.logger.debug( 'MessageBird POST URL: {} (cert_verify={})'.format( self.notify_url, self.verify_certificate)) self.logger.debug('MessageBird Payload: {}' .format(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( self.notify_url, data=payload, headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) # Sample output of a successful transmission # { # "originator": "+15553338888", # "body": "test", # "direction": "mt", # "mclass": 1, # "reference": null, # "createdDatetime": "2019-08-22T01:32:18+00:00", # "recipients": { # "totalCount": 1, # "totalSentCount": 1, # "totalDeliveredCount": 0, # "totalDeliveryFailedCount": 0, # "items": [ # { # "status": "sent", # "statusDatetime": "2019-08-22T01:32:18+00:00", # "recipient": 
15553338888, # "messagePartCount": 1 # } # ] # }, # "validity": null, # "gateway": 10, # "typeDetails": {}, # "href": "https://rest.messagebird.com/messages/\ # b5d424244a5b4fd0b5b5728bccaafc23", # "datacoding": "plain", # "scheduledDatetime": null, # "type": "sms", # "id": "b5d424244a5b4fd0b5b5728bccaafc23" # } if r.status_code not in ( requests.codes.ok, requests.codes.created): # We had a problem status_str = \ NotifyMessageBird.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to send MessageBird notification to {}: ' '{}{}error={}.'.format( ','.join(target), status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure has_error = True continue else: self.logger.info( 'Sent MessageBird notification to {}.'.format(target)) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending MessageBird:%s ' % ( target) + 'notification.' ) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure has_error = True continue return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.apikey, self.source) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Our URL parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) return '{schema}://{apikey}/{source}/{targets}/?{params}'.format( schema=self.secure_protocol, apikey=self.pprint(self.apikey, privacy, safe=''), source=self.source, targets='/'.join( [NotifyMessageBird.quote(x, safe='') for x in self.targets]), params=NotifyMessageBird.urlencode(params)) def __len__(self): """ Returns the number of targets associated with this notification """ targets = len(self.targets) return targets if targets > 0 else 1 @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Get our entries; split_path() looks after unquoting content for us # by default results['targets'] = NotifyMessageBird.split_path(results['fullpath']) try: # The first path entry is the source/originator results['source'] = results['targets'].pop(0) except IndexError: # No path specified... this URL is potentially un-parseable; we can # hope for a from= entry results['source'] = None # The hostname is our authentication key results['apikey'] = NotifyMessageBird.unquote(results['host']) # Support the 'to' variable so that we can support targets this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyMessageBird.parse_phone_no(results['qsd']['to']) if 'from' in results['qsd'] and len(results['qsd']['from']): results['source'] = \ NotifyMessageBird.unquote(results['qsd']['from']) return results apprise-1.9.3/apprise/plugins/misskey.py000066400000000000000000000232651477231770000203710ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. 
# Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # 1. visit https://misskey-hub.net/ and see what it's all about if you want. # Choose a service you want to create an account on from here: # https://misskey-hub.net/en/instances.html # # - For this plugin, I tested using https://misskey.sda1.net and created an # account. # # 2. Generate an API Key: # - Settings > API > Generate Key # - Name it whatever you want # - Assign it 'AT LEAST': # a. Compose or delete chat messages # b. Compose or delete notes # # # This plugin also supports taking the URL (as identified above) directly # as well. 
import requests from json import dumps from .base import NotifyBase from ..common import NotifyType from ..utils.parse import validate_regex from ..locale import gettext_lazy as _ class MisskeyVisibility: """ The visibility of any note created """ # post will be public PUBLIC = 'public' HOME = 'home' FOLLOWERS = 'followers' SPECIFIED = 'specified' # Define the types in a list for validation purposes MISSKEY_VISIBILITIES = ( MisskeyVisibility.PUBLIC, MisskeyVisibility.HOME, MisskeyVisibility.FOLLOWERS, MisskeyVisibility.SPECIFIED, ) class NotifyMisskey(NotifyBase): """ A wrapper for Misskey Notifications """ # The default descriptive name associated with the Notification service_name = 'Misskey' # The services URL service_url = 'https://misskey-hub.net/' # The default protocol protocol = 'misskey' # The default secure protocol secure_protocol = 'misskeys' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_misskey' # The title is not used title_maxlen = 0 # The maximum allowable characters allowed in the body per message body_maxlen = 512 # Define object templates templates = ( '{schema}://{project_id}/{msghook}', ) # Define object templates templates = ( '{schema}://{token}@{host}', '{schema}://{token}@{host}:{port}', ) # Define our template arguments # Define our template arguments template_tokens = dict(NotifyBase.template_tokens, **{ 'host': { 'name': _('Hostname'), 'type': 'string', 'required': True, }, 'token': { 'name': _('Access Token'), 'type': 'string', 'required': True, }, 'port': { 'name': _('Port'), 'type': 'int', 'min': 1, 'max': 65535, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'token': { 'alias_of': 'token', }, 'visibility': { 'name': _('Visibility'), 'type': 'choice:string', 'values': MISSKEY_VISIBILITIES, 'default': MisskeyVisibility.PUBLIC, }, }) def __init__(self, token=None, visibility=None, **kwargs): """ Initialize Misskey Object """ super().__init__(**kwargs) self.token = validate_regex(token) if not self.token: msg = 'An invalid Misskey Access Token was specified.' self.logger.warning(msg) raise TypeError(msg) if visibility: # Input is a string; attempt to get the lookup from our # sound mapping vis = 'invalid' if not isinstance(visibility, str) \ else visibility.lower().strip() # This little bit of black magic allows us to match against # against multiple versions of the same string ... etc self.visibility = \ next((v for v in MISSKEY_VISIBILITIES if v.startswith(vis)), None) if self.visibility not in MISSKEY_VISIBILITIES: msg = 'The Misskey visibility specified ({}) is invalid.' \ .format(visibility) self.logger.warning(msg) raise TypeError(msg) else: self.visibility = self.template_args['visibility']['default'] # Prepare our URL self.schema = 'https' if self.secure else 'http' self.api_url = '%s://%s' % (self.schema, self.host) if isinstance(self.port, int): self.api_url += ':%d' % self.port return @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol if self.secure else self.protocol, self.token, self.host, self.port, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
""" params = { 'visibility': self.visibility, } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) host = self.host if isinstance(self.port, int): host += ':%d' % self.port return '{schema}://{token}@{host}/?{params}'.format( schema=self.secure_protocol if self.secure else self.protocol, host=host, token=self.pprint(self.token, privacy, safe=''), params=NotifyMisskey.urlencode(params), ) def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ wrapper to _send since we can alert more then one channel """ # prepare our headers headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json', } # Prepare our payload payload = { 'i': self.token, 'text': body, 'visibility': self.visibility, } api_url = f'{self.api_url}/api/notes/create' self.logger.debug('Misskey GET URL: %s (cert_verify=%r)' % ( api_url, self.verify_certificate)) self.logger.debug('Misskey Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( api_url, headers=headers, data=dumps(payload), verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyMisskey.http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send Misskey notification: ' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.info('Sent Misskey notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Misskey ' 'notification.') self.logger.debug('Socket Exception: %s' % str(e)) # Return; we're done return False return True @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url) if not results: # We're done early as we couldn't load the results return results if 'token' in results['qsd'] and len(results['qsd']['token']): results['token'] = NotifyMisskey.unquote(results['qsd']['token']) elif not results['password'] and results['user']: results['token'] = NotifyMisskey.unquote(results['user']) # Capture visibility if specified if 'visibility' in results['qsd'] and \ len(results['qsd']['visibility']): results['visibility'] = \ NotifyMisskey.unquote(results['qsd']['visibility']) return results apprise-1.9.3/apprise/plugins/mqtt.py000066400000000000000000000502261477231770000176670ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # PAHO MQTT Documentation: # https://www.eclipse.org/paho/index.php?page=clients/python/docs/index.php # # Looking at the PAHO MQTT Source can help shed light on what's going on too # as their inline documentation is pretty good! # https://github.com/eclipse/paho.mqtt.python\ # /blob/master/src/paho/mqtt/client.py import ssl import re from time import sleep from datetime import datetime from os.path import isfile from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyType from ..utils.parse import parse_list, parse_bool from ..locale import gettext_lazy as _ # Default our global support flag NOTIFY_MQTT_SUPPORT_ENABLED = False try: # 3rd party modules import paho.mqtt.client as mqtt # We're good to go! NOTIFY_MQTT_SUPPORT_ENABLED = True MQTT_PROTOCOL_MAP = { # v3.1.1 "311": mqtt.MQTTv311, # v3.1 "31": mqtt.MQTTv31, # v5.0 "5": mqtt.MQTTv5, # v5.0 (alias) "50": mqtt.MQTTv5, } except ImportError: # No problem; we just simply can't support this plugin because we're # either using Linux, or simply do not have pywin32 installed. MQTT_PROTOCOL_MAP = {} # A lookup map for relaying version to user HUMAN_MQTT_PROTOCOL_MAP = { "v3.1.1": "311", "v3.1": "31", "v5.0": "5", } class NotifyMQTT(NotifyBase): """ A wrapper for MQTT Notifications """ # Set our global enabled flag enabled = NOTIFY_MQTT_SUPPORT_ENABLED requirements = { # Define our required packaging in order to work 'packages_required': 'paho-mqtt != 2.0.*' } # The default descriptive name associated with the Notification service_name = 'MQTT Notification' # The default protocol protocol = 'mqtt' # Secure protocol secure_protocol = 'mqtts' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_mqtt' # MQTT does not have a title title_maxlen = 0 # The maximum length a body can be set to body_maxlen = 268435455 # Use a throttle; but it doesn't need to be so strict since most # MQTT server hostings can handle the small bursts of packets and are # locally hosted anyway request_rate_per_sec = 0.5 # Port Defaults (unless otherwise specified) mqtt_insecure_port = 1883 # The default secure port to use (if mqtts://) mqtt_secure_port = 8883 # The default mqtt keepalive value mqtt_keepalive = 30 # The default mqtt transport mqtt_transport = "tcp" # The number of seconds to wait for a publish to occur at before # checking to see if it's been sent yet. mqtt_block_time_sec = 0.2 # Set the maximum number of messages with QoS>0 that can be part way # through their network flow at once. 
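    # (This value is applied to the underlying paho-mqtt client via
    # max_inflight_messages_set() when the client is constructed in
    # __init__ below.)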
mqtt_inflight_messages = 200 # Define object templates templates = ( '{schema}://{user}@{host}/{topic}', '{schema}://{user}@{host}:{port}/{topic}', '{schema}://{user}:{password}@{host}/{topic}', '{schema}://{user}:{password}@{host}:{port}/{topic}', ) template_tokens = dict(NotifyBase.template_tokens, **{ 'host': { 'name': _('Hostname'), 'type': 'string', 'required': True, }, 'port': { 'name': _('Port'), 'type': 'int', 'min': 1, 'max': 65535, }, 'user': { 'name': _('User Name'), 'type': 'string', 'required': True, }, 'password': { 'name': _('Password'), 'type': 'string', 'private': True, 'required': True, }, 'topic': { 'name': _('Target Queue'), 'type': 'string', 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'qos': { 'name': _('QOS'), 'type': 'int', 'default': 0, 'min': 0, 'max': 2, }, 'version': { 'name': _('Version'), 'type': 'choice:string', 'values': HUMAN_MQTT_PROTOCOL_MAP, 'default': "v3.1.1", }, 'client_id': { 'name': _('Client ID'), 'type': 'string', }, 'session': { 'name': _('Use Session'), 'type': 'bool', 'default': False, }, 'retain': { 'name': _('Retain Messages'), 'type': 'bool', 'default': False, }, }) def __init__(self, targets=None, version=None, qos=None, client_id=None, session=None, retain=None, **kwargs): """ Initialize MQTT Object """ super().__init__(**kwargs) # Initialize topics self.topics = parse_list(targets) if version is None: self.version = self.template_args['version']['default'] else: self.version = version # Save our client id if specified self.client_id = client_id # Maintain our session (associated with our user id if set) self.session = self.template_args['session']['default'] \ if session is None or not self.client_id \ else parse_bool(session) # Our Retain Message Flag self.retain = self.template_args['retain']['default'] \ if retain is None else parse_bool(retain) # Set up our Quality of Service (QoS) try: self.qos = self.template_args['qos']['default'] \ if qos is None else int(qos) if self.qos < self.template_args['qos']['min'] \ or self.qos > self.template_args['qos']['max']: # Let error get handle on exceptio higher up raise ValueError("") except (ValueError, TypeError): msg = 'An invalid MQTT QOS ({}) was specified.'.format(qos) self.logger.warning(msg) raise TypeError(msg) if not self.port: # Assign port (if not otherwise set) self.port = self.mqtt_secure_port \ if self.secure else self.mqtt_insecure_port self.ca_certs = None if self.secure: # verify SSL key or abort # TODO: There is no error reporting or aborting here? # It could be useful to inform the user _where_ Apprise # tried to find the root CA certificates file. 
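            # Pick the first CA bundle that actually exists on disk from our
            # list of well known locations (see CA_CERTIFICATE_FILE_LOCATIONS
            # below); if none is found, ca_certs stays None and secure
            # (mqtts://) delivery is aborted later in send().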
self.ca_certs = next( (cert for cert in self.CA_CERTIFICATE_FILE_LOCATIONS if isfile(cert)), None) # Set up our MQTT Publisher try: # Get our protocol self.mqtt_protocol = \ MQTT_PROTOCOL_MAP[re.sub(r'[^0-9]+', '', self.version)] except (KeyError): msg = 'An invalid MQTT Protocol version ' \ '({}) was specified.'.format(version) self.logger.warning(msg) raise TypeError(msg) # Our MQTT Client Object self.client = mqtt.Client( client_id=self.client_id, clean_session=not self.session, userdata=None, protocol=self.mqtt_protocol, transport=self.mqtt_transport, ) # Our maximum number of in-flight messages self.client.max_inflight_messages_set(self.mqtt_inflight_messages) # Toggled to False once our connection has been established at least # once self.__initial_connect = True def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform MQTT Notification """ if len(self.topics) == 0: # There were no services to notify self.logger.warning('There were no MQTT topics to notify.') return False # For logging: url = '{host}:{port}'.format(host=self.host, port=self.port) try: if self.__initial_connect: # Our initial connection if self.user: self.client.username_pw_set( self.user, password=self.password) if self.secure: if self.ca_certs is None: self.logger.error( 'MQTT secure communication can not be verified, ' 'CA certificates file missing') return False self.client.tls_set( ca_certs=self.ca_certs, certfile=None, keyfile=None, cert_reqs=ssl.CERT_REQUIRED, tls_version=ssl.PROTOCOL_TLS, ciphers=None) # Set our TLS Verify Flag self.client.tls_insecure_set(not self.verify_certificate) # Establish our connection if self.client.connect( self.host, port=self.port, keepalive=self.mqtt_keepalive) \ != mqtt.MQTT_ERR_SUCCESS: self.logger.warning( 'An MQTT connection could not be established for {}'. format(url)) return False # Start our client loop self.client.loop_start() # Throttle our start otherwise the starting handshaking doesnt # work. I'm not sure if this is a bug or not, but with qos=0, # and without this sleep(), the messages randomly fails to be # delivered. sleep(0.01) # Toggle our flag since we never need to enter this area again self.__initial_connect = False # Create a copy of the subreddits list topics = list(self.topics) has_error = False while len(topics) > 0 and not has_error: # Retrieve our subreddit topic = topics.pop() # For logging: url = '{host}:{port}/{topic}'.format( host=self.host, port=self.port, topic=topic) # Always call throttle before any remote server i/o is made self.throttle() # handle a re-connection if not self.client.is_connected() and \ self.client.reconnect() != mqtt.MQTT_ERR_SUCCESS: self.logger.warning( 'An MQTT connection could not be sustained for {}'. format(url)) has_error = True break # Some Debug Logging self.logger.debug('MQTT POST URL: {} (cert_verify={})'.format( url, self.verify_certificate)) self.logger.debug('MQTT Payload: %s' % str(body)) result = self.client.publish( topic, payload=body, qos=self.qos, retain=self.retain) if result.rc != mqtt.MQTT_ERR_SUCCESS: # Toggle our status self.logger.warning( 'An error (rc={}) occured when sending MQTT to {}'. format(result.rc, url)) has_error = True break elif not result.is_published(): self.logger.debug( 'Blocking until MQTT payload is published...') reference = datetime.now() while not has_error and not result.is_published(): # Throttle sleep(self.mqtt_block_time_sec) # Our own throttle so we can abort eventually.... 
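                        # Give up once socket_read_timeout seconds have
                        # elapsed since we first started blocking on this
                        # publish.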
elapsed = (datetime.now() - reference).total_seconds() if elapsed >= self.socket_read_timeout: self.logger.warning( 'The MQTT message could not be delivered') has_error = True # if we reach here; we're at the bottom of our loop # we loop around and do the next topic now except ConnectionError as e: self.logger.warning( 'MQTT Connection Error received from {}'.format(url)) self.logger.debug('Socket Exception: %s' % str(e)) return False except ssl.CertificateError as e: self.logger.warning( 'MQTT SSL Certificate Error received from {}'.format(url)) self.logger.debug('Socket Exception: %s' % str(e)) return False except ValueError as e: # ValueError's are thrown from publish() call if there is a problem self.logger.warning( 'MQTT Publishing error received: from {}'.format(url)) self.logger.debug('Socket Exception: %s' % str(e)) return False if not has_error: # Verbal notice self.logger.info('Sent MQTT notification') return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol if self.secure else self.protocol, self.user, self.password, self.host, self.port if self.port else ( self.mqtt_secure_port if self.secure else self.mqtt_insecure_port), self.fullpath.rstrip('/'), self.client_id, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'version': self.version, 'qos': str(self.qos), 'session': 'yes' if self.session else 'no', 'retain': 'yes' if self.retain else 'no', } if self.client_id: # Our client id is set if specified params['client_id'] = self.client_id # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Determine Authentication auth = '' if self.user and self.password: auth = '{user}:{password}@'.format( user=NotifyMQTT.quote(self.user, safe=''), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), ) elif self.user: auth = '{user}@'.format( user=NotifyMQTT.quote(self.user, safe=''), ) default_port = self.mqtt_secure_port \ if self.secure else self.mqtt_insecure_port return '{schema}://{auth}{hostname}{port}/{targets}?{params}'.format( schema=self.secure_protocol if self.secure else self.protocol, auth=auth, # never encode hostname since we're expecting it to be a valid one hostname=self.host, port='' if self.port is None or self.port == default_port else ':{}'.format(self.port), targets=','.join( [NotifyMQTT.quote(x, safe='/') for x in self.topics]), params=NotifyMQTT.urlencode(params), ) def __len__(self): """ Returns the number of targets associated with this notification """ return len(self.topics) @staticmethod def parse_url(url): """ There are no parameters nessisary for this protocol; simply having windows:// is all you need. This function just makes sure that is in place. 
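
        e.g. (broker hostname, credentials and topic are placeholders):
            mqtts://user:pass@broker.example.com/my/topic?qos=1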
""" results = NotifyBase.parse_url(url) if not results: # We're done early as we couldn't load the results return results try: # Acquire topic(s) results['targets'] = parse_list( NotifyMQTT.unquote(results['fullpath'].lstrip('/'))) except AttributeError: # No 'fullpath' specified results['targets'] = [] # The MQTT protocol version to use if 'version' in results['qsd'] and len(results['qsd']['version']): results['version'] = \ NotifyMQTT.unquote(results['qsd']['version']) # The MQTT Client ID if 'client_id' in results['qsd'] and len(results['qsd']['client_id']): results['client_id'] = \ NotifyMQTT.unquote(results['qsd']['client_id']) if 'session' in results['qsd'] and len(results['qsd']['session']): results['session'] = parse_bool(results['qsd']['session']) # Message Retain Flag if 'retain' in results['qsd'] and len(results['qsd']['retain']): results['retain'] = parse_bool(results['qsd']['retain']) # The MQTT Quality of Service to use if 'qos' in results['qsd'] and len(results['qsd']['qos']): results['qos'] = \ NotifyMQTT.unquote(results['qsd']['qos']) # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'].extend( NotifyMQTT.parse_list(results['qsd']['to'])) # return results return results @property def CA_CERTIFICATE_FILE_LOCATIONS(self): """ Return possible locations to root certificate authority (CA) bundles. Taken from https://golang.org/src/crypto/x509/root_linux.go TODO: Maybe refactor to a general utility function? """ candidates = [ # Debian/Ubuntu/Gentoo etc. "/etc/ssl/certs/ca-certificates.crt", # Fedora/RHEL 6 "/etc/pki/tls/certs/ca-bundle.crt", # OpenSUSE "/etc/ssl/ca-bundle.pem", # OpenELEC "/etc/pki/tls/cacert.pem", # CentOS/RHEL 7 "/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem", # macOS Homebrew; brew install ca-certificates "/usr/local/etc/ca-certificates/cert.pem", ] # Certifi provides Mozilla’s carefully curated collection of Root # Certificates for validating the trustworthiness of SSL certificates # while verifying the identity of TLS hosts. It has been extracted from # the Requests project. try: import certifi candidates.append(certifi.where()) except ImportError: # pragma: no cover pass return candidates apprise-1.9.3/apprise/plugins/msg91.py000066400000000000000000000312111477231770000176330ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # Create an account https://msg91.com/ if you don't already have one # # Get your (authkey) from the dashboard here: # - https://world.msg91.com/user/index.php#api # # Note: You will need to define a template for this to work # # Get details on the API used in this plugin here: # - https://docs.msg91.com/reference/send-sms import re import requests from json import dumps from .base import NotifyBase from ..common import NotifyType from ..utils.parse import ( is_phone_no, parse_phone_no, parse_bool, validate_regex) from ..locale import gettext_lazy as _ class MSG91PayloadField: """ Identifies the fields available in the JSON Payload """ BODY = 'body' MESSAGETYPE = 'type' # Add entries here that are reserved RESERVED_KEYWORDS = ('mobiles', ) class NotifyMSG91(NotifyBase): """ A wrapper for MSG91 Notifications """ # The default descriptive name associated with the Notification service_name = 'MSG91' # The services URL service_url = 'https://msg91.com' # The default protocol secure_protocol = 'msg91' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_msg91' # MSG91 uses the http protocol with JSON requests notify_url = 'https://control.msg91.com/api/v5/flow/' # The maximum length of the body body_maxlen = 160 # A title can not be used for SMS Messages. Setting this to zero will # cause any title (if defined) to get placed into the message body. 
title_maxlen = 0 # Our supported mappings and component keys component_key_re = re.compile( r'(?P((?P[a-z0-9_-])?|(?Pbody|type)))', re.IGNORECASE) # Define object templates templates = ( '{schema}://{template}@{authkey}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'template': { 'name': _('Template ID'), 'type': 'string', 'required': True, 'private': True, 'regex': (r'^[a-z0-9 _-]+$', 'i'), }, 'authkey': { 'name': _('Authentication Key'), 'type': 'string', 'required': True, 'private': True, 'regex': (r'^[a-z0-9]+$', 'i'), }, 'target_phone': { 'name': _('Target Phone No'), 'type': 'string', 'prefix': '+', 'regex': (r'^[0-9\s)(+-]+$', 'i'), 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', 'required': True, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'short_url': { 'name': _('Short URL'), 'type': 'bool', 'default': False, }, }) # Define any kwargs we're using template_kwargs = { 'template_mapping': { 'name': _('Template Mapping'), 'prefix': ':', }, } def __init__(self, template, authkey, targets=None, short_url=None, template_mapping=None, **kwargs): """ Initialize MSG91 Object """ super().__init__(**kwargs) # Authentication Key (associated with project) self.authkey = validate_regex( authkey, *self.template_tokens['authkey']['regex']) if not self.authkey: msg = 'An invalid MSG91 Authentication Key ' \ '({}) was specified.'.format(authkey) self.logger.warning(msg) raise TypeError(msg) # Template ID self.template = validate_regex( template, *self.template_tokens['template']['regex']) if not self.template: msg = 'An invalid MSG91 Template ID ' \ '({}) was specified.'.format(template) self.logger.warning(msg) raise TypeError(msg) if short_url is None: self.short_url = self.template_args['short_url']['default'] else: self.short_url = parse_bool(short_url) # Parse our targets self.targets = list() for target in parse_phone_no(targets): # Validate targets and drop bad ones: result = is_phone_no(target) if not result: self.logger.warning( 'Dropped invalid phone # ' '({}) specified.'.format(target), ) continue # store valid phone number self.targets.append(result['full']) self.template_mapping = {} if template_mapping: # Store our extra payload entries self.template_mapping.update(template_mapping) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform MSG91 Notification """ if len(self.targets) == 0: # There were no services to notify self.logger.warning('There were no MSG91 targets to notify.') return False # Prepare our headers headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json', 'authkey': self.authkey, } # Base recipient_payload = { 'mobiles': None, # Keyword Tokens MSG91PayloadField.BODY: body, MSG91PayloadField.MESSAGETYPE: notify_type, } # Prepare Recipient Payload Object for key, value in self.template_mapping.items(): if key in RESERVED_KEYWORDS: self.logger.warning( 'Ignoring MSG91 custom payload entry %s', key) continue if key in recipient_payload: if not value: # Do not store element in payload response del recipient_payload[key] else: # Re-map recipient_payload[value] = recipient_payload[key] del recipient_payload[key] else: # Append entry recipient_payload[key] = value # Prepare our recipients recipients = [] for target in self.targets: recipient = recipient_payload.copy() recipient['mobiles'] = target recipients.append(recipient) # Prepare our payload payload = { 
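            # MSG91 Flow API payload: the pre-approved 'template_id' to
            # render, plus one recipient entry (assembled above) for each
            # target phone number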
'template_id': self.template, 'short_url': 1 if self.short_url else 0, # target phone numbers are sent with a comma delimiter 'recipients': recipients, } # Some Debug Logging self.logger.debug('MSG91 POST URL: {} (cert_verify={})'.format( self.notify_url, self.verify_certificate)) self.logger.debug('MSG91 Payload: {}' .format(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( self.notify_url, data=dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyMSG91.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to send MSG91 notification to {}: ' '{}{}error={}.'.format( ','.join(self.targets), status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) return False else: self.logger.info( 'Sent MSG91 notification to %s.' % ','.join(self.targets)) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending MSG91:%s ' 'notification.' % ','.join(self.targets) ) self.logger.debug('Socket Exception: %s' % str(e)) return False return True @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.template, self.authkey) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'short_url': str(self.short_url), } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Payload body extras prefixed with a ':' sign # Append our payload extras into our parameters params.update( {':{}'.format(k): v for k, v in self.template_mapping.items()}) return '{schema}://{template}@{authkey}/{targets}/?{params}'.format( schema=self.secure_protocol, template=self.pprint(self.template, privacy, safe=''), authkey=self.pprint(self.authkey, privacy, safe=''), targets='/'.join( [NotifyMSG91.quote(x, safe='') for x in self.targets]), params=NotifyMSG91.urlencode(params)) def __len__(self): """ Returns the number of targets associated with this notification """ targets = len(self.targets) return targets if targets > 0 else 1 @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. 
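
        e.g. (template id, auth key and phone number are placeholders;
        ':'-prefixed parameters map template variables):
            msg91://TemplateID@AuthKey/15551234567/?:name=Apprise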
""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Get our entries; split_path() looks after unquoting content for us # by default results['targets'] = NotifyMSG91.split_path(results['fullpath']) # The hostname is our authentication key results['authkey'] = NotifyMSG91.unquote(results['host']) # The template id is kept in the user field results['template'] = NotifyMSG91.unquote(results['user']) if 'short_url' in results['qsd'] and len(results['qsd']['short_url']): results['short_url'] = parse_bool(results['qsd']['short_url']) # Support the 'to' variable so that we can support targets this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyMSG91.parse_phone_no(results['qsd']['to']) # store any additional payload extra's defined results['template_mapping'] = { NotifyMSG91.unquote(x): NotifyMSG91.unquote(y) for x, y in results['qsd:'].items() } return results apprise-1.9.3/apprise/plugins/msteams.py000066400000000000000000000640351477231770000203560ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # To use this plugin, you need to create a webhook; you can read more about # this here: # https://dev.outlook.com/Connectors/\ # GetStarted#creating-messages-through-office-365-connectors-\ # in-microsoft-teams # # More details are here on API Construction: # https://docs.microsoft.com/en-ca/outlook/actionable-messages/\ # message-card-reference # # I personally created a free account at teams.microsoft.com and then # went to the store (bottom left hand side of slack like interface). # # From here you can search for 'Incoming Webhook'. Once you click on it, # you can associate the webhook with your team. At this point, you can # optionally also assign it a name, an avatar. Finally you'll have to # assign it a channel it will notify. 
# # When you've completed this, it will generate you a (webhook) URL that # looks like: # https://team-name.webhook.office.com/webhookb2/ \ # abcdefgf8-2f4b-4eca-8f61-225c83db1967@abcdefg2-5a99-4849-8efc-\ # c9e78d28e57d/IncomingWebhook/291289f63a8abd3593e834af4d79f9fe/\ # a2329f43-0ffb-46ab-948b-c9abdad9d643 # # Yes... The URL is that big... But it looks like this (greatly simplified): # https://TEAM-NAME.webhook.office.com/webhookb2/ABCD/IncomingWebhook/DEFG/HIJK # ^ ^ ^ ^ # | | | | # These are important <--------------------------^--------------------^----^ # # The Legacy format didn't have the team name identified and reads 'outlook' # While this still works, consider that Microsoft will be dropping support # for this soon, so you may need to update your IncomingWebhook. Here is # what a legacy URL looked like: # https://outlook.office.com/webhook/ABCD/IncomingWebhook/DEFG/HIJK # ^ ^ ^ ^ # | | | | # legacy team reference: 'outlook' | | | # | | | # These are important <--------------^--------------------^----^ # # You'll notice that the first token is actually 2 separated by an @ symbol # But lets just ignore that and assume it's one great big token instead. # # These 3 tokens need to be placed in the URL after the Team # msteams://TEAM/ABCD/DEFG/HIJK # import re import requests import json from json.decoder import JSONDecodeError from .base import NotifyBase from ..common import NotifyImageSize from ..common import NotifyType from ..common import NotifyFormat from ..utils.parse import parse_bool, validate_regex from ..utils.templates import apply_template, TemplateType from ..apprise_attachment import AppriseAttachment from ..locale import gettext_lazy as _ class NotifyMSTeams(NotifyBase): """ A wrapper for Microsoft Teams Notifications """ # The default descriptive name associated with the Notification service_name = 'MSTeams' # The services URL service_url = 'https://teams.micrsoft.com/' # The default secure protocol secure_protocol = 'msteams' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_msteams' # MSTeams uses the http protocol with JSON requests notify_url_v1 = 'https://outlook.office.com/webhook/' \ '{token_a}/IncomingWebhook/{token_b}/{token_c}' # New MSTeams webhook (as of April 11th, 2021) notify_url_v2 = 'https://{team}.webhook.office.com/webhookb2/' \ '{token_a}/IncomingWebhook/{token_b}/{token_c}' notify_url_v3 = 'https://{team}.webhook.office.com/webhookb2/' \ '{token_a}/IncomingWebhook/{token_b}/{token_c}/{token_d}' # Allows the user to specify the NotifyImageSize object image_size = NotifyImageSize.XY_72 # The maximum allowable characters allowed in the body per message body_maxlen = 1000 # Default Notification Format notify_format = NotifyFormat.MARKDOWN # There is no reason we should exceed 35KB when reading in a JSON file. # If it is more than this, then it is not accepted max_msteams_template_size = 35000 # Define object templates templates = ( # New required format '{schema}://{team}/{token_a}/{token_b}/{token_c}', # Deprecated '{schema}://{token_a}/{token_b}/{token_c}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ # The Microsoft Team Name 'team': { 'name': _('Team Name'), 'type': 'string', 'required': True, 'regex': (r'^[A-Z0-9_-]+$', 'i'), }, # Token required as part of the API request # /AAAAAAAAA@AAAAAAAAA/........./......... 
'token_a': { 'name': _('Token A'), 'type': 'string', 'private': True, 'required': True, 'regex': (r'^[A-Z0-9-]+@[A-Z0-9-]+$', 'i'), }, # Token required as part of the API request # /................../BBBBBBBBB/.......... 'token_b': { 'name': _('Token B'), 'type': 'string', 'private': True, 'required': True, 'regex': (r'^[a-z0-9]+$', 'i'), }, # Token required as part of the API request # /........./........./CCCCCCCCCCCCCCCCCCCCCCCC 'token_c': { 'name': _('Token C'), 'type': 'string', 'private': True, 'required': True, 'regex': (r'^[a-z0-9-]+$', 'i'), }, # Token required as part of the API request # /........./........./........./DDDDDDDDDDDDDDDDD 'token_d': { 'name': _('Token D'), 'type': 'string', 'private': True, 'required': False, 'regex': (r'^V2[a-zA-Z0-9-_]+$', 'i'), }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'image': { 'name': _('Include Image'), 'type': 'bool', 'default': False, 'map_to': 'include_image', }, 'version': { 'name': _('Version'), 'type': 'choice:int', 'values': (1, 2, 3), 'default': 2, }, 'template': { 'name': _('Template Path'), 'type': 'string', 'private': True, }, }) # Define our token control template_kwargs = { 'tokens': { 'name': _('Template Tokens'), 'prefix': ':', }, } def __init__(self, token_a, token_b, token_c, token_d=None, team=None, version=None, include_image=True, template=None, tokens=None, **kwargs): """ Initialize Microsoft Teams Object You can optional specify a template and identify arguments you wish to populate your template with when posting. Some reserved template arguments that can not be over-ridden are: `body`, `title`, and `type`. """ super().__init__(**kwargs) try: self.version = int(version) except TypeError: # None was specified... take on default self.version = self.template_args['version']['default'] except ValueError: # invalid content was provided; let this get caught in the next # validation check for the version self.version = None if self.version not in self.template_args['version']['values']: msg = 'An invalid MSTeams Version ' \ '({}) was specified.'.format(version) self.logger.warning(msg) raise TypeError(msg) self.team = validate_regex(team) if not self.team: NotifyBase.logger.deprecate( "Apprise requires you to identify your Microsoft Team name as " "part of the URL. 
e.g.: " "msteams://TEAM-NAME/{token_a}/{token_b}/{token_c}") # Fallback self.team = 'outlook' self.token_a = validate_regex( token_a, *self.template_tokens['token_a']['regex']) if not self.token_a: msg = 'An invalid MSTeams (first) Token ' \ '({}) was specified.'.format(token_a) self.logger.warning(msg) raise TypeError(msg) self.token_b = validate_regex( token_b, *self.template_tokens['token_b']['regex']) if not self.token_b: msg = 'An invalid MSTeams (second) Token ' \ '({}) was specified.'.format(token_b) self.logger.warning(msg) raise TypeError(msg) self.token_c = validate_regex( token_c, *self.template_tokens['token_c']['regex']) if not self.token_c: msg = 'An invalid MSTeams (third) Token ' \ '({}) was specified.'.format(token_c) self.logger.warning(msg) raise TypeError(msg) self.token_d = validate_regex( token_d, *self.template_tokens['token_d']['regex']) # Place a thumbnail image inline with the message body self.include_image = include_image # Our template object is just an AppriseAttachment object self.template = AppriseAttachment(asset=self.asset) if template: # Add our definition to our template self.template.add(template) # Enforce maximum file size self.template[0].max_file_size = self.max_msteams_template_size # Template functionality self.tokens = {} if isinstance(tokens, dict): self.tokens.update(tokens) elif tokens: msg = 'The specified MSTeams Template Tokens ' \ '({}) are not identified as a dictionary.'.format(tokens) self.logger.warning(msg) raise TypeError(msg) self.logger.deprecate( "Microsoft is deprecating their MSTeams webhooks on " "December 31, 2025. It is advised that you switch to " "Microsoft Power Automate (already supported by Apprise as " "workflows://. For more information visit: " "https://github.com/caronc/apprise/wiki/Notify_workflows") def gen_payload(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ This function generates our payload whether it be the generic one Apprise generates by default, or one provided by a specified external template. 
""" # Acquire our to-be footer icon if configured to do so image_url = None if not self.include_image \ else self.image_url(notify_type) if not self.template: # By default we use a generic working payload if there was # no template specified payload = { "@type": "MessageCard", "@context": "https://schema.org/extensions", "summary": self.app_desc, "themeColor": self.color(notify_type), "sections": [ { "activityImage": None, "activityTitle": title, "text": body, }, ] } if image_url: payload['sections'][0]['activityImage'] = image_url return payload # If our code reaches here, then we generate ourselves the payload template = self.template[0] if not template: # We could not access the attachment self.logger.error( 'Could not access MSTeam template {}.'.format( template.url(privacy=True))) return False # Take a copy of our token dictionary tokens = self.tokens.copy() # Apply some defaults template values tokens['app_body'] = body tokens['app_title'] = title tokens['app_type'] = notify_type tokens['app_id'] = self.app_id tokens['app_desc'] = self.app_desc tokens['app_color'] = self.color(notify_type) tokens['app_image_url'] = image_url tokens['app_url'] = self.app_url # Enforce Application mode tokens['app_mode'] = TemplateType.JSON try: with open(template.path, 'r') as fp: content = json.loads(apply_template(fp.read(), **tokens)) except (OSError, IOError): self.logger.error( 'MSTeam template {} could not be read.'.format( template.url(privacy=True))) return None except JSONDecodeError as e: self.logger.error( 'MSTeam template {} contains invalid JSON.'.format( template.url(privacy=True))) self.logger.debug('JSONDecodeError: {}'.format(e)) return None # Load our JSON data (if valid) has_error = False if '@type' not in content: self.logger.error( 'MSTeam template {} is missing @type kwarg.'.format( template.url(privacy=True))) has_error = True if '@context' not in content: self.logger.error( 'MSTeam template {} is missing @context kwarg.'.format( template.url(privacy=True))) has_error = True return content if not has_error else None def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Microsoft Teams Notification """ headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json', } if self.version == 1: notify_url = self.notify_url_v1.format( token_a=self.token_a, token_b=self.token_b, token_c=self.token_c) if self.version == 2: notify_url = self.notify_url_v2.format( team=self.team, token_a=self.token_a, token_b=self.token_b, token_c=self.token_c, ) if self.version == 3: notify_url = self.notify_url_v3.format( team=self.team, token_a=self.token_a, token_b=self.token_b, token_c=self.token_c, token_d=self.token_d, ) # Generate our payload if it's possible payload = self.gen_payload( body=body, title=title, notify_type=notify_type, **kwargs) if not payload: # No need to present a reason; that will come from the # gen_payload() function itself return False self.logger.debug('MSTeams POST URL: %s (cert_verify=%r)' % ( notify_url, self.verify_certificate, )) self.logger.debug('MSTeams Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( notify_url, data=json.dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyMSTeams.http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send MSTeams notification: ' '{}{}error={}.'.format( status_str, ', ' if 
status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # We failed return False else: self.logger.info('Sent MSTeams notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending MSTeams notification.') self.logger.debug('Socket Exception: %s' % str(e)) # We failed return False return True @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol, self.team if self.version > 1 else None, self.token_a, self.token_b, self.token_c, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'image': 'yes' if self.include_image else 'no', } if self.version != self.template_args['version']['default']: params['version'] = str(self.version) if self.template: params['template'] = NotifyMSTeams.quote( self.template[0].url(), safe='') # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Store any template entries if specified params.update({':{}'.format(k): v for k, v in self.tokens.items()}) result = None if self.version == 1: result = '{schema}://{token_a}/{token_b}/{token_c}/'\ '?{params}'.format( schema=self.secure_protocol, token_a=self.pprint(self.token_a, privacy, safe='@'), token_b=self.pprint(self.token_b, privacy, safe=''), token_c=self.pprint(self.token_c, privacy, safe=''), params=NotifyMSTeams.urlencode(params), ) if self.version == 2: result = '{schema}://{team}/{token_a}/{token_b}/{token_c}/'\ '?{params}'.format( schema=self.secure_protocol, team=NotifyMSTeams.quote(self.team, safe=''), token_a=self.pprint(self.token_a, privacy, safe=''), token_b=self.pprint(self.token_b, privacy, safe=''), token_c=self.pprint(self.token_c, privacy, safe=''), params=NotifyMSTeams.urlencode(params), ) if self.version == 3: result = '{schema}://{team}/{token_a}/{token_b}/{token_c}/'\ '{token_d}/?{params}'.format( schema=self.secure_protocol, team=NotifyMSTeams.quote(self.team, safe=''), token_a=self.pprint(self.token_a, privacy, safe=''), token_b=self.pprint(self.token_b, privacy, safe=''), token_c=self.pprint(self.token_c, privacy, safe=''), token_d=self.pprint(self.token_d, privacy, safe=''), params=NotifyMSTeams.urlencode(params), ) return result @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. 
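
        e.g. (greatly simplified; team name and tokens are placeholders):
            msteams://TEAM-NAME/ABCD/DEFG/HIJK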
""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Get unquoted entries entries = NotifyMSTeams.split_path(results['fullpath']) # Deprecated mode (backwards compatibility) if results.get('user'): # If a user was found, it's because it's still part of the first # token, so we concatinate them results['token_a'] = '{}@{}'.format( NotifyMSTeams.unquote(results['user']), NotifyMSTeams.unquote(results['host']), ) else: # Get the Team from the hostname results['team'] = NotifyMSTeams.unquote(results['host']) # Get the token from the path results['token_a'] = None if not entries \ else NotifyMSTeams.unquote(entries.pop(0)) results['token_b'] = None if not entries \ else NotifyMSTeams.unquote(entries.pop(0)) results['token_c'] = None if not entries \ else NotifyMSTeams.unquote(entries.pop(0)) results['token_d'] = None if not entries \ else NotifyMSTeams.unquote(entries.pop(0)) # Get Image results['include_image'] = \ parse_bool(results['qsd'].get('image', True)) # Get Team name if defined if 'team' in results['qsd'] and results['qsd']['team']: results['team'] = \ NotifyMSTeams.unquote(results['qsd']['team']) # Template Handling if 'template' in results['qsd'] and results['qsd']['template']: results['template'] = \ NotifyMSTeams.unquote(results['qsd']['template']) # Override version if defined if 'version' in results['qsd'] and results['qsd']['version']: results['version'] = \ NotifyMSTeams.unquote(results['qsd']['version']) else: version = 1 if results.get('team'): version = 2 if results.get('token_d'): version = 3 # Set our version if not otherwise set results['version'] = version # Store our tokens results['tokens'] = results['qsd:'] return results @staticmethod def parse_native_url(url): """ Legacy Support: https://outlook.office.com/webhook/ABCD/IncomingWebhook/DEFG/HIJK New Hook Support: https://team-name.office.com/webhook/ABCD/IncomingWebhook/DEFG/HIJK Newer Hook Support: https://team-name.office.com/webhook/ABCD/IncomingWebhook/DEFG/HIJK/V2LMNOP """ # We don't need to do incredibly details token matching as the purpose # of this is just to detect that were dealing with an msteams url # token parsing will occur once we initialize the function result = re.match( r'^https?://(?P[^.]+)(?P\.webhook)?\.office\.com/' r'webhook(?Pb2)?/' r'(?P[A-Z0-9-]+@[A-Z0-9-]+)/' r'IncomingWebhook/' r'(?P[A-Z0-9]+)/' r'(?P[A-Z0-9-]+)/' r'(?PV2[A-Z0-9-_]+)/?' r'(?P\?.+)?$', url, re.I) if result: # Version 3 URL return NotifyMSTeams.parse_url( '{schema}://{team}/{token_a}/{token_b}/{token_c}/{token_d}' '/{params}'.format( schema=NotifyMSTeams.secure_protocol, team=result.group('team'), token_a=result.group('token_a'), token_b=result.group('token_b'), token_c=result.group('token_c'), token_d=result.group('token_d'), params='' if not result.group('params') else result.group('params'))) result = re.match( r'^https?://(?P[^.]+)(?P\.webhook)?\.office\.com/' r'webhook(?Pb2)?/' r'(?P[A-Z0-9-]+@[A-Z0-9-]+)/' r'IncomingWebhook/' r'(?P[A-Z0-9]+)/' r'(?P[A-Z0-9-]+)/?' 
r'(?P\?.+)?$', url, re.I) if result: if result.group('v2a'): # Version 2 URL return NotifyMSTeams.parse_url( '{schema}://{team}/{token_a}/{token_b}/{token_c}' '/{params}'.format( schema=NotifyMSTeams.secure_protocol, team=result.group('team'), token_a=result.group('token_a'), token_b=result.group('token_b'), token_c=result.group('token_c'), params='' if not result.group('params') else result.group('params'))) else: # Version 1 URLs # team is also set to 'outlook' in this case return NotifyMSTeams.parse_url( '{schema}://{token_a}/{token_b}/{token_c}' '/{params}'.format( schema=NotifyMSTeams.secure_protocol, token_a=result.group('token_a'), token_b=result.group('token_b'), token_c=result.group('token_c'), params='' if not result.group('params') else result.group('params'))) return None apprise-1.9.3/apprise/plugins/nextcloud.py000066400000000000000000000315161477231770000207100ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import requests from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyType from ..utils.parse import parse_list from ..locale import gettext_lazy as _ class NotifyNextcloud(NotifyBase): """ A wrapper for Nextcloud Notifications """ # The default descriptive name associated with the Notification service_name = 'Nextcloud' # The services URL service_url = 'https://nextcloud.com/' # Insecure protocol (for those self hosted requests) protocol = 'ncloud' # The default protocol (this is secure for notica) secure_protocol = 'nclouds' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_nextcloud' # Nextcloud title length title_maxlen = 255 # Defines the maximum allowable characters per message. 
body_maxlen = 4000 # Define object templates templates = ( '{schema}://{host}/{targets}', '{schema}://{host}:{port}/{targets}', '{schema}://{user}:{password}@{host}/{targets}', '{schema}://{user}:{password}@{host}:{port}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'host': { 'name': _('Hostname'), 'type': 'string', 'required': True, }, 'port': { 'name': _('Port'), 'type': 'int', 'min': 1, 'max': 65535, }, 'user': { 'name': _('Username'), 'type': 'string', }, 'password': { 'name': _('Password'), 'type': 'string', 'private': True, }, 'target_user': { 'name': _('Target User'), 'type': 'string', 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', 'required': True, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ # Nextcloud uses different API end points depending on the version # being used however the (API) payload remains the same. Allow users # to specify the version they are using: 'version': { 'name': _('Version'), 'type': 'int', 'min': 1, 'default': 21, }, 'url_prefix': { 'name': _('URL Prefix'), 'type': 'string', }, 'to': { 'alias_of': 'targets', }, }) # Define any kwargs we're using template_kwargs = { 'headers': { 'name': _('HTTP Header'), 'prefix': '+', }, } def __init__(self, targets=None, version=None, headers=None, url_prefix=None, **kwargs): """ Initialize Nextcloud Object """ super().__init__(**kwargs) # Store our targets self.targets = parse_list(targets) self.version = self.template_args['version']['default'] if version is not None: try: self.version = int(version) if self.version < self.template_args['version']['min']: # Let upper exception handle this raise ValueError() except (ValueError, TypeError): msg = 'At invalid Nextcloud version ({}) was specified.'\ .format(version) self.logger.warning(msg) raise TypeError(msg) # Support URL Prefix self.url_prefix = '' if not url_prefix \ else url_prefix.strip('/') self.headers = {} if headers: # Store our extra headers self.headers.update(headers) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Nextcloud Notification """ if len(self.targets) == 0: # There were no services to notify self.logger.warning('There were no Nextcloud targets to notify.') return False # Prepare our Header headers = { 'User-Agent': self.app_id, 'OCS-APIREQUEST': 'true', } # Apply any/all header over-rides defined headers.update(self.headers) # error tracking (used for function return) has_error = False # Create a copy of the targets list targets = list(self.targets) while len(targets): target = targets.pop(0) # Prepare our Payload payload = { 'shortMessage': title if title else self.app_desc, } if body: # Only store the longMessage if a body was defined; nextcloud # doesn't take kindly to empty longMessage entries. 
payload['longMessage'] = body auth = None if self.user: auth = (self.user, self.password) # Nextcloud URL based on version used notify_url = '{schema}://{host}/{url_prefix}/ocs/v2.php/'\ 'apps/admin_notifications/' \ 'api/v1/notifications/{target}' \ if self.version < 21 else \ '{schema}://{host}/{url_prefix}/ocs/v2.php/'\ 'apps/notifications/'\ 'api/v2/admin_notifications/{target}' notify_url = notify_url.format( schema='https' if self.secure else 'http', host=self.host if not isinstance(self.port, int) else '{}:{}'.format(self.host, self.port), url_prefix=self.url_prefix, target=target, ) self.logger.debug( 'Nextcloud v%d POST URL: %s (cert_verify=%r)', self.version, notify_url, self.verify_certificate) self.logger.debug( 'Nextcloud v%d Payload: %s', self.version, str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( notify_url, data=payload, headers=headers, auth=auth, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyNextcloud.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to send Nextcloud v{} notification:' '{}{}error={}.'.format( self.version, status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # track our failure has_error = True continue else: self.logger.info( 'Sent Nextcloud %d notification.', self.version) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Nextcloud v%d' 'notification.', self.version) self.logger.debug('Socket Exception: %s' % str(e)) # track our failure has_error = True continue return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol if self.secure else self.protocol, self.user, self.password, self.host, self.port, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
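
        e.g. (hostname, credentials and target user are placeholders):
            nclouds://admin:secret@cloud.example.com/notify_user?version=21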
""" # Create URL parameters from our headers params = {'+{}'.format(k): v for k, v in self.headers.items()} # Set our version params['version'] = str(self.version) if self.url_prefix: params['url_prefix'] = self.url_prefix # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Determine Authentication auth = '' if self.user and self.password: auth = '{user}:{password}@'.format( user=NotifyNextcloud.quote(self.user, safe=''), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), ) elif self.user: auth = '{user}@'.format( user=NotifyNextcloud.quote(self.user, safe=''), ) default_port = 443 if self.secure else 80 return '{schema}://{auth}{hostname}{port}/{targets}?{params}' \ .format( schema=self.secure_protocol if self.secure else self.protocol, auth=auth, # never encode hostname since we're expecting it to be a # valid one hostname=self.host, port='' if self.port is None or self.port == default_port else ':{}'.format(self.port), targets='/'.join([NotifyNextcloud.quote(x) for x in self.targets]), params=NotifyNextcloud.urlencode(params), ) def __len__(self): """ Returns the number of targets associated with this notification """ targets = len(self.targets) return targets if targets else 1 @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url) if not results: # We're done early as we couldn't load the results return results # Fetch our targets results['targets'] = \ NotifyNextcloud.split_path(results['fullpath']) # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyNextcloud.parse_list(results['qsd']['to']) # Allow users to over-ride the Nextcloud version being used if 'version' in results['qsd'] and len(results['qsd']['version']): results['version'] = \ NotifyNextcloud.unquote(results['qsd']['version']) # Support URL Prefixes if 'url_prefix' in results['qsd'] \ and len(results['qsd']['url_prefix']): results['url_prefix'] = \ NotifyNextcloud.unquote(results['qsd']['url_prefix']) # Add our headers that the user can potentially over-ride if they wish # to to our returned result set and tidy entries by unquoting them results['headers'] = { NotifyNextcloud.unquote(x): NotifyNextcloud.unquote(y) for x, y in results['qsd+'].items()} return results apprise-1.9.3/apprise/plugins/nextcloudtalk.py000066400000000000000000000262071477231770000215650ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import requests from json import dumps from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyType from ..utils.parse import parse_list from ..locale import gettext_lazy as _ class NotifyNextcloudTalk(NotifyBase): """ A wrapper for Nextcloud Talk Notifications """ # The default descriptive name associated with the Notification service_name = _('Nextcloud Talk') # The services URL service_url = 'https://nextcloud.com/talk' # Insecure protocol (for those self hosted requests) protocol = 'nctalk' # The default protocol (this is secure for notica) secure_protocol = 'nctalks' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_nextcloudtalk' # Nextcloud title length title_maxlen = 255 # Defines the maximum allowable characters per message. body_maxlen = 4000 # Define object templates templates = ( '{schema}://{user}:{password}@{host}/{targets}', '{schema}://{user}:{password}@{host}:{port}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'host': { 'name': _('Hostname'), 'type': 'string', 'required': True, }, 'port': { 'name': _('Port'), 'type': 'int', 'min': 1, 'max': 65535, }, 'user': { 'name': _('Username'), 'type': 'string', 'required': True, }, 'password': { 'name': _('Password'), 'type': 'string', 'private': True, 'required': True, }, 'target_room_id': { 'name': _('Room ID'), 'type': 'string', 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', 'required': True, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'url_prefix': { 'name': _('URL Prefix'), 'type': 'string', }, }) # Define any kwargs we're using template_kwargs = { 'headers': { 'name': _('HTTP Header'), 'prefix': '+', }, } def __init__(self, targets=None, headers=None, url_prefix=None, **kwargs): """ Initialize Nextcloud Talk Object """ super().__init__(**kwargs) if self.user is None or self.password is None: msg = 'A NextCloudTalk User and Password must be specified.' 
self.logger.warning(msg) raise TypeError(msg) # Store our targets self.targets = parse_list(targets) # Support URL Prefix self.url_prefix = '' if not url_prefix \ else url_prefix.strip('/') self.headers = {} if headers: # Store our extra headers self.headers.update(headers) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Nextcloud Talk Notification """ if len(self.targets) == 0: # There were no services to notify self.logger.warning( 'There were no Nextcloud Talk targets to notify.') return False # Prepare our Header headers = { 'User-Agent': self.app_id, 'OCS-APIRequest': 'true', 'Accept': 'application/json', 'Content-Type': 'application/json', } # Apply any/all header over-rides defined headers.update(self.headers) # error tracking (used for function return) has_error = False # Create a copy of the targets list targets = list(self.targets) while len(targets): target = targets.pop(0) # Prepare our Payload if not body: payload = { 'message': title if title else self.app_desc, } else: payload = { 'message': title + '\r\n' + body if title else self.app_desc + '\r\n' + body, } # Nextcloud Talk URL notify_url = '{schema}://{host}/{url_prefix}'\ '/ocs/v2.php/apps/spreed/api/v1/chat/{target}' notify_url = notify_url.format( schema='https' if self.secure else 'http', host=self.host if not isinstance(self.port, int) else '{}:{}'.format(self.host, self.port), url_prefix=self.url_prefix, target=target, ) self.logger.debug( 'Nextcloud Talk POST URL: %s (cert_verify=%r)', notify_url, self.verify_certificate) self.logger.debug( 'Nextcloud Talk Payload: %s', str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( notify_url, data=dumps(payload), headers=headers, auth=(self.user, self.password), verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code not in ( requests.codes.created, requests.codes.ok): # We had a problem status_str = \ NotifyNextcloudTalk.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to send Nextcloud Talk notification:' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # track our failure has_error = True continue else: self.logger.info( 'Sent Nextcloud Talk notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Nextcloud Talk ' 'notification.') self.logger.debug('Socket Exception: %s' % str(e)) # track our failure has_error = True continue return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol if self.secure else self.protocol, self.user, self.password, self.host, self.port, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
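For example, a rendered URL might look roughly like the following
        (illustrative placeholders only; 'nctalks' is the secure scheme
        declared above, the path entry is a Talk room ID, and the remaining
        query parameters are omitted):

            nctalks://botuser:secret@cloud.example.com/roomtoken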
""" # Our default set of parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) # Append our headers into our parameters params.update({'+{}'.format(k): v for k, v in self.headers.items()}) if self.url_prefix: params['url_prefix'] = self.url_prefix # Determine Authentication auth = '{user}:{password}@'.format( user=NotifyNextcloudTalk.quote(self.user, safe=''), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), ) default_port = 443 if self.secure else 80 return '{schema}://{auth}{hostname}{port}/{targets}?{params}' \ .format( schema=self.secure_protocol if self.secure else self.protocol, auth=auth, # never encode hostname since we're expecting it to be a # valid one hostname=self.host, port='' if self.port is None or self.port == default_port else ':{}'.format(self.port), targets='/'.join([NotifyNextcloudTalk.quote(x) for x in self.targets]), params=NotifyNextcloudTalk.urlencode(params), ) def __len__(self): """ Returns the number of targets associated with this notification """ targets = len(self.targets) return targets if targets else 1 @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url) if not results: # We're done early as we couldn't load the results return results # Fetch our targets results['targets'] = \ NotifyNextcloudTalk.split_path(results['fullpath']) # Support URL Prefixes if 'url_prefix' in results['qsd'] \ and len(results['qsd']['url_prefix']): results['url_prefix'] = \ NotifyNextcloudTalk.unquote(results['qsd']['url_prefix']) # Add our headers that the user can potentially over-ride if they wish # to to our returned result set and tidy entries by unquoting them results['headers'] = { NotifyNextcloudTalk.unquote(x): NotifyNextcloudTalk.unquote(y) for x, y in results['qsd+'].items()} return results apprise-1.9.3/apprise/plugins/notica.py000066400000000000000000000321301477231770000201510ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # 1. Simply visit https://notica.us # 2. 
You'll be provided a new variation of the website which will look # something like: https://notica.us/?abc123. # ^ # | # token # # Your token is actually abc123 (do not include/grab the question mark) # You can use that URL as is directly in Apprise, or you can follow # the next step which shows you how to assemble the Apprise URL: # # 3. With respect to the above, your apprise URL would be: # notica://abc123 # import re import requests from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyType from ..utils.parse import validate_regex from ..locale import gettext_lazy as _ class NoticaMode: """ Tracks if we're accessing the notica upstream server or a locally hosted one. """ # We're dealing with a self hosted service SELFHOSTED = 'selfhosted' # We're dealing with the official hosted service at https://notica.us OFFICIAL = 'official' # Define our Notica Modes NOTICA_MODES = ( NoticaMode.SELFHOSTED, NoticaMode.OFFICIAL, ) class NotifyNotica(NotifyBase): """ A wrapper for Notica Notifications """ # The default descriptive name associated with the Notification service_name = 'Notica' # The services URL service_url = 'https://notica.us/' # Insecure protocol (for those self hosted requests) protocol = 'notica' # The default protocol (this is secure for notica) secure_protocol = 'noticas' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_notica' # Notica URL notify_url = 'https://notica.us/?{token}' # Notica does not support a title title_maxlen = 0 # Define object templates templates = ( '{schema}://{token}', # Self-hosted notica servers '{schema}://{host}/{token}', '{schema}://{host}:{port}/{token}', '{schema}://{user}@{host}/{token}', '{schema}://{user}@{host}:{port}/{token}', '{schema}://{user}:{password}@{host}/{token}', '{schema}://{user}:{password}@{host}:{port}/{token}', # Self-hosted notica servers (with custom path) '{schema}://{host}{path}/{token}', '{schema}://{host}:{port}/{path}/{token}', '{schema}://{user}@{host}/{path}/{token}', '{schema}://{user}@{host}:{port}{path}/{token}', '{schema}://{user}:{password}@{host}{path}/{token}', '{schema}://{user}:{password}@{host}:{port}/{path}/{token}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'token': { 'name': _('Token'), 'type': 'string', 'private': True, 'required': True, 'regex': r'^\?*(?P[^/]+)\s*$' }, 'host': { 'name': _('Hostname'), 'type': 'string', }, 'port': { 'name': _('Port'), 'type': 'int', 'min': 1, 'max': 65535, }, 'user': { 'name': _('Username'), 'type': 'string', }, 'password': { 'name': _('Password'), 'type': 'string', 'private': True, }, 'path': { 'name': _('Path'), 'type': 'string', 'map_to': 'fullpath', 'default': '/', }, }) # Define any kwargs we're using template_kwargs = { 'headers': { 'name': _('HTTP Header'), 'prefix': '+', }, } def __init__(self, token, headers=None, **kwargs): """ Initialize Notica Object """ super().__init__(**kwargs) # Token (associated with project) self.token = validate_regex(token) if not self.token: msg = 'An invalid Notica Token ' \ '({}) was specified.'.format(token) self.logger.warning(msg) raise TypeError(msg) # Setup our mode self.mode = NoticaMode.SELFHOSTED if self.host else NoticaMode.OFFICIAL # prepare our fullpath self.fullpath = kwargs.get('fullpath') if not isinstance(self.fullpath, str): self.fullpath = '/' self.headers = {} if headers: # Store our extra headers self.headers.update(headers) return def send(self, body, title='', 
notify_type=NotifyType.INFO, **kwargs): """ Perform Notica Notification """ headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/x-www-form-urlencoded' } # Prepare our payload payload = 'd:{}'.format(body) # Auth is used for SELFHOSTED queries auth = None if self.mode is NoticaMode.OFFICIAL: # prepare our notify url notify_url = self.notify_url.format(token=self.token) else: # Prepare our self hosted URL # Apply any/all header over-rides defined headers.update(self.headers) if self.user: auth = (self.user, self.password) # Set our schema schema = 'https' if self.secure else 'http' # Prepare our notify_url notify_url = '%s://%s' % (schema, self.host) if isinstance(self.port, int): notify_url += ':%d' % self.port notify_url += '{fullpath}?token={token}'.format( fullpath=self.fullpath, token=self.token) self.logger.debug('Notica POST URL: %s (cert_verify=%r)' % ( notify_url, self.verify_certificate, )) self.logger.debug('Notica Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( notify_url.format(token=self.token), data=payload, headers=headers, auth=auth, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyNotica.http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send Notica notification:' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.info('Sent Notica notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Notica notification.', ) self.logger.debug('Socket Exception: %s' % str(e)) # Return; we're done return False return True @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol if self.secure else self.protocol, self.mode, self.token, self.user, self.password, self.host, self.port, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
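For example, when pointing at the official hosted service the rendered
        URL takes roughly the form below (re-using the placeholder token from
        the comment block at the top of this module; the trailing query
        parameters are omitted):

            notica://abc123/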
""" # Our URL parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) if self.mode == NoticaMode.OFFICIAL: # Official URLs are easy to assemble return '{schema}://{token}/?{params}'.format( schema=self.protocol, token=self.pprint(self.token, privacy, safe=''), params=NotifyNotica.urlencode(params), ) # If we reach here then we are assembling a self hosted URL # Append URL parameters from our headers params.update({'+{}'.format(k): v for k, v in self.headers.items()}) # Authorization can be used for self-hosted sollutions auth = '' # Determine Authentication if self.user and self.password: auth = '{user}:{password}@'.format( user=NotifyNotica.quote(self.user, safe=''), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), ) elif self.user: auth = '{user}@'.format( user=NotifyNotica.quote(self.user, safe=''), ) default_port = 443 if self.secure else 80 return '{schema}://{auth}{hostname}{port}{fullpath}{token}/?{params}' \ .format( schema=self.secure_protocol if self.secure else self.protocol, auth=auth, hostname=NotifyNotica.quote(self.host, safe=''), port='' if self.port is None or self.port == default_port else ':{}'.format(self.port), fullpath=NotifyNotica.quote( self.fullpath, safe='/'), token=self.pprint(self.token, privacy, safe=''), params=NotifyNotica.urlencode(params), ) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Get unquoted entries entries = NotifyNotica.split_path(results['fullpath']) if not entries: # If there are no path entries, then we're only dealing with the # official website results['mode'] = NoticaMode.OFFICIAL # Store our token using the host results['token'] = NotifyNotica.unquote(results['host']) # Unset our host results['host'] = None else: # Otherwise we're running a self hosted instance results['mode'] = NoticaMode.SELFHOSTED # The last element in the list is our token results['token'] = entries.pop() # Re-assemble our full path results['fullpath'] = \ '/' if not entries else '/{}/'.format('/'.join(entries)) # Add our headers that the user can potentially over-ride if they # wish to to our returned result set and tidy entries by unquoting # them results['headers'] = { NotifyNotica.unquote(x): NotifyNotica.unquote(y) for x, y in results['qsd+'].items()} return results @staticmethod def parse_native_url(url): """ Support https://notica.us/?abc123 """ result = re.match( r'^https?://notica\.us/?' r'\??(?P[^&]+)([&\s]*(?P.+))?$', url, re.I) if result: return NotifyNotica.parse_url( '{schema}://{token}/{params}'.format( schema=NotifyNotica.protocol, token=result.group('token'), params='' if not result.group('params') else '?{}'.format(result.group('params')))) return None apprise-1.9.3/apprise/plugins/notifiarr.py000066400000000000000000000362341477231770000207020ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. 
Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import re import requests from json import dumps from itertools import chain from .base import NotifyBase from ..common import NotifyType from ..locale import gettext_lazy as _ from ..common import NotifyImageSize from ..utils.parse import parse_list, parse_bool, validate_regex from .discord import USER_ROLE_DETECTION_RE # Used to break path apart into list of channels CHANNEL_LIST_DELIM = re.compile(r'[ \t\r\n,#\\/]+') CHANNEL_REGEX = re.compile( r'^\s*(\#|\%35)?(?P[0-9]+)', re.I) # For API Details see: # https://notifiarr.wiki/Client/Installation # Another good example: # https://notifiarr.wiki/en/Website/ \ # Integrations/Passthrough#payload-example-1 class NotifyNotifiarr(NotifyBase): """ A wrapper for Notifiarr Notifications """ # The default descriptive name associated with the Notification service_name = 'Notifiarr' # The services URL service_url = 'https://notifiarr.com/' # The default secure protocol secure_protocol = 'notifiarr' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_notifiarr' # The Notification URL notify_url = 'https://notifiarr.com/api/v1/notification/apprise' # Notifiarr Throttling (knowing in advance reduces 429 responses) # define('NOTIFICATION_LIMIT_SECOND_USER', 5); # define('NOTIFICATION_LIMIT_SECOND_PATRON', 15); # Throttle requests ever so slightly request_rate_per_sec = 0.04 # Allows the user to specify the NotifyImageSize object image_size = NotifyImageSize.XY_256 # Define object templates templates = ( '{schema}://{apikey}/{targets}', ) # Define our apikeys; these are the minimum apikeys required required to # be passed into this function (as arguments). 
The syntax appends any # previously defined in the base package and builds onto them template_tokens = dict(NotifyBase.template_tokens, **{ 'apikey': { 'name': _('Token'), 'type': 'string', 'required': True, 'private': True, }, 'target_channel': { 'name': _('Target Channel'), 'type': 'string', 'prefix': '#', 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', 'required': True, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'key': { 'alias_of': 'apikey', }, 'apikey': { 'alias_of': 'apikey', }, 'event': { 'name': _('Discord Event ID'), 'type': 'int', }, 'image': { 'name': _('Include Image'), 'type': 'bool', 'default': False, 'map_to': 'include_image', }, 'source': { 'name': _('Source'), 'type': 'string', }, 'from': { 'alias_of': 'source' }, 'to': { 'alias_of': 'targets', }, }) def __init__(self, apikey=None, include_image=None, event=None, targets=None, source=None, **kwargs): """ Initialize Notifiarr Object headers can be a dictionary of key/value pairs that you want to additionally include as part of the server headers to post with """ super().__init__(**kwargs) self.apikey = apikey if not self.apikey: msg = 'An invalid Notifiarr APIKey ' \ '({}) was specified.'.format(apikey) self.logger.warning(msg) raise TypeError(msg) # Place a thumbnail image inline with the message body self.include_image = include_image \ if isinstance(include_image, bool) \ else self.template_args['image']['default'] # Prepare our source (if set) self.source = validate_regex(source) self.event = 0 if event: try: self.event = int(event) except (ValueError, TypeError): msg = 'An invalid Notifiarr Discord Event ID ' \ '({}) was specified.'.format(event) self.logger.warning(msg) raise TypeError(msg) # Prepare our targets self.targets = { 'channels': [], 'invalid': [], } for target in parse_list(targets): result = CHANNEL_REGEX.match(target) if result: # Store role information self.targets['channels'].append(int(result.group('channel'))) continue self.logger.warning( 'Dropped invalid channel ' '({}) specified.'.format(target), ) self.targets['invalid'].append(target) return @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol if self.secure else self.protocol, self.apikey, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
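For example, a rendered URL might look roughly like the following
        (illustrative placeholders only; channels are the '#'-prefixed
        Discord channel IDs and the remaining query parameters are omitted):

            notifiarr://myapikey/#123456789?image=no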
""" # Define any URL parameters params = { 'image': 'yes' if self.include_image else 'no', } if self.source: params['source'] = self.source if self.event: params['event'] = self.event # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) return '{schema}://{apikey}' \ '/{targets}?{params}'.format( schema=self.secure_protocol, apikey=self.pprint(self.apikey, privacy, safe=''), targets='/'.join( [NotifyNotifiarr.quote(x, safe='+#@') for x in chain( # Channels ['#{}'.format(x) for x in self.targets['channels']], # Pass along the same invalid entries as were provided self.targets['invalid'], )]), params=NotifyNotifiarr.urlencode(params), ) def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Notifiarr Notification """ if not self.targets['channels']: # There were no services to notify self.logger.warning( 'There were no Notifiarr channels to notify.') return False # No error to start with has_error = False # Acquire image_url image_url = self.image_url(notify_type) # Define our mentions mentions = { 'pingUser': [], 'pingRole': [], 'content': [], } # parse for user id's <@123> and role IDs <@&456> results = USER_ROLE_DETECTION_RE.findall(body) if results: for (is_role, no, value) in results: if value: # @everybody, @admin, etc - unsupported mentions['content'].append(f'@{value}') elif is_role: mentions['pingRole'].append(no) mentions['content'].append(f'<@&{no}>') else: # is_user mentions['pingUser'].append(no) mentions['content'].append(f'<@{no}>') for idx, channel in enumerate(self.targets['channels']): # prepare Notifiarr Object payload = { 'source': self.source if self.source else self.app_id, 'type': notify_type, 'notification': { 'update': True if self.event else False, 'name': self.app_id, 'event': str(self.event) if self.event else "", }, 'discord': { 'color': self.color(notify_type), 'ping': { # Only 1 user is supported, so truncate the rest 'pingUser': 0 if not mentions['pingUser'] else mentions['pingUser'][0], # Only 1 role is supported, so truncate the rest 'pingRole': 0 if not mentions['pingRole'] else mentions['pingRole'][0], }, 'text': { 'title': title, 'content': '' if not mentions['content'] else '👉 ' + ' '.join(mentions['content']), 'description': body, 'footer': self.app_desc, }, 'ids': { 'channel': channel, } } } if self.include_image and image_url: payload['discord']['text']['icon'] = image_url payload['discord']['images'] = { 'thumbnail': image_url, } if not self._send(payload): has_error = True return not has_error def _send(self, payload): """ Send notification """ self.logger.debug('Notifiarr POST URL: %s (cert_verify=%r)' % ( self.notify_url, self.verify_certificate, )) self.logger.debug('Notifiarr Payload: %s' % str(payload)) # Prepare HTTP Headers headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json', 'Accept': 'text/plain', 'X-api-Key': self.apikey, } # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( self.notify_url, data=dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code < 200 or r.status_code >= 300: # We had a problem status_str = \ NotifyNotifiarr.http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send Notifiarr %s notification: ' '%serror=%s.', status_str, ', ' if status_str else '', str(r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.info('Sent Notifiarr 
notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Notifiarr ' 'Chat notification to %s.' % self.host) self.logger.debug('Socket Exception: %s' % str(e)) # Return; we're done return False return True def __len__(self): """ Returns the number of targets associated with this notification """ targets = len(self.targets['channels']) + len(self.targets['invalid']) return targets if targets > 0 else 1 @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Get channels results['targets'] = NotifyNotifiarr.split_path(results['fullpath']) if 'event' in results['qsd'] and \ len(results['qsd']['event']): results['event'] = \ NotifyNotifiarr.unquote(results['qsd']['event']) # Include images with our message results['include_image'] = \ parse_bool(results['qsd'].get('image', False)) # Track if we need to extract the hostname as a target host_is_potential_target = False if 'source' in results['qsd'] and len(results['qsd']['source']): results['source'] = \ NotifyNotifiarr.unquote(results['qsd']['source']) elif 'from' in results['qsd'] and len(results['qsd']['from']): results['source'] = \ NotifyNotifiarr.unquote(results['qsd']['from']) # Set our apikey if found as an argument if 'apikey' in results['qsd'] and len(results['qsd']['apikey']): results['apikey'] = \ NotifyNotifiarr.unquote(results['qsd']['apikey']) host_is_potential_target = True elif 'key' in results['qsd'] and len(results['qsd']['key']): results['apikey'] = \ NotifyNotifiarr.unquote(results['qsd']['key']) host_is_potential_target = True else: # Pop the first element (this is the api key) results['apikey'] = \ NotifyNotifiarr.unquote(results['host']) if host_is_potential_target is True and results['host']: results['targets'].append(NotifyNotifiarr.unquote(results['host'])) # Support the 'to' variable so that we can support rooms this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += [x for x in filter( bool, CHANNEL_LIST_DELIM.split( NotifyNotifiarr.unquote(results['qsd']['to'])))] return results apprise-1.9.3/apprise/plugins/notifico.py000066400000000000000000000300251477231770000205070ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # Notifico allows you to relay notifications into IRC channels. # # 1. visit https://n.tkte.ch and sign up for an account # 2. create a project; either manually or sync with github # 3. from within the project, you can create a message hook # # the URL will look something like this: # https://n.tkte.ch/h/2144/uJmKaBW9WFk42miB146ci3Kj # ^ ^ # | | # project id message hook # # This plugin also supports taking the URL (as identified above) directly # as well. import re import requests from .base import NotifyBase from ..common import NotifyType from ..utils.parse import parse_bool, validate_regex from ..locale import gettext_lazy as _ class NotificoFormat: # Resets all formatting Reset = '\x0F' # Formatting Bold = '\x02' Italic = '\x1D' Underline = '\x1F' BGSwap = '\x16' class NotificoColor: # Resets Color Reset = '\x03' # Colors White = '\x0300' Black = '\x0301' Blue = '\x0302' Green = '\x0303' Red = '\x0304' Brown = '\x0305' Purple = '\x0306' Orange = '\x0307' Yellow = '\x0308', LightGreen = '\x0309' Teal = '\x0310' LightCyan = '\x0311' LightBlue = '\x0312' Violet = '\x0313' Grey = '\x0314' LightGrey = '\x0315' class NotifyNotifico(NotifyBase): """ A wrapper for Notifico Notifications """ # The default descriptive name associated with the Notification service_name = 'Notifico' # The services URL service_url = 'https://n.tkte.ch' # The default protocol protocol = 'notifico' # The default secure protocol secure_protocol = 'notifico' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_notifico' # Plain Text Notification URL notify_url = 'https://n.tkte.ch/h/{proj}/{hook}' # The title is not used title_maxlen = 0 # The maximum allowable characters allowed in the body per message body_maxlen = 512 # Define object templates templates = ( '{schema}://{project_id}/{msghook}', ) # Define our template arguments template_tokens = dict(NotifyBase.template_tokens, **{ # The Project ID is found as the first part of the URL # /1234/........................ 
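        # Illustrative mapping (re-using the placeholder hook URL from the
        # comment block at the top of this module; neither value is a real
        # project or message hook):
        #
        #   native hook URL: https://n.tkte.ch/h/2144/uJmKaBW9WFk42miB146ci3Kj
        #   apprise URL:     notifico://2144/uJmKaBW9WFk42miB146ci3Kj
        #
        # Either form may be handed to Apprise; parse_native_url() further
        # below converts the first form into the second.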
'project_id': { 'name': _('Project ID'), 'type': 'string', 'required': True, 'private': True, 'regex': (r'^[0-9]+$', ''), }, # The Message Hook follows the Project ID # /..../AbCdEfGhIjKlMnOpQrStUvWX 'msghook': { 'name': _('Message Hook'), 'type': 'string', 'required': True, 'private': True, 'regex': (r'^[a-z0-9]+$', 'i'), }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ # You can optionally pass IRC colors into 'color': { 'name': _('IRC Colors'), 'type': 'bool', 'default': True, }, # You can optionally pass IRC color into 'prefix': { 'name': _('Prefix'), 'type': 'bool', 'default': True, }, }) def __init__(self, project_id, msghook, color=True, prefix=True, **kwargs): """ Initialize Notifico Object """ super().__init__(**kwargs) # Assign our message hook self.project_id = validate_regex( project_id, *self.template_tokens['project_id']['regex']) if not self.project_id: msg = 'An invalid Notifico Project ID ' \ '({}) was specified.'.format(project_id) self.logger.warning(msg) raise TypeError(msg) # Assign our message hook self.msghook = validate_regex( msghook, *self.template_tokens['msghook']['regex']) if not self.msghook: msg = 'An invalid Notifico Message Token ' \ '({}) was specified.'.format(msghook) self.logger.warning(msg) raise TypeError(msg) # Prefix messages with a [?] where ? identifies the message type # such as if it's an error, warning, info, or success self.prefix = prefix # Send colors self.color = color # Prepare our notification URL now: self.api_url = self.notify_url.format( proj=self.project_id, hook=self.msghook, ) return @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.project_id, self.msghook) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'color': 'yes' if self.color else 'no', 'prefix': 'yes' if self.prefix else 'no', } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) return '{schema}://{proj}/{hook}/?{params}'.format( schema=self.secure_protocol, proj=self.pprint(self.project_id, privacy, safe=''), hook=self.pprint(self.msghook, privacy, safe=''), params=NotifyNotifico.urlencode(params), ) def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ wrapper to _send since we can alert more then one channel """ # prepare our headers headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8', } # Prepare our IRC Prefix color = '' token = '' if notify_type == NotifyType.INFO: color = NotificoColor.Teal token = 'i' elif notify_type == NotifyType.SUCCESS: color = NotificoColor.LightGreen token = '✔' elif notify_type == NotifyType.WARNING: color = NotificoColor.Orange token = '!' elif notify_type == NotifyType.FAILURE: color = NotificoColor.Red token = '✗' if self.color: # Colors were specified, make sure we capture and correctly # allow them to exist inline in the message # \g<1> is less ambiguous than \1 body = re.sub(r'\\x03(\d{0,2})', r'\\x03\g<1>', body) else: # no colors specified, make sure we strip out any colors found # to make the string read-able body = re.sub(r'\\x03(\d{1,2}(,[0-9]{1,2})?)?', r'', body) # Prepare our payload payload = { 'payload': body if not self.prefix else '{}[{}]{} {}{}{}: {}{}'.format( # Token [?] 
at the head color if self.color else '', token, NotificoColor.Reset if self.color else '', # App ID NotificoFormat.Bold if self.color else '', self.app_id, NotificoFormat.Reset if self.color else '', # Message Body body, # Reset NotificoFormat.Reset if self.color else '', ), } self.logger.debug('Notifico GET URL: %s (cert_verify=%r)' % ( self.api_url, self.verify_certificate)) self.logger.debug('Notifico Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.get( self.api_url, params=payload, headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyNotifico.http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send Notifico notification: ' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.info('Sent Notifico notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Notifico ' 'notification.') self.logger.debug('Socket Exception: %s' % str(e)) # Return; we're done return False return True @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # The first token is stored in the hostname results['project_id'] = NotifyNotifico.unquote(results['host']) # Get Message Hook try: results['msghook'] = NotifyNotifico.split_path( results['fullpath'])[0] except IndexError: results['msghook'] = None # Include Color results['color'] = \ parse_bool(results['qsd'].get('color', True)) # Include Prefix results['prefix'] = \ parse_bool(results['qsd'].get('prefix', True)) return results @staticmethod def parse_native_url(url): """ Support https://n.tkte.ch/h/PROJ_ID/MESSAGE_HOOK/ """ result = re.match( r'^https?://n\.tkte\.ch/h/' r'(?P[0-9]+)/' r'(?P[A-Z0-9]+)/?' r'(?P\?.+)?$', url, re.I) if result: return NotifyNotifico.parse_url( '{schema}://{proj}/{hook}/{params}'.format( schema=NotifyNotifico.secure_protocol, proj=result.group('proj'), hook=result.group('hook'), params='' if not result.group('params') else result.group('params'))) return None apprise-1.9.3/apprise/plugins/ntfy.py000066400000000000000000000733041477231770000176640ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # Great sources # - https://github.com/matrix-org/matrix-python-sdk # - https://github.com/matrix-org/synapse/blob/master/docs/reverse_proxy.rst # # Examples: # ntfys://my-topic # ntfy://ntfy.local.domain/my-topic # ntfys://ntfy.local.domain:8080/my-topic # ntfy://ntfy.local.domain/?priority=max import re import requests from json import loads from json import dumps from os.path import basename from urllib.parse import quote from .base import NotifyBase from ..common import NotifyFormat from ..common import NotifyType from ..common import NotifyImageSize from ..locale import gettext_lazy as _ from ..utils.parse import ( parse_list, parse_bool, is_hostname, is_ipaddr, validate_regex) from ..url import PrivacyMode from ..attachment.base import AttachBase from ..attachment.memory import AttachMemory class NtfyMode: """ Define ntfy Notification Modes """ # App posts upstream to the developer API on ntfy's website CLOUD = "cloud" # Running a dedicated private ntfy Server PRIVATE = "private" NTFY_MODES = ( NtfyMode.CLOUD, NtfyMode.PRIVATE, ) # A Simple regular expression used to auto detect Auth mode if it isn't # otherwise specified: NTFY_AUTH_DETECT_RE = re.compile('tk_[^ \t]+', re.IGNORECASE) class NtfyAuth: """ Define ntfy Authentication Modes """ # Basic auth (user and password provided) BASIC = "basic" # Auth Token based TOKEN = "token" NTFY_AUTH = ( NtfyAuth.BASIC, NtfyAuth.TOKEN, ) class NtfyPriority: """ Ntfy Priority Definitions """ MAX = 'max' HIGH = 'high' NORMAL = 'default' LOW = 'low' MIN = 'min' NTFY_PRIORITIES = ( NtfyPriority.MAX, NtfyPriority.HIGH, NtfyPriority.NORMAL, NtfyPriority.LOW, NtfyPriority.MIN, ) NTFY_PRIORITY_MAP = { # Maps against string 'low' but maps to Moderate to avoid # conflicting with actual ntfy mappings 'l': NtfyPriority.LOW, # Maps against string 'moderate' 'mo': NtfyPriority.LOW, # Maps against string 'normal' 'n': NtfyPriority.NORMAL, # Maps against string 'high' 'h': NtfyPriority.HIGH, # Maps against string 'emergency' 'e': NtfyPriority.MAX, # Entries to additionally support (so more like Ntfy's API) # Maps against string 'min' 'mi': NtfyPriority.MIN, # Maps against string 'max' 'ma': NtfyPriority.MAX, # Maps against string 'default' 'd': NtfyPriority.NORMAL, # support 1-5 values as well '1': NtfyPriority.MIN, # Maps against string 'moderate' '2': NtfyPriority.LOW, # Maps against string 'normal' '3': NtfyPriority.NORMAL, # Maps against string 'high' '4': NtfyPriority.HIGH, # Maps against string 'emergency' '5': NtfyPriority.MAX, } class NotifyNtfy(NotifyBase): """ A wrapper for ntfy Notifications """ # The default descriptive name associated with the Notification service_name = 'ntfy' # The services URL service_url = 'https://ntfy.sh/' # Insecure protocol (for those self hosted requests) protocol = 'ntfy' # The default protocol secure_protocol = 'ntfys' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_ntfy' # Default upstream/cloud host if none is defined 
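    # Illustrative usage sketch (the topic and host names re-use the
    # placeholder values from the 'Examples' comment near the top of this
    # module; they are not real endpoints):
    #
    #   import apprise
    #   apobj = apprise.Apprise()
    #   apobj.add('ntfys://my-topic')                   # cloud mode (ntfy.sh)
    #   apobj.add('ntfy://ntfy.local.domain/my-topic')  # self-hosted / private
    #   apobj.notify(title='Disk alert', body='Volume is filling up')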
cloud_notify_url = 'https://ntfy.sh' # Support attachments attachment_support = True # Maximum title length title_maxlen = 200 # Maximum body length body_maxlen = 7800 # Message size calculates title and body together overflow_amalgamate_title = True # Defines the number of bytes our JSON object can not exceed in size or we # know the upstream server will reject it. We convert these into # attachments ntfy_json_upstream_size_limit = 8000 # Allows the user to specify the NotifyImageSize object image_size = NotifyImageSize.XY_256 # Message time to live (if remote client isn't around to receive it) time_to_live = 2419200 # if our hostname matches the following we automatically enforce # cloud mode __auto_cloud_host = re.compile(r'ntfy\.sh', re.IGNORECASE) # Define object templates templates = ( '{schema}://{topic}', '{schema}://{host}/{targets}', '{schema}://{host}:{port}/{targets}', '{schema}://{user}@{host}/{targets}', '{schema}://{user}@{host}:{port}/{targets}', '{schema}://{user}:{password}@{host}/{targets}', '{schema}://{user}:{password}@{host}:{port}/{targets}', '{schema}://{token}@{host}/{targets}', '{schema}://{token}@{host}:{port}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'host': { 'name': _('Hostname'), 'type': 'string', }, 'port': { 'name': _('Port'), 'type': 'int', 'min': 1, 'max': 65535, }, 'user': { 'name': _('Username'), 'type': 'string', }, 'password': { 'name': _('Password'), 'type': 'string', 'private': True, }, 'token': { 'name': _('Token'), 'type': 'string', 'private': True, }, 'topic': { 'name': _('Topic'), 'type': 'string', 'map_to': 'targets', 'regex': (r'^[a-z0-9_-]{1,64}$', 'i') }, 'targets': { 'name': _('Targets'), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'attach': { 'name': _('Attach'), 'type': 'string', }, 'image': { 'name': _('Include Image'), 'type': 'bool', 'default': True, 'map_to': 'include_image', }, 'avatar_url': { 'name': _('Avatar URL'), 'type': 'string', }, 'filename': { 'name': _('Attach Filename'), 'type': 'string', }, 'click': { 'name': _('Click'), 'type': 'string', }, 'delay': { 'name': _('Delay'), 'type': 'string', }, 'email': { 'name': _('Email'), 'type': 'string', }, 'priority': { 'name': _('Priority'), 'type': 'choice:string', 'values': NTFY_PRIORITIES, 'default': NtfyPriority.NORMAL, }, 'tags': { 'name': _('Tags'), 'type': 'string', }, 'mode': { 'name': _('Mode'), 'type': 'choice:string', 'values': NTFY_MODES, 'default': NtfyMode.PRIVATE, }, 'token': { 'alias_of': 'token', }, 'auth': { 'name': _('Authentication Type'), 'type': 'choice:string', 'values': NTFY_AUTH, 'default': NtfyAuth.BASIC, }, 'to': { 'alias_of': 'targets', }, }) def __init__(self, targets=None, attach=None, filename=None, click=None, delay=None, email=None, priority=None, tags=None, mode=None, include_image=True, avatar_url=None, auth=None, token=None, **kwargs): """ Initialize ntfy Object """ super().__init__(**kwargs) # Prepare our mode self.mode = mode.strip().lower() \ if isinstance(mode, str) \ else self.template_args['mode']['default'] if self.mode not in NTFY_MODES: msg = 'An invalid ntfy Mode ({}) was specified.'.format(mode) self.logger.warning(msg) raise TypeError(msg) # Show image associated with notification self.include_image = include_image # Prepare our authentication type self.auth = auth.strip().lower() \ if isinstance(auth, str) \ else self.template_args['auth']['default'] if self.auth not in NTFY_AUTH: msg = 'An invalid ntfy 
Authentication type ({}) was specified.' \ .format(auth) self.logger.warning(msg) raise TypeError(msg) # Attach a file (URL supported) self.attach = attach # Our filename (if defined) self.filename = filename # A clickthrough option for notifications # Support Internationalized URLs self.click = None if not isinstance(click, str) else ( click if not any(ord(char) > 127 for char in click) else quote(click, safe=':/?&=[]')) # Time delay for notifications (various string formats) self.delay = delay # An email to forward notifications to self.email = email # Save our token self.token = token # The Priority of the message self.priority = NotifyNtfy.template_args['priority']['default'] \ if not priority else \ next(( v for k, v in NTFY_PRIORITY_MAP.items() if str(priority).lower().startswith(k)), NotifyNtfy.template_args['priority']['default']) # Any optional tags to attach to the notification self.__tags = parse_list(tags) # Avatar URL # This allows a user to provide an over-ride to the otherwise # dynamically generated avatar url images self.avatar_url = avatar_url # Build list of topics topics = parse_list(targets) self.topics = [] for _topic in topics: topic = validate_regex( _topic, *self.template_tokens['topic']['regex']) if not topic: self.logger.warning( 'A specified ntfy topic ({}) is invalid and will be ' 'ignored'.format(_topic)) continue self.topics.append(topic) return def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, **kwargs): """ Perform ntfy Notification """ # error tracking (used for function return) has_error = False if not len(self.topics): # We have nothing to notify; we're done self.logger.warning('There are no ntfy topics to notify') return False # Acquire image_url image_url = self.image_url(notify_type) if self.include_image and (image_url or self.avatar_url): image_url = \ self.avatar_url if self.avatar_url else image_url else: image_url = None # Create a copy of the topics topics = list(self.topics) while len(topics) > 0: # Retrieve our topic topic = topics.pop() if attach and self.attachment_support: # We need to upload our payload first so that we can source it # in remaining messages for no, attachment in enumerate(attach): # First message only includes the text (if defined) _body = body if not no and body else None _title = title if not no and title else None # Perform some simple error checking if not attachment: # We could not access the attachment self.logger.error( 'Could not access attachment {}.'.format( attachment.url(privacy=True))) return False self.logger.debug( 'Preparing ntfy attachment {}'.format( attachment.url(privacy=True))) okay, response = self._send( topic, body=_body, title=_title, image_url=image_url, attach=attachment) if not okay: # We can't post our attachment; abort immediately return False else: # Send our Notification Message okay, response = self._send( topic, body=body, title=title, image_url=image_url) if not okay: # Mark our failure, but contiue to move on has_error = True return not has_error def _send(self, topic, body=None, title=None, attach=None, image_url=None, **kwargs): """ Wrapper to the requests (post) object """ # Prepare our headers headers = { 'User-Agent': self.app_id, } # See https://ntfy.sh/docs/publish/#publish-as-json data = {} # Posting Parameters params = {} auth = None if self.mode == NtfyMode.CLOUD: # Cloud Service notify_url = self.cloud_notify_url else: # NotifyNtfy.PRVATE # Allow more settings to be applied now if self.auth == NtfyAuth.BASIC and self.user: auth = (self.user, 
self.password) elif self.auth == NtfyAuth.TOKEN: if not self.token: self.logger.warning('No Ntfy Token was specified') return False, None # Set Token headers['Authorization'] = f'Bearer {self.token}' # Prepare our ntfy Template URL schema = 'https' if self.secure else 'http' notify_url = '%s://%s' % (schema, self.host) if isinstance(self.port, int): notify_url += ':%d' % self.port if not attach: headers['Content-Type'] = 'application/json' data['topic'] = topic virt_payload = data if self.attach: virt_payload['attach'] = self.attach if self.filename: virt_payload['filename'] = self.filename else: # Point our payload to our parameters virt_payload = params notify_url += '/{topic}'.format(topic=topic) # Prepare our Header virt_payload['filename'] = attach.name with attach as fp: data = fp.read() if image_url: headers['X-Icon'] = image_url if title: virt_payload['title'] = title if body: virt_payload['message'] = body if self.notify_format == NotifyFormat.MARKDOWN: # Support Markdown headers['X-Markdown'] = 'yes' if self.priority != NtfyPriority.NORMAL: headers['X-Priority'] = self.priority if self.delay is not None: headers['X-Delay'] = self.delay if self.click is not None: headers['X-Click'] = quote(self.click, safe=':/?@&=#') if self.email is not None: headers['X-Email'] = self.email if self.__tags: headers['X-Tags'] = ",".join(self.__tags) self.logger.debug('ntfy POST URL: %s (cert_verify=%r)' % ( notify_url, self.verify_certificate, )) # Default response type response = None if not attach: data = dumps(data) if len(data) > self.ntfy_json_upstream_size_limit: # Convert to an attachment if self.notify_format == NotifyFormat.MARKDOWN: mimetype = 'text/markdown' elif self.notify_format == NotifyFormat.TEXT: mimetype = 'text/plain' else: # self.notify_format == NotifyFormat.HTML: mimetype = 'text/html' attach = AttachMemory( mimetype=mimetype, content='{title}{body}'.format( title=title + '\n' if title else '', body=body)) # Recursively send the message body as an attachment instead return self._send( topic=topic, body='', title='', attach=attach, image_url=image_url, **kwargs) self.logger.debug('ntfy Payload: %s' % str(virt_payload)) self.logger.debug('ntfy Headers: %s' % str(headers)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( notify_url, params=params if params else None, data=data, headers=headers, auth=auth, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyBase.http_response_code_lookup(r.status_code) # set up our status code to use status_code = r.status_code try: # Update our status response if we can response = loads(r.content) status_str = response.get('error', status_str) status_code = \ int(response.get('code', status_code)) except (AttributeError, TypeError, ValueError): # ValueError = r.content is Unparsable # TypeError = r.content is None # AttributeError = r is None # We could not parse JSON response. # We will just use the status we already have. 
pass self.logger.warning( "Failed to send ntfy notification to topic '{}': " '{}{}error={}.'.format( topic, status_str, ', ' if status_str else '', status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) return False, response # otherwise we were successful self.logger.info( "Sent ntfy notification to '{}'.".format(notify_url)) return True, response except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending ntfy:%s ' % ( notify_url) + 'notification.' ) self.logger.debug('Socket Exception: %s' % str(e)) except (OSError, IOError) as e: self.logger.warning( 'An I/O error occurred while handling {}.'.format( attach.name if isinstance(attach, AttachBase) else virt_payload)) self.logger.debug('I/O Exception: %s' % str(e)) return False, response @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ kwargs = [ self.secure_protocol if self.mode == NtfyMode.CLOUD else ( self.secure_protocol if self.secure else self.protocol), self.host if self.mode == NtfyMode.PRIVATE else '', 443 if self.mode == NtfyMode.CLOUD else ( self.port if self.port else (443 if self.secure else 80)), ] if self.mode == NtfyMode.PRIVATE: if self.auth == NtfyAuth.BASIC: kwargs.extend([ self.user if self.user else None, self.password if self.password else None, ]) elif self.token: # NtfyAuth.TOKEN also kwargs.append(self.token) return kwargs def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ default_port = 443 if self.secure else 80 params = { 'priority': self.priority, 'mode': self.mode, 'image': 'yes' if self.include_image else 'no', 'auth': self.auth, } if self.avatar_url: params['avatar_url'] = self.avatar_url if self.attach is not None: params['attach'] = self.attach if self.click is not None: params['click'] = self.click if self.delay is not None: params['delay'] = self.delay if self.email is not None: params['email'] = self.email if self.__tags: params['tags'] = ','.join(self.__tags) params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Determine Authentication auth = '' if self.auth == NtfyAuth.BASIC: if self.user and self.password: auth = '{user}:{password}@'.format( user=NotifyNtfy.quote(self.user, safe=''), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), ) elif self.user: auth = '{user}@'.format( user=NotifyNtfy.quote(self.user, safe=''), ) elif self.token: # NtfyAuth.TOKEN also auth = '{token}@'.format( token=self.pprint(self.token, privacy, safe=''), ) if self.mode == NtfyMode.PRIVATE: return '{schema}://{auth}{host}{port}/{targets}?{params}'.format( schema=self.secure_protocol if self.secure else self.protocol, auth=auth, host=self.host, port='' if self.port is None or self.port == default_port else ':{}'.format(self.port), targets='/'.join( [NotifyNtfy.quote(x, safe='') for x in self.topics]), params=NotifyNtfy.urlencode(params) ) else: # Cloud mode return '{schema}://{targets}?{params}'.format( schema=self.secure_protocol, targets='/'.join( [NotifyNtfy.quote(x, safe='') for x in self.topics]), params=NotifyNtfy.urlencode(params) ) def __len__(self): """ Returns the number of targets associated with this notification """ return 1 if not self.topics else len(self.topics) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. 
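For example (illustrative placeholders only), a URL such as

            ntfys://tk_abcdef@ntfy.example.com/alerts

        would be parsed into token authentication (auto-detected from the
        'tk_' prefix on the user field), private mode (a resolvable hostname
        plus at least one topic), and the single topic 'alerts'.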
""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Set our priority if 'priority' in results['qsd'] and len(results['qsd']['priority']): results['priority'] = \ NotifyNtfy.unquote(results['qsd']['priority']) if 'attach' in results['qsd'] and len(results['qsd']['attach']): results['attach'] = NotifyNtfy.unquote(results['qsd']['attach']) _results = NotifyBase.parse_url(results['attach']) if _results: results['filename'] = \ None if _results['fullpath'] \ else basename(_results['fullpath']) if 'filename' in results['qsd'] and \ len(results['qsd']['filename']): results['filename'] = \ basename(NotifyNtfy.unquote(results['qsd']['filename'])) if 'click' in results['qsd'] and len(results['qsd']['click']): results['click'] = NotifyNtfy.unquote(results['qsd']['click']) if 'delay' in results['qsd'] and len(results['qsd']['delay']): results['delay'] = NotifyNtfy.unquote(results['qsd']['delay']) if 'email' in results['qsd'] and len(results['qsd']['email']): results['email'] = NotifyNtfy.unquote(results['qsd']['email']) if 'tags' in results['qsd'] and len(results['qsd']['tags']): results['tags'] = \ parse_list(NotifyNtfy.unquote(results['qsd']['tags'])) # Boolean to include an image or not results['include_image'] = parse_bool(results['qsd'].get( 'image', NotifyNtfy.template_args['image']['default'])) # Extract avatar url if it was specified if 'avatar_url' in results['qsd']: results['avatar_url'] = \ NotifyNtfy.unquote(results['qsd']['avatar_url']) # Acquire our targets/topics results['targets'] = NotifyNtfy.split_path(results['fullpath']) # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyNtfy.parse_list(results['qsd']['to']) # Token Specified if 'token' in results['qsd'] and len(results['qsd']['token']): # Token presumed to be the one in use results['auth'] = NtfyAuth.TOKEN results['token'] = NotifyNtfy.unquote(results['qsd']['token']) # Auth override if 'auth' in results['qsd'] and results['qsd']['auth']: results['auth'] = NotifyNtfy.unquote( results['qsd']['auth'].strip().lower()) if not results.get('auth') and results['user'] \ and not results['password']: # We can try to detect the authentication type on the formatting of # the username. Look for tk_.* # # This isn't a surfire way to do things though; it's best to # specify the auth= flag results['auth'] = NtfyAuth.TOKEN \ if NTFY_AUTH_DETECT_RE.match(results['user']) \ else NtfyAuth.BASIC if results.get('auth') == NtfyAuth.TOKEN and not results.get('token'): if results['user'] and not results['password']: # Make sure we properly set our token results['token'] = NotifyNtfy.unquote(results['user']) elif results['password']: # Make sure we properly set our token results['token'] = NotifyNtfy.unquote(results['password']) # Mode override if 'mode' in results['qsd'] and results['qsd']['mode']: results['mode'] = NotifyNtfy.unquote( results['qsd']['mode'].strip().lower()) else: # We can try to detect the mode based on the validity of the # hostname. 
# # This isn't a surfire way to do things though; it's best to # specify the mode= flag results['mode'] = NtfyMode.PRIVATE \ if ((is_hostname(results['host']) or is_ipaddr(results['host'])) and results['targets']) \ else NtfyMode.CLOUD if results['mode'] == NtfyMode.CLOUD: # Store first entry as it can be a topic too in this case # But only if we also rule it out not being the words # ntfy.sh itself, something that starts wiht an non-alpha numeric # character: if not NotifyNtfy.__auto_cloud_host.search(results['host']): # Add it to the front of the list for consistency results['targets'].insert(0, results['host']) elif results['mode'] == NtfyMode.PRIVATE and \ not (is_hostname(results['host'] or is_ipaddr(results['host']))): # Invalid Host for NtfyMode.PRIVATE return None return results @staticmethod def parse_native_url(url): """ Support https://ntfy.sh/topic """ # Quick lookup for users who want to just paste # the ntfy.sh url directly into Apprise result = re.match( r'^(http|ntfy)s?://ntfy\.sh' r'(?P/[^?]+)?' r'(?P\?.+)?$', url, re.I) if result: mode = 'mode=%s' % NtfyMode.CLOUD return NotifyNtfy.parse_url( '{schema}://{topics}{params}'.format( schema=NotifyNtfy.secure_protocol, topics=result.group('topics') if result.group('topics') else '', params='?%s' % mode if not result.group('params') else result.group('params') + '&%s' % mode)) return None apprise-1.9.3/apprise/plugins/office365.py000066400000000000000000001077211477231770000203760ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # API Details: # https://docs.microsoft.com/en-us/previous-versions/office/\ # office-365-api/?redirectedfrom=MSDN # Information on sending an email: # https://docs.microsoft.com/en-us/graph/api/user-sendmail\ # ?view=graph-rest-1.0&tabs=http # # Note: One must set up Application Permissions (not Delegated Permissions) # - Scopes required: Mail.Send # - For Large Attachments: Mail.ReadWrite # - For Email Lookups: User.Read.All # import requests import json from uuid import uuid4 from datetime import datetime from datetime import timedelta from .base import NotifyBase from .. 
import exception from ..url import PrivacyMode from ..common import NotifyFormat from ..common import NotifyType from ..utils.parse import is_email, parse_emails, validate_regex from ..locale import gettext_lazy as _ from ..common import PersistentStoreMode class NotifyOffice365(NotifyBase): """ A wrapper for Office 365 Notifications """ # The default descriptive name associated with the Notification service_name = 'Office 365' # The services URL service_url = 'https://office.com/' # The default protocol secure_protocol = ('azure', 'o365') # Allow 300 requests per minute. # 60/300 = 0.2 request_rate_per_sec = 0.20 # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_office365' # URL to Microsoft Graph Server graph_url = 'https://graph.microsoft.com' # Authentication URL auth_url = 'https://login.microsoftonline.com/{tenant}/oauth2/v2.0/token' # Support attachments attachment_support = True # Our default is to no not use persistent storage beyond in-memory # reference storage_mode = PersistentStoreMode.AUTO # the maximum size an attachment can be for it to be allowed to be # uploaded inline with the current email going out (one http post) # Anything larger than this and a second PUT request is required to # the outlook server to post the content through reference. # Currently (as of 2025.10.06) this was documented to be 3MB outlook_attachment_inline_max = 3145728 # Use all the direct application permissions you have configured for your # app. The endpoint should issue a token for the ones associated with the # resource you want to use. # see https://docs.microsoft.com/en-us/azure/active-directory/develop/\ # v2-permissions-and-consent#the-default-scope scope = '.default' # Default Notify Format notify_format = NotifyFormat.HTML # Define object templates templates = ( # Send as user (only supported method) '{schema}://{source}/{tenant}/{client_id}/{secret}', '{schema}://{source}/{tenant}/{client_id}/{secret}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'tenant': { 'name': _('Tenant Domain'), 'type': 'string', 'required': True, 'private': True, 'regex': (r'^[a-z0-9-]+$', 'i'), }, 'source': { 'name': _('Account Email or Object ID'), 'type': 'string', 'required': True, }, 'client_id': { 'name': _('Client ID'), 'type': 'string', 'required': True, 'private': True, 'regex': (r'^[a-z0-9-]+$', 'i'), }, 'secret': { 'name': _('Client Secret'), 'type': 'string', 'private': True, 'required': True, }, 'target_email': { 'name': _('Target Email'), 'type': 'string', 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'cc': { 'name': _('Carbon Copy'), 'type': 'list:string', }, 'bcc': { 'name': _('Blind Carbon Copy'), 'type': 'list:string', }, 'oauth_id': { 'alias_of': 'client_id', }, 'oauth_secret': { 'alias_of': 'secret', }, }) def __init__(self, tenant, client_id, secret, source=None, targets=None, cc=None, bcc=None, **kwargs): """ Initialize Office 365 Object """ super().__init__(**kwargs) # Tenant identifier self.tenant = validate_regex( tenant, *self.template_tokens['tenant']['regex']) if not self.tenant: msg = 'An invalid Office 365 Tenant' \ '({}) was specified.'.format(tenant) self.logger.warning(msg) raise TypeError(msg) # Store our email/ObjectID Source self.source = source # Client Key (associated with generated OAuth2 
Login) self.client_id = validate_regex( client_id, *self.template_tokens['client_id']['regex']) if not self.client_id: msg = 'An invalid Office 365 Client OAuth2 ID ' \ '({}) was specified.'.format(client_id) self.logger.warning(msg) raise TypeError(msg) # Client Secret (associated with generated OAuth2 Login) self.secret = validate_regex(secret) if not self.secret: msg = 'An invalid Office 365 Client OAuth2 Secret ' \ '({}) was specified.'.format(secret) self.logger.warning(msg) raise TypeError(msg) # For tracking our email -> name lookups self.names = {} # Acquire Carbon Copies self.cc = set() # Acquire Blind Carbon Copies self.bcc = set() # Parse our targets self.targets = list() if targets: for recipient in parse_emails(targets): # Validate recipients (to:) and drop bad ones: result = is_email(recipient) if result: # Add our email to our target list self.targets.append( (result['name'] if result['name'] else False, result['full_email'])) continue self.logger.warning( 'Dropped invalid To email ({}) specified.' .format(recipient)) else: result = is_email(self.source) if not result: self.logger.warning('No Target Office 365 Email Detected') else: # If our target email list is empty we want to add ourselves to # it self.targets.append((False, self.source)) # Validate recipients (cc:) and drop bad ones: for recipient in parse_emails(cc): email = is_email(recipient) if email: self.cc.add(email['full_email']) # Index our name (if one exists) self.names[email['full_email']] = \ email['name'] if email['name'] else False continue self.logger.warning( 'Dropped invalid Carbon Copy email ' '({}) specified.'.format(recipient), ) # Validate recipients (bcc:) and drop bad ones: for recipient in parse_emails(bcc): email = is_email(recipient) if email: self.bcc.add(email['full_email']) # Index our name (if one exists) self.names[email['full_email']] = \ email['name'] if email['name'] else False continue self.logger.warning( 'Dropped invalid Blind Carbon Copy email ' '({}) specified.'.format(recipient), ) # Our token is acquired upon a successful login self.token = None # Presume that our token has expired 'now' self.token_expiry = datetime.now() # Our email source; we detect this if the source is an ObjectID # If it is unknown we set this to None # User is the email associated with the account self.from_email = self.store.get('from') result = is_email(self.source) if result: self.from_email = result['full_email'] self.from_name = \ result['name'] or self.store.get('name') else: self.from_name = self.store.get('name') return def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, **kwargs): """ Perform Office 365 Notification """ # error tracking (used for function return) has_error = False if not self.targets: # There is no one to email; we're done self.logger.warning( 'There are no Email recipients to notify') return False if self.from_email is None: if not self.authenticate(): # We could not authenticate ourselves; we're done return False # Acquire our from_email url = "https://graph.microsoft.com/v1.0/users/{}".format( self.source) postokay, response = self._fetch(url=url, method='GET') if not postokay: self.logger.warning( 'Could not acquire From email address; ensure ' '"User.Read.All" Application scope is set!') else: # Acquire our from_email (if possible) from_email = \ response.get("mail") or response.get("userPrincipalName") result = is_email(from_email) if not result: self.logger.warning( 'Could not get From email from the Azure endpoint.') # Prevent re-occuring upstream 
fetches for info that isn't # there self.from_email = False else: # Store our email for future reference self.from_email = result['full_email'] self.store.set('from', result['full_email']) self.from_name = response.get("displayName") if self.from_name: self.store.set('name', self.from_name) # Setup our Content Type content_type = \ 'HTML' if self.notify_format == NotifyFormat.HTML else 'Text' # Prepare our payload payload = { 'message': { 'subject': title, 'body': { 'contentType': content_type, 'content': body, }, }, # Below takes a string (not bool) of either 'true' or 'false' 'saveToSentItems': 'true' } if self.from_email: # Apply from email if it is known payload['message'].update({ 'from': { "emailAddress": { "address": self.from_email, "name": self.from_name or self.app_id, } }, }) # Create a copy of the email list emails = list(self.targets) # Define our URL to post to url = '{graph_url}/v1.0/users/{userid}/sendMail'.format( graph_url=self.graph_url, userid=self.source, ) # Prepare our Draft URL draft_url = \ '{graph_url}/v1.0/users/{userid}/messages' \ .format( graph_url=self.graph_url, userid=self.source, ) small_attachments = [] large_attachments = [] # draft emails drafts = [] if attach and self.attachment_support: for no, attachment in enumerate(attach, start=1): # Perform some simple error checking if not attachment: # We could not access the attachment self.logger.error( 'Could not access Office 365 attachment {}.'.format( attachment.url(privacy=True))) return False if len(attachment) > self.outlook_attachment_inline_max: # Messages larger then xMB need to be uploaded after; a # draft email must be prepared; below is our session large_attachments.append({ 'obj': attachment, 'name': attachment.name if attachment.name else f'file{no:03}.dat', }) continue try: # Prepare our Attachment in Base64 small_attachments.append({ "@odata.type": "#microsoft.graph.fileAttachment", # Name of the attachment (as it should appear in email) "name": attachment.name if attachment.name else f'file{no:03}.dat', # MIME type of the attachment "contentType": "attachment.mimetype", # Base64 Content "contentBytes": attachment.base64(), }) except exception.AppriseException: # We could not access the attachment self.logger.error( 'Could not access Office 365 attachment {}.'.format( attachment.url(privacy=True))) return False self.logger.debug( 'Appending Office 365 attachment {}'.format( attachment.url(privacy=True))) if small_attachments: # Store Attachments payload['message']['attachments'] = small_attachments while len(emails): # authenticate ourselves if we aren't already; but this function # also tracks if our token we have is still valid and will # re-authenticate ourselves if nessisary. 
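            # Note: authenticate() below short-circuits to True while the
            # cached token is still valid (see the token_expiry handling), so
            # calling it on every iteration only costs a real round-trip once
            # the token has expired or was never acquired.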
if not self.authenticate(): # We could not authenticate ourselves; we're done return False # Get our email to notify to_name, to_addr = emails.pop(0) # Strip target out of cc list if in To or Bcc cc = (self.cc - self.bcc - set([to_addr])) # Strip target out of bcc list if in To bcc = (self.bcc - set([to_addr])) # Prepare our email payload['message']['toRecipients'] = [{ 'emailAddress': { 'address': to_addr } }] if to_name: # Apply our To Name payload['message']['toRecipients'][0]['emailAddress']['name'] \ = to_name self.logger.debug('{}Email To: {}'.format( 'Draft' if large_attachments else '', to_addr)) if cc: # Prepare our CC list payload['message']['ccRecipients'] = [] for addr in cc: _payload = {'address': addr} if self.names.get(addr): _payload['name'] = self.names[addr] # Store our address in our payload payload['message']['ccRecipients']\ .append({'emailAddress': _payload}) self.logger.debug('{}Email Cc: {}'.format( 'Draft' if large_attachments else '', ', '.join( ['{}{}'.format( '' if self.names.get(e) else '{}: '.format( self.names[e]), e) for e in cc]))) if bcc: # Prepare our CC list payload['message']['bccRecipients'] = [] for addr in bcc: _payload = {'address': addr} if self.names.get(addr): _payload['name'] = self.names[addr] # Store our address in our payload payload['message']['bccRecipients']\ .append({'emailAddress': _payload}) self.logger.debug('{}Email Bcc: {}'.format( 'Draft' if large_attachments else '', ', '.join( ['{}{}'.format( '' if self.names.get(e) else '{}: '.format( self.names[e]), e) for e in bcc]))) # Perform upstream post postokay, response = self._fetch( url=url if not large_attachments else draft_url, payload=payload) # Test if we were okay if not postokay: has_error = True elif large_attachments: # We have large attachments now to upload and associate with # our message. We need to prepare a draft message; acquire # the message-id associated with it and then attach the file # via this means. 
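                # Rough outline of the large-attachment path (as implemented
                # below and in upload_attachment()):
                #   1. the POST above targeted the draft endpoint, so
                #      `response` holds the draft message id,
                #   2. an upload session is created for each oversized
                #      attachment,
                #   3. the file is streamed in chunks via PUT with a
                #      Content-Range header, and
                #   4. the drafted message is finally sent through its /send
                #      endpoint.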
# Acquire our Draft ID to work with message_id = response.get("id") if not message_id: self.logger.warning( 'Email Draft ID could not be retrieved') has_error = True continue self.logger.debug('Email Draft ID: {}'.format(message_id)) # In future, the below could probably be called via async has_attach_error = False for attachment in large_attachments: if not self.upload_attachment( attachment['obj'], message_id, attachment['name']): self.logger.warning( 'Could not prepare attachment session for %s', attachment['name']) has_error = True has_attach_error = True # Take early exit break if has_attach_error: continue # Send off our draft attach_url = \ "https://graph.microsoft.com/v1.0/users/" \ "{}/messages/{}/send" attach_url = attach_url.format( self.source, message_id, ) # Trigger our send postokay, response = self._fetch(url=url) if not postokay: self.logger.warning( 'Could not send drafted email id: {} ', message_id) has_error = True continue # Memory management del small_attachments del large_attachments del drafts return not has_error def upload_attachment(self, attachment, message_id, name=None): """ Uploads an attachment to a session """ # Perform some simple error checking if not attachment: # We could not access the attachment self.logger.error( 'Could not access Office 365 attachment {}.'.format( attachment.url(privacy=True))) return False # Our Session URL url = \ '{graph_url}/v1.0/users/{userid}/message/{message_id}' \ .format( graph_url=self.graph_url, userid=self.source, message_id=message_id, ) + '/attachments/createUploadSession' file_size = len(attachment) payload = { "AttachmentItem": { "attachmentType": "file", "name": name if name else ( attachment.name if attachment.name else '{}.dat'.format(str(uuid4()))), # MIME type of the attachment "contentType": attachment.mimetype, "size": file_size, } } if not self.authenticate(): # We could not authenticate ourselves; we're done return False # Get our Upload URL postokay, response = self._fetch(url, payload) if not postokay: return False upload_url = response.get('uploadUrl') if not upload_url: return False start_byte = 0 postokay = False response = None for chunk in attachment.chunk(): end_byte = start_byte + len(chunk) - 1 # Define headers for this chunk headers = { 'User-Agent': self.app_id, 'Content-Length': str(len(chunk)), 'Content-Range': f'bytes {start_byte}-{end_byte}/{file_size}' } # Upload the chunk postokay, response = self._fetch( upload_url, chunk, headers=headers, content_type=None, method='PUT') if not postokay: return False # Return our Upload URL return postokay def authenticate(self): """ Logs into and acquires us an authentication token to work with """ if self.token and self.token_expiry > datetime.now(): # If we're already authenticated and our token is still valid self.logger.debug( 'Already authenticate with token {}'.format(self.token)) return True # If we reach here, we've either expired, or we need to authenticate # for the first time. # Prepare our payload payload = { 'grant_type': 'client_credentials', 'client_id': self.client_id, 'client_secret': self.secret, 'scope': '{graph_url}/{scope}'.format( graph_url=self.graph_url, scope=self.scope), } # Prepare our URL url = self.auth_url.format(tenant=self.tenant) # A response looks like the following: # { # "token_type": "Bearer", # "expires_in": 3599, # "access_token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJSzI1NiIsInNBXBP..." # } # # Where expires_in defines the number of seconds the key is valid for # before it must be renewed. 
# Alternatively, this could happen too... # { # "error": "invalid_scope", # "error_description": "AADSTS70011: Blah... Blah Blah... Blah", # "error_codes": [ # 70011 # ], # "timestamp": "2020-01-09 02:02:12Z", # "trace_id": "255d1aef-8c98-452f-ac51-23d051240864", # "correlation_id": "fb3d2015-bc17-4bb9-bb85-30c5cf1aaaa7" # } postokay, response = self._fetch( url=url, payload=payload, content_type='application/x-www-form-urlencoded') if not postokay: return False # Reset our token self.token = None try: # Extract our time from our response and subtrace 10 seconds from # it to give us some wiggle/grace people to re-authenticate if we # need to self.token_expiry = datetime.now() + \ timedelta(seconds=int(response.get('expires_in')) - 10) except (ValueError, AttributeError, TypeError): # ValueError: expires_in wasn't an integer # TypeError: expires_in was None # AttributeError: we could not extract anything from our response # object. return False # Go ahead and store our token if it's available self.token = response.get('access_token') # We're authenticated return True if self.token else False def _fetch(self, url, payload=None, headers=None, content_type='application/json', method='POST'): """ Wrapper to request object """ # Prepare our headers: if not headers: headers = { 'User-Agent': self.app_id, 'Content-Type': content_type, } if self.token: # Are we authenticated? headers['Authorization'] = 'Bearer ' + self.token # Default content response object content = {} # Some Debug Logging self.logger.debug('Office 365 %s URL: {} (cert_verify={})'.format( url, self.verify_certificate), method) self.logger.debug('Office 365 Payload: {}' .format(payload)) # Always call throttle before any remote server i/o is made self.throttle() # fetch function req = requests.post if method == 'POST' else ( requests.put if method == 'PUT' else requests.get) try: r = req( url, data=json.dumps(payload) if content_type and content_type.endswith('/json') else payload, headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code not in ( requests.codes.ok, requests.codes.created, requests.codes.accepted): # We had a problem status_str = \ NotifyOffice365.http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send Office 365 %s to {}: ' '{}error={}.'.format( url, ', ' if status_str else '', r.status_code), method) # A Response could look like this if a Scope element was not # found: # { # "error": { # "code": "MissingClaimType", # "message":"The token is missing the claim type \'oid\'.", # "innerError": { # "oAuthEventOperationId":" 7abe20-339f-4659-9381-38f52", # "oAuthEventcV": "xsOSpAHSHVm3Tp4SNH5oIA.1.1", # "errorUrl": "https://url", # "requestId": "2328ea-ec9e-43a8-80f4-164c", # "date":"2024-12-01T02:03:13" # }} # } # Error 403; the below is returned if he User.Read.All # Application scope is not set and a lookup is # attempted. # { # "error": { # "code": "Authorization_RequestDenied", # "message": # "Insufficient privileges to complete the operation.", # "innerError": { # "date": "2024-12-06T00:15:57", # "request-id": # "48fdb3e7-2f1a-4f45-a5a0-99b8b851278b", # "client-request-id": "48f-2f1a-4f45-a5a0-99b8" # } # } # } # Another response type (error 415): # { # "error": { # "code": "RequestBodyRead", # "message": "A missing or empty content type header was \ # found when trying to read a message. 
The content \ # type header is required.", # } # } self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure return (False, content) try: content = json.loads(r.content) except (AttributeError, TypeError, ValueError): # ValueError = r.content is Unparsable # TypeError = r.content is None # AttributeError = r is None content = {} except requests.RequestException as e: self.logger.warning( 'Exception received when sending Office 365 %s to {}: '. format(url), method) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure return (False, content) return (True, content) @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol[0], self.source, self.tenant, self.client_id, self.secret, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Extend our parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) if self.cc: # Handle our Carbon Copy Addresses params['cc'] = ','.join( ['{}{}'.format( '' if not self.names.get(e) else '{}:'.format(self.names[e]), e) for e in self.cc]) if self.bcc: # Handle our Blind Carbon Copy Addresses params['bcc'] = ','.join( ['{}{}'.format( '' if not self.names.get(e) else '{}:'.format(self.names[e]), e) for e in self.bcc]) return '{schema}://{source}/{tenant}/{client_id}/{secret}' \ '/{targets}/?{params}'.format( schema=self.secure_protocol[0], tenant=self.pprint(self.tenant, privacy, safe=''), # email does not need to be escaped because it should # already be a valid host and username at this point source=self.source, client_id=self.pprint(self.client_id, privacy, safe=''), secret=self.pprint( self.secret, privacy, mode=PrivacyMode.Secret, safe=''), targets='/'.join( [NotifyOffice365.quote('{}{}'.format( '' if not e[0] else '{}:'.format(e[0]), e[1]), safe='@') for e in self.targets]), params=NotifyOffice365.urlencode(params)) def __len__(self): """ Returns the number of targets associated with this notification """ return len(self.targets) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Now make a list of all our path entries # We need to read each entry back one at a time in reverse order # where each email found we mark as a target. 
Once we run out # of targets, the presume the remainder of the entries are part # of the secret key (since it can contain slashes in it) entries = NotifyOffice365.split_path(results['fullpath']) # Initialize our tenant results['tenant'] = None # Initialize our email results['email'] = None # From Email if 'from' in results['qsd'] and \ len(results['qsd']['from']): # Extract the sending account's information results['source'] = \ NotifyOffice365.unquote(results['qsd']['from']) # If tenant is occupied, then the user defined makes up our source elif results['user']: results['source'] = '{}@{}'.format( NotifyOffice365.unquote(results['user']), NotifyOffice365.unquote(results['host']), ) else: # Object ID instead of email results['source'] = NotifyOffice365.unquote(results['host']) # Tenant if 'tenant' in results['qsd'] and len(results['qsd']['tenant']): # Extract the Tenant from the argument results['tenant'] = \ NotifyOffice365.unquote(results['qsd']['tenant']) elif entries: results['tenant'] = NotifyOffice365.unquote(entries.pop(0)) # OAuth2 ID if 'oauth_id' in results['qsd'] and len(results['qsd']['oauth_id']): # Extract the API Key from an argument results['client_id'] = \ NotifyOffice365.unquote(results['qsd']['oauth_id']) elif entries: # Get our client_id is the first entry on the path results['client_id'] = NotifyOffice365.unquote(entries.pop(0)) # # Prepare our target listing # results['targets'] = list() while entries: # Pop the last entry entry = NotifyOffice365.unquote(entries.pop(-1)) if is_email(entry): # Store our email and move on results['targets'].append(entry) continue # If we reach here, the entry we just popped is part of the secret # key, so put it back entries.append(NotifyOffice365.quote(entry, safe='')) # We're done break # OAuth2 Secret if 'oauth_secret' in results['qsd'] and \ len(results['qsd']['oauth_secret']): # Extract the API Secret from an argument results['secret'] = \ NotifyOffice365.unquote(results['qsd']['oauth_secret']) else: # Assemble our secret key which is a combination of the host # followed by all entries in the full path that follow up until # the first email results['secret'] = '/'.join( [NotifyOffice365.unquote(x) for x in entries]) # Support the 'to' variable so that we can support targets this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyOffice365.parse_list(results['qsd']['to']) # Handle Carbon Copy Addresses if 'cc' in results['qsd'] and len(results['qsd']['cc']): results['cc'] = results['qsd']['cc'] # Handle Blind Carbon Copy Addresses if 'bcc' in results['qsd'] and len(results['qsd']['bcc']): results['bcc'] = results['qsd']['bcc'] return results apprise-1.9.3/apprise/plugins/one_signal.py000066400000000000000000000540331477231770000210200ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. 
# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # One Signal requires that you've signed up with the service and # generated yourself an API Key and APP ID. # Sources: # - https://documentation.onesignal.com/docs/accounts-and-keys # - https://documentation.onesignal.com/reference/create-notification import requests from json import dumps from itertools import chain from .base import NotifyBase from ..common import NotifyType from ..common import NotifyImageSize from ..utils.base64 import decode_b64_dict, encode_b64_dict from ..utils.parse import validate_regex, parse_list, parse_bool, is_email from ..locale import gettext_lazy as _ class OneSignalCategory: """ We define the different category types that we can notify via OneSignal """ PLAYER = 'include_player_ids' EMAIL = 'include_email_tokens' USER = 'include_external_user_ids' SEGMENT = 'included_segments' ONESIGNAL_CATEGORIES = ( OneSignalCategory.PLAYER, OneSignalCategory.EMAIL, OneSignalCategory.USER, OneSignalCategory.SEGMENT, ) class NotifyOneSignal(NotifyBase): """ A wrapper for OneSignal Notifications """ # The default descriptive name associated with the Notification service_name = 'OneSignal' # The services URL service_url = 'https://onesignal.com' # The default protocol secure_protocol = 'onesignal' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_onesignal' # Notification notify_url = "https://api.onesignal.com/notifications" # Allows the user to specify the NotifyImageSize object image_size = NotifyImageSize.XY_72 # The maximum allowable batch sizes per message default_batch_size = 2000 # Define object templates templates = ( '{schema}://{app}@{apikey}/{targets}', '{schema}://{template}:{app}@{apikey}/{targets}', ) # Define our template template_tokens = dict(NotifyBase.template_tokens, **{ # The App_ID is a UUID # such as: 8250eaf6-1a58-489e-b136-7c74a864b434 'app': { 'name': _('App ID'), 'type': 'string', 'private': True, 'required': True, }, 'template': { 'name': _('Template'), 'type': 'string', 'private': True, }, 'apikey': { 'name': _('API Key'), 'type': 'string', 'private': True, 'required': True, }, 'target_player': { 'name': _('Target Player ID'), 'type': 'string', 'map_to': 'targets', }, 'target_email': { 'name': _('Target Email'), 'type': 'string', 'map_to': 'targets', }, 'target_user': { 'name': _('Target User'), 'type': 'string', 'prefix': '@', 'map_to': 'targets', }, 'target_segment': { 'name': _('Include Segment'), 'type': 'string', 'prefix': '#', 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', 'required': True, }, }) template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'image': { 'name': _('Include Image'), 'type': 'bool', 'default': True, 'map_to': 
'include_image', }, 'batch': { 'name': _('Batch Mode'), 'type': 'bool', 'default': False, }, 'contents': { 'name': _('Enable Contents'), 'type': 'bool', 'default': True, 'map_to': 'use_contents', }, 'decode': { 'name': _('Decode Template Args'), 'type': 'bool', 'default': False, 'map_to': 'decode_tpl_args', }, 'template': { 'alias_of': 'template', }, 'subtitle': { 'name': _('Subtitle'), 'type': 'string', }, 'language': { 'name': _('Language'), 'type': 'string', 'default': 'en', }, }) # Define our token control template_kwargs = { 'custom': { 'name': _('Custom Data'), 'prefix': ':', }, 'postback': { 'name': _('Postback Data'), 'prefix': '+', }, } def __init__(self, app, apikey, targets=None, include_image=True, template=None, subtitle=None, language=None, batch=None, use_contents=None, decode_tpl_args=None, custom=None, postback=None, **kwargs): """ Initialize OneSignal """ super().__init__(**kwargs) # The apikey associated with the account self.apikey = validate_regex(apikey) if not self.apikey: msg = 'An invalid OneSignal API key ' \ '({}) was specified.'.format(apikey) self.logger.warning(msg) raise TypeError(msg) # The App ID associated with the account self.app = validate_regex(app) if not self.app: msg = 'An invalid OneSignal Application ID ' \ '({}) was specified.'.format(app) self.logger.warning(msg) raise TypeError(msg) # Prepare Batch Mode Flag self.batch_size = self.default_batch_size if ( batch if batch is not None else self.template_args['batch']['default']) else 1 # Prepare Use Contents Flag self.use_contents = True if ( use_contents if use_contents is not None else self.template_args['contents']['default']) else False # Prepare Decode Template Arguments Flag self.decode_tpl_args = True if ( decode_tpl_args if decode_tpl_args is not None else self.template_args['decode']['default']) else False # Place a thumbnail image inline with the message body self.include_image = include_image # Our Assorted Types of Targets self.targets = { OneSignalCategory.PLAYER: [], OneSignalCategory.EMAIL: [], OneSignalCategory.USER: [], OneSignalCategory.SEGMENT: [], } # Assign our template (if defined) self.template_id = template # Assign our subtitle (if defined) self.subtitle = subtitle # Our Language self.language = language.strip().lower()[0:2]\ if language \ else NotifyOneSignal.template_args['language']['default'] if not self.language or len(self.language) != 2: msg = 'An invalid OneSignal Language ({}) was specified.'.format( language) self.logger.warning(msg) raise TypeError(msg) # Sort our targets for _target in parse_list(targets): target = _target.strip() if len(target) < 2: self.logger.debug('Ignoring OneSignal Entry: %s' % target) continue if target.startswith( NotifyOneSignal.template_tokens ['target_user']['prefix']): self.targets[OneSignalCategory.USER].append(target) self.logger.debug( 'Detected OneSignal UserID: %s' % self.targets[OneSignalCategory.USER][-1]) continue if target.startswith( NotifyOneSignal.template_tokens ['target_segment']['prefix']): self.targets[OneSignalCategory.SEGMENT].append(target) self.logger.debug( 'Detected OneSignal Include Segment: %s' % self.targets[OneSignalCategory.SEGMENT][-1]) continue result = is_email(target) if result: self.targets[OneSignalCategory.EMAIL]\ .append(result['full_email']) self.logger.debug( 'Detected OneSignal Email: %s' % self.targets[OneSignalCategory.EMAIL][-1]) else: # Add element as Player ID self.targets[OneSignalCategory.PLAYER].append(target) self.logger.debug( 'Detected OneSignal Player ID: %s' % 
self.targets[OneSignalCategory.PLAYER][-1]) # Custom Data self.custom_data = {} if custom and isinstance(custom, dict): if self.decode_tpl_args: custom = decode_b64_dict(custom) self.custom_data.update(custom) elif custom: msg = 'The specified OneSignal Custom Data ' \ '({}) are not identified as a dictionary.'.format(custom) self.logger.warning(msg) raise TypeError(msg) # Postback Data self.postback_data = {} if postback and isinstance(postback, dict): self.postback_data.update(postback) elif postback: msg = 'The specified OneSignal Postback Data ' \ '({}) are not identified as a dictionary.'.format(postback) self.logger.warning(msg) raise TypeError(msg) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform OneSignal Notification """ headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json; charset=utf-8', "Authorization": "Basic {}".format(self.apikey), } has_error = False sent_count = 0 payload = { 'app_id': self.app, 'contents': { self.language: body, }, # Sending true wakes your app from background to run custom native # code (Apple interprets this as content-available=1). # Note: Not applicable if the app is in the "force-quit" state # (i.e app was swiped away). Omit the contents field to # prevent displaying a visible notification. 'content_available': True, } if self.template_id: # Store template information payload['template_id'] = self.template_id if not self.use_contents: # Only if a template is defined can contents be removed del payload['contents'] # Set our data if defined if self.custom_data: payload.update({ 'custom_data': self.custom_data, }) # Set our postback data if defined if self.postback_data: payload.update({ 'data': self.postback_data, }) if title: # Display our title if defined payload.update({ 'headings': { self.language: title, }}) if self.subtitle: payload.update({ 'subtitle': { self.language: self.subtitle, }, }) # Acquire our large_icon image URL (if set) image_url = None if not self.include_image \ else self.image_url(notify_type) if image_url: payload['large_icon'] = image_url # Acquire our small_icon image URL (if set) image_url = None if not self.include_image \ else self.image_url(notify_type, image_size=NotifyImageSize.XY_32) if image_url: payload['small_icon'] = image_url for category in ONESIGNAL_CATEGORIES: # Create a pointer to our list of targets for specified category targets = self.targets[category] for index in range(0, len(targets), self.batch_size): payload[category] = targets[index:index + self.batch_size] # Track our sent count sent_count += len(payload[category]) self.logger.debug('OneSignal POST URL: %s (cert_verify=%r)' % ( self.notify_url, self.verify_certificate, )) self.logger.debug('OneSignal Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( self.notify_url, data=dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code not in ( requests.codes.ok, requests.codes.no_content): # We had a problem status_str = \ NotifyOneSignal.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to send OneSignal notification: ' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n%s', r.content) has_error = True else: self.logger.info('Sent OneSignal notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending OneSignal ' 
'notification.' ) self.logger.debug('Socket Exception: %s', str(e)) has_error = True if not sent_count: # There is no one to notify; we need to capture this and not # return a valid self.logger.warning('There are no OneSignal targets to notify') return False return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol, self.template_id, self.app, self.apikey, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'image': 'yes' if self.include_image else 'no', 'batch': 'yes' if self.batch_size > 1 else 'no', } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) custom_data, needs_decoding = encode_b64_dict(self.custom_data) # custom_data, needs_decoding = self.custom_data, False # Save our template data params.update( {':{}'.format(k): v for k, v in custom_data.items()} ) # Save our postback data params.update( {'+{}'.format(k): v for k, v in self.postback_data.items()}) if self.use_contents != self.template_args['contents']['default']: params['contents'] = 'yes' if self.use_contents else 'no' if (self.decode_tpl_args != self.template_args['decode']['default'] or needs_decoding): params['decode'] = 'yes' if (self.decode_tpl_args or needs_decoding) else 'no' return '{schema}://{tp_id}{app}@{apikey}/{targets}?{params}'.format( schema=self.secure_protocol, tp_id='{}:'.format( self.pprint(self.template_id, privacy, safe='')) if self.template_id else '', app=self.pprint(self.app, privacy, safe=''), apikey=self.pprint(self.apikey, privacy, safe=''), targets='/'.join(chain( [NotifyOneSignal.quote(x) for x in self.targets[OneSignalCategory.PLAYER]], [NotifyOneSignal.quote(x) for x in self.targets[OneSignalCategory.EMAIL]], [NotifyOneSignal.quote('{}{}'.format( NotifyOneSignal.template_tokens ['target_user']['prefix'], x), safe='') for x in self.targets[OneSignalCategory.USER]], [NotifyOneSignal.quote('{}{}'.format( NotifyOneSignal.template_tokens ['target_segment']['prefix'], x), safe='') for x in self.targets[OneSignalCategory.SEGMENT]])), params=NotifyOneSignal.urlencode(params), ) def __len__(self): """ Returns the number of targets associated with this notification """ # # Factor batch into calculation # if self.batch_size > 1: # Batches can only be sent by group (you can't combine groups into # a single batch) total_targets = 0 for k, m in self.targets.items(): targets = len(m) total_targets += int(targets / self.batch_size) + \ (1 if targets % self.batch_size else 0) return total_targets # Normal batch count; just count the targets return sum([len(m) for _, m in self.targets.items()]) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. 
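        Illustrative URL layouts this parser is meant to accept (values are
        placeholders; the prefixes follow the template tokens defined above):

            onesignal://{app_id}@{apikey}/{player_id}
            onesignal://{app_id}@{apikey}/{email}
            onesignal://{template_id}:{app_id}@{apikey}/@{user_id}/#{segment}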
""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results if not results.get('password'): # The APP ID identifier associated with the account results['app'] = NotifyOneSignal.unquote(results['user']) else: # The APP ID identifier associated with the account results['app'] = NotifyOneSignal.unquote(results['password']) # The Template ID results['template'] = NotifyOneSignal.unquote(results['user']) # Get Image Boolean (if set) results['include_image'] = \ parse_bool( results['qsd'].get( 'image', NotifyOneSignal.template_args['image']['default'])) # Get Batch Boolean (if set) results['batch'] = \ parse_bool( results['qsd'].get( 'batch', NotifyOneSignal.template_args['batch']['default'])) # Get Use Contents Boolean (if set) results['use_contents'] = \ parse_bool( results['qsd'].get( 'contents', NotifyOneSignal.template_args['contents']['default'])) # Get Use Contents Boolean (if set) results['decode_tpl_args'] = \ parse_bool( results['qsd'].get( 'decode', NotifyOneSignal.template_args['decode']['default'])) # The API Key is stored in the hostname results['apikey'] = NotifyOneSignal.unquote(results['host']) # Get our Targets results['targets'] = NotifyOneSignal.split_path(results['fullpath']) # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyOneSignal.parse_list(results['qsd']['to']) if 'app' in results['qsd'] and len(results['qsd']['app']): results['app'] = \ NotifyOneSignal.unquote(results['qsd']['app']) if 'apikey' in results['qsd'] and len(results['qsd']['apikey']): results['apikey'] = \ NotifyOneSignal.unquote(results['qsd']['apikey']) if 'template' in results['qsd'] and len(results['qsd']['template']): results['template'] = \ NotifyOneSignal.unquote(results['qsd']['template']) if 'subtitle' in results['qsd'] and len(results['qsd']['subtitle']): results['subtitle'] = \ NotifyOneSignal.unquote(results['qsd']['subtitle']) if 'lang' in results['qsd'] and len(results['qsd']['lang']): results['language'] = \ NotifyOneSignal.unquote(results['qsd']['lang']) # Store our custom data results['custom'] = results['qsd:'] # Store our postback data results['postback'] = results['qsd+'] return results apprise-1.9.3/apprise/plugins/opsgenie.py000066400000000000000000000674141477231770000205220ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # Signup @ https://www.opsgenie.com # # Generate your Integration API Key # https://app.opsgenie.com/settings/integration/add/API/ # Knowing this, you can build your Opsgenie URL as follows: # opsgenie://{apikey}/ # opsgenie://{apikey}/@{user} # opsgenie://{apikey}/*{schedule} # opsgenie://{apikey}/^{escalation} # opsgenie://{apikey}/#{team} # # You can mix and match what you want to notify freely # opsgenie://{apikey}/@{user}/#{team}/*{schedule}/^{escalation} # # If no target prefix is specified, then it is assumed to be a user. # # API Documentation: https://docs.opsgenie.com/docs/alert-api # API Integration Docs: https://docs.opsgenie.com/docs/api-integration import requests from json import dumps, loads import hashlib from .base import NotifyBase from ..common import NotifyType, NOTIFY_TYPES from ..common import PersistentStoreMode from ..utils.parse import validate_regex, is_uuid, parse_list, parse_bool from ..locale import gettext_lazy as _ class OpsgenieCategory(NotifyBase): """ We define the different category types that we can notify """ USER = 'user' SCHEDULE = 'schedule' ESCALATION = 'escalation' TEAM = 'team' OPSGENIE_CATEGORIES = ( OpsgenieCategory.USER, OpsgenieCategory.SCHEDULE, OpsgenieCategory.ESCALATION, OpsgenieCategory.TEAM, ) class OpsgenieAlertAction: """ Defines the supported actions """ # Use mapping (specify :key=arg to over-ride) MAP = 'map' # Create new alert (default) NEW = 'new' # Close Alert CLOSE = 'close' # Delete Alert DELETE = 'delete' # Acknowledge Alert ACKNOWLEDGE = 'acknowledge' # Add note to alert NOTE = 'note' OPSGENIE_ACTIONS = ( OpsgenieAlertAction.MAP, OpsgenieAlertAction.NEW, OpsgenieAlertAction.CLOSE, OpsgenieAlertAction.DELETE, OpsgenieAlertAction.ACKNOWLEDGE, OpsgenieAlertAction.NOTE, ) # Map all support Apprise Categories to Opsgenie Categories OPSGENIE_ALERT_MAP = { NotifyType.INFO: OpsgenieAlertAction.CLOSE, NotifyType.SUCCESS: OpsgenieAlertAction.CLOSE, NotifyType.WARNING: OpsgenieAlertAction.NEW, NotifyType.FAILURE: OpsgenieAlertAction.NEW, } # Regions class OpsgenieRegion: US = 'us' EU = 'eu' # Opsgenie APIs OPSGENIE_API_LOOKUP = { OpsgenieRegion.US: 'https://api.opsgenie.com/v2/alerts', OpsgenieRegion.EU: 'https://api.eu.opsgenie.com/v2/alerts', } # A List of our regions we can use for verification OPSGENIE_REGIONS = ( OpsgenieRegion.US, OpsgenieRegion.EU, ) # Priorities class OpsgeniePriority: LOW = 1 MODERATE = 2 NORMAL = 3 HIGH = 4 EMERGENCY = 5 OPSGENIE_PRIORITIES = { # Note: This also acts as a reverse lookup mapping OpsgeniePriority.LOW: 'low', OpsgeniePriority.MODERATE: 'moderate', OpsgeniePriority.NORMAL: 'normal', OpsgeniePriority.HIGH: 'high', OpsgeniePriority.EMERGENCY: 'emergency', } OPSGENIE_PRIORITY_MAP = { # Maps against string 'low' 'l': OpsgeniePriority.LOW, # Maps against string 'moderate' 'm': OpsgeniePriority.MODERATE, # Maps against string 'normal' 'n': OpsgeniePriority.NORMAL, # Maps against string 'high' 'h': OpsgeniePriority.HIGH, # Maps against string 'emergency' 'e': 
OpsgeniePriority.EMERGENCY, # Entries to additionally support (so more like Opsgenie's API) '1': OpsgeniePriority.LOW, '2': OpsgeniePriority.MODERATE, '3': OpsgeniePriority.NORMAL, '4': OpsgeniePriority.HIGH, '5': OpsgeniePriority.EMERGENCY, # Support p-prefix 'p1': OpsgeniePriority.LOW, 'p2': OpsgeniePriority.MODERATE, 'p3': OpsgeniePriority.NORMAL, 'p4': OpsgeniePriority.HIGH, 'p5': OpsgeniePriority.EMERGENCY, } class NotifyOpsgenie(NotifyBase): """ A wrapper for Opsgenie Notifications """ # The default descriptive name associated with the Notification service_name = 'Opsgenie' # The services URL service_url = 'https://opsgenie.com/' # All notification requests are secure secure_protocol = 'opsgenie' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_opsgenie' # The maximum length of the body body_maxlen = 15000 # Our default is to no not use persistent storage beyond in-memory # reference storage_mode = PersistentStoreMode.AUTO # If we don't have the specified min length, then we don't bother using # the body directive opsgenie_body_minlen = 130 # The default region to use if one isn't otherwise specified opsgenie_default_region = OpsgenieRegion.US # The maximum allowable targets within a notification default_batch_size = 50 # Defines our default message mapping opsgenie_message_map = { # Add a note to existing alert NotifyType.INFO: OpsgenieAlertAction.NOTE, # Close existing alert NotifyType.SUCCESS: OpsgenieAlertAction.CLOSE, # Create notice NotifyType.WARNING: OpsgenieAlertAction.NEW, # Create notice NotifyType.FAILURE: OpsgenieAlertAction.NEW, } # Define object templates templates = ( '{schema}://{apikey}', '{schema}://{user}@{apikey}', '{schema}://{apikey}/{targets}', '{schema}://{user}@{apikey}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'apikey': { 'name': _('API Key'), 'type': 'string', 'private': True, 'required': True, }, 'user': { 'name': _('Username'), 'type': 'string', }, 'target_escalation': { 'name': _('Target Escalation'), 'prefix': '^', 'type': 'string', 'map_to': 'targets', }, 'target_schedule': { 'name': _('Target Schedule'), 'type': 'string', 'prefix': '*', 'map_to': 'targets', }, 'target_user': { 'name': _('Target User'), 'type': 'string', 'prefix': '@', 'map_to': 'targets', }, 'target_team': { 'name': _('Target Team'), 'type': 'string', 'prefix': '#', 'map_to': 'targets', }, 'targets': { 'name': _('Targets '), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'region': { 'name': _('Region Name'), 'type': 'choice:string', 'values': OPSGENIE_REGIONS, 'default': OpsgenieRegion.US, 'map_to': 'region_name', }, 'batch': { 'name': _('Batch Mode'), 'type': 'bool', 'default': False, }, 'priority': { 'name': _('Priority'), 'type': 'choice:int', 'values': OPSGENIE_PRIORITIES, 'default': OpsgeniePriority.NORMAL, }, 'entity': { 'name': _('Entity'), 'type': 'string', }, 'alias': { 'name': _('Alias'), 'type': 'string', }, 'tags': { 'name': _('Tags'), 'type': 'string', }, 'to': { 'alias_of': 'targets', }, 'action': { 'name': _('Action'), 'type': 'choice:string', 'values': OPSGENIE_ACTIONS, 'default': OPSGENIE_ACTIONS[0], } }) # Map of key-value pairs to use as custom properties of the alert. 
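    # For example (illustrative placeholders only), a URL such as
    #   opsgenie://{apikey}/@user/?+room=kitchen&:warning=new
    # would surface `room=kitchen` as a custom alert detail and remap the
    # `warning` notification type to the `new` alert action.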
template_kwargs = { 'details': { 'name': _('Details'), 'prefix': '+', }, 'mapping': { 'name': _('Action Mapping'), 'prefix': ':', }, } def __init__(self, apikey, targets, region_name=None, details=None, priority=None, alias=None, entity=None, batch=False, tags=None, action=None, mapping=None, **kwargs): """ Initialize Opsgenie Object """ super().__init__(**kwargs) # API Key (associated with project) self.apikey = validate_regex(apikey) if not self.apikey: msg = 'An invalid Opsgenie API Key ' \ '({}) was specified.'.format(apikey) self.logger.warning(msg) raise TypeError(msg) # The Priority of the message self.priority = NotifyOpsgenie.template_args['priority']['default'] \ if not priority else \ next(( v for k, v in OPSGENIE_PRIORITY_MAP.items() if str(priority).lower().startswith(k)), NotifyOpsgenie.template_args['priority']['default']) # Store our region try: self.region_name = self.opsgenie_default_region \ if region_name is None else region_name.lower() if self.region_name not in OPSGENIE_REGIONS: # allow the outer except to handle this common response raise except: # Invalid region specified msg = 'The Opsgenie region specified ({}) is invalid.' \ .format(region_name) self.logger.warning(msg) raise TypeError(msg) if action and isinstance(action, str): self.action = next( (a for a in OPSGENIE_ACTIONS if a.startswith(action)), None) if self.action not in OPSGENIE_ACTIONS: msg = 'The Opsgenie action specified ({}) is invalid.'\ .format(action) self.logger.warning(msg) raise TypeError(msg) else: self.action = self.template_args['action']['default'] # Store our mappings self.mapping = self.opsgenie_message_map.copy() if mapping and isinstance(mapping, dict): for _k, _v in mapping.items(): # Get our mapping k = next((t for t in NOTIFY_TYPES if t.startswith(_k)), None) if not k: msg = 'The Opsgenie mapping key specified ({}) ' \ 'is invalid.'.format(_k) self.logger.warning(msg) raise TypeError(msg) _v_lower = _v.lower() v = next((v for v in OPSGENIE_ACTIONS[1:] if v.startswith(_v_lower)), None) if not v: msg = 'The Opsgenie mapping value (assigned to {}) ' \ 'specified ({}) is invalid.'.format(k, _v) self.logger.warning(msg) raise TypeError(msg) # Update our mapping self.mapping[k] = v self.details = {} if details: # Store our extra details self.details.update(details) # Prepare Batch Mode Flag self.batch_size = self.default_batch_size if batch else 1 # Assign our tags (if defined) self.__tags = parse_list(tags) # Assign our entity (if defined) self.entity = entity # Assign our alias (if defined) self.alias = alias # Initialize our Targets self.targets = [] # Sort our targets for _target in parse_list(targets): target = _target.strip() if len(target) < 2: self.logger.debug('Ignoring Opsgenie Entry: %s' % target) continue if target.startswith(NotifyOpsgenie.template_tokens ['target_team']['prefix']): self.targets.append( {'type': OpsgenieCategory.TEAM, 'id': target[1:]} if is_uuid(target[1:]) else {'type': OpsgenieCategory.TEAM, 'name': target[1:]}) elif target.startswith(NotifyOpsgenie.template_tokens ['target_schedule']['prefix']): self.targets.append( {'type': OpsgenieCategory.SCHEDULE, 'id': target[1:]} if is_uuid(target[1:]) else {'type': OpsgenieCategory.SCHEDULE, 'name': target[1:]}) elif target.startswith(NotifyOpsgenie.template_tokens ['target_escalation']['prefix']): self.targets.append( {'type': OpsgenieCategory.ESCALATION, 'id': target[1:]} if is_uuid(target[1:]) else {'type': OpsgenieCategory.ESCALATION, 'name': target[1:]}) elif target.startswith(NotifyOpsgenie.template_tokens 
['target_user']['prefix']): self.targets.append( {'type': OpsgenieCategory.USER, 'id': target[1:]} if is_uuid(target[1:]) else {'type': OpsgenieCategory.USER, 'username': target[1:]}) else: # Ambiguious entry; treat it as a user but not before # displaying a warning to the end user first: self.logger.debug( 'Treating ambigious Opsgenie target %s as a user', target) self.targets.append( {'type': OpsgenieCategory.USER, 'id': target} if is_uuid(target) else {'type': OpsgenieCategory.USER, 'username': target}) def _fetch(self, method, url, payload, params=None): """ Performs server retrieval/update and returns JSON Response """ headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json', 'Authorization': 'GenieKey {}'.format(self.apikey), } # Some Debug Logging self.logger.debug( 'Opsgenie POST URL: {} (cert_verify={})'.format( url, self.verify_certificate)) self.logger.debug('Opsgenie Payload: {}' .format(payload)) # Initialize our response object content = {} # Always call throttle before any remote server i/o is made self.throttle() try: r = method( url, data=dumps(payload), params=params, headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) # A Response might look like: # { # "result": "Request will be processed", # "took": 0.302, # "requestId": "43a29c5c-3dbf-4fa4-9c26-f4f71023e120" # } try: # Update our response object content = loads(r.content) except (AttributeError, TypeError, ValueError): # ValueError = r.content is Unparsable # TypeError = r.content is None # AttributeError = r is None content = {} if r.status_code not in ( requests.codes.accepted, requests.codes.ok): status_str = \ NotifyBase.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to send Opsgenie notification:' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) return (False, content.get('requestId')) # If we reach here; the message was sent self.logger.info('Sent Opsgenie notification') self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) return (True, content.get('requestId')) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Opsgenie ' 'notification.') self.logger.debug('Socket Exception: %s' % str(e)) return (False, content.get('requestId')) def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Opsgenie Notification """ # Get our Opsgenie Action action = OPSGENIE_ALERT_MAP[notify_type] \ if self.action == OpsgenieAlertAction.MAP else self.action # Prepare our URL as it's based on our hostname notify_url = OPSGENIE_API_LOOKUP[self.region_name] # Initialize our has_error flag has_error = False # Default method is to post method = requests.post # For indexing in persistent store key = hashlib.sha1( (self.entity if self.entity else ( self.alias if self.alias else ( title if title else self.app_id))) .encode('utf-8')).hexdigest()[0:10] # Get our Opsgenie Request IDs request_ids = self.store.get(key, []) if not isinstance(request_ids, list): request_ids = [] if action == OpsgenieAlertAction.NEW: # Create a copy ouf our details object details = self.details.copy() if 'type' not in details: details['type'] = notify_type # Use body if title not set title_body = body if not title else title # Prepare our payload payload = { 'source': self.app_desc, 'message': title_body, 'description': body, 'details': details, 'priority': 'P{}'.format(self.priority), } # Use our body directive if we 
exceed the minimum message # limitation if len(payload['message']) > self.opsgenie_body_minlen: payload['message'] = '{}...'.format( title_body[:self.opsgenie_body_minlen - 3]) if self.__tags: payload['tags'] = self.__tags if self.entity: payload['entity'] = self.entity if self.alias: payload['alias'] = self.alias if self.user: payload['user'] = self.user # reset our request IDs - we will re-populate them request_ids = [] length = len(self.targets) if self.targets else 1 for index in range(0, length, self.batch_size): if self.targets: # If there were no targets identified, then we simply # just iterate once without the responders set payload['responders'] = \ self.targets[index:index + self.batch_size] # Perform our post success, request_id = self._fetch( method, notify_url, payload) if success and request_id: # Save our response request_ids.append(request_id) else: has_error = True # Store our entries for a maximum of 60 days self.store.set(key, request_ids, expires=60 * 60 * 24 * 60) elif request_ids: # Prepare our payload payload = { 'source': self.app_desc, 'note': body, } if self.user: payload['user'] = self.user # Prepare our Identifier type params = { 'identifierType': 'id', } for request_id in request_ids: if action == OpsgenieAlertAction.DELETE: # Update our URL url = f'{notify_url}/{request_id}' method = requests.delete elif action == OpsgenieAlertAction.ACKNOWLEDGE: url = f'{notify_url}/{request_id}/acknowledge' elif action == OpsgenieAlertAction.CLOSE: url = f'{notify_url}/{request_id}/close' else: # action == OpsgenieAlertAction.CLOSE: url = f'{notify_url}/{request_id}/notes' # Perform our post success, _ = self._fetch(method, url, payload, params) if not success: has_error = True if not has_error and action == OpsgenieAlertAction.DELETE: # Remove cached entry self.store.clear(key) else: self.logger.info( 'No Opsgenie notification sent due to (nothing to %s) ' 'condition', self.action) return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.region_name, self.apikey) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'action': self.action, 'region': self.region_name, 'priority': OPSGENIE_PRIORITIES[self.template_args['priority']['default']] if self.priority not in OPSGENIE_PRIORITIES else OPSGENIE_PRIORITIES[self.priority], 'batch': 'yes' if self.batch_size > 1 else 'no', } # Assign our entity value (if defined) if self.entity: params['entity'] = self.entity # Assign our alias value (if defined) if self.alias: params['alias'] = self.alias # Assign our tags (if specifed) if self.__tags: params['tags'] = ','.join(self.__tags) # Append our details into our parameters params.update({'+{}'.format(k): v for k, v in self.details.items()}) # Append our assignment extra's into our parameters params.update( {':{}'.format(k): v for k, v in self.mapping.items()}) # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # A map allows us to map our target types so they can be correctly # placed back into your URL below. 
Hence map the 'user' -> '@' __map = { OpsgenieCategory.USER: NotifyOpsgenie.template_tokens['target_user']['prefix'], OpsgenieCategory.SCHEDULE: NotifyOpsgenie.template_tokens['target_schedule']['prefix'], OpsgenieCategory.ESCALATION: NotifyOpsgenie.template_tokens['target_escalation']['prefix'], OpsgenieCategory.TEAM: NotifyOpsgenie.template_tokens['target_team']['prefix'], } return '{schema}://{user}{apikey}/{targets}/?{params}'.format( schema=self.secure_protocol, user='{}@'.format(self.user) if self.user else '', apikey=self.pprint(self.apikey, privacy, safe=''), targets='/'.join( [NotifyOpsgenie.quote('{}{}'.format( __map[x['type']], x.get('id', x.get('name', x.get('username'))))) for x in self.targets]), params=NotifyOpsgenie.urlencode(params)) def __len__(self): """ Returns the number of targets associated with this notification """ # # Factor batch into calculation # targets = len(self.targets) if self.batch_size > 1: targets = int(targets / self.batch_size) + \ (1 if targets % self.batch_size else 0) return targets if targets > 0 else 1 @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # The API Key is stored in the hostname results['apikey'] = NotifyOpsgenie.unquote(results['host']) # Get our Targets results['targets'] = NotifyOpsgenie.split_path(results['fullpath']) # Add our Meta Detail keys results['details'] = {NotifyBase.unquote(x): NotifyBase.unquote(y) for x, y in results['qsd+'].items()} # Set our priority if 'priority' in results['qsd'] and len(results['qsd']['priority']): results['priority'] = \ NotifyOpsgenie.unquote(results['qsd']['priority']) # Get Batch Boolean (if set) results['batch'] = \ parse_bool( results['qsd'].get( 'batch', NotifyOpsgenie.template_args['batch']['default'])) if 'apikey' in results['qsd'] and len(results['qsd']['apikey']): results['apikey'] = \ NotifyOpsgenie.unquote(results['qsd']['apikey']) if 'tags' in results['qsd'] and len(results['qsd']['tags']): # Extract our tags results['tags'] = \ parse_list(NotifyOpsgenie.unquote(results['qsd']['tags'])) if 'region' in results['qsd'] and len(results['qsd']['region']): # Extract our region results['region_name'] = \ NotifyOpsgenie.unquote(results['qsd']['region']) if 'entity' in results['qsd'] and len(results['qsd']['entity']): # Extract optional entity field results['entity'] = \ NotifyOpsgenie.unquote(results['qsd']['entity']) if 'alias' in results['qsd'] and len(results['qsd']['alias']): # Extract optional alias field results['alias'] = \ NotifyOpsgenie.unquote(results['qsd']['alias']) # Handle 'to' email address if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'].append(results['qsd']['to']) # Store our action (if defined) if 'action' in results['qsd'] and len(results['qsd']['action']): results['action'] = \ NotifyOpsgenie.unquote(results['qsd']['action']) # store any custom mapping defined results['mapping'] = \ {NotifyOpsgenie.unquote(x): NotifyOpsgenie.unquote(y) for x, y in results['qsd:'].items()} return results apprise-1.9.3/apprise/plugins/pagerduty.py000066400000000000000000000434241477231770000207100ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. 
# Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # API Refererence: # - https://developer.pagerduty.com/api-reference/\ # 368ae3d938c9e-send-an-event-to-pager-duty # import requests from json import dumps from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyType from ..common import NotifyImageSize from ..utils.parse import validate_regex, parse_bool from ..locale import gettext_lazy as _ class PagerDutySeverity: """ Defines the Pager Duty Severity Levels """ INFO = 'info' WARNING = 'warning' ERROR = 'error' CRITICAL = 'critical' # Map all support Apprise Categories with the Pager Duty ones PAGERDUTY_SEVERITY_MAP = { NotifyType.INFO: PagerDutySeverity.INFO, NotifyType.SUCCESS: PagerDutySeverity.INFO, NotifyType.WARNING: PagerDutySeverity.WARNING, NotifyType.FAILURE: PagerDutySeverity.CRITICAL, } PAGERDUTY_SEVERITIES = ( PagerDutySeverity.INFO, PagerDutySeverity.WARNING, PagerDutySeverity.CRITICAL, PagerDutySeverity.ERROR, ) # Priorities class PagerDutyRegion: US = 'us' EU = 'eu' # SparkPost APIs PAGERDUTY_API_LOOKUP = { PagerDutyRegion.US: 'https://events.pagerduty.com/v2/enqueue', PagerDutyRegion.EU: 'https://events.eu.pagerduty.com/v2/enqueue', } # A List of our regions we can use for verification PAGERDUTY_REGIONS = ( PagerDutyRegion.US, PagerDutyRegion.EU, ) class NotifyPagerDuty(NotifyBase): """ A wrapper for Pager Duty Notifications """ # The default descriptive name associated with the Notification service_name = 'Pager Duty' # The services URL service_url = 'https://pagerduty.com/' # Secure Protocol secure_protocol = 'pagerduty' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_pagerduty' # We don't support titles for Pager Duty notifications title_maxlen = 0 # Allows the user to specify the NotifyImageSize object; this is supported # through the webhook image_size = NotifyImageSize.XY_128 # Our event action type event_action = 'trigger' # The default region to use if one isn't otherwise specified default_region = PagerDutyRegion.US # Define object templates templates = ( '{schema}://{integrationkey}@{apikey}', '{schema}://{integrationkey}@{apikey}/{source}', '{schema}://{integrationkey}@{apikey}/{source}/{component}', ) # Define our 
template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'apikey': { 'name': _('API Key'), 'type': 'string', 'private': True, 'required': True }, # Optional but triggers V2 API 'integrationkey': { 'name': _('Integration Key'), 'type': 'string', 'private': True, 'required': True }, 'source': { # Optional Source Identifier (preferably a FQDN) 'name': _('Source'), 'type': 'string', 'default': 'Apprise', }, 'component': { # Optional Component Identifier 'name': _('Component'), 'type': 'string', 'default': 'Notification', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'group': { 'name': _('Group'), 'type': 'string', }, 'class': { 'name': _('Class'), 'type': 'string', 'map_to': 'class_id', }, 'click': { 'name': _('Click'), 'type': 'string', }, 'region': { 'name': _('Region Name'), 'type': 'choice:string', 'values': PAGERDUTY_REGIONS, 'default': PagerDutyRegion.US, 'map_to': 'region_name', }, # The severity is automatically determined, however you can optionally # over-ride its value and force it to be what you want 'severity': { 'name': _('Severity'), 'type': 'choice:string', 'values': PAGERDUTY_SEVERITIES, 'map_to': 'severity', }, 'image': { 'name': _('Include Image'), 'type': 'bool', 'default': True, 'map_to': 'include_image', }, }) # Define any kwargs we're using template_kwargs = { 'details': { 'name': _('Custom Details'), 'prefix': '+', }, } def __init__(self, apikey, integrationkey=None, source=None, component=None, group=None, class_id=None, include_image=True, click=None, details=None, region_name=None, severity=None, **kwargs): """ Initialize Pager Duty Object """ super().__init__(**kwargs) # Long-Lived Access token (generated from User Profile) self.apikey = validate_regex(apikey) if not self.apikey: msg = 'An invalid Pager Duty API Key ' \ '({}) was specified.'.format(apikey) self.logger.warning(msg) raise TypeError(msg) self.integration_key = validate_regex(integrationkey) if not self.integration_key: msg = 'An invalid Pager Duty Routing Key ' \ '({}) was specified.'.format(integrationkey) self.logger.warning(msg) raise TypeError(msg) # An Optional Source self.source = self.template_tokens['source']['default'] if source: self.source = validate_regex(source) if not self.source: msg = 'An invalid Pager Duty Notification Source ' \ '({}) was specified.'.format(source) self.logger.warning(msg) raise TypeError(msg) else: self.component = self.template_tokens['source']['default'] # An Optional Component self.component = self.template_tokens['component']['default'] if component: self.component = validate_regex(component) if not self.component: msg = 'An invalid Pager Duty Notification Component ' \ '({}) was specified.'.format(component) self.logger.warning(msg) raise TypeError(msg) else: self.component = self.template_tokens['component']['default'] # Store our region try: self.region_name = self.default_region \ if region_name is None else region_name.lower() if self.region_name not in PAGERDUTY_REGIONS: # allow the outer except to handle this common response raise except: # Invalid region specified msg = 'The PagerDuty region specified ({}) is invalid.' \ .format(region_name) self.logger.warning(msg) raise TypeError(msg) # The severity (if specified) self.severity = \ None if severity is None else next(( s for s in PAGERDUTY_SEVERITIES if str(s).lower().startswith(severity)), False) if self.severity is False: # Invalid severity specified msg = 'The PagerDuty severity specified ({}) is invalid.' 
\ .format(severity) self.logger.warning(msg) raise TypeError(msg) # A clickthrough option for notifications self.click = click # Store Class ID if specified self.class_id = class_id # Store Group if specified self.group = group self.details = {} if details: # Store our extra details self.details.update(details) # Display our Apprise Image self.include_image = include_image return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Send our PagerDuty Notification """ # Prepare our headers headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json', 'Authorization': 'Token token={}'.format(self.apikey), } # Prepare our persistent_notification.create payload payload = { # Define our integration key 'routing_key': self.integration_key, # Prepare our payload 'payload': { 'summary': body, # Set our severity 'severity': PAGERDUTY_SEVERITY_MAP[notify_type] if not self.severity else self.severity, # Our Alerting Source/Component 'source': self.source, 'component': self.component, }, 'client': self.app_id, # Our Event Action 'event_action': self.event_action, } if self.group: payload['payload']['group'] = self.group if self.class_id: payload['payload']['class'] = self.class_id if self.click: payload['links'] = [{ "href": self.click, }] # Acquire our image url if configured to do so image_url = None if not self.include_image else \ self.image_url(notify_type) if image_url: payload['images'] = [{ 'src': image_url, 'alt': notify_type, }] if self.details: payload['payload']['custom_details'] = {} # Apply any provided custom details for k, v in self.details.items(): payload['payload']['custom_details'][k] = v # Prepare our URL based on region notify_url = PAGERDUTY_API_LOOKUP[self.region_name] self.logger.debug('Pager Duty POST URL: %s (cert_verify=%r)' % ( notify_url, self.verify_certificate, )) self.logger.debug('Pager Duty Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( notify_url, data=dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code not in ( requests.codes.ok, requests.codes.created, requests.codes.accepted): # We had a problem status_str = \ NotifyPagerDuty.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to send Pager Duty notification: ' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.info('Sent Pager Duty notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Pager Duty ' 'notification to %s.' % self.host) self.logger.debug('Socket Exception: %s' % str(e)) # Return; we're done return False return True @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol, self.integration_key, self.apikey, self.source, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
""" # Define any URL parameters params = { 'region': self.region_name, 'image': 'yes' if self.include_image else 'no', } if self.class_id: params['class'] = self.class_id if self.group: params['group'] = self.group if self.click is not None: params['click'] = self.click if self.severity: params['severity'] = self.severity # Append our custom entries our parameters params.update({'+{}'.format(k): v for k, v in self.details.items()}) # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) url = '{schema}://{integration_key}@{apikey}/' \ '{source}/{component}?{params}' return url.format( schema=self.secure_protocol, # never encode hostname since we're expecting it to be a valid one integration_key=self.pprint( self.integration_key, privacy, mode=PrivacyMode.Secret, safe=''), apikey=self.pprint( self.apikey, privacy, mode=PrivacyMode.Secret, safe=''), source=self.pprint( self.source, privacy, safe=''), component=self.pprint( self.component, privacy, safe=''), params=NotifyPagerDuty.urlencode(params), ) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # The 'apikey' makes it easier to use yaml configuration if 'apikey' in results['qsd'] and len(results['qsd']['apikey']): results['apikey'] = \ NotifyPagerDuty.unquote(results['qsd']['apikey']) else: results['apikey'] = NotifyPagerDuty.unquote(results['host']) # The 'integrationkey' makes it easier to use yaml configuration if 'integrationkey' in results['qsd'] and \ len(results['qsd']['integrationkey']): results['integrationkey'] = \ NotifyPagerDuty.unquote(results['qsd']['integrationkey']) else: results['integrationkey'] = \ NotifyPagerDuty.unquote(results['user']) if 'click' in results['qsd'] and len(results['qsd']['click']): results['click'] = NotifyPagerDuty.unquote(results['qsd']['click']) if 'group' in results['qsd'] and len(results['qsd']['group']): results['group'] = \ NotifyPagerDuty.unquote(results['qsd']['group']) if 'class' in results['qsd'] and len(results['qsd']['class']): results['class_id'] = \ NotifyPagerDuty.unquote(results['qsd']['class']) if 'severity' in results['qsd'] and len(results['qsd']['severity']): results['severity'] = \ NotifyPagerDuty.unquote(results['qsd']['severity']) # Acquire our full path fullpath = NotifyPagerDuty.split_path(results['fullpath']) # Get our source if 'source' in results['qsd'] and len(results['qsd']['source']): results['source'] = \ NotifyPagerDuty.unquote(results['qsd']['source']) else: results['source'] = fullpath.pop(0) if fullpath else None # Get our component if 'component' in results['qsd'] and len(results['qsd']['component']): results['component'] = \ NotifyPagerDuty.unquote(results['qsd']['component']) else: results['component'] = fullpath.pop(0) if fullpath else None # Add our custom details key/value pairs that the user can potentially # over-ride if they wish to to our returned result set and tidy # entries by unquoting them results['details'] = { NotifyPagerDuty.unquote(x): NotifyPagerDuty.unquote(y) for x, y in results['qsd+'].items()} if 'region' in results['qsd'] and len(results['qsd']['region']): # Extract from name to associate with from address results['region_name'] = \ NotifyPagerDuty.unquote(results['qsd']['region']) # Include images with our message results['include_image'] = \ parse_bool(results['qsd'].get('image', 
True)) return results apprise-1.9.3/apprise/plugins/pagertree.py000066400000000000000000000334361477231770000206640ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import requests from json import dumps from uuid import uuid4 from .base import NotifyBase from ..common import NotifyType from ..utils.parse import parse_list, validate_regex from ..locale import gettext_lazy as _ # Actions class PagerTreeAction: CREATE = 'create' ACKNOWLEDGE = 'acknowledge' RESOLVE = 'resolve' # Urgencies class PagerTreeUrgency: SILENT = "silent" LOW = "low" MEDIUM = "medium" HIGH = "high" CRITICAL = "critical" PAGERTREE_ACTIONS = { PagerTreeAction.CREATE: 'create', PagerTreeAction.ACKNOWLEDGE: 'acknowledge', PagerTreeAction.RESOLVE: 'resolve', } PAGERTREE_URGENCIES = { # Note: This also acts as a reverse lookup mapping PagerTreeUrgency.SILENT: 'silent', PagerTreeUrgency.LOW: 'low', PagerTreeUrgency.MEDIUM: 'medium', PagerTreeUrgency.HIGH: 'high', PagerTreeUrgency.CRITICAL: 'critical', } # Extend HTTP Error Messages PAGERTREE_HTTP_ERROR_MAP = { 402: 'Payment Required - Please subscribe or upgrade', 403: 'Forbidden - Blocked', 404: 'Not Found - Invalid Integration ID', 405: 'Method Not Allowed - Integration Disabled', 429: 'Too Many Requests - Rate Limit Exceeded', } class NotifyPagerTree(NotifyBase): """ A wrapper for PagerTree Notifications """ # The default descriptive name associated with the Notification service_name = 'PagerTree' # The services URL service_url = 'https://pagertree.com/' # All PagerTree requests are secure secure_protocol = 'pagertree' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_pagertree' # PagerTree uses the http protocol with JSON requests notify_url = 'https://api.pagertree.com/integration/{}' # Define object templates templates = ( '{schema}://{integration}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'integration': { 'name': _('Integration ID'), 'type': 'string', 'private': True, 'required': True, } }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'action': { 'name': 
_('Action'), 'type': 'choice:string', 'values': PAGERTREE_ACTIONS, 'default': PagerTreeAction.CREATE, }, 'thirdparty': { 'name': _('Third Party ID'), 'type': 'string', }, 'urgency': { 'name': _('Urgency'), 'type': 'choice:string', 'values': PAGERTREE_URGENCIES, }, 'tags': { 'name': _('Tags'), 'type': 'string', }, }) # Define any kwargs we're using template_kwargs = { 'headers': { 'name': _('HTTP Header'), 'prefix': '+', }, 'payload_extras': { 'name': _('Payload Extras'), 'prefix': ':', }, 'meta_extras': { 'name': _('Meta Extras'), 'prefix': '-', }, } def __init__(self, integration, action=None, thirdparty=None, urgency=None, tags=None, headers=None, payload_extras=None, meta_extras=None, **kwargs): """ Initialize PagerTree Object """ super().__init__(**kwargs) # Integration ID (associated with account) self.integration = \ validate_regex(integration, r'^int_[a-zA-Z0-9\-_]{7,14}$') if not self.integration: msg = 'An invalid PagerTree Integration ID ' \ '({}) was specified.'.format(integration) self.logger.warning(msg) raise TypeError(msg) # thirdparty (optional, in case they want to pass the # acknowledge or resolve action) self.thirdparty = None if thirdparty: # An id was specified, we want to validate it self.thirdparty = validate_regex(thirdparty) if not self.thirdparty: msg = 'An invalid PagerTree third party ID ' \ '({}) was specified.'.format(thirdparty) self.logger.warning(msg) raise TypeError(msg) self.headers = {} if headers: # Store our extra headers self.headers.update(headers) self.payload_extras = {} if payload_extras: # Store our extra payload entries self.payload_extras.update(payload_extras) self.meta_extras = {} if meta_extras: # Store our extra payload entries self.meta_extras.update(meta_extras) # Setup our action self.action = NotifyPagerTree.template_args['action']['default'] \ if action not in PAGERTREE_ACTIONS else \ PAGERTREE_ACTIONS[action] # Setup our urgency self.urgency = \ None if urgency not in PAGERTREE_URGENCIES else \ PAGERTREE_URGENCIES[urgency] # Any optional tags to attach to the notification self.__tags = parse_list(tags) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform PagerTree Notification """ # Prepare our headers headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json', } # Apply any/all header over-rides defined # For things like PagerTree Token headers.update(self.headers) # prepare JSON Object payload = { # Generate an ID (unless one was explicitly forced to be used) 'id': self.thirdparty if self.thirdparty else str(uuid4()), 'event_type': self.action, } if self.action == PagerTreeAction.CREATE: payload['title'] = title if title else self.app_desc payload['description'] = body payload['meta'] = self.meta_extras payload['tags'] = self.__tags if self.urgency is not None: payload['urgency'] = self.urgency # Apply any/all payload over-rides defined payload.update(self.payload_extras) # Prepare our URL based on integration notify_url = self.notify_url.format(self.integration) self.logger.debug('PagerTree POST URL: %s (cert_verify=%r)' % ( notify_url, self.verify_certificate, )) self.logger.debug('PagerTree Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( notify_url, data=dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code not in ( requests.codes.ok, requests.codes.created, requests.codes.accepted): # We had a problem status_str = \ 
NotifyPagerTree.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to send PagerTree notification: ' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.info('Sent PagerTree notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending PagerTree ' 'notification to %s.' % self.host) self.logger.debug('Socket Exception: %s' % str(e)) # Return; we're done return False return True @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.integration) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'action': self.action, } if self.thirdparty: params['tid'] = self.thirdparty if self.urgency: params['urgency'] = self.urgency if self.__tags: params['tags'] = ','.join([x for x in self.__tags]) # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Headers prefixed with a '+' sign # Append our headers into our parameters params.update({'+{}'.format(k): v for k, v in self.headers.items()}) # Meta: {} prefixed with a '-' sign # Append our meta extras into our parameters params.update( {'-{}'.format(k): v for k, v in self.meta_extras.items()}) # Payload body extras prefixed with a ':' sign # Append our payload extras into our parameters params.update( {':{}'.format(k): v for k, v in self.payload_extras.items()}) return '{schema}://{integration}?{params}'.format( schema=self.secure_protocol, # never encode hostname since we're expecting it to be a valid one integration=self.pprint(self.integration, privacy, safe=''), params=NotifyPagerTree.urlencode(params), ) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. 
""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Add our headers that the user can potentially over-ride if they wish # to to our returned result set and tidy entries by unquoting them results['headers'] = { NotifyPagerTree.unquote(x): NotifyPagerTree.unquote(y) for x, y in results['qsd+'].items() } # store any additional payload extra's defined results['payload_extras'] = { NotifyPagerTree.unquote(x): NotifyPagerTree.unquote(y) for x, y in results['qsd:'].items() } # store any additional meta extra's defined results['meta_extras'] = { NotifyPagerTree.unquote(x): NotifyPagerTree.unquote(y) for x, y in results['qsd-'].items() } # Integration ID if 'id' in results['qsd'] and len(results['qsd']['id']): # Shortened version of integration id results['integration'] = \ NotifyPagerTree.unquote(results['qsd']['id']) elif 'integration' in results['qsd'] and \ len(results['qsd']['integration']): results['integration'] = \ NotifyPagerTree.unquote(results['qsd']['integration']) else: results['integration'] = \ NotifyPagerTree.unquote(results['host']) # Set our thirdparty if 'tid' in results['qsd'] and len(results['qsd']['tid']): # Shortened version of thirdparty results['thirdparty'] = \ NotifyPagerTree.unquote(results['qsd']['tid']) elif 'thirdparty' in results['qsd'] and \ len(results['qsd']['thirdparty']): results['thirdparty'] = \ NotifyPagerTree.unquote(results['qsd']['thirdparty']) # Set our urgency if 'action' in results['qsd'] and \ len(results['qsd']['action']): results['action'] = \ NotifyPagerTree.unquote(results['qsd']['action']) # Set our urgency if 'urgency' in results['qsd'] and len(results['qsd']['urgency']): results['urgency'] = \ NotifyPagerTree.unquote(results['qsd']['urgency']) # Set our tags if 'tags' in results['qsd'] and len(results['qsd']['tags']): results['tags'] = \ parse_list(NotifyPagerTree.unquote(results['qsd']['tags'])) return results apprise-1.9.3/apprise/plugins/parseplatform.py000066400000000000000000000247251477231770000215660ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
import re import requests from json import dumps from .base import NotifyBase from ..common import NotifyType from ..utils.parse import validate_regex from ..locale import gettext_lazy as _ # Used to break path apart into list of targets TARGET_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+') # Priorities class ParsePlatformDevice: # All Devices ALL = 'all' # Apple IOS (APNS) IOS = 'ios' # Android/Firebase (FCM) ANDROID = 'android' PARSE_PLATFORM_DEVICES = ( ParsePlatformDevice.ALL, ParsePlatformDevice.IOS, ParsePlatformDevice.ANDROID, ) class NotifyParsePlatform(NotifyBase): """ A wrapper for Parse Platform Notifications """ # The default descriptive name associated with the Notification service_name = 'Parse Platform' # The services URL service_url = ' https://parseplatform.org/' # insecure notifications (using http) protocol = 'parsep' # Secure notifications (using https) secure_protocol = 'parseps' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_parseplatform' # Define object templates templates = ( '{schema}://{app_id}:{master_key}@{host}', '{schema}://{app_id}:{master_key}@{host}:{port}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'host': { 'name': _('Hostname'), 'type': 'string', 'required': True, }, 'port': { 'name': _('Port'), 'type': 'int', 'min': 1, 'max': 65535, }, 'app_id': { 'name': _('App ID'), 'type': 'string', 'private': True, 'required': True, }, 'master_key': { 'name': _('Master Key'), 'type': 'string', 'private': True, 'required': True, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'device': { 'name': _('Device'), 'type': 'choice:string', 'values': PARSE_PLATFORM_DEVICES, 'default': ParsePlatformDevice.ALL, }, 'app_id': { 'alias_of': 'app_id', }, 'master_key': { 'alias_of': 'master_key', }, }) def __init__(self, app_id, master_key, device=None, **kwargs): """ Initialize Parse Platform Object """ super().__init__(**kwargs) self.fullpath = kwargs.get('fullpath') if not isinstance(self.fullpath, str): self.fullpath = '/' # Application ID self.application_id = validate_regex(app_id) if not self.application_id: msg = 'An invalid Parse Platform Application ID ' \ '({}) was specified.'.format(app_id) self.logger.warning(msg) raise TypeError(msg) # Master Key self.master_key = validate_regex(master_key) if not self.master_key: msg = 'An invalid Parse Platform Master Key ' \ '({}) was specified.'.format(master_key) self.logger.warning(msg) raise TypeError(msg) # Initialize Devices Array self.devices = [] if device: self.device = device.lower() if device not in PARSE_PLATFORM_DEVICES: msg = 'An invalid Parse Platform device ' \ '({}) was specified.'.format(device) self.logger.warning(msg) raise TypeError(msg) else: self.device = self.template_args['device']['default'] if self.device == ParsePlatformDevice.ALL: self.devices = [d for d in PARSE_PLATFORM_DEVICES if d != ParsePlatformDevice.ALL] else: # Store our device self.devices.append(device) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Parse Platform Notification """ # Prepare our headers: headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json', 'X-Parse-Application-Id': self.application_id, 'X-Parse-Master-Key': self.master_key, } # prepare our payload payload = { 'where': { 'deviceType': { '$in': self.devices, } }, 'data': { 'title': title, 'alert': body, } } # Set our schema schema = 'https' if self.secure 
else 'http' # Our Notification URL url = '%s://%s' % (schema, self.host) if isinstance(self.port, int): url += ':%d' % self.port url += self.fullpath.rstrip('/') + '/parse/push/' self.logger.debug('Parse Platform POST URL: %s (cert_verify=%r)' % ( url, self.verify_certificate, )) self.logger.debug('Parse Platform Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( url, data=dumps(payload), headers=headers, verify=self.verify_certificate, ) if r.status_code != requests.codes.ok: # We had a problem status_str = NotifyParsePlatform.\ http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send Parse Platform notification: ' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.info('Sent Parse Platform notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occured sending Parse Platform ' 'notification to %s.' % self.host) self.logger.debug('Socket Exception: %s' % str(e)) # Return; we're done return False return True @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol if self.secure else self.protocol, self.application_id, self.master_key, self.host, self.port, self.fullpath, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any arguments set params = { 'device': self.device, } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) default_port = 443 if self.secure else 80 return \ '{schema}://{app_id}:{master_key}@' \ '{hostname}{port}{fullpath}/?{params}'.format( schema=self.secure_protocol if self.secure else self.protocol, app_id=self.pprint(self.application_id, privacy, safe=''), master_key=self.pprint(self.master_key, privacy, safe=''), hostname=NotifyParsePlatform.quote(self.host, safe=''), port='' if self.port is None or self.port == default_port else ':{}'.format(self.port), fullpath=NotifyParsePlatform.quote(self.fullpath, safe='/'), params=NotifyParsePlatform.urlencode(params)) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to substantiate this object. """ results = NotifyBase.parse_url(url) if not results: # We're done early as we couldn't load the results return results # App ID is retrieved from the user results['app_id'] = NotifyParsePlatform.unquote(results['user']) # Master Key is retrieved from the password results['master_key'] = \ NotifyParsePlatform.unquote(results['password']) # Device support override if 'device' in results['qsd'] and len(results['qsd']['device']): results['device'] = results['qsd']['device'] # Allow app_id attribute over-ride if 'app_id' in results['qsd'] and len(results['qsd']['app_id']): results['app_id'] = results['qsd']['app_id'] # Allow master_key attribute over-ride if 'master_key' in results['qsd'] \ and len(results['qsd']['master_key']): results['master_key'] = results['qsd']['master_key'] return results apprise-1.9.3/apprise/plugins/plivo.py000066400000000000000000000330201477231770000200240ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 3-Clause License # # Apprise - Push Notification Library. 
# Copyright (c) 2023, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # 3. Neither the name of the copyright holder nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # Create an account https://messagebird.com if you don't already have one # # Get your auth_id and auth token from the dashboard here: # - https://console.plivo.com/dashboard/ # import requests from json import dumps from .base import NotifyBase from ..common import NotifyType from ..utils.parse import ( parse_bool, is_phone_no, parse_phone_no, validate_regex) from ..locale import gettext_lazy as _ class NotifyPlivo(NotifyBase): """ A wrapper for Plivo Notifications """ # The default descriptive name associated with the Notification service_name = 'Plivo' # The services URL service_url = 'https://plivo.com' # The default protocol secure_protocol = 'plivo' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_plivo' # Plivo uses the http protocol with JSON requests notify_url = 'https://api.plivo.com/v1/Account/{auth_id}/Message/' # The maximum number of messages that can be sent in a single batch default_batch_size = 20 # The maximum length of the body body_maxlen = 140 # A title can not be used for SMS Messages. Setting this to zero will # cause any title (if defined) to get placed into the message body. 
title_maxlen = 0 # Define object templates templates = ( '{schema}://{auth_id}@{token}/{source}', '{schema}://{auth_id}@{token}/{source}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'auth_id': { 'name': _('Auth ID'), 'type': 'string', 'required': True, 'regex': (r'^[a-z0-9]{20,30}$', 'i'), }, 'token': { 'name': _('Auth Token'), 'type': 'string', 'required': True, 'regex': (r'^[a-z0-9]{30,50}$', 'i'), }, 'source': { 'name': _('Source Phone No'), 'type': 'string', 'prefix': '+', 'required': True, 'regex': (r'^[0-9\s)(+-]+$', 'i'), }, 'target_phone': { 'name': _('Target Phone No'), 'type': 'string', 'prefix': '+', 'regex': (r'^[0-9\s)(+-]+$', 'i'), 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', } }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'from': { 'alias_of': 'source', }, 'token': { 'alias_of': 'token', }, 'id': { 'alias_of': 'auth_id', }, 'batch': { 'name': _('Batch Mode'), 'type': 'bool', 'default': False, }, }) def __init__(self, auth_id, token, source, targets=None, batch=None, **kwargs): """ Initialize Plivo Object """ super(NotifyPlivo, self).__init__(**kwargs) self.auth_id = validate_regex( auth_id, *self.template_tokens['auth_id']['regex']) if not self.auth_id: msg = 'The Plivo authentication ID specified ({}) is ' \ 'invalid.'.format(auth_id) self.logger.warning(msg) raise TypeError(msg) self.token = validate_regex( token, *self.template_tokens['token']['regex']) if not self.token: msg = 'The Plivo authentication token specified ({}) is ' \ 'invalid.'.format(token) self.logger.warning(msg) raise TypeError(msg) result = is_phone_no(source) if not result: msg = 'The Plivo source specified ({}) is invalid.'\ .format(source) self.logger.warning(msg) raise TypeError(msg) # Store our source; enforce E.164 format self.source = f'+{result["full"]}' # Parse our targets self.targets = list() if targets: for target in parse_phone_no(targets): # Validate targets and drop bad ones: result = is_phone_no(target) if result: # store valid phone number; enforce E.164 format self.targets.append(f'+{result["full"]}') continue self.logger.warning( 'Dropped invalid phone # ' '({}) specified.'.format(target), ) else: # No sources specified, use our own phone no self.targets.append(self.source) # Set batch self.batch = batch if batch is not None \ else self.template_args['batch']['default'] def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Plivo Notification """ if not self.targets: # There were no services to notify self.logger.warning( 'There were no Plivo targets to notify.') return False # Initialize our has_error flag has_error = False # Prepare our headers headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json', } # Prepare our authentication auth = (self.auth_id, self.token) # Prepare our payload payload = { 'src': self.source, 'dst': None, 'text': body, } # Send in batches if identified to do so batch_size = 1 if not self.batch else self.default_batch_size for index in range(0, len(self.targets), batch_size): # Prepare our phone no (< delimits more then one) payload['recipients'] = \ ','.join(self.targets[index:index + batch_size]) # Some Debug Logging self.logger.debug( 'Plivo POST URL: {} (cert_verify={})'.format( self.notify_url, self.verify_certificate)) self.logger.debug('Plivo Payload: {}' .format(payload)) # Always call throttle before any remote server i/o is made 
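            # (The delay applied here, if any, is driven by the plugin's
            # request_rate_per_sec setting inherited from NotifyBase.)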
self.throttle() try: r = requests.post( self.notify_url, data=dumps(payload), headers=headers, auth=auth, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code not in ( requests.codes.ok, requests.codes.accepted): # We had a problem status_str = \ NotifyPlivo.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to send {} Plivo notification{}: ' '{}{}error={}.'.format( len(self.targets[index:index + batch_size]), ' to {}'.format(self.targets[index]) if batch_size == 1 else '(s)', status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure has_error = True continue else: self.logger.info( 'Send {} Plivo notification{}'.format( len(self.targets[index:index + batch_size]), ' to {}'.format(self.targets[index]) if batch_size == 1 else '(s)', )) except requests.RequestException as e: self.logger.warning( 'A Connection error occured sending Plivo:%s ' % ( self.targets) + 'notification.' ) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure has_error = True continue return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol if self.secure else self.protocol, self.auth_id, self.token, self.source, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any arguments set params = { 'batch': 'yes' if self.batch else 'no', } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) return '{schema}://{auth_id}@{token}/{source}/' \ '{targets}/?{params}'.format( schema=self.secure_protocol, auth_id=self.pprint(self.auth_id, privacy, safe=''), token=self.pprint(self.token, privacy, safe=''), source=self.source, targets='/'.join( [NotifyPlivo.quote(x, safe='+') for x in self.targets]), params=NotifyPlivo.urlencode(params)) def __len__(self): """ Returns the number of targets associated with this notification """ # # Factor batch into calculation # return len(self.targets) if self.targets else 1 @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to substantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # The Auth ID is in the username field if 'id' in results['qsd'] and len(results['qsd']['id']): results['auth_id'] = NotifyPlivo.unquote(results['qsd']['id']) else: results['auth_id'] = NotifyPlivo.unquote(results['user']) # Get our entries; split_path() looks after unquoting content for us # by default results['targets'] = NotifyPlivo.split_path(results['fullpath']) if 'token' in results['qsd'] and len(results['qsd']['token']): # Store token results['token'] = NotifyPlivo.unquote(results['qsd']['token']) # go ahead and put the host entry in the targets list if results['host']: results['targets'].insert( 0, NotifyPlivo.unquote(results['host'])) else: # The hostname is our authentication key results['token'] = NotifyPlivo.unquote(results['host']) if 'from' in results['qsd'] and len(results['qsd']['from']): results['source'] = \ NotifyPlivo.unquote(results['qsd']['from']) else: try: # The first path entry is the source/originator results['source'] = results['targets'].pop(0) except IndexError: # No source specified... 
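                # (object initialization will reject the configuration
                # later on if a valid source cannot be determined)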
results['source'] = None pass # Support the 'to' variable so that we can support targets this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyPlivo.parse_phone_no(results['qsd']['to']) # Get Batch Mode Flag results['batch'] = \ parse_bool(results['qsd'].get( 'batch', NotifyPlivo.template_args['batch']['default'])) return results apprise-1.9.3/apprise/plugins/popcorn_notify.py000066400000000000000000000250511477231770000217500ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
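# The example URL forms below follow the 'templates' entries defined later
# in this file; the API key and targets are placeholders for illustration
# only:
#
#   popcorn://apikey/user@example.com
#   popcorn://apikey/15551234567/user@example.com/?batch=yes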
import requests from .base import NotifyBase from ..common import NotifyType from ..utils.parse import ( is_email, is_phone_no, parse_list, parse_bool, validate_regex) from ..locale import gettext_lazy as _ class NotifyPopcornNotify(NotifyBase): """ A wrapper for PopcornNotify Notifications """ # The default descriptive name associated with the Notification service_name = 'PopcornNotify' # The services URL service_url = 'https://popcornnotify.com/' # The default protocol secure_protocol = 'popcorn' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_popcornnotify' # PopcornNotify uses the http protocol notify_url = 'https://popcornnotify.com/notify' # The maximum targets to include when doing batch transfers default_batch_size = 10 # Define object templates templates = ( '{schema}://{apikey}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'apikey': { 'name': _('API Key'), 'type': 'string', 'regex': (r'^[a-z0-9]+$', 'i'), 'required': True, }, 'target_phone': { 'name': _('Target Phone No'), 'type': 'string', 'prefix': '+', 'regex': (r'^[0-9\s)(+-]+$', 'i'), 'map_to': 'targets', }, 'target_email': { 'name': _('Target Email'), 'type': 'string', 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', 'required': True, } }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'batch': { 'name': _('Batch Mode'), 'type': 'bool', 'default': False, }, }) def __init__(self, apikey, targets=None, batch=False, **kwargs): """ Initialize PopcornNotify Object """ super().__init__(**kwargs) # Access Token (associated with project) self.apikey = validate_regex( apikey, *self.template_tokens['apikey']['regex']) if not self.apikey: msg = 'An invalid PopcornNotify API Key ' \ '({}) was specified.'.format(apikey) self.logger.warning(msg) raise TypeError(msg) # Prepare Batch Mode Flag self.batch = batch # Parse our targets self.targets = list() for target in parse_list(targets): # Validate targets and drop bad ones: result = is_phone_no(target) if result: # store valid phone number self.targets.append(result['full']) continue result = is_email(target) if result: # store valid email self.targets.append(result['full_email']) continue self.logger.warning( 'Dropped invalid target ' '({}) specified.'.format(target), ) def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform PopcornNotify Notification """ if len(self.targets) == 0: # There were no services to notify self.logger.warning( 'There were no PopcornNotify targets to notify.') return False # error tracking (used for function return) has_error = False # Prepare our headers headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/x-www-form-urlencoded', } # Prepare our payload payload = { 'message': body, 'subject': title, } auth = (self.apikey, None) # Send in batches if identified to do so batch_size = 1 if not self.batch else self.default_batch_size for index in range(0, len(self.targets), batch_size): # Prepare our recipients payload['recipients'] = \ ','.join(self.targets[index:index + batch_size]) self.logger.debug('PopcornNotify POST URL: %s (cert_verify=%r)' % ( self.notify_url, self.verify_certificate, )) self.logger.debug('PopcornNotify Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( self.notify_url, auth=auth, data=payload, 
headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyPopcornNotify.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to send {} PopcornNotify notification{}: ' '{}{}error={}.'.format( len(self.targets[index:index + batch_size]), ' to {}'.format(self.targets[index]) if batch_size == 1 else '(s)', status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure has_error = True continue else: self.logger.info( 'Sent {} PopcornNotify notification{}.' .format( len(self.targets[index:index + batch_size]), ' to {}'.format(self.targets[index]) if batch_size == 1 else '(s)', )) except requests.RequestException as e: self.logger.warning( 'A Connection error occured sending {} PopcornNotify ' 'notification(s).'.format( len(self.targets[index:index + batch_size]))) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure has_error = True continue return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.apikey) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'batch': 'yes' if self.batch else 'no', } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) return '{schema}://{apikey}/{targets}/?{params}'.format( schema=self.secure_protocol, apikey=self.pprint(self.apikey, privacy, safe=''), targets='/'.join( [NotifyPopcornNotify.quote(x, safe='') for x in self.targets]), params=NotifyPopcornNotify.urlencode(params)) def __len__(self): """ Returns the number of targets associated with this notification """ # # Factor batch into calculation # batch_size = 1 if not self.batch else self.default_batch_size targets = len(self.targets) if batch_size > 1: targets = int(targets / batch_size) + \ (1 if targets % batch_size else 0) return targets @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Get our entries; split_path() looks after unquoting content for us # by default results['targets'] = \ NotifyPopcornNotify.split_path(results['fullpath']) # The hostname is our authentication key results['apikey'] = NotifyPopcornNotify.unquote(results['host']) # Support the 'to' variable so that we can support targets this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyPopcornNotify.parse_list(results['qsd']['to']) # Get Batch Mode Flag results['batch'] = \ parse_bool(results['qsd'].get('batch', False)) return results apprise-1.9.3/apprise/plugins/prowl.py000066400000000000000000000235331477231770000200460ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. 
Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import requests from .base import NotifyBase from ..common import NotifyType from ..utils.parse import validate_regex from ..locale import gettext_lazy as _ # Priorities class ProwlPriority: LOW = -2 MODERATE = -1 NORMAL = 0 HIGH = 1 EMERGENCY = 2 PROWL_PRIORITIES = { # Note: This also acts as a reverse lookup mapping ProwlPriority.LOW: 'low', ProwlPriority.MODERATE: 'moderate', ProwlPriority.NORMAL: 'normal', ProwlPriority.HIGH: 'high', ProwlPriority.EMERGENCY: 'emergency', } PROWL_PRIORITY_MAP = { # Maps against string 'low' 'l': ProwlPriority.LOW, # Maps against string 'moderate' 'm': ProwlPriority.MODERATE, # Maps against string 'normal' 'n': ProwlPriority.NORMAL, # Maps against string 'high' 'h': ProwlPriority.HIGH, # Maps against string 'emergency' 'e': ProwlPriority.EMERGENCY, # Entries to additionally support (so more like Prowl's API) '-2': ProwlPriority.LOW, '-1': ProwlPriority.MODERATE, '0': ProwlPriority.NORMAL, '1': ProwlPriority.HIGH, '2': ProwlPriority.EMERGENCY, } # Provide some known codes Prowl uses and what they translate to: PROWL_HTTP_ERROR_MAP = { 406: 'IP address has exceeded API limit', 409: 'Request not aproved.', } class NotifyProwl(NotifyBase): """ A wrapper for Prowl Notifications """ # The default descriptive name associated with the Notification service_name = 'Prowl' # The services URL service_url = 'https://www.prowlapp.com/' # The default secure protocol secure_protocol = 'prowl' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_prowl' # Prowl uses the http protocol with JSON requests notify_url = 'https://api.prowlapp.com/publicapi/add' # Disable throttle rate for Prowl requests since they are normally # local anyway request_rate_per_sec = 0 # The maximum allowable characters allowed in the body per message body_maxlen = 10000 # Defines the maximum allowable characters in the title title_maxlen = 1024 # Define object templates templates = ( '{schema}://{apikey}', '{schema}://{apikey}/{providerkey}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'apikey': { 'name': _('API Key'), 'type': 'string', 'private': True, 'required': True, 'regex': (r'^[A-Za-z0-9]{40}$', 'i'), }, 'providerkey': { 'name': _('Provider Key'), 'type': 'string', 'private': True, 'regex': (r'^[A-Za-z0-9]{40}$', 'i'), }, }) # Define our template arguments template_args = 
dict(NotifyBase.template_args, **{ 'priority': { 'name': _('Priority'), 'type': 'choice:int', 'values': PROWL_PRIORITIES, 'default': ProwlPriority.NORMAL, }, }) def __init__(self, apikey, providerkey=None, priority=None, **kwargs): """ Initialize Prowl Object """ super().__init__(**kwargs) # The Priority of the message self.priority = NotifyProwl.template_args['priority']['default'] \ if not priority else \ next(( v for k, v in PROWL_PRIORITY_MAP.items() if str(priority).lower().startswith(k)), NotifyProwl.template_args['priority']['default']) # API Key (associated with project) self.apikey = validate_regex( apikey, *self.template_tokens['apikey']['regex']) if not self.apikey: msg = 'An invalid Prowl API Key ' \ '({}) was specified.'.format(apikey) self.logger.warning(msg) raise TypeError(msg) # Store the provider key (if specified) if providerkey: self.providerkey = validate_regex( providerkey, *self.template_tokens['providerkey']['regex']) if not self.providerkey: msg = 'An invalid Prowl Provider Key ' \ '({}) was specified.'.format(providerkey) self.logger.warning(msg) raise TypeError(msg) else: # No provider key was set self.providerkey = None return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Prowl Notification """ headers = { 'User-Agent': self.app_id, 'Content-type': "application/x-www-form-urlencoded", } # prepare JSON Object payload = { 'apikey': self.apikey, 'application': self.app_id, 'event': title, 'description': body, 'priority': self.priority, } if self.providerkey: payload['providerkey'] = self.providerkey self.logger.debug('Prowl POST URL: %s (cert_verify=%r)' % ( self.notify_url, self.verify_certificate, )) self.logger.debug('Prowl Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( self.notify_url, data=payload, headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyBase.http_response_code_lookup( r.status_code, PROWL_HTTP_ERROR_MAP) self.logger.warning( 'Failed to send Prowl notification:' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.info('Sent Prowl notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Prowl notification.') self.logger.debug('Socket Exception: %s' % str(e)) # Return; we're done return False return True @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.apikey, self.providerkey) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
""" # Define any URL parameters params = { 'priority': PROWL_PRIORITIES[self.template_args['priority']['default']] if self.priority not in PROWL_PRIORITIES else PROWL_PRIORITIES[self.priority], } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) return '{schema}://{apikey}/{providerkey}/?{params}'.format( schema=self.secure_protocol, apikey=self.pprint(self.apikey, privacy, safe=''), providerkey=self.pprint(self.providerkey, privacy, safe=''), params=NotifyProwl.urlencode(params), ) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Set the API Key results['apikey'] = NotifyProwl.unquote(results['host']) # Optionally try to find the provider key try: results['providerkey'] = \ NotifyProwl.split_path(results['fullpath'])[0] except IndexError: pass # Set our priority if 'priority' in results['qsd'] and len(results['qsd']['priority']): results['priority'] = \ NotifyProwl.unquote(results['qsd']['priority']) return results apprise-1.9.3/apprise/plugins/pushbullet.py000066400000000000000000000365171477231770000211000ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import requests from json import dumps from json import loads from .base import NotifyBase from ..common import NotifyType from ..utils.parse import is_email, parse_list, validate_regex from ..locale import gettext_lazy as _ from ..attachment.base import AttachBase # Flag used as a placeholder to sending to all devices PUSHBULLET_SEND_TO_ALL = 'ALL_DEVICES' # Provide some known codes Pushbullet uses and what they translate to: PUSHBULLET_HTTP_ERROR_MAP = { 401: 'Unauthorized - Invalid Token.', } class NotifyPushBullet(NotifyBase): """ A wrapper for PushBullet Notifications """ # The default descriptive name associated with the Notification service_name = 'Pushbullet' # The services URL service_url = 'https://www.pushbullet.com/' # The default secure protocol secure_protocol = 'pbul' # Allow 50 requests per minute (Tier 2). 
# 60/50 = 0.2 request_rate_per_sec = 1.2 # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_pushbullet' # PushBullet uses the http protocol with JSON requests notify_url = 'https://api.pushbullet.com/v2/{}' # Support attachments attachment_support = True # Define object templates templates = ( '{schema}://{accesstoken}', '{schema}://{accesstoken}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'accesstoken': { 'name': _('Access Token'), 'type': 'string', 'private': True, 'required': True, }, 'target_device': { 'name': _('Target Device'), 'type': 'string', 'map_to': 'targets', }, 'target_channel': { 'name': _('Target Channel'), 'type': 'string', 'prefix': '#', 'map_to': 'targets', }, 'target_email': { 'name': _('Target Email'), 'type': 'string', 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, }) def __init__(self, accesstoken, targets=None, **kwargs): """ Initialize PushBullet Object """ super().__init__(**kwargs) # Access Token (associated with project) self.accesstoken = validate_regex(accesstoken) if not self.accesstoken: msg = 'An invalid PushBullet Access Token ' \ '({}) was specified.'.format(accesstoken) self.logger.warning(msg) raise TypeError(msg) self.targets = parse_list(targets) if len(self.targets) == 0: self.targets = (PUSHBULLET_SEND_TO_ALL, ) return def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, **kwargs): """ Perform PushBullet Notification """ # error tracking (used for function return) has_error = False # Build a list of our attachments attachments = [] if attach and self.attachment_support: # We need to upload our payload first so that we can source it # in remaining messages for no, attachment in enumerate(attach, start=1): # Perform some simple error checking if not attachment: # We could not access the attachment self.logger.error( 'Could not access attachment {}.'.format( attachment.url(privacy=True))) return False self.logger.debug( 'Preparing PushBullet attachment {}'.format( attachment.url(privacy=True))) # prepare payload payload = { 'file_name': attachment.name if attachment.name else f'file{no:03}.dat', 'file_type': attachment.mimetype, } # First thing we need to do is make a request so that we can # get a URL to post our request to. # see: https://docs.pushbullet.com/#upload-request okay, response = self._send( self.notify_url.format('upload-request'), payload) if not okay: # We can't post our attachment return False # If we get here, our output will look something like this: # { # "file_name": "cat.jpg", # "file_type": "image/jpeg", # "file_url": "https://dl.pushb.com/abc/cat.jpg", # "upload_url": "https://upload.pushbullet.com/abcd123" # } # - The file_url is where the file will be available after it # is uploaded. # - The upload_url is where to POST the file to. The file must # be posted using multipart/form-data encoding. # Prepare our attachment payload; we'll use this if we # successfully upload the content below for later on. 
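# Note: the file content itself is POSTed separately to the 'upload_url'
# returned above; _send() detects the AttachBase object and submits it as
# multipart/form-data, while the 'file' type payload built below is pushed
# to the 'pushes' endpoint afterwards.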
try: # By placing this in a try/except block we can validate # our response at the same time as preparing our payload payload = { # PushBullet v2/pushes file type: 'type': 'file', 'file_name': response['file_name'], 'file_type': response['file_type'], 'file_url': response['file_url'], } if response['file_type'].startswith('image/'): # Allow image to be displayed inline (if image type) payload['image_url'] = response['file_url'] upload_url = response['upload_url'] except (KeyError, TypeError): # A method of verifying our content exists return False okay, response = self._send(upload_url, attachment) if not okay: # We can't post our attachment return False # Save our pre-prepared payload for attachment posting attachments.append(payload) # Create a copy of the targets list targets = list(self.targets) while len(targets): recipient = targets.pop(0) # prepare payload payload = { 'type': 'note', 'title': title, 'body': body, } # Check if an email was defined match = is_email(recipient) if match: payload['email'] = match['full_email'] self.logger.debug( "PushBullet recipient {} parsed as an email address" .format(recipient)) elif recipient is PUSHBULLET_SEND_TO_ALL: # Send to all pass elif recipient[0] == '#': payload['channel_tag'] = recipient[1:] self.logger.debug( "PushBullet recipient {} parsed as a channel" .format(recipient)) else: payload['device_iden'] = recipient self.logger.debug( "PushBullet recipient {} parsed as a device" .format(recipient)) if body: okay, response = self._send( self.notify_url.format('pushes'), payload) if not okay: has_error = True continue self.logger.info( 'Sent PushBullet notification to "%s".' % (recipient)) for attach_payload in attachments: # Send our attachments to our same user (already prepared as # our payload object) okay, response = self._send( self.notify_url.format('pushes'), attach_payload) if not okay: has_error = True continue self.logger.info( 'Sent PushBullet attachment ({}) to "{}".'.format( attach_payload['file_name'], recipient)) return not has_error def _send(self, url, payload, **kwargs): """ Wrapper to the requests (post) object """ headers = { 'User-Agent': self.app_id, } # Some default values for our request object to which we'll update # depending on what our payload is files = None data = None if not isinstance(payload, AttachBase): # Send our payload as a JSON object headers['Content-Type'] = 'application/json' data = dumps(payload) if payload else None auth = (self.accesstoken, '') self.logger.debug('PushBullet POST URL: %s (cert_verify=%r)' % ( url, self.verify_certificate, )) self.logger.debug('PushBullet Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() # Default response type response = None try: # Open our attachment path if required: if isinstance(payload, AttachBase): files = {'file': (payload.name, open(payload.path, 'rb'))} r = requests.post( url, data=data, headers=headers, files=files, auth=auth, verify=self.verify_certificate, timeout=self.request_timeout, ) try: response = loads(r.content) except (AttributeError, TypeError, ValueError): # ValueError = r.content is Unparsable # TypeError = r.content is None # AttributeError = r is None # Fall back to the existing unparsed value response = r.content if r.status_code not in ( requests.codes.ok, requests.codes.no_content): # We had a problem status_str = \ NotifyPushBullet.http_response_code_lookup( r.status_code, PUSHBULLET_HTTP_ERROR_MAP) self.logger.warning( 'Failed to deliver payload to PushBullet:' 
'{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) return False, response # otherwise we were successful return True, response except requests.RequestException as e: self.logger.warning( 'A Connection error occurred communicating with PushBullet.') self.logger.debug('Socket Exception: %s' % str(e)) return False, response except (OSError, IOError) as e: self.logger.warning( 'An I/O error occurred while handling {}.'.format( payload.name if isinstance(payload, AttachBase) else payload)) self.logger.debug('I/O Exception: %s' % str(e)) return False, response finally: # Close our file (if it's open) stored in the second element # of our files tuple (index 1) if files: files['file'][1].close() @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.accesstoken) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Our URL parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) targets = '/'.join([NotifyPushBullet.quote(x) for x in self.targets]) if targets == PUSHBULLET_SEND_TO_ALL: # keyword is reserved for internal usage only; it's safe to remove # it from the recipients list targets = '' return '{schema}://{accesstoken}/{targets}/?{params}'.format( schema=self.secure_protocol, accesstoken=self.pprint(self.accesstoken, privacy, safe=''), targets=targets, params=NotifyPushBullet.urlencode(params)) def __len__(self): """ Returns the number of targets associated with this notification """ return len(self.targets) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Fetch our targets results['targets'] = \ NotifyPushBullet.split_path(results['fullpath']) # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyPushBullet.parse_list(results['qsd']['to']) # Setup the token; we store it in Access Token for global # plugin consistency with naming conventions results['accesstoken'] = NotifyPushBullet.unquote(results['host']) return results apprise-1.9.3/apprise/plugins/pushdeer.py000066400000000000000000000161721477231770000205230ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import requests from ..common import NotifyType from .base import NotifyBase from ..utils.parse import validate_regex from ..locale import gettext_lazy as _ # Syntax: # schan://{key}/ class NotifyPushDeer(NotifyBase): """ A wrapper for PushDeer Notifications """ # The default descriptive name associated with the Notification service_name = 'PushDeer' # The services URL service_url = 'https://www.pushdeer.com/' # Insecure Protocol Access protocol = 'pushdeer' # Secure Protocol secure_protocol = 'pushdeers' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_PushDeer' # Default hostname default_hostname = 'api2.pushdeer.com' # PushDeer API notify_url = '{schema}://{host}:{port}/message/push?pushkey={pushKey}' # Define object templates templates = ( '{schema}://{pushkey}', '{schema}://{host}/{pushkey}', '{schema}://{host}:{port}/{pushkey}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'host': { 'name': _('Hostname'), 'type': 'string', }, 'port': { 'name': _('Port'), 'type': 'int', 'min': 1, 'max': 65535, }, 'pushkey': { 'name': _('Pushkey'), 'type': 'string', 'private': True, 'required': True, 'regex': (r'^[a-z0-9]+$', 'i'), }, }) def __init__(self, pushkey, **kwargs): """ Initialize PushDeer Object """ super().__init__(**kwargs) # PushKey (associated with project) self.push_key = validate_regex( pushkey, *self.template_tokens['pushkey']['regex']) if not self.push_key: msg = 'An invalid PushDeer API Pushkey ' \ '({}) was specified.'.format(pushkey) self.logger.warning(msg) raise TypeError(msg) def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform PushDeer Notification """ # Prepare our persistent_notification.create payload payload = { 'text': title if title else body, 'type': 'text', 'desp': body if title else '', } # Set our schema schema = 'https' if self.secure else 'http' # Set host host = self.default_hostname if self.host: host = self.host # Set port port = 443 if self.secure else 80 if self.port: port = self.port # Our Notification URL notify_url = self.notify_url.format( schema=schema, host=host, port=port, pushKey=self.push_key) # Some Debug Logging self.logger.debug('PushDeer URL: {} (cert_verify={})'.format( notify_url, self.verify_certificate)) self.logger.debug('PushDeer Payload: {}'.format(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( notify_url, data=payload, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyPushDeer.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to send PushDeer notification: ' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) return False else: self.logger.info('Sent PushDeer notification.') except requests.RequestException as e: self.logger.warning( 'A 
Connection error occured sending PushDeer ' 'notification.' ) self.logger.debug('Socket Exception: %s' % str(e)) return False return True @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol if self.secure else self.protocol, self.push_key, self.host, self.port, ) def url(self, privacy=False): """ Returns the URL built dynamically based on specified arguments. """ if self.host: url = '{schema}://{host}{port}/{pushkey}' else: url = '{schema}://{pushkey}' return url.format( schema=self.secure_protocol if self.secure else self.protocol, host=self.host, port='' if not self.port else ':{}'.format(self.port), pushkey=self.pprint(self.push_key, privacy, safe='')) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to substantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't parse the URL return results fullpaths = NotifyPushDeer.split_path(results['fullpath']) if len(fullpaths) == 0: results['pushkey'] = results['host'] results['host'] = None else: results['pushkey'] = fullpaths.pop() return results apprise-1.9.3/apprise/plugins/pushed.py000066400000000000000000000303741477231770000201740ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
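# Example usage (an illustrative sketch only; the application key/secret
# below are placeholders, while the URL shape follows the
# 'pushed://{app_key}/{app_secret}' templates defined further down in this
# module - channel targets are prefixed with '#' and user Pushed IDs
# with '@' when appended):
#
#     import apprise
#
#     apobj = apprise.Apprise()
#     # With no targets specified, the application itself is notified.
#     apobj.add('pushed://appkeyexample/appsecretexample')
#     apobj.notify(title='Test Title', body='Test Message')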
import re import requests from json import dumps from itertools import chain from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyType from ..utils.parse import parse_list, validate_regex from ..locale import gettext_lazy as _ # Used to detect and parse channels IS_CHANNEL = re.compile(r'^#?(?P[A-Za-z0-9]+)$') # Used to detect and parse a users push id IS_USER_PUSHED_ID = re.compile(r'^@(?P[A-Za-z0-9]+)$') class NotifyPushed(NotifyBase): """ A wrapper to Pushed Notifications """ # The default descriptive name associated with the Notification service_name = 'Pushed' # The services URL service_url = 'https://pushed.co/' # The default secure protocol secure_protocol = 'pushed' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_pushed' # Pushed uses the http protocol with JSON requests notify_url = 'https://api.pushed.co/1/push' # A title can not be used for Pushed Messages. Setting this to zero will # cause any title (if defined) to get placed into the message body. title_maxlen = 0 # The maximum allowable characters allowed in the body per message body_maxlen = 160 # Define object templates templates = ( '{schema}://{app_key}/{app_secret}', '{schema}://{app_key}/{app_secret}@{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'app_key': { 'name': _('Application Key'), 'type': 'string', 'private': True, 'required': True, }, 'app_secret': { 'name': _('Application Secret'), 'type': 'string', 'private': True, 'required': True, }, 'target_user': { 'name': _('Target User'), 'prefix': '@', 'type': 'string', 'map_to': 'targets', }, 'target_channel': { 'name': _('Target Channel'), 'type': 'string', 'prefix': '#', 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, }) def __init__(self, app_key, app_secret, targets=None, **kwargs): """ Initialize Pushed Object """ super().__init__(**kwargs) # Application Key (associated with project) self.app_key = validate_regex(app_key) if not self.app_key: msg = 'An invalid Pushed Application Key ' \ '({}) was specified.'.format(app_key) self.logger.warning(msg) raise TypeError(msg) # Access Secret (associated with project) self.app_secret = validate_regex(app_secret) if not self.app_secret: msg = 'An invalid Pushed Application Secret ' \ '({}) was specified.'.format(app_secret) self.logger.warning(msg) raise TypeError(msg) # Initialize channel list self.channels = list() # Initialize user list self.users = list() # Get our targets targets = parse_list(targets) if targets: # Validate recipients and drop bad ones: for target in targets: result = IS_CHANNEL.match(target) if result: # store valid device self.channels.append(result.group('name')) continue result = IS_USER_PUSHED_ID.match(target) if result: # store valid room self.users.append(result.group('name')) continue self.logger.warning( 'Dropped invalid channel/userid ' '(%s) specified.' % target, ) if len(self.channels) + len(self.users) == 0: # We have no valid channels or users to notify after # explicitly identifying at least one. msg = 'No Pushed targets to notify.' 
self.logger.warning(msg) raise TypeError(msg) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Pushed Notification """ # Initiaize our error tracking has_error = False # prepare JSON Object payload = { 'app_key': self.app_key, 'app_secret': self.app_secret, 'target_type': 'app', 'content': body, } # So the logic is as follows: # - if no user/channel was specified, then we just simply notify the # app. # - if there are user/channels specified, then we only alert them # while respecting throttle limits (in the event there are a lot of # entries. if len(self.channels) + len(self.users) == 0: # Just notify the app return self._send( payload=payload, notify_type=notify_type, **kwargs) # If our code reaches here, we want to target channels and users (by # their Pushed_ID instead... # Generate a copy of our original list channels = list(self.channels) users = list(self.users) # Copy our payload _payload = dict(payload) _payload['target_type'] = 'channel' while len(channels) > 0: # Get Channel _payload['target_alias'] = channels.pop(0) if not self._send( payload=_payload, notify_type=notify_type, **kwargs): # toggle flag has_error = True # Copy our payload _payload = dict(payload) _payload['target_type'] = 'pushed_id' # Send all our defined User Pushed ID's while len(users): # Get User's Pushed ID _payload['pushed_id'] = users.pop(0) if not self._send( payload=_payload, notify_type=notify_type, **kwargs): # toggle flag has_error = True return not has_error def _send(self, payload, notify_type, **kwargs): """ A lower level call that directly pushes a payload to the Pushed Notification servers. This should never be called directly; it is referenced automatically through the send() function. """ headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json' } self.logger.debug('Pushed POST URL: %s (cert_verify=%r)' % ( self.notify_url, self.verify_certificate, )) self.logger.debug('Pushed Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( self.notify_url, data=dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyPushed.http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send Pushed notification:' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.info('Sent Pushed notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Pushed notification.') self.logger.debug('Socket Exception: %s' % str(e)) # Return; we're done return False return True @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.app_key, self.app_secret) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
""" # Our URL parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) return '{schema}://{app_key}/{app_secret}/{targets}/?{params}'.format( schema=self.secure_protocol, app_key=self.pprint(self.app_key, privacy, safe=''), app_secret=self.pprint( self.app_secret, privacy, mode=PrivacyMode.Secret, safe=''), targets='/'.join( [NotifyPushed.quote(x) for x in chain( # Channels are prefixed with a pound/hashtag symbol ['#{}'.format(x) for x in self.channels], # Users are prefixed with an @ symbol ['@{}'.format(x) for x in self.users], )]), params=NotifyPushed.urlencode(params)) def __len__(self): """ Returns the number of targets associated with this notification """ targets = len(self.channels) + len(self.users) return targets if targets > 0 else 1 @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # The first token is stored in the hostname app_key = NotifyPushed.unquote(results['host']) entries = NotifyPushed.split_path(results['fullpath']) # Now fetch the remaining tokens try: app_secret = entries.pop(0) except IndexError: # Force some bad values that will get caught # in parsing later app_secret = None app_key = None # Get our recipients (based on remaining entries) results['targets'] = entries # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyPushed.parse_list(results['qsd']['to']) results['app_key'] = app_key results['app_secret'] = app_secret return results apprise-1.9.3/apprise/plugins/pushjet.py000066400000000000000000000222001477231770000203530ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
import requests from json import dumps from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyType from ..utils.parse import validate_regex from ..locale import gettext_lazy as _ class NotifyPushjet(NotifyBase): """ A wrapper for Pushjet Notifications """ # The default descriptive name associated with the Notification service_name = 'Pushjet' # The default protocol protocol = 'pjet' # The default secure protocol secure_protocol = 'pjets' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_pushjet' # Disable throttle rate for Pushjet requests since they are normally # local anyway (the remote/online service is no more) request_rate_per_sec = 0 # Define object templates templates = ( '{schema}://{host}:{port}/{secret_key}', '{schema}://{host}/{secret_key}', '{schema}://{user}:{password}@{host}:{port}/{secret_key}', '{schema}://{user}:{password}@{host}/{secret_key}', ) # Define our tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'host': { 'name': _('Hostname'), 'type': 'string', 'required': True, }, 'port': { 'name': _('Port'), 'type': 'int', 'min': 1, 'max': 65535, }, 'secret_key': { 'name': _('Secret Key'), 'type': 'string', 'required': True, 'private': True, }, 'user': { 'name': _('Username'), 'type': 'string', }, 'password': { 'name': _('Password'), 'type': 'string', 'private': True, }, }) template_args = dict(NotifyBase.template_args, **{ 'secret': { 'alias_of': 'secret_key', }, }) def __init__(self, secret_key, **kwargs): """ Initialize Pushjet Object """ super().__init__(**kwargs) # Secret Key (associated with project) self.secret_key = validate_regex(secret_key) if not self.secret_key: msg = 'An invalid Pushjet Secret Key ' \ '({}) was specified.'.format(secret_key) self.logger.warning(msg) raise TypeError(msg) return @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol if self.secure else self.protocol, self.user, self.password, self.host, self.port, self.secret_key, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
""" # Our URL parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) default_port = 443 if self.secure else 80 # Determine Authentication auth = '' if self.user and self.password: auth = '{user}:{password}@'.format( user=NotifyPushjet.quote(self.user, safe=''), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), ) return '{schema}://{auth}{hostname}{port}/{secret}/?{params}'.format( schema=self.secure_protocol if self.secure else self.protocol, auth=auth, # never encode hostname since we're expecting it to be a valid one hostname=self.host, port='' if self.port is None or self.port == default_port else ':{}'.format(self.port), secret=self.pprint( self.secret_key, privacy, mode=PrivacyMode.Secret, safe=''), params=NotifyPushjet.urlencode(params), ) def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Pushjet Notification """ params = { 'secret': self.secret_key, } # prepare Pushjet Object payload = { 'message': body, 'title': title, 'link': None, 'level': None, } headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8', } auth = None if self.user: auth = (self.user, self.password) notify_url = '{schema}://{host}{port}/message/'.format( schema="https" if self.secure else "http", host=self.host, port=':{}'.format(self.port) if self.port else '') self.logger.debug('Pushjet POST URL: %s (cert_verify=%r)' % ( notify_url, self.verify_certificate, )) self.logger.debug('Pushjet Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( notify_url, params=params, data=dumps(payload), headers=headers, auth=auth, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyPushjet.http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send Pushjet notification: ' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.info('Sent Pushjet notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Pushjet ' 'notification to %s.' % self.host) self.logger.debug('Socket Exception: %s' % str(e)) # Return; we're done return False return True @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. 
Syntax: pjet://hostname/secret_key pjet://hostname:port/secret_key pjet://user:pass@hostname/secret_key pjet://user:pass@hostname:port/secret_key pjets://hostname/secret_key pjets://hostname:port/secret_key pjets://user:pass@hostname/secret_key pjets://user:pass@hostname:port/secret_key """ results = NotifyBase.parse_url(url) if not results: # We're done early as we couldn't load the results return results try: # Retrieve our secret_key from the first entry in the url path results['secret_key'] = \ NotifyPushjet.split_path(results['fullpath'])[0] except IndexError: # no secret key specified results['secret_key'] = None # Allow over-riding the secret by specifying it as an argument # this allows people who have http-auth infront to login # through it in addition to supporting the secret key if 'secret' in results['qsd'] and len(results['qsd']['secret']): results['secret_key'] = \ NotifyPushjet.unquote(results['qsd']['secret']) return results apprise-1.9.3/apprise/plugins/pushme.py000066400000000000000000000163351477231770000202060ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
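# Example usage (an illustrative sketch only; the token below is a
# placeholder, while the URL shape follows the 'pushme://{token}' template
# and the optional 'status' switch defined further down in this module):
#
#     import apprise
#
#     apobj = apprise.Apprise()
#     # status=no suppresses the notification-type prefix on the title
#     apobj.add('pushme://tokenexample?status=no')
#     apobj.notify(title='Test Title', body='Test Message')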
import requests from .base import NotifyBase from ..common import NotifyType from ..common import NotifyFormat from ..utils.parse import validate_regex, parse_bool from ..locale import gettext_lazy as _ class NotifyPushMe(NotifyBase): """ A wrapper for PushMe Notifications """ # The default descriptive name associated with the Notification service_name = 'PushMe' # The services URL service_url = 'https://push.i-i.me/' # Insecure protocol (for those self hosted requests) protocol = 'pushme' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_pushme' # PushMe URL notify_url = 'https://push.i-i.me/' # Define object templates templates = ( '{schema}://{token}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'token': { 'name': _('Token'), 'type': 'string', 'private': True, 'required': True, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'token': { 'alias_of': 'token', }, 'push_key': { 'alias_of': 'token', }, 'status': { 'name': _('Show Status'), 'type': 'bool', 'default': True, }, }) def __init__(self, token, status=None, **kwargs): """ Initialize PushMe Object """ super().__init__(**kwargs) # Token (associated with project) self.token = validate_regex(token) if not self.token: msg = 'An invalid PushMe Token ' \ '({}) was specified.'.format(token) self.logger.warning(msg) raise TypeError(msg) # Set Status type self.status = status return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform PushMe Notification """ headers = { 'User-Agent': self.app_id, } # Prepare our payload params = { 'push_key': self.token, 'title': title if not self.status else '{} {}'.format(self.asset.ascii(notify_type), title), 'content': body, 'type': 'markdown' if self.notify_format == NotifyFormat.MARKDOWN else 'text' } self.logger.debug('PushMe POST URL: %s (cert_verify=%r)' % ( self.notify_url, self.verify_certificate, )) self.logger.debug('PushMe Payload: %s' % str(params)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( self.notify_url, params=params, headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyPushMe.http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send PushMe notification:' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.info('Sent PushMe notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending PushMe notification.', ) self.logger.debug('Socket Exception: %s' % str(e)) # Return; we're done return False return True @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.token) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
""" # Define any URL parameters params = { 'status': 'yes' if self.status else 'no', } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Official URLs are easy to assemble return '{schema}://{token}/?{params}'.format( schema=self.protocol, token=self.pprint(self.token, privacy, safe=''), params=NotifyPushMe.urlencode(params), ) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Store our token using the host results['token'] = NotifyPushMe.unquote(results['host']) # The 'token' makes it easier to use yaml configuration if 'token' in results['qsd'] and len(results['qsd']['token']): results['token'] = NotifyPushMe.unquote(results['qsd']['token']) elif 'push_key' in results['qsd'] and len(results['qsd']['push_key']): # Support 'push_key' if specified results['token'] = NotifyPushMe.unquote(results['qsd']['push_key']) # Get status switch results['status'] = \ parse_bool(results['qsd'].get('status', True)) return results apprise-1.9.3/apprise/plugins/pushover.py000066400000000000000000000517451477231770000205640ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
import re import requests from itertools import chain from .base import NotifyBase from ..common import NotifyType from ..common import NotifyFormat from ..conversion import convert_between from ..utils.parse import parse_list, validate_regex from ..locale import gettext_lazy as _ from ..attachment.base import AttachBase # Flag used as a placeholder to sending to all devices PUSHOVER_SEND_TO_ALL = 'ALL_DEVICES' # Used to detect a Device VALIDATE_DEVICE = re.compile(r'^\s*(?P[a-z0-9_-]{1,25})\s*$', re.I) # Priorities class PushoverPriority: LOW = -2 MODERATE = -1 NORMAL = 0 HIGH = 1 EMERGENCY = 2 # Sounds class PushoverSound: PUSHOVER = 'pushover' BIKE = 'bike' BUGLE = 'bugle' CASHREGISTER = 'cashregister' CLASSICAL = 'classical' COSMIC = 'cosmic' FALLING = 'falling' GAMELAN = 'gamelan' INCOMING = 'incoming' INTERMISSION = 'intermission' MAGIC = 'magic' MECHANICAL = 'mechanical' PIANOBAR = 'pianobar' SIREN = 'siren' SPACEALARM = 'spacealarm' TUGBOAT = 'tugboat' ALIEN = 'alien' CLIMB = 'climb' PERSISTENT = 'persistent' ECHO = 'echo' UPDOWN = 'updown' NONE = 'none' PUSHOVER_SOUNDS = ( PushoverSound.PUSHOVER, PushoverSound.BIKE, PushoverSound.BUGLE, PushoverSound.CASHREGISTER, PushoverSound.CLASSICAL, PushoverSound.COSMIC, PushoverSound.FALLING, PushoverSound.GAMELAN, PushoverSound.INCOMING, PushoverSound.INTERMISSION, PushoverSound.MAGIC, PushoverSound.MECHANICAL, PushoverSound.PIANOBAR, PushoverSound.SIREN, PushoverSound.SPACEALARM, PushoverSound.TUGBOAT, PushoverSound.ALIEN, PushoverSound.CLIMB, PushoverSound.PERSISTENT, PushoverSound.ECHO, PushoverSound.UPDOWN, PushoverSound.NONE, ) PUSHOVER_PRIORITIES = { # Note: This also acts as a reverse lookup mapping PushoverPriority.LOW: 'low', PushoverPriority.MODERATE: 'moderate', PushoverPriority.NORMAL: 'normal', PushoverPriority.HIGH: 'high', PushoverPriority.EMERGENCY: 'emergency', } PUSHOVER_PRIORITY_MAP = { # Maps against string 'low' 'l': PushoverPriority.LOW, # Maps against string 'moderate' 'm': PushoverPriority.MODERATE, # Maps against string 'normal' 'n': PushoverPriority.NORMAL, # Maps against string 'high' 'h': PushoverPriority.HIGH, # Maps against string 'emergency' 'e': PushoverPriority.EMERGENCY, # Entries to additionally support (so more like Pushover's API) '-2': PushoverPriority.LOW, '-1': PushoverPriority.MODERATE, '0': PushoverPriority.NORMAL, '1': PushoverPriority.HIGH, '2': PushoverPriority.EMERGENCY, } # Extend HTTP Error Messages PUSHOVER_HTTP_ERROR_MAP = { 401: 'Unauthorized - Invalid Token.', } class NotifyPushover(NotifyBase): """ A wrapper for Pushover Notifications """ # The default descriptive name associated with the Notification service_name = 'Pushover' # The services URL service_url = 'https://pushover.net/' # All pushover requests are secure secure_protocol = 'pover' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_pushover' # Pushover uses the http protocol with JSON requests notify_url = 'https://api.pushover.net/1/messages.json' # Support attachments attachment_support = True # The maximum allowable characters allowed in the body per message body_maxlen = 1024 # Default Pushover sound default_pushover_sound = PushoverSound.PUSHOVER # 2.5MB is the maximum supported image filesize as per documentation # here: https://pushover.net/api#attachments (Dec 26th, 2019) attach_max_size_bytes = 2621440 # The regular expression of the current attachment supported mime types # At this time it is only images attach_supported_mime_type = 
r'^image/.*' # Define object templates templates = ( '{schema}://{user_key}@{token}', '{schema}://{user_key}@{token}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'user_key': { 'name': _('User Key'), 'type': 'string', 'private': True, 'required': True, }, 'token': { 'name': _('Access Token'), 'type': 'string', 'private': True, 'required': True, }, 'target_device': { 'name': _('Target Device'), 'type': 'string', 'regex': (r'^[a-z0-9_-]{1,25}$', 'i'), 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'priority': { 'name': _('Priority'), 'type': 'choice:int', 'values': PUSHOVER_PRIORITIES, 'default': PushoverPriority.NORMAL, }, 'sound': { 'name': _('Sound'), 'type': 'string', 'regex': (r'^[a-z]{1,12}$', 'i'), 'default': PushoverSound.PUSHOVER, }, 'url': { 'name': _('URL'), 'map_to': 'supplemental_url', 'type': 'string', }, 'url_title': { 'name': _('URL Title'), 'map_to': 'supplemental_url_title', 'type': 'string' }, 'retry': { 'name': _('Retry'), 'type': 'int', 'min': 30, 'default': 900, # 15 minutes }, 'expire': { 'name': _('Expire'), 'type': 'int', 'min': 0, 'max': 10800, 'default': 3600, # 1 hour }, 'to': { 'alias_of': 'targets', }, }) def __init__(self, user_key, token, targets=None, priority=None, sound=None, retry=None, expire=None, supplemental_url=None, supplemental_url_title=None, **kwargs): """ Initialize Pushover Object """ super().__init__(**kwargs) # Access Token (associated with project) self.token = validate_regex(token) if not self.token: msg = 'An invalid Pushover Access Token ' \ '({}) was specified.'.format(token) self.logger.warning(msg) raise TypeError(msg) # User Key (associated with project) self.user_key = validate_regex(user_key) if not self.user_key: msg = 'An invalid Pushover User Key ' \ '({}) was specified.'.format(user_key) self.logger.warning(msg) raise TypeError(msg) # Track our valid devices targets = parse_list(targets) # Track any invalid entries self.invalid_targets = list() if len(targets) == 0: self.targets = (PUSHOVER_SEND_TO_ALL, ) else: self.targets = [] for target in targets: result = VALIDATE_DEVICE.match(target) if result: # Store device information self.targets.append(result.group('device')) continue self.logger.warning( 'Dropped invalid Pushover device ' '({}) specified.'.format(target), ) self.invalid_targets.append(target) # Setup supplemental url self.supplemental_url = supplemental_url self.supplemental_url_title = supplemental_url_title # Setup our sound self.sound = NotifyPushover.default_pushover_sound \ if not isinstance(sound, str) else sound.lower() if self.sound and self.sound not in PUSHOVER_SOUNDS: msg = 'Using custom sound specified ({}). 
'.format(sound) self.logger.debug(msg) # The Priority of the message self.priority = int( NotifyPushover.template_args['priority']['default'] if priority is None else next(( v for k, v in PUSHOVER_PRIORITY_MAP.items() if str(priority).lower().startswith(k)), NotifyPushover.template_args['priority']['default'])) # The following are for emergency alerts if self.priority == PushoverPriority.EMERGENCY: # How often to resend notification, in seconds self.retry = self.template_args['retry']['default'] try: self.retry = int(retry) except (ValueError, TypeError): # Do nothing pass # How often to resend notification, in seconds self.expire = self.template_args['expire']['default'] try: self.expire = int(expire) except (ValueError, TypeError): # Do nothing pass if self.retry < 30: msg = 'Pushover retry must be at least 30 seconds.' self.logger.warning(msg) raise TypeError(msg) if self.expire < 0 or self.expire > 10800: msg = 'Pushover expire must reside in the range of ' \ '0 to 10800 seconds.' self.logger.warning(msg) raise TypeError(msg) return def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, **kwargs): """ Perform Pushover Notification """ if not self.targets: # There were no services to notify self.logger.warning( 'There were no Pushover targets to notify.') return False # prepare JSON Object payload = { 'token': self.token, 'user': self.user_key, 'priority': str(self.priority), 'title': title if title else self.app_desc, 'message': body, 'device': ','.join(self.targets), 'sound': self.sound, } if self.supplemental_url: payload['url'] = self.supplemental_url if self.supplemental_url_title: payload['url_title'] = self.supplemental_url_title if self.notify_format == NotifyFormat.HTML: # https://pushover.net/api#html payload['html'] = 1 elif self.notify_format == NotifyFormat.MARKDOWN: payload['message'] = convert_between( NotifyFormat.MARKDOWN, NotifyFormat.HTML, body) payload['html'] = 1 if self.priority == PushoverPriority.EMERGENCY: payload.update({'retry': self.retry, 'expire': self.expire}) if attach and self.attachment_support: # Create a copy of our payload _payload = payload.copy() # Send with attachments for no, attachment in enumerate(attach): if no or not body: # To handle multiple attachments, clean up our message _payload['message'] = attachment.name if not self._send(_payload, attachment): # Mark our failure return False # Clear our title if previously set _payload['title'] = '' # No need to alarm for each consecutive attachment uploaded # afterwards _payload['sound'] = PushoverSound.NONE else: # Simple send return self._send(payload) return True def _send(self, payload, attach=None): """ Wrapper to the requests (post) object """ if isinstance(attach, AttachBase): # Perform some simple error checking if not attach: # We could not access the attachment self.logger.error( 'Could not access attachment {}.'.format( attach.url(privacy=True))) return False # Perform some basic checks as we want to gracefully skip # over unsupported mime types. if not re.match( self.attach_supported_mime_type, attach.mimetype, re.I): # No problem; we just don't support this attachment # type; gracefully move along self.logger.debug( 'Ignored unsupported Pushover attachment ({}): {}' .format( attach.mimetype, attach.url(privacy=True))) attach = None else: # If we get here, we're dealing with a supported image. # Verify that the filesize is okay though. 
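                # Unsupported mime types were quietly skipped above (the
                # attachment is dropped and the message still goes out),
                # whereas an empty or over-sized image is treated as a hard
                # failure below: anything outside the accepted range of
                # 1 to attach_max_size_bytes (2.5MB) aborts the delivery.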
file_size = len(attach) if not (file_size > 0 and file_size <= self.attach_max_size_bytes): # File size is no good self.logger.warning( 'Pushover attachment size ({}B) exceeds limit: {}' .format(file_size, attach.url(privacy=True))) return False self.logger.debug( 'Posting Pushover attachment {}'.format( attach.url(privacy=True))) # Default Header headers = { 'User-Agent': self.app_id, } # Authentication auth = (self.token, '') # Some default values for our request object to which we'll update # depending on what our payload is files = None self.logger.debug('Pushover POST URL: %s (cert_verify=%r)' % ( self.notify_url, self.verify_certificate, )) self.logger.debug('Pushover Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: # Open our attachment path if required: if attach: files = {'attachment': (attach.name, open(attach.path, 'rb'))} r = requests.post( self.notify_url, data=payload, headers=headers, files=files, auth=auth, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyPushover.http_response_code_lookup( r.status_code, PUSHOVER_HTTP_ERROR_MAP) self.logger.warning( 'Failed to send Pushover notification to {}: ' '{}{}error={}.'.format( payload['device'], status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) return False else: self.logger.info( 'Sent Pushover notification to %s.' % payload['device']) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Pushover:%s ' % ( payload['device']) + 'notification.' ) self.logger.debug('Socket Exception: %s' % str(e)) return False except (OSError, IOError) as e: self.logger.warning( 'An I/O error occurred while reading {}.'.format( attach.name if attach else 'attachment')) self.logger.debug('I/O Exception: %s' % str(e)) return False finally: # Close our file (if it's open) stored in the second element # of our files tuple (index 1) if files: files['attachment'][1].close() return True @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.user_key, self.token) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
""" # Define any URL parameters params = { 'priority': PUSHOVER_PRIORITIES[self.template_args['priority']['default']] if self.priority not in PUSHOVER_PRIORITIES else PUSHOVER_PRIORITIES[self.priority], } # Only add expire and retry for emergency messages, # pushover ignores for all other priorities if self.priority == PushoverPriority.EMERGENCY: params.update({'expire': self.expire, 'retry': self.retry}) # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Escape our devices devices = '/'.join( [NotifyPushover.quote(x, safe='') for x in chain(self.targets, self.invalid_targets)]) if devices == PUSHOVER_SEND_TO_ALL: # keyword is reserved for internal usage only; it's safe to remove # it from the devices list devices = '' return '{schema}://{user_key}@{token}/{devices}/?{params}'.format( schema=self.secure_protocol, user_key=self.pprint(self.user_key, privacy, safe=''), token=self.pprint(self.token, privacy, safe=''), devices=devices, params=NotifyPushover.urlencode(params)) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Set our priority if 'priority' in results['qsd'] and len(results['qsd']['priority']): results['priority'] = \ NotifyPushover.unquote(results['qsd']['priority']) # Retrieve all of our targets results['targets'] = NotifyPushover.split_path(results['fullpath']) # User Key is retrieved from the user results['user_key'] = NotifyPushover.unquote(results['user']) # Get the sound if 'sound' in results['qsd'] and len(results['qsd']['sound']): results['sound'] = \ NotifyPushover.unquote(results['qsd']['sound']) # Get the supplementary url if 'url' in results['qsd'] and len(results['qsd']['url']): results['supplemental_url'] = NotifyPushover.unquote( results['qsd']['url'] ) if 'url_title' in results['qsd'] and len(results['qsd']['url_title']): results['supplemental_url_title'] = results['qsd']['url_title'] # Get expire and retry if 'expire' in results['qsd'] and len(results['qsd']['expire']): results['expire'] = results['qsd']['expire'] if 'retry' in results['qsd'] and len(results['qsd']['retry']): results['retry'] = results['qsd']['retry'] # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyPushover.parse_list(results['qsd']['to']) # Token results['token'] = NotifyPushover.unquote(results['host']) return results apprise-1.9.3/apprise/plugins/pushsafer.py000066400000000000000000000650361477231770000207070ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. 
# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import requests from json import loads from .base import NotifyBase from .. import exception from ..common import NotifyType from ..utils.parse import parse_list, validate_regex from ..locale import gettext_lazy as _ class PushSaferSound: """ Defines all of the supported PushSafe sounds """ # Silent SILENT = 0 # Ahem (IM) AHEM = 1 # Applause (Mail) APPLAUSE = 2 # Arrow (Reminder) ARROW = 3 # Baby (SMS) BABY = 4 # Bell (Alarm) BELL = 5 # Bicycle (Alarm2) BICYCLE = 6 # Boing (Alarm3) BOING = 7 # Buzzer (Alarm4) BUZZER = 8 # Camera (Alarm5) CAMERA = 9 # Car Horn (Alarm6) CAR_HORN = 10 # Cash Register (Alarm7) CASH_REGISTER = 11 # Chime (Alarm8) CHIME = 12 # Creaky Door (Alarm9) CREAKY_DOOR = 13 # Cuckoo Clock (Alarm10) CUCKOO_CLOCK = 14 # Disconnect (Call) DISCONNECT = 15 # Dog (Call2) DOG = 16 # Doorbell (Call3) DOORBELL = 17 # Fanfare (Call4) FANFARE = 18 # Gun Shot (Call5) GUN_SHOT = 19 # Honk (Call6) HONK = 20 # Jaw Harp (Call7) JAW_HARP = 21 # Morse (Call8) MORSE = 22 # Electricity (Call9) ELECTRICITY = 23 # Radio Tuner (Call10) RADIO_TURNER = 24 # Sirens SIRENS = 25 # Military Trumpets MILITARY_TRUMPETS = 26 # Ufo UFO = 27 # Whah Whah Whah LONG_WHAH = 28 # Man Saying Goodbye GOODBYE = 29 # Man Saying Hello HELLO = 30 # Man Saying No NO = 31 # Man Saying Ok OKAY = 32 # Man Saying Ooohhhweee OOOHHHWEEE = 33 # Man Saying Warning WARNING = 34 # Man Saying Welcome WELCOME = 35 # Man Saying Yeah YEAH = 36 # Man Saying Yes YES = 37 # Beep short BEEP1 = 38 # Weeeee short WEEE = 39 # Cut in and out short CUTINOUT = 40 # Finger flicking glas short FLICK_GLASS = 41 # Wa Wa Waaaa short SHORT_WHAH = 42 # Laser short LASER = 43 # Wind Chime short WIND_CHIME = 44 # Echo short ECHO = 45 # Zipper short ZIPPER = 46 # HiHat short HIHAT = 47 # Beep 2 short BEEP2 = 48 # Beep 3 short BEEP3 = 49 # Beep 4 short BEEP4 = 50 # The Alarm is armed ALARM_ARMED = 51 # The Alarm is disarmed ALARM_DISARMED = 52 # The Backup is ready BACKUP_READY = 53 # The Door is closed DOOR_CLOSED = 54 # The Door is opend DOOR_OPENED = 55 # The Window is closed WINDOW_CLOSED = 56 # The Window is open WINDOW_OPEN = 57 # The Light is off LIGHT_ON = 58 # The Light is on LIGHT_OFF = 59 # The Doorbell rings DOORBELL_RANG = 60 PUSHSAFER_SOUND_MAP = { # Device Default, 'silent': PushSaferSound.SILENT, 'ahem': PushSaferSound.AHEM, 'applause': PushSaferSound.APPLAUSE, 'arrow': PushSaferSound.ARROW, 'baby': PushSaferSound.BABY, 'bell': PushSaferSound.BELL, 'bicycle': PushSaferSound.BICYCLE, 'bike': PushSaferSound.BICYCLE, 'boing': PushSaferSound.BOING, 'buzzer': PushSaferSound.BUZZER, 'camera': PushSaferSound.CAMERA, 'carhorn': PushSaferSound.CAR_HORN, 'horn': PushSaferSound.CAR_HORN, 'cashregister': PushSaferSound.CASH_REGISTER, 'chime': PushSaferSound.CHIME, 'creakydoor': 
PushSaferSound.CREAKY_DOOR, 'cuckooclock': PushSaferSound.CUCKOO_CLOCK, 'cuckoo': PushSaferSound.CUCKOO_CLOCK, 'disconnect': PushSaferSound.DISCONNECT, 'dog': PushSaferSound.DOG, 'doorbell': PushSaferSound.DOORBELL, 'fanfare': PushSaferSound.FANFARE, 'gunshot': PushSaferSound.GUN_SHOT, 'honk': PushSaferSound.HONK, 'jawharp': PushSaferSound.JAW_HARP, 'morse': PushSaferSound.MORSE, 'electric': PushSaferSound.ELECTRICITY, 'radiotuner': PushSaferSound.RADIO_TURNER, 'sirens': PushSaferSound.SIRENS, 'militarytrumpets': PushSaferSound.MILITARY_TRUMPETS, 'military': PushSaferSound.MILITARY_TRUMPETS, 'trumpets': PushSaferSound.MILITARY_TRUMPETS, 'ufo': PushSaferSound.UFO, 'whahwhah': PushSaferSound.LONG_WHAH, 'whah': PushSaferSound.SHORT_WHAH, 'goodye': PushSaferSound.GOODBYE, 'hello': PushSaferSound.HELLO, 'no': PushSaferSound.NO, 'okay': PushSaferSound.OKAY, 'ok': PushSaferSound.OKAY, 'ooohhhweee': PushSaferSound.OOOHHHWEEE, 'warn': PushSaferSound.WARNING, 'warning': PushSaferSound.WARNING, 'welcome': PushSaferSound.WELCOME, 'yeah': PushSaferSound.YEAH, 'yes': PushSaferSound.YES, 'beep': PushSaferSound.BEEP1, 'beep1': PushSaferSound.BEEP1, 'weee': PushSaferSound.WEEE, 'wee': PushSaferSound.WEEE, 'cutinout': PushSaferSound.CUTINOUT, 'flickglass': PushSaferSound.FLICK_GLASS, 'laser': PushSaferSound.LASER, 'windchime': PushSaferSound.WIND_CHIME, 'echo': PushSaferSound.ECHO, 'zipper': PushSaferSound.ZIPPER, 'hihat': PushSaferSound.HIHAT, 'beep2': PushSaferSound.BEEP2, 'beep3': PushSaferSound.BEEP3, 'beep4': PushSaferSound.BEEP4, 'alarmarmed': PushSaferSound.ALARM_ARMED, 'armed': PushSaferSound.ALARM_ARMED, 'alarmdisarmed': PushSaferSound.ALARM_DISARMED, 'disarmed': PushSaferSound.ALARM_DISARMED, 'backupready': PushSaferSound.BACKUP_READY, 'dooropen': PushSaferSound.DOOR_OPENED, 'dopen': PushSaferSound.DOOR_OPENED, 'doorclosed': PushSaferSound.DOOR_CLOSED, 'dclosed': PushSaferSound.DOOR_CLOSED, 'windowopen': PushSaferSound.WINDOW_OPEN, 'wopen': PushSaferSound.WINDOW_OPEN, 'windowclosed': PushSaferSound.WINDOW_CLOSED, 'wclosed': PushSaferSound.WINDOW_CLOSED, 'lighton': PushSaferSound.LIGHT_ON, 'lon': PushSaferSound.LIGHT_ON, 'lightoff': PushSaferSound.LIGHT_OFF, 'loff': PushSaferSound.LIGHT_OFF, 'doorbellrang': PushSaferSound.DOORBELL_RANG, } # Priorities class PushSaferPriority: LOW = -2 MODERATE = -1 NORMAL = 0 HIGH = 1 EMERGENCY = 2 PUSHSAFER_PRIORITIES = ( PushSaferPriority.LOW, PushSaferPriority.MODERATE, PushSaferPriority.NORMAL, PushSaferPriority.HIGH, PushSaferPriority.EMERGENCY, ) PUSHSAFER_PRIORITY_MAP = { # short for 'low' 'low': PushSaferPriority.LOW, # short for 'medium' 'medium': PushSaferPriority.MODERATE, # short for 'normal' 'normal': PushSaferPriority.NORMAL, # short for 'high' 'high': PushSaferPriority.HIGH, # short for 'emergency' 'emergency': PushSaferPriority.EMERGENCY, } # Identify the priority ou want to designate as the fall back DEFAULT_PRIORITY = "normal" # Vibrations class PushSaferVibration: """ Defines the acceptable vibration settings for notification """ # x1 LOW = 1 # x2 NORMAL = 2 # x3 HIGH = 3 # Identify all of the vibrations in one place PUSHSAFER_VIBRATIONS = ( PushSaferVibration.LOW, PushSaferVibration.NORMAL, PushSaferVibration.HIGH, ) # At this time, the following pictures can be attached to each notification # at one time. 
When more are supported, just add their argument below PICTURE_PARAMETER = ( 'p', 'p2', 'p3', ) # Flag used as a placeholder to sending to all devices PUSHSAFER_SEND_TO_ALL = 'a' class NotifyPushSafer(NotifyBase): """ A wrapper for PushSafer Notifications """ # The default descriptive name associated with the Notification service_name = 'Pushsafer' # The services URL service_url = 'https://www.pushsafer.com/' # The default insecure protocol protocol = 'psafer' # The default secure protocol secure_protocol = 'psafers' # Support attachments attachment_support = True # Number of requests to a allow per second request_rate_per_sec = 1.2 # The icon ID of 25 looks like a megaphone default_pushsafer_icon = 25 # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_pushsafer' # Defines the hostname to post content to; since this service supports # both insecure and secure methods, we set the {schema} just before we # post the message upstream. notify_url = '{schema}://www.pushsafer.com/api' # Define object templates templates = ( '{schema}://{privatekey}', '{schema}://{privatekey}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'privatekey': { 'name': _('Private Key'), 'type': 'string', 'private': True, 'required': True, }, 'target_device': { 'name': _('Target Device'), 'type': 'string', 'map_to': 'targets', }, 'target_email': { 'name': _('Target Email'), 'type': 'string', 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'priority': { 'name': _('Priority'), 'type': 'choice:int', 'values': PUSHSAFER_PRIORITIES, }, 'sound': { 'name': _('Sound'), 'type': 'choice:string', 'values': PUSHSAFER_SOUND_MAP, }, 'vibration': { 'name': _('Vibration'), 'type': 'choice:int', 'values': PUSHSAFER_VIBRATIONS, }, 'to': { 'alias_of': 'targets', }, }) def __init__(self, privatekey, targets=None, priority=None, sound=None, vibration=None, **kwargs): """ Initialize PushSafer Object """ super().__init__(**kwargs) # # Priority # try: # Acquire our priority if we can: # - We accept both the integer form as well as a string # representation self.priority = int(priority) except TypeError: # NoneType means use Default; this is an okay exception self.priority = None except ValueError: # Input is a string; attempt to get the lookup from our # priority mapping priority = priority.lower().strip() # This little bit of black magic allows us to match against # low, lo, l (for low); # normal, norma, norm, nor, no, n (for normal) # ... 
etc match = next((key for key in PUSHSAFER_PRIORITY_MAP.keys() if key.startswith(priority)), None) \ if priority else None # Now test to see if we got a match if not match: msg = 'An invalid PushSafer priority ' \ '({}) was specified.'.format(priority) self.logger.warning(msg) raise TypeError(msg) # store our successfully looked up priority self.priority = PUSHSAFER_PRIORITY_MAP[match] if self.priority is not None and \ self.priority not in PUSHSAFER_PRIORITY_MAP.values(): msg = 'An invalid PushSafer priority ' \ '({}) was specified.'.format(priority) self.logger.warning(msg) raise TypeError(msg) # # Sound # try: # Acquire our sound if we can: # - We accept both the integer form as well as a string # representation self.sound = int(sound) except TypeError: # NoneType means use Default; this is an okay exception self.sound = None except ValueError: # Input is a string; attempt to get the lookup from our # sound mapping sound = sound.lower().strip() # This little bit of black magic allows us to match against # against multiple versions of the same string # ... etc match = next((key for key in PUSHSAFER_SOUND_MAP.keys() if key.startswith(sound)), None) \ if sound else None # Now test to see if we got a match if not match: msg = 'An invalid PushSafer sound ' \ '({}) was specified.'.format(sound) self.logger.warning(msg) raise TypeError(msg) # store our successfully looked up sound self.sound = PUSHSAFER_SOUND_MAP[match] if self.sound is not None and \ self.sound not in PUSHSAFER_SOUND_MAP.values(): msg = 'An invalid PushSafer sound ' \ '({}) was specified.'.format(sound) self.logger.warning(msg) raise TypeError(msg) # # Vibration # try: # Use defined integer as is if defined, no further error checking # is performed self.vibration = int(vibration) except TypeError: # NoneType means use Default; this is an okay exception self.vibration = None except ValueError: msg = 'An invalid PushSafer vibration ' \ '({}) was specified.'.format(vibration) self.logger.warning(msg) raise TypeError(msg) if self.vibration and self.vibration not in PUSHSAFER_VIBRATIONS: msg = 'An invalid PushSafer vibration ' \ '({}) was specified.'.format(vibration) self.logger.warning(msg) raise TypeError(msg) # # Private Key (associated with project) # self.privatekey = validate_regex(privatekey) if not self.privatekey: msg = 'An invalid PushSafer Private Key ' \ '({}) was specified.'.format(privatekey) self.logger.warning(msg) raise TypeError(msg) self.targets = parse_list(targets) if len(self.targets) == 0: self.targets = (PUSHSAFER_SEND_TO_ALL, ) return def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, **kwargs): """ Perform PushSafer Notification """ # error tracking (used for function return) has_error = False # Initialize our list of attachments attachments = [] if attach and self.attachment_support: # We need to upload our payload first so that we can source it # in remaining messages for no, attachment in enumerate(attach, start=1): # prepare payload if not attachment: # We could not access the attachment self.logger.error( 'Could not access PushSafer attachment {}.'.format( attachment.url(privacy=True))) return False if not attachment.mimetype.startswith('image/'): # Attachment not supported; continue peacefully self.logger.debug( 'Ignoring unsupported PushSafer attachment {}.'.format( attachment.url(privacy=True))) continue self.logger.debug( 'Posting PushSafer attachment {}'.format( attachment.url(privacy=True))) try: # Output must be in a DataURL format (that's what # PushSafer calls it): 
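                    # e.g. 'data:image/png;base64,iVBORw0KGgo...'
                    # (the attachment's mimetype and base64 content joined
                    # exactly as in the format string below)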
attachments.append(( attachment.name if attachment.name else f'file{no:03}.dat', 'data:{};base64,{}'.format( attachment.mimetype, attachment.base64(), ) )) except exception.AppriseException: # We could not access the attachment self.logger.error( 'Could not access PushSafer attachment {}.'.format( attachment.url(privacy=True))) return False self.logger.debug( 'Appending PushSafer attachment {}'.format( attachment.url(privacy=True))) # Create a copy of the targets list targets = list(self.targets) while len(targets): recipient = targets.pop(0) # prepare payload payload = { 't': title, 'm': body, # Our default icon to use 'i': self.default_pushsafer_icon, # Notification Color 'c': self.color(notify_type), # Target Recipient 'd': recipient, } if self.sound is not None: # Only apply sound setting if it was specified payload['s'] = str(self.sound) if self.vibration is not None: # Only apply vibration setting payload['v'] = str(self.vibration) if not attachments: okay, response = self._send(payload) if not okay: has_error = True continue self.logger.info( 'Sent PushSafer notification to "%s".' % (recipient)) else: # Create a copy of our payload object _payload = payload.copy() for idx in range( 0, len(attachments), len(PICTURE_PARAMETER)): # Send our attachments to our same user (already prepared # as our payload object) for c, attachment in enumerate( attachments[idx:idx + len(PICTURE_PARAMETER)]): # Get our attachment information filename, dataurl = attachment _payload.update({PICTURE_PARAMETER[c]: dataurl}) self.logger.debug( 'Added attachment (%s) to "%s".' % ( filename, recipient)) okay, response = self._send(_payload) if not okay: has_error = True continue self.logger.info( 'Sent PushSafer attachment (%s) to "%s".' % ( filename, recipient)) # More then the maximum messages shouldn't cause all of # the text to loop on future iterations _payload = payload.copy() _payload['t'] = '' _payload['m'] = '...' 
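        # The loop above batches pictures: PushSafer accepts at most
        # len(PICTURE_PARAMETER) images per request (currently 3, mapped to
        # the 'p', 'p2' and 'p3' arguments), so attachments are posted in
        # groups of up to three.  After each request the title is blanked
        # and the body reduced to '...' so that follow-up requests do not
        # repeat the full message (e.g. 7 attachments are delivered across
        # 3 requests carrying 3 + 3 + 1 pictures).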
return not has_error def _send(self, payload, **kwargs): """ Wrapper to the requests (post) object """ headers = { 'User-Agent': self.app_id, } # Prepare the notification URL to post to notify_url = self.notify_url.format( schema='https' if self.secure else 'http' ) # Store the payload key payload['k'] = self.privatekey self.logger.debug('PushSafer POST URL: %s (cert_verify=%r)' % ( notify_url, self.verify_certificate, )) self.logger.debug('PushSafer Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() # Default response type response = None # Initialize our Pushsafer expected responses _code = None _str = 'Unknown' try: # Open our attachment path if required: r = requests.post( notify_url, data=payload, headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) try: response = loads(r.content) _code = response.get('status') _str = response.get('success', _str) \ if _code == 1 else response.get('error', _str) except (AttributeError, TypeError, ValueError): # ValueError = r.content is Unparsable # TypeError = r.content is None # AttributeError = r is None # Fall back to the existing unparsed value response = r.content if r.status_code not in ( requests.codes.ok, requests.codes.no_content): # We had a problem status_str = \ NotifyPushSafer.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to deliver payload to PushSafer:' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) return False, response elif _code != 1: # It's a bit backwards, but: # 1 is returned if we succeed # 0 is returned if we fail self.logger.warning( 'Failed to deliver payload to PushSafer;' ' error={}.'.format(_str)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) return False, response # otherwise we were successful return True, response except requests.RequestException as e: self.logger.warning( 'A Connection error occurred communicating with PushSafer.') self.logger.debug('Socket Exception: %s' % str(e)) return False, response @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol if self.secure else self.protocol, self.privatekey, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
""" # Our URL parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) if self.priority is not None: # Store our priority; but only if it was specified params['priority'] = \ next((key for key, value in PUSHSAFER_PRIORITY_MAP.items() if value == self.priority), DEFAULT_PRIORITY) # pragma: no cover if self.sound is not None: # Store our sound; but only if it was specified params['sound'] = \ next((key for key, value in PUSHSAFER_SOUND_MAP.items() if value == self.sound), '') # pragma: no cover if self.vibration is not None: # Store our vibration; but only if it was specified params['vibration'] = str(self.vibration) targets = '/'.join([NotifyPushSafer.quote(x) for x in self.targets]) if targets == PUSHSAFER_SEND_TO_ALL: # keyword is reserved for internal usage only; it's safe to remove # it from the recipients list targets = '' return '{schema}://{privatekey}/{targets}?{params}'.format( schema=self.secure_protocol if self.secure else self.protocol, privatekey=self.pprint(self.privatekey, privacy, safe=''), targets=targets, params=NotifyPushSafer.urlencode(params)) def __len__(self): """ Returns the number of targets associated with this notification """ return len(self.targets) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Fetch our targets results['targets'] = \ NotifyPushSafer.split_path(results['fullpath']) # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyPushSafer.parse_list(results['qsd']['to']) # Setup the token; we store it in Private Key for global # plugin consistency with naming conventions results['privatekey'] = NotifyPushSafer.unquote(results['host']) if 'priority' in results['qsd'] and len(results['qsd']['priority']): results['priority'] = \ NotifyPushSafer.unquote(results['qsd']['priority']) if 'sound' in results['qsd'] and len(results['qsd']['sound']): results['sound'] = \ NotifyPushSafer.unquote(results['qsd']['sound']) if 'vibration' in results['qsd'] and len(results['qsd']['vibration']): results['vibration'] = \ NotifyPushSafer.unquote(results['qsd']['vibration']) return results apprise-1.9.3/apprise/plugins/pushy.py000066400000000000000000000307201477231770000200470ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # API reference: https://pushy.me/docs/api/send-notifications import re import requests from itertools import chain from json import dumps, loads from .base import NotifyBase from ..common import NotifyType from ..utils.parse import parse_list, validate_regex from ..locale import gettext_lazy as _ # Used to detect a Device and Topic VALIDATE_DEVICE = re.compile(r'^@(?P[a-z0-9]+)$', re.I) VALIDATE_TOPIC = re.compile(r'^[#]?(?P[a-z0-9]+)$', re.I) # Extend HTTP Error Messages PUSHY_HTTP_ERROR_MAP = { 401: 'Unauthorized - Invalid Token.', } class NotifyPushy(NotifyBase): """ A wrapper for Pushy Notifications """ # The default descriptive name associated with the Notification service_name = 'Pushy' # The services URL service_url = 'https://pushy.me/' # All Pushy requests are secure secure_protocol = 'pushy' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_pushy' # Pushy uses the http protocol with JSON requests notify_url = 'https://api.pushy.me/push?api_key={apikey}' # The maximum allowable characters allowed in the body per message body_maxlen = 4096 # Define object templates templates = ( '{schema}://{apikey}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'apikey': { 'name': _('Secret API Key'), 'type': 'string', 'private': True, 'required': True, }, 'target_device': { 'name': _('Target Device'), 'type': 'string', 'prefix': '@', 'map_to': 'targets', }, 'target_topic': { 'name': _('Target Topic'), 'type': 'string', 'prefix': '#', 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', 'required': True, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'sound': { # Specify something like ping.aiff 'name': _('Sound'), 'type': 'string', }, 'badge': { 'name': _('Badge'), 'type': 'int', 'min': 0, }, 'to': { 'alias_of': 'targets', }, 'key': { 'alias_of': 'apikey', }, }) def __init__(self, apikey, targets=None, sound=None, badge=None, **kwargs): """ Initialize Pushy Object """ super().__init__(**kwargs) # Access Token (associated with project) self.apikey = validate_regex(apikey) if not self.apikey: msg = 'An invalid Pushy Secret API Key ' \ '({}) was specified.'.format(apikey) self.logger.warning(msg) raise TypeError(msg) # Get our targets self.devices = [] self.topics = [] for target in parse_list(targets): result = VALIDATE_TOPIC.match(target) if result: self.topics.append(result.group('topic')) continue result = VALIDATE_DEVICE.match(target) if result: self.devices.append(result.group('device')) continue self.logger.warning( 'Dropped invalid topic/device ' '({}) specified.'.format(target), ) # Setup our sound self.sound = sound # Badge try: # Acquire our badge count if we can: # - We accept both the integer form as well as a string # representation self.badge = int(badge) if self.badge < 0: raise ValueError() except TypeError: # NoneType means use Default; this is an okay exception 
self.badge = None except ValueError: self.badge = None self.logger.warning( 'The specified Pushy badge ({}) is not valid ', badge) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Pushy Notification """ if len(self.topics) + len(self.devices) == 0: # There were no services to notify self.logger.warning('There were no Pushy targets to notify.') return False # error tracking (used for function return) has_error = False # Default Header headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json', 'Accepts': 'application/json', } # Our URL notify_url = self.notify_url.format(apikey=self.apikey) # Default content response object content = {} # Create a copy of targets (topics and devices) targets = list(self.topics) + list(self.devices) while len(targets): target = targets.pop(0) # prepare JSON Object payload = { # Mandatory fields 'to': target, "data": { "message": body, }, "notification": { 'body': body, } } # Optional payload items if title: payload['notification']['title'] = title if self.sound: payload['notification']['sound'] = self.sound if self.badge is not None: payload['notification']['badge'] = self.badge self.logger.debug('Pushy POST URL: %s (cert_verify=%r)' % ( notify_url, self.verify_certificate, )) self.logger.debug('Pushy Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( notify_url, data=dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) # Sample response # See: https://pushy.me/docs/api/send-notifications # { # "success": true, # "id": "5ea9b214b47cad768a35f13a", # "info": { # "devices": 1 # "failed": ['abc'] # } # } try: content = loads(r.content) except (AttributeError, TypeError, ValueError): # ValueError = r.content is Unparsable # TypeError = r.content is None # AttributeError = r is None content = { "success": False, "id": '', "info": {}, } if r.status_code != requests.codes.ok \ or not content.get('success'): # We had a problem status_str = \ NotifyPushy.http_response_code_lookup( r.status_code, PUSHY_HTTP_ERROR_MAP) self.logger.warning( 'Failed to send Pushy notification to {}: ' '{}{}error={}.'.format( target, status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) has_error = True continue else: self.logger.info( 'Sent Pushy notification to %s.' % target) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Pushy:%s ' 'notification', target) self.logger.debug('Socket Exception: %s' % str(e)) has_error = True continue return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.apikey) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
""" # Define any URL parameters params = {} if self.sound: params['sound'] = self.sound if self.badge is not None: params['badge'] = str(self.badge) # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) return '{schema}://{apikey}/{targets}/?{params}'.format( schema=self.secure_protocol, apikey=self.pprint(self.apikey, privacy, safe=''), targets='/'.join( [NotifyPushy.quote(x, safe='@#') for x in chain( # Topics are prefixed with a pound/hashtag symbol ['#{}'.format(x) for x in self.topics], # Devices ['@{}'.format(x) for x in self.devices], )]), params=NotifyPushy.urlencode(params)) def __len__(self): """ Returns the number of targets associated with this notification """ return len(self.topics) + len(self.devices) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Token results['apikey'] = NotifyPushy.unquote(results['host']) # Retrieve all of our targets results['targets'] = NotifyPushy.split_path(results['fullpath']) # Get the sound if 'sound' in results['qsd'] and len(results['qsd']['sound']): results['sound'] = \ NotifyPushy.unquote(results['qsd']['sound']) # Badge if 'badge' in results['qsd'] and results['qsd']['badge']: results['badge'] = NotifyPushy.unquote( results['qsd']['badge'].strip()) # Support key variable to store Secret API Key if 'key' in results['qsd'] and len(results['qsd']['key']): results['apikey'] = results['qsd']['key'] # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyPushy.parse_list(results['qsd']['to']) return results apprise-1.9.3/apprise/plugins/reddit.py000066400000000000000000000627661477231770000201710ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # 1. Visit https://www.reddit.com/prefs/apps and scroll to the bottom # 2. Click on the button that reads 'are you a developer? create an app...' # 3. Set the mode to `script`, # 4. 
Provide a `name`, `description`, `redirect uri` and save it. # 5. Once the bot is saved, you'll be given a ID (next to the the bot name) # and a Secret. # The App ID will look something like this: YWARPXajkk645m # The App Secret will look something like this: YZGKc5YNjq3BsC-bf7oBKalBMeb1xA # The App will also have a location where you can identify the users # who have access (identified as Developers) to the app itself. You will # additionally need these credentials authenticate with. # With this information you'll be able to form the URL: # reddit://{user}:{password}@{app_id}/{app_secret} # All of the documentation needed to work with the Reddit API can be found # here: # - https://www.reddit.com/dev/api/ # - https://www.reddit.com/dev/api/#POST_api_submit # - https://github.com/reddit-archive/reddit/wiki/API import requests from json import loads from datetime import timedelta from datetime import datetime from datetime import timezone from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyFormat from ..common import NotifyType from ..utils.parse import parse_list, parse_bool, validate_regex from ..locale import gettext_lazy as _ from .. import __title__, __version__ # Extend HTTP Error Messages REDDIT_HTTP_ERROR_MAP = { 401: 'Unauthorized - Invalid Token', } class RedditMessageKind: """ Define the kinds of messages supported """ # Attempt to auto-detect the type prior to passing along the message to # Reddit AUTO = 'auto' # A common message SELF = 'self' # A Hyperlink LINK = 'link' REDDIT_MESSAGE_KINDS = ( RedditMessageKind.AUTO, RedditMessageKind.SELF, RedditMessageKind.LINK, ) class NotifyReddit(NotifyBase): """ A wrapper for Notify Reddit Notifications """ # The default descriptive name associated with the Notification service_name = 'Reddit' # The services URL service_url = 'https://reddit.com' # The default secure protocol secure_protocol = 'reddit' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_reddit' # The maximum size of the message body_maxlen = 6000 # Maximum title length as defined by the Reddit API title_maxlen = 300 # Default to markdown notify_format = NotifyFormat.MARKDOWN # The default Notification URL to use auth_url = 'https://www.reddit.com/api/v1/access_token' submit_url = 'https://oauth.reddit.com/api/submit' # Reddit is kind enough to return how many more requests we're allowed to # continue to make within it's header response as: # X-RateLimit-Reset: The epoc time (in seconds) we can expect our # rate-limit to be reset. # X-RateLimit-Remaining: an integer identifying how many requests we're # still allow to make. 
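    # Because those headers drive the throttling dynamically (handled in
    # _fetch() below), no static rate limit is imposed here; the
    # request_rate_per_sec value below stays at 0 and the wait applied
    # before a request is, roughly:
    #
    #   if ratelimit_remaining <= 0 and now < ratelimit_reset:
    #       wait = (ratelimit_reset - now + clock_skew).total_seconds()
    #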
request_rate_per_sec = 0 # Taken right from google.auth.helpers: clock_skew = timedelta(seconds=10) # 1 hour in seconds (the lifetime of our token) access_token_lifetime_sec = timedelta(seconds=3600) # Define object templates templates = ( '{schema}://{user}:{password}@{app_id}/{app_secret}/{targets}', ) # Define our template arguments template_tokens = dict(NotifyBase.template_tokens, **{ 'user': { 'name': _('User Name'), 'type': 'string', 'required': True, }, 'password': { 'name': _('Password'), 'type': 'string', 'private': True, 'required': True, }, 'app_id': { 'name': _('Application ID'), 'type': 'string', 'private': True, 'required': True, 'regex': (r'^[a-z0-9_-]+$', 'i'), }, 'app_secret': { 'name': _('Application Secret'), 'type': 'string', 'private': True, 'required': True, 'regex': (r'^[a-z0-9_-]+$', 'i'), }, 'target_subreddit': { 'name': _('Target Subreddit'), 'type': 'string', 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', 'required': True, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'kind': { 'name': _('Kind'), 'type': 'choice:string', 'values': REDDIT_MESSAGE_KINDS, 'default': RedditMessageKind.AUTO, }, 'flair_id': { 'name': _('Flair ID'), 'type': 'string', 'map_to': 'flair_id', }, 'flair_text': { 'name': _('Flair Text'), 'type': 'string', 'map_to': 'flair_text', }, 'nsfw': { 'name': _('NSFW'), 'type': 'bool', 'default': False, 'map_to': 'nsfw', }, 'ad': { 'name': _('Is Ad?'), 'type': 'bool', 'default': False, 'map_to': 'advertisement', }, 'replies': { 'name': _('Send Replies'), 'type': 'bool', 'default': True, 'map_to': 'sendreplies', }, 'spoiler': { 'name': _('Is Spoiler'), 'type': 'bool', 'default': False, 'map_to': 'spoiler', }, 'resubmit': { 'name': _('Resubmit Flag'), 'type': 'bool', 'default': False, 'map_to': 'resubmit', }, }) def __init__(self, app_id=None, app_secret=None, targets=None, kind=None, nsfw=False, sendreplies=True, resubmit=False, spoiler=False, advertisement=False, flair_id=None, flair_text=None, **kwargs): """ Initialize Notify Reddit Object """ super().__init__(**kwargs) # Initialize subreddit list self.subreddits = set() # Not Safe For Work Flag self.nsfw = nsfw # Send Replies Flag self.sendreplies = sendreplies # Is Spoiler Flag self.spoiler = spoiler # Resubmit Flag self.resubmit = resubmit # Is Ad? 
self.advertisement = advertisement # Flair details self.flair_id = flair_id self.flair_text = flair_text # Our keys we build using the provided content self.__refresh_token = None self.__access_token = None self.__access_token_expiry = datetime.now(timezone.utc) self.kind = kind.strip().lower() \ if isinstance(kind, str) \ else self.template_args['kind']['default'] if self.kind not in REDDIT_MESSAGE_KINDS: msg = 'An invalid Reddit message kind ({}) was specified'.format( kind) self.logger.warning(msg) raise TypeError(msg) self.user = validate_regex(self.user) if not self.user: msg = 'An invalid Reddit User ID ' \ '({}) was specified'.format(self.user) self.logger.warning(msg) raise TypeError(msg) self.password = validate_regex(self.password) if not self.password: msg = 'An invalid Reddit Password ' \ '({}) was specified'.format(self.password) self.logger.warning(msg) raise TypeError(msg) self.client_id = validate_regex( app_id, *self.template_tokens['app_id']['regex']) if not self.client_id: msg = 'An invalid Reddit App ID ' \ '({}) was specified'.format(app_id) self.logger.warning(msg) raise TypeError(msg) self.client_secret = validate_regex( app_secret, *self.template_tokens['app_secret']['regex']) if not self.client_secret: msg = 'An invalid Reddit App Secret ' \ '({}) was specified'.format(app_secret) self.logger.warning(msg) raise TypeError(msg) # Build list of subreddits self.subreddits = [ sr.lstrip('#') for sr in parse_list(targets) if sr.lstrip('#')] if not self.subreddits: self.logger.warning( 'No subreddits were identified to be notified') # For Rate Limit Tracking Purposes self.ratelimit_reset = datetime.now(timezone.utc).replace(tzinfo=None) # Default to 1.0 self.ratelimit_remaining = 1.0 return @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol, self.client_id, self.client_secret, self.user, self.password, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
""" # Define any URL parameters params = { 'kind': self.kind, 'ad': 'yes' if self.advertisement else 'no', 'nsfw': 'yes' if self.nsfw else 'no', 'resubmit': 'yes' if self.resubmit else 'no', 'replies': 'yes' if self.sendreplies else 'no', 'spoiler': 'yes' if self.spoiler else 'no', } # Flair support if self.flair_id: params['flair_id'] = self.flair_id if self.flair_text: params['flair_text'] = self.flair_text # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) return '{schema}://{user}:{password}@{app_id}/{app_secret}' \ '/{targets}/?{params}'.format( schema=self.secure_protocol, user=NotifyReddit.quote(self.user, safe=''), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), app_id=self.pprint( self.client_id, privacy, mode=PrivacyMode.Secret, safe=''), app_secret=self.pprint( self.client_secret, privacy, mode=PrivacyMode.Secret, safe=''), targets='/'.join( [NotifyReddit.quote(x, safe='') for x in self.subreddits]), params=NotifyReddit.urlencode(params), ) def __len__(self): """ Returns the number of targets associated with this notification """ return len(self.subreddits) def login(self): """ A simple wrapper to authenticate with the Reddit Server """ # Prepare our payload payload = { 'grant_type': 'password', 'username': self.user, 'password': self.password, } # Enforce a False flag setting before calling _fetch() self.__access_token = False # Send Login Information postokay, response = self._fetch( self.auth_url, payload=payload, ) if not postokay or not response: # Setting this variable to False as a way of letting us know # we failed to authenticate on our last attempt self.__access_token = False return False # Our response object looks like this (content has been altered for # presentation purposes): # { # "access_token": Your access token, # "token_type": "bearer", # "expires_in": Unix Epoch Seconds, # "scope": A scope string, # "refresh_token": Your refresh token # } # Acquire our token self.__access_token = response.get('access_token') # Handle other optional arguments we can use if 'expires_in' in response: delta = timedelta(seconds=int(response['expires_in'])) self.__access_token_expiry = \ delta + datetime.now(timezone.utc) - self.clock_skew else: self.__access_token_expiry = self.access_token_lifetime_sec + \ datetime.now(timezone.utc) - self.clock_skew # The Refresh Token self.__refresh_token = response.get( 'refresh_token', self.__refresh_token) if self.__access_token: self.logger.info('Authenticated to Reddit as {}'.format(self.user)) return True self.logger.warning( 'Failed to authenticate to Reddit as {}'.format(self.user)) # Mark our failure return False def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Reddit Notification """ # error tracking (used for function return) has_error = False if not self.__access_token and not self.login(): # We failed to authenticate - we're done return False if not len(self.subreddits): # We have nothing to notify; we're done self.logger.warning('There are no Reddit targets to notify') return False # Prepare our Message Type/Kind if self.kind == RedditMessageKind.AUTO: parsed = NotifyBase.parse_url(body) # Detect a link if parsed and parsed.get('schema', '').startswith('http') \ and parsed.get('host'): kind = RedditMessageKind.LINK else: kind = RedditMessageKind.SELF else: kind = self.kind # Create a copy of the subreddits list subreddits = list(self.subreddits) while len(subreddits) > 0: # Retrieve our subreddit subreddit = 
subreddits.pop() # Prepare our payload payload = { 'ad': True if self.advertisement else False, 'api_type': 'json', 'extension': 'json', 'sr': subreddit, 'title': title if title else self.app_desc, 'kind': kind, 'nsfw': True if self.nsfw else False, 'resubmit': True if self.resubmit else False, 'sendreplies': True if self.sendreplies else False, 'spoiler': True if self.spoiler else False, } if self.flair_id: payload['flair_id'] = self.flair_id if self.flair_text: payload['flair_text'] = self.flair_text if kind == RedditMessageKind.LINK: payload.update({ 'url': body, }) else: payload.update({ 'text': body, }) postokay, response = self._fetch(self.submit_url, payload=payload) # only toggle has_error flag if we had an error if not postokay: # Mark our failure has_error = True continue # If we reach here, we were successful self.logger.info( 'Sent Reddit notification to {}'.format( subreddit)) return not has_error def _fetch(self, url, payload=None): """ Wrapper to Reddit API requests object """ # use what was specified, otherwise build headers dynamically headers = { 'User-Agent': '{} v{}'.format(__title__, __version__) } if self.__access_token: # Set our token headers['Authorization'] = 'Bearer {}'.format(self.__access_token) # Prepare our url url = self.submit_url if self.__access_token else self.auth_url # Some Debug Logging self.logger.debug('Reddit POST URL: {} (cert_verify={})'.format( url, self.verify_certificate)) self.logger.debug('Reddit Payload: %s' % str(payload)) # By default set wait to None wait = None if self.ratelimit_remaining <= 0.0: # Determine how long we should wait for or if we should wait at # all. This isn't fool-proof because we can't be sure the client # time (calling this script) is completely synced up with the # Reddit server. 
One would hope we're on NTP and our clocks are # the same allowing this to role smoothly: now = datetime.now(timezone.utc).replace(tzinfo=None) if now < self.ratelimit_reset: # We need to throttle for the difference in seconds wait = abs( (self.ratelimit_reset - now + self.clock_skew) .total_seconds()) # Always call throttle before any remote server i/o is made; self.throttle(wait=wait) # Initialize a default value for our content value content = {} # acquire our request mode try: r = requests.post( url, data=payload, auth=None if self.__access_token else (self.client_id, self.client_secret), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) # We attempt to login again and retry the original request # if we aren't in the process of handling a login already if r.status_code != requests.codes.ok \ and self.__access_token and url != self.auth_url: # We had a problem status_str = \ NotifyReddit.http_response_code_lookup( r.status_code, REDDIT_HTTP_ERROR_MAP) self.logger.debug( 'Taking countermeasures after failed to send to Reddit ' '{}: {}error={}'.format( url, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # We failed to authenticate with our token; login one more # time and retry this original request if not self.login(): return (False, {}) # Try again r = requests.post( url, data=payload, headers=headers, verify=self.verify_certificate, timeout=self.request_timeout ) # Get our JSON content if it's possible try: content = loads(r.content) except (TypeError, ValueError, AttributeError): # TypeError = r.content is not a String # ValueError = r.content is Unparsable # AttributeError = r.content is None # We had a problem status_str = \ NotifyReddit.http_response_code_lookup( r.status_code, REDDIT_HTTP_ERROR_MAP) # Reddit always returns a JSON response self.logger.warning( 'Failed to send to Reddit after countermeasures {}: ' '{}error={}'.format( url, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) return (False, {}) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyReddit.http_response_code_lookup( r.status_code, REDDIT_HTTP_ERROR_MAP) self.logger.warning( 'Failed to send to Reddit {}: ' '{}error={}'.format( url, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure return (False, content) errors = [] if not content else \ content.get('json', {}).get('errors', []) if errors: self.logger.warning( 'Failed to send to Reddit {}: ' '{}'.format( url, str(errors))) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure return (False, content) try: # Store our rate limiting (if provided) self.ratelimit_remaining = \ float(r.headers.get( 'X-RateLimit-Remaining')) self.ratelimit_reset = datetime.fromtimestamp( int(r.headers.get('X-RateLimit-Reset')), timezone.utc ).replace(tzinfo=None) except (TypeError, ValueError): # This is returned if we could not retrieve this information # gracefully accept this state and move on pass except requests.RequestException as e: self.logger.warning( 'Exception received when sending Reddit to {}'. format(url)) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure return (False, content) return (True, content) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. 
""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Acquire our targets results['targets'] = NotifyReddit.split_path(results['fullpath']) # Kind override if 'kind' in results['qsd'] and results['qsd']['kind']: results['kind'] = NotifyReddit.unquote( results['qsd']['kind'].strip().lower()) else: results['kind'] = RedditMessageKind.AUTO # Is an Ad? results['ad'] = \ parse_bool(results['qsd'].get('ad', False)) # Get Not Safe For Work (NSFW) Flag results['nsfw'] = \ parse_bool(results['qsd'].get('nsfw', False)) # Send Replies Flag results['replies'] = \ parse_bool(results['qsd'].get('replies', True)) # Resubmit Flag results['resubmit'] = \ parse_bool(results['qsd'].get('resubmit', False)) # Is Spoiler Flag results['spoiler'] = \ parse_bool(results['qsd'].get('spoiler', False)) if 'flair_text' in results['qsd']: results['flair_text'] = \ NotifyReddit.unquote(results['qsd']['flair_text']) if 'flair_id' in results['qsd']: results['flair_id'] = \ NotifyReddit.unquote(results['qsd']['flair_id']) # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyReddit.parse_list(results['qsd']['to']) if 'app_id' in results['qsd']: results['app_id'] = \ NotifyReddit.unquote(results['qsd']['app_id']) else: # The App/Bot ID is the hostname results['app_id'] = NotifyReddit.unquote(results['host']) if 'app_secret' in results['qsd']: results['app_secret'] = \ NotifyReddit.unquote(results['qsd']['app_secret']) else: # The first target identified is the App secret results['app_secret'] = \ None if not results['targets'] else results['targets'].pop(0) return results apprise-1.9.3/apprise/plugins/resend.py000066400000000000000000000357131477231770000201660ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # You will need an API Key for this plugin to work. # From the Settings -> API Keys you can click "Create API Key" if you don't # have one already. The key must have at least the "Mail Send" permission # to work. 
# # The schema to use the plugin looks like this: # {schema}://{apikey}:{from_email} # # Your {from_email} must be comprissed of your Resend Authenticated # Domain. # Simple API Reference: # - https://resend.com/onboarding import requests from json import dumps from .base import NotifyBase from .. import exception from ..common import NotifyFormat from ..common import NotifyType from ..utils.parse import parse_list, is_email, validate_regex from ..locale import gettext_lazy as _ RESEND_HTTP_ERROR_MAP = { 200: "Successful request.", 400: "Check that the parameters were correct.", 401: "The API key used was missing.", 403: "The API key used was invalid.", 404: "The resource was not found.", 429: "The rate limit was exceeded.", } class NotifyResend(NotifyBase): """ A wrapper for Notify Resend Notifications """ # The default descriptive name associated with the Notification service_name = 'Resend' # The services URL service_url = 'https://resend.com' # The default secure protocol secure_protocol = 'resend' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_resend' # Default to markdown notify_format = NotifyFormat.HTML # The default Email API URL to use notify_url = 'https://api.resend.com/emails' # Support attachments attachment_support = True # Allow 300 requests per minute. # 60/300 = 0.2 request_rate_per_sec = 0.2 # The default subject to use if one isn't specified. default_empty_subject = '' # Define object templates templates = ( '{schema}://{apikey}:{from_email}', '{schema}://{apikey}:{from_email}/{targets}', ) # Define our template arguments template_tokens = dict(NotifyBase.template_tokens, **{ 'apikey': { 'name': _('API Key'), 'type': 'string', 'private': True, 'required': True, 'regex': (r'^[A-Z0-9._-]+$', 'i'), }, 'from_email': { 'name': _('Source Email'), 'type': 'string', 'required': True, }, 'target_email': { 'name': _('Target Email'), 'type': 'string', 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'cc': { 'name': _('Carbon Copy'), 'type': 'list:string', }, 'bcc': { 'name': _('Blind Carbon Copy'), 'type': 'list:string', }, }) def __init__(self, apikey, from_email, targets=None, cc=None, bcc=None, **kwargs): """ Initialize Notify Resend Object """ super().__init__(**kwargs) # API Key (associated with project) self.apikey = validate_regex( apikey, *self.template_tokens['apikey']['regex']) if not self.apikey: msg = 'An invalid Resend API Key ' \ '({}) was specified.'.format(apikey) self.logger.warning(msg) raise TypeError(msg) result = is_email(from_email) if not result: msg = 'Invalid ~From~ email specified: {}'.format(from_email) self.logger.warning(msg) raise TypeError(msg) # Store email address self.from_email = result['full_email'] # Acquire Targets (To Emails) self.targets = list() # Acquire Carbon Copies self.cc = set() # Acquire Blind Carbon Copies self.bcc = set() # Validate recipients (to:) and drop bad ones: for recipient in parse_list(targets): result = is_email(recipient) if result: self.targets.append(result['full_email']) continue self.logger.warning( 'Dropped invalid email ' '({}) specified.'.format(recipient), ) # Validate recipients (cc:) and drop bad ones: for recipient in parse_list(cc): result = is_email(recipient) if result: self.cc.add(result['full_email']) continue self.logger.warning( 'Dropped invalid Carbon Copy email ' '({}) 
specified.'.format(recipient), ) # Validate recipients (bcc:) and drop bad ones: for recipient in parse_list(bcc): result = is_email(recipient) if result: self.bcc.add(result['full_email']) continue self.logger.warning( 'Dropped invalid Blind Carbon Copy email ' '({}) specified.'.format(recipient), ) if len(self.targets) == 0: # Notify ourselves self.targets.append(self.from_email) return @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.apikey, self.from_email) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Our URL parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) if len(self.cc) > 0: # Handle our Carbon Copy Addresses params['cc'] = ','.join(self.cc) if len(self.bcc) > 0: # Handle our Blind Carbon Copy Addresses params['bcc'] = ','.join(self.bcc) # a simple boolean check as to whether we display our target emails # or not has_targets = \ not (len(self.targets) == 1 and self.targets[0] == self.from_email) return '{schema}://{apikey}:{from_email}/{targets}?{params}'.format( schema=self.secure_protocol, apikey=self.pprint(self.apikey, privacy, safe=''), # never encode email since it plays a huge role in our hostname from_email=self.from_email, targets='' if not has_targets else '/'.join( [NotifyResend.quote(x, safe='') for x in self.targets]), params=NotifyResend.urlencode(params), ) def __len__(self): """ Returns the number of targets associated with this notification """ return len(self.targets) def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, **kwargs): """ Perform Resend Notification """ headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json', 'Authorization': 'Bearer {}'.format(self.apikey), } # error tracking (used for function return) has_error = False _payload = { 'from': self.from_email, # A subject is a requirement, so if none is specified we must # set a default with at least 1 character or Resend will deny # our request 'subject': title if title else self.default_empty_subject, 'text' if self.notify_format == NotifyFormat.TEXT else 'html': body, } if attach and self.attachment_support: attachments = [] # Send our attachments for no, attachment in enumerate(attach, start=1): # Perform some simple error checking if not attachment: # We could not access the attachment self.logger.error( 'Could not access Resend attachment {}.'.format( attachment.url(privacy=True))) return False try: attachments.append({ "content": attachment.base64(), "filename": attachment.name if attachment.name else f'file{no:03}.dat', "type": "application/octet-stream", "disposition": "attachment" }) except exception.AppriseException: # We could not access the attachment self.logger.error( 'Could not access Resend attachment {}.'.format( attachment.url(privacy=True))) return False self.logger.debug( 'Appending Resend attachment {}'.format( attachment.url(privacy=True))) # Append our attachments to the payload _payload.update({ 'attachments': attachments, }) targets = list(self.targets) while len(targets) > 0: target = targets.pop(0) # Create a copy of our template payload = _payload.copy() # unique cc/bcc list management cc = (self.cc - self.bcc - set([target])) bcc = (self.bcc - set([target])) # Set our target payload['to'] = target if len(cc): payload['cc'] = list(cc) if len(bcc): payload['bcc'] = list(bcc) 
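            # Worked example of the de-duplication above (addresses are
            # hypothetical): if self.cc is {'a@example.com', 'b@example.com',
            # 'c@example.com'}, self.bcc is {'b@example.com'} and the current
            # target is 'a@example.com', then cc resolves to
            # {'c@example.com'} (b@ is dropped because it is already blind
            # carbon copied, a@ because it is the direct recipient) while
            # bcc remains {'b@example.com'}.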
self.logger.debug('Resend POST URL: %s (cert_verify=%r)' % ( self.notify_url, self.verify_certificate, )) self.logger.debug('Resend Payload: %s', str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( self.notify_url, data=dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code not in ( requests.codes.ok, requests.codes.accepted): # We had a problem status_str = \ NotifyResend.http_response_code_lookup( r.status_code, RESEND_HTTP_ERROR_MAP) self.logger.warning( 'Failed to send Resend notification to {}: ' '{}{}error={}.'.format( target, status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure has_error = True continue else: self.logger.info( 'Sent Resend notification to {}.'.format(target)) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Resend ' 'notification to {}.'.format(target)) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure has_error = True continue return not has_error @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url) if not results: # We're done early as we couldn't load the results return results # Our URL looks like this: # {schema}://{apikey}:{from_email}/{targets} # # which actually equates to: # {schema}://{user}:{password}@{host}/{email1}/{email2}/etc.. # ^ ^ ^ # | | | # apikey -from addr- if not results.get('user'): # An API Key as not properly specified return None if not results.get('password'): # A From Email was not correctly specified return None # Prepare our API Key results['apikey'] = NotifyResend.unquote(results['user']) # Prepare our From Email Address results['from_email'] = '{}@{}'.format( NotifyResend.unquote(results['password']), NotifyResend.unquote(results['host']), ) # Acquire our targets results['targets'] = NotifyResend.split_path(results['fullpath']) # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyResend.parse_list(results['qsd']['to']) # Handle Carbon Copy Addresses if 'cc' in results['qsd'] and len(results['qsd']['cc']): results['cc'] = \ NotifyResend.parse_list(results['qsd']['cc']) # Handle Blind Carbon Copy Addresses if 'bcc' in results['qsd'] and len(results['qsd']['bcc']): results['bcc'] = \ NotifyResend.parse_list(results['qsd']['bcc']) return results apprise-1.9.3/apprise/plugins/revolt.py000066400000000000000000000346471477231770000202260ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. 
# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # Youll need your own Revolt Bot and a Channel Id for the notifications to # be sent in since Revolt does not support webhooks yet. # # This plugin will simply work using the url of: # revolt://BOT_TOKEN/CHANNEL_ID # # API Documentation: # - https://api.revolt.chat/swagger/index.html # import requests from json import dumps, loads from datetime import timedelta from datetime import datetime from datetime import timezone from .base import NotifyBase from ..common import NotifyImageSize from ..common import NotifyFormat from ..common import NotifyType from ..utils.parse import validate_regex, parse_list from ..locale import gettext_lazy as _ class NotifyRevolt(NotifyBase): """ A wrapper for Revolt Notifications """ # The default descriptive name associated with the Notification service_name = 'Revolt' # The services URL service_url = 'https://revolt.chat/' # The default secure protocol secure_protocol = 'revolt' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_revolt' # Revolt Channel Message notify_url = 'https://api.revolt.chat/' # Revolt supports attachments but doesn't support it here (for now) attachment_support = False # Allows the user to specify the NotifyImageSize object image_size = NotifyImageSize.XY_256 # Revolt is kind enough to return how many more requests we're allowed to # continue to make within it's header response as: # X-RateLimit-Reset: The epoc time (in seconds) we can expect our # rate-limit to be reset. # X-RateLimit-Remaining: an integer identifying how many requests we're # still allow to make. 
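    # Illustrative sketch of how these headers are applied in _send() below
    # (the numbers are hypothetical): if a response reports
    # X-RateLimit-Remaining: 0 and X-RateLimit-Reset-After: 2000 (ms), the
    # reset time is recorded as roughly now + 2 seconds, so the next call
    # throttles for about that long (plus the clock_skew safety net) before
    # posting again.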
request_rate_per_sec = 3 # Safety net clock_skew = timedelta(seconds=2) # The maximum allowable characters allowed in the body per message body_maxlen = 2000 # Title Maximum Length title_maxlen = 100 # Define object templates templates = ( '{schema}://{bot_token}/{targets}', ) # Defile out template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'bot_token': { 'name': _('Bot Token'), 'type': 'string', 'private': True, 'required': True, }, 'target_channel': { 'name': _('Channel ID'), 'type': 'string', 'map_to': 'targets', 'regex': (r'^[a-z0-9_-]+$', 'i'), 'private': True, 'required': True, }, 'targets': { 'name': _('Targets'), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'channel': { 'alias_of': 'targets', }, 'bot_token': { 'alias_of': 'bot_token', }, 'icon_url': { 'name': _('Icon URL'), 'type': 'string' }, 'url': { 'name': _('Embed URL'), 'type': 'string', 'map_to': 'link', }, 'to': { 'alias_of': 'targets', }, }) def __init__(self, bot_token, targets, icon_url=None, link=None, **kwargs): super().__init__(**kwargs) # Bot Token self.bot_token = validate_regex(bot_token) if not self.bot_token: msg = 'An invalid Revolt Bot Token ' \ '({}) was specified.'.format(bot_token) self.logger.warning(msg) raise TypeError(msg) # Parse our Channel IDs self.targets = [] for target in parse_list(targets): results = validate_regex( target, *self.template_tokens['target_channel']['regex']) if not results: self.logger.warning( 'Dropped invalid Revolt channel ({}) specified.' .format(target), ) continue # Add our target self.targets.append(target) # Image for Embed self.icon_url = icon_url # Url for embed title self.link = link # For Tracking Purposes self.ratelimit_reset = datetime.now(timezone.utc).replace(tzinfo=None) # Default to 1.0 self.ratelimit_remaining = 1.0 return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Revolt Notification """ if len(self.targets) == 0: self.logger.warning('There were not Revolt channels to notify.') return False payload = {} # Acquire image_url image_url = self.icon_url \ if self.icon_url else self.image_url(notify_type) if self.notify_format == NotifyFormat.MARKDOWN: payload['embeds'] = [{ 'title': None if not title else title[0:self.title_maxlen], 'description': body, # Our color associated with our notification 'colour': self.color(notify_type), 'replies': None }] if image_url: payload['embeds'][0]['icon_url'] = image_url if self.link: payload['embeds'][0]['url'] = self.link else: payload['content'] = \ body if not title else "{}\n{}".format(title, body) has_error = False channel_ids = list(self.targets) for channel_id in channel_ids: postokay, response = self._send(payload, channel_id) if not postokay: # Failed to send message has_error = True return not has_error def _send(self, payload, channel_id, retries=1, **kwargs): """ Wrapper to the requests (post) object """ headers = { 'User-Agent': self.app_id, 'X-Bot-Token': self.bot_token, 'Content-Type': 'application/json; charset=utf-8', 'Accept': 'application/json; charset=utf-8', } notify_url = '{0}channels/{1}/messages'.format( self.notify_url, channel_id ) self.logger.debug('Revolt POST URL: %s (cert_verify=%r)' % ( notify_url, self.verify_certificate )) self.logger.debug('Revolt Payload: %s' % str(payload)) # By default set wait to None wait = None now = datetime.now(timezone.utc).replace(tzinfo=None) if self.ratelimit_remaining <= 0.0: # Determine how long we should wait for or if we should wait at # 
all. This isn't fool-proof because we can't be sure the client # time (calling this script) is completely synced up with the # Discord server. One would hope we're on NTP and our clocks are # the same allowing this to role smoothly: if now < self.ratelimit_reset: # We need to throttle for the difference in seconds wait = abs( (self.ratelimit_reset - now + self.clock_skew) .total_seconds()) # Default content response object content = {} # Always call throttle before any remote server i/o is made; self.throttle(wait=wait) try: r = requests.post( notify_url, data=dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout ) try: content = loads(r.content) except (AttributeError, TypeError, ValueError): # ValueError = r.content is Unparsable # TypeError = r.content is None # AttributeError = r is None content = {} # Handle rate limiting (if specified) try: # Store our rate limiting (if provided) self.ratelimit_remaining = \ int(r.headers.get('X-RateLimit-Remaining')) self.ratelimit_reset = \ now + timedelta(seconds=(int( r.headers.get('X-RateLimit-Reset-After')) / 1000)) except (TypeError, ValueError): # This is returned if we could not retrieve this # information gracefully accept this state and move on pass if r.status_code not in ( requests.codes.ok, requests.codes.no_content): # Some details to debug by self.logger.debug('Response Details:\r\n{}'.format( content if content else r.content)) # We had a problem status_str = \ NotifyBase.http_response_code_lookup(r.status_code) self.logger.warning( 'Revolt request limit reached; ' 'instructed to throttle for %.3fs', abs((self.ratelimit_reset - now + self.clock_skew) .total_seconds())) if r.status_code == requests.codes.too_many_requests \ and retries > 0: # Try again return self._send( payload=payload, channel_id=channel_id, retries=retries - 1, **kwargs) self.logger.warning( 'Failed to send to Revolt notification: ' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) # Return; we're done return (False, content) else: self.logger.info('Sent Revolt notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occurred posting to Revolt.') self.logger.debug('Socket Exception: %s' % str(e)) return (False, content) return (True, content) @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.bot_token) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = {} if self.icon_url: params['icon_url'] = self.icon_url if self.link: params['url'] = self.link params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) return '{schema}://{bot_token}/{targets}/?{params}'.format( schema=self.secure_protocol, bot_token=self.pprint(self.bot_token, privacy, safe=''), targets='/'.join( [self.pprint(x, privacy, safe='') for x in self.targets]), params=NotifyRevolt.urlencode(params), ) def __len__(self): """ Returns the number of targets associated with this notification """ return 1 if not self.targets else len(self.targets) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. 
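        Illustrative sketch (token and channel values are placeholders):

            revolt://BOT_TOKEN/CHANNEL_ID/?url=https://example.com

        Per the logic below, the hostname becomes ``bot_token``, each path
        element (plus any ``to=`` or ``channel=`` entries) becomes a channel
        target, and ``url=`` is stored as the embed link, which also implies
        markdown formatting when no explicit ``format=`` is supplied.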
""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Store our bot token bot_token = NotifyRevolt.unquote(results['host']) # Now fetch the Channel IDs targets = NotifyRevolt.split_path(results['fullpath']) results['bot_token'] = bot_token results['targets'] = targets # Support the 'to' variable so that we can support rooms this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyRevolt.parse_list(results['qsd']['to']) # Support channel id on the URL string (if specified) if 'channel' in results['qsd']: results['targets'] += \ NotifyRevolt.parse_list(results['qsd']['channel']) # Support bot token on the URL string (if specified) if 'bot_token' in results['qsd']: results['bot_token'] = \ NotifyRevolt.unquote(results['qsd']['bot_token']) if 'icon_url' in results['qsd']: results['icon_url'] = \ NotifyRevolt.unquote(results['qsd']['icon_url']) if 'url' in results['qsd']: results['link'] = NotifyRevolt.unquote(results['qsd']['url']) if 'format' not in results['qsd'] and ( 'url' in results or 'icon_url' in results): # Markdown is implied results['format'] = NotifyFormat.MARKDOWN return results apprise-1.9.3/apprise/plugins/rocketchat.py000066400000000000000000000632651477231770000210400ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
import re import requests from json import loads from json import dumps from itertools import chain from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyImageSize from ..common import NotifyFormat from ..common import NotifyType from ..utils.parse import parse_list, parse_bool from ..locale import gettext_lazy as _ IS_CHANNEL = re.compile(r'^#(?P[A-Za-z0-9_-]+)$') IS_USER = re.compile(r'^@(?P[A-Za-z0-9._-]+)$') IS_ROOM_ID = re.compile(r'^(?P[A-Za-z0-9]+)$') # Extend HTTP Error Messages RC_HTTP_ERROR_MAP = { 400: 'Channel/RoomId is wrong format, or missing from server.', 401: 'Authentication tokens provided is invalid or missing.', } class RocketChatAuthMode: """ The Chat Authentication mode is detected """ # providing a webhook WEBHOOK = "webhook" # Support token submission TOKEN = "token" # Providing a username and password (default) BASIC = "basic" # Define our authentication modes ROCKETCHAT_AUTH_MODES = ( RocketChatAuthMode.WEBHOOK, RocketChatAuthMode.TOKEN, RocketChatAuthMode.BASIC, ) class NotifyRocketChat(NotifyBase): """ A wrapper for Notify Rocket.Chat Notifications """ # The default descriptive name associated with the Notification service_name = 'Rocket.Chat' # The services URL service_url = 'https://rocket.chat/' # The default protocol protocol = 'rocket' # The default secure protocol secure_protocol = 'rockets' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_rocketchat' # Allows the user to specify the NotifyImageSize object; this is supported # through the webhook image_size = NotifyImageSize.XY_128 # The title is not used title_maxlen = 0 # The maximum size of the message body_maxlen = 1000 # Default to markdown notify_format = NotifyFormat.MARKDOWN # Define object templates templates = ( '{schema}://{user}:{password}@{host}:{port}/{targets}', '{schema}://{user}:{password}@{host}/{targets}', '{schema}://{user}:{token}@{host}:{port}/{targets}', '{schema}://{user}:{token}@{host}/{targets}', '{schema}://{webhook}@{host}', '{schema}://{webhook}@{host}:{port}', '{schema}://{webhook}@{host}/{targets}', '{schema}://{webhook}@{host}:{port}/{targets}', ) # Define our template arguments template_tokens = dict(NotifyBase.template_tokens, **{ 'host': { 'name': _('Hostname'), 'type': 'string', 'required': True, }, 'port': { 'name': _('Port'), 'type': 'int', 'min': 1, 'max': 65535, }, 'user': { 'name': _('Username'), 'type': 'string', }, 'password': { 'name': _('Password'), 'type': 'string', 'private': True, }, 'token': { 'name': _('API Token'), 'map_to': 'password', 'private': True, }, 'webhook': { 'name': _('Webhook'), 'type': 'string', }, 'target_channel': { 'name': _('Target Channel'), 'type': 'string', 'prefix': '#', 'map_to': 'targets', }, 'target_user': { 'name': _('Target User'), 'type': 'string', 'prefix': '@', 'map_to': 'targets', }, 'target_room': { 'name': _('Target Room ID'), 'type': 'string', 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'mode': { 'name': _('Webhook Mode'), 'type': 'choice:string', 'values': ROCKETCHAT_AUTH_MODES, }, 'avatar': { 'name': _('Use Avatar'), 'type': 'bool', 'default': False, }, 'webhook': { 'alias_of': 'webhook', }, 'to': { 'alias_of': 'targets', }, }) def __init__(self, webhook=None, targets=None, mode=None, avatar=None, **kwargs): """ Initialize Notify Rocket.Chat Object """ super().__init__(**kwargs) # Set our 
schema self.schema = 'https' if self.secure else 'http' # Prepare our URL self.api_url = '%s://%s' % (self.schema, self.host) if isinstance(self.port, int): self.api_url += ':%d' % self.port # Initialize channels list self.channels = list() # Initialize room list self.rooms = list() # Initialize user list (webhook only) self.users = list() # Assign our webhook (if defined) self.webhook = webhook # Used to track token headers upon authentication (if successful) # This is only used if not on webhook mode self.headers = {} # Authentication mode self.mode = None \ if not isinstance(mode, str) \ else mode.lower() if self.mode and self.mode not in ROCKETCHAT_AUTH_MODES: msg = 'The authentication mode specified ({}) is invalid.'.format( mode) self.logger.warning(msg) raise TypeError(msg) # Detect our mode if it wasn't specified if not self.mode: if self.webhook is not None: # Just a username was specified, we treat this as a webhook self.mode = RocketChatAuthMode.WEBHOOK elif self.password and len(self.password) > 32: self.mode = RocketChatAuthMode.TOKEN else: self.mode = RocketChatAuthMode.BASIC self.logger.debug( "Auto-Detected Rocketchat Auth Mode: %s", self.mode) if self.mode in (RocketChatAuthMode.BASIC, RocketChatAuthMode.TOKEN) \ and not (self.user and self.password): # Username & Password is required for Rocket Chat to work msg = 'No Rocket.Chat {} was specified.'.format( 'user/pass combo' if self.mode == RocketChatAuthMode.BASIC else 'user/apikey') self.logger.warning(msg) raise TypeError(msg) elif self.mode == RocketChatAuthMode.WEBHOOK and not self.webhook: msg = 'No Rocket.Chat Incoming Webhook was specified.' self.logger.warning(msg) raise TypeError(msg) if self.mode == RocketChatAuthMode.TOKEN: # Set our headers for further communication self.headers.update({ 'X-User-Id': self.user, 'X-Auth-Token': self.password, }) # Validate recipients and drop bad ones: for recipient in parse_list(targets): result = IS_CHANNEL.match(recipient) if result: # store valid device self.channels.append(result.group('name')) continue result = IS_ROOM_ID.match(recipient) if result: # store valid room self.rooms.append(result.group('name')) continue result = IS_USER.match(recipient) if result: # store valid room self.users.append(result.group('name')) continue self.logger.warning( 'Dropped invalid channel/room/user ' '({}) specified.'.format(recipient), ) if self.mode == RocketChatAuthMode.BASIC and \ len(self.rooms) == 0 and len(self.channels) == 0: msg = 'No Rocket.Chat room and/or channels specified to notify.' self.logger.warning(msg) raise TypeError(msg) # Prepare our avatar setting # - if specified; that trumps all # - if not specified and we're dealing with a basic setup, the Avatar # is disabled by default. This is because if the account doesn't # have the bot flag set on it it won't work as documented here: # https://developer.rocket.chat/api/rest-api/endpoints\ # /team-collaboration-endpoints/chat/postmessage # - Otherwise if we're a webhook, we enable the avatar by default # (if not otherwise specified) since it will work nicely. # Place an avatar image to associate with our content if self.mode == RocketChatAuthMode.BASIC: self.avatar = False if avatar is None else avatar else: # self.mode == RocketChatAuthMode.WEBHOOK: self.avatar = True if avatar is None else avatar return @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. 
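        For example (values are illustrative), a secure basic-auth setup such
        as rockets://user:pass@chat.example.com would yield a tuple along the
        lines of ('rockets', 'chat.example.com', 443, 'user', 'pass'),
        whereas a webhook configuration substitutes the webhook token for the
        password element.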
""" return ( self.secure_protocol if self.secure else self.protocol, self.host, self.port if self.port else (443 if self.secure else 80), self.user, self.password if self.mode in ( RocketChatAuthMode.BASIC, RocketChatAuthMode.TOKEN) else self.webhook, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'avatar': 'yes' if self.avatar else 'no', 'mode': self.mode, } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Determine Authentication if self.mode in (RocketChatAuthMode.BASIC, RocketChatAuthMode.TOKEN): auth = '{user}:{password}@'.format( user=NotifyRocketChat.quote(self.user, safe=''), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), ) else: auth = '{user}{webhook}@'.format( user='{}:'.format(NotifyRocketChat.quote(self.user, safe='')) if self.user else '', webhook=self.pprint(self.webhook, privacy, mode=PrivacyMode.Secret, safe=''), ) default_port = 443 if self.secure else 80 return '{schema}://{auth}{hostname}{port}/{targets}/?{params}'.format( schema=self.secure_protocol if self.secure else self.protocol, auth=auth, # never encode hostname since we're expecting it to be a valid one hostname=self.host, port='' if self.port is None or self.port == default_port else ':{}'.format(self.port), targets='/'.join( [NotifyRocketChat.quote(x, safe='@#') for x in chain( # Channels are prefixed with a pound/hashtag symbol ['#{}'.format(x) for x in self.channels], # Rooms are as is self.rooms, # Users ['@{}'.format(x) for x in self.users], )]), params=NotifyRocketChat.urlencode(params), ) def __len__(self): """ Returns the number of targets associated with this notification """ targets = len(self.channels) + len(self.rooms) + len(self.users) return targets if targets > 0 else 1 def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ wrapper to _send since we can alert more then one channel """ # Call the _send_ function applicable to whatever mode we're in # - calls _send_webhook_notification if the mode variable is set # - calls _send_basic_notification if the mode variable is not set return getattr(self, '_send_{}_notification'.format( RocketChatAuthMode.WEBHOOK if self.mode == RocketChatAuthMode.WEBHOOK else RocketChatAuthMode.BASIC))( body=body, title=title, notify_type=notify_type, **kwargs) def _send_webhook_notification(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Sends a webhook notification """ # Our payload object payload = self._payload(body, title, notify_type) # Assemble our webhook URL path = 'hooks/{}'.format(self.webhook) # Build our list of channels/rooms/users (if any identified) targets = ['@{}'.format(u) for u in self.users] targets.extend(['#{}'.format(c) for c in self.channels]) targets.extend(['{}'.format(r) for r in self.rooms]) if len(targets) == 0: # We can take an early exit return self._send( payload, notify_type=notify_type, path=path, **kwargs) # Otherwise we want to iterate over each of the targets # Initiaize our error tracking has_error = False while len(targets): # Retrieve our target target = targets.pop(0) # Assign our channel/room/user payload['channel'] = target if not self._send( payload, notify_type=notify_type, path=path, **kwargs): # toggle flag has_error = True return not has_error def _send_basic_notification(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Authenticates with the server using a user/pass combo for 
notifications. """ # Track whether we authenticated okay if self.mode == RocketChatAuthMode.BASIC and not self.login(): return False # prepare JSON Object _payload = self._payload(body, title, notify_type) # Initiaize our error tracking has_error = False # Build our list of channels/rooms/users (if any identified) channels = ['@{}'.format(u) for u in self.users] channels.extend(['#{}'.format(c) for c in self.channels]) # Create a copy of our channels to notify against payload = _payload.copy() while len(channels) > 0: # Get Channel channel = channels.pop(0) payload['channel'] = channel if not self._send(payload, notify_type=notify_type, **kwargs): # toggle flag has_error = True # Create a copy of our room id's to notify against rooms = list(self.rooms) payload = _payload.copy() while len(rooms): # Get Room room = rooms.pop(0) payload['roomId'] = room if not self._send(payload, notify_type=notify_type, **kwargs): # toggle flag has_error = True if self.mode == RocketChatAuthMode.BASIC: # logout self.logout() return not has_error def _payload(self, body, title='', notify_type=NotifyType.INFO): """ Prepares a payload object """ # prepare JSON Object payload = { "text": body, } # apply our images if they're set to be displayed image_url = self.image_url(notify_type) if self.avatar and image_url: payload['avatar'] = image_url return payload def _send(self, payload, notify_type, path='api/v1/chat.postMessage', **kwargs): """ Perform Notify Rocket.Chat Notification """ api_url = '{}/{}'.format(self.api_url, path) self.logger.debug('Rocket.Chat POST URL: %s (cert_verify=%r)' % ( api_url, self.verify_certificate)) self.logger.debug('Rocket.Chat Payload: %s' % str(payload)) # Copy our existing headers headers = self.headers.copy() # Apply minimum headers headers.update({ 'User-Agent': self.app_id, 'Content-Type': 'application/json', }) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( api_url, data=dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyRocketChat.http_response_code_lookup( r.status_code, RC_HTTP_ERROR_MAP) self.logger.warning( 'Failed to send Rocket.Chat {}:notification: ' '{}{}error={}.'.format( self.mode, status_str, ', ' if status_str else '', r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.info( 'Sent Rocket.Chat {}:notification.'.format(self.mode)) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Rocket.Chat ' '{}:notification.'.format(self.mode)) self.logger.debug('Socket Exception: %s' % str(e)) # Return; we're done return False return True def login(self): """ login to our server """ payload = { 'username': self.user, 'password': self.password, } api_url = '{}/{}'.format(self.api_url, 'api/v1/login') try: r = requests.post( api_url, data=payload, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyRocketChat.http_response_code_lookup( r.status_code, RC_HTTP_ERROR_MAP) self.logger.warning( 'Failed to authenticate {} with Rocket.Chat: ' '{}{}error={}.'.format( self.user, status_str, ', ' if status_str else '', r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.debug('Rocket.Chat authentication successful') 
response = loads(r.content) if response.get('status') != "success": self.logger.warning( 'Could not authenticate {} with Rocket.Chat.'.format( self.user)) return False # Set our headers for further communication self.headers['X-Auth-Token'] = response.get( 'data', {'authToken': None}).get('authToken') self.headers['X-User-Id'] = response.get( 'data', {'userId': None}).get('userId') except (AttributeError, TypeError, ValueError): # Our response was not the JSON type we had expected it to be # - ValueError = r.content is Unparsable # - TypeError = r.content is None # - AttributeError = r is None self.logger.warning( 'A commuication error occurred authenticating {} on ' 'Rocket.Chat.'.format(self.user)) return False except requests.RequestException as e: self.logger.warning( 'A connection error occurred authenticating {} on ' 'Rocket.Chat.'.format(self.user)) self.logger.debug('Socket Exception: %s' % str(e)) return False return True def logout(self): """ logout of our server """ api_url = '{}/{}'.format(self.api_url, 'api/v1/logout') try: r = requests.post( api_url, headers=self.headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyRocketChat.http_response_code_lookup( r.status_code, RC_HTTP_ERROR_MAP) self.logger.warning( 'Failed to logoff {} from Rocket.Chat: ' '{}{}error={}.'.format( self.user, status_str, ', ' if status_str else '', r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.debug( 'Rocket.Chat log off successful; response %s.' % ( r.content)) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred logging off the ' 'Rocket.Chat server') self.logger.debug('Socket Exception: %s' % str(e)) return False return True @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ try: # Attempt to detect the webhook (if specified in the URL) # If no webhook is specified, then we just pass along as if nothing # happened. However if we do find a webhook, we want to rebuild our # URL without it since it conflicts with standard URLs. Support # %2F since that is a forward slash escaped # rocket://webhook@host # rocket://user:webhook@host match = re.match( r'^\s*(?P[^:]+://)((?P[^:]+):)?' 
r'(?P[a-z0-9]+(/|%2F)' r'[a-z0-9]+)\@(?P.+)$', url, re.I) except TypeError: # Not a string return None if match: # Re-assemble our URL without the webhook url = '{schema}{user}{url}'.format( schema=match.group('schema'), user='{}@'.format(match.group('user')) if match.group('user') else '', url=match.group('url'), ) results = NotifyBase.parse_url(url) if not results: # We're done early as we couldn't load the results return results if match: # store our webhook results['webhook'] = \ NotifyRocketChat.unquote(match.group('webhook')) # Take on the password too in the event we're in basic mode # We do not unquote() as this is done at a later state results['password'] = match.group('webhook') # Apply our targets results['targets'] = NotifyRocketChat.split_path(results['fullpath']) # The user may have forced the mode if 'mode' in results['qsd'] and len(results['qsd']['mode']): results['mode'] = \ NotifyRocketChat.unquote(results['qsd']['mode']) # avatar icon if 'avatar' in results['qsd'] and len(results['qsd']['avatar']): results['avatar'] = parse_bool(results['qsd'].get('avatar', True)) # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyRocketChat.parse_list(results['qsd']['to']) # The 'webhook' over-ride (if specified) if 'webhook' in results['qsd'] and len(results['qsd']['webhook']): results['webhook'] = \ NotifyRocketChat.unquote(results['qsd']['webhook']) return results apprise-1.9.3/apprise/plugins/rsyslog.py000066400000000000000000000301141477231770000203760ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import os import socket from .base import NotifyBase from ..common import NotifyType from ..utils.parse import parse_bool from ..locale import gettext_lazy as _ class syslog: """ Extrapoloated information from the syslog library so that this plugin would not be dependent on it. 
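    The numeric values below are intended to mirror the constants exposed by
    Python's standard ``syslog`` module (e.g. LOG_USER and LOG_INFO), so the
    facility and severity mappings defined further down line up with what the
    C library would otherwise provide.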
""" # Notification Categories LOG_KERN = 0 LOG_USER = 8 LOG_MAIL = 16 LOG_DAEMON = 24 LOG_AUTH = 32 LOG_SYSLOG = 40 LOG_LPR = 48 LOG_NEWS = 56 LOG_UUCP = 64 LOG_CRON = 72 LOG_LOCAL0 = 128 LOG_LOCAL1 = 136 LOG_LOCAL2 = 144 LOG_LOCAL3 = 152 LOG_LOCAL4 = 160 LOG_LOCAL5 = 168 LOG_LOCAL6 = 176 LOG_LOCAL7 = 184 # Notification Types LOG_INFO = 6 LOG_NOTICE = 5 LOG_WARNING = 4 LOG_CRIT = 2 class SyslogFacility: """ All of the supported facilities """ KERN = 'kern' USER = 'user' MAIL = 'mail' DAEMON = 'daemon' AUTH = 'auth' SYSLOG = 'syslog' LPR = 'lpr' NEWS = 'news' UUCP = 'uucp' CRON = 'cron' LOCAL0 = 'local0' LOCAL1 = 'local1' LOCAL2 = 'local2' LOCAL3 = 'local3' LOCAL4 = 'local4' LOCAL5 = 'local5' LOCAL6 = 'local6' LOCAL7 = 'local7' SYSLOG_FACILITY_MAP = { SyslogFacility.KERN: syslog.LOG_KERN, SyslogFacility.USER: syslog.LOG_USER, SyslogFacility.MAIL: syslog.LOG_MAIL, SyslogFacility.DAEMON: syslog.LOG_DAEMON, SyslogFacility.AUTH: syslog.LOG_AUTH, SyslogFacility.SYSLOG: syslog.LOG_SYSLOG, SyslogFacility.LPR: syslog.LOG_LPR, SyslogFacility.NEWS: syslog.LOG_NEWS, SyslogFacility.UUCP: syslog.LOG_UUCP, SyslogFacility.CRON: syslog.LOG_CRON, SyslogFacility.LOCAL0: syslog.LOG_LOCAL0, SyslogFacility.LOCAL1: syslog.LOG_LOCAL1, SyslogFacility.LOCAL2: syslog.LOG_LOCAL2, SyslogFacility.LOCAL3: syslog.LOG_LOCAL3, SyslogFacility.LOCAL4: syslog.LOG_LOCAL4, SyslogFacility.LOCAL5: syslog.LOG_LOCAL5, SyslogFacility.LOCAL6: syslog.LOG_LOCAL6, SyslogFacility.LOCAL7: syslog.LOG_LOCAL7, } SYSLOG_FACILITY_RMAP = { syslog.LOG_KERN: SyslogFacility.KERN, syslog.LOG_USER: SyslogFacility.USER, syslog.LOG_MAIL: SyslogFacility.MAIL, syslog.LOG_DAEMON: SyslogFacility.DAEMON, syslog.LOG_AUTH: SyslogFacility.AUTH, syslog.LOG_SYSLOG: SyslogFacility.SYSLOG, syslog.LOG_LPR: SyslogFacility.LPR, syslog.LOG_NEWS: SyslogFacility.NEWS, syslog.LOG_UUCP: SyslogFacility.UUCP, syslog.LOG_CRON: SyslogFacility.CRON, syslog.LOG_LOCAL0: SyslogFacility.LOCAL0, syslog.LOG_LOCAL1: SyslogFacility.LOCAL1, syslog.LOG_LOCAL2: SyslogFacility.LOCAL2, syslog.LOG_LOCAL3: SyslogFacility.LOCAL3, syslog.LOG_LOCAL4: SyslogFacility.LOCAL4, syslog.LOG_LOCAL5: SyslogFacility.LOCAL5, syslog.LOG_LOCAL6: SyslogFacility.LOCAL6, syslog.LOG_LOCAL7: SyslogFacility.LOCAL7, } # Used as a lookup when handling the Apprise -> Syslog Mapping SYSLOG_PUBLISH_MAP = { NotifyType.INFO: syslog.LOG_INFO, NotifyType.SUCCESS: syslog.LOG_NOTICE, NotifyType.FAILURE: syslog.LOG_CRIT, NotifyType.WARNING: syslog.LOG_WARNING, } class NotifyRSyslog(NotifyBase): """ A wrapper for Remote Syslog Notifications """ # The default descriptive name associated with the Notification service_name = 'Remote Syslog' # The services URL service_url = 'https://tools.ietf.org/html/rfc5424' # The default protocol protocol = 'rsyslog' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_rsyslog' # Disable throttle rate for RSyslog requests request_rate_per_sec = 0 # Define object templates templates = ( '{schema}://{host}', '{schema}://{host}:{port}', '{schema}://{host}/{facility}', '{schema}://{host}:{port}/{facility}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'facility': { 'name': _('Facility'), 'type': 'choice:string', 'values': [k for k in SYSLOG_FACILITY_MAP.keys()], 'default': SyslogFacility.USER, 'required': True, }, 'host': { 'name': _('Hostname'), 'type': 'string', 'required': True, }, 'port': { 'name': _('Port'), 'type': 'int', 'min': 1, 'max': 65535, 'default': 514, }, }) # Define 
our template arguments template_args = dict(NotifyBase.template_args, **{ 'facility': { # We map back to the same element defined in template_tokens 'alias_of': 'facility', }, 'logpid': { 'name': _('Log PID'), 'type': 'bool', 'default': True, 'map_to': 'log_pid', }, }) def __init__(self, facility=None, log_pid=True, **kwargs): """ Initialize RSyslog Object """ super().__init__(**kwargs) if facility: try: self.facility = SYSLOG_FACILITY_MAP[facility] except KeyError: msg = 'An invalid syslog facility ' \ '({}) was specified.'.format(facility) self.logger.warning(msg) raise TypeError(msg) else: self.facility = \ SYSLOG_FACILITY_MAP[ self.template_tokens['facility']['default']] # Include PID with each message. self.log_pid = log_pid return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform RSyslog Notification """ if title: # Format title body = '{}: {}'.format(title, body) # Always call throttle before any remote server i/o is made self.throttle() host = self.host port = self.port if self.port \ else self.template_tokens['port']['default'] if self.log_pid: payload = '<%d>- %d - %s' % ( SYSLOG_PUBLISH_MAP[notify_type] + self.facility * 8, os.getpid(), body) else: payload = '<%d>- %s' % ( SYSLOG_PUBLISH_MAP[notify_type] + self.facility * 8, body) # send UDP packet to upstream server self.logger.debug( 'RSyslog Host: %s:%d/%s', host, port, SYSLOG_FACILITY_RMAP[self.facility]) self.logger.debug('RSyslog Payload: %s' % str(payload)) # our sent bytes sent = 0 try: sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.settimeout(self.socket_connect_timeout) sent = sock.sendto(payload.encode('utf-8'), (host, port)) sock.close() except socket.gaierror as e: self.logger.warning( 'A connection error occurred sending RSyslog ' 'notification to %s:%d/%s', host, port, SYSLOG_FACILITY_RMAP[self.facility] ) self.logger.debug('Socket Exception: %s' % str(e)) return False except socket.timeout as e: self.logger.warning( 'A connection timeout occurred sending RSyslog ' 'notification to %s:%d/%s', host, port, SYSLOG_FACILITY_RMAP[self.facility] ) self.logger.debug('Socket Exception: %s' % str(e)) return False if sent < len(payload): self.logger.warning( 'RSyslog sent %d byte(s) but intended to send %d byte(s)', sent, len(payload)) return False self.logger.info('Sent RSyslog notification.') return True @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.protocol, self.host, self.port if self.port else self.template_tokens['port']['default'], ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'logpid': 'yes' if self.log_pid else 'no', } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) return '{schema}://{hostname}{port}/{facility}/?{params}'.format( schema=self.protocol, hostname=NotifyRSyslog.quote(self.host, safe=''), port='' if self.port is None or self.port == self.template_tokens['port']['default'] else ':{}'.format(self.port), facility=self.template_tokens['facility']['default'] if self.facility not in SYSLOG_FACILITY_RMAP else SYSLOG_FACILITY_RMAP[self.facility], params=NotifyRSyslog.urlencode(params), ) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. 
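        Illustrative sketch (the hostname is a placeholder):

            rsyslog://syslog.example.com:514/local5?logpid=no

        would resolve the facility to ``local5``, keep the default port of
        514, and disable PID logging. As the logic below shows, the facility
        may be shortened (e.g. ``u`` matches ``user``) and a ``facility=``
        query argument overrides whatever appears in the path.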
""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results tokens = [] # Get our path values tokens.extend(NotifyRSyslog.split_path(results['fullpath'])) # Initialization facility = None if tokens: # Store the last entry as the facility facility = tokens[-1].lower() # However if specified on the URL, that will over-ride what was # identified if 'facility' in results['qsd'] and len(results['qsd']['facility']): facility = results['qsd']['facility'].lower() if facility and facility not in SYSLOG_FACILITY_MAP: # Find first match; if no match is found we set the result # to the matching key. This allows us to throw a TypeError # during the __init__() call. The benifit of doing this # check here is if we do have a valid match, we can support # short form matches like 'u' which will match against user facility = next((f for f in SYSLOG_FACILITY_MAP.keys() if f.startswith(facility)), facility) # Save facility if set if facility: results['facility'] = facility # Include PID as part of the message logged results['log_pid'] = parse_bool( results['qsd'].get( 'logpid', NotifyRSyslog.template_args['logpid']['default'])) return results apprise-1.9.3/apprise/plugins/ryver.py000066400000000000000000000275011477231770000200510ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # To use this plugin, you need to first generate a webhook. 
# When you're complete, you will recieve a URL that looks something like this: # https://apprise.ryver.com/application/webhook/ckhrjW8w672m6HG # ^ ^ # | | # These are important <---^----------------------------------------^ # import re import requests from json import dumps from .base import NotifyBase from ..common import NotifyImageSize from ..common import NotifyType from ..utils.parse import parse_bool, validate_regex from ..locale import gettext_lazy as _ class RyverWebhookMode: """ Ryver supports to webhook modes """ SLACK = 'slack' RYVER = 'ryver' # Define the types in a list for validation purposes RYVER_WEBHOOK_MODES = ( RyverWebhookMode.SLACK, RyverWebhookMode.RYVER, ) class NotifyRyver(NotifyBase): """ A wrapper for Ryver Notifications """ # The default descriptive name associated with the Notification service_name = 'Ryver' # The services URL service_url = 'https://ryver.com/' # The default secure protocol secure_protocol = 'ryver' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_ryver' # Allows the user to specify the NotifyImageSize object image_size = NotifyImageSize.XY_72 # The maximum allowable characters allowed in the body per message body_maxlen = 1000 # Define object templates templates = ( '{schema}://{organization}/{token}', '{schema}://{botname}@{organization}/{token}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'organization': { 'name': _('Organization'), 'type': 'string', 'required': True, 'regex': (r'^[A-Z0-9_-]{3,32}$', 'i'), }, 'token': { 'name': _('Token'), 'type': 'string', 'required': True, 'private': True, 'regex': (r'^[A-Z0-9]{15}$', 'i'), }, 'botname': { 'name': _('Bot Name'), 'type': 'string', 'map_to': 'user', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'mode': { 'name': _('Webhook Mode'), 'type': 'choice:string', 'values': RYVER_WEBHOOK_MODES, 'default': RyverWebhookMode.RYVER, }, 'image': { 'name': _('Include Image'), 'type': 'bool', 'default': True, 'map_to': 'include_image', }, }) def __init__(self, organization, token, mode=RyverWebhookMode.RYVER, include_image=True, **kwargs): """ Initialize Ryver Object """ super().__init__(**kwargs) # API Token (associated with project) self.token = validate_regex( token, *self.template_tokens['token']['regex']) if not self.token: msg = 'An invalid Ryver API Token ' \ '({}) was specified.'.format(token) self.logger.warning(msg) raise TypeError(msg) # Organization (associated with project) self.organization = validate_regex( organization, *self.template_tokens['organization']['regex']) if not self.organization: msg = 'An invalid Ryver Organization ' \ '({}) was specified.'.format(organization) self.logger.warning(msg) raise TypeError(msg) # Store our webhook mode self.mode = None \ if not isinstance(mode, str) else mode.lower() if self.mode not in RYVER_WEBHOOK_MODES: msg = 'The Ryver webhook mode specified ({}) is invalid.' 
\ .format(mode) self.logger.warning(msg) raise TypeError(msg) # Place an image inline with the message body self.include_image = include_image # Slack formatting requirements are defined here which Ryver supports: # https://api.slack.com/docs/message-formatting self._re_formatting_map = { # New lines must become the string version r'\r\*\n': '\\n', # Escape other special characters r'&': '&', r'<': '<', r'>': '>', } # Iterate over above list and store content accordingly self._re_formatting_rules = re.compile( r'(' + '|'.join(self._re_formatting_map.keys()) + r')', re.IGNORECASE, ) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Ryver Notification """ headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json', } if self.mode == RyverWebhookMode.SLACK: # Perform Slack formatting title = self._re_formatting_rules.sub( # pragma: no branch lambda x: self._re_formatting_map[x.group()], title, ) body = self._re_formatting_rules.sub( # pragma: no branch lambda x: self._re_formatting_map[x.group()], body, ) url = 'https://{}.ryver.com/application/webhook/{}'.format( self.organization, self.token, ) # prepare JSON Object payload = { 'body': body if not title else '**{}**\r\n{}'.format(title, body), 'createSource': { 'displayName': self.user, 'avatar': None, }, } # Acquire our image url if configured to do so image_url = None if not self.include_image else \ self.image_url(notify_type) if image_url: payload['createSource']['avatar'] = image_url self.logger.debug('Ryver POST URL: %s (cert_verify=%r)' % ( url, self.verify_certificate, )) self.logger.debug('Ryver Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( url, data=dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyBase.http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send Ryver notification: ' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.info('Sent Ryver notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Ryver:%s ' % ( self.organization) + 'notification.' ) self.logger.debug('Socket Exception: %s' % str(e)) return False return True @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.organization, self.token) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
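        For instance (organization and token values are placeholders), an
        organization of ``apprise`` with a bot name of ``notify`` would
        produce something along the lines of:

            ryver://notify@apprise/T0K3NT0K3NT0K3N/?image=yes&mode=ryver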
""" # Define any URL parameters params = { 'image': 'yes' if self.include_image else 'no', 'mode': self.mode, } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Determine if there is a botname present botname = '' if self.user: botname = '{botname}@'.format( botname=NotifyRyver.quote(self.user, safe=''), ) return '{schema}://{botname}{organization}/{token}/?{params}'.format( schema=self.secure_protocol, botname=botname, organization=NotifyRyver.quote(self.organization, safe=''), token=self.pprint(self.token, privacy, safe=''), params=NotifyRyver.urlencode(params), ) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # The first token is stored in the hostname results['organization'] = NotifyRyver.unquote(results['host']) # Now fetch the remaining tokens try: results['token'] = \ NotifyRyver.split_path(results['fullpath'])[0] except IndexError: # no token results['token'] = None # Retrieve the mode results['mode'] = results['qsd'].get('mode', RyverWebhookMode.RYVER) # use image= for consistency with the other plugins results['include_image'] = \ parse_bool(results['qsd'].get('image', True)) return results @staticmethod def parse_native_url(url): """ Support https://RYVER_ORG.ryver.com/application/webhook/TOKEN """ result = re.match( r'^https?://(?P[A-Z0-9_-]+)\.ryver\.com/application/webhook/' r'(?P[A-Z0-9]+)/?' r'(?P\?.+)?$', url, re.I) if result: return NotifyRyver.parse_url( '{schema}://{org}/{webhook_token}/{params}'.format( schema=NotifyRyver.secure_protocol, org=result.group('org'), webhook_token=result.group('webhook_token'), params='' if not result.group('params') else result.group('params'))) return None apprise-1.9.3/apprise/plugins/sendgrid.py000066400000000000000000000432471477231770000205060ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # You will need an API Key for this plugin to work. 
# From the Settings -> API Keys you can click "Create API Key" if you don't # have one already. The key must have at least the "Mail Send" permission # to work. # # The schema to use the plugin looks like this: # {schema}://{apikey}:{from_email} # # Your {from_email} must be comprissed of your Sendgrid Authenticated # Domain. The same domain must have 'Link Branding' turned on as well or it # will not work. This can be seen from Settings -> Sender Authentication. # If you're (SendGrid) verified domain is example.com, then your schema may # look something like this: # Simple API Reference: # - https://sendgrid.com/docs/API_Reference/Web_API_v3/index.html # - https://sendgrid.com/docs/ui/sending-email/\ # how-to-send-an-email-with-dynamic-transactional-templates/ import requests from json import dumps from .base import NotifyBase from .. import exception from ..common import NotifyFormat from ..common import NotifyType from ..utils.parse import parse_list, is_email, validate_regex from ..locale import gettext_lazy as _ # Extend HTTP Error Messages SENDGRID_HTTP_ERROR_MAP = { 401: 'Unauthorized - You do not have authorization to make the request.', 413: 'Payload To Large - The JSON payload you have included in your ' 'request is too large.', 429: 'Too Many Requests - The number of requests you have made exceeds ' 'SendGrid’s rate limitations.', } class NotifySendGrid(NotifyBase): """ A wrapper for Notify SendGrid Notifications """ # The default descriptive name associated with the Notification service_name = 'SendGrid' # The services URL service_url = 'https://sendgrid.com' # The default secure protocol secure_protocol = 'sendgrid' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_sendgrid' # Default to markdown notify_format = NotifyFormat.HTML # The default Email API URL to use notify_url = 'https://api.sendgrid.com/v3/mail/send' # Support attachments attachment_support = True # Allow 300 requests per minute. # 60/300 = 0.2 request_rate_per_sec = 0.2 # The default subject to use if one isn't specified. 
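    # (send() falls back to this value whenever no notification title is
    # supplied, since the SendGrid payload always carries a subject field.)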
default_empty_subject = '' # Define object templates templates = ( '{schema}://{apikey}:{from_email}', '{schema}://{apikey}:{from_email}/{targets}', ) # Define our template arguments template_tokens = dict(NotifyBase.template_tokens, **{ 'apikey': { 'name': _('API Key'), 'type': 'string', 'private': True, 'required': True, 'regex': (r'^[A-Z0-9._-]+$', 'i'), }, 'from_email': { 'name': _('Source Email'), 'type': 'string', 'required': True, }, 'target_email': { 'name': _('Target Email'), 'type': 'string', 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'cc': { 'name': _('Carbon Copy'), 'type': 'list:string', }, 'bcc': { 'name': _('Blind Carbon Copy'), 'type': 'list:string', }, 'template': { # Template ID # The template ID is 64 characters with one dash (d-uuid) 'name': _('Template'), 'type': 'string', }, }) # Support Template Dynamic Variables (Substitutions) template_kwargs = { 'template_data': { 'name': _('Template Data'), 'prefix': '+', }, } def __init__(self, apikey, from_email, targets=None, cc=None, bcc=None, template=None, template_data=None, **kwargs): """ Initialize Notify SendGrid Object """ super().__init__(**kwargs) # API Key (associated with project) self.apikey = validate_regex( apikey, *self.template_tokens['apikey']['regex']) if not self.apikey: msg = 'An invalid SendGrid API Key ' \ '({}) was specified.'.format(apikey) self.logger.warning(msg) raise TypeError(msg) result = is_email(from_email) if not result: msg = 'Invalid ~From~ email specified: {}'.format(from_email) self.logger.warning(msg) raise TypeError(msg) # Store email address self.from_email = result['full_email'] # Acquire Targets (To Emails) self.targets = list() # Acquire Carbon Copies self.cc = set() # Acquire Blind Carbon Copies self.bcc = set() # Now our dynamic template (if defined) self.template = template # Now our dynamic template data (if defined) self.template_data = template_data \ if isinstance(template_data, dict) else {} # Validate recipients (to:) and drop bad ones: for recipient in parse_list(targets): result = is_email(recipient) if result: self.targets.append(result['full_email']) continue self.logger.warning( 'Dropped invalid email ' '({}) specified.'.format(recipient), ) # Validate recipients (cc:) and drop bad ones: for recipient in parse_list(cc): result = is_email(recipient) if result: self.cc.add(result['full_email']) continue self.logger.warning( 'Dropped invalid Carbon Copy email ' '({}) specified.'.format(recipient), ) # Validate recipients (bcc:) and drop bad ones: for recipient in parse_list(bcc): result = is_email(recipient) if result: self.bcc.add(result['full_email']) continue self.logger.warning( 'Dropped invalid Blind Carbon Copy email ' '({}) specified.'.format(recipient), ) if len(self.targets) == 0: # Notify ourselves self.targets.append(self.from_email) return @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.apikey, self.from_email) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
""" # Our URL parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) if len(self.cc) > 0: # Handle our Carbon Copy Addresses params['cc'] = ','.join(self.cc) if len(self.bcc) > 0: # Handle our Blind Carbon Copy Addresses params['bcc'] = ','.join(self.bcc) if self.template: # Handle our Template ID if if was specified params['template'] = self.template # Append our template_data into our parameter list params.update( {'+{}'.format(k): v for k, v in self.template_data.items()}) # a simple boolean check as to whether we display our target emails # or not has_targets = \ not (len(self.targets) == 1 and self.targets[0] == self.from_email) return '{schema}://{apikey}:{from_email}/{targets}?{params}'.format( schema=self.secure_protocol, apikey=self.pprint(self.apikey, privacy, safe=''), # never encode email since it plays a huge role in our hostname from_email=self.from_email, targets='' if not has_targets else '/'.join( [NotifySendGrid.quote(x, safe='') for x in self.targets]), params=NotifySendGrid.urlencode(params), ) def __len__(self): """ Returns the number of targets associated with this notification """ return len(self.targets) def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, **kwargs): """ Perform SendGrid Notification """ headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json', 'Authorization': 'Bearer {}'.format(self.apikey), } # error tracking (used for function return) has_error = False # A Simple Email Payload Template _payload = { 'personalizations': [{ # Placeholder 'to': [{'email': None}], }], 'from': { 'email': self.from_email, }, # A subject is a requirement, so if none is specified we must # set a default with at least 1 character or SendGrid will deny # our request 'subject': title if title else self.default_empty_subject, 'content': [{ 'type': 'text/plain' if self.notify_format == NotifyFormat.TEXT else 'text/html', 'value': body, }], } if attach and self.attachment_support: attachments = [] # Send our attachments for no, attachment in enumerate(attach, start=1): # Perform some simple error checking if not attachment: # We could not access the attachment self.logger.error( 'Could not access SendGrid attachment {}.'.format( attachment.url(privacy=True))) return False try: attachments.append({ "content": attachment.base64(), "filename": attachment.name if attachment.name else f'file{no:03}.dat', "type": "application/octet-stream", "disposition": "attachment" }) except exception.AppriseException: # We could not access the attachment self.logger.error( 'Could not access SendGrid attachment {}.'.format( attachment.url(privacy=True))) return False self.logger.debug( 'Appending SendGrid attachment {}'.format( attachment.url(privacy=True))) # Append our attachments to the payload _payload.update({ 'attachments': attachments, }) if self.template: _payload['template_id'] = self.template if self.template_data: _payload['personalizations'][0]['dynamic_template_data'] = \ {k: v for k, v in self.template_data.items()} targets = list(self.targets) while len(targets) > 0: target = targets.pop(0) # Create a copy of our template payload = _payload.copy() # the cc, bcc, to field must be unique or SendMail will fail, the # below code prepares this by ensuring the target isn't in the cc # list or bcc list. 
It also makes sure the cc list does not contain # any of the bcc entries cc = (self.cc - self.bcc - set([target])) bcc = (self.bcc - set([target])) # Set our target payload['personalizations'][0]['to'][0]['email'] = target if len(cc): payload['personalizations'][0]['cc'] = \ [{'email': email} for email in cc] if len(bcc): payload['personalizations'][0]['bcc'] = \ [{'email': email} for email in bcc] self.logger.debug('SendGrid POST URL: %s (cert_verify=%r)' % ( self.notify_url, self.verify_certificate, )) self.logger.debug('SendGrid Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( self.notify_url, data=dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code not in ( requests.codes.ok, requests.codes.accepted): # We had a problem status_str = \ NotifySendGrid.http_response_code_lookup( r.status_code, SENDGRID_HTTP_ERROR_MAP) self.logger.warning( 'Failed to send SendGrid notification to {}: ' '{}{}error={}.'.format( target, status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure has_error = True continue else: self.logger.info( 'Sent SendGrid notification to {}.'.format(target)) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending SendGrid ' 'notification to {}.'.format(target)) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure has_error = True continue return not has_error @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url) if not results: # We're done early as we couldn't load the results return results # Our URL looks like this: # {schema}://{apikey}:{from_email}/{targets} # # which actually equates to: # {schema}://{user}:{password}@{host}/{email1}/{email2}/etc.. # ^ ^ ^ # | | | # apikey -from addr- if not results.get('user'): # An API Key as not properly specified return None if not results.get('password'): # A From Email was not correctly specified return None # Prepare our API Key results['apikey'] = NotifySendGrid.unquote(results['user']) # Prepare our From Email Address results['from_email'] = '{}@{}'.format( NotifySendGrid.unquote(results['password']), NotifySendGrid.unquote(results['host']), ) # Acquire our targets results['targets'] = NotifySendGrid.split_path(results['fullpath']) # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifySendGrid.parse_list(results['qsd']['to']) # Handle Carbon Copy Addresses if 'cc' in results['qsd'] and len(results['qsd']['cc']): results['cc'] = \ NotifySendGrid.parse_list(results['qsd']['cc']) # Handle Blind Carbon Copy Addresses if 'bcc' in results['qsd'] and len(results['qsd']['bcc']): results['bcc'] = \ NotifySendGrid.parse_list(results['qsd']['bcc']) # Handle Blind Carbon Copy Addresses if 'template' in results['qsd'] and len(results['qsd']['template']): results['template'] = \ NotifySendGrid.unquote(results['qsd']['template']) # Add any template substitutions results['template_data'] = results['qsd+'] return results apprise-1.9.3/apprise/plugins/serverchan.py000066400000000000000000000136461477231770000210470ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. 
# Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import re import requests from ..common import NotifyType from .base import NotifyBase from ..utils.parse import validate_regex from ..locale import gettext_lazy as _ # Register at https://sct.ftqq.com/ # - do as the page describe and you will get the token # Syntax: # schan://{access_token}/ class NotifyServerChan(NotifyBase): """ A wrapper for ServerChan Notifications """ # The default descriptive name associated with the Notification service_name = 'ServerChan' # The services URL service_url = 'https://sct.ftqq.com/' # All notification requests are secure secure_protocol = 'schan' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_serverchan' # ServerChan API notify_url = 'https://sctapi.ftqq.com/{token}.send' # Define object templates templates = ( '{schema}://{token}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'token': { 'name': _('Token'), 'type': 'string', 'private': True, 'required': True, 'regex': (r'^[a-z0-9-]+$', 'i'), }, }) def __init__(self, token, **kwargs): """ Initialize ServerChan Object """ super().__init__(**kwargs) # Token (associated with project) self.token = validate_regex( token, *self.template_tokens['token']['regex']) if not self.token: msg = 'An invalid ServerChan API Token ' \ '({}) was specified.'.format(token) self.logger.warning(msg) raise TypeError(msg) def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform ServerChan Notification """ payload = { 'title': title, 'desp': body, } # Our Notification URL notify_url = self.notify_url.format(token=self.token) # Some Debug Logging self.logger.debug('ServerChan URL: {} (cert_verify={})'.format( notify_url, self.verify_certificate)) self.logger.debug('ServerChan Payload: {}'.format(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( notify_url, data=payload, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyServerChan.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to send ServerChan notification: ' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) 
self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) return False else: self.logger.info('Sent ServerChan notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occured sending ServerChan ' 'notification.' ) self.logger.debug('Socket Exception: %s' % str(e)) return False return True @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.token) def url(self, privacy=False): """ Returns the URL built dynamically based on specified arguments. """ return '{schema}://{token}'.format( schema=self.secure_protocol, token=self.pprint(self.token, privacy, safe='')) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to substantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't parse the URL return results pattern = 'schan://([a-zA-Z0-9]+)/' + \ ('?' if not url.endswith('/') else '') result = re.match(pattern, url) results['token'] = result.group(1) if result else '' return results apprise-1.9.3/apprise/plugins/ses.py000066400000000000000000001023601477231770000174710ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # API Information: # - https://docs.aws.amazon.com/ses/latest/APIReference/API_SendRawEmail.html # # AWS Credentials (access_key and secret_access_key) # - https://docs.aws.amazon.com/sdk-for-java/v1/developer-guide/\ # setup-credentials.html # - https://docs.aws.amazon.com/toolkit-for-eclipse/v1/user-guide/\ # setup-credentials.html # # Other systems write these credentials to: # - ~/.aws/credentials on Linux, macOS, or Unix # - C:\Users\USERNAME\.aws\credentials on Windows # # # To get A users access key ID and secret access key # # 1. Open the IAM console: https://console.aws.amazon.com/iam/home # 2. On the navigation menu, choose Users. # 3. Choose your IAM user name (not the check box). # 4. Open the Security credentials tab, and then choose: # Create Access key - Programmatic access # 5. 
To see the new access key, choose Show. Your credentials resemble # the following: # Access key ID: AKIAIOSFODNN7EXAMPLE # Secret access key: wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY # # To download the key pair, choose Download .csv file. Store the keys # The account requries this permssion to 'SES v2 : SendEmail' in order to # work # # To get the root users account (if you're logged in as that) you can # visit: https://console.aws.amazon.com/iam/home#/\ # security_credentials$access_key # # This information is vital to work with SES # To use/test the service, i logged into the portal via: # - https://portal.aws.amazon.com # # Go to the dashboard of the Amazon SES (Simple Email Service) # 1. You must have a verified identity; click on that option and create one # if you don't already have one. Until it's verified, you won't be able to # do the next step. # 2. From here you'll be able to retrieve your ARN associated with your # identity you want Apprise to send emails on behalf. It might look # something like: # arn:aws:ses:us-east-2:133216123003:identity/user@example.com # # This is your ARN (Amazon Record Name) # # import re import hmac import base64 import requests from hashlib import sha256 from datetime import datetime from datetime import timezone from collections import OrderedDict from xml.etree import ElementTree from email.mime.text import MIMEText from email.mime.application import MIMEApplication from email.mime.multipart import MIMEMultipart from email.utils import formataddr from email.header import Header from urllib.parse import quote from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyFormat from ..common import NotifyType from ..utils.parse import parse_emails, validate_regex, is_email from ..locale import gettext_lazy as _ # Our Regin Identifier # support us-gov-west-1 syntax as well IS_REGION = re.compile( r'^\s*(?P[a-z]{2})-(?P[a-z-]+?)-(?P[0-9]+)\s*$', re.I) # Extend HTTP Error Messages AWS_HTTP_ERROR_MAP = { 403: 'Unauthorized - Invalid Access/Secret Key Combination.', } class NotifySES(NotifyBase): """ A wrapper for AWS SES (Amazon Simple Email Service) """ # The default descriptive name associated with the Notification service_name = 'AWS Simple Email Service (SES)' # The services URL service_url = 'https://aws.amazon.com/ses/' # The default secure protocol secure_protocol = 'ses' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_ses' # Support attachments attachment_support = True # AWS is pretty good for handling data load so request limits # can occur in much shorter bursts request_rate_per_sec = 2.5 # Default Notify Format notify_format = NotifyFormat.HTML # Define object templates templates = ( '{schema}://{from_email}/{access_key_id}/{secret_access_key}/' '{region}/{targets}', '{schema}://{from_email}/{access_key_id}/{secret_access_key}/' '{region}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'from_email': { 'name': _('From Email'), 'type': 'string', 'map_to': 'from_addr', 'required': True, }, 'access_key_id': { 'name': _('Access Key ID'), 'type': 'string', 'private': True, 'required': True, }, 'secret_access_key': { 'name': _('Secret Access Key'), 'type': 'string', 'private': True, 'required': True, }, 'region': { 'name': _('Region'), 'type': 'string', 'regex': (r'^[a-z]{2}-[a-z-]+?-[0-9]+$', 'i'), 'required': True, 'map_to': 'region_name', }, 'targets': { 'name': _('Target Emails'), 'type': 'list:string', }, 
}) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'from': { 'alias_of': 'from_email', }, 'reply': { 'name': _('Reply To Email'), 'type': 'string', 'map_to': 'reply_to', }, 'name': { 'name': _('From Name'), 'type': 'string', 'map_to': 'from_name', }, 'cc': { 'name': _('Carbon Copy'), 'type': 'list:string', }, 'bcc': { 'name': _('Blind Carbon Copy'), 'type': 'list:string', }, 'access': { 'alias_of': 'access_key_id', }, 'secret': { 'alias_of': 'secret_access_key', }, 'region': { 'alias_of': 'region', }, }) def __init__(self, access_key_id, secret_access_key, region_name, reply_to=None, from_addr=None, from_name=None, targets=None, cc=None, bcc=None, **kwargs): """ Initialize Notify AWS SES Object """ super().__init__(**kwargs) # Store our AWS API Access Key self.aws_access_key_id = validate_regex(access_key_id) if not self.aws_access_key_id: msg = 'An invalid AWS Access Key ID was specified.' self.logger.warning(msg) raise TypeError(msg) # Store our AWS API Secret Access key self.aws_secret_access_key = validate_regex(secret_access_key) if not self.aws_secret_access_key: msg = 'An invalid AWS Secret Access Key ' \ '({}) was specified.'.format(secret_access_key) self.logger.warning(msg) raise TypeError(msg) # Acquire our AWS Region Name: # eg. us-east-1, cn-north-1, us-west-2, ... self.aws_region_name = validate_regex( region_name, *self.template_tokens['region']['regex']) if not self.aws_region_name: msg = 'An invalid AWS Region ({}) was specified.'.format( region_name) self.logger.warning(msg) raise TypeError(msg) # Acquire Email 'To' self.targets = list() # Acquire Carbon Copies self.cc = set() # Acquire Blind Carbon Copies self.bcc = set() # For tracking our email -> name lookups self.names = {} # Set our notify_url based on our region self.notify_url = 'https://email.{}.amazonaws.com'\ .format(self.aws_region_name) # AWS Service Details self.aws_service_name = 'ses' self.aws_canonical_uri = '/' # AWS Authentication Details self.aws_auth_version = 'AWS4' self.aws_auth_algorithm = 'AWS4-HMAC-SHA256' self.aws_auth_request = 'aws4_request' # Get our From username (if specified) self.from_name = from_name if from_addr: self.from_addr = from_addr else: # Get our from email address self.from_addr = '{user}@{host}'.format( user=self.user, host=self.host) if self.user else None if not (self.from_addr and is_email(self.from_addr)): msg = 'An invalid AWS From ({}) was specified.'.format( '{user}@{host}'.format(user=self.user, host=self.host)) self.logger.warning(msg) raise TypeError(msg) self.reply_to = None if reply_to: result = is_email(reply_to) if not result: msg = 'An invalid AWS Reply To ({}) was specified.'.format( '{user}@{host}'.format(user=self.user, host=self.host)) self.logger.warning(msg) raise TypeError(msg) self.reply_to = ( result['name'] if result['name'] else False, result['full_email']) if targets: # Validate recipients (to:) and drop bad ones: for recipient in parse_emails(targets): result = is_email(recipient) if result: self.targets.append( (result['name'] if result['name'] else False, result['full_email'])) continue self.logger.warning( 'Dropped invalid To email ' '({}) specified.'.format(recipient), ) else: # If our target email list is empty we want to add ourselves to it self.targets.append( (self.from_name if self.from_name else False, self.from_addr)) # Validate recipients (cc:) and drop bad ones: for recipient in parse_emails(cc): email = is_email(recipient) if email: self.cc.add(email['full_email']) # 
Index our name (if one exists) self.names[email['full_email']] = \ email['name'] if email['name'] else False continue self.logger.warning( 'Dropped invalid Carbon Copy email ' '({}) specified.'.format(recipient), ) # Validate recipients (bcc:) and drop bad ones: for recipient in parse_emails(bcc): email = is_email(recipient) if email: self.bcc.add(email['full_email']) # Index our name (if one exists) self.names[email['full_email']] = \ email['name'] if email['name'] else False continue self.logger.warning( 'Dropped invalid Blind Carbon Copy email ' '({}) specified.'.format(recipient), ) return def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, **kwargs): """ wrapper to send_notification since we can alert more then one channel """ if not self.targets: # There is no one to email; we're done self.logger.warning( 'There are no SES email recipients to notify') return False # error tracking (used for function return) has_error = False # Initialize our default from name from_name = self.from_name if self.from_name \ else self.reply_to[0] if self.reply_to and \ self.reply_to[0] else self.app_desc reply_to = ( from_name, self.from_addr if not self.reply_to else self.reply_to[1]) # Create a copy of the targets list emails = list(self.targets) while len(emails): # Get our email to notify to_name, to_addr = emails.pop(0) # Strip target out of cc list if in To or Bcc cc = (self.cc - self.bcc - set([to_addr])) # Strip target out of bcc list if in To bcc = (self.bcc - set([to_addr])) # Format our cc addresses to support the Name field cc = [formataddr( (self.names.get(addr, False), addr), charset='utf-8') for addr in cc] # Format our bcc addresses to support the Name field bcc = [formataddr( (self.names.get(addr, False), addr), charset='utf-8') for addr in bcc] self.logger.debug('Email From: {} <{}>'.format( quote(reply_to[0], ' '), quote(reply_to[1], '@ '))) self.logger.debug('Email To: {}'.format(to_addr)) if cc: self.logger.debug('Email Cc: {}'.format(', '.join(cc))) if bcc: self.logger.debug('Email Bcc: {}'.format(', '.join(bcc))) # Prepare Email Message if self.notify_format == NotifyFormat.HTML: content = MIMEText(body, 'html', 'utf-8') else: content = MIMEText(body, 'plain', 'utf-8') # Create a Multipart container if there is an attachment base = MIMEMultipart() \ if attach and self.attachment_support else content # TODO: Deduplicate with `NotifyEmail`? 
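            # Descriptive sketch of the per-recipient message assembled
            # below (not authoritative, simply what the following lines do):
            #   Subject:  the notification title
            #   From:     "<from_name>" <from_addr>
            #   To:       "<to_name>" <to_addr>
            #   Reply-To: only when it differs from the From address
            #   Cc:       joined into a single header, while Bcc recipients
            #             are never written to a header; they are only added
            #             to the SES Destinations list further below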
base['Subject'] = Header(title, 'utf-8') base['From'] = formataddr( (from_name if from_name else False, self.from_addr), charset='utf-8') base['To'] = formataddr((to_name, to_addr), charset='utf-8') if reply_to[1] != self.from_addr: base['Reply-To'] = formataddr(reply_to, charset='utf-8') base['Cc'] = ','.join(cc) base['Date'] = \ datetime.now( timezone.utc).strftime("%a, %d %b %Y %H:%M:%S +0000") base['X-Application'] = self.app_id if attach and self.attachment_support: # First attach our body to our content as the first element base.attach(content) # Now store our attachments for no, attachment in enumerate(attach, start=1): if not attachment: # We could not load the attachment; take an early # exit since this isn't what the end user wanted # We could not access the attachment self.logger.error( 'Could not access attachment {}.'.format( attachment.url(privacy=True))) return False self.logger.debug( 'Preparing Email attachment {}'.format( attachment.url(privacy=True))) with open(attachment.path, "rb") as abody: app = MIMEApplication(abody.read()) app.set_type(attachment.mimetype) filename = attachment.name \ if attachment.name else f'file{no:03}.dat' app.add_header( 'Content-Disposition', 'attachment; filename="{}"'.format( Header(filename, 'utf-8')), ) base.attach(app) # Prepare our payload object payload = { 'Action': 'SendRawEmail', 'Version': '2010-12-01', 'RawMessage.Data': base64.b64encode( base.as_string().encode('utf-8')).decode('utf-8') } for no, email in enumerate(([to_addr] + bcc + cc), start=1): payload['Destinations.member.{}'.format(no)] = email # Specify from address payload['Source'] = '{} <{}>'.format( quote(from_name, ' '), quote(self.from_addr, '@ ')) (result, response) = self._post(payload=payload, to=to_addr) if not result: # Mark our failure has_error = True continue return not has_error def _post(self, payload, to): """ Wrapper to request.post() to manage it's response better and make the send() function cleaner and easier to maintain. This function returns True if the _post was successful and False if it wasn't. """ # Always call throttle before any remote server i/o is made; for AWS # time plays a huge factor in the headers being sent with the payload. # So for AWS (SES) requests we must throttle before they're generated # and not directly before the i/o call like other notification # services do. self.throttle() # Convert our payload from a dict() into a urlencoded string payload = NotifySES.urlencode(payload) # Prepare our Notification URL # Prepare our AWS Headers based on our payload headers = self.aws_prepare_request(payload) self.logger.debug('AWS SES POST URL: %s (cert_verify=%r)' % ( self.notify_url, self.verify_certificate, )) self.logger.debug('AWS SES Payload (%d bytes)', len(payload)) try: r = requests.post( self.notify_url, data=payload, headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifySES.http_response_code_lookup( r.status_code, AWS_HTTP_ERROR_MAP) self.logger.warning( 'Failed to send AWS SES notification to {}: ' '{}{}error={}.'.format( to, status_str, ', ' if status_str else '', r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) return (False, NotifySES.aws_response_to_dict(r.text)) else: self.logger.info( 'Sent AWS SES notification to "%s".' % (to)) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending AWS SES ' 'notification to "%s".' 
% (to), ) self.logger.debug('Socket Exception: %s' % str(e)) return (False, NotifySES.aws_response_to_dict(None)) return (True, NotifySES.aws_response_to_dict(r.text)) def aws_prepare_request(self, payload, reference=None): """ Takes the intended payload and returns the headers for it. The payload is presumed to have been already urlencoded() """ # Define our AWS SES header headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8', # Populated below 'Content-Length': 0, 'Authorization': None, 'X-Amz-Date': None, } # Get a reference time (used for header construction) reference = datetime.now(timezone.utc) # Provide Content-Length headers['Content-Length'] = str(len(payload)) # Amazon Date Format amzdate = reference.strftime('%Y%m%dT%H%M%SZ') headers['X-Amz-Date'] = amzdate # Credential Scope scope = '{date}/{region}/{service}/{request}'.format( date=reference.strftime('%Y%m%d'), region=self.aws_region_name, service=self.aws_service_name, request=self.aws_auth_request, ) # Similar to headers; but a subset. keys must be lowercase signed_headers = OrderedDict([ ('content-type', headers['Content-Type']), ('host', 'email.{region}.amazonaws.com'.format( region=self.aws_region_name)), ('x-amz-date', headers['X-Amz-Date']), ]) # # Build Canonical Request Object # canonical_request = '\n'.join([ # Method u'POST', # URL self.aws_canonical_uri, # Query String (none set for POST) '', # Header Content (must include \n at end!) # All entries except characters in amazon date must be # lowercase '\n'.join(['%s:%s' % (k, v) for k, v in signed_headers.items()]) + '\n', # Header Entries (in same order identified above) ';'.join(signed_headers.keys()), # Payload sha256(payload.encode('utf-8')).hexdigest(), ]) # Prepare Unsigned Signature to_sign = '\n'.join([ self.aws_auth_algorithm, amzdate, scope, sha256(canonical_request.encode('utf-8')).hexdigest(), ]) # Our Authorization header headers['Authorization'] = ', '.join([ '{algorithm} Credential={key}/{scope}'.format( algorithm=self.aws_auth_algorithm, key=self.aws_access_key_id, scope=scope, ), 'SignedHeaders={signed_headers}'.format( signed_headers=';'.join(signed_headers.keys()), ), 'Signature={signature}'.format( signature=self.aws_auth_signature(to_sign, reference) ), ]) return headers def aws_auth_signature(self, to_sign, reference): """ Generates a AWS v4 signature based on provided payload which should be in the form of a string. """ def _sign(key, msg, to_hex=False): """ Perform AWS Signing """ if to_hex: return hmac.new(key, msg.encode('utf-8'), sha256).hexdigest() return hmac.new(key, msg.encode('utf-8'), sha256).digest() _date = _sign(( self.aws_auth_version + self.aws_secret_access_key).encode('utf-8'), reference.strftime('%Y%m%d')) _region = _sign(_date, self.aws_region_name) _service = _sign(_region, self.aws_service_name) _signed = _sign(_service, self.aws_auth_request) return _sign(_signed, to_sign, to_hex=True) @staticmethod def aws_response_to_dict(aws_response): """ Takes an AWS Response object as input and returns it as a dictionary but not befor extracting out what is useful to us first. 
eg: IN: 010f017d87656ee2-a2ea291f-79ea- 44f3-9d25-00d041de3007-000000 7abb454e-904b-4e46-a23c-2f4d2fc127a6 OUT: { 'type': 'SendRawEmailResponse', 'message_id': '010f017d87656ee2-a2ea291f-79ea- 44f3-9d25-00d041de3007-000000', 'request_id': '7abb454e-904b-4e46-a23c-2f4d2fc127a6', } """ # Define ourselves a set of directives we want to keep if found and # then identify the value we want to map them to in our response # object aws_keep_map = { 'RequestId': 'request_id', 'MessageId': 'message_id', # Error Message Handling 'Type': 'error_type', 'Code': 'error_code', 'Message': 'error_message', } # A default response object that we'll manipulate as we pull more data # from our AWS Response object response = { 'type': None, 'request_id': None, 'message_id': None, } try: # we build our tree, but not before first eliminating any # reference to namespacing (if present) as it makes parsing # the tree so much easier. root = ElementTree.fromstring( re.sub(' xmlns="[^"]+"', '', aws_response, count=1)) # Store our response tag object name response['type'] = str(root.tag) def _xml_iter(root, response): if len(root) > 0: for child in root: # use recursion to parse everything _xml_iter(child, response) elif root.tag in aws_keep_map.keys(): response[aws_keep_map[root.tag]] = (root.text).strip() # Recursivly iterate over our AWS Response to extract the # fields we're interested in in efforts to populate our response # object. _xml_iter(root, response) except (ElementTree.ParseError, TypeError): # bad data just causes us to generate a bad response pass return response @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol, self.from_addr, self.aws_access_key_id, self.aws_secret_access_key, self.aws_region_name, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
""" # Acquire any global URL parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) if self.from_name is not None: # from_name specified; pass it back on the url params['name'] = self.from_name if self.cc: # Handle our Carbon Copy Addresses params['cc'] = ','.join( ['{}{}'.format( '' if not e not in self.names else '{}:'.format(self.names[e]), e) for e in self.cc]) if self.bcc: # Handle our Blind Carbon Copy Addresses params['bcc'] = ','.join(self.bcc) if self.reply_to: # Handle our reply to address params['reply'] = '{} <{}>'.format(*self.reply_to) \ if self.reply_to[0] else self.reply_to[1] # a simple boolean check as to whether we display our target emails # or not has_targets = \ not (len(self.targets) == 1 and self.targets[0][1] == self.from_addr) return '{schema}://{from_addr}/{key_id}/{key_secret}/{region}/' \ '{targets}/?{params}'.format( schema=self.secure_protocol, from_addr=NotifySES.quote(self.from_addr, safe='@'), key_id=self.pprint(self.aws_access_key_id, privacy, safe=''), key_secret=self.pprint( self.aws_secret_access_key, privacy, mode=PrivacyMode.Secret, safe=''), region=NotifySES.quote(self.aws_region_name, safe=''), targets='' if not has_targets else '/'.join( [NotifySES.quote('{}{}'.format( '' if not e[0] else '{}:'.format(e[0]), e[1]), safe='') for e in self.targets]), params=NotifySES.urlencode(params), ) def __len__(self): """ Returns the number of targets associated with this notification """ targets = len(self.targets) return targets if targets > 0 else 1 @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Get our entries; split_path() looks after unquoting content for us # by default entries = NotifySES.split_path(results['fullpath']) # The AWS Access Key ID is stored in the first entry access_key_id = entries.pop(0) if entries else None # Our AWS Access Key Secret contains slashes in it which unfortunately # means it is of variable length after the hostname. Since we require # that the user provides the region code, we intentionally use this # as our delimiter to detect where our Secret is. secret_access_key = None region_name = None # We need to iterate over each entry in the fullpath and find our # region. Once we get there we stop and build our secret from our # accumulated data. secret_access_key_parts = list() # Section 1: Get Region and Access Secret index = 0 for index, entry in enumerate(entries, start=1): # Are we at the region yet? 
result = IS_REGION.match(entry) if result: # Ensure region is nicely formatted region_name = "{country}-{area}-{no}".format( country=result.group('country').lower(), area=result.group('area').lower(), no=result.group('no'), ) # We're done with Section 1 of our url (the credentials) break elif is_email(entry): # We're done with Section 1 of our url (the credentials) index -= 1 break # Store our secret parts secret_access_key_parts.append(entry) # Prepare our Secret Access Key secret_access_key = '/'.join(secret_access_key_parts) \ if secret_access_key_parts else None # Section 2: Get our Recipients (basically all remaining entries) results['targets'] = entries[index:] if 'name' in results['qsd'] and len(results['qsd']['name']): # Extract from name to associate with from address results['from_name'] = \ NotifySES.unquote(results['qsd']['name']) # Handle 'to' email address if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'].append(results['qsd']['to']) # Handle Carbon Copy Addresses if 'cc' in results['qsd'] and len(results['qsd']['cc']): results['cc'] = NotifySES.parse_list(results['qsd']['cc']) # Handle Blind Carbon Copy Addresses if 'bcc' in results['qsd'] and len(results['qsd']['bcc']): results['bcc'] = NotifySES.parse_list(results['qsd']['bcc']) # Handle From Address handling if 'from' in results['qsd'] and len(results['qsd']['from']): results['from_addr'] = \ NotifySES.unquote(results['qsd']['from']) # Handle Reply To Address if 'reply' in results['qsd'] and len(results['qsd']['reply']): results['reply_to'] = \ NotifySES.unquote(results['qsd']['reply']) # Handle secret_access_key over-ride if 'secret' in results['qsd'] and len(results['qsd']['secret']): results['secret_access_key'] = \ NotifySES.unquote(results['qsd']['secret']) else: results['secret_access_key'] = secret_access_key # Handle access key id over-ride if 'access' in results['qsd'] and len(results['qsd']['access']): results['access_key_id'] = \ NotifySES.unquote(results['qsd']['access']) else: results['access_key_id'] = access_key_id # Handle region name id over-ride if 'region' in results['qsd'] and len(results['qsd']['region']): results['region_name'] = \ NotifySES.unquote(results['qsd']['region']) else: results['region_name'] = region_name # Return our result set return results apprise-1.9.3/apprise/plugins/seven.py000066400000000000000000000302261477231770000200200ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # Create an account https://www.seven.io if you don't already have one # # Get your (apikey) from here: # - https://help.seven.io/en/api-key-access # import requests import json from .base import NotifyBase from ..common import NotifyType from ..utils.parse import is_phone_no, parse_phone_no, parse_bool from ..locale import gettext_lazy as _ class NotifySeven(NotifyBase): """ A wrapper for seven Notifications """ # The default descriptive name associated with the Notification service_name = 'seven' # The services URL service_url = 'https://www.seven.io' # The default protocol secure_protocol = 'seven' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_seven' # Seven uses the http protocol with JSON requests notify_url = 'https://gateway.seven.io/api/sms' # The maximum length of the body body_maxlen = 160 # A title can not be used for SMS Messages. Setting this to zero will # cause any title (if defined) to get placed into the message body. title_maxlen = 0 # Define object templates templates = ( '{schema}://{apikey}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'apikey': { 'name': _('API Key'), 'type': 'string', 'required': True, 'private': True, }, 'target_phone': { 'name': _('Target Phone No'), 'type': 'string', 'prefix': '+', 'regex': (r'^[0-9\s)(+-]+$', 'i'), 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'source': { # Originating address,In cases where the rewriting of the sender's # address is supported or permitted by the SMS-C. This is used to # transmit the message, this number is transmitted as the # originating address and is completely optional. 
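            # An illustrative (hypothetical) example of supplying it:
            #   seven://apikey/4917661254799?from=MyServer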
'name': _('Originating Address'), 'type': 'string', 'map_to': 'source', }, 'from': { 'alias_of': 'source', }, 'flash': { 'name': _('Flash'), 'type': 'bool', 'default': False, }, 'label': { 'name': _('Label'), 'type': 'string' }, }) def __init__(self, apikey, targets=None, source=None, flash=None, label=None, **kwargs): """ Initialize Seven Object """ super().__init__(**kwargs) # API Key (associated with project) self.apikey = apikey if not self.apikey: msg = 'An invalid seven API Key ' \ '({}) was specified.'.format(apikey) self.logger.warning(msg) raise TypeError(msg) self.source = None \ if not isinstance(source, str) else source.strip() self.flash = self.template_args['flash']['default'] \ if flash is None else bool(flash) self.label = None \ if not isinstance(label, str) else label.strip() # Parse our targets self.targets = list() for target in parse_phone_no(targets): # Validate targets and drop bad ones: result = is_phone_no(target) if not result: self.logger.warning( 'Dropped invalid phone # ' '({}) specified.'.format(target), ) continue # store valid phone number self.targets.append(result['full']) return @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another similar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.apikey) def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform seven Notification """ if len(self.targets) == 0: # There were no services to notify self.logger.warning('There were no seven targets to notify.') return False # error tracking (used for function return) has_error = False # Prepare our headers headers = { 'Accept': 'application/json', 'Content-Type': 'application/json', 'SentWith': 'Apprise', 'X-Api-Key': self.apikey, } # Prepare our payload payload = { 'to': None, 'text': body, } if self.source: payload['from'] = self.source if self.flash: payload['flash'] = self.flash if self.label: payload['label'] = self.label # Create a copy of the targets list targets = list(self.targets) while len(targets): # Get our target to notify target = targets.pop(0) # Prepare our user payload['to'] = '+{}'.format(target) # Some Debug Logging self.logger.debug( 'seven POST URL: {} (cert_verify={})'.format( self.notify_url, self.verify_certificate)) self.logger.debug('seven Payload: {}' .format(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( self.notify_url, data=json.dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) # Sample output of a successful transmission # { # "success": "100", # "total_price": 0.075, # "balance": 46.748, # "debug": "false", # "sms_type": "direct", # "messages": [ # { # "id": "77229135982", # "sender": "492022839080", # "recipient": "4917661254799", # "text": "x", # "encoding": "gsm", # "label": null, # "parts": 1, # "udh": null, # "is_binary": false, # "price": 0.075, # "success": true, # "error": null, # "error_text": null # } # ] # } if r.status_code not in ( requests.codes.ok, requests.codes.created): # We had a problem status_str = \ NotifySeven.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to send seven notification to {}: ' '{}{}error={}.'.format( ','.join(target), status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure has_error = True continue else: self.logger.info( 'Sent seven notification to 
{}.'.format(target)) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending seven:%s ' % ( target) + 'notification.' ) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure has_error = True continue return not has_error def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ params = { 'flash': 'yes' if self.flash else 'no', } if self.source: params['from'] = self.source if self.label: params['label'] = self.label # Our URL parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) return '{schema}://{apikey}/{targets}/?{params}'.format( schema=self.secure_protocol, apikey=self.pprint(self.apikey, privacy, safe=''), targets='/'.join( [NotifySeven.quote(x, safe='') for x in self.targets]), params=NotifySeven.urlencode(params)) def __len__(self): """ Returns the number of targets associated with this notification """ targets = len(self.targets) return targets if targets > 0 else 1 @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Get our entries; split_path() looks after unquoting content for us # by default results['targets'] = NotifySeven.split_path(results['fullpath']) # The hostname is our authentication key results['apikey'] = NotifySeven.unquote(results['host']) # Support the 'to' variable so that we can support targets this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifySeven.parse_phone_no(results['qsd']['to']) # Support the 'from' and source variable if 'from' in results['qsd'] and len(results['qsd']['from']): results['source'] = \ NotifySeven.unquote(results['qsd']['from']) elif 'source' in results['qsd'] and len(results['qsd']['source']): results['source'] = \ NotifySeven.unquote(results['qsd']['source']) results['flash'] = \ parse_bool(results['qsd'].get('flash', False)) results['label'] = \ results['qsd'].get('label', None) return results apprise-1.9.3/apprise/plugins/sfr.py000066400000000000000000000355301477231770000174750ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # For this to work correctly you need to have a valid SFR DMC service account # to whicthe API password can be generated. A "space" is also necessary # (space = a logical separation between clients), which will give you a # specific spaceId # # Expected credentials looks a little like this: # serviceId: 84920958892 - Random numbers # servicePassword: XxXXxXXx - Random characters # spaceId: 984348 - Random numbers # # 1. Visit https://www.sfr.fr/ # # 2. Url will look like this # https://www.dmc.sfr-sh.fr/DmcWS/1.5.8/JsonService// import requests import json from .base import NotifyBase from ..common import NotifyType from ..locale import gettext_lazy as _ from ..utils.parse import is_phone_no, parse_phone_no from ..url import PrivacyMode class NotifySFR(NotifyBase): """ A wrapper for SFR French Telecom DMC API """ # The default descriptive name associated with the Notification service_name = _('Société Française du Radiotéléphone') # The services URL service_url = 'https://www.sfr.fr/' # The default protocol protocol = 'sfr' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_sfr' # SFR api notify_url = ( 'https://www.dmc.sfr-sh.fr/DmcWS/1.5.8/JsonService/' 'MessagesUnitairesWS/addSingleCall' # this is the actual api call ) # The maximum length of the body body_maxlen = 160 # A title can not be used for SMS Messages. Setting this to zero will # cause any title (if defined) to get placed into the message body. 
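    # (Illustrative behaviour note: a title such as "Alert" therefore simply
    # becomes part of the SMS text itself rather than a separate field.)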
title_maxlen = 0 # Define object templates templates = ( '{schema}://{user}:{password}@{space_id}/{targets}', ) # Define our tokens template_tokens = dict( NotifyBase.template_tokens, **{ 'user': { 'name': _('Service ID'), 'type': 'string', 'required': True, }, 'password': { 'name': _('Service Password'), 'type': 'string', 'private': True, 'required': True, }, 'space_id': { 'name': _('Space ID'), 'type': 'string', 'private': True, 'required': True, }, 'target': { 'name': _('Recipient Phone Number'), 'type': 'string', 'regex': (r'^\+?[0-9\s)(+-]+$', 'i'), 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', 'required': True, }, } ) # Define our template arguments template_args = dict( NotifyBase.template_args, **{ 'lang': { 'name': _('Language'), 'type': 'string', 'default': 'fr_FR', 'required': True, }, 'sender': { 'name': _('Sender Name'), 'type': 'string', 'required': True, 'default': '', }, 'from': { 'alias_of': 'sender' }, 'media': { 'name': _('Media Type'), 'type': 'string', 'required': True, 'default': 'SMSUnicode', 'values': ['SMS', 'SMSLong', 'SMSUnicode', 'SMSUnicodeLong'], }, 'timeout': { 'name': _('Timeout'), 'type': 'int', 'default': 2880, 'required': False, }, 'voice': { 'name': _('TTS Voice'), 'type': 'string', 'default': 'claire08s', 'values': ['claire08s', 'laura8k'], 'required': False, }, 'to': { 'alias_of': 'targets', }, } ) def __init__(self, space_id=None, targets=None, lang=None, sender=None, media=None, timeout=None, voice=None, **kwargs): """ Initialize SFR Object """ super().__init__(**kwargs) if not (self.user and self.password): msg = 'A SFR user (serviceId) and password (servicePassword) ' \ 'combination was not provided.' self.logger.warning(msg) raise TypeError(msg) self.space_id = space_id if not self.space_id: msg = 'A SFR Space ID is required.' self.logger.warning(msg) raise TypeError(msg) self.voice = voice \ if voice else self.template_args['voice']['default'] self.lang = lang \ if lang else self.template_args['lang']['default'] self.media = media \ if media else self.template_args['media']['default'] self.sender = sender \ if sender else self.template_args['sender']['default'] # Set our Time to Live Flag self.timeout = self.template_args['timeout']['default'] try: self.timeout = int(timeout) except (ValueError, TypeError): # set default timeout self.timeout = 2880 pass # Parse our targets self.targets = list() for target in parse_phone_no(targets): # Validate targets and drop bad ones: result = is_phone_no(target) if not result: self.logger.warning( 'Dropped invalid phone # ' '({}) specified.'.format(target), ) continue # store valid phone number self.targets.append(result['full']) if not self.targets: msg = ('No receiver phone number has been provided. 
Please ' 'provide as least one valid phone number.') self.logger.warning(msg) raise TypeError(msg) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform the SFR notification """ # error tracking (used for function return) has_error = False # Create a copy of the targets list targets = list(self.targets) # Construct the authentication JSON auth_payload = json.dumps({ 'serviceId': self.user, 'servicePassword': self.password, 'spaceId': self.space_id, 'lang': self.lang, }) base_payload = { # Can be 'SMS', 'SMSLong', 'SMSUnicode', or 'SMSUnicodeLong' 'media': self.media, # Content of the message 'textMsg': body, # Receiver's phone number (set below) 'to': None, # Optional, default to '' 'from': self.sender, # Optional, default 2880 minutes 'timeout': self.timeout, # Optional, default to French voice 'ttsVoice': self.voice, } while len(targets): # Get our target to notify target = targets.pop(0) # Prepare our target phone no base_payload['to'] = target # Always call throttle before any remote server i/o is made self.throttle() # Finalize our payload payload = { 'authenticate': auth_payload, 'messageUnitaire': json.dumps(base_payload, ensure_ascii=True) } # Some Debug Logging self.logger.debug('SFR POST URL: {} (cert_verify={})'.format( self.notify_url, self.verify_certificate)) self.logger.debug('SFR Payload: {}' .format(payload)) try: r = requests.post( self.notify_url, params=payload, verify=self.verify_certificate, timeout=self.request_timeout, ) try: content = json.loads(r.content) except (AttributeError, TypeError, ValueError): # ValueError = r.content is Unparsable # TypeError = r.content is None # AttributeError = r is None content = {} # Check if the request was successfull if r.status_code not in ( requests.codes.ok, requests.codes.no_content, ): # We had a problem status_str = \ NotifySFR.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to send SFR notification to {}: ' '{}{}error={}.'.format( target, status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure has_error = True continue # SFR returns a code 200 even if the authentication fails # It then indicates in the content['success'] field the # Actual state of the transaction if not content.get('success', False): self.logger.warning( 'SFR Notification to {} was not sent by the server: ' 'server_error={}, fatal={}.'.format( target, content.get('errorCode', 'UNKNOWN'), content.get('fatal', 'True'), )) # Mark our failure has_error = True continue self.logger.info( 'Sent SFR notification to %s.' % target) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending SFR:%s ' 'notification.' % target ) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure has_error = True continue return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol if self.secure else self.protocol, self.user, self.password, self.space_id, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
""" # Define any URL parameters params = { 'from': self.sender, 'timeout': str(self.timeout), 'voice': self.voice, 'lang': self.lang, 'media': self.media, } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) return '{schema}://{user}:{password}@{sid}/{targets}?{params}'.format( schema=self.secure_protocol if self.secure else self.protocol, user=self.user, password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe='', ), sid=self.pprint(self.space_id, privacy, safe=''), targets='/'.join( [NotifySFR.quote(x, safe='') for x in self.targets]), params=self.urlencode(params), ) def __len__(self): """ Returns the number of targets associated with this notification """ return len(self.targets) @staticmethod def parse_url(url): """ Parse the URL and return arguments required to initialize this plugin """ # NotifyBase.parse_url() will make the initial parsing of your string # very easy to use. It will tokenize the entire URL for you. The # tokens are then passed into your __init__() function you defined to # generate you're object results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Extract user and password results['space_id'] = results.get('host') results['targets'] = NotifySFR.split_path(results['fullpath']) # Extract additional parameters qsd = results.get('qsd', {}) results['sender'] = \ NotifySFR.unquote(qsd.get('sender', qsd.get('from'))) results['timeout'] = NotifySFR.unquote(qsd.get('timeout')) results['voice'] = NotifySFR.unquote(qsd.get('voice')) results['lang'] = NotifySFR.unquote(qsd.get('lang')) results['media'] = NotifySFR.unquote(qsd.get('media')) # Support the 'to' variable so that we can support rooms this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifySFR.parse_phone_no(results['qsd']['to']) return results apprise-1.9.3/apprise/plugins/signal_api.py000066400000000000000000000412451477231770000210110ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
import re import requests from json import dumps from .base import NotifyBase from ..common import NotifyType from .. import exception from ..utils.parse import is_phone_no, parse_phone_no, parse_bool from ..url import PrivacyMode from ..locale import gettext_lazy as _ GROUP_REGEX = re.compile( r'^\s*((\@|\%40)?(group\.)|\@|\%40)(?P[a-z0-9_=-]+)', re.I) class NotifySignalAPI(NotifyBase): """ A wrapper for SignalAPI Notifications """ # The default descriptive name associated with the Notification service_name = 'Signal API' # The services URL service_url = 'https://bbernhard.github.io/signal-cli-rest-api/' # The default protocol protocol = 'signal' # The default protocol secure_protocol = 'signals' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_signal' # Support attachments attachment_support = True # The maximum targets to include when doing batch transfers default_batch_size = 10 # We don't support titles for Signal notifications title_maxlen = 0 # Define object templates templates = ( '{schema}://{host}/{from_phone}', '{schema}://{host}:{port}/{from_phone}', '{schema}://{user}@{host}/{from_phone}', '{schema}://{user}@{host}:{port}/{from_phone}', '{schema}://{user}:{password}@{host}/{from_phone}', '{schema}://{user}:{password}@{host}:{port}/{from_phone}', '{schema}://{host}/{from_phone}/{targets}', '{schema}://{host}:{port}/{from_phone}/{targets}', '{schema}://{user}@{host}/{from_phone}/{targets}', '{schema}://{user}@{host}:{port}/{from_phone}/{targets}', '{schema}://{user}:{password}@{host}/{from_phone}/{targets}', '{schema}://{user}:{password}@{host}:{port}/{from_phone}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'host': { 'name': _('Hostname'), 'type': 'string', 'required': True, }, 'port': { 'name': _('Port'), 'type': 'int', 'min': 1, 'max': 65535, }, 'user': { 'name': _('Username'), 'type': 'string', }, 'password': { 'name': _('Password'), 'type': 'string', 'private': True, }, 'from_phone': { 'name': _('From Phone No'), 'type': 'string', 'required': True, 'regex': (r'^\+?[0-9\s)(+-]+$', 'i'), 'map_to': 'source', }, 'target_phone': { 'name': _('Target Phone No'), 'type': 'string', 'prefix': '+', 'regex': (r'^[0-9\s)(+-]+$', 'i'), 'map_to': 'targets', }, 'target_channel': { 'name': _('Target Group ID'), 'type': 'string', 'prefix': '@', 'regex': (r'^[a-z0-9_=-]+$', 'i'), 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', } }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'from': { 'alias_of': 'from_phone', }, 'batch': { 'name': _('Batch Mode'), 'type': 'bool', 'default': False, }, 'status': { 'name': _('Show Status'), 'type': 'bool', 'default': False, }, }) def __init__(self, source=None, targets=None, batch=False, status=False, **kwargs): """ Initialize SignalAPI Object """ super().__init__(**kwargs) # Prepare Batch Mode Flag self.batch = batch # Set Status type self.status = status # Parse our targets self.targets = list() # Used for URL generation afterwards only self.invalid_targets = list() # Manage our Source Phone result = is_phone_no(source) if not result: msg = 'An invalid Signal API Source Phone No ' \ '({}) was provided.'.format(source) self.logger.warning(msg) raise TypeError(msg) self.source = '+{}'.format(result['full']) if targets: # Validate our targerts for target in parse_phone_no(targets): # Validate targets and drop bad ones: result = 
is_phone_no(target) if result: # store valid phone number self.targets.append('+{}'.format(result['full'])) continue result = GROUP_REGEX.match(target) if result: # Just store group information self.targets.append( 'group.{}'.format(result.group('group'))) continue self.logger.warning( 'Dropped invalid phone/group ' '({}) specified.'.format(target), ) self.invalid_targets.append(target) continue else: # Send a message to ourselves self.targets.append(self.source) return def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, **kwargs): """ Perform Signal API Notification """ if len(self.targets) == 0: # There were no services to notify self.logger.warning( 'There were no Signal API targets to notify.') return False # error tracking (used for function return) has_error = False attachments = [] if attach and self.attachment_support: for attachment in attach: # Perform some simple error checking if not attachment: # We could not access the attachment self.logger.error( 'Could not access Signal API attachment {}.'.format( attachment.url(privacy=True))) return False try: attachments.append(attachment.base64()) except exception.AppriseException: # We could not access the attachment self.logger.error( 'Could not access Signal API attachment {}.'.format( attachment.url(privacy=True))) return False self.logger.debug( 'Appending Signal API attachment {}'.format( attachment.url(privacy=True))) # Prepare our headers headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json', } # Format defined here: # https://bbernhard.github.io/signal-cli-rest-api\ # /#/Messages/post_v2_send # Example: # { # "base64_attachments": [ # "string" # ], # "message": "string", # "number": "string", # "recipients": [ # "string" # ] # } # Prepare our payload payload = { 'message': "{}{}".format( '' if not self.status else '{} '.format( self.asset.ascii(notify_type)), body).rstrip(), "number": self.source, "recipients": [] } if attachments: # Store our attachments payload['base64_attachments'] = attachments # Determine Authentication auth = None if self.user: auth = (self.user, self.password) # Set our schema schema = 'https' if self.secure else 'http' # Construct our URL notify_url = '%s://%s' % (schema, self.host) if isinstance(self.port, int): notify_url += ':%d' % self.port notify_url += '/v2/send' # Send in batches if identified to do so batch_size = 1 if not self.batch else self.default_batch_size for index in range(0, len(self.targets), batch_size): # Prepare our recipients payload['recipients'] = self.targets[index:index + batch_size] self.logger.debug('Signal API POST URL: %s (cert_verify=%r)' % ( notify_url, self.verify_certificate, )) self.logger.debug('Signal API Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( notify_url, auth=auth, data=dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code not in ( requests.codes.ok, requests.codes.created): # We had a problem status_str = \ NotifySignalAPI.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to send {} Signal API notification{}: ' '{}{}error={}.'.format( len(self.targets[index:index + batch_size]), ' to {}'.format(self.targets[index]) if batch_size == 1 else '(s)', status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure has_error = True continue else: self.logger.info( 'Sent {} Signal API 
notification{}.' .format( len(self.targets[index:index + batch_size]), ' to {}'.format(self.targets[index]) if batch_size == 1 else '(s)', )) except requests.RequestException as e: self.logger.warning( 'A Connection error occured sending {} Signal API ' 'notification(s).'.format( len(self.targets[index:index + batch_size]))) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure has_error = True continue return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol if self.secure else self.protocol, self.user, self.password, self.host, self.port, self.source, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'batch': 'yes' if self.batch else 'no', 'status': 'yes' if self.status else 'no', } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Determine Authentication auth = '' if self.user and self.password: auth = '{user}:{password}@'.format( user=NotifySignalAPI.quote(self.user, safe=''), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), ) elif self.user: auth = '{user}@'.format( user=NotifySignalAPI.quote(self.user, safe=''), ) default_port = 443 if self.secure else 80 # So we can strip out our own phone (if present); create a copy of our # targets if len(self.targets) == 1 and self.source in self.targets: targets = [] elif len(self.targets) == 0: # invalid phone-no were specified targets = self.invalid_targets else: # append @ to non-phone number entries as they are groups # Remove group. prefix as well targets = \ ['@{}'.format(x[6:]) if x[0] != '+' else x for x in self.targets] return '{schema}://{auth}{hostname}{port}/{src}/{dst}?{params}'.format( schema=self.secure_protocol if self.secure else self.protocol, auth=auth, # never encode hostname since we're expecting it to be a valid one hostname=self.host, port='' if self.port is None or self.port == default_port else ':{}'.format(self.port), src=self.source, dst='/'.join( [NotifySignalAPI.quote(x, safe='@+') for x in targets]), params=NotifySignalAPI.urlencode(params), ) def __len__(self): """ Returns the number of targets associated with this notification """ # # Factor batch into calculation # batch_size = 1 if not self.batch else self.default_batch_size targets = len(self.targets) if batch_size > 1: targets = int(targets / batch_size) + \ (1 if targets % batch_size else 0) return targets @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. 
""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Get our entries; split_path() looks after unquoting content for us # by default results['targets'] = \ NotifySignalAPI.split_path(results['fullpath']) # The hostname is our authentication key results['apikey'] = NotifySignalAPI.unquote(results['host']) if 'from' in results['qsd'] and len(results['qsd']['from']): results['source'] = \ NotifySignalAPI.unquote(results['qsd']['from']) elif results['targets']: # The from phone no is the first entry in the list otherwise results['source'] = results['targets'].pop(0) # Support the 'to' variable so that we can support targets this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifySignalAPI.parse_phone_no(results['qsd']['to']) # Get Batch Mode Flag results['batch'] = \ parse_bool(results['qsd'].get('batch', False)) # Get status switch results['status'] = \ parse_bool(results['qsd'].get('status', False)) return results apprise-1.9.3/apprise/plugins/simplepush.py000066400000000000000000000274211477231770000210740ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. from os import urandom from json import loads import requests from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyType from ..utils.parse import validate_regex from ..locale import gettext_lazy as _ from base64 import urlsafe_b64encode import hashlib try: from cryptography.hazmat.primitives import padding from cryptography.hazmat.primitives.ciphers import Cipher from cryptography.hazmat.primitives.ciphers import algorithms from cryptography.hazmat.primitives.ciphers import modes from cryptography.hazmat.backends import default_backend # We're good to go! 
NOTIFY_SIMPLEPUSH_ENABLED = True except ImportError: # cryptography is required in order for this package to work NOTIFY_SIMPLEPUSH_ENABLED = False class NotifySimplePush(NotifyBase): """ A wrapper for SimplePush Notifications """ # Set our global enabled flag enabled = NOTIFY_SIMPLEPUSH_ENABLED requirements = { # Define our required packaging in order to work 'packages_required': 'cryptography' } # The default descriptive name associated with the Notification service_name = 'SimplePush' # The services URL service_url = 'https://simplepush.io/' # The default secure protocol secure_protocol = 'spush' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_simplepush' # SimplePush uses the http protocol with SimplePush requests notify_url = 'https://api.simplepush.io/send' # The maximum allowable characters allowed in the body per message body_maxlen = 10000 # Defines the maximum allowable characters in the title title_maxlen = 1024 # Define object templates templates = ( '{schema}://{apikey}', '{schema}://{salt}:{password}@{apikey}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'apikey': { 'name': _('API Key'), 'type': 'string', 'private': True, 'required': True, }, # Used for encrypted logins 'password': { 'name': _('Password'), 'type': 'string', 'private': True, }, 'salt': { 'name': _('Salt'), 'type': 'string', 'private': True, 'map_to': 'user', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'event': { 'name': _('Event'), 'type': 'string', }, }) def __init__(self, apikey, event=None, **kwargs): """ Initialize SimplePush Object """ super().__init__(**kwargs) # API Key (associated with project) self.apikey = validate_regex(apikey) if not self.apikey: msg = 'An invalid SimplePush API Key ' \ '({}) was specified.'.format(apikey) self.logger.warning(msg) raise TypeError(msg) if event: # Event Name (associated with project) self.event = validate_regex(event) if not self.event: msg = 'An invalid SimplePush Event Name ' \ '({}) was specified.'.format(event) self.logger.warning(msg) raise TypeError(msg) else: # Default Event Name self.event = None # Used/cached in _encrypt() function self._iv = None self._iv_hex = None self._key = None def _encrypt(self, content): """ Encrypts message for use with SimplePush """ if self._iv is None: # initialization vector and cache it self._iv = urandom(algorithms.AES.block_size // 8) # convert vector into hex string (used in payload) self._iv_hex = ''.join(["{:02x}".format(ord(self._iv[idx:idx + 1])) for idx in range(len(self._iv))]).upper() # encrypted key and cache it self._key = bytes(bytearray.fromhex( hashlib.sha1('{}{}'.format(self.password, self.user) .encode('utf-8')).hexdigest()[0:32])) padder = padding.PKCS7(algorithms.AES.block_size).padder() content = padder.update(content.encode()) + padder.finalize() # # Encryption Notice # # CBC mode doesn't provide integrity guarantees. Unless the message # authentication for IV and the ciphertext are applied, it will be # vulnerable to a padding oracle attack # It is important to identify that both the Apprise package and team # recognizes this AES-CBC-128 weakness but requires that it exists due # to it being the SimplePush Requirement as documented on their # website here https://simplepush.io/features. 
# In the event the website link above does not exist/work, a screen # capture of the reference to the requirement for this encryption # can also be found on the Apprise SimplePush Wiki: # https://github.com/caronc/apprise/wiki/Notify_simplepush\ # #lock-aes-cbc-128-encryption-weakness # encryptor = Cipher( algorithms.AES(self._key), modes.CBC(self._iv), default_backend()).encryptor() return urlsafe_b64encode( encryptor.update(content) + encryptor.finalize()) def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform SimplePush Notification """ headers = { 'User-Agent': self.app_id, 'Content-type': "application/x-www-form-urlencoded", } # Prepare our payload payload = { 'key': self.apikey, } if self.password and self.user: body = self._encrypt(body) title = self._encrypt(title) payload.update({ 'encrypted': 'true', 'iv': self._iv_hex, }) # prepare SimplePush Object payload.update({ 'msg': body, 'title': title, }) if self.event: # Store Event payload['event'] = self.event self.logger.debug('SimplePush POST URL: %s (cert_verify=%r)' % ( self.notify_url, self.verify_certificate, )) self.logger.debug('SimplePush Payload: %s' % str(payload)) # We need to rely on the status string returned in the SimplePush # response status_str = None status = None # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( self.notify_url, data=payload, headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) # Get our SimplePush response (if it's possible) try: json_response = loads(r.content) status_str = json_response.get('message') status = json_response.get('status') except (TypeError, ValueError, AttributeError): # TypeError = r.content is not a String # ValueError = r.content is Unparsable # AttributeError = r.content is None pass if r.status_code != requests.codes.ok or status != 'OK': # We had a problem status_str = status_str if status_str else\ NotifyBase.http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send SimplePush notification:' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.info('Sent SimplePush notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending SimplePush notification.') self.logger.debug('Socket Exception: %s' % str(e)) # Return; we're done return False return True @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.user, self.password, self.apikey) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
""" # Our URL parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) if self.event: params['event'] = self.event # Determine Authentication auth = '' if self.user and self.password: auth = '{salt}:{password}@'.format( salt=self.pprint( self.user, privacy, mode=PrivacyMode.Secret, safe=''), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), ) return '{schema}://{auth}{apikey}/?{params}'.format( schema=self.secure_protocol, auth=auth, apikey=self.pprint(self.apikey, privacy, safe=''), params=NotifySimplePush.urlencode(params), ) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Set the API Key results['apikey'] = NotifySimplePush.unquote(results['host']) # Event if 'event' in results['qsd'] and len(results['qsd']['event']): # Extract the account sid from an argument results['event'] = \ NotifySimplePush.unquote(results['qsd']['event']) return results apprise-1.9.3/apprise/plugins/sinch.py000066400000000000000000000414011477231770000200010ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # To use this service you will need a Sinch account to which you can get your # API_TOKEN and SERVICE_PLAN_ID right from your console/dashboard at: # https://dashboard.sinch.com/sms/overview # # You will also need to send the SMS From a phone number or account id name. # This is identified as the source (or where the SMS message will originate # from). 
Activated phone numbers can be found on your dashboard here: # - https://dashboard.sinch.com/numbers/your-numbers/numbers # import requests import json from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyType from ..utils.parse import is_phone_no, parse_phone_no, validate_regex from ..locale import gettext_lazy as _ class SinchRegion: """ Defines the Sinch Server Regions """ USA = 'us' EUROPE = 'eu' # Used for verification purposes SINCH_REGIONS = (SinchRegion.USA, SinchRegion.EUROPE) class NotifySinch(NotifyBase): """ A wrapper for Sinch Notifications """ # The default descriptive name associated with the Notification service_name = 'Sinch' # The services URL service_url = 'https://sinch.com/' # All notification requests are secure secure_protocol = 'sinch' # Allow 300 requests per minute. # 60/300 = 0.2 request_rate_per_sec = 0.20 # the number of seconds undelivered messages should linger for # in the Sinch queue validity_period = 14400 # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_sinch' # Sinch uses the http protocol with JSON requests # - the 'spi' gets substituted with the Service Provider ID # provided as part of the Apprise URL. notify_url = 'https://{region}.sms.api.sinch.com/xms/v1/{spi}/batches' # The maximum length of the body body_maxlen = 160 # A title can not be used for SMS Messages. Setting this to zero will # cause any title (if defined) to get placed into the message body. title_maxlen = 0 # Define object templates templates = ( '{schema}://{service_plan_id}:{api_token}@{from_phone}', '{schema}://{service_plan_id}:{api_token}@{from_phone}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'service_plan_id': { 'name': _('Account SID'), 'type': 'string', 'private': True, 'required': True, 'regex': (r'^[a-f0-9]+$', 'i'), }, 'api_token': { 'name': _('Auth Token'), 'type': 'string', 'private': True, 'required': True, 'regex': (r'^[a-f0-9]+$', 'i'), }, 'from_phone': { 'name': _('From Phone No'), 'type': 'string', 'required': True, 'regex': (r'^\+?[0-9\s)(+-]+$', 'i'), 'map_to': 'source', }, 'target_phone': { 'name': _('Target Phone No'), 'type': 'string', 'prefix': '+', 'regex': (r'^[0-9\s)(+-]+$', 'i'), 'map_to': 'targets', }, 'short_code': { 'name': _('Target Short Code'), 'type': 'string', 'regex': (r'^[0-9]{5,6}$', 'i'), 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'from': { 'alias_of': 'from_phone', }, 'spi': { 'alias_of': 'service_plan_id', }, 'region': { 'name': _('Region'), 'type': 'string', 'regex': (r'^[a-z]{2}$', 'i'), 'default': SinchRegion.USA, }, 'token': { 'alias_of': 'api_token', }, }) def __init__(self, service_plan_id, api_token, source, targets=None, region=None, **kwargs): """ Initialize Sinch Object """ super().__init__(**kwargs) # The Account SID associated with the account self.service_plan_id = validate_regex( service_plan_id, *self.template_tokens['service_plan_id']['regex']) if not self.service_plan_id: msg = 'An invalid Sinch Account SID ' \ '({}) was specified.'.format(service_plan_id) self.logger.warning(msg) raise TypeError(msg) # The Authentication Token associated with the account self.api_token = validate_regex( api_token, *self.template_tokens['api_token']['regex']) if not self.api_token: msg = 'An invalid Sinch Authentication 
Token ' \ '({}) was specified.'.format(api_token) self.logger.warning(msg) raise TypeError(msg) # Setup our region self.region = self.template_args['region']['default'] \ if not isinstance(region, str) else region.lower() if self.region and self.region not in SINCH_REGIONS: msg = 'The region specified ({}) is invalid.'.format(region) self.logger.warning(msg) raise TypeError(msg) # The Source Phone # and/or short-code result = is_phone_no(source, min_len=5) if not result: msg = 'The Account (From) Phone # or Short-code specified ' \ '({}) is invalid.'.format(source) self.logger.warning(msg) raise TypeError(msg) # Tidy source self.source = result['full'] if len(self.source) < 11 or len(self.source) > 14: # A short code is a special 5 or 6 digit telephone number # that's shorter than a full phone number. if len(self.source) not in (5, 6): msg = 'The Account (From) Phone # specified ' \ '({}) is invalid.'.format(source) self.logger.warning(msg) raise TypeError(msg) # else... it as a short code so we're okay else: # We're dealing with a phone number; so we need to just # place a plus symbol at the end of it self.source = '+{}'.format(self.source) # Parse our targets self.targets = list() for target in parse_phone_no(targets): # Parse each phone number we found result = is_phone_no(target) if not result: self.logger.warning( 'Dropped invalid phone # ' '({}) specified.'.format(target), ) continue # store valid phone number self.targets.append('+{}'.format(result['full'])) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Sinch Notification """ if not self.targets: if len(self.source) in (5, 6): # Generate a warning since we're a short-code. We need # a number to message at minimum self.logger.warning( 'There are no valid Sinch targets to notify.') return False # error tracking (used for function return) has_error = False # Prepare our headers headers = { 'User-Agent': self.app_id, 'Authorization': 'Bearer {}'.format(self.api_token), 'Content-Type': 'application/json', } # Prepare our payload payload = { 'body': body, 'from': self.source, # The To gets populated in the loop below 'to': None, } # Prepare our Sinch URL (spi = Service Provider ID) url = self.notify_url.format( region=self.region, spi=self.service_plan_id) # Create a copy of the targets list targets = list(self.targets) if len(targets) == 0: # No sources specified, use our own phone no targets.append(self.source) while len(targets): # Get our target to notify target = targets.pop(0) # Prepare our user payload['to'] = [target] # Some Debug Logging self.logger.debug('Sinch POST URL: {} (cert_verify={})'.format( url, self.verify_certificate)) self.logger.debug('Sinch Payload: {}' .format(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( url, data=json.dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) # The responsne might look like: # { # "id": "CJloRJOe3MtDITqx", # "to": ["15551112222"], # "from": "15553334444", # "canceled": false, # "body": "This is a test message from your Sinch account", # "type": "mt_text", # "created_at": "2020-01-14T01:05:20.694Z", # "modified_at": "2020-01-14T01:05:20.694Z", # "delivery_report": "none", # "expire_at": "2020-01-17T01:05:20.694Z", # "flash_message": false # } if r.status_code not in ( requests.codes.created, requests.codes.ok): # We had a problem status_str = \ NotifyBase.http_response_code_lookup(r.status_code) # set up our status code to use 
status_code = r.status_code try: # Update our status response if we can json_response = json.loads(r.content) status_code = json_response.get('code', status_code) status_str = json_response.get('message', status_str) except (AttributeError, TypeError, ValueError): # ValueError = r.content is Unparsable # TypeError = r.content is None # AttributeError = r is None # We could not parse JSON response. # We will just use the status we already have. pass self.logger.warning( 'Failed to send Sinch notification to {}: ' '{}{}error={}.'.format( target, status_str, ', ' if status_str else '', status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure has_error = True continue else: self.logger.info( 'Sent Sinch notification to {}.'.format(target)) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Sinch:%s ' % ( target) + 'notification.' ) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure has_error = True continue return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol if self.secure else self.protocol, self.service_plan_id, self.api_token, self.source, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'region': self.region, } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) return '{schema}://{spi}:{token}@{source}/{targets}/?{params}'.format( schema=self.secure_protocol, spi=self.pprint( self.service_plan_id, privacy, mode=PrivacyMode.Tail, safe=''), token=self.pprint(self.api_token, privacy, safe=''), source=NotifySinch.quote(self.source, safe=''), targets='/'.join( [NotifySinch.quote(x, safe='') for x in self.targets]), params=NotifySinch.urlencode(params)) def __len__(self): """ Returns the number of targets associated with this notification """ targets = len(self.targets) return targets if targets > 0 else 1 @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Get our entries; split_path() looks after unquoting content for us # by default results['targets'] = NotifySinch.split_path(results['fullpath']) # The hostname is our source number results['source'] = NotifySinch.unquote(results['host']) # Get our service_plan_ide and api_token from the user/pass config results['service_plan_id'] = NotifySinch.unquote(results['user']) results['api_token'] = NotifySinch.unquote(results['password']) # Auth Token if 'token' in results['qsd'] and len(results['qsd']['token']): # Extract the account spi from an argument results['api_token'] = \ NotifySinch.unquote(results['qsd']['token']) # Account SID if 'spi' in results['qsd'] and len(results['qsd']['spi']): # Extract the account spi from an argument results['service_plan_id'] = \ NotifySinch.unquote(results['qsd']['spi']) # Support the 'from' and 'source' variable so that we can support # targets this way too. 
# The 'from' makes it easier to use yaml configuration if 'from' in results['qsd'] and len(results['qsd']['from']): results['source'] = \ NotifySinch.unquote(results['qsd']['from']) if 'source' in results['qsd'] and len(results['qsd']['source']): results['source'] = \ NotifySinch.unquote(results['qsd']['source']) # Allow one to define a region if 'region' in results['qsd'] and len(results['qsd']['region']): results['region'] = \ NotifySinch.unquote(results['qsd']['region']) # Support the 'to' variable so that we can support targets this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifySinch.parse_phone_no(results['qsd']['to']) return results apprise-1.9.3/apprise/plugins/slack.py000066400000000000000000001264201477231770000177770ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # There are 2 ways to use this plugin... # Method 1: Via Webhook: # Visit https://my.slack.com/services/new/incoming-webhook/ # to create a new incoming webhook for your account. You'll need to # follow the wizard to pre-determine the channel(s) you want your # message to broadcast to, and when you're complete, you will # recieve a URL that looks something like this: # https://hooks.slack.com/services/T1JJ3T3L2/A1BRTD4JD/TIiajkdnlazkcOXrIdevi7 # ^ ^ ^ # | | | # These are important <--------------^---------^---------------^ # # Method 2: Via a Bot: # 1. visit: https://api.slack.com/apps?new_app=1 # 2. Pick an App Name (such as Apprise) and select your workspace. Then # press 'Create App' # 3. You'll be able to click on 'Bots' from here where you can then choose # to add a 'Bot User'. Give it a name and choose 'Add Bot User'. # 4. Now you can choose 'Install App' to which you can choose 'Install App # to Workspace'. # 5. You will need to authorize the app which you get prompted to do. # 6. Finally you'll get some important information providing you your # 'OAuth Access Token' and 'Bot User OAuth Access Token' such as: # slack://{Oauth Access Token} # # ... which might look something like: # slack://xoxp-1234-1234-1234-4ddbc191d40ee098cbaae6f3523ada2d # ... 
or: # slack://xoxb-1234-1234-4ddbc191d40ee098cbaae6f3523ada2d # # You must at least give your bot the following access for it to # be useful: # - chat:write - MUST be set otherwise you can not post into # a channel # - users:read.email - Required if you want to be able to lookup # users by their email address. # # The easiest way to bring a bot into a channel (so that it can send # a message to it is to invite it. At this time Apprise does not support # an auto-join functionality. To do this: # - In the 'Details' section of your channel # - Click on the 'More' [...] (elipse icon) # - Click 'Add apps' # - You will be able to select the Bot App you previously created # - Your bot will join your channel. import re import requests from json import dumps from json import loads from time import time from .base import NotifyBase from ..common import NotifyImageSize from ..common import NotifyType from ..common import NotifyFormat from ..utils.parse import ( is_email, parse_bool, parse_list, validate_regex) from ..locale import gettext_lazy as _ # Extend HTTP Error Messages SLACK_HTTP_ERROR_MAP = { 401: 'Unauthorized - Invalid Token.', } # Used to break path apart into list of channels CHANNEL_LIST_DELIM = re.compile(r'[ \t\r\n,#\\/]+') # Channel Regular Expression Parsing CHANNEL_RE = re.compile( r'^(?P[+#@]?[A-Z0-9_-]{1,32})(:(?P[0-9.]+))?$', re.I) class SlackMode: """ Tracks the mode of which we're using Slack """ # We're dealing with a webhook # Our token looks like: T1JJ3T3L2/A1BRTD4JD/TIiajkdnlazkcOXrIdevi7 WEBHOOK = 'webhook' # We're dealing with a bot (using the OAuth Access Token) # Our token looks like: xoxp-1234-1234-1234-abc124 or # Our token looks like: xoxb-1234-1234-abc124 or BOT = 'bot' # Define our Slack Modes SLACK_MODES = ( SlackMode.WEBHOOK, SlackMode.BOT, ) class NotifySlack(NotifyBase): """ A wrapper for Slack Notifications """ # The default descriptive name associated with the Notification service_name = 'Slack' # The services URL service_url = 'https://slack.com/' # The default secure protocol secure_protocol = 'slack' # Allow 50 requests per minute (Tier 2). 
# 60/50 = 0.2 request_rate_per_sec = 1.2 # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_slack' # Support attachments attachment_support = True # The maximum targets to include when doing batch transfers # Slack Webhook URL webhook_url = 'https://hooks.slack.com/services' # Slack API URL (used with Bots) api_url = 'https://slack.com/api/{}' # Allows the user to specify the NotifyImageSize object image_size = NotifyImageSize.XY_72 # The maximum allowable characters allowed in the body per message body_maxlen = 35000 # Default Notification Format notify_format = NotifyFormat.MARKDOWN # Bot's do not have default channels to notify; so #general # becomes the default channel in BOT mode default_notification_channel = '#general' # Define object templates templates = ( # Webhook '{schema}://{token_a}/{token_b}/{token_c}', '{schema}://{botname}@{token_a}/{token_b}{token_c}', '{schema}://{token_a}/{token_b}/{token_c}/{targets}', '{schema}://{botname}@{token_a}/{token_b}/{token_c}/{targets}', # Bot '{schema}://{access_token}/', '{schema}://{access_token}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'botname': { 'name': _('Bot Name'), 'type': 'string', 'map_to': 'user', }, # Bot User OAuth Access Token # which always starts with xoxp- e.g.: # xoxb-1234-1234-4ddbc191d40ee098cbaae6f3523ada2d 'access_token': { 'name': _('OAuth Access Token'), 'type': 'string', 'private': True, 'required': True, 'regex': (r'^xox[abp]-[A-Z0-9-]+$', 'i'), }, # Token required as part of the Webhook request # /AAAAAAAAA/........./........................ 'token_a': { 'name': _('Token A'), 'type': 'string', 'private': True, 'regex': (r'^[A-Z0-9]+$', 'i'), }, # Token required as part of the Webhook request # /........./BBBBBBBBB/........................ 
'token_b': { 'name': _('Token B'), 'type': 'string', 'private': True, 'regex': (r'^[A-Z0-9]+$', 'i'), }, # Token required as part of the Webhook request # /........./........./CCCCCCCCCCCCCCCCCCCCCCCC 'token_c': { 'name': _('Token C'), 'type': 'string', 'private': True, 'regex': (r'^[A-Za-z0-9]+$', 'i'), }, 'target_encoded_id': { 'name': _('Target Encoded ID'), 'type': 'string', 'prefix': '+', 'map_to': 'targets', }, 'target_email': { 'name': _('Target Email'), 'type': 'string', 'map_to': 'targets', }, 'target_user': { 'name': _('Target User'), 'type': 'string', 'prefix': '@', 'map_to': 'targets', }, 'target_channels': { 'name': _('Target Channel'), 'type': 'string', 'prefix': '#', 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'image': { 'name': _('Include Image'), 'type': 'bool', 'default': True, 'map_to': 'include_image', }, 'footer': { 'name': _('Include Footer'), 'type': 'bool', 'default': True, 'map_to': 'include_footer', }, # Use Payload in Blocks (vs legacy way): # See: https://api.slack.com/reference/messaging/payload 'blocks': { 'name': _('Use Blocks'), 'type': 'bool', 'default': False, 'map_to': 'use_blocks', }, 'to': { 'alias_of': 'targets', }, 'token': { 'name': _('Token'), 'alias_of': ('access_token', 'token_a', 'token_b', 'token_c'), }, }) # Formatting requirements are defined here: # https://api.slack.com/docs/message-formatting _re_formatting_map = { # New lines must become the string version r'\r\*\n': '\\n', # Escape other special characters r'&': '&', r'<': '<', r'>': '>', } # To notify a channel, one uses _re_channel_support = re.compile( r'(?P(?:<|\<)?[ \t]*' r'!(?P[^| \n]+)' r'(?:[ \t]*\|[ \t]*(?:(?P[^\n]+?)[ \t]*)?(?:>|\>)' r'|(?:>|\>)))', re.IGNORECASE) # To notify a user by their ID, one uses <@U6TTX1F9R> _re_user_id_support = re.compile( r'(?P(?:<|\<)?[ \t]*' r'@(?P[^| \n]+)' r'(?:[ \t]*\|[ \t]*(?:(?P[^\n]+?)[ \t]*)?(?:>|\>)' r'|(?:>|\>)))', re.IGNORECASE) # The markdown in slack isn't [desc](url), it's # # To accomodate this, we need to ensure we don't escape URLs that match _re_url_support = re.compile( r'(?P(?:<|\<)?[ \t]*' r'(?P(?:https?|mailto)://[^| \n]+)' r'(?:[ \t]*\|[ \t]*(?:(?P[^\n]+?)[ \t]*)?(?:>|\>)' r'|(?:>|\>)))', re.IGNORECASE) def __init__(self, access_token=None, token_a=None, token_b=None, token_c=None, targets=None, include_image=True, include_footer=True, use_blocks=None, **kwargs): """ Initialize Slack Object """ super().__init__(**kwargs) # Setup our mode self.mode = SlackMode.BOT if access_token else SlackMode.WEBHOOK if self.mode is SlackMode.WEBHOOK: self.access_token = None self.token_a = validate_regex( token_a, *self.template_tokens['token_a']['regex']) if not self.token_a: msg = 'An invalid Slack (first) Token ' \ '({}) was specified.'.format(token_a) self.logger.warning(msg) raise TypeError(msg) self.token_b = validate_regex( token_b, *self.template_tokens['token_b']['regex']) if not self.token_b: msg = 'An invalid Slack (second) Token ' \ '({}) was specified.'.format(token_b) self.logger.warning(msg) raise TypeError(msg) self.token_c = validate_regex( token_c, *self.template_tokens['token_c']['regex']) if not self.token_c: msg = 'An invalid Slack (third) Token ' \ '({}) was specified.'.format(token_c) self.logger.warning(msg) raise TypeError(msg) else: self.token_a = None self.token_b = None self.token_c = None self.access_token = validate_regex( access_token, *self.template_tokens['access_token']['regex']) if 
not self.access_token: msg = 'An invalid Slack OAuth Access Token ' \ '({}) was specified.'.format(access_token) self.logger.warning(msg) raise TypeError(msg) # Look the users up by their email address and map them back to their # id here for future queries (if needed). This allows people to # specify a full email as a recipient via slack self._lookup_users = {} self.use_blocks = parse_bool( use_blocks, self.template_args['blocks']['default']) \ if use_blocks is not None \ else self.template_args['blocks']['default'] # Build list of channels self.channels = parse_list(targets) if len(self.channels) == 0: # No problem; the webhook is smart enough to just notify the # channel it was created for; adding 'None' is just used as # a flag lower to not set the channels self.channels.append( None if self.mode is SlackMode.WEBHOOK else self.default_notification_channel) # Iterate over above list and store content accordingly self._re_formatting_rules = re.compile( r'(' + '|'.join(self._re_formatting_map.keys()) + r')', re.IGNORECASE, ) # Place a thumbnail image inline with the message body self.include_image = include_image # Place a footer with each post self.include_footer = include_footer return def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, **kwargs): """ Perform Slack Notification """ # error tracking (used for function return) has_error = False # # Prepare JSON Object (applicable to both WEBHOOK and BOT mode) # if self.use_blocks: # Our slack format _slack_format = 'mrkdwn' \ if self.notify_format == NotifyFormat.MARKDOWN \ else 'plain_text' payload = { 'username': self.user if self.user else self.app_id, 'attachments': [{ 'blocks': [{ 'type': 'section', 'text': { 'type': _slack_format, 'text': body } }], 'color': self.color(notify_type), }] } # Slack only accepts non-empty header sections if title: payload['attachments'][0]['blocks'].insert(0, { 'type': 'header', 'text': { 'type': 'plain_text', 'text': title, 'emoji': True } }) # Include the footer only if specified to do so if self.include_footer: # Acquire our to-be footer icon if configured to do so image_url = None if not self.include_image \ else self.image_url(notify_type) # Prepare our footer based on the block structure _footer = { 'type': 'context', 'elements': [{ 'type': _slack_format, 'text': self.app_id }] } if image_url: payload['icon_url'] = image_url _footer['elements'].insert(0, { 'type': 'image', 'image_url': image_url, 'alt_text': notify_type }) payload['attachments'][0]['blocks'].append(_footer) else: # # Legacy API Formatting # if self.notify_format == NotifyFormat.MARKDOWN: body = self._re_formatting_rules.sub( # pragma: no branch lambda x: self._re_formatting_map[x.group()], body, ) # Support , entries for match in self._re_channel_support.findall(body): # Swap back any ampersands previously updaated channel = match[1].strip() desc = match[2].strip() # Update our string body = re.sub( re.escape(match[0]), ''.format( channel=channel, desc=desc) if desc else ''.format(channel=channel), body, re.IGNORECASE) # Support <@userid|desc>, <@channel> entries for match in self._re_user_id_support.findall(body): # Swap back any ampersands previously updaated user = match[1].strip() desc = match[2].strip() # Update our string body = re.sub( re.escape(match[0]), '<@{user}|{desc}>'.format(user=user, desc=desc) if desc else '<@{user}>'.format(user=user), body, re.IGNORECASE) # Support , entries for match in self._re_url_support.findall(body): # Swap back any ampersands previously updaated url = 
match[1].replace('&', '&') desc = match[2].strip() # Update our string body = re.sub( re.escape(match[0]), '<{url}|{desc}>'.format(url=url, desc=desc) if desc else '<{url}>'.format(url=url), body, re.IGNORECASE) # Perform Formatting on title here; this is not needed for block # mode above title = self._re_formatting_rules.sub( # pragma: no branch lambda x: self._re_formatting_map[x.group()], title, ) # Prepare JSON Object (applicable to both WEBHOOK and BOT mode) payload = { 'username': self.user if self.user else self.app_id, # Use Markdown language 'mrkdwn': (self.notify_format == NotifyFormat.MARKDOWN), 'attachments': [{ 'title': title, 'text': body, 'color': self.color(notify_type), # Time 'ts': time(), }], } # Acquire our to-be footer icon if configured to do so image_url = None if not self.include_image \ else self.image_url(notify_type) if image_url: payload['icon_url'] = image_url # Include the footer only if specified to do so if self.include_footer: if image_url: payload['attachments'][0]['footer_icon'] = image_url # Include the footer only if specified to do so payload['attachments'][0]['footer'] = self.app_id if attach and self.attachment_support \ and self.mode is SlackMode.WEBHOOK: # Be friendly; let the user know why they can't send their # attachments if using the Webhook mode self.logger.warning( 'Slack Webhooks do not support attachments.') # Prepare our Slack URL (depends on mode) if self.mode is SlackMode.WEBHOOK: url = '{}/{}/{}/{}'.format( self.webhook_url, self.token_a, self.token_b, self.token_c, ) else: # SlackMode.BOT url = self.api_url.format('chat.postMessage') # Create a copy of the channel list channels = list(self.channels) attach_channel_list = [] while len(channels): channel = channels.pop(0) if channel is not None: # We'll perform a user lookup if we detect an email email = is_email(channel) if email: payload['channel'] = \ self.lookup_userid(email['full_email']) if not payload['channel']: # Move along; any notifications/logging would have # come from lookup_userid() has_error = True continue else: # Channel result = CHANNEL_RE.match(channel) if not result: # Channel over-ride was specified self.logger.warning( "The specified Slack target {} is invalid;" "skipping.".format(channel)) # Mark our failure has_error = True continue # Store oure content channel, thread_ts = \ result.group('channel'), result.group('thread_ts') if thread_ts: payload['thread_ts'] = thread_ts elif 'thread_ts' in payload: # Handle situations where one channel has a thread_id # specified, and the next does not. We do not want to # cary forward the last value specified del payload['thread_ts'] if channel[0] == '+': # Treat as encoded id if prefixed with a + payload['channel'] = channel[1:] elif channel[0] == '@': # Treat @ value 'as is' payload['channel'] = channel else: # Prefix with channel hash tag (if not already) payload['channel'] = \ channel if channel[0] == '#' \ else '#{}'.format(channel) response = self._send(url, payload) if not response: # Handle any error has_error = True continue # Store the valid channel or chat ID (for DMs) that will # be accepted by Slack's attachment method later. 
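            # (descriptive note: chat.postMessage echoes the resolved
            # conversation ID back under 'channel'; that is the value the
            # files.completeUploadExternal call below expects as channel_id)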
if response.get('channel'): attach_channel_list.append(response.get('channel')) self.logger.info( 'Sent Slack notification{}.'.format( ' to {}'.format(channel) if channel is not None else '')) if attach and self.attachment_support and \ self.mode is SlackMode.BOT and attach_channel_list: # Send our attachments (can only be done in bot mode) for no, attachment in enumerate(attach, start=1): # Perform some simple error checking if not attachment: # We could not access the attachment self.logger.error( 'Could not access attachment {}.'.format( attachment.url(privacy=True))) return False self.logger.debug( 'Posting Slack attachment {}'.format( attachment.url(privacy=True))) # Get the URL to which to upload the file. # https://api.slack.com/methods/files.getUploadURLExternal _params = { 'filename': attachment.name if attachment.name else f'file{no:03}.dat', 'length': len(attachment), } _url = self.api_url.format('files.getUploadURLExternal') response = self._send( _url, {}, http_method='get', params=_params ) if not ( response and response.get('file_id') and response.get('upload_url') ): self.logger.error('Could retrieve file upload URL.') # We failed to get an upload URL, take an early exit return False file_id = response.get('file_id') upload_url = response.get('upload_url') # Upload file response = self._send(upload_url, {}, attach=attachment) # Send file to channels # https://api.slack.com/methods/files.completeUploadExternal for channel_id in attach_channel_list: _payload = { 'files': [{ "id": file_id, "title": attachment.name, }], 'channel_id': channel_id } _url = self.api_url.format('files.completeUploadExternal') response = self._send(_url, _payload) # Expected response # { # "ok": true, # "files": [ # { # "id": "F123ABC456", # "title": "slack-test" # } # ] # } if not (response and response.get('files')): self.logger.error('Failed to send file to channel.') # We failed to send the file to the channel, # take an early exit return False return not has_error def lookup_userid(self, email): """ Takes an email address and attempts to resolve/acquire it's user id for notification purposes. """ if email in self._lookup_users: # We're done as entry has already been retrieved return self._lookup_users[email] if self.mode is not SlackMode.BOT: # You can not look up self.logger.warning( 'Emails can not be resolved to Slack User IDs unless you ' 'have a bot configured.') return None lookup_url = self.api_url.format('users.lookupByEmail') headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/x-www-form-urlencoded', 'Authorization': 'Bearer {}'.format(self.access_token), } # we pass in our email address as the argument params = { 'email': email, } self.logger.debug('Slack User Lookup POST URL: %s (cert_verify=%r)' % ( lookup_url, self.verify_certificate, )) self.logger.debug('Slack User Lookup Parameters: %s' % str(params)) # Initialize our HTTP JSON response response = {'ok': False} # Initialize our detected user id (also the response to this function) user_id = None # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.get( lookup_url, headers=headers, params=params, verify=self.verify_certificate, timeout=self.request_timeout, ) # Attachment posts return a JSON string try: response = loads(r.content) except (AttributeError, TypeError, ValueError): # ValueError = r.content is Unparsable # TypeError = r.content is None # AttributeError = r is None pass # We can get a 200 response, but still fail. 
A failure message # might look like this (missing bot permissions): # { # 'ok': False, # 'error': 'missing_scope', # 'needed': 'users:read.email', # 'provided': 'calls:write,chat:write' # } if r.status_code != requests.codes.ok \ or not (response and response.get('ok', False)): # We had a problem status_str = \ NotifySlack.http_response_code_lookup( r.status_code, SLACK_HTTP_ERROR_MAP) self.logger.warning( 'Failed to send Slack User Lookup:' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False # If we reach here, then we were successful in looking up # the user. A response generally looks like this: # { # 'ok': True, # 'user': { # 'id': 'J1ZQB9T9Y', # 'team_id': 'K1WR6TML2', # 'name': 'l2g', # 'deleted': False, # 'color': '9f69e7', # 'real_name': 'Chris C', # 'tz': 'America/New_York', # 'tz_label': 'Eastern Standard Time', # 'tz_offset': -18000, # 'profile': { # 'title': '', # 'phone': '', # 'skype': '', # 'real_name': 'Chris C', # 'real_name_normalized': # 'Chris C', # 'display_name': 'l2g', # 'display_name_normalized': 'l2g', # 'fields': None, # 'status_text': '', # 'status_emoji': '', # 'status_expiration': 0, # 'avatar_hash': 'g785e9c0ddf6', # 'email': 'lead2gold@gmail.com', # 'first_name': 'Chris', # 'last_name': 'C', # 'image_24': 'https://secure.gravatar.com/...', # 'image_32': 'https://secure.gravatar.com/...', # 'image_48': 'https://secure.gravatar.com/...', # 'image_72': 'https://secure.gravatar.com/...', # 'image_192': 'https://secure.gravatar.com/...', # 'image_512': 'https://secure.gravatar.com/...', # 'status_text_canonical': '', # 'team': 'K1WR6TML2' # }, # 'is_admin': True, # 'is_owner': True, # 'is_primary_owner': True, # 'is_restricted': False, # 'is_ultra_restricted': False, # 'is_bot': False, # 'is_app_user': False, # 'updated': 1603904274 # } # } # We're only interested in the id user_id = response['user']['id'] # Cache it for future self._lookup_users[email] = user_id self.logger.info( 'Email %s resolves to the Slack User ID: %s.', email, user_id) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred looking up Slack User.', ) self.logger.debug('Socket Exception: %s' % str(e)) # Return; we're done return None return user_id def _send(self, url, payload, attach=None, http_method='post', params=None, **kwargs): """ Wrapper to the requests (post) object """ self.logger.debug('Slack POST URL: %s (cert_verify=%r)' % ( url, self.verify_certificate, )) self.logger.debug('Slack Payload: %s' % str(payload)) headers = { 'User-Agent': self.app_id, 'Accept': 'application/json', } if not attach: headers['Content-Type'] = 'application/json; charset=utf-8' if self.mode is SlackMode.BOT: headers['Authorization'] = 'Bearer {}'.format(self.access_token) # Our response object response = {'ok': False} # Always call throttle before any remote server i/o is made self.throttle() # Our attachment path (if specified) files = None try: # Open our attachment path if required: if attach: files = {'file': (attach.name, open(attach.path, 'rb'))} r = requests.request( http_method, url, data=payload if attach else dumps(payload), headers=headers, files=files, verify=self.verify_certificate, timeout=self.request_timeout, params=params if params else None, ) # Posts return a JSON string try: response = loads(r.content) except (AttributeError, TypeError, ValueError): # ValueError = r.content is Unparsable # TypeError = r.content is None # 
AttributeError = r is None pass # Another response type is: # { # 'ok': False, # 'error': 'not_in_channel', # } status_okay = False if self.mode is SlackMode.BOT: status_okay = ( (response and response.get('ok', False)) or # Responses for file uploads look like this # 'OK - ' ( r.content and isinstance(r.content, bytes) and b'OK' in r.content ) ) elif r.content == b'ok': # The text 'ok' is returned if this is a Webhook request # So the below captures that as well. status_okay = True if r.status_code != requests.codes.ok or not status_okay: # We had a problem status_str = \ NotifySlack.http_response_code_lookup( r.status_code, SLACK_HTTP_ERROR_MAP) self.logger.warning( 'Failed to send{} to Slack: ' '{}{}error={}.'.format( (' ' + attach.name) if attach else '', status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) return False # Message Post Response looks like this: # { # "attachments": [ # { # "color": "3AA3E3", # "fallback": "test", # "id": 1, # "text": "my body", # "title": "my title", # "ts": 1573694687 # } # ], # "bot_id": "BAK4K23G5", # "icons": { # "image_48": "https://s3-us-west-2.amazonaws.com/... # }, # "subtype": "bot_message", # "text": "", # "ts": "1573694689.003700", # "type": "message", # "username": "Apprise" # } # files.completeUploadExternal responses look like this: # { # "ok": true, # "files": [ # { # "id": "F123ABC456", # "title": "slack-test" # } # ] # } except requests.RequestException as e: self.logger.warning( 'A Connection error occurred posting {}to Slack.'.format( attach.name if attach else '')) self.logger.debug('Socket Exception: %s' % str(e)) return False except (OSError, IOError) as e: self.logger.warning( 'An I/O error occurred while reading {}.'.format( attach.name if attach else 'attachment')) self.logger.debug('I/O Exception: %s' % str(e)) return False finally: # Close our file (if it's open) stored in the second element # of our files tuple (index 1) if files: files['file'][1].close() # Return the response for processing return response @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol, self.token_a, self.token_b, self.token_c, self.access_token, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
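        For reference, the generated forms (shown here with placeholder
        values only) are:

            {schema}://{botname}@{token_a}/{token_b}/{token_c}/{targets}/?{params}   (webhook mode)
            {schema}://{botname}@{access_token}/{targets}/?{params}                  (bot mode)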
""" # Define any URL parameters params = { 'image': 'yes' if self.include_image else 'no', 'footer': 'yes' if self.include_footer else 'no', 'blocks': 'yes' if self.use_blocks else 'no', } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Determine if there is a botname present botname = '' if self.user: botname = '{botname}@'.format( botname=NotifySlack.quote(self.user, safe=''), ) if self.mode == SlackMode.WEBHOOK: return '{schema}://{botname}{token_a}/{token_b}/{token_c}/'\ '{targets}/?{params}'.format( schema=self.secure_protocol, botname=botname, token_a=self.pprint(self.token_a, privacy, safe=''), token_b=self.pprint(self.token_b, privacy, safe=''), token_c=self.pprint(self.token_c, privacy, safe=''), targets='/'.join( [NotifySlack.quote(x, safe='') for x in self.channels]), params=NotifySlack.urlencode(params), ) # else -> self.mode == SlackMode.BOT: return '{schema}://{botname}{access_token}/{targets}/'\ '?{params}'.format( schema=self.secure_protocol, botname=botname, access_token=self.pprint(self.access_token, privacy, safe=''), targets='/'.join( [NotifySlack.quote(x, safe='') for x in self.channels]), params=NotifySlack.urlencode(params), ) def __len__(self): """ Returns the number of targets associated with this notification """ return len(self.channels) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # The first token is stored in the hostname token = NotifySlack.unquote(results['host']) # Get unquoted entries entries = NotifySlack.split_path(results['fullpath']) # Verify if our token_a us a bot token or part of a webhook: if token.startswith('xo'): # We're dealing with a bot results['access_token'] = token else: # We're dealing with a webhook results['token_a'] = token results['token_b'] = entries.pop(0) if entries else None results['token_c'] = entries.pop(0) if entries else None # assign remaining entries to the channels we wish to notify results['targets'] = entries # Support the token flag where you can set it to the bot token # or the webhook token (with slash delimiters) if 'token' in results['qsd'] and len(results['qsd']['token']): # Break our entries up into a list; we can ue the Channel # list delimiter above since it doesn't contain any characters # we don't otherwise accept anyway in our token entries = [x for x in filter( bool, CHANNEL_LIST_DELIM.split( NotifySlack.unquote(results['qsd']['token'])))] # check to see if we're dealing with a bot/user token if entries and entries[0].startswith('xo'): # We're dealing with a bot results['access_token'] = entries[0] results['token_a'] = None results['token_b'] = None results['token_c'] = None else: # Webhook results['access_token'] = None results['token_a'] = entries.pop(0) if entries else None results['token_b'] = entries.pop(0) if entries else None results['token_c'] = entries.pop(0) if entries else None # Support the 'to' variable so that we can support rooms this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += [x for x in filter( bool, CHANNEL_LIST_DELIM.split( NotifySlack.unquote(results['qsd']['to'])))] # Get Image Flag results['include_image'] = \ parse_bool(results['qsd'].get('image', True)) # Get Payload structure (use blocks?) 
if 'blocks' in results['qsd'] and len(results['qsd']['blocks']): results['use_blocks'] = parse_bool(results['qsd']['blocks']) # Get Footer Flag results['include_footer'] = \ parse_bool(results['qsd'].get('footer', True)) return results @staticmethod def parse_native_url(url): """ Support https://hooks.slack.com/services/TOKEN_A/TOKEN_B/TOKEN_C """ result = re.match( r'^https?://hooks\.slack\.com/services/' r'(?P[A-Z0-9]+)/' r'(?P[A-Z0-9]+)/' r'(?P[A-Z0-9]+)/?' r'(?P\?.+)?$', url, re.I) if result: return NotifySlack.parse_url( '{schema}://{token_a}/{token_b}/{token_c}/{params}'.format( schema=NotifySlack.secure_protocol, token_a=result.group('token_a'), token_b=result.group('token_b'), token_c=result.group('token_c'), params='' if not result.group('params') else result.group('params'))) return None apprise-1.9.3/apprise/plugins/smseagle.py000066400000000000000000000576431477231770000205140ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import re import requests from json import dumps, loads from itertools import chain from .base import NotifyBase from ..common import NotifyType from .. 
import exception from ..utils.parse import ( validate_regex, is_phone_no, parse_phone_no, parse_bool) from ..url import PrivacyMode from ..locale import gettext_lazy as _ GROUP_REGEX = re.compile( r'^\s*(\#|\%35)(?P[a-z0-9_-]+)', re.I) CONTACT_REGEX = re.compile( r'^\s*(\@|\%40)?(?P[a-z0-9_-]+)', re.I) # Priorities class SMSEaglePriority: NORMAL = 0 HIGH = 1 SMSEAGLE_PRIORITIES = ( SMSEaglePriority.NORMAL, SMSEaglePriority.HIGH, ) SMSEAGLE_PRIORITY_MAP = { # short for 'normal' 'normal': SMSEaglePriority.NORMAL, # short for 'high' '+': SMSEaglePriority.HIGH, 'high': SMSEaglePriority.HIGH, } class SMSEagleCategory: """ We define the different category types that we can notify via SMS Eagle """ PHONE = 'phone' GROUP = 'group' CONTACT = 'contact' SMSEAGLE_CATEGORIES = ( SMSEagleCategory.PHONE, SMSEagleCategory.GROUP, SMSEagleCategory.CONTACT, ) class NotifySMSEagle(NotifyBase): """ A wrapper for SMSEagle Notifications """ # The default descriptive name associated with the Notification service_name = 'SMS Eagle' # The services URL service_url = 'https://smseagle.eu' # The default protocol protocol = 'smseagle' # The default protocol secure_protocol = 'smseagles' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_smseagle' # The path we send our notification to notify_path = '/jsonrpc/sms' # Support attachments attachment_support = True # The maxumum length of the text message # The actual limit is 160 but SMSEagle looks after the handling # of large messages in it's upstream service body_maxlen = 1200 # The maximum targets to include when doing batch transfers default_batch_size = 10 # We don't support titles for SMSEagle notifications title_maxlen = 0 # Define object templates templates = ( '{schema}://{token}@{host}/{targets}', '{schema}://{token}@{host}:{port}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'host': { 'name': _('Hostname'), 'type': 'string', 'required': True, }, 'port': { 'name': _('Port'), 'type': 'int', 'min': 1, 'max': 65535, }, 'token': { 'name': _('Access Token'), 'type': 'string', 'required': True, }, 'target_phone': { 'name': _('Target Phone No'), 'type': 'string', 'prefix': '+', 'regex': (r'^[0-9\s)(+-]+$', 'i'), 'map_to': 'targets', }, 'target_group': { 'name': _('Target Group ID'), 'type': 'string', 'prefix': '#', 'regex': (r'^[a-z0-9_-]+$', 'i'), 'map_to': 'targets', }, 'target_contact': { 'name': _('Target Contact'), 'type': 'string', 'prefix': '@', 'regex': (r'^[a-z0-9_-]+$', 'i'), 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', 'required': True, } }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'token': { 'alias_of': 'token', }, 'batch': { 'name': _('Batch Mode'), 'type': 'bool', 'default': False, }, 'status': { 'name': _('Show Status'), 'type': 'bool', 'default': False, }, 'test': { 'name': _('Test Only'), 'type': 'bool', 'default': False, }, 'flash': { 'name': _('Flash'), 'type': 'bool', 'default': False, }, 'priority': { 'name': _('Priority'), 'type': 'choice:int', 'values': SMSEAGLE_PRIORITIES, 'default': SMSEaglePriority.NORMAL, }, }) def __init__(self, token=None, targets=None, priority=None, batch=False, status=False, flash=False, test=False, **kwargs): """ Initialize SMSEagle Object """ super().__init__(**kwargs) # Prepare Flash Mode Flag self.flash = flash # Prepare Test Mode Flag self.test = test # Prepare Batch Mode Flag 
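        # (when enabled, send() below packs up to default_batch_size targets
        # into a single API request)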
self.batch = batch # Set Status type self.status = status # Parse our targets self.target_phones = list() self.target_groups = list() self.target_contacts = list() # Used for URL generation afterwards only self.invalid_targets = list() # We always use a token if provided self.token = validate_regex(self.user if not token else token) if not self.token: msg = \ 'An invalid SMSEagle Access Token ({}) was specified.'.format( self.user if not token else token) self.logger.warning(msg) raise TypeError(msg) # # Priority # try: # Acquire our priority if we can: # - We accept both the integer form as well as a string # representation self.priority = int(priority) except TypeError: # NoneType means use Default; this is an okay exception self.priority = self.template_args['priority']['default'] except ValueError: # Input is a string; attempt to get the lookup from our # priority mapping priority = priority.lower().strip() # This little bit of black magic allows us to match against # low, lo, l (for low); # normal, norma, norm, nor, no, n (for normal) # ... etc result = next((key for key in SMSEAGLE_PRIORITY_MAP.keys() if key.startswith(priority)), None) \ if priority else None # Now test to see if we got a match if not result: msg = 'An invalid SMSEagle priority ' \ '({}) was specified.'.format(priority) self.logger.warning(msg) raise TypeError(msg) # store our successfully looked up priority self.priority = SMSEAGLE_PRIORITY_MAP[result] if self.priority is not None and \ self.priority not in SMSEAGLE_PRIORITY_MAP.values(): msg = 'An invalid SMSEagle priority ' \ '({}) was specified.'.format(priority) self.logger.warning(msg) raise TypeError(msg) # Validate our targerts for target in parse_phone_no(targets): # Validate targets and drop bad ones: # Allow 9 digit numbers (without country code) result = is_phone_no(target, min_len=9) if result: # store valid phone number self.target_phones.append( '{}{}'.format( '' if target[0] != '+' else '+', result['full'])) continue result = GROUP_REGEX.match(target) if result: # Just store group information self.target_groups.append(result.group('group')) continue result = CONTACT_REGEX.match(target) if result: # Just store contact information self.target_contacts.append(result.group('contact')) continue self.logger.warning( 'Dropped invalid phone/group/contact ' '({}) specified.'.format(target), ) self.invalid_targets.append(target) continue return def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, **kwargs): """ Perform SMSEagle Notification """ if not self.target_groups and not self.target_phones \ and not self.target_contacts: # There were no services to notify self.logger.warning( 'There were no SMSEagle targets to notify.') return False # error tracking (used for function return) has_error = False attachments = [] if attach and self.attachment_support: for attachment in attach: # Perform some simple error checking if not attachment: # We could not access the attachment self.logger.error( 'Could not access SMSEagle attachment {}.'.format( attachment.url(privacy=True))) return False if not re.match(r'^image/.*', attachment.mimetype, re.I): # Only support images at this time self.logger.warning( 'Ignoring unsupported SMSEagle attachment {}.'.format( attachment.url(privacy=True))) continue try: # Prepare our Attachment in Base64 attachments.append({ 'content_type': attachment.mimetype, 'content': attachment.base64(), }) except exception.AppriseException: # We could not access the attachment self.logger.error( 'Could not access SMSEagle 
attachment {}.'.format( attachment.url(privacy=True))) return False self.logger.debug( 'Appending SMSEagle attachment {}'.format( attachment.url(privacy=True))) # Prepare our headers headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json', } # Prepare our payload params_template = { # Our Access Token 'access_token': self.token, # The message to send (populated below) "message": None, # 0 = normal priority, 1 = high priority "highpriority": self.priority, # Support unicode characters "unicode": 1, # sms or mms (if attachment) "message_type": 'sms', # Response Types: # simple: format response as simple object with one result field # extended: format response as extended JSON object "responsetype": 'extended', # SMS will be sent as flash message (1 = yes, 0 = no) "flash": 1 if self.flash else 0, # Message Simulation "test": 1 if self.test else 0, } # Set our schema schema = 'https' if self.secure else 'http' # Construct our URL notify_url = '%s://%s' % (schema, self.host) if isinstance(self.port, int): notify_url += ':%d' % self.port notify_url += self.notify_path # Send in batches if identified to do so batch_size = 1 if not self.batch else self.default_batch_size notify_by = { SMSEagleCategory.PHONE: { "method": "sms.send_sms", 'target': 'to', }, SMSEagleCategory.GROUP: { "method": "sms.send_togroup", 'target': 'groupname', }, SMSEagleCategory.CONTACT: { "method": "sms.send_tocontact", 'target': 'contactname', }, } # categories separated into a tuple since notify_by.keys() # returns an unpredicable list in Python 2.7 which causes # tests to fail every so often for category in SMSEAGLE_CATEGORIES: # Create a copy of our template payload = { 'method': notify_by[category]['method'], 'params': { notify_by[category]['target']: None, }, } # Apply Template payload['params'].update(params_template) # Set our Message payload["params"]["message"] = "{}{}".format( '' if not self.status else '{} '.format( self.asset.ascii(notify_type)), body) if attachments: # Store our attachments payload['params']['message_type'] = 'mms' payload['params']['attachments'] = attachments targets = getattr(self, 'target_{}s'.format(category)) for index in range(0, len(targets), batch_size): # Prepare our recipients payload['params'][notify_by[category]['target']] = \ ','.join(targets[index:index + batch_size]) self.logger.debug('SMSEagle POST URL: %s (cert_verify=%r)' % ( notify_url, self.verify_certificate, )) self.logger.debug('SMSEagle Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( notify_url, data=dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) try: content = loads(r.content) # Store our status status_str = str(content['result']) except (AttributeError, TypeError, ValueError, KeyError): # ValueError = r.content is Unparsable # TypeError = r.content is None # AttributeError = r is None # KeyError = 'result' is not found in result content = {} # The result set can be a list such as: # b'{"result":[{"message_id":4753,"status":"ok"}]}' # # It can also just be as a dictionary: # b'{"result":{"message_id":4753,"status":"ok"}}' # # The below code handles both cases only only fails if a # non-ok value was returned if r.status_code not in ( requests.codes.ok, requests.codes.created) or \ not isinstance(content.get('result'), (dict, list)) or \ (isinstance(content.get('result'), dict) and content['result'].get('status') != 'ok') or \ (isinstance(content.get('result'), list) and 
next((True for entry in content.get('result') if isinstance(entry, dict) and entry.get('status') != 'ok'), False ) # pragma: no cover ): # We had a problem status_str = content.get('result') \ if content.get('result') else \ NotifySMSEagle.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to send {} {} SMSEagle {} notification: ' '{}{}error={}.'.format( len(targets[index:index + batch_size]), 'to {}'.format(targets[index]) if batch_size == 1 else '(s)', category, status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response {} Details:\r\n{}'.format( category.upper(), r.content)) # Mark our failure has_error = True continue else: self.logger.info( 'Sent {} SMSEagle {} notification{}.' .format( len(targets[index:index + batch_size]), category, ' to {}'.format(targets[index]) if batch_size == 1 else '(s)', )) except requests.RequestException as e: self.logger.warning( 'A Connection error occured sending {} SMSEagle ' '{} notification(s).'.format( len(targets[index:index + batch_size]), category)) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure has_error = True continue return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol if self.secure else self.protocol, self.token, self.host, self.port, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'batch': 'yes' if self.batch else 'no', 'status': 'yes' if self.status else 'no', 'flash': 'yes' if self.flash else 'no', 'test': 'yes' if self.test else 'no', } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) default_priority = self.template_args['priority']['default'] if self.priority is not None: # Store our priority; but only if it was specified params['priority'] = \ next((key for key, value in SMSEAGLE_PRIORITY_MAP.items() if value == self.priority), default_priority) # pragma: no cover # Default port handling default_port = 443 if self.secure else 80 return '{schema}://{token}@{hostname}{port}/{targets}?{params}'.format( schema=self.secure_protocol if self.secure else self.protocol, token=self.pprint( self.token, privacy, mode=PrivacyMode.Secret, safe=''), # never encode hostname since we're expecting it to be a valid one hostname=self.host, port='' if self.port is None or self.port == default_port else ':{}'.format(self.port), targets='/'.join( [NotifySMSEagle.quote(x, safe='#@') for x in chain( # Pass phones directly as is self.target_phones, # Contacts ['@{}'.format(x) for x in self.target_contacts], # Groups ['#{}'.format(x) for x in self.target_groups], # Pass along the same invalid entries as were provided self.invalid_targets, )]), params=NotifySMSEagle.urlencode(params), ) def __len__(self): """ Returns the number of targets associated with this notification """ # # Factor batch into calculation # batch_size = 1 if not self.batch else self.default_batch_size if batch_size > 1: # Batches can only be sent by group (you can't combine groups into # a single batch) total_targets = 0 for c in SMSEAGLE_CATEGORIES: targets = len(getattr(self, f'target_{c}s')) total_targets += int(targets / batch_size) + \ (1 if targets % batch_size else 0) return total_targets # Normal batch count; just count the targets return len(self.target_phones) + 
len(self.target_contacts) + \ len(self.target_groups) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Get our entries; split_path() looks after unquoting content for us # by default results['targets'] = \ NotifySMSEagle.split_path(results['fullpath']) if 'token' in results['qsd'] and len(results['qsd']['token']): results['token'] = NotifySMSEagle.unquote(results['qsd']['token']) elif not results['password'] and results['user']: results['token'] = NotifySMSEagle.unquote(results['user']) # Support the 'to' variable so that we can support targets this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifySMSEagle.parse_phone_no(results['qsd']['to']) # Get Batch Mode Flag results['batch'] = \ parse_bool(results['qsd'].get('batch', False)) # Get Flash Mode Flag results['flash'] = \ parse_bool(results['qsd'].get('flash', False)) # Get Test Mode Flag results['test'] = \ parse_bool(results['qsd'].get('test', False)) # Get status switch results['status'] = \ parse_bool(results['qsd'].get('status', False)) # Get priority if 'priority' in results['qsd'] and len(results['qsd']['priority']): results['priority'] = \ NotifySMSEagle.unquote(results['qsd']['priority']) return results apprise-1.9.3/apprise/plugins/smsmanager.py000066400000000000000000000340211477231770000210320ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # API Reference: https://smsmanager.cz/api/http#send # To use this service you will need a SMS Manager account # You will need credits (new accounts start with a few) # https://smsmanager.cz # 1. Sign up and get test credit # 2. Generate an API key in web administration. 
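#
# With an API key in hand, an Apprise URL can then be assembled; the values
# below are illustrative placeholders only (the accepted form follows the
# templates defined further down in this module):
#
#   smsmgr://{apikey}@{phone_no}
#   smsmgr://{apikey}@{phone_no1}/{phone_no2}?gateway=economy&batch=yes
#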
import requests from .base import NotifyBase from ..common import NotifyType from ..utils.parse import ( is_phone_no, parse_phone_no, parse_bool, validate_regex) from ..locale import gettext_lazy as _ class SMSManagerGateway(object): """ The different gateway values """ HIGH = "high" ECONOMY = "economy" LOW = "low" DIRECT = "direct" # Used for verification purposes SMS_MANAGER_GATEWAYS = ( SMSManagerGateway.HIGH, SMSManagerGateway.ECONOMY, SMSManagerGateway.LOW, SMSManagerGateway.DIRECT, ) class NotifySMSManager(NotifyBase): """ A wrapper for SMS Manager Notifications """ # The default descriptive name associated with the Notification service_name = 'SMS Manager' # The services URL service_url = 'https://smsmanager.cz' # All notification requests are secure secure_protocol = ('smsmgr', 'smsmanager',) # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_sms_manager' # SMS Manager uses the http protocol with JSON requests notify_url = 'https://http-api.smsmanager.cz/Send' # The maximum amount of texts that can go out in one batch default_batch_size = 4000 # The maximum length of the body body_maxlen = 160 # A title can not be used for SMS Messages. Setting this to zero will # cause any title (if defined) to get placed into the message body. title_maxlen = 0 # Define object templates templates = ( '{schema}://{apikey}@{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'apikey': { 'name': _('API Key'), 'type': 'string', 'private': True, 'required': True, }, 'target_phone': { 'name': _('Target Phone No'), 'type': 'string', 'prefix': '+', 'regex': (r'^[0-9\s)(+-]+$', 'i'), 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', 'required': True, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'key': { 'alias_of': 'apikey', }, 'to': { 'alias_of': 'targets', }, 'from': { 'name': _('From Phone No'), 'type': 'string', 'regex': (r'^\+?[0-9\s)(+-]+$', 'i'), 'map_to': 'sender', }, 'sender': { 'alias_of': 'from', }, 'gateway': { 'name': _('Gateway'), 'type': 'choice:string', 'values': SMS_MANAGER_GATEWAYS, 'default': SMS_MANAGER_GATEWAYS[0], }, 'batch': { 'name': _('Batch Mode'), 'type': 'bool', 'default': False, }, }) def __init__(self, apikey=None, sender=None, targets=None, batch=None, gateway=None, **kwargs): """ Initialize SMS Manager Object """ super(NotifySMSManager, self).__init__(**kwargs) self.apikey = validate_regex(apikey) if not self.apikey: msg = 'An invalid API Key ({}) was specified.'.format(apikey) self.logger.warning(msg) raise TypeError(msg) # Setup our gateway self.gateway = self.template_args['gateway']['default'] \ if not isinstance(gateway, str) else gateway.lower() if self.gateway not in SMS_MANAGER_GATEWAYS: msg = 'The Gateway specified ({}) is invalid.'.format(gateway) self.logger.warning(msg) raise TypeError(msg) # Define whether or not we should operate in a batch mode self.batch = self.template_args['batch']['default'] \ if batch is None else bool(batch) # Maximum 11 characters and must be approved by administrators of site self.sender = sender[0:11] if isinstance(sender, str) else None # Parse our targets self.targets = list() for target in parse_phone_no(targets): # Parse each phone number we found # It is documented that numbers with a length of 9 characters are # supplemented by "420". result = is_phone_no(target, min_len=9) if result: # Carry forward '+' if defined, otherwise do not... 
self.targets.append( ('+' + result['full']) if target.lstrip()[0] == '+' else result['full']) continue self.logger.warning( 'Dropped invalid phone # ({}) specified.'.format(target), ) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform SMS Manager Notification """ if not self.targets: # We have nothing to notify self.logger.warning('There are no SMS Manager targets to notify') return False # error tracking (used for function return) has_error = False # Send in batches if identified to do so batch_size = 1 if not self.batch else self.default_batch_size # Prepare our headers headers = { 'User-Agent': self.app_id, } # Prepare our targets targets = list(self.targets) if batch_size == 1 else \ [self.targets[index:index + batch_size] for index in range(0, len(self.targets), batch_size)] while len(targets): # Get our target to notify target = targets.pop(0) # Prepare our payload # Note: Payload is assembled inside of our while-loop due to # mock testing issues (payload singleton isn't persistent # when performing follow up checks on the params object. payload = { 'apikey': self.apikey, 'gateway': self.gateway, # The number gets populated in the loop below 'number': None, 'message': body, } if self.sender: # Sender is ony set if specified payload['sender'] = self.sender # Printable target details if isinstance(target, list): p_target = '{} targets'.format(len(target)) # Prepare our target numbers payload['number'] = ';'.join(target) else: p_target = target # Prepare our target numbers payload['number'] = target # Some Debug Logging self.logger.debug( 'SMS Manager POST URL: {} (cert_verify={})'.format( self.notify_url, self.verify_certificate)) self.logger.debug('SMS Manager Payload: {}' .format(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.get( self.notify_url, params=payload, headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyBase.http_response_code_lookup(r.status_code) # set up our status code to use status_code = r.status_code self.logger.warning( 'Failed to send SMS Manager notification to {}: ' '{}{}error={}.'.format( p_target, status_str, ', ' if status_str else '', status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure has_error = True continue else: self.logger.info( 'Sent SMS Manager notification to {}.'.format( p_target)) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending SMS Manager: to %s ', p_target) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure has_error = True continue return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol[0], self.apikey) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
""" # Define any URL parameters params = { 'batch': 'yes' if self.batch else 'no', 'gateway': self.gateway, } if self.sender: # Set our sender if it was set params['sender'] = self.sender # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) return '{schema}://{apikey}@{targets}' \ '?{params}'.format( schema=self.secure_protocol[0], apikey=self.pprint(self.apikey, privacy, safe=''), targets='/'.join([ NotifySMSManager.quote('{}'.format(x), safe='+') for x in self.targets]), params=NotifySMSManager.urlencode(params)) def __len__(self): """ Returns the number of targets associated with this notification """ # # Factor batch into calculation # # Note: Groups always require a separate request (and can not be # included in batch calculations) batch_size = 1 if not self.batch else self.default_batch_size targets = len(self.targets) if batch_size > 1: targets = int(targets / batch_size) + \ (1 if targets % batch_size else 0) return targets @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Get our API Key results['apikey'] = NotifySMSManager.unquote(results['user']) # Store our targets results['targets'] = [ *NotifySMSManager.parse_phone_no(results['host']), *NotifySMSManager.split_path(results['fullpath'])] # The 'from' makes it easier to use yaml configuration if 'from' in results['qsd'] and len(results['qsd']['from']): results['sender'] = \ NotifySMSManager.unquote(results['qsd']['from']) elif 'sender' in results['qsd'] and len(results['qsd']['sender']): # Support sender= value as well to align with SMS Manager API results['sender'] = \ NotifySMSManager.unquote(results['qsd']['sender']) # Support the 'to' variable so that we can support targets this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifySMSManager.parse_phone_no(results['qsd']['to']) if 'key' in results['qsd'] and len(results['qsd']['key']): results['apikey'] = \ NotifySMSManager.unquote(results['qsd']['key']) # Get Batch Mode Flag results['batch'] = \ parse_bool(results['qsd'].get( 'batch', NotifySMSManager.template_args['batch']['default'])) # Define our gateway if 'gateway' in results['qsd'] and len(results['qsd']['gateway']): results['gateway'] = \ NotifySMSManager.unquote(results['qsd']['gateway']) return results apprise-1.9.3/apprise/plugins/smtp2go.py000066400000000000000000000470201477231770000202730ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. 
# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # Signup @ https://smtp2go.com (free accounts available) # # From your dashboard, you can generate an API Key if you haven't already # at https://app.smtp2go.com/settings/apikeys/ # The API Key from here which will look something like: # api-60F0DD0AB5BA11ABA421F23C91C88EF4 # # Knowing this, you can buid your smtp2go url as follows: # smtp2go://{user}@{domain}/{apikey} # smtp2go://{user}@{domain}/{apikey}/{email} # # You can email as many addresses as you want as: # smtp2go://{user}@{domain}/{apikey}/{email1}/{email2}/{emailN} # # The {user}@{domain} effectively assembles the 'from' email address # the email will be transmitted from. If no email address is specified # then it will also become the 'to' address as well. # import requests from json import dumps from email.utils import formataddr from .base import NotifyBase from .. import exception from ..common import NotifyType from ..common import NotifyFormat from ..utils.parse import ( parse_emails, parse_bool, is_email, validate_regex) from ..locale import gettext_lazy as _ SMTP2GO_HTTP_ERROR_MAP = { 429: 'To many requests.', } class NotifySMTP2Go(NotifyBase): """ A wrapper for SMTP2Go Notifications """ # The default descriptive name associated with the Notification service_name = 'SMTP2Go' # The services URL service_url = 'https://www.smtp2go.com/' # All notification requests are secure secure_protocol = 'smtp2go' # SMTP2Go advertises they allow 300 requests per minute. 
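    # Spacing requests evenly across a minute therefore gives a minimum delay
    # (in seconds) between requests of: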
# 60/300 = 0.2 request_rate_per_sec = 0.20 # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_smtp2go' # Notify URL notify_url = 'https://api.smtp2go.com/v3/email/send' # Support attachments attachment_support = True # Default Notify Format notify_format = NotifyFormat.HTML # The maximum amount of emails that can reside within a single # batch transfer default_batch_size = 100 # Define object templates templates = ( '{schema}://{user}@{host}:{apikey}/', '{schema}://{user}@{host}:{apikey}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'user': { 'name': _('User Name'), 'type': 'string', 'required': True, }, 'host': { 'name': _('Domain'), 'type': 'string', 'required': True, }, 'apikey': { 'name': _('API Key'), 'type': 'string', 'private': True, 'required': True, }, 'targets': { 'name': _('Target Emails'), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'name': { 'name': _('From Name'), 'type': 'string', 'map_to': 'from_name', }, 'to': { 'alias_of': 'targets', }, 'cc': { 'name': _('Carbon Copy'), 'type': 'list:string', }, 'bcc': { 'name': _('Blind Carbon Copy'), 'type': 'list:string', }, 'batch': { 'name': _('Batch Mode'), 'type': 'bool', 'default': False, }, }) # Define any kwargs we're using template_kwargs = { 'headers': { 'name': _('Email Header'), 'prefix': '+', }, } def __init__(self, apikey, targets, cc=None, bcc=None, from_name=None, headers=None, batch=False, **kwargs): """ Initialize SMTP2Go Object """ super().__init__(**kwargs) # API Key (associated with project) self.apikey = validate_regex(apikey) if not self.apikey: msg = 'An invalid SMTP2Go API Key ' \ '({}) was specified.'.format(apikey) self.logger.warning(msg) raise TypeError(msg) # Validate our username if not self.user: msg = 'No SMTP2Go username was specified.' 
self.logger.warning(msg) raise TypeError(msg) # Acquire Email 'To' self.targets = list() # Acquire Carbon Copies self.cc = set() # Acquire Blind Carbon Copies self.bcc = set() # For tracking our email -> name lookups self.names = {} self.headers = {} if headers: # Store our extra headers self.headers.update(headers) # Prepare Batch Mode Flag self.batch = batch # Get our From username (if specified) self.from_name = from_name # Get our from email address self.from_addr = '{user}@{host}'.format(user=self.user, host=self.host) if not is_email(self.from_addr): # Parse Source domain based on from_addr msg = 'Invalid ~From~ email format: {}'.format(self.from_addr) self.logger.warning(msg) raise TypeError(msg) if targets: # Validate recipients (to:) and drop bad ones: for recipient in parse_emails(targets): result = is_email(recipient) if result: self.targets.append( (result['name'] if result['name'] else False, result['full_email'])) continue self.logger.warning( 'Dropped invalid To email ' '({}) specified.'.format(recipient), ) else: # If our target email list is empty we want to add ourselves to it self.targets.append( (self.from_name if self.from_name else False, self.from_addr)) # Validate recipients (cc:) and drop bad ones: for recipient in parse_emails(cc): email = is_email(recipient) if email: self.cc.add(email['full_email']) # Index our name (if one exists) self.names[email['full_email']] = \ email['name'] if email['name'] else False continue self.logger.warning( 'Dropped invalid Carbon Copy email ' '({}) specified.'.format(recipient), ) # Validate recipients (bcc:) and drop bad ones: for recipient in parse_emails(bcc): email = is_email(recipient) if email: self.bcc.add(email['full_email']) # Index our name (if one exists) self.names[email['full_email']] = \ email['name'] if email['name'] else False continue self.logger.warning( 'Dropped invalid Blind Carbon Copy email ' '({}) specified.'.format(recipient), ) def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, **kwargs): """ Perform SMTP2Go Notification """ if not self.targets: # There is no one to email; we're done self.logger.warning( 'There are no Email recipients to notify') return False # error tracking (used for function return) has_error = False # Send in batches if identified to do so batch_size = 1 if not self.batch else self.default_batch_size # Prepare our headers headers = { 'User-Agent': self.app_id, 'Accept': 'application/json', 'Content-Type': 'application/json', } # Track our potential attachments attachments = [] if attach and self.attachment_support: for no, attachment in enumerate(attach, start=1): # Perform some simple error checking if not attachment: # We could not access the attachment self.logger.error( 'Could not access SMTP2Go attachment {}.'.format( attachment.url(privacy=True))) return False try: # Format our attachment attachments.append({ 'filename': attachment.name if attachment.name else f'file{no:03}.dat', 'fileblob': attachment.base64(), 'mimetype': attachment.mimetype, }) except exception.AppriseException: # We could not access the attachment self.logger.error( 'Could not access SMTP2Go attachment {}.'.format( attachment.url(privacy=True))) return False self.logger.debug( 'Appending SMTP2Go attachment {}'.format( attachment.url(privacy=True))) sender = formataddr( (self.from_name if self.from_name else False, self.from_addr), charset='utf-8') # Prepare our payload payload = { # API Key 'api_key': self.apikey, # Base payload options 'sender': sender, 'subject': title, # our To array 
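            # (filled in per batch of recipients in the delivery loop below)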
'to': [], } if attachments: payload['attachments'] = attachments if self.notify_format == NotifyFormat.HTML: payload['html_body'] = body else: payload['text_body'] = body # Create a copy of the targets list emails = list(self.targets) for index in range(0, len(emails), batch_size): # Initialize our cc list cc = (self.cc - self.bcc) # Initialize our bcc list bcc = set(self.bcc) # Initialize our to list to = list() for to_addr in self.targets[index:index + batch_size]: # Strip target out of cc list if in To cc = (cc - set([to_addr[1]])) # Strip target out of bcc list if in To bcc = (bcc - set([to_addr[1]])) # Prepare our `to` to.append(formataddr(to_addr, charset='utf-8')) # Prepare our To payload['to'] = to if cc: # Format our cc addresses to support the Name field payload['cc'] = [formataddr( (self.names.get(addr, False), addr), charset='utf-8') for addr in cc] # Format our bcc addresses to support the Name field if bcc: # set our bcc variable (convert to list first so it's # JSON serializable) payload['bcc'] = list(bcc) # Store our header entries if defined into the payload # in their payload if self.headers: payload['custom_headers'] = \ [{'header': k, 'value': v} for k, v in self.headers.items()] # Some Debug Logging self.logger.debug('SMTP2Go POST URL: {} (cert_verify={})'.format( self.notify_url, self.verify_certificate)) self.logger.debug('SMTP2Go Payload: {}' .format(payload)) # For logging output of success and errors; we get a head count # of our outbound details: verbose_dest = ', '.join( [x[1] for x in self.targets[index:index + batch_size]]) \ if len(self.targets[index:index + batch_size]) <= 3 \ else '{} recipients'.format( len(self.targets[index:index + batch_size])) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( self.notify_url, data=dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyBase.http_response_code_lookup( r.status_code, SMTP2GO_HTTP_ERROR_MAP) self.logger.warning( 'Failed to send SMTP2Go notification to {}: ' '{}{}error={}.'.format( verbose_dest, status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure has_error = True continue else: self.logger.info( 'Sent SMTP2Go notification to {}.'.format( verbose_dest)) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending SMTP2Go:%s ' % ( verbose_dest) + 'notification.' ) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure has_error = True continue except (OSError, IOError) as e: self.logger.warning( 'An I/O error occurred while reading attachments') self.logger.debug('I/O Exception: %s' % str(e)) # Mark our failure has_error = True continue return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.user, self.host, self.apikey) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
""" # Define any URL parameters params = { 'batch': 'yes' if self.batch else 'no', } # Append our headers into our parameters params.update({'+{}'.format(k): v for k, v in self.headers.items()}) # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) if self.from_name is not None: # from_name specified; pass it back on the url params['name'] = self.from_name if self.cc: # Handle our Carbon Copy Addresses params['cc'] = ','.join( ['{}{}'.format( '' if not e not in self.names else '{}:'.format(self.names[e]), e) for e in self.cc]) if self.bcc: # Handle our Blind Carbon Copy Addresses params['bcc'] = ','.join(self.bcc) # a simple boolean check as to whether we display our target emails # or not has_targets = \ not (len(self.targets) == 1 and self.targets[0][1] == self.from_addr) return '{schema}://{user}@{host}/{apikey}/{targets}?{params}'.format( schema=self.secure_protocol, host=self.host, user=NotifySMTP2Go.quote(self.user, safe=''), apikey=self.pprint(self.apikey, privacy, safe=''), targets='' if not has_targets else '/'.join( [NotifySMTP2Go.quote('{}{}'.format( '' if not e[0] else '{}:'.format(e[0]), e[1]), safe='') for e in self.targets]), params=NotifySMTP2Go.urlencode(params)) def __len__(self): """ Returns the number of targets associated with this notification """ # # Factor batch into calculation # batch_size = 1 if not self.batch else self.default_batch_size targets = len(self.targets) if batch_size > 1: targets = int(targets / batch_size) + \ (1 if targets % batch_size else 0) return targets if targets > 0 else 1 @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Get our entries; split_path() looks after unquoting content for us # by default results['targets'] = NotifySMTP2Go.split_path(results['fullpath']) # Our very first entry is reserved for our api key try: results['apikey'] = results['targets'].pop(0) except IndexError: # We're done - no API Key found results['apikey'] = None if 'name' in results['qsd'] and len(results['qsd']['name']): # Extract from name to associate with from address results['from_name'] = \ NotifySMTP2Go.unquote(results['qsd']['name']) # Handle 'to' email address if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'].append(results['qsd']['to']) # Handle Carbon Copy Addresses if 'cc' in results['qsd'] and len(results['qsd']['cc']): results['cc'] = results['qsd']['cc'] # Handle Blind Carbon Copy Addresses if 'bcc' in results['qsd'] and len(results['qsd']['bcc']): results['bcc'] = results['qsd']['bcc'] # Add our Meta Headers that the user can provide with their outbound # emails results['headers'] = {NotifyBase.unquote(x): NotifyBase.unquote(y) for x, y in results['qsd+'].items()} # Get Batch Mode Flag results['batch'] = \ parse_bool(results['qsd'].get( 'batch', NotifySMTP2Go.template_args['batch']['default'])) return results apprise-1.9.3/apprise/plugins/sns.py000066400000000000000000000576501477231770000175150ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. 
Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import re import hmac import requests from hashlib import sha256 from datetime import datetime from datetime import timezone from collections import OrderedDict from xml.etree import ElementTree from itertools import chain from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyType from ..utils.parse import is_phone_no, parse_list, validate_regex from ..locale import gettext_lazy as _ # Topic Detection # Summary: 256 Characters max, only alpha/numeric plus underscore (_) and # dash (-) additionally allowed. # # Soure: https://docs.aws.amazon.com/AWSSimpleQueueService/latest\ # /SQSDeveloperGuide/sqs-limits.html#limits-queues # # Allow a starting hashtag (#) specification to help eliminate possible # ambiguity between a topic that is comprised of all digits and a phone number IS_TOPIC = re.compile(r'^#?(?P[A-Za-z0-9_-]+)\s*$') # Because our AWS Access Key Secret contains slashes, we actually use the # region as a delimiter. This is a bit hacky; but it's much easier than having # users of this product search though this Access Key Secret and escape all # of the forward slashes! IS_REGION = re.compile( r'^\s*(?P[a-z]{2})-(?P[a-z-]+?)-(?P[0-9]+)\s*$', re.I) # Extend HTTP Error Messages AWS_HTTP_ERROR_MAP = { 403: 'Unauthorized - Invalid Access/Secret Key Combination.', } class NotifySNS(NotifyBase): """ A wrapper for AWS SNS (Amazon Simple Notification) """ # The default descriptive name associated with the Notification service_name = 'AWS Simple Notification Service (SNS)' # The services URL service_url = 'https://aws.amazon.com/sns/' # The default secure protocol secure_protocol = 'sns' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_sns' # AWS is pretty good for handling data load so request limits # can occur in much shorter bursts request_rate_per_sec = 2.5 # The maximum length of the body # Source: https://docs.aws.amazon.com/sns/latest/api/API_Publish.html body_maxlen = 160 # A title can not be used for SMS Messages. Setting this to zero will # cause any title (if defined) to get placed into the message body. 
title_maxlen = 0 # Define object templates templates = ( '{schema}://{access_key_id}/{secret_access_key}/{region}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'access_key_id': { 'name': _('Access Key ID'), 'type': 'string', 'private': True, 'required': True, }, 'secret_access_key': { 'name': _('Secret Access Key'), 'type': 'string', 'private': True, 'required': True, }, 'region': { 'name': _('Region'), 'type': 'string', 'required': True, 'regex': (r'^[a-z]{2}-[a-z-]+?-[0-9]+$', 'i'), 'required': True, 'map_to': 'region_name', }, 'target_phone_no': { 'name': _('Target Phone No'), 'type': 'string', 'map_to': 'targets', 'regex': (r'^[0-9\s)(+-]+$', 'i') }, 'target_topic': { 'name': _('Target Topic'), 'type': 'string', 'map_to': 'targets', 'prefix': '#', 'regex': (r'^[A-Za-z0-9_-]+$', 'i'), }, 'targets': { 'name': _('Targets'), 'type': 'list:string', 'required': True, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'access': { 'alias_of': 'access_key_id', }, 'secret': { 'alias_of': 'secret_access_key', }, 'region': { 'alias_of': 'region', }, }) def __init__(self, access_key_id, secret_access_key, region_name, targets=None, **kwargs): """ Initialize Notify AWS SNS Object """ super().__init__(**kwargs) # Store our AWS API Access Key self.aws_access_key_id = validate_regex(access_key_id) if not self.aws_access_key_id: msg = 'An invalid AWS Access Key ID was specified.' self.logger.warning(msg) raise TypeError(msg) # Store our AWS API Secret Access key self.aws_secret_access_key = validate_regex(secret_access_key) if not self.aws_secret_access_key: msg = 'An invalid AWS Secret Access Key ' \ '({}) was specified.'.format(secret_access_key) self.logger.warning(msg) raise TypeError(msg) # Acquire our AWS Region Name: # eg. us-east-1, cn-north-1, us-west-2, ... self.aws_region_name = validate_regex( region_name, *self.template_tokens['region']['regex']) if not self.aws_region_name: msg = 'An invalid AWS Region ({}) was specified.'.format( region_name) self.logger.warning(msg) raise TypeError(msg) # Initialize topic list self.topics = list() # Initialize numbers list self.phone = list() # Set our notify_url based on our region self.notify_url = 'https://sns.{}.amazonaws.com/'\ .format(self.aws_region_name) # AWS Service Details self.aws_service_name = 'sns' self.aws_canonical_uri = '/' # AWS Authentication Details self.aws_auth_version = 'AWS4' self.aws_auth_algorithm = 'AWS4-HMAC-SHA256' self.aws_auth_request = 'aws4_request' # Validate targets and drop bad ones: for target in parse_list(targets): result = is_phone_no(target) if result: # store valid phone number in E.164 format self.phone.append('+{}'.format(result['full'])) continue result = IS_TOPIC.match(target) if result: # store valid topic self.topics.append(result.group('name')) continue self.logger.warning( 'Dropped invalid phone/topic ' '(%s) specified.' 
% target, ) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ wrapper to send_notification since we can alert more then one channel """ if len(self.phone) == 0 and len(self.topics) == 0: # We have a bot token and no target(s) to message self.logger.warning('No AWS targets to notify.') return False # Initiaize our error tracking error_count = 0 # Create a copy of our phone #'s to notify against phone = list(self.phone) topics = list(self.topics) while len(phone) > 0: # Get Phone No no = phone.pop(0) # Prepare SNS Message Payload payload = { 'Action': u'Publish', 'Message': body, 'Version': u'2010-03-31', 'PhoneNumber': no, } (result, _) = self._post(payload=payload, to=no) if not result: error_count += 1 # Send all our defined topic id's while len(topics): # Get Topic topic = topics.pop(0) # First ensure our topic exists, if it doesn't, it gets created payload = { 'Action': u'CreateTopic', 'Version': u'2010-03-31', 'Name': topic, } (result, response) = self._post(payload=payload, to=topic) if not result: error_count += 1 continue # Get the Amazon Resource Name topic_arn = response.get('topic_arn') if not topic_arn: # Could not acquire our topic; we're done error_count += 1 continue # Build our payload now that we know our topic_arn payload = { 'Action': u'Publish', 'Version': u'2010-03-31', 'TopicArn': topic_arn, 'Message': body, } # Send our payload to AWS (result, _) = self._post(payload=payload, to=topic) if not result: error_count += 1 return error_count == 0 def _post(self, payload, to): """ Wrapper to request.post() to manage it's response better and make the send() function cleaner and easier to maintain. This function returns True if the _post was successful and False if it wasn't. """ # Always call throttle before any remote server i/o is made; for AWS # time plays a huge factor in the headers being sent with the payload. # So for AWS (SNS) requests we must throttle before they're generated # and not directly before the i/o call like other notification # services do. self.throttle() # Convert our payload from a dict() into a urlencoded string payload = NotifySNS.urlencode(payload) # Prepare our Notification URL # Prepare our AWS Headers based on our payload headers = self.aws_prepare_request(payload) self.logger.debug('AWS POST URL: %s (cert_verify=%r)' % ( self.notify_url, self.verify_certificate, )) self.logger.debug('AWS Payload: %s' % str(payload)) try: r = requests.post( self.notify_url, data=payload, headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifySNS.http_response_code_lookup( r.status_code, AWS_HTTP_ERROR_MAP) self.logger.warning( 'Failed to send AWS notification to {}: ' '{}{}error={}.'.format( to, status_str, ', ' if status_str else '', r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) return (False, NotifySNS.aws_response_to_dict(r.text)) else: self.logger.info( 'Sent AWS notification to "%s".' % (to)) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending AWS ' 'notification to "%s".' % (to), ) self.logger.debug('Socket Exception: %s' % str(e)) return (False, NotifySNS.aws_response_to_dict(None)) return (True, NotifySNS.aws_response_to_dict(r.text)) def aws_prepare_request(self, payload, reference=None): """ Takes the intended payload and returns the headers for it. 
The payload is presumed to have been already urlencoded() """ # Define our AWS header headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8', # Populated below 'Content-Length': 0, 'Authorization': None, 'X-Amz-Date': None, } # Get a reference time (used for header construction) reference = datetime.now(timezone.utc) # Provide Content-Length headers['Content-Length'] = str(len(payload)) # Amazon Date Format amzdate = reference.strftime('%Y%m%dT%H%M%SZ') headers['X-Amz-Date'] = amzdate # Credential Scope scope = '{date}/{region}/{service}/{request}'.format( date=reference.strftime('%Y%m%d'), region=self.aws_region_name, service=self.aws_service_name, request=self.aws_auth_request, ) # Similar to headers; but a subset. keys must be lowercase signed_headers = OrderedDict([ ('content-type', headers['Content-Type']), ('host', '{service}.{region}.amazonaws.com'.format( service=self.aws_service_name, region=self.aws_region_name)), ('x-amz-date', headers['X-Amz-Date']), ]) # # Build Canonical Request Object # canonical_request = '\n'.join([ # Method u'POST', # URL self.aws_canonical_uri, # Query String (none set for POST) '', # Header Content (must include \n at end!) # All entries except characters in amazon date must be # lowercase '\n'.join(['%s:%s' % (k, v) for k, v in signed_headers.items()]) + '\n', # Header Entries (in same order identified above) ';'.join(signed_headers.keys()), # Payload sha256(payload.encode('utf-8')).hexdigest(), ]) # Prepare Unsigned Signature to_sign = '\n'.join([ self.aws_auth_algorithm, amzdate, scope, sha256(canonical_request.encode('utf-8')).hexdigest(), ]) # Our Authorization header headers['Authorization'] = ', '.join([ '{algorithm} Credential={key}/{scope}'.format( algorithm=self.aws_auth_algorithm, key=self.aws_access_key_id, scope=scope, ), 'SignedHeaders={signed_headers}'.format( signed_headers=';'.join(signed_headers.keys()), ), 'Signature={signature}'.format( signature=self.aws_auth_signature(to_sign, reference) ), ]) return headers def aws_auth_signature(self, to_sign, reference): """ Generates a AWS v4 signature based on provided payload which should be in the form of a string. """ def _sign(key, msg, to_hex=False): """ Perform AWS Signing """ if to_hex: return hmac.new(key, msg.encode('utf-8'), sha256).hexdigest() return hmac.new(key, msg.encode('utf-8'), sha256).digest() _date = _sign(( self.aws_auth_version + self.aws_secret_access_key).encode('utf-8'), reference.strftime('%Y%m%d')) _region = _sign(_date, self.aws_region_name) _service = _sign(_region, self.aws_service_name) _signed = _sign(_service, self.aws_auth_request) return _sign(_signed, to_sign, to_hex=True) @staticmethod def aws_response_to_dict(aws_response): """ Takes an AWS Response object as input and returns it as a dictionary but not befor extracting out what is useful to us first. 
eg: IN: arn:aws:sns:us-east-1:000000000000:abcd 604bef0f-369c-50c5-a7a4-bbd474c83d6a OUT: { type: 'CreateTopicResponse', request_id: '604bef0f-369c-50c5-a7a4-bbd474c83d6a', topic_arn: 'arn:aws:sns:us-east-1:000000000000:abcd', } """ # Define ourselves a set of directives we want to keep if found and # then identify the value we want to map them to in our response # object aws_keep_map = { 'RequestId': 'request_id', 'TopicArn': 'topic_arn', 'MessageId': 'message_id', # Error Message Handling 'Type': 'error_type', 'Code': 'error_code', 'Message': 'error_message', } # A default response object that we'll manipulate as we pull more data # from our AWS Response object response = { 'type': None, 'request_id': None, } try: # we build our tree, but not before first eliminating any # reference to namespacing (if present) as it makes parsing # the tree so much easier. root = ElementTree.fromstring( re.sub(' xmlns="[^"]+"', '', aws_response, count=1)) # Store our response tag object name response['type'] = str(root.tag) def _xml_iter(root, response): if len(root) > 0: for child in root: # use recursion to parse everything _xml_iter(child, response) elif root.tag in aws_keep_map.keys(): response[aws_keep_map[root.tag]] = (root.text).strip() # Recursivly iterate over our AWS Response to extract the # fields we're interested in in efforts to populate our response # object. _xml_iter(root, response) except (ElementTree.ParseError, TypeError): # bad data just causes us to generate a bad response pass return response @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol, self.aws_access_key_id, self.aws_secret_access_key, self.aws_region_name, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Our URL parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) return '{schema}://{key_id}/{key_secret}/{region}/{targets}/'\ '?{params}'.format( schema=self.secure_protocol, key_id=self.pprint(self.aws_access_key_id, privacy, safe=''), key_secret=self.pprint( self.aws_secret_access_key, privacy, mode=PrivacyMode.Secret, safe=''), region=NotifySNS.quote(self.aws_region_name, safe=''), targets='/'.join( [NotifySNS.quote(x) for x in chain( # Phone # are already prefixed with a plus symbol self.phone, # Topics are prefixed with a pound/hashtag symbol ['#{}'.format(x) for x in self.topics], )]), params=NotifySNS.urlencode(params), ) def __len__(self): """ Returns the number of targets associated with this notification """ return len(self.phone) + len(self.topics) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # The AWS Access Key ID is stored in the hostname access_key_id = NotifySNS.unquote(results['host']) # Our AWS Access Key Secret contains slashes in it which unfortunately # means it is of variable length after the hostname. Since we require # that the user provides the region code, we intentionally use this # as our delimiter to detect where our Secret is. secret_access_key = None region_name = None # We need to iterate over each entry in the fullpath and find our # region. Once we get there we stop and build our secret from our # accumulated data. 
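# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original source): the comment above
# describes how the slash-bearing AWS Secret is recovered by walking the path
# components until the region token appears.  The standalone, hypothetical
# example below demonstrates the same idea; all values are fabricated.
# ---------------------------------------------------------------------------
import re  # repeated here only so the sketch stands alone

demo_entries = ['AbCd', 'EfGh1234', 'us-east-1', '+15551234567', '#MyTopic']
demo_secret_parts = []
for demo_index, demo_entry in enumerate(demo_entries):
    if re.match(r'^[a-z]{2}-[a-z-]+?-[0-9]+$', demo_entry, re.I):
        demo_secret = '/'.join(demo_secret_parts)     # -> 'AbCd/EfGh1234'
        demo_region = demo_entry                      # -> 'us-east-1'
        demo_targets = demo_entries[demo_index + 1:]  # -> phone/topic targets
        break
    demo_secret_parts.append(demo_entry)
# --------------------------- end of sketch ---------------------------------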
secret_access_key_parts = list() # Start with a list of entries to work with entries = NotifySNS.split_path(results['fullpath']) # Section 1: Get Region and Access Secret index = 0 for i, entry in enumerate(entries): # Are we at the region yet? result = IS_REGION.match(entry) if result: # We found our Region; Rebuild our access key secret based on # all entries we found prior to this: secret_access_key = '/'.join(secret_access_key_parts) # Ensure region is nicely formatted region_name = "{country}-{area}-{no}".format( country=result.group('country').lower(), area=result.group('area').lower(), no=result.group('no'), ) # Track our index as we'll use this to grab the remaining # content in the next Section index = i + 1 # We're done with Section 1 break # Store our secret parts secret_access_key_parts.append(entry) # Section 2: Get our Recipients (basically all remaining entries) results['targets'] = entries[index:] # Support the 'to' variable so that we can support rooms this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifySNS.parse_list(results['qsd']['to']) # Handle secret_access_key over-ride if 'secret' in results['qsd'] and len(results['qsd']['secret']): results['secret_access_key'] = \ NotifySNS.unquote(results['qsd']['secret']) else: results['secret_access_key'] = secret_access_key # Handle access key id over-ride if 'access' in results['qsd'] and len(results['qsd']['access']): results['access_key_id'] = \ NotifySNS.unquote(results['qsd']['access']) else: results['access_key_id'] = access_key_id # Handle region name id over-ride if 'region' in results['qsd'] and len(results['qsd']['region']): results['region_name'] = \ NotifySNS.unquote(results['qsd']['region']) else: results['region_name'] = region_name # Return our result set return results apprise-1.9.3/apprise/plugins/sparkpost.py000066400000000000000000000667211477231770000207370ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
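# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original source): the NotifySNS plugin
# above signs its requests with AWS Signature Version 4 (see
# aws_auth_signature()).  A minimal, standalone sketch of that key-derivation
# chain follows; every input value here is fabricated for demonstration only.
# ---------------------------------------------------------------------------
import hmac
from hashlib import sha256


def demo_sign(key, msg):
    # One HMAC-SHA256 step, mirroring the _sign() helper used above
    return hmac.new(key, msg.encode('utf-8'), sha256).digest()


demo_secret = 'example-secret-access-key'      # fabricated
demo_string_to_sign = 'AWS4-HMAC-SHA256\n...'  # placeholder string to sign

k_date = demo_sign(('AWS4' + demo_secret).encode('utf-8'), '20250101')
k_region = demo_sign(k_date, 'us-east-1')
k_service = demo_sign(k_region, 'sns')
k_signing = demo_sign(k_service, 'aws4_request')
demo_signature = hmac.new(
    k_signing, demo_string_to_sign.encode('utf-8'), sha256).hexdigest()
# --------------------------- end of sketch ---------------------------------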
# Signup @ https://www.sparkpost.com
#
# Ensure you've added a Senders Domain and have generated yourself an
# API Key at:
#   https://app.sparkpost.com/dashboard
# Note: For SMTP Access, your API key must have at least been granted the
#   'Send via SMTP' privileges.
# From here you can click on the domain you're interested in. You can acquire
# the API Key from here which will look something like:
#   1e1d479fcf1a87527e9411e083c700689fa1acdc
#
# Knowing this, you can build your sparkpost url as follows:
#  sparkpost://{user}@{domain}/{apikey}
#  sparkpost://{user}@{domain}/{apikey}/{email}
#
# You can email as many addresses as you want as:
#  sparkpost://{user}@{domain}/{apikey}/{email1}/{email2}/{emailN}
#
# The {user}@{domain} effectively assembles the 'from' email address
# the email will be transmitted from. If no email address is specified
# then it will also become the 'to' address as well.
#
# The {domain} must cross reference a domain you've set up with Spark Post
#
# API Documentation: https://developers.sparkpost.com/api/
# Specifically: https://developers.sparkpost.com/api/transmissions/

import requests
from json import loads
from json import dumps

from .base import NotifyBase
from .. import exception
from ..common import NotifyType
from ..common import NotifyFormat
from email.utils import formataddr
from ..utils.parse import (
    validate_regex, parse_emails, parse_bool, is_email)
from ..locale import gettext_lazy as _

# Provide some known codes SparkPost uses and what they translate to:
# Based on https://www.sparkpost.com/docs/tech-resources/extended-error-codes/
SPARKPOST_HTTP_ERROR_MAP = {
    400: 'A bad request was made to the server',
    401: 'Invalid User ID and/or Unauthorized User',
    403: 'Permission Denied; the provided API Key was not valid',
    404: 'There is a problem with the server query URI.',
    405: 'Invalid HTTP method',
    420: 'Sending limit reached.',
    422: 'Invalid data/format/type/length',
    429: 'Too many requests per sec; rate limit',
}


class SparkPostRegion:
    """
    Regions
    """
    US = 'us'
    EU = 'eu'


# SparkPost APIs
SPARKPOST_API_LOOKUP = {
    SparkPostRegion.US: 'https://api.sparkpost.com/api/v1',
    SparkPostRegion.EU: 'https://api.eu.sparkpost.com/api/v1',
}

# A List of our regions we can use for verification
SPARKPOST_REGIONS = (
    SparkPostRegion.US,
    SparkPostRegion.EU,
)


class NotifySparkPost(NotifyBase):
    """
    A wrapper for SparkPost Notifications
    """

    # The default descriptive name associated with the Notification
    service_name = 'SparkPost'

    # The services URL
    service_url = 'https://sparkpost.com/'

    # Support attachments
    attachment_support = True

    # All notification requests are secure
    secure_protocol = 'sparkpost'

    # SparkPost advertises they allow 300 requests per minute.
    # 60/300 = 0.2
    request_rate_per_sec = 0.20

    # Words straight from their website:
    #    https://developers.sparkpost.com/api/#header-rate-limiting
    # These limits are dynamic, but as a general rule, wait 1 to 5 seconds
    # after receiving a 429 response before requesting again.

    # As a simple workaround, this is what we will do...
Wait X seconds # (defined below) before trying again when we get a 429 error sparkpost_retry_wait_sec = 5 # The maximum number of times we'll retry to send our message when we've # reached a throttling situatin before giving up sparkpost_retry_attempts = 3 # The maximum amount of emails that can reside within a single # batch transfer based on: # https://www.sparkpost.com/docs/tech-resources/\ # smtp-rest-api-performance/#sending-via-the-transmission-rest-api default_batch_size = 2000 # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_sparkpost' # Default Notify Format notify_format = NotifyFormat.HTML # Define object templates templates = ( '{schema}://{user}@{host}:{apikey}/', '{schema}://{user}@{host}:{apikey}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'user': { 'name': _('User Name'), 'type': 'string', 'required': True, }, 'host': { 'name': _('Domain'), 'type': 'string', 'required': True, }, 'apikey': { 'name': _('API Key'), 'type': 'string', 'private': True, 'required': True, }, 'targets': { 'name': _('Target Emails'), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'name': { 'name': _('From Name'), 'type': 'string', 'map_to': 'from_name', }, 'region': { 'name': _('Region Name'), 'type': 'choice:string', 'values': SPARKPOST_REGIONS, 'default': SparkPostRegion.US, 'map_to': 'region_name', }, 'to': { 'alias_of': 'targets', }, 'cc': { 'name': _('Carbon Copy'), 'type': 'list:string', }, 'bcc': { 'name': _('Blind Carbon Copy'), 'type': 'list:string', }, 'batch': { 'name': _('Batch Mode'), 'type': 'bool', 'default': False, }, }) # Define any kwargs we're using template_kwargs = { 'headers': { 'name': _('Email Header'), 'prefix': '+', }, 'tokens': { 'name': _('Template Tokens'), 'prefix': ':', }, } def __init__(self, apikey, targets, cc=None, bcc=None, from_name=None, region_name=None, headers=None, tokens=None, batch=None, **kwargs): """ Initialize SparkPost Object """ super().__init__(**kwargs) # API Key (associated with project) self.apikey = validate_regex(apikey) if not self.apikey: msg = 'An invalid SparkPost API Key ' \ '({}) was specified.'.format(apikey) self.logger.warning(msg) raise TypeError(msg) # Validate our username if not self.user: msg = 'No SparkPost username was specified.' self.logger.warning(msg) raise TypeError(msg) # Acquire Email 'To' self.targets = list() # Acquire Carbon Copies self.cc = set() # Acquire Blind Carbon Copies self.bcc = set() # For tracking our email -> name lookups self.names = {} # Store our region try: self.region_name = self.template_args['region']['default'] \ if region_name is None else region_name.lower() if self.region_name not in SPARKPOST_REGIONS: # allow the outer except to handle this common response raise except: # Invalid region specified msg = 'The SparkPost region specified ({}) is invalid.' 
\ .format(region_name) self.logger.warning(msg) raise TypeError(msg) # Get our From username (if specified) self.from_name = from_name # Get our from email address self.from_addr = '{user}@{host}'.format(user=self.user, host=self.host) if not is_email(self.from_addr): # Parse Source domain based on from_addr msg = 'Invalid ~From~ email format: {}'.format(self.from_addr) self.logger.warning(msg) raise TypeError(msg) self.headers = {} if headers: # Store our extra headers self.headers.update(headers) self.tokens = {} if tokens: # Store our template tokens self.tokens.update(tokens) # Prepare Batch Mode Flag self.batch = self.template_args['batch']['default'] \ if batch is None else batch if targets: # Validate recipients (to:) and drop bad ones: for recipient in parse_emails(targets): result = is_email(recipient) if result: self.targets.append( (result['name'] if result['name'] else False, result['full_email'])) continue self.logger.warning( 'Dropped invalid To email ' '({}) specified.'.format(recipient), ) else: # If our target email list is empty we want to add ourselves to it self.targets.append( (self.from_name if self.from_name else False, self.from_addr)) # Validate recipients (cc:) and drop bad ones: for recipient in parse_emails(cc): email = is_email(recipient) if email: self.cc.add(email['full_email']) # Index our name (if one exists) self.names[email['full_email']] = \ email['name'] if email['name'] else False continue self.logger.warning( 'Dropped invalid Carbon Copy email ' '({}) specified.'.format(recipient), ) # Validate recipients (bcc:) and drop bad ones: for recipient in parse_emails(bcc): email = is_email(recipient) if email: self.bcc.add(email['full_email']) # Index our name (if one exists) self.names[email['full_email']] = \ email['name'] if email['name'] else False continue self.logger.warning( 'Dropped invalid Blind Carbon Copy email ' '({}) specified.'.format(recipient), ) def __post(self, payload, retry): """ Performs the actual post and returns the response """ # Prepare our headers headers = { 'User-Agent': self.app_id, 'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': self.apikey, } # Prepare our URL as it's based on our hostname url = '{}/transmissions/'.format( SPARKPOST_API_LOOKUP[self.region_name]) # Some Debug Logging self.logger.debug('SparkPost POST URL: {} (cert_verify={})'.format( url, self.verify_certificate)) if 'attachments' in payload['content']: # Since we print our payload; attachments make it a bit too noisy # we just strip out the data block to accomodate it log_payload = \ {k: v for k, v in payload.items() if k != "content"} log_payload['content'] = \ {k: v for k, v in payload['content'].items() if k != "attachments"} log_payload['content']['attachments'] = \ [{k: v for k, v in x.items() if k != "data"} for x in payload['content']['attachments']] else: # No tidying is needed log_payload = payload self.logger.debug('SparkPost Payload: {}' .format(log_payload)) wait = None # For logging output of success and errors; we get a head count # of our outbound details: verbose_dest = ', '.join( [x['address']['email'] for x in payload['recipients']]) \ if len(payload['recipients']) <= 3 \ else '{} recipients'.format(len(payload['recipients'])) # Initialize our response object json_response = {} # Set ourselves a status code status_code = -1 while 1: # pragma: no branch # Always call throttle before any remote server i/o is made self.throttle(wait=wait) try: r = requests.post( url, data=dumps(payload), headers=headers, 
verify=self.verify_certificate, timeout=self.request_timeout, ) # A Good response (200) looks like this: # "results": { # "total_rejected_recipients": 0, # "total_accepted_recipients": 1, # "id": "11668787484950529" # } # } # # A Bad response looks like this: # { # "errors": [ # { # "description": # "Unconfigured or unverified sending domain.", # "code": "7001", # "message": "Invalid domain" # } # ] # } # try: # Update our status response if we can json_response = loads(r.content) except (AttributeError, TypeError, ValueError): # ValueError = r.content is Unparsable # TypeError = r.content is None # AttributeError = r is None # We could not parse JSON response. # We will just use the status we already have. pass status_code = r.status_code payload['recipients'] = list() if status_code == requests.codes.ok: self.logger.info( 'Sent SparkPost notification to {}.'.format( verbose_dest)) return status_code, json_response # We had a problem if we get here status_str = \ NotifyBase.http_response_code_lookup( status_code, SPARKPOST_API_LOOKUP) self.logger.warning( 'Failed to send SparkPost notification to {}: ' '{}{}error={}.'.format( verbose_dest, status_str, ', ' if status_str else '', status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) if status_code == requests.codes.too_many_requests and retry: retry = retry - 1 if retry > 0: wait = self.sparkpost_retry_wait_sec continue except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending SparkPost ' 'notification') self.logger.debug('Socket Exception: %s' % str(e)) # Anything else and we're done return status_code, json_response # Our code will never reach here (outside of infinite while loop above) def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, **kwargs): """ Perform SparkPost Notification """ if not self.targets: # There is no one to email; we're done self.logger.warning( 'There are no SparkPost Email recipients to notify') return False # Initialize our has_error flag has_error = False # Send in batches if identified to do so batch_size = 1 if not self.batch else self.default_batch_size reply_to = formataddr((self.from_name if self.from_name else False, self.from_addr), charset='utf-8') payload = { "options": { # When set to True, an image is included with the email which # is used to detect if the user looked at the image or not. 
'open_tracking': False, # Track if links were clicked that were found within email 'click_tracking': False, }, "content": { "from": { "name": self.from_name if self.from_name else self.app_desc, "email": self.from_addr, }, # SparkPost does not allow empty subject lines or lines that # only contain whitespace; Since Apprise allows an empty title # parameter we swap empty title entries with the period "subject": title if title.strip() else '.', "reply_to": reply_to, } } if self.notify_format == NotifyFormat.HTML: payload['content']['html'] = body else: payload['content']['text'] = body if attach and self.attachment_support: # Prepare ourselves an attachment object payload['content']['attachments'] = [] for no, attachment in enumerate(attach, start=1): # Perform some simple error checking if not attachment: # We could not access the attachment self.logger.error( 'Could not access SparkPost attachment {}.'.format( attachment.url(privacy=True))) return False try: # Prepare API Upload Payload payload['content']['attachments'].append({ 'name': attachment.name if attachment.name else f'file{no:03}.dat', 'type': attachment.mimetype, 'data': attachment.base64(), }) except exception.AppriseException: # We could not access the attachment self.logger.error( 'Could not access SparkPost attachment {}.'.format( attachment.url(privacy=True))) return False self.logger.debug( 'Appending SparkPost attachment {}'.format( attachment.url(privacy=True))) # Take a copy of our token dictionary tokens = self.tokens.copy() # Apply some defaults template values tokens['app_body'] = body tokens['app_title'] = title tokens['app_type'] = notify_type tokens['app_id'] = self.app_id tokens['app_desc'] = self.app_desc tokens['app_color'] = self.color(notify_type) tokens['app_url'] = self.app_url # Store our tokens if they're identified payload['substitution_data'] = self.tokens # Create a copy of the targets list emails = list(self.targets) for index in range(0, len(emails), batch_size): # Generate our email listing payload['recipients'] = list() # Initialize our cc list cc = (self.cc - self.bcc) # Initialize our bcc list bcc = set(self.bcc) # Initialize our headers headers = self.headers.copy() for addr in self.targets[index:index + batch_size]: entry = { 'address': { 'email': addr[1], } } # Strip target out of cc list if in To cc = (cc - set([addr[1]])) # Strip target out of bcc list if in To bcc = (bcc - set([addr[1]])) if addr[0]: entry['address']['name'] = addr[0] # Add our recipient to our list payload['recipients'].append(entry) if cc: # Handle our cc List for addr in cc: entry = { 'address': { 'email': addr, 'header_to': # Take the first email in the To self.targets[index:index + batch_size][0][1], }, } if self.names.get(addr): entry['address']['name'] = self.names[addr] # Add our recipient to our list payload['recipients'].append(entry) headers['CC'] = ','.join(cc) # Handle our bcc for addr in bcc: # Add our recipient to our list payload['recipients'].append({ 'address': { 'email': addr, 'header_to': # Take the first email in the To self.targets[index:index + batch_size][0][1], }, }) if headers: payload['content']['headers'] = headers # Send our message status_code, response = \ self.__post(payload, self.sparkpost_retry_attempts) # Failed if status_code != requests.codes.ok: has_error = True return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. 
""" return (self.secure_protocol, self.user, self.apikey, self.host) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'region': self.region_name, 'batch': 'yes' if self.batch else 'no', } # Append our headers into our parameters params.update({'+{}'.format(k): v for k, v in self.headers.items()}) # Append our template tokens into our parameters params.update({':{}'.format(k): v for k, v in self.tokens.items()}) # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) if self.from_name is not None: # from_name specified; pass it back on the url params['name'] = self.from_name if self.cc: # Handle our Carbon Copy Addresses params['cc'] = ','.join( ['{}{}'.format( '' if not e not in self.names else '{}:'.format(self.names[e]), e) for e in self.cc]) if self.bcc: # Handle our Blind Carbon Copy Addresses params['bcc'] = ','.join(self.bcc) # a simple boolean check as to whether we display our target emails # or not has_targets = \ not (len(self.targets) == 1 and self.targets[0][1] == self.from_addr) return '{schema}://{user}@{host}/{apikey}/{targets}/?{params}'.format( schema=self.secure_protocol, host=self.host, user=NotifySparkPost.quote(self.user, safe=''), apikey=self.pprint(self.apikey, privacy, safe=''), targets='' if not has_targets else '/'.join( [NotifySparkPost.quote('{}{}'.format( '' if not e[0] else '{}:'.format(e[0]), e[1]), safe='') for e in self.targets]), params=NotifySparkPost.urlencode(params)) def __len__(self): """ Returns the number of targets associated with this notification """ # # Factor batch into calculation # batch_size = 1 if not self.batch else self.default_batch_size targets = len(self.targets) if batch_size > 1: targets = int(targets / batch_size) + \ (1 if targets % batch_size else 0) return targets if targets > 0 else 1 @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. 
""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Get our entries; split_path() looks after unquoting content for us # by default results['targets'] = NotifySparkPost.split_path(results['fullpath']) # Our very first entry is reserved for our api key try: results['apikey'] = results['targets'].pop(0) except IndexError: # We're done - no API Key found results['apikey'] = None if 'name' in results['qsd'] and len(results['qsd']['name']): # Extract from name to associate with from address results['from_name'] = \ NotifySparkPost.unquote(results['qsd']['name']) if 'region' in results['qsd'] and len(results['qsd']['region']): # Extract region results['region_name'] = \ NotifySparkPost.unquote(results['qsd']['region']) # Handle 'to' email address if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'].append(results['qsd']['to']) # Handle Carbon Copy Addresses if 'cc' in results['qsd'] and len(results['qsd']['cc']): results['cc'] = results['qsd']['cc'] # Handle Blind Carbon Copy Addresses if 'bcc' in results['qsd'] and len(results['qsd']['bcc']): results['bcc'] = results['qsd']['bcc'] # Add our Meta Headers that the user can provide with their outbound # emails results['headers'] = {NotifyBase.unquote(x): NotifyBase.unquote(y) for x, y in results['qsd+'].items()} # Add our template tokens (if defined) results['tokens'] = {NotifyBase.unquote(x): NotifyBase.unquote(y) for x, y in results['qsd:'].items()} # Get Batch Mode Flag results['batch'] = \ parse_bool(results['qsd'].get( 'batch', NotifySparkPost.template_args['batch']['default'])) return results apprise-1.9.3/apprise/plugins/splunk.py000066400000000000000000000403371477231770000202200ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
# Splunk On-Call
#   API: https://portal.victorops.com/public/api-docs.html
#   Main: https://www.splunk.com/en_us/products/on-call.html
#   Routing Keys https://help.victorops.com/knowledge-base/routing-keys/
#   Setup: https://help.victorops.com/knowledge-base/rest-endpoint-integration\
#        -guide/

import re
import requests
from json import dumps

from .base import NotifyBase
from ..common import NotifyType, NOTIFY_TYPES
from ..utils.parse import validate_regex
from ..locale import gettext_lazy as _


class SplunkAction:
    """
    Tracks the actions supported by Apprise Splunk Plugin
    """
    # Use mapping (specify :key=arg to over-ride)
    MAP = 'map'

    # Creates a timeline event but does not trigger an incident
    INFO = 'info'

    # Triggers a warning (possibly causing incident) in all cases
    WARNING = 'warning'

    # Triggers an incident in all cases
    CRITICAL = 'critical'

    # Acknowledge entity_id provided in all cases
    ACKNOWLEDGE = 'acknowledgement'

    # Recovery entity_id provided in all cases
    RECOVERY = 'recovery'

    # Resolve (alias of Recovery)
    RESOLVE = 'resolve'


# Define our Splunk Actions
SPLUNK_ACTIONS = (
    SplunkAction.MAP,
    SplunkAction.INFO,
    SplunkAction.ACKNOWLEDGE,
    SplunkAction.WARNING,
    SplunkAction.RECOVERY,
    SplunkAction.RESOLVE,
    SplunkAction.CRITICAL,
)


class SplunkMessageType:
    """
    Defines the supported splunk message types
    """
    # Triggers an incident
    CRITICAL = 'CRITICAL'

    # May trigger an incident, depending on your settings
    WARNING = 'WARNING'

    # Acks an incident
    ACKNOWLEDGEMENT = 'ACKNOWLEDGEMENT'

    # Creates a timeline event but does not trigger an incident
    INFO = 'INFO'

    # Resolves an incident
    RECOVERY = 'RECOVERY'


# Defines our supported message types
SPLUNK_MESSAGE_TYPES = (
    SplunkMessageType.CRITICAL,
    SplunkMessageType.WARNING,
    SplunkMessageType.ACKNOWLEDGEMENT,
    SplunkMessageType.INFO,
    SplunkMessageType.RECOVERY,
)


class NotifySplunk(NotifyBase):
    """
    A wrapper for Splunk Notifications
    """

    # The default descriptive name associated with the Notification
    service_name = _('Splunk On-Call')

    # The services URL
    service_url = 'https://www.splunk.com/en_us/products/on-call.html'

    # The default secure protocol
    secure_protocol = ('splunk', 'victorops')

    # A URL that takes you to the setup/help of the specific protocol
    setup_url = 'https://github.com/caronc/apprise/wiki/Notify_splunk'

    # Notification URL
    notify_url = 'https://alert.victorops.com/integrations/generic/20131114/'\
        'alert/{apikey}/{routing_key}'

    # Define object templates
    templates = (
        '{schema}://{routing_key}@{apikey}',
        '{schema}://{routing_key}@{apikey}/{entity_id}',
    )

    # The maximum length of the title
    title_maxlen = 60

    # body limit
    body_maxlen = 400

    # Defines our default message mapping
    splunk_message_map = {
        # Creates a timeline event but does not trigger an incident
        NotifyType.INFO: SplunkMessageType.INFO,
        # Resolves an incident
        NotifyType.SUCCESS: SplunkMessageType.RECOVERY,
        # May trigger an incident, depending on your settings
        NotifyType.WARNING: SplunkMessageType.WARNING,
        # Triggers an incident
        NotifyType.FAILURE: SplunkMessageType.CRITICAL,
    }

    # Define our tokens; these are the minimum tokens required to
    # be passed into this function (as arguments).
The syntax appends any # previously defined in the base package and builds onto them template_tokens = dict(NotifyBase.template_tokens, **{ 'apikey': { 'name': _('API Key'), 'type': 'string', 'private': True, 'required': True, 'regex': (r'^[A-Z0-9_-]+$', 'i'), }, 'routing_key': { 'name': _('Target Routing Key'), 'type': 'string', 'required': True, 'regex': (r'^[A-Z0-9_-]+$', 'i'), }, 'entity_id': { # Provide a value such as: "disk space/db01.mycompany.com" 'name': _('Entity ID'), 'type': 'string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'apikey': { 'alias_of': 'apikey', }, 'routing_key': { 'alias_of': 'routing_key', }, 'route': { 'alias_of': 'routing_key', }, 'entity_id': { 'alias_of': 'entity_id', }, 'action': { 'name': _('Action'), 'type': 'choice:string', 'values': SPLUNK_ACTIONS, 'default': SPLUNK_ACTIONS[0], } }) # Define any kwargs we're using template_kwargs = { 'mapping': { 'name': _('Action Mapping'), 'prefix': ':', }, } def __init__(self, apikey, routing_key, entity_id=None, action=None, mapping=None, **kwargs): """ Initialize Splunk Object """ super().__init__(**kwargs) self.apikey = validate_regex( apikey, *self.template_tokens['apikey']['regex']) if not self.apikey: msg = 'The Splunk API Key specified ({}) is invalid.'\ .format(apikey) self.logger.warning(msg) raise TypeError(msg) self.routing_key = validate_regex( routing_key, *self.template_tokens['routing_key']['regex']) if not self.routing_key: msg = 'The Splunk Routing Key specified ({}) is invalid.'\ .format(routing_key) self.logger.warning(msg) raise TypeError(msg) if not (isinstance(entity_id, str) and len(entity_id.strip(' \r\n\t\v/'))): # Use routing key self.entity_id = f"{self.app_id}/{self.routing_key}" else: # Assign what was defined: self.entity_id = entity_id.strip(' \r\n\t\v/') if action and isinstance(action, str): self.action = next( (a for a in SPLUNK_ACTIONS if a.startswith(action)), None) if self.action not in SPLUNK_ACTIONS: msg = 'The Splunk action specified ({}) is invalid.'\ .format(action) self.logger.warning(msg) raise TypeError(msg) else: self.action = self.template_args['action']['default'] # Store our mappings self.mapping = self.splunk_message_map.copy() if mapping and isinstance(mapping, dict): for _k, _v in mapping.items(): # Get our mapping k = next((t for t in NOTIFY_TYPES if t.startswith(_k)), None) if not k: msg = 'The Splunk mapping key specified ({}) is invalid.'\ .format(_k) self.logger.warning(msg) raise TypeError(msg) _v_upper = _v.upper() v = next((v for v in SPLUNK_MESSAGE_TYPES if v.startswith(_v_upper)), None) if not v: msg = 'The Splunk mapping value (assigned to {}) ' \ 'specified ({}) is invalid.'.format(k, _v) self.logger.warning(msg) raise TypeError(msg) # Update our mapping self.mapping[k] = v return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Send our notification """ # prepare our headers headers = { 'User-Agent': self.app_id, 'Content-Type': "application/json", } # Set up our message type if self.action == SplunkAction.MAP: # Use Mapping message_type = self.mapping[notify_type] elif self.action == SplunkAction.ACKNOWLEDGE: # Always Acknowledge message_type = SplunkMessageType.ACKNOWLEDGEMENT elif self.action == SplunkAction.INFO: # Creates a timeline event but does not trigger an incident message_type = SplunkMessageType.INFO elif self.action == SplunkAction.CRITICAL: # Always create Incident message_type = SplunkMessageType.CRITICAL elif self.action == SplunkAction.WARNING: # Always trigger 
warning (potentially creating incident) message_type = SplunkMessageType.WARNING else: # self.action == SplunkAction.RECOVERY or SplunkAction.RESOLVE # Always Recover message_type = SplunkMessageType.RECOVERY # Prepare our payload payload = { "entity_id": self.entity_id, "message_type": message_type, "entity_display_name": title if title else self.app_desc, "state_message": body, "monitoring_tool": self.app_id, } notify_url = self.notify_url.format( apikey=self.apikey, routing_key=self.routing_key) self.logger.debug('Splunk GET URL: %s (cert_verify=%r)' % ( notify_url, self.verify_certificate)) self.logger.debug('Splunk Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( notify_url, data=dumps(payload).encode('utf-8'), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) # Sample Response # { # "result" : "success", # "entity_id" : "disk space/db01.mycompany.com" # } if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifySplunk.http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send Splunk notification: ' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.info('Sent Splunk notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Splunk ' 'notification.') self.logger.debug('Socket Exception: %s' % str(e)) # Return; we're done return False return True @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol[0], self.routing_key, self.entity_id, self.apikey, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'action': self.action, } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Append our assignment extra's into our parameters params.update( {':{}'.format(k): v for k, v in self.mapping.items()}) return '{schema}://{routing_key}@{apikey}/{entity_id}?{params}'.format( schema=self.secure_protocol[0], routing_key=self.routing_key, entity_id='' if self.entity_id == self.routing_key else self.entity_id, apikey=self.pprint(self.apikey, privacy, safe=''), params=NotifySplunk.urlencode(params), ) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ # parse_url already handles getting the `user` and `password` fields # populated. 
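# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original source): the logic below
# resolves the API key from the URL host, the routing key from the user
# portion, and the entity id from the path (each overridable via query
# arguments).  A standard-library approximation of that split -- not the
# plugin's own parser -- using fabricated values:
# ---------------------------------------------------------------------------
from urllib.parse import urlparse, unquote  # used by this sketch only

demo_parts = urlparse(
    'splunk://my-routing-key@my-api-key/disk%20space/db01.mycompany.com')
demo_routing_key = demo_parts.username            # -> 'my-routing-key'
demo_apikey = demo_parts.hostname                 # -> 'my-api-key'
demo_entity_id = unquote(demo_parts.path).strip('/')
# -> 'disk space/db01.mycompany.com'
# --------------------------- end of sketch ---------------------------------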
results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Entity ID if 'entity_id' in results['qsd'] and len(results['qsd']['entity_id']): results['entity_id'] = \ NotifySplunk.unquote(results['qsd']['entity_id']) else: results['entity_id'] = NotifySplunk.unquote(results['fullpath']) # API Key if 'apikey' in results['qsd'] and len(results['qsd']['apikey']): results['apikey'] = NotifySplunk.unquote(results['qsd']['apikey']) else: results['apikey'] = NotifySplunk.unquote(results['host']) # Routing Key if 'routing_key' in results['qsd'] \ and len(results['qsd']['routing_key']): results['routing_key'] = \ NotifySplunk.unquote(results['qsd']['routing_key']) elif 'route' in results['qsd'] and len(results['qsd']['route']): results['routing_key'] = \ NotifySplunk.unquote(results['qsd']['route']) else: results['routing_key'] = NotifySplunk.unquote(results['user']) # Store our action (if defined) if 'action' in results['qsd'] and len(results['qsd']['action']): results['action'] = NotifySplunk.unquote(results['qsd']['action']) # store any custom mapping defined results['mapping'] = {NotifySplunk.unquote(x): NotifySplunk.unquote(y) for x, y in results['qsd:'].items()} return results @staticmethod def parse_native_url(url): """ Support https://alert.victorops.com/integrations/generic/20131114/ \ alert/apikey/routing_key """ result = re.match( r'^https?://alert\.victorops\.com/integrations/generic/' r'(?P[0-9]+)/alert/(?P[0-9a-z_-]+)' r'(/(?P[^?/]+))' r'(/(?P[^?]+))?/*' r'(?P\?.+)?$', url, re.I) if result: return NotifySplunk.parse_url( '{schema}://{routing_key}@{apikey}/{entity_id}{params}'.format( schema=NotifySplunk.secure_protocol[0], apikey=result.group('apikey'), routing_key=result.group('routing_key'), entity_id='' if not result.group('entity_id') else result.group('entity_id'), params='' if not result.group('params') else result.group('params'))) return None apprise-1.9.3/apprise/plugins/streamlabs.py000066400000000000000000000376621477231770000210500ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
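# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original source): the comments below
# explain that this plugin only needs a registered access token.  As a rough,
# hypothetical example of assembling such a URL with a few of the query
# arguments advertised in template_args below (the token is a fabricated
# 40-character placeholder):
# ---------------------------------------------------------------------------
demo_access_token = '0123456789abcdef0123456789abcdef01234567'
demo_url = (
    'strmlabs://{token}/?call=DONATIONS&name=Anon&amount=5&currency=USD'
    .format(token=demo_access_token))
# --------------------------- end of sketch ---------------------------------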
# For this to work correctly you need to register an app # and generate an access token # # # This plugin will simply work using the url of: # streamlabs://access_token/ # # API Documentation on Webhooks: # - https://dev.streamlabs.com/ # import requests from .base import NotifyBase from ..common import NotifyType from ..utils.parse import validate_regex from ..locale import gettext_lazy as _ # calls class StrmlabsCall: ALERT = 'ALERTS' DONATION = 'DONATIONS' # A List of calls we can use for verification STRMLABS_CALLS = ( StrmlabsCall.ALERT, StrmlabsCall.DONATION, ) # alerts class StrmlabsAlert: FOLLOW = 'follow' SUBSCRIPTION = 'subscription' DONATION = 'donation' HOST = 'host' # A List of calls we can use for verification STRMLABS_ALERTS = ( StrmlabsAlert.FOLLOW, StrmlabsAlert.SUBSCRIPTION, StrmlabsAlert.DONATION, StrmlabsAlert.HOST, ) class NotifyStreamlabs(NotifyBase): """ A wrapper to Streamlabs Donation Notifications """ # The default descriptive name associated with the Notification service_name = 'Streamlabs' # The services URL service_url = 'https://streamlabs.com/' # The default secure protocol secure_protocol = 'strmlabs' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_streamlabs' # Streamlabs Api endpoint notify_url = 'https://streamlabs.com/api/v1.0/' # The maximum allowable characters allowed in the body per message body_maxlen = 255 # Define object templates templates = ( '{schema}://{access_token}/', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'access_token': { 'name': _('Access Token'), 'private': True, 'required': True, 'type': 'string', 'regex': (r'^[a-z0-9]{40}$', 'i') }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'call': { 'name': _('Call'), 'type': 'choice:string', 'values': STRMLABS_CALLS, 'default': StrmlabsCall.ALERT, }, 'alert_type': { 'name': _('Alert Type'), 'type': 'choice:string', 'values': STRMLABS_ALERTS, 'default': StrmlabsAlert.DONATION, }, 'image_href': { 'name': _('Image Link'), 'type': 'string', 'default': '', }, 'sound_href': { 'name': _('Sound Link'), 'type': 'string', 'default': '', }, 'duration': { 'name': _('Duration'), 'type': 'int', 'default': 1000, 'min': 0 }, 'special_text_color': { 'name': _('Special Text Color'), 'type': 'string', 'default': '', 'regex': (r'^[A-Z]$', 'i'), }, 'amount': { 'name': _('Amount'), 'type': 'int', 'default': 0, 'min': 0 }, 'currency': { 'name': _('Currency'), 'type': 'string', 'default': 'USD', 'regex': (r'^[A-Z]{3}$', 'i'), }, 'name': { 'name': _('Name'), 'type': 'string', 'default': 'Anon', 'regex': (r'^[^\s].{1,24}$', 'i') }, 'identifier': { 'name': _('Identifier'), 'type': 'string', 'default': 'Apprise', }, }) def __init__(self, access_token, call=StrmlabsCall.ALERT, alert_type=StrmlabsAlert.DONATION, image_href='', sound_href='', duration=1000, special_text_color='', amount=0, currency='USD', name='Anon', identifier='Apprise', **kwargs): """ Initialize Streamlabs Object """ super().__init__(**kwargs) # access token is generated by user # using https://streamlabs.com/api/v1.0/token # Tokens for Streamlabs never need to be refreshed. self.access_token = validate_regex( access_token, *self.template_tokens['access_token']['regex'] ) if not self.access_token: msg = 'An invalid Streamslabs access token was specified.' 
            self.logger.warning(msg)
            raise TypeError(msg)

        # Store the call
        try:
            if call not in STRMLABS_CALLS:
                # allow the outer except to handle this common response
                raise
            else:
                self.call = call

        except Exception as e:
            # Invalid call specified
            msg = 'The streamlabs call specified ({}) is invalid.' \
                .format(call)
            self.logger.warning(msg)
            self.logger.debug('Socket Exception: %s' % str(e))
            raise TypeError(msg)

        # Store the alert_type
        # only applicable when calling /alerts
        try:
            if alert_type not in STRMLABS_ALERTS:
                # allow the outer except to handle this common response
                raise
            else:
                self.alert_type = alert_type

        except Exception as e:
            # Invalid alert type specified
            msg = 'The streamlabs alert type specified ({}) is invalid.' \
                .format(alert_type)
            self.logger.warning(msg)
            self.logger.debug('Socket Exception: %s' % str(e))
            raise TypeError(msg)

        # params only applicable when calling /alerts
        self.image_href = image_href
        self.sound_href = sound_href
        self.duration = duration
        self.special_text_color = special_text_color

        # only applicable when calling /donations
        # The amount of this donation.
        self.amount = amount

        # only applicable when calling /donations
        # The 3 letter currency code for this donation.
        # Must be one of the supported currency codes.
        self.currency = validate_regex(
            currency, *self.template_args['currency']['regex']
        )

        # only applicable when calling /donations
        if not self.currency:
            msg = 'An invalid Streamlabs currency was specified.'
            self.logger.warning(msg)
            raise TypeError(msg)

        # only applicable when calling /donations
        # The name of the donor
        self.name = validate_regex(
            name, *self.template_args['name']['regex']
        )

        if not self.name:
            msg = 'An invalid Streamlabs donor was specified.'
            self.logger.warning(msg)
            raise TypeError(msg)

        # An identifier for this donor,
        # which is used to group donations with the same donor.
        # only applicable when calling /donations
        self.identifier = identifier

        return

    def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
        """
        Perform Streamlabs notification call (either donation or alert)
        """

        headers = {
            'User-Agent': self.app_id,
        }
        if self.call == StrmlabsCall.ALERT:
            data = {
                'access_token': self.access_token,
                'type': self.alert_type.lower(),
                'image_href': self.image_href,
                'sound_href': self.sound_href,
                'message': title,
                'user_massage': body,
                'duration': self.duration,
                'special_text_color': self.special_text_color,
            }

            try:
                r = requests.post(
                    self.notify_url + self.call.lower(),
                    headers=headers,
                    data=data,
                    verify=self.verify_certificate,
                )
                if r.status_code != requests.codes.ok:
                    # We had a problem
                    status_str = \
                        NotifyStreamlabs.http_response_code_lookup(
                            r.status_code)
                    self.logger.warning(
                        'Failed to send Streamlabs alert: '
                        '{}{}error={}.'.format(
                            status_str,
                            ', ' if status_str else '',
                            r.status_code))
                    self.logger.debug(
                        'Response Details:\r\n{}'.format(r.content))
                    return False

                else:
                    self.logger.info('Sent Streamlabs alert.')

            except requests.RequestException as e:
                self.logger.warning(
                    'A Connection error occurred sending Streamlabs '
                    'alert.'
) self.logger.debug('Socket Exception: %s' % str(e)) return False if self.call == StrmlabsCall.DONATION: data = { 'name': self.name, 'identifier': self.identifier, 'amount': self.amount, 'currency': self.currency, 'access_token': self.access_token, 'message': body, } try: r = requests.post( self.notify_url + self.call.lower(), headers=headers, data=data, verify=self.verify_certificate, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyStreamlabs.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to send Streamlabs donation: ' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) return False else: self.logger.info('Sent Streamlabs donation.') except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Streamlabs ' 'donation.' ) self.logger.debug('Socket Exception: %s' % str(e)) return False return True @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.access_token) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'call': self.call, # donation 'name': self.name, 'identifier': self.identifier, 'amount': self.amount, 'currency': self.currency, # alert 'alert_type': self.alert_type, 'image_href': self.image_href, 'sound_href': self.sound_href, 'duration': self.duration, 'special_text_color': self.special_text_color, } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) return '{schema}://{access_token}/?{params}'.format( schema=self.secure_protocol, access_token=self.pprint(self.access_token, privacy, safe=''), params=NotifyStreamlabs.urlencode(params), ) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. 
Syntax: strmlabs://access_token """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Store our access code access_token = NotifyStreamlabs.unquote(results['host']) results['access_token'] = access_token # call if 'call' in results['qsd'] and results['qsd']['call']: results['call'] = NotifyStreamlabs.unquote( results['qsd']['call'].strip().upper()) # donation - amount if 'amount' in results['qsd'] and results['qsd']['amount']: results['amount'] = NotifyStreamlabs.unquote( results['qsd']['amount']) # donation - currency if 'currency' in results['qsd'] and results['qsd']['currency']: results['currency'] = NotifyStreamlabs.unquote( results['qsd']['currency'].strip().upper()) # donation - name if 'name' in results['qsd'] and results['qsd']['name']: results['name'] = NotifyStreamlabs.unquote( results['qsd']['name'].strip().upper()) # donation - identifier if 'identifier' in results['qsd'] and results['qsd']['identifier']: results['identifier'] = NotifyStreamlabs.unquote( results['qsd']['identifier'].strip().upper()) # alert - alert_type if 'alert_type' in results['qsd'] and results['qsd']['alert_type']: results['alert_type'] = NotifyStreamlabs.unquote( results['qsd']['alert_type']) # alert - image_href if 'image_href' in results['qsd'] and results['qsd']['image_href']: results['image_href'] = NotifyStreamlabs.unquote( results['qsd']['image_href']) # alert - sound_href if 'sound_href' in results['qsd'] and results['qsd']['sound_href']: results['sound_href'] = NotifyStreamlabs.unquote( results['qsd']['sound_href'].strip().upper()) # alert - duration if 'duration' in results['qsd'] and results['qsd']['duration']: results['duration'] = NotifyStreamlabs.unquote( results['qsd']['duration'].strip().upper()) # alert - special_text_color if 'special_text_color' in results['qsd'] \ and results['qsd']['special_text_color']: results['special_text_color'] = NotifyStreamlabs.unquote( results['qsd']['special_text_color'].strip().upper()) return results apprise-1.9.3/apprise/plugins/synology.py000066400000000000000000000264051477231770000205670ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import requests from json import dumps from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyType from ..locale import gettext_lazy as _ # For API Details see: # https://kb.synology.com/en-au/DSM/help/Chat/chat_integration class NotifySynology(NotifyBase): """ A wrapper for Synology Chat Notifications """ # The default descriptive name associated with the Notification service_name = 'Synology Chat' # The services URL service_url = 'https://www.synology.com/' # The default protocol protocol = 'synology' # The default secure protocol secure_protocol = 'synologys' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_synology_chat' # Title is to be part of body title_maxlen = 0 # Disable throttle rate for Synology requests since they are normally # local anyway request_rate_per_sec = 0 # Define object templates templates = ( '{schema}://{host}/{token}', '{schema}://{host}:{port}/{token}', '{schema}://{user}@{host}/{token}', '{schema}://{user}@{host}:{port}/{token}', '{schema}://{user}:{password}@{host}/{token}', '{schema}://{user}:{password}@{host}:{port}/{token}', ) # Define our tokens; these are the minimum tokens required required to # be passed into this function (as arguments). The syntax appends any # previously defined in the base package and builds onto them template_tokens = dict(NotifyBase.template_tokens, **{ 'host': { 'name': _('Hostname'), 'type': 'string', 'required': True, }, 'port': { 'name': _('Port'), 'type': 'int', 'min': 1, 'max': 65535, }, 'user': { 'name': _('Username'), 'type': 'string', }, 'password': { 'name': _('Password'), 'type': 'string', 'private': True, }, 'token': { 'name': _('Token'), 'type': 'string', 'required': True, 'private': True, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'file_url': { 'name': _('Upload'), 'type': 'string', }, 'token': { 'alias_of': 'token', }, }) # Define any kwargs we're using template_kwargs = { 'headers': { 'name': _('HTTP Header'), 'prefix': '+', }, } def __init__(self, token=None, headers=None, file_url=None, **kwargs): """ Initialize Synology Chat Object headers can be a dictionary of key/value pairs that you want to additionally include as part of the server headers to post with """ super().__init__(**kwargs) self.token = token if not self.token: msg = 'An invalid Synology Token ' \ '({}) was specified.'.format(token) self.logger.warning(msg) raise TypeError(msg) self.fullpath = kwargs.get('fullpath') # A URL to an attachment you want to upload (must be less then 32MB # Acording to API details (at the time of writing plugin) self.file_url = file_url self.headers = {} if headers: # Store our extra headers self.headers.update(headers) return @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. 
""" return ( self.secure_protocol if self.secure else self.protocol, self.user, self.password, self.host, self.port, self.token, self.fullpath.rstrip('/'), ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = {} if self.file_url: params['file_url'] = self.file_url # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Append our headers into our parameters params.update({'+{}'.format(k): v for k, v in self.headers.items()}) # Determine Authentication auth = '' if self.user and self.password: auth = '{user}:{password}@'.format( user=NotifySynology.quote(self.user, safe=''), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), ) elif self.user: auth = '{user}@'.format( user=NotifySynology.quote(self.user, safe=''), ) default_port = 443 if self.secure else 80 return '{schema}://{auth}{hostname}{port}/{token}' \ '{fullpath}?{params}'.format( schema=self.secure_protocol if self.secure else self.protocol, auth=auth, # never encode hostname since we're expecting it to be a valid # one hostname=self.host, port='' if self.port is None or self.port == default_port else ':{}'.format(self.port), token=self.pprint(self.token, privacy, safe=''), fullpath=NotifySynology.quote(self.fullpath, safe='/') if self.fullpath else '/', params=NotifySynology.urlencode(params), ) def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Synology Chat Notification """ # Prepare HTTP Headers headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/x-www-form-urlencoded', 'Accept': '*/*', } # Apply any/all header over-rides defined headers.update(self.headers) # prepare Synology Object payload = { 'text': body, } if self.file_url: payload['file_url'] = self.file_url # Prepare our parameters params = { 'api': 'SYNO.Chat.External', 'method': 'incoming', 'version': 2, 'token': self.token, } auth = None if self.user: auth = (self.user, self.password) # Set our schema schema = 'https' if self.secure else 'http' url = '%s://%s' % (schema, self.host) if isinstance(self.port, int): url += ':%d' % self.port # Prepare our Synology API URL url += self.fullpath + '/webapi/entry.cgi' self.logger.debug('Synology Chat POST URL: %s (cert_verify=%r)' % ( url, self.verify_certificate, )) self.logger.debug('Synology Chat Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( url, data=f"payload={dumps(payload)}", params=params, headers=headers, auth=auth, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code < 200 or r.status_code >= 300: # We had a problem status_str = \ NotifySynology.http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send Synology Chat %s notification: ' '%serror=%s.', status_str, ', ' if status_str else '', str(r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.info('Sent Synology Chat notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Synology ' 'Chat notification to %s.' % self.host) self.logger.debug('Socket Exception: %s' % str(e)) # Return; we're done return False return True @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. 
""" results = NotifyBase.parse_url(url) if not results: # We're done early as we couldn't load the results return results # Add our headers that the user can potentially over-ride if they wish # to to our returned result set and tidy entries by unquoting them results['headers'] = { NotifySynology.unquote(x): NotifySynology.unquote(y) for x, y in results['qsd+'].items()} # Set our token if found as an argument if 'token' in results['qsd'] and len(results['qsd']['token']): results['token'] = NotifySynology.unquote(results['qsd']['token']) else: # Get unquoted entries entries = NotifySynology.split_path(results['fullpath']) if entries: # Pop the first element results['token'] = entries.pop(0) # Update our fullpath to not include our token results['fullpath'] = \ results['fullpath'][len(results['token']) + 1:] # Set upload/file_url if not otherwise set if 'file_url' in results['qsd'] and len(results['qsd']['file_url']): results['file_url'] = \ NotifySynology.unquote(results['qsd']['file_url']) return results apprise-1.9.3/apprise/plugins/syslog.py000066400000000000000000000250411477231770000202170ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
import syslog

from .base import NotifyBase
from ..common import NotifyType
from ..utils.parse import parse_bool
from ..locale import gettext_lazy as _


class SyslogFacility:
    """
    All of the supported facilities
    """
    KERN = 'kern'
    USER = 'user'
    MAIL = 'mail'
    DAEMON = 'daemon'
    AUTH = 'auth'
    SYSLOG = 'syslog'
    LPR = 'lpr'
    NEWS = 'news'
    UUCP = 'uucp'
    CRON = 'cron'
    LOCAL0 = 'local0'
    LOCAL1 = 'local1'
    LOCAL2 = 'local2'
    LOCAL3 = 'local3'
    LOCAL4 = 'local4'
    LOCAL5 = 'local5'
    LOCAL6 = 'local6'
    LOCAL7 = 'local7'


SYSLOG_FACILITY_MAP = {
    SyslogFacility.KERN: syslog.LOG_KERN,
    SyslogFacility.USER: syslog.LOG_USER,
    SyslogFacility.MAIL: syslog.LOG_MAIL,
    SyslogFacility.DAEMON: syslog.LOG_DAEMON,
    SyslogFacility.AUTH: syslog.LOG_AUTH,
    SyslogFacility.SYSLOG: syslog.LOG_SYSLOG,
    SyslogFacility.LPR: syslog.LOG_LPR,
    SyslogFacility.NEWS: syslog.LOG_NEWS,
    SyslogFacility.UUCP: syslog.LOG_UUCP,
    SyslogFacility.CRON: syslog.LOG_CRON,
    SyslogFacility.LOCAL0: syslog.LOG_LOCAL0,
    SyslogFacility.LOCAL1: syslog.LOG_LOCAL1,
    SyslogFacility.LOCAL2: syslog.LOG_LOCAL2,
    SyslogFacility.LOCAL3: syslog.LOG_LOCAL3,
    SyslogFacility.LOCAL4: syslog.LOG_LOCAL4,
    SyslogFacility.LOCAL5: syslog.LOG_LOCAL5,
    SyslogFacility.LOCAL6: syslog.LOG_LOCAL6,
    SyslogFacility.LOCAL7: syslog.LOG_LOCAL7,
}

SYSLOG_FACILITY_RMAP = {
    syslog.LOG_KERN: SyslogFacility.KERN,
    syslog.LOG_USER: SyslogFacility.USER,
    syslog.LOG_MAIL: SyslogFacility.MAIL,
    syslog.LOG_DAEMON: SyslogFacility.DAEMON,
    syslog.LOG_AUTH: SyslogFacility.AUTH,
    syslog.LOG_SYSLOG: SyslogFacility.SYSLOG,
    syslog.LOG_LPR: SyslogFacility.LPR,
    syslog.LOG_NEWS: SyslogFacility.NEWS,
    syslog.LOG_UUCP: SyslogFacility.UUCP,
    syslog.LOG_CRON: SyslogFacility.CRON,
    syslog.LOG_LOCAL0: SyslogFacility.LOCAL0,
    syslog.LOG_LOCAL1: SyslogFacility.LOCAL1,
    syslog.LOG_LOCAL2: SyslogFacility.LOCAL2,
    syslog.LOG_LOCAL3: SyslogFacility.LOCAL3,
    syslog.LOG_LOCAL4: SyslogFacility.LOCAL4,
    syslog.LOG_LOCAL5: SyslogFacility.LOCAL5,
    syslog.LOG_LOCAL6: SyslogFacility.LOCAL6,
    syslog.LOG_LOCAL7: SyslogFacility.LOCAL7,
}

# Used as a lookup when handling the Apprise -> Syslog Mapping
SYSLOG_PUBLISH_MAP = {
    NotifyType.INFO: syslog.LOG_INFO,
    NotifyType.SUCCESS: syslog.LOG_NOTICE,
    NotifyType.FAILURE: syslog.LOG_CRIT,
    NotifyType.WARNING: syslog.LOG_WARNING,
}


class NotifySyslog(NotifyBase):
    """
    A wrapper for Syslog Notifications
    """

    # The default descriptive name associated with the Notification
    service_name = 'Syslog'

    # The services URL
    service_url = 'https://tools.ietf.org/html/rfc5424'

    # The default protocol
    protocol = 'syslog'

    # A URL that takes you to the setup/help of the specific protocol
    setup_url = 'https://github.com/caronc/apprise/wiki/Notify_syslog'

    # No URL Identifier will be defined for this service as there simply isn't
    # enough detail to uniquely identify one syslog:// from another.
url_identifier = False # Disable throttle rate for Syslog requests since they are normally # local anyway request_rate_per_sec = 0 # Define object templates templates = ( '{schema}://', '{schema}://{facility}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'facility': { 'name': _('Facility'), 'type': 'choice:string', 'values': [k for k in SYSLOG_FACILITY_MAP.keys()], 'default': SyslogFacility.USER, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'facility': { # We map back to the same element defined in template_tokens 'alias_of': 'facility', }, 'logpid': { 'name': _('Log PID'), 'type': 'bool', 'default': True, 'map_to': 'log_pid', }, 'logperror': { 'name': _('Log to STDERR'), 'type': 'bool', 'default': False, 'map_to': 'log_perror', }, }) def __init__(self, facility=None, log_pid=True, log_perror=False, **kwargs): """ Initialize Syslog Object """ super().__init__(**kwargs) if facility: try: self.facility = SYSLOG_FACILITY_MAP[facility] except KeyError: msg = 'An invalid syslog facility ' \ '({}) was specified.'.format(facility) self.logger.warning(msg) raise TypeError(msg) else: self.facility = \ SYSLOG_FACILITY_MAP[ self.template_tokens['facility']['default']] # Logging Options self.logoptions = 0 # Include PID with each message. # This may not appear evident if using journalctl since the pid # will always display itself; however it will appear visible # for log_perror combinations self.log_pid = log_pid # Print to stderr as well. self.log_perror = log_perror if log_pid: self.logoptions |= syslog.LOG_PID if log_perror: self.logoptions |= syslog.LOG_PERROR # Initialize our logging syslog.openlog( self.app_id, logoption=self.logoptions, facility=self.facility) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Syslog Notification """ SYSLOG_PUBLISH_MAP = { NotifyType.INFO: syslog.LOG_INFO, NotifyType.SUCCESS: syslog.LOG_NOTICE, NotifyType.FAILURE: syslog.LOG_CRIT, NotifyType.WARNING: syslog.LOG_WARNING, } if title: # Format title body = '{}: {}'.format(title, body) # Always call throttle before any remote server i/o is made self.throttle() try: syslog.syslog(SYSLOG_PUBLISH_MAP[notify_type], body) except KeyError: # An invalid notification type was specified self.logger.warning( 'An invalid notification type ' '({}) was specified.'.format(notify_type)) return False self.logger.info('Sent Syslog notification.') return True def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'logperror': 'yes' if self.log_perror else 'no', 'logpid': 'yes' if self.log_pid else 'no', } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) return '{schema}://{facility}/?{params}'.format( facility=self.template_tokens['facility']['default'] if self.facility not in SYSLOG_FACILITY_RMAP else SYSLOG_FACILITY_RMAP[self.facility], schema=self.protocol, params=NotifySyslog.urlencode(params), ) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. 
""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results tokens = [] if results['host']: tokens.append(NotifySyslog.unquote(results['host'])) # Get our path values tokens.extend(NotifySyslog.split_path(results['fullpath'])) # Initialization facility = None if tokens: # Store the last entry as the facility facility = tokens[-1].lower() # However if specified on the URL, that will over-ride what was # identified if 'facility' in results['qsd'] and len(results['qsd']['facility']): facility = results['qsd']['facility'].lower() if facility and facility not in SYSLOG_FACILITY_MAP: # Find first match; if no match is found we set the result # to the matching key. This allows us to throw a TypeError # during the __init__() call. The benifit of doing this # check here is if we do have a valid match, we can support # short form matches like 'u' which will match against user facility = next((f for f in SYSLOG_FACILITY_MAP.keys() if f.startswith(facility)), facility) # Save facility if set if facility: results['facility'] = facility # Include PID as part of the message logged results['log_pid'] = parse_bool( results['qsd'].get( 'logpid', NotifySyslog.template_args['logpid']['default'])) # Print to stderr as well. results['log_perror'] = parse_bool( results['qsd'].get( 'logperror', NotifySyslog.template_args['logperror']['default'])) return results apprise-1.9.3/apprise/plugins/techuluspush.py000066400000000000000000000165501477231770000214400ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # To use this plugin, you need to download the app # - Apple: https://itunes.apple.com/us/app/\ # push-by-techulus/id1444391917?ls=1&mt=8 # - Android: https://play.google.com/store/apps/\ # details?id=com.techulus.push # # You have to sign up through the account via your mobile device. # # Once you've got your account, you can get your API key from here: # https://push.techulus.com/login.html # # You can also just get the {apikey} right out of the phone app that is # installed. 
# # your {apikey} will look something like: # b444a40f-3db9-4224-b489-9a514c41c009 # # You will need to assemble all of your URLs for this plugin to work as: # push://{apikey} # # Resources # - https://push.techulus.com/ - Main Website # - https://pushtechulus.docs.apiary.io - API Documentation import requests from json import dumps from .base import NotifyBase from ..common import NotifyType from ..utils.parse import validate_regex from ..locale import gettext_lazy as _ # Token required as part of the API request # Used to prepare our UUID regex matching UUID4_RE = \ r'[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}' class NotifyTechulusPush(NotifyBase): """ A wrapper for Techulus Push Notifications """ # The default descriptive name associated with the Notification service_name = 'Techulus Push' # The services URL service_url = 'https://push.techulus.com' # The default secure protocol secure_protocol = 'push' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_techulus' # Techulus Push uses the http protocol with JSON requests notify_url = 'https://push.techulus.com/api/v1/notify' # The maximum allowable characters allowed in the body per message body_maxlen = 1000 # Define object templates templates = ( '{schema}://{apikey}', ) # Define our template apikeys template_tokens = dict(NotifyBase.template_tokens, **{ 'apikey': { 'name': _('API Key'), 'type': 'string', 'private': True, 'required': True, 'regex': (r'^{}$'.format(UUID4_RE), 'i'), }, }) def __init__(self, apikey, **kwargs): """ Initialize Techulus Push Object """ super().__init__(**kwargs) # The apikey associated with the account self.apikey = validate_regex( apikey, *self.template_tokens['apikey']['regex']) if not self.apikey: msg = 'An invalid Techulus Push API key ' \ '({}) was specified.'.format(apikey) self.logger.warning(msg) raise TypeError(msg) def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Techulus Push Notification """ # Setup our headers headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json', 'x-api-key': self.apikey, } payload = { 'title': title, 'body': body, } self.logger.debug('Techulus Push POST URL: %s (cert_verify=%r)' % ( self.notify_url, self.verify_certificate, )) self.logger.debug('Techulus Push Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( self.notify_url, data=dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code not in ( requests.codes.ok, requests.codes.no_content): # We had a problem status_str = \ NotifyTechulusPush.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to send Techulus Push notification: ' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) return False else: self.logger.info( 'Sent Techulus Push notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Techulus Push ' 'notification.' ) self.logger.debug('Socket Exception: %s' % str(e)) return False return True @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. 
""" return (self.secure_protocol, self.apikey) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Our URL parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) return '{schema}://{apikey}/?{params}'.format( schema=self.secure_protocol, apikey=self.pprint(self.apikey, privacy, safe=''), params=NotifyTechulusPush.urlencode(params), ) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # The first apikey is stored in the hostname results['apikey'] = NotifyTechulusPush.unquote(results['host']) return results apprise-1.9.3/apprise/plugins/telegram.py000066400000000000000000001153551477231770000205070ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # To use this plugin, you need to first access https://api.telegram.org # You need to create a bot and acquire it's Token Identifier (bot_token) # # Basically you need to create a chat with a user called the 'BotFather' # and type: /newbot # # Then follow through the wizard, it will provide you an api key # that looks like this:123456789:alphanumeri_characters # # For each chat_id a bot joins will have a chat_id associated with it. # You will need this value as well to send the notification. # # Log into the webpage version of the site if you like by accessing: # https://web.telegram.org # # You can't check out to see if your entry is working using: # https://api.telegram.org/botAPI_KEY/getMe # # Pay attention to the word 'bot' that must be present infront of your # api key that the BotFather gave you. 
# # For example, a url might look like this: # https://api.telegram.org/bot123456789:alphanumeric_characters/getMe # # Development API Reference:: # - https://core.telegram.org/bots/api import requests import re import os from json import loads from json import dumps from .base import NotifyBase from ..common import NotifyType from ..common import NotifyImageSize from ..common import NotifyFormat from ..common import PersistentStoreMode from ..utils.parse import parse_bool, parse_list, validate_regex from ..locale import gettext_lazy as _ from ..attachment.base import AttachBase TELEGRAM_IMAGE_XY = NotifyImageSize.XY_256 # Chat ID is required # If the Chat ID is positive, then it's addressed to a single person # If the Chat ID is negative, then it's targeting a group # We can support :topic (an integer) if specified as well IS_CHAT_ID_RE = re.compile( r'^((?P-?[0-9]{1,32})|(@|%40)?(?P[a-z_-][a-z0-9_-]+))' r'((:|%3A)(?P[0-9]+))?$', re.IGNORECASE, ) class TelegramMarkdownVersion: """ Telegram Markdown Version """ # Classic (Original Telegram Markdown) ONE = 'MARKDOWN' # Supports strikethrough and many other items TWO = 'MarkdownV2' TELEGRAM_MARKDOWN_VERSION_MAP = { # v1 "v1": TelegramMarkdownVersion.ONE, "1": TelegramMarkdownVersion.ONE, # v2 "v2": TelegramMarkdownVersion.TWO, "2": TelegramMarkdownVersion.TWO, "default": TelegramMarkdownVersion.TWO, } TELEGRAM_MARKDOWN_VERSIONS = { # Note: This also acts as a reverse lookup mapping TelegramMarkdownVersion.ONE: 'v1', TelegramMarkdownVersion.TWO: 'v2', } class TelegramContentPlacement: """ The Telegram Content Placement """ # Before Attachments BEFORE = "before" # After Attachments AFTER = "after" # Identify Placement Categories TELEGRAM_CONTENT_PLACEMENT = ( TelegramContentPlacement.BEFORE, TelegramContentPlacement.AFTER, ) class NotifyTelegram(NotifyBase): """ A wrapper for Telegram Notifications """ # The default descriptive name associated with the Notification service_name = 'Telegram' # The services URL service_url = 'https://telegram.org/' # The default secure protocol secure_protocol = 'tgram' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_telegram' # Default Notify Format notify_format = NotifyFormat.HTML # Telegram uses the http protocol with JSON requests notify_url = 'https://api.telegram.org/bot' # Support attachments attachment_support = True # Allows the user to specify the NotifyImageSize object image_size = NotifyImageSize.XY_256 # The maximum allowable characters allowed in the body per message body_maxlen = 4096 # The maximum number of characters a telegram attachment caption can be # If an attachment is provided and the body is within the caption limit # then it is captioned with the attachment instead. telegram_caption_maxlen = 1024 # Title is to be part of body title_maxlen = 0 # Telegram is limited to sending a maximum of 100 requests per second. request_rate_per_sec = 0.001 # Our default is to no not use persistent storage beyond in-memory # reference storage_mode = PersistentStoreMode.AUTO # Define object templates templates = ( '{schema}://{bot_token}', '{schema}://{bot_token}/{targets}', ) # Telegram Attachment Support mime_lookup = ( # This list is intentionally ordered so that it can be scanned # from top to bottom. 
The last entry is a catch-all # Animations are documented to only support gif or H.264/MPEG-4 # Source: https://core.telegram.org/bots/api#sendanimation { 'regex': re.compile(r'^(image/gif|video/H264)', re.I), 'function_name': 'sendAnimation', 'key': 'animation', }, # This entry is intentially placed below the sendAnimiation allowing # it to catch gif files. This then becomes a catch all to remaining # image types. # Source: https://core.telegram.org/bots/api#sendphoto { 'regex': re.compile(r'^image/.*', re.I), 'function_name': 'sendPhoto', 'key': 'photo', }, # Video is documented to only support .mp4 # Source: https://core.telegram.org/bots/api#sendvideo { 'regex': re.compile(r'^video/mp4', re.I), 'function_name': 'sendVideo', 'key': 'video', }, # Voice supports ogg # Source: https://core.telegram.org/bots/api#sendvoice { 'regex': re.compile(r'^(application|audio)/ogg', re.I), 'function_name': 'sendVoice', 'key': 'voice', }, # Audio supports mp3 and m4a only # Source: https://core.telegram.org/bots/api#sendaudio { 'regex': re.compile(r'^audio/(mpeg|mp4a-latm)', re.I), 'function_name': 'sendAudio', 'key': 'audio', }, # Catch All (all other types) # Source: https://core.telegram.org/bots/api#senddocument { 'regex': re.compile(r'.*', re.I), 'function_name': 'sendDocument', 'key': 'document', }, ) # Telegram's HTML support doesn't like having HTML escaped # characters passed into it. to handle this situation, we need to # search the body for these sequences and convert them to the # output the user expected __telegram_escape_html_entries = ( # Comments (re.compile( r'\s*\s*', (re.I | re.M | re.S)), '', {}), # the following tags are not supported (re.compile( r'\s*<\s*(!?DOCTYPE|p|div|span|body|script|link|' r'meta|html|font|head|label|form|input|textarea|select|iframe|' r'source|script)([^a-z0-9>][^>]*)?>\s*', (re.I | re.M | re.S)), '', {}), # All closing tags to be removed are put here (re.compile( r'\s*<\s*/(span|body|script|meta|html|font|head|' r'label|form|input|textarea|select|ol|ul|link|' r'iframe|source|script)([^a-z0-9>][^>]*)?>\s*', (re.I | re.M | re.S)), '', {}), # Bold (re.compile( r'<\s*(strong)([^a-z0-9>][^>]*)?>', (re.I | re.M | re.S)), '', {}), (re.compile( r'<\s*/\s*(strong)([^a-z0-9>][^>]*)?>', (re.I | re.M | re.S)), '', {}), (re.compile( r'\s*<\s*(h[1-6]|title)([^a-z0-9>][^>]*)?>\s*', (re.I | re.M | re.S)), '{}', {'html': '\r\n'}), (re.compile( r'\s*<\s*/\s*(h[1-6]|title)([^a-z0-9>][^>]*)?>\s*', (re.I | re.M | re.S)), '{}', {'html': '
'}), # Italic (re.compile( r'<\s*(caption|em)([^a-z0-9>][^>]*)?>', (re.I | re.M | re.S)), '', {}), (re.compile( r'<\s*/\s*(caption|em)([^a-z0-9>][^>]*)?>', (re.I | re.M | re.S)), '', {}), # Bullet Lists (re.compile( r'<\s*li([^a-z0-9>][^>]*)?>\s*', (re.I | re.M | re.S)), ' -', {}), # convert pre tags to code (supported by Telegram) (re.compile( r'<\s*pre([^a-z0-9>][^>]*)?>', (re.I | re.M | re.S)), '{}', {'html': '\r\n'}), (re.compile( r'<\s*/\s*pre([^a-z0-9>][^>]*)?>', (re.I | re.M | re.S)), '{}', {'html': '\r\n'}), # New Lines (re.compile( r'\s*<\s*/?\s*(ol|ul|br|hr)\s*/?>\s*', (re.I | re.M | re.S)), '\r\n', {}), (re.compile( r'\s*<\s*/\s*(br|p|hr|li|div)([^a-z0-9>][^>]*)?>\s*', (re.I | re.M | re.S)), '\r\n', {}), # HTML Spaces ( ) and tabs ( ) aren't supported # See https://core.telegram.org/bots/api#html-style (re.compile(r'\ ?', re.I), ' ', {}), # Tabs become 3 spaces (re.compile(r'\ ?', re.I), ' ', {}), # Some characters get re-escaped by the Telegram upstream # service so we need to convert these back, (re.compile(r'\'?', re.I), '\'', {}), (re.compile(r'\"?', re.I), '"', {}), # New line cleanup (re.compile(r'\r*\n[\r\n]+', re.I), '\r\n', {}), ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'bot_token': { 'name': _('Bot Token'), 'type': 'string', 'private': True, 'required': True, # Token required as part of the API request, allow the word 'bot' # infront of it 'regex': (r'^(bot)?(?P[0-9]+:[a-z0-9_-]+)$', 'i'), }, 'target_user': { 'name': _('Target Chat ID'), 'type': 'string', 'map_to': 'targets', 'regex': (r'^((-?[0-9]{1,32})|([a-z_-][a-z0-9_-]+))$', 'i'), }, 'targets': { 'name': _('Targets'), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'image': { 'name': _('Include Image'), 'type': 'bool', 'default': False, 'map_to': 'include_image', }, 'detect': { 'name': _('Detect Bot Owner'), 'type': 'bool', 'default': True, 'map_to': 'detect_owner', }, 'silent': { 'name': _('Silent Notification'), 'type': 'bool', 'default': False, }, 'preview': { 'name': _('Web Page Preview'), 'type': 'bool', 'default': False, }, 'topic': { 'name': _('Topic Thread ID'), 'type': 'int', }, 'thread': { 'alias_of': 'topic', }, 'mdv': { 'name': _('Markdown Version'), 'type': 'choice:string', 'values': ('v1', 'v2'), 'default': 'v1', }, 'to': { 'alias_of': 'targets', }, 'content': { 'name': _('Content Placement'), 'type': 'choice:string', 'values': TELEGRAM_CONTENT_PLACEMENT, 'default': TelegramContentPlacement.BEFORE, }, }) def __init__(self, bot_token, targets, detect_owner=True, include_image=False, silent=None, preview=None, topic=None, content=None, mdv=None, **kwargs): """ Initialize Telegram Object """ super().__init__(**kwargs) self.bot_token = validate_regex( bot_token, *self.template_tokens['bot_token']['regex'], fmt='{key}') if not self.bot_token: err = 'The Telegram Bot Token specified ({}) is invalid.'.format( bot_token) self.logger.warning(err) raise TypeError(err) # Get our Markdown Version self.markdown_ver = \ TELEGRAM_MARKDOWN_VERSION_MAP[NotifyTelegram. template_args['mdv']['default']] \ if mdv is None else \ next(( v for k, v in TELEGRAM_MARKDOWN_VERSION_MAP.items() if str(mdv).lower().startswith(k)), TELEGRAM_MARKDOWN_VERSION_MAP[NotifyTelegram. 
template_args['mdv']['default']]) # Define whether or not we should make audible alarms self.silent = self.template_args['silent']['default'] \ if silent is None else bool(silent) # Define whether or not we should display a web page preview self.preview = self.template_args['preview']['default'] \ if preview is None else bool(preview) # Setup our content placement self.content = self.template_args['content']['default'] \ if not isinstance(content, str) else content.lower() if self.content and self.content not in TELEGRAM_CONTENT_PLACEMENT: msg = 'The content placement specified ({}) is invalid.'\ .format(content) self.logger.warning(msg) raise TypeError(msg) if topic: try: self.topic = int(topic) except (TypeError, ValueError): # Not a valid integer; ignore entry err = 'The Telegram Topic ID specified ({}) is invalid.'\ .format(topic) self.logger.warning(err) raise TypeError(err) else: # No Topic Thread self.topic = None # if detect_owner is set to True, we will attempt to determine who # the bot owner is based on the first person who messaged it. This # is not a fool proof way of doing things as over time Telegram removes # the message history for the bot. So what appears (later on) to be # the first message to it, maybe another user who sent it a message # much later. Users who set this flag should update their Apprise # URL later to directly include the user that we should message. self.detect_owner = detect_owner # Parse our list self.targets = [] for target in parse_list(targets): results = IS_CHAT_ID_RE.match(target) if not results: self.logger.warning( 'Dropped invalid Telegram chat/group ({}) specified.' .format(target), ) # Ensure we don't fall back to owner detection self.detect_owner = False continue if results.group('topic'): topic = int( results.group('topic') if results.group('topic') else self.topic) else: # Default (if one set) topic = self.topic if results.group('name') is not None: # Name self.targets.append(('@%s' % results.group('name'), topic)) else: # ID self.targets.append((int(results.group('idno')), topic)) # Track whether or not we want to send an image with our notification # or not. self.include_image = include_image def send_media(self, target, notify_type, payload={}, attach=None): """ Sends a sticker based on the specified notify type """ # Prepare our Headers headers = { 'User-Agent': self.app_id, } # Our function name and payload are determined on the path function_name = 'SendPhoto' key = 'photo' path = None if isinstance(attach, AttachBase): if not attach: # We could not access the attachment self.logger.error( 'Could not access attachment {}.'.format( attach.url(privacy=True))) return False self.logger.debug( 'Posting Telegram attachment {}'.format( attach.url(privacy=True))) # Store our path to our file path = attach.path file_name = attach.name mimetype = attach.mimetype # Process our attachment function_name, key = \ next(((x['function_name'], x['key']) for x in self.mime_lookup if x['regex'].match(mimetype))) # pragma: no cover else: attach = self.image_path(notify_type) if attach is None else attach if attach is None: # Nothing specified to send return True # Take on specified attachent as path path = attach file_name = os.path.basename(path) url = '%s%s/%s' % ( self.notify_url, self.bot_token, function_name, ) # Always call throttle before any remote server i/o is made; # Telegram throttles to occur before sending the image so that # content can arrive together. 
self.throttle() # Extract our target chat_id, topic = target payload['chat_id'] = chat_id if topic: payload['message_thread_id'] = topic try: with open(path, 'rb') as f: # Configure file payload (for upload) files = {key: (file_name, f)} self.logger.debug( 'Telegram attachment POST URL: %s (cert_verify=%r)' % ( url, self.verify_certificate)) r = requests.post( url, headers=headers, files=files, data=payload, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = NotifyTelegram\ .http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send Telegram attachment: ' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) return False # Content was sent successfully if we got here return True except requests.RequestException as e: self.logger.warning( 'A connection error occurred posting Telegram ' 'attachment.') self.logger.debug('Socket Exception: %s' % str(e)) except (IOError, OSError): # IOError is present for backwards compatibility with Python # versions older then 3.3. >= 3.3 throw OSError now. # Could not open and/or read the file; this is not a problem since # we scan a lot of default paths. self.logger.error( 'File can not be opened for read: {}'.format(path)) return False def detect_bot_owner(self): """ Takes a bot and attempts to detect it's chat id from that """ headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json', } url = '%s%s/%s' % ( self.notify_url, self.bot_token, 'getUpdates' ) self.logger.debug( 'Telegram User Detection POST URL: %s (cert_verify=%r)' % ( url, self.verify_certificate)) # Track our response object response = None try: r = requests.post( url, headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyTelegram.http_response_code_lookup(r.status_code) try: # Try to get the error message if we can: error_msg = loads(r.content).get('description', 'unknown') except (AttributeError, TypeError, ValueError): # ValueError = r.content is Unparsable # TypeError = r.content is None # AttributeError = r is None error_msg = None if error_msg: self.logger.warning( 'Failed to detect the Telegram user: (%s) %s.' 
% ( r.status_code, error_msg)) else: self.logger.warning( 'Failed to detect the Telegram user: ' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) return 0 # Load our response and attempt to fetch our userid response = loads(r.content) except (AttributeError, TypeError, ValueError): # Our response was not the JSON type we had expected it to be # - ValueError = r.content is Unparsable # - TypeError = r.content is None # - AttributeError = r is None self.logger.warning( 'A communication error occurred detecting the Telegram User.') return 0 except requests.RequestException as e: self.logger.warning( 'A connection error occurred detecting the Telegram User.') self.logger.debug('Socket Exception: %s' % str(e)) return 0 # A Response might look something like this: # { # "ok":true, # "result":[{ # "update_id":645421321, # "message":{ # "message_id":1, # "from":{ # "id":532389719, # "is_bot":false, # "first_name":"Chris", # "language_code":"en-US" # }, # "chat":{ # "id":532389719, # "first_name":"Chris", # "type":"private" # }, # "date":1519694394, # "text":"/start", # "entities":[{"offset":0,"length":6,"type":"bot_command"}]}}] if response.get('ok', False): for entry in response.get('result', []): if 'message' in entry and 'from' in entry['message']: _id = entry['message']['from'].get('id', 0) _user = entry['message']['from'].get('first_name') self.logger.info( 'Detected Telegram user %s (userid=%d)' % (_user, _id)) # Return our detected userid self.store.set('bot_owner', _id) return _id self.logger.warning( 'Failed to detect a Telegram user; ' 'try sending your bot a message first.') return 0 def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, body_format=None, **kwargs): """ Perform Telegram Notification """ if len(self.targets) == 0 and self.detect_owner: _id = self.store.get('bot_owner') or self.detect_bot_owner() if _id: # Permanently store our id in our target list for next time self.targets.append((str(_id), self.topic)) self.logger.info( 'Update your Telegram Apprise URL to read: ' '{}'.format(self.url(privacy=True))) if len(self.targets) == 0: self.logger.warning('There were not Telegram chat_ids to notify.') return False headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json', } # error tracking (used for function return) has_error = False url = '%s%s/%s' % ( self.notify_url, self.bot_token, 'sendMessage' ) _payload = { # Notification Audible Control 'disable_notification': self.silent, # Display Web Page Preview (if possible) 'disable_web_page_preview': not self.preview, } # Prepare Message Body if self.notify_format == NotifyFormat.MARKDOWN: if body_format not in (None, NotifyFormat.MARKDOWN) \ and self.markdown_ver == TelegramMarkdownVersion.TWO: # Telegram Markdown v2 is not very accomodating to some # characters such as the hashtag (#) which is fine in v1. 
# To try and be accomodating we escape them in advance # See: https://stackoverflow.com/a/69892704/355584 # Also: https://core.telegram.org/bots/api#markdownv2-style body = re.sub(r'(?#+=|{}.!-])', r'\\\1', body) _payload['parse_mode'] = self.markdown_ver _payload['text'] = body else: # HTML # Use Telegram's HTML mode _payload['parse_mode'] = 'HTML' for r, v, m in self.__telegram_escape_html_entries: if 'html' in m: # Handle special cases where we need to alter new lines # for presentation purposes v = v.format(m['html'] if body_format in ( NotifyFormat.HTML, NotifyFormat.MARKDOWN) else '') body = r.sub(v, body) # Prepare our payload based on HTML or TEXT _payload['text'] = body # Prepare our caption payload caption_payload = { 'caption': _payload['text'], 'show_caption_above_media': True if self.content == TelegramContentPlacement.BEFORE else False, 'parse_mode': _payload['parse_mode']} \ if attach and body and len(_payload.get('text', '')) < \ self.telegram_caption_maxlen else {} # Handle payloads without a body specified (but an attachment present) attach_content = \ TelegramContentPlacement.AFTER \ if not body or caption_payload else self.content # Create a copy of the chat_ids list targets = list(self.targets) while len(targets): target = targets.pop(0) chat_id, topic = target # Printable chat_id details pchat_id = f'{chat_id}' if not topic else f'{chat_id}:{topic}' payload = _payload.copy() payload['chat_id'] = chat_id if topic: payload['message_thread_id'] = topic if self.include_image is True: # Define our path if not self.send_media(target, notify_type): # We failed to send the image associated with our notify_type self.logger.warning( 'Failed to send Telegram attachment to {}.', pchat_id) if attach and self.attachment_support and \ attach_content == TelegramContentPlacement.AFTER: # Send our attachments now (if specified and if it exists) if not self._send_attachments( target, notify_type=notify_type, payload=caption_payload, attach=attach): has_error = True continue if not body: # Nothing more to do; move along to the next attachment continue if caption_payload: # nothing further to do; move along to the next attachment continue # Always call throttle before any remote server i/o is made; # Telegram throttles to occur before sending the image so that # content can arrive together. self.throttle() self.logger.debug('Telegram POST URL: %s (cert_verify=%r)' % ( url, self.verify_certificate, )) self.logger.debug('Telegram Payload: %s' % str(payload)) try: r = requests.post( url, data=dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyTelegram.http_response_code_lookup(r.status_code) try: # Try to get the error message if we can: error_msg = loads(r.content).get( 'description', 'unknown') except (AttributeError, TypeError, ValueError): # ValueError = r.content is Unparsable # TypeError = r.content is None # AttributeError = r is None error_msg = None self.logger.warning( 'Failed to send Telegram notification to {}: ' '{}, error={}.'.format( pchat_id, error_msg if error_msg else status_str, r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Flag our error has_error = True continue except requests.RequestException as e: self.logger.warning( 'A connection error occurred sending Telegram:%s ' % ( pchat_id) + 'notification.' 
) self.logger.debug('Socket Exception: %s' % str(e)) # Flag our error has_error = True continue self.logger.info('Sent Telegram notification.') if attach and self.attachment_support \ and attach_content == TelegramContentPlacement.BEFORE: # Send our attachments now (if specified and if it exists) as # it was identified to send the content before the attachments # which is now done. if not self._send_attachments( target=target, notify_type=notify_type, attach=attach): has_error = True continue return not has_error def _send_attachments(self, target, notify_type, attach, payload={}): """ Sends our attachments """ has_error = False # Send our attachments now (if specified and if it exists) for no, attachment in enumerate(attach, start=1): payload = payload if payload and no == 1 else {} payload.update({ 'title': attachment.name if attachment.name else f'file{no:03}.dat'}) if not self.send_media( target, notify_type, payload=payload, attach=attachment): # We failed; don't continue has_error = True break self.logger.info( 'Sent Telegram attachment: {}.'.format(attachment)) return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.bot_token) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'image': self.include_image, 'detect': 'yes' if self.detect_owner else 'no', 'silent': 'yes' if self.silent else 'no', 'preview': 'yes' if self.preview else 'no', 'content': self.content, 'mdv': TELEGRAM_MARKDOWN_VERSIONS[self.markdown_ver], } if self.topic: params['topic'] = self.topic # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) targets = [] for (chat_id, _topic) in self.targets: topic = _topic if _topic else self.topic targets.append(''.join( [NotifyTelegram.quote(f'{chat_id}', safe='@') if isinstance(chat_id, str) else f'{chat_id}', '' if not topic else f':{topic}'])) # No need to check the user token because the user automatically gets # appended into the list of chat ids return '{schema}://{bot_token}/{targets}/?{params}'.format( schema=self.secure_protocol, bot_token=self.pprint(self.bot_token, privacy, safe=''), targets='/'.join(targets), params=NotifyTelegram.urlencode(params)) def __len__(self): """ Returns the number of targets associated with this notification """ return 1 if not self.targets else len(self.targets) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ # This is a dirty hack; but it's the only work around to tgram:// # messages since the bot_token has a colon in it. It invalidates a # normal URL. # This hack searches for this bogus URL and corrects it so we can # properly load it further down. The other alternative is to ask users # to actually change the colon into a slash (which will work too), but # it's more likely to cause confusion... 
So this is the next best thing # we also check for %3A (incase the URL is encoded) as %3A == : try: tgram = re.match( r'(?P{schema}://)(bot)?(?P([a-z0-9_-]+)' r'(:[a-z0-9_-]+)?@)?(?P[0-9]+)(:|%3A)+' r'(?P.*)$'.format( schema=NotifyTelegram.secure_protocol), url, re.I) except (TypeError, AttributeError): # url is bad; force tgram to be None tgram = None if not tgram: # Content is simply not parseable return None if tgram.group('prefix'): # Try again results = NotifyBase.parse_url('%s%s%s/%s' % ( tgram.group('protocol'), tgram.group('prefix'), tgram.group('btoken_a'), tgram.group('remaining')), verify_host=False) else: # Try again results = NotifyBase.parse_url('%s%s/%s' % ( tgram.group('protocol'), tgram.group('btoken_a'), tgram.group('remaining')), verify_host=False) # The first token is stored in the hostname bot_token_a = NotifyTelegram.unquote(results['host']) # Get a nice unquoted list of path entries entries = NotifyTelegram.split_path(results['fullpath']) # Now fetch the remaining tokens bot_token_b = entries.pop(0) bot_token = '%s:%s' % (bot_token_a, bot_token_b) # Store our chat ids (as these are the remaining entries) results['targets'] = entries # content to be displayed 'before' or 'after' attachments if 'content' in results['qsd'] and len(results['qsd']['content']): results['content'] = results['qsd']['content'] # Support the 'to' variable so that we can support rooms this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyTelegram.parse_list(results['qsd']['to']) # Store our bot token results['bot_token'] = bot_token # Support Markdown Version if 'mdv' in results['qsd'] and len(results['qsd']['mdv']): results['mdv'] = results['qsd']['mdv'] # Support Thread Topic if 'topic' in results['qsd'] and len(results['qsd']['topic']): results['topic'] = results['qsd']['topic'] elif 'thread' in results['qsd'] and len(results['qsd']['thread']): results['topic'] = results['qsd']['thread'] # Silent (Sends the message Silently); users will receive # notification with no sound. results['silent'] = \ parse_bool(results['qsd'].get('silent', False)) # Show Web Page Preview results['preview'] = \ parse_bool(results['qsd'].get('preview', False)) # Include images with our message results['include_image'] = \ parse_bool(results['qsd'].get('image', False)) # Include images with our message results['detect_owner'] = \ parse_bool( results['qsd'].get('detect', not results['targets'])) return results apprise-1.9.3/apprise/plugins/threema.py000066400000000000000000000275161477231770000203350ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # Create an account https://gateway.threema.ch/en/ if you don't already have # one # # Read more about Threema Gateway API here: # - https://gateway.threema.ch/en/developer/api import requests from itertools import chain from .base import NotifyBase from ..common import NotifyType from ..utils.parse import is_phone_no, validate_regex, is_email, parse_list from ..url import PrivacyMode from ..locale import gettext_lazy as _ class ThreemaRecipientTypes: """ The supported recipient specifiers """ THREEMA_ID = 'to' PHONE = 'phone' EMAIL = 'email' class NotifyThreema(NotifyBase): """ A wrapper for Threema Gateway Notifications """ # The default descriptive name associated with the Notification service_name = 'Threema Gateway' # The services URL service_url = 'https://gateway.threema.ch/' # The default protocol secure_protocol = 'threema' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_threema' # Threema Gateway uses the http protocol with JSON requests notify_url = 'https://msgapi.threema.ch/send_simple' # The maximum length of the body body_maxlen = 3500 # No title support title_maxlen = 0 # Define object templates templates = ( '{schema}://{gateway_id}@{secret}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'gateway_id': { 'name': _('Gateway ID'), 'type': 'string', 'private': True, 'required': True, 'map_to': 'user', }, 'secret': { 'name': _('API Secret'), 'type': 'string', 'private': True, 'required': True, }, 'target_phone': { 'name': _('Target Phone No'), 'type': 'string', 'prefix': '+', 'regex': (r'^[0-9\s)(+-]+$', 'i'), 'map_to': 'targets', }, 'target_email': { 'name': _('Target Email'), 'type': 'string', 'map_to': 'targets', }, 'target_threema_id': { 'name': _('Target Threema ID'), 'type': 'string', 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', 'required': True, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'from': { 'alias_of': 'gateway_id', }, 'gwid': { 'alias_of': 'gateway_id', }, 'secret': { 'alias_of': 'secret', }, }) def __init__(self, secret=None, targets=None, **kwargs): """ Initialize Threema Gateway Object """ super().__init__(**kwargs) # Validate our params here. 
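# Illustrative sketch of how the templates above translate into a usable
# Apprise URL; the Gateway ID, secret and targets below are placeholders
# only (targets may be any mix of 8 character Threema IDs, e-mail
# addresses and phone numbers):
#
#   import apprise
#   apobj = apprise.Apprise()
#   apobj.add('threema://ABCD1234@apisecret/ECHOECHO/user@example.com/'
#             '+15551234567')
#   apobj.notify(body='backup completed')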
if not self.user: msg = 'Threema Gateway ID must be specified' self.logger.warning(msg) raise TypeError(msg) # Verify our Gateway ID if len(self.user) != 8: msg = 'Threema Gateway ID must be 8 characters in length' self.logger.warning(msg) raise TypeError(msg) # Verify our secret self.secret = validate_regex(secret) if not self.secret: msg = \ 'An invalid Threema API Secret ({}) was specified'.format( secret) self.logger.warning(msg) raise TypeError(msg) # Parse our targets self.targets = list() # Used for URL generation afterwards only self.invalid_targets = list() for target in parse_list(targets, allow_whitespace=False): if len(target) == 8: # Store our user self.targets.append( (ThreemaRecipientTypes.THREEMA_ID, target)) continue # Check if an email was defined result = is_email(target) if result: # Store our user self.targets.append( (ThreemaRecipientTypes.EMAIL, result['full_email'])) continue # Validate targets and drop bad ones: result = is_phone_no(target) if result: # store valid phone number self.targets.append(( ThreemaRecipientTypes.PHONE, result['full'])) continue self.logger.warning( 'Dropped invalid user/email/phone ' '({}) specified'.format(target), ) self.invalid_targets.append(target) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Threema Gateway Notification """ if len(self.targets) == 0: # There were no services to notify self.logger.warning( 'There were no Threema Gateway targets to notify') return False # error tracking (used for function return) has_error = False # Prepare our headers headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8', 'Accept': '*/*', } # Prepare our payload _payload = { 'secret': self.secret, 'from': self.user, 'text': body.encode('utf-8'), } # Create a copy of the targets list targets = list(self.targets) while len(targets): # Get our target to notify key, target = targets.pop(0) # Prepare a payload object payload = _payload.copy() # Set Target payload[key] = target # Some Debug Logging self.logger.debug( 'Threema Gateway GET URL: {} (cert_verify={})'.format( self.notify_url, self.verify_certificate)) self.logger.debug('Threema Gateway Payload: {}' .format(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( self.notify_url, params=payload, headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyThreema.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to send Threema Gateway notification to {}: ' '{}{}error={}'.format( target, status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure has_error = True continue # We wee successful self.logger.info( 'Sent Threema Gateway notification to %s' % target) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Threema Gateway:%s ' 'notification' % target ) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure has_error = True continue return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. 
""" return (self.secure_protocol, self.user, self.secret) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) schemaStr = \ '{schema}://{gatewayid}@{secret}/{targets}?{params}' return schemaStr.format( schema=self.secure_protocol, gatewayid=NotifyThreema.quote(self.user), secret=self.pprint( self.secret, privacy, mode=PrivacyMode.Secret, safe=''), targets='/'.join(chain( [NotifyThreema.quote(x[1], safe='@+') for x in self.targets], [NotifyThreema.quote(x, safe='@+') for x in self.invalid_targets])), params=NotifyThreema.urlencode(params)) def __len__(self): """ Returns the number of targets associated with this notification """ targets = len(self.targets) return targets if targets > 0 else 1 @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results results['targets'] = list() if 'secret' in results['qsd'] and len(results['qsd']['secret']): results['secret'] = \ NotifyThreema.unquote(results['qsd']['secret']) else: results['secret'] = NotifyThreema.unquote(results['host']) results['targets'] += \ NotifyThreema.split_path(results['fullpath']) if 'from' in results['qsd'] and len(results['qsd']['from']): results['user'] = \ NotifyThreema.unquote(results['qsd']['from']) elif 'gwid' in results['qsd'] and len(results['qsd']['gwid']): results['user'] = \ NotifyThreema.unquote(results['qsd']['gwid']) if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyThreema.parse_list( results['qsd']['to'], allow_whitespace=False) return results apprise-1.9.3/apprise/plugins/twilio.py000066400000000000000000000440441477231770000202120ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
# To use this service you will need a Twilio account to which you can get your # AUTH_TOKEN and ACCOUNT SID right from your console/dashboard at: # https://www.twilio.com/console # # You will also need to send the SMS From a phone number or account id name. # This is identified as the source (or where the SMS message will originate # from). Activated phone numbers can be found on your dashboard here: # - https://www.twilio.com/console/phone-numbers/incoming # # Alternatively, you can open your wallet and request a different Twilio # phone # from: # https://www.twilio.com/console/phone-numbers/search # # or consider purchasing a short-code from here: # https://www.twilio.com/docs/glossary/what-is-a-short-code # import re import requests from json import loads from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyType from ..utils.parse import is_phone_no, parse_phone_no, validate_regex from ..locale import gettext_lazy as _ # Twilio Mode Detection MODE_DETECT_RE = re.compile( r'\s*((?P[^:]+)\s*:\s*)?(?P.+)$', re.I) class TwilioMessageMode: """ Twilio Message Mode """ # SMS/MMS TEXT = 'T' # via WhatsApp WHATSAPP = 'W' class NotifyTwilio(NotifyBase): """ A wrapper for Twilio Notifications """ # The default descriptive name associated with the Notification service_name = 'Twilio' # The services URL service_url = 'https://www.twilio.com/' # All notification requests are secure secure_protocol = 'twilio' # Allow 300 requests per minute. # 60/300 = 0.2 request_rate_per_sec = 0.20 # the number of seconds undelivered messages should linger for # in the Twilio queue validity_period = 14400 # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_twilio' # Twilio uses the http protocol with JSON requests notify_url = 'https://api.twilio.com/2010-04-01/Accounts/' \ '{sid}/Messages.json' # The maximum length of the body body_maxlen = 160 # A title can not be used for SMS Messages. Setting this to zero will # cause any title (if defined) to get placed into the message body. 
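# Illustrative sketch of the URL templates declared just below, including
# the 'w:' / 'whatsapp:' prefix recognized by MODE_DETECT_RE above; the
# Account SID, Auth Token and phone numbers are placeholders only:
#
#   import apprise
#   apobj = apprise.Apprise()
#   apobj.add('twilio://ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa:faketoken123'
#             '@15551234567/+15559876543/w:+15553334444')
#   apobj.notify(body='disk usage above 90%')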
title_maxlen = 0 # Define object templates templates = ( '{schema}://{account_sid}:{auth_token}@{from_phone}', '{schema}://{account_sid}:{auth_token}@{from_phone}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'account_sid': { 'name': _('Account SID'), 'type': 'string', 'private': True, 'required': True, 'regex': (r'^AC[a-f0-9]+$', 'i'), }, 'auth_token': { 'name': _('Auth Token'), 'type': 'string', 'private': True, 'required': True, 'regex': (r'^[a-z0-9]+$', 'i'), }, 'from_phone': { 'name': _('From Phone No'), 'type': 'string', 'required': True, 'regex': (r'^([a-z]+:)?\+?[0-9\s)(+-]+$', 'i'), 'map_to': 'source', }, 'target_phone': { 'name': _('Target Phone No'), 'type': 'string', 'prefix': '+', 'regex': (r'^([a-z]+:)?[0-9\s)(+-]+$', 'i'), 'map_to': 'targets', }, 'short_code': { 'name': _('Target Short Code'), 'type': 'string', 'regex': (r'^[0-9]{5,6}$', 'i'), 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'from': { 'alias_of': 'from_phone', }, 'sid': { 'alias_of': 'account_sid', }, 'token': { 'alias_of': 'auth_token', }, 'apikey': { 'name': _('API Key'), 'type': 'string', 'private': True, 'regex': (r'^SK[a-f0-9]+$', 'i'), }, }) def __init__(self, account_sid, auth_token, source, targets=None, apikey=None, ** kwargs): """ Initialize Twilio Object """ super().__init__(**kwargs) # The Account SID associated with the account self.account_sid = validate_regex( account_sid, *self.template_tokens['account_sid']['regex']) if not self.account_sid: msg = 'An invalid Twilio Account SID ' \ '({}) was specified.'.format(account_sid) self.logger.warning(msg) raise TypeError(msg) # The Authentication Token associated with the account self.auth_token = validate_regex( auth_token, *self.template_tokens['auth_token']['regex']) if not self.auth_token: msg = 'An invalid Twilio Authentication Token ' \ '({}) was specified.'.format(auth_token) self.logger.warning(msg) raise TypeError(msg) # The API Key associated with the account (optional) self.apikey = validate_regex( apikey, *self.template_args['apikey']['regex']) # Detect mode result = MODE_DETECT_RE.match(source) if not result: msg = 'The Account (From) Phone # or Short-code specified ' \ '({}) is invalid.'.format(source) self.logger.warning(msg) raise TypeError(msg) # prepare our default mode to use for all numbers that follow in # target definitions self.default_mode = TwilioMessageMode.WHATSAPP \ if result.group('mode') and \ result.group('mode')[0].lower() == 'w' \ else TwilioMessageMode.TEXT result = is_phone_no(result.group('phoneno'), min_len=5) if not result: msg = 'The Account (From) Phone # or Short-code specified ' \ '({}) is invalid.'.format(source) self.logger.warning(msg) raise TypeError(msg) # Store The Source Phone # and/or short-code self.source = result['full'] if len(self.source) < 11 or len(self.source) > 14: # https://www.twilio.com/docs/glossary/what-is-a-short-code # A short code is a special 5 or 6 digit telephone number # that's shorter than a full phone number. if len(self.source) not in (5, 6): msg = 'The Account (From) Phone # specified ' \ '({}) is invalid.'.format(source) self.logger.warning(msg) raise TypeError(msg) # else... 
it as a short code so we're okay else: # We're dealing with a phone number; so we need to just # place a plus symbol at the end of it self.source = '+{}'.format(self.source) # Parse our targets self.targets = list() for entry in parse_phone_no(targets, prefix=True): # Detect mode # w: (or whatsapp:) will trigger whatsapp message otherwise # sms/mms as normal result = MODE_DETECT_RE.match(entry) mode = TwilioMessageMode.WHATSAPP if result.group('mode') and \ result.group('mode')[0].lower() == 'w' else self.default_mode # Validate targets and drop bad ones: result = is_phone_no(result.group('phoneno')) if not result: self.logger.warning( 'Dropped invalid phone # ' '({}) specified.'.format(entry), ) continue # We can't send twilio messages using short-codes as our source if len(self.source) in (5, 6) and mode is \ TwilioMessageMode.WHATSAPP: self.logger.warning( 'Dropped WhatsApp phone # ' '({}) because source provided was a short-code.'.format( entry), ) continue # store valid phone number self.targets.append((mode, '+{}'.format(result['full']))) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Twilio Notification """ if not self.targets: if len(self.source) in (5, 6): # Generate a warning since we're a short-code. We need # a number to message at minimum self.logger.warning( 'There are no valid Twilio targets to notify.') return False # error tracking (used for function return) has_error = False # Prepare our headers headers = { 'User-Agent': self.app_id, 'Accept': 'application/json', } # Prepare our payload payload = { 'Body': body, # The From and To gets populated in the loop below 'From': None, 'To': None, } # Prepare our Twilio URL url = self.notify_url.format(sid=self.account_sid) # Create a copy of the targets list targets = list(self.targets) # Set up our authentication. Prefer the API Key if provided. auth = (self.apikey or self.account_sid, self.auth_token) if len(targets) == 0: # No sources specified, use our own phone no targets.append((self.default_mode, self.source)) while len(targets): # Get our target to notify (mode, target) = targets.pop(0) # Prepare our user if mode is TwilioMessageMode.TEXT: payload['From'] = self.source payload['To'] = target else: # WhatsApp support (via Twilio) payload['From'] = f'whatsapp:{self.source}' payload['To'] = f'whatsapp:{target}' # Some Debug Logging self.logger.debug('Twilio POST URL: {} (cert_verify={})'.format( url, self.verify_certificate)) self.logger.debug('Twilio Payload: {}' .format(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( url, auth=auth, data=payload, headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code not in ( requests.codes.created, requests.codes.ok): # We had a problem status_str = \ NotifyBase.http_response_code_lookup(r.status_code) # set up our status code to use status_code = r.status_code try: # Update our status response if we can json_response = loads(r.content) status_code = json_response.get('code', status_code) status_str = json_response.get('message', status_str) except (AttributeError, TypeError, ValueError): # ValueError = r.content is Unparsable # TypeError = r.content is None # AttributeError = r is None # We could not parse JSON response. # We will just use the status we already have. 
pass self.logger.warning( 'Failed to send Twilio notification to {}: ' '{}{}error={}.'.format( target, status_str, ', ' if status_str else '', status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure has_error = True continue else: self.logger.info( 'Sent Twilio notification to {}.'.format(target)) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Twilio:%s ' % ( target) + 'notification.' ) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure has_error = True continue return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol, self.account_sid, self.auth_token, self.source, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Our URL parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) if self.apikey is not None: # apikey specified; pass it back on the url params['apikey'] = self.apikey return '{schema}://{sid}:{token}@{source}/{targets}/?{params}'.format( schema=self.secure_protocol, sid=self.pprint( self.account_sid, privacy, mode=PrivacyMode.Tail, safe=''), token=self.pprint(self.auth_token, privacy, safe=''), source=NotifyTwilio.quote( self.source if self.default_mode is TwilioMessageMode.TEXT else 'w:{}'.format(self.source), safe=''), targets='/'.join( [NotifyTwilio.quote( x[1] if x[0] is TwilioMessageMode.TEXT else 'w:{}'.format(x[1]), safe='') for x in self.targets]), params=NotifyTwilio.urlencode(params)) def __len__(self): """ Returns the number of targets associated with this notification """ targets = len(self.targets) return targets if targets > 0 else 1 @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Get our entries; split_path() looks after unquoting content for us # by default results['targets'] = NotifyTwilio.split_path(results['fullpath']) # The hostname is our source number results['source'] = NotifyTwilio.unquote(results['host']) # Get our account_side and auth_token from the user/pass config results['account_sid'] = NotifyTwilio.unquote(results['user']) results['auth_token'] = NotifyTwilio.unquote(results['password']) # Auth Token if 'token' in results['qsd'] and len(results['qsd']['token']): # Extract the account sid from an argument results['auth_token'] = \ NotifyTwilio.unquote(results['qsd']['token']) # Account SID if 'sid' in results['qsd'] and len(results['qsd']['sid']): # Extract the account sid from an argument results['account_sid'] = \ NotifyTwilio.unquote(results['qsd']['sid']) # API Key if 'apikey' in results['qsd'] and len(results['qsd']['apikey']): results['apikey'] = results['qsd']['apikey'] # Support the 'from' and 'source' variable so that we can support # targets this way too. 
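# Illustrative sketch (placeholders only) of the equivalent query-string
# form handled just below; 'from' (or 'source') overrides the host based
# source number and 'to' may carry several comma separated targets:
#
#   twilio://ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa:faketoken123@15551234567
#       ?to=15559876543,15553334444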
# The 'from' makes it easier to use yaml configuration if 'from' in results['qsd'] and len(results['qsd']['from']): results['source'] = \ NotifyTwilio.unquote(results['qsd']['from']) if 'source' in results['qsd'] and len(results['qsd']['source']): results['source'] = \ NotifyTwilio.unquote(results['qsd']['source']) # Support the 'to' variable so that we can support targets this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyTwilio.parse_phone_no(results['qsd']['to'], prefix=True) return results apprise-1.9.3/apprise/plugins/twist.py000066400000000000000000000710151477231770000200530ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # # All of the documentation needed to work with the Twist API can be found # here: https://developer.twist.com/v3/ import re import requests from json import loads from itertools import chain from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyFormat from ..common import NotifyType from ..utils.parse import parse_list, is_email from ..locale import gettext_lazy as _ # A workspace can also be interpreted as a team name too! IS_CHANNEL = re.compile( r'^#?(?P((?P[A-Za-z0-9_-]+):)?' r'(?P[^\s]{1,64}))$') IS_CHANNEL_ID = re.compile( r'^(?P((?P[0-9]+):)?(?P[0-9]+))$') # Used to break apart list of potential tags by their delimiter # into a usable list. LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+') class NotifyTwist(NotifyBase): """ A wrapper for Notify Twist Notifications """ # The default descriptive name associated with the Notification service_name = 'Twist' # The services URL service_url = 'https://twist.com' # The default secure protocol secure_protocol = 'twist' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_twist' # The maximum size of the message body_maxlen = 1000 # Default to markdown notify_format = NotifyFormat.MARKDOWN # The default Notification URL to use api_url = 'https://api.twist.com/api/v3/' # Allow 300 requests per minute. 
# 60/300 = 0.2 request_rate_per_sec = 0.2 # The default channel to notify if no targets are specified default_notification_channel = 'general' # Define object templates templates = ( '{schema}://{password}:{email}', '{schema}://{password}:{email}/{targets}', ) # Define our template arguments template_tokens = dict(NotifyBase.template_tokens, **{ 'password': { 'name': _('Password'), 'type': 'string', 'private': True, 'required': True, }, 'email': { 'name': _('Email'), 'type': 'string', 'required': True, }, 'target_channel': { 'name': _('Target Channel'), 'type': 'string', 'prefix': '#', 'map_to': 'targets', }, 'target_channel_id': { 'name': _('Target Channel ID'), 'type': 'string', 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, }) def __init__(self, email=None, targets=None, **kwargs): """ Initialize Notify Twist Object """ super().__init__(**kwargs) # Initialize channels list self.channels = set() # Initialize Channel ID which are stored as: # : self.channel_ids = set() # The token is None if we're not logged in and False if we # failed to log in. Otherwise it is set to the actual token self.token = None # Our default workspace (associated with our token) self.default_workspace = None # A set of all of the available workspaces self._cached_workspaces = set() # A mapping of channel names, the layout is as follows: # { # : { # : , # : , # ... # }, # : { # : , # : , # ... # }, # } self._cached_channels = dict() # Initialize our Email Object self.email = email if email else '{}@{}'.format( self.user, self.host, ) # Check if it is valid result = is_email(self.email) if not result: # let outer exception handle this msg = 'The Twist Auth email specified ({}) is invalid.'\ .format(self.email) self.logger.warning(msg) raise TypeError(msg) # Re-assign email based on what was parsed self.email = result['full_email'] if email: # Force user/host to be that of the defined email for # consistency. This is very important for those initializing # this object with the the email object would could potentially # cause inconsistency to contents in the NotifyBase() object self.user = result['user'] self.host = result['domain'] if not self.password: msg = 'No Twist password was specified with account: {}'\ .format(self.email) self.logger.warning(msg) raise TypeError(msg) # Validate recipients and drop bad ones: for recipient in parse_list(targets): result = IS_CHANNEL_ID.match(recipient) if result: # store valid channel id self.channel_ids.add(result.group('name')) continue result = IS_CHANNEL.match(recipient) if result: # store valid device self.channels.add(result.group('name').lower()) continue self.logger.warning( 'Dropped invalid channel/id ' '({}) specified.'.format(recipient), ) if len(self.channels) + len(self.channel_ids) == 0: # Notify our default channel self.channels.add(self.default_notification_channel) self.logger.warning( 'Added default notification channel {}'.format( self.default_notification_channel)) return @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol if self.secure else self.protocol, self.user, self.password, self.host, self.port, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
""" # Our URL parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) return '{schema}://{password}:{user}@{host}/{targets}/' \ '?{params}'.format( schema=self.secure_protocol, password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), user=self.quote(self.user, safe=''), host=self.host, targets='/'.join( [NotifyTwist.quote(x, safe='') for x in chain( # Channels are prefixed with a pound/hashtag symbol ['#{}'.format(x) for x in self.channels], # Channel IDs self.channel_ids, )]), params=NotifyTwist.urlencode(params), ) def __len__(self): """ Returns the number of targets associated with this notification """ return len(self.channels) + len(self.channel_ids) def login(self): """ A simple wrapper to authenticate with the Twist Server """ # Prepare our payload payload = { 'email': self.email, 'password': self.password, } # Reset our default workspace self.default_workspace = None # Reset our cached objects self._cached_workspaces = set() self._cached_channels = dict() # Send Login Information postokay, response = self._fetch( 'users/login', payload=payload, # We set this boolean so internal recursion doesn't take place. login=True, ) if not postokay or not response: # Setting this variable to False as a way of letting us know # we failed to authenticate on our last attempt self.token = False return False # Our response object looks like this (content has been altered for # presentation purposes): # { # "contact_info": null, # "profession": null, # "timezone": "UTC", # "avatar_id": null, # "id": 123456, # "first_name": "Jordan", # "comet_channel": # "124371-34be423219130343030d4ec0a3dabbbbbe565eee", # "restricted": false, # "default_workspace": 92020, # "snooze_dnd_end": null, # "email": "user@example.com", # "comet_server": "https://comet.twist.com", # "snooze_until": null, # "lang": "en", # "feature_flags": [], # "short_name": "Jordan P.", # "away_mode": null, # "time_format": "12", # "client_id": "cb01f37e-a5b2-13e9-ba2a-023a33d10dc0", # "removed": false, # "emails": [ # { # "connected": [], # "email": "user@example.com", # "primary": true # } # ], # "scheduled_banners": [ # "threads_3", # "threads_1", # "notification_permissions", # "search_1", # "messages_1", # "team_1", # "inbox_2", # "inbox_1" # ], # "snooze_dnd_start": null, # "name": "Jordan Peterson", # "off_days": [], # "bot": false, # "token": "2e82c1e4e8b0091fdaa34ff3972351821406f796", # "snoozed": false, # "setup_pending": false, # "date_format": "MM/DD/YYYY" # } # Store our default workspace self.default_workspace = response.get('default_workspace') # Acquire our token self.token = response.get('token') self.logger.info('Authenticated to Twist as {}'.format(self.email)) return True def logout(self): """ A simple wrapper to log out of the server """ if not self.token: # Nothing more to do return True # Send Logout Message postokay, response = self._fetch('users/logout') # reset our token self.token = None # There is no need to handling failed log out attempts at this time return True def get_workspaces(self): """ Returns all workspaces associated with this user account as a set This returned object is either an empty dictionary or one that looks like this: { 'workspace': , 'workspace': , 'workspace': , } All workspaces are made lowercase for comparison purposes """ if not self.token and not self.login(): # Nothing more to do return dict() postokay, response = self._fetch('workspaces/get') if not postokay or not response: # We failed to retrieve return dict() # The response object looks like 
so: # [ # { # "created_ts": 1563044447, # "name": "apprise", # "creator": 123571, # "color": 1, # "default_channel": 13245, # "plan": "free", # "default_conversation": 63022, # "id": 12345 # } # ] # Knowing our response, we can iterate over each object and cache our # object result = {} for entry in response: result[entry.get('name', '').lower()] = entry.get('id', '') return result def get_channels(self, wid): """ Simply returns the channel objects associated with the specified workspace id. This returned object is either an empty dictionary or one that looks like this: { 'channel1': , 'channel2': , 'channel3': , } All channels are made lowercase for comparison purposes """ if not self.token and not self.login(): # Nothing more to do return {} payload = {'workspace_id': wid} postokay, response = self._fetch( 'channels/get', payload=payload) if not postokay or not isinstance(response, list): # We failed to retrieve return {} # Response looks like this: # [ # { # "id": 123, # "name": "General" # "workspace_id": 12345, # "color": 1, # "description": "", # "archived": false, # "public": true, # "user_ids": [ # 8754 # ], # "created_ts": 1563044447, # "creator": 123571, # } # ] # # Knowing our response, we can iterate over each object and cache our # object result = {} for entry in response: result[entry.get('name', '').lower()] = entry.get('id', '') return result def _channel_migration(self): """ A simple wrapper to get all of the current workspaces including the default one. This plays a role in what channel(s) get notified and where. A cache lookup has overhead, and is only required to be preformed if the user specified channels by their string value """ if not self.token and not self.login(): # Nothing more to do return False if not len(self.channels): # Nothing to do; take an early exit return True if self.default_workspace \ and self.default_workspace not in self._cached_channels: # Get our default workspace entries self._cached_channels[self.default_workspace] = \ self.get_channels(self.default_workspace) # initialize our error tracking has_error = False while len(self.channels): # Pop our channel off of the stack result = IS_CHANNEL.match(self.channels.pop()) # Populate our key variables workspace = result.group('workspace') channel = result.group('channel').lower() # Acquire our workspace_id if we can if workspace: # We always work with the workspace in it's lowercase form workspace = workspace.lower() # A workspace was defined if not len(self._cached_workspaces): # cache our workspaces; this only needs to be done once self._cached_workspaces = self.get_workspaces() if workspace not in self._cached_workspaces: # not found self.logger.warning( 'The Twist User {} is not associated with the ' 'Team {}'.format(self.email, workspace)) # Toggle our return flag has_error = True continue # Store the workspace id workspace_id = self._cached_workspaces[workspace] else: # use default workspace workspace_id = self.default_workspace # Check to see if our channel exists in our default workspace if workspace_id in self._cached_channels \ and channel in self._cached_channels[workspace_id]: # Store our channel ID self.channel_ids.add('{}:{}'.format( workspace_id, self._cached_channels[workspace_id][channel], )) continue # if we reach here, we failed to add our channel self.logger.warning( 'The Channel #{} was not found{}.'.format( channel, '' if not workspace else ' with Team {}'.format(workspace), )) # Toggle our return flag has_error = True continue # There is no need to handling failed log out 
attempts at this time return not has_error def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Twist Notification """ # error tracking (used for function return) has_error = False if not self.token and not self.login(): # We failed to authenticate - we're done return False if len(self.channels) > 0: # Converts channels to their maped IDs if found; this is the only # way to send notifications to Twist self._channel_migration() if not len(self.channel_ids): # We have nothing to notify self.logger.warning('There are no Twist targets to notify') return False # Notify all of our identified channels ids = list(self.channel_ids) while len(ids) > 0: # Retrieve our Channel Object result = IS_CHANNEL_ID.match(ids.pop()) # We need both the workspace/team id and channel id channel_id = int(result.group('channel')) # Prepare our payload payload = { 'channel_id': channel_id, 'title': title, 'content': body, } postokay, response = self._fetch( 'threads/add', payload=payload, ) # only toggle has_error flag if we had an error if not postokay: # Mark our failure has_error = True continue # If we reach here, we were successful self.logger.info( 'Sent Twist notification to {}.'.format( result.group('name'))) return not has_error def _fetch(self, url, payload=None, method='POST', login=False): """ Wrapper to Twist API requests object """ # use what was specified, otherwise build headers dynamically headers = { 'User-Agent': self.app_id, } headers['Content-Type'] = \ 'application/x-www-form-urlencoded; charset=utf-8' if self.token: # Set our token headers['Authorization'] = 'Bearer {}'.format(self.token) # Prepare our api url api_url = '{}{}'.format(self.api_url, url) # Some Debug Logging self.logger.debug('Twist {} URL: {} (cert_verify={})'.format( method, api_url, self.verify_certificate)) self.logger.debug('Twist Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made; self.throttle() # Initialize a default value for our content value content = {} # acquire our request mode fn = requests.post if method == 'POST' else requests.get try: r = fn( api_url, data=payload, headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) # Get our JSON content if it's possible try: content = loads(r.content) except (TypeError, ValueError, AttributeError): # TypeError = r.content is not a String # ValueError = r.content is Unparsable # AttributeError = r.content is None content = {} # handle authentication errors where our token has just simply # expired. 
The error response content looks like this: # { # "error_code": 200, # "error_uuid": "af80bd0715434231a649f2258d7fb946", # "error_extra": {}, # "error_string": "Invalid token" # } # # Authentication related codes: # 120 = You are not logged in # 200 = Invalid Token # # Source: https://developer.twist.com/v3/#errors # # We attempt to login again and retry the original request # if we aren't in the process of handling a login already if r.status_code != requests.codes.ok and login is False \ and isinstance(content, dict) and \ content.get('error_code') in (120, 200): # We failed to authenticate with our token; login one more # time and retry this original request if self.login(): r = fn( api_url, data=payload, headers=headers, verify=self.verify_certificate, timeout=self.request_timeout ) # Get our JSON content if it's possible try: content = loads(r.content) except (TypeError, ValueError, AttributeError): # TypeError = r.content is not a String # ValueError = r.content is Unparsable # AttributeError = r.content is None content = {} if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyTwist.http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send Twist {} to {}: ' '{}error={}.'.format( method, api_url, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure return (False, content) except requests.RequestException as e: self.logger.warning( 'Exception received when sending Twist {} to {}: '. format(method, api_url)) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure return (False, content) return (True, content) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url) if not results: # We're done early as we couldn't load the results return results if not results.get('user'): # A username is required return None # Acquire our targets results['targets'] = NotifyTwist.split_path(results['fullpath']) if not results.get('password'): # Password is required; we will accept the very first entry on the # path as a password instead if len(results['targets']) == 0: # No targets to get our password from return None # We need to requote contents since this variable will get # unquoted later on in the process. This step appears a bit # hacky, but it allows us to support the password in this location # - twist://user@example.com/password results['password'] = NotifyTwist.quote( results['targets'].pop(0), safe='') else: # Now we handle our format: # twist://password:email # # since URL logic expects # schema://user:password@host # # you can see how this breaks. The colon at the front delmits # passwords and you can see the twist:// url inverts what we # expect: # twist://password:user@example.com # # twist://abc123:bob@example.com using normal conventions would # have interpreted 'bob' as the password and 'abc123' as the user. # For the purpose of apprise simplifying this for us, we need to # swap these arguments when we prepare the email. 
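# Illustrative sketch of that swap (credentials are placeholders only):
# the URL below arrives as user='mypassword', password='bob',
# host='example.com', and after the swap is treated as the email
# 'bob@example.com' authenticated with 'mypassword':
#
#   import apprise
#   apobj = apprise.Apprise()
#   # no target given, so the default 'general' channel is notified
#   apobj.add('twist://mypassword:bob@example.com')
#   apobj.notify(title='CI', body='deployment finished')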
_password = results['user'] results['user'] = results['password'] results['password'] = _password # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyTwist.parse_list(results['qsd']['to']) return results def __del__(self): """ Destructor """ try: self.logout() except LookupError: # pragma: no cover # Python v3.5 call to requests can sometimes throw the exception # "/usr/lib64/python3.7/socket.py", line 748, in getaddrinfo # LookupError: unknown encoding: idna # # This occurs every time when running unit-tests against Apprise: # LANG=C.UTF-8 PYTHONPATH=$(pwd) py.test-3.7 # # There has been an open issue on this since Jan 2017. # - https://bugs.python.org/issue29288 # # A ~similar~ issue can be identified here in the requests # ticket system as unresolved and has provided work-arounds # - https://github.com/kennethreitz/requests/issues/3578 pass except ImportError: # pragma: no cover # The actual exception is `ModuleNotFoundError` however ImportError # grants us backwards compatibility with versions of Python older # than v3.6 # Python code that makes early calls to sys.exit() can cause # the __del__() code to run. However, in some newer versions of # Python, this causes the `sys` library to no longer be # available. The stack overflow also goes on to suggest that # it's not wise to use the __del__() as a destructor # which is the case here. # https://stackoverflow.com/questions/67218341/\ # modulenotfounderror-import-of-time-halted-none-in-sys-\ # modules-occured-when-obj?noredirect=1&lq=1 # # # Also see: https://stackoverflow.com/questions\ # /1481488/what-is-the-del-method-and-how-do-i-call-it # At this time it seems clean to try to log out (if we can) # but not throw any unnecessary exceptions (like this one) to # the end user if we don't have to. pass apprise-1.9.3/apprise/plugins/twitter.py000066400000000000000000000742741477231770000204150ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
# See https://developer.twitter.com/en/docs/direct-messages/\ # sending-and-receiving/api-reference/new-event.html import re import requests from copy import deepcopy from datetime import datetime from datetime import timezone from requests_oauthlib import OAuth1 from json import dumps from json import loads from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyType from ..utils.parse import parse_list, parse_bool, validate_regex from ..locale import gettext_lazy as _ from ..attachment.base import AttachBase IS_USER = re.compile(r'^\s*@?(?P[A-Z0-9_]+)$', re.I) class TwitterMessageMode: """ Twitter Message Mode """ # DM (a Direct Message) DM = 'dm' # A Public Tweet TWEET = 'tweet' # Define the types in a list for validation purposes TWITTER_MESSAGE_MODES = ( TwitterMessageMode.DM, TwitterMessageMode.TWEET, ) class NotifyTwitter(NotifyBase): """ A wrapper to Twitter Notifications """ # The default descriptive name associated with the Notification service_name = 'Twitter' # The services URL service_url = 'https://twitter.com/' # The default secure protocol is twitter. secure_protocol = ('x', 'twitter', 'tweet') # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_twitter' # Support attachments attachment_support = True # Do not set body_maxlen as it is set in a property value below # since the length varies depending if we are doing a direct message # or a tweet # body_maxlen = see below @propery defined # Twitter does have titles when creating a message title_maxlen = 0 # Twitter API Reference To Acquire Someone's Twitter ID twitter_lookup = 'https://api.twitter.com/1.1/users/lookup.json' # Twitter API Reference To Acquire Current Users Information twitter_whoami = \ 'https://api.twitter.com/1.1/account/verify_credentials.json' # Twitter API Reference To Send A Private DM twitter_dm = 'https://api.twitter.com/1.1/direct_messages/events/new.json' # Twitter API Reference To Send A Public Tweet twitter_tweet = 'https://api.twitter.com/1.1/statuses/update.json' # it is documented on the site that the maximum images per tweet # is 4 (unless it's a GIF, then it's only 1) __tweet_non_gif_images_batch = 4 # Twitter Media (Attachment) Upload Location twitter_media = 'https://upload.twitter.com/1.1/media/upload.json' # Twitter is kind enough to return how many more requests we're allowed to # continue to make within it's header response as: # X-Rate-Limit-Reset: The epoc time (in seconds) we can expect our # rate-limit to be reset. # X-Rate-Limit-Remaining: an integer identifying how many requests we're # still allow to make. 
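# Illustrative sketch (header values are made up) of how those two
# rate-limit headers translate into a wait, mirroring the naive-UTC
# datetime handling and the 0.5 second grace period applied further down
# in _fetch():
#
#   from datetime import datetime, timezone
#   headers = {'X-Rate-Limit-Reset': '1713377400',
#              'X-Rate-Limit-Remaining': '0'}
#   reset = datetime.fromtimestamp(
#       int(headers['X-Rate-Limit-Reset']),
#       timezone.utc).replace(tzinfo=None)
#   now = datetime.now(timezone.utc).replace(tzinfo=None)
#   if int(headers['X-Rate-Limit-Remaining']) == 0 and now < reset:
#       wait = (reset - now).total_seconds() + 0.5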
request_rate_per_sec = 0 # For Tracking Purposes ratelimit_reset = datetime.now(timezone.utc).replace(tzinfo=None) # Default to 1000; users can send up to 1000 DM's and 2400 tweets a day # This value only get's adjusted if the server sets it that way ratelimit_remaining = 1 templates = ( '{schema}://{ckey}/{csecret}/{akey}/{asecret}', '{schema}://{ckey}/{csecret}/{akey}/{asecret}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'ckey': { 'name': _('Consumer Key'), 'type': 'string', 'private': True, 'required': True, }, 'csecret': { 'name': _('Consumer Secret'), 'type': 'string', 'private': True, 'required': True, }, 'akey': { 'name': _('Access Key'), 'type': 'string', 'private': True, 'required': True, }, 'asecret': { 'name': _('Access Secret'), 'type': 'string', 'private': True, 'required': True, }, 'target_user': { 'name': _('Target User'), 'type': 'string', 'prefix': '@', 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'mode': { 'name': _('Message Mode'), 'type': 'choice:string', 'values': TWITTER_MESSAGE_MODES, 'default': TwitterMessageMode.DM, }, 'cache': { 'name': _('Cache Results'), 'type': 'bool', 'default': True, }, 'to': { 'alias_of': 'targets', }, 'batch': { 'name': _('Batch Mode'), 'type': 'bool', 'default': True, }, }) def __init__(self, ckey, csecret, akey, asecret, targets=None, mode=None, cache=True, batch=True, **kwargs): """ Initialize Twitter Object """ super().__init__(**kwargs) self.ckey = validate_regex(ckey) if not self.ckey: msg = 'An invalid Twitter Consumer Key was specified.' self.logger.warning(msg) raise TypeError(msg) self.csecret = validate_regex(csecret) if not self.csecret: msg = 'An invalid Twitter Consumer Secret was specified.' self.logger.warning(msg) raise TypeError(msg) self.akey = validate_regex(akey) if not self.akey: msg = 'An invalid Twitter Access Key was specified.' self.logger.warning(msg) raise TypeError(msg) self.asecret = validate_regex(asecret) if not self.asecret: msg = 'An invalid Access Secret was specified.' self.logger.warning(msg) raise TypeError(msg) # Store our webhook mode self.mode = self.template_args['mode']['default'] \ if not isinstance(mode, str) else mode.lower() if mode and isinstance(mode, str): self.mode = next( (a for a in TWITTER_MESSAGE_MODES if a.startswith(mode)), None) if self.mode not in TWITTER_MESSAGE_MODES: msg = 'The Twitter message mode specified ({}) is invalid.'\ .format(mode) self.logger.warning(msg) raise TypeError(msg) else: self.mode = self.template_args['mode']['default'] # Set Cache Flag self.cache = cache # Prepare Image Batch Mode Flag self.batch = batch # Track any errors has_error = False # Identify our targets self.targets = [] for target in parse_list(targets): match = IS_USER.match(target) if match and match.group('user'): self.targets.append(match.group('user')) continue has_error = True self.logger.warning( 'Dropped invalid Twitter user ({}) specified.'.format(target), ) if has_error and not self.targets: # We have specified that we want to notify one or more individual # and we failed to load any of them. 
Since it's also valid to # notify no one at all (which means we notify ourselves), it's # important we don't switch from the users original intentions self.targets = None # Initialize our cache values self._whoami_cache = None self._user_cache = {} return def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, **kwargs): """ Perform Twitter Notification """ if self.targets is None: self.logger.warning('No valid Twitter targets to notify.') return False # Build a list of our attachments attachments = [] if attach and self.attachment_support: # We need to upload our payload first so that we can source it # in remaining messages for no, attachment in enumerate(attach, start=1): # Perform some simple error checking if not attachment: # We could not access the attachment self.logger.error( 'Could not access attachment {}.'.format( attachment.url(privacy=True))) return False if not re.match(r'^image/.*', attachment.mimetype, re.I): # Only support images at this time self.logger.warning( 'Ignoring unsupported Twitter attachment {}.'.format( attachment.url(privacy=True))) continue self.logger.debug( 'Preparing Twitter attachment {}'.format( attachment.url(privacy=True))) # Upload our image and get our id associated with it # see: https://developer.twitter.com/en/docs/twitter-api/v1/\ # media/upload-media/api-reference/post-media-upload postokay, response = self._fetch( self.twitter_media, payload=attachment, ) if not postokay: # We can't post our attachment return False # Prepare our filename filename = attachment.name \ if attachment.name else f'file{no:03}.dat' if not (isinstance(response, dict) and response.get('media_id')): self.logger.debug( 'Could not attach the file to Twitter: %s (mime=%s)', filename, attachment.mimetype) continue # If we get here, our output will look something like this: # { # "media_id": 710511363345354753, # "media_id_string": "710511363345354753", # "media_key": "3_710511363345354753", # "size": 11065, # "expires_after_secs": 86400, # "image": { # "image_type": "image/jpeg", # "w": 800, # "h": 320 # } # } response.update({ # Update our response to additionally include the # attachment details 'file_name': filename, 'file_mime': attachment.mimetype, 'file_path': attachment.path, }) # Save our pre-prepared payload for attachment posting attachments.append(response) # - calls _send_tweet if the mode is set so # - calls _send_dm (direct message) otherwise return getattr(self, '_send_{}'.format(self.mode))( body=body, title=title, notify_type=notify_type, attachments=attachments, **kwargs) def _send_tweet(self, body, title='', notify_type=NotifyType.INFO, attachments=None, **kwargs): """ Twitter Public Tweet """ # Error Tracking has_error = False payload = { 'status': body, } payloads = [] if not attachments: payloads.append(payload) else: # Group our images if batch is set to do so batch_size = 1 if not self.batch \ else self.__tweet_non_gif_images_batch # Track our batch control in our message generation batches = [] batch = [] for attachment in attachments: batch.append(str(attachment['media_id'])) # Twitter supports batching images together. This allows # the batching of multiple images together. Twitter also # makes it clear that you can't batch `gif` files; they need # to be separate. So the below preserves the ordering that # a user passed their attachments in. if 4-non-gif images # are passed, they are all part of a single message. # # however, if they pass in image, gif, image, gif. 
The # gif's inbetween break apart the batches so this would # produce 4 separate tweets. # # If you passed in, image, image, gif, image. <- This would # produce 3 images (as the first 2 images could be lumped # together as a batch) if not re.match( r'^image/(png|jpe?g)', attachment['file_mime'], re.I) \ or len(batch) >= batch_size: batches.append(','.join(batch)) batch = [] if batch: batches.append(','.join(batch)) for no, media_ids in enumerate(batches): _payload = deepcopy(payload) _payload['media_ids'] = media_ids if no or not body: # strip text and replace it with the image representation _payload['status'] = \ '{:02d}/{:02d}'.format(no + 1, len(batches)) payloads.append(_payload) for no, payload in enumerate(payloads, start=1): # Send Tweet postokay, response = self._fetch( self.twitter_tweet, payload=payload, json=False, ) if not postokay: # Track our error has_error = True errors = [] try: errors = ['Error Code {}: {}'.format( e.get('code', 'unk'), e.get('message')) for e in response['errors']] except (KeyError, TypeError): pass for error in errors: self.logger.debug( 'Tweet [%.2d/%.2d] Details: %s', no, len(payloads), error) continue try: url = 'https://twitter.com/{}/status/{}'.format( response['user']['screen_name'], response['id_str']) except (KeyError, TypeError): url = 'unknown' self.logger.debug( 'Tweet [%.2d/%.2d] Details: %s', no, len(payloads), url) self.logger.info( 'Sent [%.2d/%.2d] Twitter notification as public tweet.', no, len(payloads)) return not has_error def _send_dm(self, body, title='', notify_type=NotifyType.INFO, attachments=None, **kwargs): """ Twitter Direct Message """ # Error Tracking has_error = False payload = { 'event': { 'type': 'message_create', 'message_create': { 'target': { # This gets assigned 'recipient_id': None, }, 'message_data': { 'text': body, } } } } # Lookup our users (otherwise we look up ourselves) targets = self._whoami(lazy=self.cache) if not len(self.targets) \ else self._user_lookup(self.targets, lazy=self.cache) if not targets: # We failed to lookup any users self.logger.warning( 'Failed to acquire user(s) to Direct Message via Twitter') return False payloads = [] if not attachments: payloads.append(payload) else: for no, attachment in enumerate(attachments): _payload = deepcopy(payload) _data = _payload['event']['message_create']['message_data'] _data['attachment'] = { 'type': 'media', 'media': { 'id': attachment['media_id'] }, 'additional_owners': ','.join([str(x) for x in targets.values()]) } if no or not body: # strip text and replace it with the image representation _data['text'] = \ '{:02d}/{:02d}'.format(no + 1, len(attachments)) payloads.append(_payload) for no, payload in enumerate(payloads, start=1): for screen_name, user_id in targets.items(): # Assign our user target = payload['event']['message_create']['target'] target['recipient_id'] = user_id # Send Twitter DM postokay, response = self._fetch( self.twitter_dm, payload=payload, ) if not postokay: # Track our error has_error = True continue self.logger.info( 'Sent [{:02d}/{:02d}] Twitter DM notification to @{}.' 
.format(no, len(payloads), screen_name)) return not has_error def _whoami(self, lazy=True): """ Looks details of current authenticated user """ if lazy and self._whoami_cache is not None: # Use cached response return self._whoami_cache # Contains a mapping of screen_name to id results = {} # Send Twitter DM postokay, response = self._fetch( self.twitter_whoami, method='GET', json=False, ) if postokay: try: results[response['screen_name']] = response['id'] self._whoami_cache = { response['screen_name']: response['id'], } self._user_cache.update(results) except (TypeError, KeyError): pass return results def _user_lookup(self, screen_name, lazy=True): """ Looks up a screen name and returns the user id the screen_name can be a list/set/tuple as well """ # Contains a mapping of screen_name to id results = {} # Build a unique set of names names = parse_list(screen_name) if lazy and self._user_cache: # Use cached response results = { k: v for k, v in self._user_cache.items() if k in names} # limit our names if they already exist in our cache names = [name for name in names if name not in results] if not len(names): # They're is nothing further to do return results # Twitters API documents that it can lookup to 100 # results at a time. # https://developer.twitter.com/en/docs/accounts-and-users/\ # follow-search-get-users/api-reference/get-users-lookup for i in range(0, len(names), 100): # Look up our names by their screen_name postokay, response = self._fetch( self.twitter_lookup, payload={ 'screen_name': names[i:i + 100], }, json=False, ) if not postokay or not isinstance(response, list): # Track our error continue # Update our user index for entry in response: try: results[entry['screen_name']] = entry['id'] except (TypeError, KeyError): pass # Cache our response for future use; this saves on un-nessisary extra # hits against the Twitter API when we already know the answer self._user_cache.update(results) return results def _fetch(self, url, payload=None, method='POST', json=True): """ Wrapper to Twitter API requests object """ headers = { 'User-Agent': self.app_id, } data = None files = None # Open our attachment path if required: if isinstance(payload, AttachBase): # prepare payload files = {'media': (payload.name, open(payload.path, 'rb'))} elif json: headers['Content-Type'] = 'application/json' data = dumps(payload) else: data = payload auth = OAuth1( self.ckey, client_secret=self.csecret, resource_owner_key=self.akey, resource_owner_secret=self.asecret, ) # Some Debug Logging self.logger.debug('Twitter {} URL: {} (cert_verify={})'.format( method, url, self.verify_certificate)) self.logger.debug('Twitter Payload: %s' % str(payload)) # By default set wait to None wait = None if self.ratelimit_remaining == 0: # Determine how long we should wait for or if we should wait at # all. This isn't fool-proof because we can't be sure the client # time (calling this script) is completely synced up with the # Twitter server. One would hope we're on NTP and our clocks are # the same allowing this to role smoothly: now = datetime.now(timezone.utc).replace(tzinfo=None) if now < self.ratelimit_reset: # We need to throttle for the difference in seconds # We add 0.5 seconds to the end just to allow a grace # period. 
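                # Illustrative example (assumption; not from the original
                # source): if x-rate-limit-remaining was last reported as 0
                # and x-rate-limit-reset resolves to 30 seconds from now,
                # the computation below yields wait = 30 + 0.5 = 30.5, which
                # is then handed to self.throttle(wait=wait) before any
                # further I/O is attempted.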
wait = (self.ratelimit_reset - now).total_seconds() + 0.5 # Default content response object content = {} # Always call throttle before any remote server i/o is made; self.throttle(wait=wait) # acquire our request mode fn = requests.post if method == 'POST' else requests.get try: r = fn( url, data=data, files=files, headers=headers, auth=auth, verify=self.verify_certificate, timeout=self.request_timeout, ) try: content = loads(r.content) except (AttributeError, TypeError, ValueError): # ValueError = r.content is Unparsable # TypeError = r.content is None # AttributeError = r is None content = {} if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyTwitter.http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send Twitter {} to {}: ' '{}error={}.'.format( method, url, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure return (False, content) try: # Capture rate limiting if possible self.ratelimit_remaining = \ int(r.headers.get('x-rate-limit-remaining')) self.ratelimit_reset = datetime.fromtimestamp( int(r.headers.get('x-rate-limit-reset')), timezone.utc ).replace(tzinfo=None) except (TypeError, ValueError): # This is returned if we could not retrieve this information # gracefully accept this state and move on pass except requests.RequestException as e: self.logger.warning( 'Exception received when sending Twitter {} to {}: '. format(method, url)) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure return (False, content) except (OSError, IOError) as e: self.logger.warning( 'An I/O error occurred while handling {}.'.format( payload.name if isinstance(payload, AttachBase) else payload)) self.logger.debug('I/O Exception: %s' % str(e)) return (False, content) finally: # Close our file (if it's open) stored in the second element # of our files tuple (index 1) if files: files['media'][1].close() return (True, content) @property def body_maxlen(self): """ The maximum allowable characters allowed in the body per message This is used during a Private DM Message Size (not Public Tweets which are limited to 280 characters) """ return 10000 if self.mode == TwitterMessageMode.DM else 280 @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol[0], self.ckey, self.csecret, self.akey, self.asecret, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
""" # Define any URL parameters params = { 'mode': self.mode, 'batch': 'yes' if self.batch else 'no', 'cache': 'yes' if self.cache else 'no', } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) return '{schema}://{ckey}/{csecret}/{akey}/{asecret}' \ '/{targets}?{params}'.format( schema=self.secure_protocol[0], ckey=self.pprint(self.ckey, privacy, safe=''), csecret=self.pprint( self.csecret, privacy, mode=PrivacyMode.Secret, safe=''), akey=self.pprint(self.akey, privacy, safe=''), asecret=self.pprint( self.asecret, privacy, mode=PrivacyMode.Secret, safe=''), targets='/'.join( [NotifyTwitter.quote('@{}'.format(target), safe='@') for target in self.targets]) if self.targets else '', params=NotifyTwitter.urlencode(params)) def __len__(self): """ Returns the number of targets associated with this notification """ targets = len(self.targets) return targets if targets > 0 else 1 @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Acquire remaining tokens tokens = NotifyTwitter.split_path(results['fullpath']) # The consumer token is stored in the hostname results['ckey'] = NotifyTwitter.unquote(results['host']) # # Now fetch the remaining tokens # # Consumer Secret results['csecret'] = tokens.pop(0) if tokens else None # Access Token Key results['akey'] = tokens.pop(0) if tokens else None # Access Token Secret results['asecret'] = tokens.pop(0) if tokens else None # The defined twitter mode if 'mode' in results['qsd'] and len(results['qsd']['mode']): results['mode'] = \ NotifyTwitter.unquote(results['qsd']['mode']) elif results['schema'].startswith('tweet'): results['mode'] = TwitterMessageMode.TWEET results['targets'] = [] # if a user has been defined, add it to the list of targets if results.get('user'): results['targets'].append(results.get('user')) # Store any remaining items as potential targets results['targets'].extend(tokens) # Get Cache Flag (reduces lookup hits) if 'cache' in results['qsd'] and len(results['qsd']['cache']): results['cache'] = \ parse_bool(results['qsd']['cache'], True) # Get Batch Mode Flag results['batch'] = \ parse_bool(results['qsd'].get( 'batch', NotifyTwitter.template_args['batch']['default'])) # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyTwitter.parse_list(results['qsd']['to']) return results apprise-1.9.3/apprise/plugins/voipms.py000066400000000000000000000314721477231770000202210ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. 
# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # Create an account https://voip.ms/ if you don't already have one # # Enable API and set an API password here: # - https://voip.ms/m/api.php # # Read more about VoIP.ms API here: # - https://voip.ms/m/apidocs.php import requests from json import loads from .base import NotifyBase from ..common import NotifyType from ..utils.parse import is_phone_no, is_email, parse_phone_no from ..locale import gettext_lazy as _ class NotifyVoipms(NotifyBase): """ A wrapper for VoIPms Notifications """ # The default descriptive name associated with the Notification service_name = 'VoIPms' # The services URL service_url = 'https://voip.ms' # The default protocol secure_protocol = 'voipms' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_voipms' # VoIPms uses the http protocol with JSON requests notify_url = 'https://voip.ms/api/v1/rest.php' # The maximum length of the body body_maxlen = 160 # The supported country code by VoIP.ms voip_ms_country_code = '1' # A title can not be used for SMS Messages. Setting this to zero will # cause any title (if defined) to get placed into the message body. title_maxlen = 0 # Define object templates templates = ( '{schema}://{password}:{email}/{from_phone}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'email': { 'name': _('User Email'), 'type': 'string', 'required': True, }, 'password': { 'name': _('Password'), 'type': 'string', 'private': True, 'required': True, }, 'from_phone': { 'name': _('From Phone No'), 'type': 'string', 'regex': (r'^\+?[0-9\s)(+-]+$', 'i'), 'map_to': 'source', }, 'target_phone': { 'name': _('Target Phone No'), 'type': 'string', 'prefix': '+', 'regex': (r'^[0-9\s)(+-]+$', 'i'), 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', 'required': True, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'from': { 'alias_of': 'from_phone', }, }) def __init__(self, email, source=None, targets=None, **kwargs): """ Initialize VoIPms Object """ super().__init__(**kwargs) # Validate our params here. if self.password is None: msg = 'Password has to be specified.' 
self.logger.warning(msg) raise TypeError(msg) # User is the email associated with the account result = is_email(email) if not result: msg = 'An invalid VoIPms user email: ' \ '({}) was specified.'.format(email) self.logger.warning(msg) raise TypeError(msg) self.email = result['full_email'] # Validate our source Phone # result = is_phone_no(source) if not result: msg = 'An invalid VoIPms source phone # ' \ '({}) was specified.'.format(source) self.logger.warning(msg) raise TypeError(msg) # Source Phone # only supports +1 country code # Allow 7 digit phones (presume they're local with +1 country code) if result['country'] \ and result['country'] != self.voip_ms_country_code: msg = 'VoIPms only supports +1 country code ' \ '({}) was specified.'.format(source) self.logger.warning(msg) raise TypeError(msg) # Store our source phone number (without country code) self.source = result['area'] + result['line'] # Parse our targets self.targets = list() if targets: for target in parse_phone_no(targets): # Validate targets and drop bad ones: result = is_phone_no(target) # Target Phone # only supports +1 country code if result['country'] \ and result['country'] != self.voip_ms_country_code: self.logger.warning( 'Ignoring invalid phone # ' '({}) specified.'.format(target), ) continue # store valid phone number self.targets.append(result['area'] + result['line']) else: # Send a message to ourselves self.targets.append(self.source) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform VoIPms Notification """ if len(self.targets) == 0: # There were no services to notify self.logger.warning('There were no VoIPms targets to notify.') return False # error tracking (used for function return) has_error = False # Prepare our headers headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/x-www-form-urlencoded', } # Prepare our payload payload = { 'api_username': self.email, 'api_password': self.password, 'did': self.source, 'message': body, 'method': 'sendSMS', # Gets filled in the loop below 'dst': None } # Create a copy of the targets list targets = list(self.targets) while len(targets): # Get our target to notify target = targets.pop(0) # Add target Phone # payload['dst'] = target # Some Debug Logging self.logger.debug('VoIPms GET URL: {} (cert_verify={})'.format( self.notify_url, self.verify_certificate)) self.logger.debug('VoIPms Payload: {}' .format(payload)) # Always call throttle before any remote server i/o is made self.throttle() response = {'status': 'unknown', 'message': ''} try: r = requests.get( self.notify_url, params=payload, headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) try: response = loads(r.content) except (AttributeError, TypeError, ValueError): # ValueError = r.content is Unparsable # TypeError = r.content is None # AttributeError = r is None pass if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyVoipms.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to send VoIPms SMS notification to {}: ' '{}{}error={}.'.format( target, status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure has_error = True continue # VoIPms sends 200 OK even if there is an error # check if status in response and if it is not success if response is not None and response['status'] != 'success': self.logger.warning( 'Failed to send VoIPms SMS notification to {}: ' 'status: {}, message: {}'.format( target, 
response['status'], response['message']) ) # Mark our failure has_error = True continue else: self.logger.info( 'Sent VoIPms SMS notification to %s' % target) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending VoIPms:%s ' 'SMS notification.' % target ) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure has_error = True continue return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol, self.email, self.password, self.source, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) schemaStr = \ '{schema}://{password}:{email}/{from_phone}/{targets}/?{params}' return schemaStr.format( schema=self.secure_protocol, email=self.email, password=self.pprint(self.password, privacy, safe=''), from_phone=self.voip_ms_country_code + self.pprint(self.source, privacy, safe=''), targets='/'.join( [self.voip_ms_country_code + NotifyVoipms.quote(x, safe='') for x in self.targets]), params=NotifyVoipms.urlencode(params)) def __len__(self): """ Returns the number of targets associated with this notification """ targets = len(self.targets) return targets if targets > 0 else 1 @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results results['targets'] = \ NotifyVoipms.split_path(results['fullpath']) if 'from' in results['qsd'] and len(results['qsd']['from']): results['source'] = \ NotifyVoipms.unquote(results['qsd']['from']) elif results['targets']: # The from phone no is the first entry in the list otherwise results['source'] = results['targets'].pop(0) # Swap user for pass since our input is: password:email # where email is user@hostname (or user@domain) user = results['password'] password = results['user'] results['password'] = password results['user'] = user results['email'] = '{}@{}'.format( NotifyVoipms.unquote(user), NotifyVoipms.unquote(results['host']), ) if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyVoipms.parse_phone_no(results['qsd']['to']) return results apprise-1.9.3/apprise/plugins/vonage.py000066400000000000000000000325601477231770000201620ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # Sign-up with https://dashboard.nexmo.com/ # # Get your (api) key and secret here: # - https://dashboard.nexmo.com/getting-started-guide # import requests from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyType from ..utils.parse import is_phone_no, parse_phone_no, validate_regex from ..locale import gettext_lazy as _ class NotifyVonage(NotifyBase): """ A wrapper for Vonage Notifications """ # The default descriptive name associated with the Notification service_name = 'Vonage' # The services URL service_url = 'https://dashboard.nexmo.com/' # The default protocol (nexmo kept for backwards compatibility) secure_protocol = ('vonage', 'nexmo') # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_nexmo' # Vonage uses the http protocol with JSON requests notify_url = 'https://rest.nexmo.com/sms/json' # The maximum length of the body body_maxlen = 160 # A title can not be used for SMS Messages. Setting this to zero will # cause any title (if defined) to get placed into the message body. title_maxlen = 0 # Define object templates templates = ( '{schema}://{apikey}:{secret}@{from_phone}', '{schema}://{apikey}:{secret}@{from_phone}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'apikey': { 'name': _('API Key'), 'type': 'string', 'required': True, 'regex': (r'^[a-z0-9]+$', 'i'), 'private': True, }, 'secret': { 'name': _('API Secret'), 'type': 'string', 'private': True, 'required': True, 'regex': (r'^[a-z0-9]+$', 'i'), }, 'from_phone': { 'name': _('From Phone No'), 'type': 'string', 'required': True, 'regex': (r'^\+?[0-9\s)(+-]+$', 'i'), 'map_to': 'source', }, 'target_phone': { 'name': _('Target Phone No'), 'type': 'string', 'prefix': '+', 'regex': (r'^[0-9\s)(+-]+$', 'i'), 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'from': { 'alias_of': 'from_phone', }, 'key': { 'alias_of': 'apikey', }, 'secret': { 'alias_of': 'secret', }, # Default Time To Live # By default Vonage attempt delivery for 72 hours, however the maximum # effective value depends on the operator and is typically 24 - 48 # hours. We recommend this value should be kept at its default or at # least 30 minutes. 
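        # Illustrative example (assumption; not part of the original
        # comment): a URL of the form
        #   vonage://{apikey}:{secret}@{from_phone}/{target_phone}?ttl=1800000
        # would request a 30 minute TTL via the 'ttl' argument defined below.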
'ttl': { 'name': _('ttl'), 'type': 'int', 'default': 900000, 'min': 20000, 'max': 604800000, }, }) def __init__(self, apikey, secret, source, targets=None, ttl=None, **kwargs): """ Initialize Vonage Object """ super().__init__(**kwargs) # API Key (associated with project) self.apikey = validate_regex( apikey, *self.template_tokens['apikey']['regex']) if not self.apikey: msg = 'An invalid Vonage API Key ' \ '({}) was specified.'.format(apikey) self.logger.warning(msg) raise TypeError(msg) # API Secret (associated with project) self.secret = validate_regex( secret, *self.template_tokens['secret']['regex']) if not self.secret: msg = 'An invalid Vonage API Secret ' \ '({}) was specified.'.format(secret) self.logger.warning(msg) raise TypeError(msg) # Set our Time to Live Flag self.ttl = self.template_args['ttl']['default'] try: self.ttl = int(ttl) except (ValueError, TypeError): # Do nothing pass if self.ttl < self.template_args['ttl']['min'] or \ self.ttl > self.template_args['ttl']['max']: msg = 'The Vonage TTL specified ({}) is out of range.'\ .format(self.ttl) self.logger.warning(msg) raise TypeError(msg) # The Source Phone # self.source = source result = is_phone_no(source) if not result: msg = 'The Account (From) Phone # specified ' \ '({}) is invalid.'.format(source) self.logger.warning(msg) raise TypeError(msg) # Store our parsed value self.source = result['full'] # Parse our targets self.targets = list() for target in parse_phone_no(targets): # Validate targets and drop bad ones: result = is_phone_no(target) if not result: self.logger.warning( 'Dropped invalid phone # ' '({}) specified.'.format(target), ) continue # store valid phone number self.targets.append(result['full']) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Vonage Notification """ # error tracking (used for function return) has_error = False # Prepare our headers headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/x-www-form-urlencoded', } # Prepare our payload payload = { 'api_key': self.apikey, 'api_secret': self.secret, 'ttl': self.ttl, 'from': self.source, 'text': body, # The to gets populated in the loop below 'to': None, } # Create a copy of the targets list targets = list(self.targets) if len(targets) == 0: # No sources specified, use our own phone no targets.append(self.source) while len(targets): # Get our target to notify target = targets.pop(0) # Prepare our user payload['to'] = target # Some Debug Logging self.logger.debug('Vonage POST URL: {} (cert_verify={})'.format( self.notify_url, self.verify_certificate)) self.logger.debug('Vonage Payload: {}' .format(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( self.notify_url, data=payload, headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyVonage.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to send Vonage notification to {}: ' '{}{}error={}.'.format( target, status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure has_error = True continue else: self.logger.info( 'Sent Vonage notification to %s.' % target) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Vonage:%s ' 'notification.' 
% target ) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure has_error = True continue return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol[0], self.apikey, self.secret) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'ttl': str(self.ttl), } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) return '{schema}://{key}:{secret}@{source}/{targets}/?{params}'.format( schema=self.secure_protocol[0], key=self.pprint(self.apikey, privacy, safe=''), secret=self.pprint( self.secret, privacy, mode=PrivacyMode.Secret, safe=''), source=NotifyVonage.quote(self.source, safe=''), targets='/'.join( [NotifyVonage.quote(x, safe='') for x in self.targets]), params=NotifyVonage.urlencode(params)) def __len__(self): """ Returns the number of targets associated with this notification """ targets = len(self.targets) return targets if targets > 0 else 1 @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Get our entries; split_path() looks after unquoting content for us # by default results['targets'] = NotifyVonage.split_path(results['fullpath']) # The hostname is our source number results['source'] = NotifyVonage.unquote(results['host']) # Get our account_side and auth_token from the user/pass config results['apikey'] = NotifyVonage.unquote(results['user']) results['secret'] = NotifyVonage.unquote(results['password']) # API Key if 'key' in results['qsd'] and len(results['qsd']['key']): # Extract the API Key from an argument results['apikey'] = \ NotifyVonage.unquote(results['qsd']['key']) # API Secret if 'secret' in results['qsd'] and len(results['qsd']['secret']): # Extract the API Secret from an argument results['secret'] = \ NotifyVonage.unquote(results['qsd']['secret']) # Support the 'from' and 'source' variable so that we can support # targets this way too. # The 'from' makes it easier to use yaml configuration if 'from' in results['qsd'] and len(results['qsd']['from']): results['source'] = \ NotifyVonage.unquote(results['qsd']['from']) if 'source' in results['qsd'] and len(results['qsd']['source']): results['source'] = \ NotifyVonage.unquote(results['qsd']['source']) # Support the 'ttl' variable if 'ttl' in results['qsd'] and len(results['qsd']['ttl']): results['ttl'] = \ NotifyVonage.unquote(results['qsd']['ttl']) # Support the 'to' variable so that we can support rooms this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyVonage.parse_phone_no(results['qsd']['to']) return results apprise-1.9.3/apprise/plugins/webexteams.py000066400000000000000000000223411477231770000210430ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. 
Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # At the time I created this plugin, their website had lots of issues with the # Firefox Browser. I fell back to Chrome and had no problems. # To use this plugin, you need to first access https://teams.webex.com and # make yourself an account if you don't already have one. You'll want to # create at least one 'space' before getting the 'incoming webhook'. # # Next you'll need to install the 'Incoming webhook' plugin found under # the 'other' category here: https://apphub.webex.com/integrations/ # These links may not always work as time goes by and websites always # change, but at the time of creating this plugin this was a direct link # to it: https://apphub.webex.com/integrations/incoming-webhooks-cisco-systems # If you're logged in, you'll be able to click on the 'Connect' button. From # there you'll need to accept the permissions it will ask of you. Give the # webhook a name such as 'apprise'. # When you're complete, you will recieve a URL that looks something like this: # https://api.ciscospark.com/v1/webhooks/incoming/\ # Y3lzY29zcGkyazovL3VzL1dFQkhPT0sajkkzYWU4fTMtMGE4Yy00 # # The last part of the URL is all you need to be interested in. 
Think of this # url as: # https://api.ciscospark.com/v1/webhooks/incoming/{token} # # You will need to assemble all of your URLs for this plugin to work as: # wxteams://{token} # # Resources # - https://developer.webex.com/docs/api/basics - markdown/post syntax # - https://developer.cisco.com/ecosystem/webex/apps/\ # incoming-webhooks-cisco-systems/ - Simple webhook example import re import requests from json import dumps from .base import NotifyBase from ..common import NotifyType from ..common import NotifyFormat from ..utils.parse import validate_regex from ..locale import gettext_lazy as _ # Extend HTTP Error Messages # Based on: https://developer.webex.com/docs/api/basics/rate-limiting WEBEX_HTTP_ERROR_MAP = { 401: 'Unauthorized - Invalid Token.', 415: 'Unsuported media specified', 429: 'To many consecutive requests were made.', 503: 'Service is overloaded, try again later', } class NotifyWebexTeams(NotifyBase): """ A wrapper for Webex Teams Notifications """ # The default descriptive name associated with the Notification service_name = 'Cisco Webex Teams' # The services URL service_url = 'https://webex.teams.com/' # The default secure protocol secure_protocol = ('wxteams', 'webex') # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_wxteams' # Webex Teams uses the http protocol with JSON requests notify_url = 'https://api.ciscospark.com/v1/webhooks/incoming/' # The maximum allowable characters allowed in the body per message body_maxlen = 1000 # We don't support titles for Webex notifications title_maxlen = 0 # Default to markdown; fall back to text notify_format = NotifyFormat.MARKDOWN # Define object templates templates = ( '{schema}://{token}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'token': { 'name': _('Token'), 'type': 'string', 'private': True, 'required': True, 'regex': (r'^[a-z0-9]{80,160}$', 'i'), }, }) def __init__(self, token, **kwargs): """ Initialize Webex Teams Object """ super().__init__(**kwargs) # The token associated with the account self.token = validate_regex( token, *self.template_tokens['token']['regex']) if not self.token: msg = 'The Webex Teams token specified ({}) is invalid.'\ .format(token) self.logger.warning(msg) raise TypeError(msg) def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Webex Teams Notification """ # Setup our headers headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json', } # Prepare our URL url = '{}/{}'.format(self.notify_url, self.token) payload = { 'markdown' if (self.notify_format == NotifyFormat.MARKDOWN) else 'text': body, } self.logger.debug('Webex Teams POST URL: %s (cert_verify=%r)' % ( url, self.verify_certificate, )) self.logger.debug('Webex Teams Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( url, data=dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code not in ( requests.codes.ok, requests.codes.no_content): # We had a problem status_str = \ NotifyWebexTeams.http_response_code_lookup( r.status_code) self.logger.warning( 'Failed to send Webex Teams notification: ' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) return False else: self.logger.info( 'Sent Webex Teams notification.') except requests.RequestException as e: 
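            # A transport-level failure (e.g. DNS resolution, timeout, or a
            # refused connection) raised by requests; log it and report the
            # notification as failed.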
self.logger.warning( 'A Connection error occurred sending Webex Teams ' 'notification.' ) self.logger.debug('Socket Exception: %s' % str(e)) return False return True @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol[0], self.token) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Our URL parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) return '{schema}://{token}/?{params}'.format( schema=self.secure_protocol[0], token=self.pprint(self.token, privacy, safe=''), params=NotifyWebexTeams.urlencode(params), ) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # The first token is stored in the hostname results['token'] = NotifyWebexTeams.unquote(results['host']) return results @staticmethod def parse_native_url(url): """ Support https://api.ciscospark.com/v1/webhooks/incoming/WEBHOOK_TOKEN """ result = re.match( r'^https?://(api\.ciscospark\.com|webexapis\.com)' r'/v[1-9][0-9]*/webhooks/incoming/' r'(?P[A-Z0-9_-]+)/?' r'(?P\?.+)?$', url, re.I) if result: return NotifyWebexTeams.parse_url( '{schema}://{webhook_token}/{params}'.format( schema=NotifyWebexTeams.secure_protocol[0], webhook_token=result.group('webhook_token'), params='' if not result.group('params') else result.group('params'))) return None apprise-1.9.3/apprise/plugins/wecombot.py000066400000000000000000000215451477231770000205230ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # WeCom for PC # 1. On WeCom for PC, find the target WeCom group for receiving alarm # notifications. # 2. Right-click the WeCom group. In the window that appears, click # "Add Group Bot". # 3. In the window that appears, click Create a Bot. # 4. In the window that appears, enter a custom bot name and click Add. # 5. 
You will be provided a Webhook URL that looks like: # https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key=abcd # # WeCom for Web # 1. On WebCom for Web, open the target WeCom group for receiving alarm # notifications. # 2. Click the group settings icon in the upper-right corner. # 3. On the group settings page, choose Group Bots > Add a Bot. # 4. On the management page for adding bots, enter a custom name for the new # bot. # 5. Click Add, copy the webhook address, and configure the API callback by # following Step 2. # the URL will look something like this: # https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key=abcd # ^ # | # webhook key # # This plugin also supports taking the URL (as identified above) directly # as well. import re import requests from json import dumps from .base import NotifyBase from ..common import NotifyType from ..utils.parse import validate_regex from ..locale import gettext_lazy as _ class NotifyWeComBot(NotifyBase): """ A wrapper for WeCom Bot Notifications """ # The default descriptive name associated with the Notification service_name = 'WeCom Bot' # The services URL service_url = 'https://weixin.qq.com/' # The default secure protocol secure_protocol = 'wecombot' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_wecombot' # Plain Text Notification URL notify_url = 'https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key={key}' # Define object templates templates = ( '{schema}://{key}', ) # The title is not used title_maxlen = 0 # Define our template arguments template_tokens = dict(NotifyBase.template_tokens, **{ # The Bot Key can be found at the end of the webhook provided (?key=) 'key': { 'name': _('Bot Webhook Key'), 'type': 'string', 'required': True, 'private': True, 'regex': (r'^[a-z0-9_-]+$', 'i'), }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ # You can optionally pass IRC colors into 'key': { 'alias_of': 'key', }, }) def __init__(self, key, **kwargs): """ Initialize WeCom Bot Object """ super().__init__(**kwargs) # Assign our bot webhook self.key = validate_regex( key, *self.template_tokens['key']['regex']) if not self.key: msg = 'An invalid WeCom Bot Webhook Key ' \ '({}) was specified.'.format(key) self.logger.warning(msg) raise TypeError(msg) # Prepare our notification URL now: self.api_url = self.notify_url.format( key=self.key, ) return @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.key) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
""" # Prepare our parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) return '{schema}://{key}/?{params}'.format( schema=self.secure_protocol, key=self.pprint(self.key, privacy, safe=''), params=NotifyWeComBot.urlencode(params), ) def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ wrapper to _send since we can alert more then one channel """ # prepare our headers headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json; charset=utf-8', } # Prepare our payload payload = { 'msgtype': "text", 'text': { 'content': body, } } self.logger.debug('WeCom Bot GET URL: %s (cert_verify=%r)' % ( self.api_url, self.verify_certificate)) self.logger.debug('WeCom Bot Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( self.api_url, data=dumps(payload).encode('utf-8'), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyWeComBot.http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send WeCom Bot notification: ' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.info('Sent WeCom Bot notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending WeCom Bot ' 'notification.') self.logger.debug('Socket Exception: %s' % str(e)) # Return; we're done return False return True @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # The first token is stored in the hostname results['key'] = NotifyWeComBot.unquote(results['host']) # The 'key' makes it easier to use yaml configuration if 'key' in results['qsd'] and len(results['qsd']['key']): results['key'] = \ NotifyWeComBot.unquote(results['qsd']['key']) return results @staticmethod def parse_native_url(url): """ Support https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key=BOTKEY """ result = re.match( r'^https?://qyapi\.weixin\.qq\.com/cgi-bin/webhook/send/?\?key=' r'(?P[A-Z0-9_-]+)/?' r'&?(?P.+)?$', url, re.I) if result: return NotifyWeComBot.parse_url( '{schema}://{key}{params}'.format( schema=NotifyWeComBot.secure_protocol, key=result.group('key'), params='' if not result.group('params') else '?' + result.group('params'))) return None apprise-1.9.3/apprise/plugins/whatsapp.py000066400000000000000000000473241477231770000205360ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. 
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

#
# API Source:
# https://developers.facebook.com/docs/whatsapp/cloud-api/reference/messages
#
# 1. Register a developer account with Meta:
#    https://developers.facebook.com/docs/whatsapp/cloud-api/get-started
# 2. Enable 2 Factor Authentication (2FA) with your account (if not done
#    already)
# 3. Create an App using the WhatsApp Product. There are 2 options to create
#    an app from; do NOT choose the WhatsApp Webhook one (choose the other)
#
# When you click on the API Setup section of your new app you need to record
# both the access token and the From Phone Number ID. Note that this is not
# the from phone number itself, but its ID. It's displayed below and contains
# way more numbers than your typical phone number

import re
import requests
from json import loads, dumps

from .base import NotifyBase
from ..common import NotifyType
from ..utils.parse import is_phone_no, parse_phone_no, validate_regex
from ..locale import gettext_lazy as _


class NotifyWhatsApp(NotifyBase):
    """
    A wrapper for WhatsApp Notifications
    """

    # The default descriptive name associated with the Notification
    service_name = 'WhatsApp'

    # The services URL
    service_url = \
        'https://developers.facebook.com/docs/whatsapp/cloud-api/get-started'

    # All notification requests are secure
    secure_protocol = 'whatsapp'

    # Allow 300 requests per minute.
    # 60/300 = 0.2
    request_rate_per_sec = 0.20

    # Facebook Graph version
    fb_graph_version = 'v17.0'

    # A URL that takes you to the setup/help of the specific protocol
    setup_url = 'https://github.com/caronc/apprise/wiki/Notify_whatsapp'

    # WhatsApp Message Notification URL
    notify_url = 'https://graph.facebook.com/{fb_ver}/{phone_id}/messages'

    # The maximum length of the body
    body_maxlen = 1024

    # A title can not be used for SMS Messages. Setting this to zero will
    # cause any title (if defined) to get placed into the message body.
title_maxlen = 0 # Define object templates templates = ( '{schema}://{token}@{from_phone_id}/{targets}', '{schema}://{template}:{token}@{from_phone_id}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'token': { 'name': _('Access Token'), 'type': 'string', 'private': True, 'required': True, 'regex': (r'^[a-z0-9]+$', 'i'), }, 'template': { 'name': _('Template Name'), 'type': 'string', 'required': False, 'regex': (r'^[^\s]+$', 'i'), }, 'from_phone_id': { 'name': _('From Phone ID'), 'type': 'string', 'private': True, 'required': True, 'regex': (r'^[0-9]+$', 'i'), }, 'target_phone': { 'name': _('Target Phone No'), 'type': 'string', 'prefix': '+', 'regex': (r'^[0-9\s)(+-]+$', 'i'), 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', }, 'language': { 'name': _('Language'), 'type': 'string', 'default': 'en_US', 'regex': (r'^[^0-9\s]+$', 'i'), }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'from': { 'alias_of': 'from_phone_id', }, 'token': { 'alias_of': 'token', }, 'template': { 'alias_of': 'template', }, 'lang': { 'alias_of': 'language', }, }) # Our supported mappings and component keys component_key_re = re.compile( r'(?P((?P[1-9][0-9]*)|(?Pbody|type)))', re.IGNORECASE) # Define any kwargs we're using template_kwargs = { 'template_mapping': { 'name': _('Template Mapping'), 'prefix': ':', }, } def __init__(self, token, from_phone_id, template=None, targets=None, language=None, template_mapping=None, **kwargs): """ Initialize WhatsApp Object """ super().__init__(**kwargs) # The Access Token associated with the account self.token = validate_regex( token, *self.template_tokens['token']['regex']) if not self.token: msg = 'An invalid WhatsApp Access Token ' \ '({}) was specified.'.format(token) self.logger.warning(msg) raise TypeError(msg) # The From Phone ID associated with the account self.from_phone_id = validate_regex( from_phone_id, *self.template_tokens['from_phone_id']['regex']) if not self.from_phone_id: msg = 'An invalid WhatsApp From Phone ID ' \ '({}) was specified.'.format(from_phone_id) self.logger.warning(msg) raise TypeError(msg) # The template to associate with the message if template: self.template = validate_regex( template, *self.template_tokens['template']['regex']) if not self.template: msg = 'An invalid WhatsApp Template Name ' \ '({}) was specified.'.format(template) self.logger.warning(msg) raise TypeError(msg) # The Template language Code to use if language: self.language = validate_regex( language, *self.template_tokens['language']['regex']) if not self.language: msg = 'An invalid WhatsApp Template Language Code ' \ '({}) was specified.'.format(language) self.logger.warning(msg) raise TypeError(msg) else: self.language = self.template_tokens['language']['default'] else: # # Message Mode # self.template = None # Parse our targets self.targets = list() for target in parse_phone_no(targets): # Validate targets and drop bad ones: result = is_phone_no(target) if not result: self.logger.warning( 'Dropped invalid phone # ' '({}) specified.'.format(target), ) continue # store valid phone number self.targets.append('+{}'.format(result['full'])) self.template_mapping = {} if template_mapping: # Store our extra payload entries self.template_mapping.update(template_mapping) # Validate Mapping and prepare Components self.components = dict() self.component_keys = list() for key, val in self.template_mapping.items(): matched = 
self.component_key_re.match(key) if not matched: msg = 'An invalid Template Component ID ' \ '({}) was specified.'.format(key) self.logger.warning(msg) raise TypeError(msg) if matched.group('id'): # # Manual Component Assigment (by id) # index = matched.group('id') map_to = { "type": "text", "text": val, } else: # matched.group('map') map_to = matched.group('map').lower() matched = self.component_key_re.match(val) if not (matched and matched.group('id')): msg = 'An invalid Template Component Mapping ' \ '(:{}={}) was specified.'.format(key, val) self.logger.warning(msg) raise TypeError(msg) index = matched.group('id') if index in self.components: msg = 'The Template Component index ' \ '({}) was already assigned.'.format(key) self.logger.warning(msg) raise TypeError(msg) self.components[index] = map_to self.component_keys = self.components.keys() # Adjust sorting and assume that the user put the order correctly; # if not Facebook just won't be very happy and will reject the # message sorted(self.component_keys) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform WhatsApp Notification """ if not self.targets: self.logger.warning( 'There are no valid WhatsApp targets to notify.') return False # error tracking (used for function return) has_error = False # Prepare our URL url = self.notify_url.format( fb_ver=self.fb_graph_version, phone_id=self.from_phone_id, ) # Prepare our headers headers = { 'User-Agent': self.app_id, 'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': f'Bearer {self.token}', } payload = { 'messaging_product': 'whatsapp', # The To gets populated in the loop below 'to': None, } if not self.template: # # Send Message # payload.update({ 'recipient_type': "individual", 'type': 'text', 'text': {"body": body}, }) else: # # Send Template # payload.update({ 'type': 'template', "template": { "name": self.template, "language": {"code": self.language}, }, }) if self.components: payload['template']['components'] = [ { "type": "body", "parameters": [], } ] for key in self.component_keys: if isinstance(self.components[key], dict): # Manual Assignment payload['template']['components'][0]["parameters"]\ .append(self.components[key]) continue # Mapping of body and/or notify type payload['template']['components'][0]["parameters"].append({ "type": "text", "text": body if self.components[key] == 'body' else notify_type, }) # Create a copy of the targets list targets = list(self.targets) while len(targets): # Get our target to notify target = targets.pop(0) # Prepare our user payload['to'] = target # Some Debug Logging self.logger.debug('WhatsApp POST URL: {} (cert_verify={})'.format( url, self.verify_certificate)) self.logger.debug('WhatsApp Payload: {}' .format(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( url, data=dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code not in ( requests.codes.created, requests.codes.ok): # We had a problem status_str = \ NotifyBase.http_response_code_lookup(r.status_code) # set up our status code to use status_code = r.status_code try: # Update our status response if we can json_response = loads(r.content) status_code = \ json_response['error'].get('code', status_code) status_str = \ json_response['error'].get('message', status_str) except (AttributeError, TypeError, ValueError, KeyError): # KeyError = r.content is parseable but does not # contain 'error' # ValueError 
= r.content is Unparsable # TypeError = r.content is None # AttributeError = r is None # We could not parse JSON response. # We will just use the status we already have. pass self.logger.warning( 'Failed to send WhatsApp notification to {}: ' '{}{}error={}.'.format( target, status_str, ', ' if status_str else '', status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure has_error = True continue else: self.logger.info( 'Sent WhatsApp notification to {}.'.format(target)) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending WhatsApp:%s ' % ( target) + 'notification.' ) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure has_error = True continue return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.from_phone_id, self.token) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = {} if self.template: # Add language to our URL params['lang'] = self.language # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Payload body extras prefixed with a ':' sign # Append our payload extras into our parameters params.update( {':{}'.format(k): v for k, v in self.template_mapping.items()}) return '{schema}://{template}{token}@{from_id}/{targets}/?{params}'\ .format( schema=self.secure_protocol, from_id=self.pprint( self.from_phone_id, privacy, safe=''), token=self.pprint(self.token, privacy, safe=''), template='' if not self.template else '{}:'.format( NotifyWhatsApp.quote(self.template, safe='')), targets='/'.join( [NotifyWhatsApp.quote(x, safe='') for x in self.targets]), params=NotifyWhatsApp.urlencode(params)) def __len__(self): """ Returns the number of targets associated with this notification """ targets = len(self.targets) return targets if targets > 0 else 1 @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Get our entries; split_path() looks after unquoting content for us # by default results['targets'] = NotifyWhatsApp.split_path(results['fullpath']) # The hostname is our From Phone ID results['from_phone_id'] = NotifyWhatsApp.unquote(results['host']) # Determine if we have a Template, otherwise load our token if results['password']: # # Template Mode # results['template'] = NotifyWhatsApp.unquote(results['user']) results['token'] = NotifyWhatsApp.unquote(results['password']) else: # # Message Mode # results['token'] = NotifyWhatsApp.unquote(results['user']) # Access token if 'token' in results['qsd'] and len(results['qsd']['token']): # Extract the account sid from an argument results['token'] = \ NotifyWhatsApp.unquote(results['qsd']['token']) # Template if 'template' in results['qsd'] and len(results['qsd']['template']): results['template'] = results['qsd']['template'] # Template Language if 'lang' in results['qsd'] and len(results['qsd']['lang']): results['language'] = results['qsd']['lang'] # Support the 'from' and 'source' variable so that we can support # targets this way too. 
# The 'from' makes it easier to use yaml configuration if 'from' in results['qsd'] and len(results['qsd']['from']): results['from_phone_id'] = \ NotifyWhatsApp.unquote(results['qsd']['from']) if 'source' in results['qsd'] and \ len(results['qsd']['source']): results['from_phone_id'] = \ NotifyWhatsApp.unquote(results['qsd']['source']) # Support the 'to' variable so that we can support targets this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyWhatsApp.parse_phone_no(results['qsd']['to']) # store any additional payload extra's defined results['template_mapping'] = { NotifyWhatsApp.unquote(x): NotifyWhatsApp.unquote(y) for x, y in results['qsd:'].items() } return results apprise-1.9.3/apprise/plugins/windows.py000066400000000000000000000210331477231770000203660ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. from time import sleep from .base import NotifyBase from ..common import NotifyImageSize from ..common import NotifyType from ..utils.parse import parse_bool from ..locale import gettext_lazy as _ # Default our global support flag NOTIFY_WINDOWS_SUPPORT_ENABLED = False try: # 3rd party modules (Windows Only) import win32api import win32con import win32gui # We're good to go! NOTIFY_WINDOWS_SUPPORT_ENABLED = True except ImportError: # No problem; we just simply can't support this plugin because we're # either using Linux, or simply do not have pywin32 installed. 
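# (Hint: on Windows the missing dependency can usually be installed with
# `pip install pywin32`; this is only a suggestion for local setups and is
# not required for Apprise itself to load.)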
pass class NotifyWindows(NotifyBase): """ A wrapper for local Windows Notifications """ # Set our global enabled flag enabled = NOTIFY_WINDOWS_SUPPORT_ENABLED requirements = { # Define our required packaging in order to work 'details': _('A local Microsoft Windows environment is required.') } # The default descriptive name associated with the Notification service_name = 'Windows Notification' # The default protocol protocol = 'windows' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_windows' # Disable throttle rate for Windows requests since they are normally # local anyway request_rate_per_sec = 0 # Allows the user to specify the NotifyImageSize object image_size = NotifyImageSize.XY_128 # Limit results to just the first 2 line otherwise there is just to much # content to display body_max_line_count = 2 # The number of seconds to display the popup for default_popup_duration_sec = 12 # No URL Identifier will be defined for this service as there simply isn't # enough details to uniquely identify one dbus:// from another. url_identifier = False # Define object templates templates = ( '{schema}://', ) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'duration': { 'name': _('Duration'), 'type': 'int', 'min': 1, 'default': 12, }, 'image': { 'name': _('Include Image'), 'type': 'bool', 'default': True, 'map_to': 'include_image', }, }) def __init__(self, include_image=True, duration=None, **kwargs): """ Initialize Windows Object """ super().__init__(**kwargs) # Number of seconds to display notification for self.duration = self.default_popup_duration_sec \ if not (isinstance(duration, int) and duration > 0) else duration # Define our handler self.hwnd = None # Track whether or not we want to send an image with our notification # or not. self.include_image = include_image def _on_destroy(self, hwnd, msg, wparam, lparam): """ Destroy callback function """ nid = (self.hwnd, 0) win32gui.Shell_NotifyIcon(win32gui.NIM_DELETE, nid) win32api.PostQuitMessage(0) return 0 def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Windows Notification """ # Always call throttle before any remote server i/o is made self.throttle() try: # Register destruction callback message_map = {win32con.WM_DESTROY: self._on_destroy, } # Register the window class. 
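# A minimal hidden window is created below purely so a taskbar icon can be
# attached to it; the balloon notification is displayed through that icon
# and the window is destroyed again once the configured duration elapses.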
self.wc = win32gui.WNDCLASS() self.hinst = self.wc.hInstance = win32api.GetModuleHandle(None) self.wc.lpszClassName = str("PythonTaskbar") self.wc.lpfnWndProc = message_map self.classAtom = win32gui.RegisterClass(self.wc) # Styling and window type style = win32con.WS_OVERLAPPED | win32con.WS_SYSMENU self.hwnd = win32gui.CreateWindow( self.classAtom, "Taskbar", style, 0, 0, win32con.CW_USEDEFAULT, win32con.CW_USEDEFAULT, 0, 0, self.hinst, None) win32gui.UpdateWindow(self.hwnd) # image path (if configured to acquire) icon_path = None if not self.include_image \ else self.image_path(notify_type, extension='.ico') if icon_path: icon_flags = win32con.LR_LOADFROMFILE | win32con.LR_DEFAULTSIZE try: hicon = win32gui.LoadImage( self.hinst, icon_path, win32con.IMAGE_ICON, 0, 0, icon_flags) except Exception as e: self.logger.warning( "Could not load windows notification icon ({}): {}" .format(icon_path, e)) # disable icon hicon = win32gui.LoadIcon(0, win32con.IDI_APPLICATION) else: # disable icon hicon = win32gui.LoadIcon(0, win32con.IDI_APPLICATION) # Taskbar icon flags = win32gui.NIF_ICON | win32gui.NIF_MESSAGE | win32gui.NIF_TIP nid = (self.hwnd, 0, flags, win32con.WM_USER + 20, hicon, "Tooltip") win32gui.Shell_NotifyIcon(win32gui.NIM_ADD, nid) win32gui.Shell_NotifyIcon(win32gui.NIM_MODIFY, ( self.hwnd, 0, win32gui.NIF_INFO, win32con.WM_USER + 20, hicon, "Balloon Tooltip", body, 200, title)) # take a rest then destroy sleep(self.duration) win32gui.DestroyWindow(self.hwnd) win32gui.UnregisterClass(self.wc.lpszClassName, None) self.logger.info('Sent Windows notification.') except Exception as e: self.logger.warning('Failed to send Windows notification.') self.logger.debug('Windows Exception: {}', str(e)) return False return True def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'image': 'yes' if self.include_image else 'no', 'duration': str(self.duration), } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) return '{schema}://?{params}'.format( schema=self.protocol, params=NotifyWindows.urlencode(params), ) @staticmethod def parse_url(url): """ There are no parameters nessisary for this protocol; simply having windows:// is all you need. This function just makes sure that is in place. """ results = NotifyBase.parse_url(url, verify_host=False) # Include images with our message results['include_image'] = \ parse_bool(results['qsd'].get('image', True)) # Set duration try: results['duration'] = int(results['qsd'].get('duration')) except (TypeError, ValueError): # Not a valid integer; ignore entry pass # return results return results apprise-1.9.3/apprise/plugins/workflows.py000066400000000000000000000466121477231770000207430ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. 
# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # To use this plugin, you need to create a MS Teams Azure Webhook Workflow: # https://support.microsoft.com/en-us/office/browse-and-add-workflows-\ # in-microsoft-teams-4998095c-8b72-4b0e-984c-f2ad39e6ba9a # Your webhook will look somthing like this: # https://prod-161.westeurope.logic.azure.com:443/\ # workflows/643e69f83c8944438d68119179a10a64/triggers/manual/\ # paths/invoke?api-version=2016-06-01&sp=%2Ftriggers%2Fmanual%2Frun&\ # sv=1.0&sig=KODuebWbDGYFr0z0eu-6Rj8aUKz7108W3wrNJZxFE5A # # Yes... The URL is that big... But it looks like this (greatly simplified): # https://HOST:PORT/workflows/ABCD/triggers/manual/path/...sig=DEFG # ^ ^ ^ ^ # | | | | # These are important <---------^------------------------------^ # # # Apprise can support this webhook as is (directly passed into it) # Alternatively it can be shortend to: # These 3 tokens need to be placed in the URL after the Team # workflows://HOST:PORT/ABCD/DEFG/ # import re import requests import json from json.decoder import JSONDecodeError from .base import NotifyBase from ..common import NotifyImageSize from ..common import NotifyType from ..common import NotifyFormat from ..utils.parse import parse_bool, validate_regex from ..utils.templates import apply_template, TemplateType from ..apprise_attachment import AppriseAttachment from ..locale import gettext_lazy as _ class NotifyWorkflows(NotifyBase): """ A wrapper for Microsoft Workflows (MS Teams) Notifications """ # The default descriptive name associated with the Notification service_name = 'Power Automate / Workflows (for MSTeams)' # The services URL service_url = 'https://www.microsoft.com/power-platform/' \ 'products/power-automate' # The default secure protocol secure_protocol = ('workflow', 'workflows') # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_workflows' # Allows the user to specify the NotifyImageSize object image_size = NotifyImageSize.XY_32 # The maximum allowable characters allowed in the body per message body_maxlen = 1000 # Default Notification Format notify_format = NotifyFormat.MARKDOWN # There is no reason we should exceed 35KB when reading in a JSON file. 
# If it is more than this, then it is not accepted max_workflows_template_size = 35000 # Adaptive Card Version adaptive_card_version = '1.4' # Define object templates templates = ( '{schema}://{host}/{workflow}/{signature}', '{schema}://{host}:{port}/{workflow}/{signature}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'host': { 'name': _('Hostname'), 'type': 'string', 'required': True, }, 'port': { 'name': _('Port'), 'type': 'int', 'min': 1, 'max': 65535, }, # workflow identifier 'workflow': { 'name': _('Workflow ID'), 'type': 'string', 'private': True, 'required': True, 'regex': (r'^[A-Z0-9_-]+$', 'i'), }, # Signature 'signature': { 'name': _('Signature'), 'type': 'string', 'private': True, 'required': True, 'regex': (r'^[a-z0-9_-]+$', 'i'), }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'id': { 'alias_of': 'workflow', }, 'image': { 'name': _('Include Image'), 'type': 'bool', 'default': True, 'map_to': 'include_image', }, 'wrap': { 'name': _('Wrap Text'), 'type': 'bool', 'default': True, 'map_to': 'wrap', }, 'template': { 'name': _('Template Path'), 'type': 'string', 'private': True, }, # Below variable shortforms are taken from the Workflows webhook # for consistency 'sig': { 'alias_of': 'signature', }, 'ver': { 'name': _('API Version'), 'type': 'string', 'default': '2016-06-01', 'map_to': 'version', }, 'api-version': { 'alias_of': 'ver' }, }) # Define our token control template_kwargs = { 'tokens': { 'name': _('Template Tokens'), 'prefix': ':', }, } def __init__(self, workflow, signature, include_image=None, version=None, template=None, tokens=None, wrap=None, **kwargs): """ Initialize Microsoft Workflows Object """ super().__init__(**kwargs) self.workflow = validate_regex( workflow, *self.template_tokens['workflow']['regex']) if not self.workflow: msg = 'An invalid Workflows ID ' \ '({}) was specified.'.format(workflow) self.logger.warning(msg) raise TypeError(msg) self.signature = validate_regex( signature, *self.template_tokens['signature']['regex']) if not self.signature: msg = 'An invalid Signature ' \ '({}) was specified.'.format(signature) self.logger.warning(msg) raise TypeError(msg) # Place a thumbnail image inline with the message body self.include_image = True if ( include_image if include_image is not None else self.template_args['image']['default']) else False # Wrap Text self.wrap = True if ( wrap if wrap is not None else self.template_args['wrap']['default']) else False # Our template object is just an AppriseAttachment object self.template = AppriseAttachment(asset=self.asset) if template: # Add our definition to our template self.template.add(template) # Enforce maximum file size self.template[0].max_file_size = self.max_workflows_template_size # Prepare Version self.api_version = version if version is not None \ else self.template_args['ver']['default'] # Template functionality self.tokens = {} if isinstance(tokens, dict): self.tokens.update(tokens) elif tokens: msg = 'The specified Workflows Template Tokens ' \ '({}) are not identified as a dictionary.'.format(tokens) self.logger.warning(msg) raise TypeError(msg) # else: NoneType - this is okay return def gen_payload(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ This function generates our payload whether it be the generic one Apprise generates by default, or one provided by a specified external template. 
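Note: a dict payload is returned on success; False or None is returned when
a configured template cannot be accessed, read, or parsed as JSON (the
caller treats any falsy result as a failure).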
""" # Acquire our to-be footer icon if configured to do so image_url = None if not self.include_image \ else self.image_url(notify_type) body_content = [] if image_url: body_content.append({ "type": "Image", "url": image_url, "height": "32px", "altText": notify_type, }) if title: body_content.append({ "type": "TextBlock", "text": f'{title}', "style": "heading", "weight": "Bolder", "size": "Large", "id": "title", }) body_content.append({ "type": "TextBlock", "text": body, "style": "default", "wrap": self.wrap, "id": "body", }) if not self.template: # By default we use a generic working payload if there was # no template specified schema = "http://adaptivecards.io/schemas/adaptive-card.json" payload = { "type": "message", "attachments": [ { "contentType": "application/vnd.microsoft.card.adaptive", "contentUrl": None, "content": { "$schema": schema, "type": "AdaptiveCard", "version": self.adaptive_card_version, "body": body_content, # Additionally "msteams": {"width": "full"}, } } ] } return payload # If our code reaches here, then we generate ourselves the payload template = self.template[0] if not template: # We could not access the attachment self.logger.error( 'Could not access Workflow template {}.'.format( template.url(privacy=True))) return False # Take a copy of our token dictionary tokens = self.tokens.copy() # Apply some defaults template values tokens['app_body'] = body tokens['app_title'] = title tokens['app_type'] = notify_type tokens['app_id'] = self.app_id tokens['app_desc'] = self.app_desc tokens['app_color'] = self.color(notify_type) tokens['app_image_url'] = image_url tokens['app_url'] = self.app_url # Enforce Application mode tokens['app_mode'] = TemplateType.JSON try: with open(template.path, 'r') as fp: content = json.loads(apply_template(fp.read(), **tokens)) except (OSError, IOError): self.logger.error( 'MSTeam template {} could not be read.'.format( template.url(privacy=True))) return None except JSONDecodeError as e: self.logger.error( 'MSTeam template {} contains invalid JSON.'.format( template.url(privacy=True))) self.logger.debug('JSONDecodeError: {}'.format(e)) return None return content def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Microsoft Teams Notification """ headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json', } params = { 'api-version': self.api_version, 'sp': '/triggers/manual/run', 'sv': '1.0', 'sig': self.signature, } notify_url = 'https://{host}{port}/workflows/{workflow}/' \ 'triggers/manual/paths/invoke'.format( host=self.host, port='' if not self.port else f':{self.port}', workflow=self.workflow) # Generate our payload if it's possible payload = self.gen_payload( body=body, title=title, notify_type=notify_type, **kwargs) if not payload: # No need to present a reason; that will come from the # gen_payload() function itself return False self.logger.debug('Workflows POST URL: %s (cert_verify=%r)' % ( notify_url, self.verify_certificate, )) self.logger.debug('Workflows Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( notify_url, params=params, data=json.dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code not in ( requests.codes.ok, requests.codes.accepted): # We had a problem status_str = \ NotifyWorkflows.http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send Workflows notification: ' '{}{}error={}.'.format( status_str, ', ' if 
status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # We failed return False else: self.logger.info('Sent Workflows notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Workflows notification.') self.logger.debug('Socket Exception: %s' % str(e)) # We failed return False return True @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol[0], self.host, self.port, self.workflow, self.signature, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = { 'image': 'yes' if self.include_image else 'no', 'wrap': 'yes' if self.wrap else 'no', } if self.template: params['template'] = NotifyWorkflows.quote( self.template[0].url(), safe='') # Store our version if it differs from default if self.api_version != self.template_args['ver']['default']: params['ver'] = self.api_version # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Store any template entries if specified params.update({':{}'.format(k): v for k, v in self.tokens.items()}) return '{schema}://{host}{port}/{workflow}/{signature}/' \ '?{params}'.format( schema=self.secure_protocol[0], host=self.host, port='' if not self.port else f':{self.port}', workflow=self.pprint(self.workflow, privacy, safe=''), signature=self.pprint(self.signature, privacy, safe=''), params=NotifyWorkflows.urlencode(params), ) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url) if not results: # We're done early as we couldn't load the results return results # store values if provided entries = NotifyWorkflows.split_path(results['fullpath']) # Display image? results['include_image'] = parse_bool(results['qsd'].get( 'image', NotifyWorkflows.template_args['image']['default'])) # Wrap Text? 
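# e.g. workflows://host/WORKFLOWID/SIGNATURE?wrap=no disables text wrapping
# in the generated Adaptive Card body (placeholder values shown here for
# illustration only).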
results['wrap'] = parse_bool(results['qsd'].get( 'wrap', NotifyWorkflows.template_args['wrap']['default'])) # Template Handling if 'template' in results['qsd'] and results['qsd']['template']: results['template'] = \ NotifyWorkflows.unquote(results['qsd']['template']) if 'workflow' in results['qsd'] and results['qsd']['workflow']: results['workflow'] = \ NotifyWorkflows.unquote(results['qsd']['workflow']) elif 'id' in results['qsd'] and results['qsd']['id']: results['workflow'] = \ NotifyWorkflows.unquote(results['qsd']['id']) else: results['workflow'] = None if not entries \ else NotifyWorkflows.unquote(entries.pop(0)) # Signature if 'signature' in results['qsd'] and results['qsd']['signature']: results['signature'] = \ NotifyWorkflows.unquote(results['qsd']['signature']) elif 'sig' in results['qsd'] and results['qsd']['sig']: results['signature'] = \ NotifyWorkflows.unquote(results['qsd']['sig']) else: # Read information from path results['signature'] = None if not entries \ else NotifyWorkflows.unquote(entries.pop(0)) # Version if 'api-version' in results['qsd'] and results['qsd']['api-version']: results['version'] = \ NotifyWorkflows.unquote(results['qsd']['api-version']) elif 'ver' in results['qsd'] and results['qsd']['ver']: results['version'] = \ NotifyWorkflows.unquote(results['qsd']['ver']) # Store our tokens results['tokens'] = results['qsd:'] return results @staticmethod def parse_native_url(url): """ Support parsing the webhook straight out of workflows https://HOST:443/workflows/WORKFLOWID/triggers/manual/paths/invoke """ # Match our workflows webhook URL and re-assemble result = re.match( r'^https?://(?P[A-Z0-9_.-]+)' r'(?P:[1-9][0-9]{0,5})?' r'/workflows/' r'(?P[A-Z0-9_-]+)' r'/triggers/manual/paths/invoke/?' r'(?P\?.+)$', url, re.I) if result: # Construct our URL return NotifyWorkflows.parse_url( '{schema}://{host}{port}/{workflow}' '/{params}'.format( schema=NotifyWorkflows.secure_protocol[0], host=result.group('host'), port='' if not result.group('port') else result.group('port'), workflow=result.group('workflow'), params=result.group('params'))) return None apprise-1.9.3/apprise/plugins/wxpusher.py000066400000000000000000000306201477231770000205630ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # # Sign-up at https://wxpusher.zjiecode.com/ # # Login and acquire your App Token # - Open the backend of the application: # https://wxpusher.zjiecode.com/admin/ # - Find the appToken menu from the left menu bar, here you can reset the # appToken, please note that after resetting, the old appToken will be # invalid immediately and the call interface will fail. import re import json import requests from itertools import chain from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyType from ..common import NotifyFormat from ..utils.parse import parse_list, validate_regex from ..locale import gettext_lazy as _ # Topics are always numerical IS_TOPIC = re.compile(r'^\s*(?P[1-9][0-9]{0,20})\s*$') # users always start with UID_ IS_USER = re.compile( r'^\s*(?P(?PUID_)(?P[^\s]+))\s*$', re.I) WXPUSHER_RESPONSE_CODES = { 1000: "The request was processed successfully.", 1001: "The token provided in the request is missing.", 1002: "The token provided in the request is incorrect or expired.", 1003: "The body of the message was not provided.", 1004: "The user or topic you're trying to send the message to does not " "exist", 1005: "The app or topic binding process failed.", 1006: "There was an error in sending the message.", 1007: "The message content exceeds the allowed length.", 1008: "The API call frequency is too high and the server rejected the " "request.", 1009: "There might be other issues that are not explicitly covered by " "the above codes", 1010: "The IP address making the request is not whitelisted.", } class WxPusherContentType: """ Defines the different supported content types """ TEXT = 1 HTML = 2 MARKDOWN = 3 class SubscriptionType: # Verify Subscription Time UNVERIFIED = 0 PAID_USERS = 1 UNSUBSCRIBED = 2 class NotifyWxPusher(NotifyBase): """ A wrapper for WxPusher Notifications """ # The default descriptive name associated with the Notification service_name = 'WxPusher' # The services URL service_url = 'https://wxpusher.zjiecode.com/' # The default protocol secure_protocol = 'wxpusher' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_wxpusher' # WxPusher notification endpoint notify_url = 'https://wxpusher.zjiecode.com/api/send/message' # Define object templates templates = ( '{schema}://{token}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'token': { 'name': _('App Token'), 'type': 'string', 'required': True, 'regex': (r'^AT_[^\s]+$', 'i'), 'private': True, }, 'target_topic': { 'name': _('Target Topic'), 'type': 'int', 'map_to': 'targets', }, 'target_user': { 'name': _('Target User ID'), 'type': 'string', 'regex': (r'^UID_[^\s]+$', 'i'), 'map_to': 'targets', }, 'targets': { 'name': _('Targets'), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'token': { 'alias_of': 'token', }, }) # Used for mapping the content type 
to our output since Apprise supports # The same formats that WxPusher does. __content_type_map = { NotifyFormat.MARKDOWN: WxPusherContentType.MARKDOWN, NotifyFormat.TEXT: WxPusherContentType.TEXT, NotifyFormat.HTML: WxPusherContentType.HTML, } def __init__(self, token, targets=None, **kwargs): """ Initialize WxPusher Object """ super().__init__(**kwargs) # App Token (associated with WxPusher account) self.token = validate_regex( token, *self.template_tokens['token']['regex']) if not self.token: msg = 'An invalid WxPusher App Token ' \ '({}) was specified.'.format(token) self.logger.warning(msg) raise TypeError(msg) # Used for URL generation afterwards only self._invalid_targets = list() # For storing what is detected self._users = list() self._topics = list() # Parse our targets for target in parse_list(targets): # Validate targets and drop bad ones: result = IS_USER.match(target) if result: # store valid user self._users.append(result['full']) continue result = IS_TOPIC.match(target) if result: # store valid topic self._topics.append(int(result['topic'])) continue self.logger.warning( 'Dropped invalid WxPusher user/topic ' '(%s) specified.' % target, ) self._invalid_targets.append(target) return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform WxPusher Notification """ if not self._users and not self._topics: # There were no services to notify self.logger.warning( 'There were no WxPusher targets to notify') return False # Prepare our headers headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json', 'Accept': 'application/json', } # Prepare our payload payload = { 'appToken': self.token, 'content': body, 'summary': title, 'contentType': self.__content_type_map[self.notify_format], 'topicIds': self._topics, 'uids': self._users, # unsupported at this time # 'verifyPay': False, # 'verifyPayType': 0, 'url': None, } # Some Debug Logging self.logger.debug('WxPusher POST URL: {} (cert_verify={})'.format( self.notify_url, self.verify_certificate)) self.logger.debug('WxPusher Payload: {}' .format(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( self.notify_url, data=json.dumps(payload).encode('utf-8'), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) try: content = json.loads(r.content) except (AttributeError, TypeError, ValueError): # ValueError = r.content is Unparsable # TypeError = r.content is None # AttributeError = r is None content = {} # 1000 is the expected return code for a successful query if r.status_code == requests.codes.ok and \ content and content.get("code") == 1000: # We're good! self.logger.info( 'Sent WxPusher notification to %d targets.' % ( len(self._users) + len(self._topics))) else: error_str = content.get('msg') if content else ( WXPUSHER_RESPONSE_CODES.get( content.get("code") if content else None, "An unknown error occured.")) # We had a problem status_str = \ NotifyWxPusher.http_response_code_lookup( r.status_code) if not error_str else error_str self.logger.warning( 'Failed to send WxPusher notification, ' 'code={}/{}: {}'.format( r.status_code, 'unk' if not content else content.get("code"), status_str)) self.logger.debug( 'Response Details:\r\n{}'.format( content if content else r.content)) # Mark our failure return False except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending WxPusher ' 'notification.' 
) self.logger.debug('Socket Exception: %s' % str(e)) return False return True @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return (self.secure_protocol, self.token) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Define any URL parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) return '{schema}://{token}/{targets}/?{params}'.format( schema=self.secure_protocol, token=self.pprint( self.token, privacy, mode=PrivacyMode.Secret, safe=''), targets='/'.join(chain( [str(t) for t in self._topics], self._users, [NotifyWxPusher.quote(x, safe='') for x in self._invalid_targets])), params=NotifyWxPusher.urlencode(params)) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # Get our entries; split_path() looks after unquoting content for us # by default results['targets'] = NotifyWxPusher.split_path(results['fullpath']) # App Token if 'token' in results['qsd'] and len(results['qsd']['token']): # Extract the App token from an argument results['token'] = \ NotifyWxPusher.unquote(results['qsd']['token']) # Any host entry defined is actually part of the path # store it's element (if defined) if results['host']: results['targets'].append( NotifyWxPusher.split_path(results['host'])) else: # The hostname is our source number results['token'] = NotifyWxPusher.unquote(results['host']) # Support the 'to' variable so that we can support rooms this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ NotifyWxPusher.parse_list(results['qsd']['to']) return results apprise-1.9.3/apprise/plugins/xbmc.py000066400000000000000000000307061477231770000176340ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
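# Example URLs accepted by this plugin (illustrative placeholders only; the
# authoritative forms are the `templates` defined on the class below):
#   kodi://hostname
#   kodis://user:password@hostname:8080
#   xbmc://hostname/?duration=30&image=no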
import requests from json import dumps from .base import NotifyBase from ..url import PrivacyMode from ..common import NotifyType from ..common import NotifyImageSize from ..utils.parse import parse_bool from ..locale import gettext_lazy as _ class NotifyXBMC(NotifyBase): """ A wrapper for XBMC/KODI Notifications """ # The default descriptive name associated with the Notification service_name = 'Kodi/XBMC' # The services URL service_url = 'http://kodi.tv/' xbmc_protocol = 'xbmc' xbmc_secure_protocol = 'xbmcs' kodi_protocol = 'kodi' kodi_secure_protocol = 'kodis' # The default protocols protocol = (xbmc_protocol, kodi_protocol) # The default secure protocols secure_protocol = (xbmc_secure_protocol, kodi_secure_protocol) # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_kodi' # Disable throttle rate for XBMC/KODI requests since they are normally # local anyway request_rate_per_sec = 0 # Limit results to just the first 2 line otherwise there is just to much # content to display body_max_line_count = 2 # XBMC uses the http protocol with JSON requests xbmc_default_port = 8080 # Allows the user to specify the NotifyImageSize object image_size = NotifyImageSize.XY_128 # XBMC default protocol version (v2) xbmc_remote_protocol = 2 # KODI default protocol version (v6) kodi_remote_protocol = 6 # Define object templates templates = ( '{schema}://{host}', '{schema}://{host}:{port}', '{schema}://{user}:{password}@{host}', '{schema}://{user}:{password}@{host}:{port}', ) # Define our tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'host': { 'name': _('Hostname'), 'type': 'string', 'required': True, }, 'port': { 'name': _('Port'), 'type': 'int', 'min': 1, 'max': 65535, }, 'user': { 'name': _('Username'), 'type': 'string', }, 'password': { 'name': _('Password'), 'type': 'string', 'private': True, }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'duration': { 'name': _('Duration'), 'type': 'int', 'min': 1, 'default': 12, }, 'image': { 'name': _('Include Image'), 'type': 'bool', 'default': True, 'map_to': 'include_image', }, }) def __init__(self, include_image=True, duration=None, **kwargs): """ Initialize XBMC/KODI Object """ super().__init__(**kwargs) # Number of seconds to display notification for self.duration = self.template_args['duration']['default'] \ if not (isinstance(duration, int) and self.template_args['duration']['min'] > 0) else duration # Build our schema self.schema = 'https' if self.secure else 'http' # Prepare the default header self.headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json' } # Default protocol self.protocol = kwargs.get('protocol', self.xbmc_remote_protocol) # Track whether or not we want to send an image with our notification # or not. 
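# (e.g. adding ?image=no to the URL suppresses the icon in the on-screen
# notification)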
self.include_image = include_image def _payload_60(self, title, body, notify_type, **kwargs): """ Builds payload for KODI API v6.0 Returns (headers, payload) """ # prepare JSON Object payload = { 'jsonrpc': '2.0', 'method': 'GUI.ShowNotification', 'params': { 'title': title, 'message': body, # displaytime is defined in microseconds so we need to just # do some simple math 'displaytime': int(self.duration * 1000), }, 'id': 1, } # Acquire our image url if configured to do so image_url = None if not self.include_image else \ self.image_url(notify_type) if image_url: payload['params']['image'] = image_url if notify_type is NotifyType.FAILURE: payload['type'] = 'error' elif notify_type is NotifyType.WARNING: payload['type'] = 'warning' else: payload['type'] = 'info' return (self.headers, dumps(payload)) def _payload_20(self, title, body, notify_type, **kwargs): """ Builds payload for XBMC API v2.0 Returns (headers, payload) """ # prepare JSON Object payload = { 'jsonrpc': '2.0', 'method': 'GUI.ShowNotification', 'params': { 'title': title, 'message': body, # displaytime is defined in microseconds so we need to just # do some simple math 'displaytime': int(self.duration * 1000), }, 'id': 1, } # Include our logo if configured to do so image_url = None if not self.include_image \ else self.image_url(notify_type) if image_url: payload['params']['image'] = image_url return (self.headers, dumps(payload)) def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform XBMC/KODI Notification """ if self.protocol == self.xbmc_remote_protocol: # XBMC v2.0 (headers, payload) = self._payload_20( title, body, notify_type, **kwargs) else: # KODI v6.0 (headers, payload) = self._payload_60( title, body, notify_type, **kwargs) auth = None if self.user: auth = (self.user, self.password) url = '%s://%s' % (self.schema, self.host) if self.port: url += ':%d' % self.port url += '/jsonrpc' self.logger.debug('XBMC/KODI POST URL: %s (cert_verify=%r)' % ( url, self.verify_certificate, )) self.logger.debug('XBMC/KODI Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( url, data=payload, headers=headers, auth=auth, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyXBMC.http_response_code_lookup(r.status_code) self.logger.warning( 'Failed to send XBMC/KODI notification: ' '{}{}error={}.'.format( status_str, ', ' if status_str else '', r.status_code)) self.logger.debug('Response Details:\r\n{}'.format(r.content)) # Return; we're done return False else: self.logger.info('Sent XBMC/KODI notification.') except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending XBMC/KODI ' 'notification.' ) self.logger.debug('Socket Exception: %s' % str(e)) # Return; we're done return False return True @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ default_schema = self.xbmc_protocol if ( self.protocol <= self.xbmc_remote_protocol) else self.kodi_protocol if self.secure: # Append 's' to schema default_schema += 's' port = self.port if self.port else ( 443 if self.secure else self.xbmc_default_port) return ( default_schema, self.user, self.password, self.host, port, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. 
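For example, a configured target might be rendered back as
kodi://user:password@hostname/?image=yes&duration=12 (illustrative values;
the password portion is obfuscated when privacy=True).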
""" # Define any URL parameters params = { 'image': 'yes' if self.include_image else 'no', 'duration': str(self.duration), } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Determine Authentication auth = '' if self.user and self.password: auth = '{user}:{password}@'.format( user=NotifyXBMC.quote(self.user, safe=''), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), ) elif self.user: auth = '{user}@'.format( user=NotifyXBMC.quote(self.user, safe=''), ) default_schema = self.xbmc_protocol if ( self.protocol <= self.xbmc_remote_protocol) else self.kodi_protocol default_port = 443 if self.secure else self.xbmc_default_port if self.secure: # Append 's' to schema default_schema += 's' return '{schema}://{auth}{hostname}{port}/?{params}'.format( schema=default_schema, auth=auth, # never encode hostname since we're expecting it to be a valid one hostname=self.host, port='' if not self.port or self.port == default_port else ':{}'.format(self.port), params=NotifyXBMC.urlencode(params), ) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url) if not results: # We're done early return results # We want to set our protocol depending on whether we're using XBMC # or KODI if results.get('schema', '').startswith('xbmc'): # XBMC Support results['protocol'] = NotifyXBMC.xbmc_remote_protocol # Assign Default XBMC Port if not results['port']: results['port'] = NotifyXBMC.xbmc_default_port else: # KODI Support results['protocol'] = NotifyXBMC.kodi_remote_protocol # Include images with our message results['include_image'] = \ parse_bool(results['qsd'].get('image', True)) # Set duration try: results['duration'] = abs(int(results['qsd'].get('duration'))) except (TypeError, ValueError): # Not a valid integer; ignore entry pass return results apprise-1.9.3/apprise/plugins/zulip.py000066400000000000000000000340301477231770000200400ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
# To use this plugin, you must have a ZulipChat bot defined; See here: # https://zulipchat.com/help/add-a-bot-or-integration # # At the time of writing this plugin the instructions were: # 1. From your desktop, click on the gear icon in the upper right corner. # 2. Select Settings. # 3. On the left, click Your bots. # 4. Click Add a new bot. # 5. Fill out the fields, and click Create bot. # If you know your organization {ID} (as it's part of the zulipchat.com url # after you signup, then you can also access your bot information by visting: # https://ID.zulipchat.com/#settings/your-bots # For example, I create an organization called apprise. Thus my URL would be # https://apprise.zulipchat.com/#settings/your-bots # When you're done and have a bot, it's important to remember the username # you provided the bot and the API key generated. # # If your {user} was : goober-bot@apprise.zulipchat.com # and your {apikey} was: lqn6mpwpam6VZzbCW0o7olmk3hwbQSK # # Then the following URLs would be accepted by Apprise: # - zulip://goober-bot@apprise.zulipchat.com/lqn6mpwpam6VZzbCW0o7olmk3hwbQSK # - zulip://goober-bot@apprise/lqn6mpwpam6VZzbCW0o7olmk3hwbQSK # - zulip://goober@apprise/lqn6mpwpam6VZzbCW0o7olmk3hwbQSK # - zulip://goober@apprise.zulipchat.com/lqn6mpwpam6VZzbCW0o7olmk3hwbQSK # The API reference used to build this plugin was documented here: # https://zulipchat.com/api/send-message # import re import requests from .base import NotifyBase from ..common import NotifyType from ..utils.parse import (parse_list, validate_regex, is_email) from ..locale import gettext_lazy as _ # A Valid Bot Name VALIDATE_BOTNAME = re.compile(r'(?P[A-Z0-9_-]{1,32})', re.I) # Organization required as part of the API request VALIDATE_ORG = re.compile( r'(?P[A-Z0-9_-]{1,32})(\.(?P[^\s]+))?', re.I) # Extend HTTP Error Messages ZULIP_HTTP_ERROR_MAP = { 401: 'Unauthorized - Invalid Token.', } # Used to break path apart into list of streams TARGET_LIST_DELIM = re.compile(r'[ \t\r\n,#\\/]+') # Used to detect a streams IS_VALID_TARGET_RE = re.compile( r'#?(?P[A-Z0-9_]{1,32})', re.I) class NotifyZulip(NotifyBase): """ A wrapper for Zulip Notifications """ # The default descriptive name associated with the Notification service_name = 'Zulip' # The services URL service_url = 'https://zulipchat.com/' # The default secure protocol secure_protocol = 'zulip' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_zulip' # Zulip uses the http protocol with JSON requests notify_url = 'https://{org}.{hostname}/api/v1/messages' # The maximum allowable characters allowed in the title per message title_maxlen = 60 # The maximum allowable characters allowed in the body per message body_maxlen = 10000 # Define object templates templates = ( '{schema}://{botname}@{organization}/{token}', '{schema}://{botname}@{organization}/{token}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ 'botname': { 'name': _('Bot Name'), 'type': 'string', 'regex': (r'^[A-Z0-9_-]{1,32}$', 'i'), 'required': True, }, 'organization': { 'name': _('Organization'), 'type': 'string', 'required': True, 'regex': (r'^[A-Z0-9_-]{1,32})$', 'i') }, 'token': { 'name': _('Token'), 'type': 'string', 'required': True, 'private': True, 'regex': (r'^[A-Z0-9]{32}$', 'i'), }, 'target_user': { 'name': _('Target User'), 'type': 'string', 'map_to': 'targets', }, 'target_stream': { 'name': _('Target Stream'), 'type': 'string', 'map_to': 'targets', }, 'targets': { 'name': 
_('Targets'), 'type': 'list:string', }, }) # Define our template arguments template_args = dict(NotifyBase.template_args, **{ 'to': { 'alias_of': 'targets', }, 'token': { 'alias_of': 'token', }, }) # The default hostname to append to a defined organization # if one isn't defined in the apprise url default_hostname = 'zulipchat.com' # The default stream to notify if no targets are specified default_notification_stream = 'general' def __init__(self, botname, organization, token, targets=None, **kwargs): """ Initialize Zulip Object """ super().__init__(**kwargs) # our default hostname self.hostname = self.default_hostname try: match = VALIDATE_BOTNAME.match(botname.strip()) if not match: # let outer exception handle this raise TypeError # The botname botname = match.group('name') suffix = '-bot' # Eliminate suffix if found botname = \ botname[:-len(suffix)] if botname.endswith(suffix) else botname self.botname = botname except (TypeError, AttributeError): msg = 'The Zulip botname specified ({}) is invalid.'\ .format(botname) self.logger.warning(msg) raise TypeError(msg) try: match = VALIDATE_ORG.match(organization.strip()) if not match: # let outer exception handle this raise TypeError # The organization self.organization = match.group('org') if match.group('hostname'): self.hostname = match.group('hostname') except (TypeError, AttributeError): msg = 'The Zulip organization specified ({}) is invalid.'\ .format(organization) self.logger.warning(msg) raise TypeError(msg) self.token = validate_regex( token, *self.template_tokens['token']['regex']) if not self.token: msg = 'The Zulip token specified ({}) is invalid.'\ .format(token) self.logger.warning(msg) raise TypeError(msg) self.targets = parse_list(targets) if len(self.targets) == 0: # No streams identified, use default self.targets.append(self.default_notification_stream) def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Zulip Notification """ headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8', } # error tracking (used for function return) has_error = False # Prepare our notification URL url = self.notify_url.format( org=self.organization, hostname=self.hostname, ) # prepare JSON Object payload = { 'subject': title, 'content': body, } # Determine Authentication auth = ( '{botname}-bot@{org}.{hostname}'.format( botname=self.botname, org=self.organization, hostname=self.hostname, ), self.token, ) # Create a copy of the target list targets = list(self.targets) while len(targets): target = targets.pop(0) result = is_email(target) if result: # Send a private message payload['type'] = 'private' else: # Send a stream message payload['type'] = 'stream' # Set our target payload['to'] = target if not result else result['full_email'] self.logger.debug('Zulip POST URL: %s (cert_verify=%r)' % ( url, self.verify_certificate, )) self.logger.debug('Zulip Payload: %s' % str(payload)) # Always call throttle before any remote server i/o is made self.throttle() try: r = requests.post( url, data=payload, headers=headers, auth=auth, verify=self.verify_certificate, timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem status_str = \ NotifyZulip.http_response_code_lookup( r.status_code, ZULIP_HTTP_ERROR_MAP) self.logger.warning( 'Failed to send Zulip notification to {}: ' '{}{}error={}.'.format( target, status_str, ', ' if status_str else '', r.status_code)) self.logger.debug( 'Response Details:\r\n{}'.format(r.content)) # Mark our failure 
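# (the loop continues so any remaining targets are still attempted; the
# overall return value reflects the failure)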
has_error = True continue else: self.logger.info( 'Sent Zulip notification to {}.'.format(target)) except requests.RequestException as e: self.logger.warning( 'A Connection error occurred sending Zulip ' 'notification to {}.'.format(target)) self.logger.debug('Socket Exception: %s' % str(e)) # Mark our failure has_error = True continue return not has_error @property def url_identifier(self): """ Returns all of the identifiers that make this URL unique from another simliar one. Targets or end points should never be identified here. """ return ( self.secure_protocol, self.organization, self.hostname, self.token, ) def url(self, privacy=False, *args, **kwargs): """ Returns the URL built dynamically based on specified arguments. """ # Our URL parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) # simplify our organization in our URL if we can organization = '{}{}'.format( self.organization, '.{}'.format(self.hostname) if self.hostname != self.default_hostname else '') return '{schema}://{botname}@{org}/{token}/' \ '{targets}?{params}'.format( schema=self.secure_protocol, botname=NotifyZulip.quote(self.botname, safe=''), org=NotifyZulip.quote(organization, safe=''), token=self.pprint(self.token, privacy, safe=''), targets='/'.join( [NotifyZulip.quote(x, safe='') for x in self.targets]), params=NotifyZulip.urlencode(params), ) def __len__(self): """ Returns the number of targets associated with this notification """ return len(self.targets) @staticmethod def parse_url(url): """ Parses the URL and returns enough arguments that can allow us to re-instantiate this object. """ results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # The botname results['botname'] = NotifyZulip.unquote(results['user']) # The organization is stored in the hostname results['organization'] = NotifyZulip.unquote(results['host']) # Store our targets results['targets'] = NotifyZulip.split_path(results['fullpath']) if 'token' in results['qsd'] and len(results['qsd']['token']): # Store our token if specified results['token'] = NotifyZulip.unquote(results['qsd']['token']) elif results['targets']: # First item is the token results['token'] = results['targets'].pop(0) else: # no token results['token'] = None # Support the 'to' variable so that we can support rooms this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += [x for x in filter( bool, TARGET_LIST_DELIM.split( NotifyZulip.unquote(results['qsd']['to'])))] return results apprise-1.9.3/apprise/py.typed000066400000000000000000000000001477231770000163270ustar00rootroot00000000000000apprise-1.9.3/apprise/url.py000066400000000000000000001043221477231770000160200ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. 
# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import sys import re from .logger import logger import time import hashlib from datetime import datetime from xml.sax.saxutils import escape as sax_escape from urllib.parse import unquote as _unquote from urllib.parse import quote as _quote from .locale import gettext_lazy as _ from .asset import AppriseAsset from .utils.parse import ( urlencode, parse_url, parse_bool, parse_list, parse_phone_no) # Used to break a path list into parts PATHSPLIT_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+') class PrivacyMode: # Defines different privacy modes strings can be printed as # Astrisk sets 4 of them: e.g. **** # This is used for passwords Secret = '*' # Outer takes the first and last character displaying them with # 3 dots between. Hence, 'i-am-a-token' would become 'i...n' Outer = 'o' # Displays the last four characters Tail = 't' # Define the HTML Lookup Table HTML_LOOKUP = { 400: 'Bad Request - Unsupported Parameters.', 401: 'Verification Failed.', 404: 'Page not found.', 405: 'Method not allowed.', 500: 'Internal server error.', 503: 'Servers are overloaded.', } class URLBase: """ This is the base class for all URL Manipulation """ # The default descriptive name associated with the URL service_name = None # The default simple (insecure) protocol # all inheriting entries must provide their protocol lookup # protocol:// (in this example they would specify 'protocol') protocol = None # The default secure protocol # all inheriting entries must provide their protocol lookup # protocols:// (in this example they would specify 'protocols') # This value can be the same as the defined protocol. secure_protocol = None # Throttle request_rate_per_sec = 0 # The connect timeout is the number of seconds Requests will wait for your # client to establish a connection to a remote machine (corresponding to # the connect()) call on the socket. socket_connect_timeout = 4.0 # The read timeout is the number of seconds the client will wait for the # server to send a response. socket_read_timeout = 4.0 # provide the information required to allow for unique id generation when # calling url_id(). Over-ride this in calling classes. Calling classes # should set this to false if there can be no url_id generated url_identifier = None # Tracks the last generated url_id() to prevent regeneration; initializes # to False and is set thereafter. This is an internal value for this class # only and should not be set to anything other then False below... 
__cached_url_identifier = False # Handle # Maintain a set of tags to associate with this specific notification tags = set() # Secure sites should be verified against a Certificate Authority verify_certificate = True # Logging to our global logger logger = logger # Define a default set of template arguments used for dynamically building # details about our individual plugins for developers. # Define object templates templates = () # Provides a mapping of tokens, certain entries are fixed and automatically # configured if found (such as schema, host, user, pass, and port) template_tokens = {} # Here is where we define all of the arguments we accept on the url # such as: schema://whatever/?cto=5.0&rto=15 # These act the same way as tokens except they are optional and/or # have default values set if mandatory. This rule must be followed template_args = { 'verify': { 'name': _('Verify SSL'), # SSL Certificate Authority Verification 'type': 'bool', # Provide a default 'default': verify_certificate, # look up default using the following parent class value at # runtime. '_lookup_default': 'verify_certificate', }, 'rto': { 'name': _('Socket Read Timeout'), 'type': 'float', # Provide a default 'default': socket_read_timeout, # look up default using the following parent class value at # runtime. The variable name identified here (in this case # socket_read_timeout) is checked and it's result is placed # over-top of the 'default'. This is done because once a parent # class inherits this one, the overflow_mode already set as a # default 'could' be potentially over-ridden and changed to a # different value. '_lookup_default': 'socket_read_timeout', }, 'cto': { 'name': _('Socket Connect Timeout'), 'type': 'float', # Provide a default 'default': socket_connect_timeout, # look up default using the following parent class value at # runtime. The variable name identified here (in this case # socket_connect_timeout) is checked and it's result is placed # over-top of the 'default'. This is done because once a parent # class inherits this one, the overflow_mode already set as a # default 'could' be potentially over-ridden and changed to a # different value. '_lookup_default': 'socket_connect_timeout', }, } # kwargs are dynamically built because a prefix causes us to parse the # content slightly differently. The prefix is required and can be either # a (+ or -). Below would handle the +key=value: # { # 'headers': { # 'name': _('HTTP Header'), # 'prefix': '+', # 'type': 'string', # }, # }, # # In a kwarg situation, the 'key' is always presumed to be treated as # a string. When the 'type' is defined, it is being defined to respect # the 'value'. template_kwargs = {} # Internal Values def __init__(self, asset=None, **kwargs): """ Initialize some general logging and common server arguments that will keep things consistent when working with the children that inherit this class. 
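Keyword arguments recognized here (based on the body below) include the parsed URL components (schema, host, port, user, password, fullpath) along with 'secure', 'verify' (SSL certificate verification), 'rto'/'cto' (socket read/connect timeouts) and 'tag'.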
""" # Prepare our Asset Object self.asset = \ asset if isinstance(asset, AppriseAsset) else AppriseAsset() # Certificate Verification (for SSL calls); default to being enabled self.verify_certificate = parse_bool( kwargs.get('verify', URLBase.verify_certificate)) # Schema self.schema = kwargs.get('schema', 'unknown').lower() # Secure Mode self.secure = kwargs.get('secure', None) if not isinstance(self.secure, bool): # Attempt to detect self.secure = self.schema[-1:] == 's' self.host = URLBase.unquote(kwargs.get('host')) self.port = kwargs.get('port') if self.port: try: self.port = int(self.port) except (TypeError, ValueError): self.logger.warning( 'Invalid port number specified {}' .format(self.port)) self.port = None self.user = kwargs.get('user') if self.user: # Always unquote user if it exists self.user = URLBase.unquote(self.user) self.password = kwargs.get('password') if self.password: # Always unquote the password if it exists self.password = URLBase.unquote(self.password) # Store our full path consistently ensuring it ends with a `/' self.fullpath = URLBase.unquote(kwargs.get('fullpath')) if not isinstance(self.fullpath, str) or not self.fullpath: self.fullpath = '/' # Store our Timeout Variables if 'rto' in kwargs: try: self.socket_read_timeout = float(kwargs.get('rto')) except (TypeError, ValueError): self.logger.warning( 'Invalid socket read timeout (rto) was specified {}' .format(kwargs.get('rto'))) if 'cto' in kwargs: try: self.socket_connect_timeout = float(kwargs.get('cto')) except (TypeError, ValueError): self.logger.warning( 'Invalid socket connect timeout (cto) was specified {}' .format(kwargs.get('cto'))) if 'tag' in kwargs: # We want to associate some tags with our notification service. # the code below gets the 'tag' argument if defined, otherwise # it just falls back to whatever was already defined globally self.tags = set(parse_list(kwargs.get('tag'), self.tags)) # Tracks the time any i/o was made to the remote server. This value # is automatically set and controlled through the throttle() call. self._last_io_datetime = None def throttle(self, last_io=None, wait=None): """ A common throttle control if a wait is specified, then it will force a sleep of the specified time if it is larger then the calculated throttle time. """ if last_io is not None: # Assume specified last_io self._last_io_datetime = last_io # Get ourselves a reference time of 'now' reference = datetime.now() if self._last_io_datetime is None: # Set time to 'now' and no need to throttle self._last_io_datetime = reference return if self.request_rate_per_sec <= 0.0 and not wait: # We're done if there is no throttle limit set return # If we reach here, we need to do additional logic. # If the difference between the reference time and 'now' is less than # the defined request_rate_per_sec then we need to throttle for the # remaining balance of this time. elapsed = (reference - self._last_io_datetime).total_seconds() if wait is not None: self.logger.debug('Throttling forced for {}s...'.format(wait)) time.sleep(wait) elif elapsed < self.request_rate_per_sec: self.logger.debug('Throttling for {}s...'.format( self.request_rate_per_sec - elapsed)) time.sleep(self.request_rate_per_sec - elapsed) # Update our timestamp before we leave self._last_io_datetime = datetime.now() return def url(self, privacy=False, *args, **kwargs): """ Assembles the URL associated with the notification based on the arguments provied. 
""" # Our default parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) # Determine Authentication auth = '' if self.user and self.password: auth = '{user}:{password}@'.format( user=URLBase.quote(self.user, safe=''), password=self.pprint( self.password, privacy, mode=PrivacyMode.Secret, safe=''), ) elif self.user: auth = '{user}@'.format( user=URLBase.quote(self.user, safe=''), ) default_port = 443 if self.secure else 80 return '{schema}://{auth}{hostname}{port}{fullpath}{params}'.format( schema='https' if self.secure else 'http', auth=auth, # never encode hostname since we're expecting it to be a valid one hostname=self.host, port='' if self.port is None or self.port == default_port else ':{}'.format(self.port), fullpath=URLBase.quote(self.fullpath, safe='/') if self.fullpath else '/', params=('?' + URLBase.urlencode(params) if params else ''), ) def url_id(self, lazy=True, hash_engine=hashlib.sha256): """ Returns a unique URL identifier that representing the Apprise URL itself. The url_id is always a hash string or None if it can't be generated. The idea is to only build the ID based on the credentials or specific elements relative to the URL itself. The URL ID should never factor in (or else it's a bug) the following: - any targets defined - all GET parameters options unless they explicitly change the complete function of the code. For example: GET parameters like ?image=false&avatar=no should have no bearing in the uniqueness of the Apprise URL Identifier. Consider plugins where some get parameters completely change how the entire upstream comunication works such as slack:// and matrix:// which has a mode. In these circumstances, they should be considered in he unique generation. The intention of this function is to help align Apprise URLs that are common with one another and therefore can share the same persistent storage even when subtle changes are made to them. 
Hence the following would all return the same URL Identifier: json://abc/def/ghi?image=no json://abc/def/ghi/?test=yes&image=yes """ if lazy and self.__cached_url_identifier is not False: return self.__cached_url_identifier \ if not (self.__cached_url_identifier and self.asset.storage_idlen) \ else self.__cached_url_identifier[:self.asset.storage_idlen] # Python v3.9 introduces usedforsecurity argument kwargs = {'usedforsecurity': False} \ if sys.version_info >= (3, 9) else {} if self.url_identifier is False: # Disabled self.__cached_url_identifier = None elif self.url_identifier in (None, True): # Prepare our object engine = hash_engine( self.asset.storage_salt + self.schema.encode( self.asset.encoding), **kwargs) # We want to treat `None` differently than a blank entry engine.update( b'\0' if self.password is None else self.password.encode(self.asset.encoding)) engine.update( b'\0' if self.user is None else self.user.encode(self.asset.encoding)) engine.update( b'\0' if not self.host else self.host.encode(self.asset.encoding)) engine.update( b'\0' if self.port is None else f'{self.port}'.encode(self.asset.encoding)) engine.update( self.fullpath.rstrip('/').encode(self.asset.encoding)) engine.update(b's' if self.secure else b'i') # Save our generated content self.__cached_url_identifier = engine.hexdigest() elif isinstance(self.url_identifier, str): self.__cached_url_identifier = hash_engine( self.asset.storage_salt + self.url_identifier.encode( self.asset.encoding), **kwargs).hexdigest() elif isinstance(self.url_identifier, bytes): self.__cached_url_identifier = hash_engine( self.asset.storage_salt + self.url_identifier, **kwargs).hexdigest() elif isinstance(self.url_identifier, (list, tuple, set)): self.__cached_url_identifier = hash_engine( self.asset.storage_salt + b''.join([ (x if isinstance(x, bytes) else str(x).encode(self.asset.encoding)) for x in self.url_identifier]), **kwargs).hexdigest() elif isinstance(self.url_identifier, dict): self.__cached_url_identifier = hash_engine( self.asset.storage_salt + b''.join([ (x if isinstance(x, bytes) else str(x).encode(self.asset.encoding)) for x in self.url_identifier.values()]), **kwargs).hexdigest() else: self.__cached_url_identifier = hash_engine( self.asset.storage_salt + str( self.url_identifier).encode(self.asset.encoding), **kwargs).hexdigest() return self.__cached_url_identifier \ if not (self.__cached_url_identifier and self.asset.storage_idlen) \ else self.__cached_url_identifier[:self.asset.storage_idlen] def __contains__(self, tags): """ Returns true if the tag specified is associated with this notification. tag can also be a tuple, set, and/or list """ if isinstance(tags, (tuple, set, list)): return bool(set(tags) & self.tags) # return any match return tags in self.tags def __str__(self): """ Returns the url path """ return self.url(privacy=True) @staticmethod def escape_html(html, convert_new_lines=False, whitespace=True): """ Takes html text as input and escapes it so that it won't conflict with any xml/html wrapping characters. Args: html (str): The HTML code to escape convert_new_lines (:obj:`bool`, optional): escape new lines (\n) whitespace (:obj:`bool`, optional): escape whitespace Returns: str: The escaped html """ if not isinstance(html, str) or not html: return '' # Escape HTML escaped = sax_escape(html, {"'": "&apos;", "\"": "&quot;"}) if whitespace: # Tidy up whitespace too escaped = escaped\ .replace(u'\t', u'&emsp;')\ .replace(u' ', u'&nbsp;') if convert_new_lines: return escaped.replace(u'\n', u'&lt;br/&gt;')
return escaped @staticmethod def unquote(content, encoding='utf-8', errors='replace'): """ Replace %xx escapes by their single-character equivalent. The optional encoding and errors parameters specify how to decode percent-encoded sequences. Wrapper to Python's `unquote` while remaining compatible with both Python 2 & 3 since the reference to this function changed between versions. Note: errors set to 'replace' means that invalid sequences are replaced by a placeholder character. Args: content (str): The quoted URI string you wish to unquote encoding (:obj:`str`, optional): encoding type errors (:obj:`str`, errors): how to handle invalid character found in encoded string (defined by encoding) Returns: str: The unquoted URI string """ if not content: return '' return _unquote(content, encoding=encoding, errors=errors) @staticmethod def quote(content, safe='/', encoding=None, errors=None): """ Replaces single character non-ascii characters and URI specific ones by their %xx code. Wrapper to Python's `quote` while remaining compatible with both Python 2 & 3 since the reference to this function changed between versions. Args: content (str): The URI string you wish to quote safe (str): non-ascii characters and URI specific ones that you do not wish to escape (if detected). Setting this string to an empty one causes everything to be escaped. encoding (:obj:`str`, optional): encoding type errors (:obj:`str`, errors): how to handle invalid character found in encoded string (defined by encoding) Returns: str: The quoted URI string """ if not content: return '' return _quote(content, safe=safe, encoding=encoding, errors=errors) @staticmethod def pprint(content, privacy=True, mode=PrivacyMode.Outer, # privacy print; quoting is ignored when privacy is set to True quote=True, safe='/', encoding=None, errors=None): """ Privacy Print is used to manipulate the string before passing it into part of the URL. It is used to mask/hide private details such as tokens, passwords, apikeys, etc from on-lookers. If privacy=False is set, then the quote variable is the next flag checked. Quoting is never done if the privacy flag is set to true to avoid skewing the expected output. """ if not privacy: if quote: # Return quoted string if specified to do so return URLBase.quote( content, safe=safe, encoding=encoding, errors=errors) # Return content 'as-is' return content if mode is PrivacyMode.Secret: # Return 4 Asterisks return '****' if not isinstance(content, str) or not content: # Nothing more to do return '' if mode is PrivacyMode.Tail: # Return the trailing 4 characters return '...{}'.format(content[-4:]) # Default mode is Outer Mode return '{}...{}'.format(content[0:1], content[-1:]) @staticmethod def urlencode(query, doseq=False, safe='', encoding=None, errors=None): """Convert a mapping object or a sequence of two-element tuples Wrapper to Python's `urlencode` while remaining compatible with both Python 2 & 3 since the reference to this function changed between versions. The resulting string is a series of key=value pairs separated by '&' characters, where both key and value are quoted using the quote() function. Note: If the dictionary entry contains an entry that is set to None it is not included in the final result set. If you want to pass in an empty variable, set it to an empty string. Args: query (str): The dictionary to encode doseq (:obj:`bool`, optional): Handle sequences safe (:obj:`str`): non-ascii characters and URI specific ones that you do not wish to escape (if detected).
Setting this string to an empty one causes everything to be escaped. encoding (:obj:`str`, optional): encoding type errors (:obj:`str`, errors): how to handle invalid character found in encoded string (defined by encoding) Returns: str: The escaped parameters returned as a string """ return urlencode( query, doseq=doseq, safe=safe, encoding=encoding, errors=errors) @staticmethod def split_path(path, unquote=True): """Splits a URL up into a list object. Parses a specified URL and breaks it into a list. Args: path (str): The path to split up into a list. unquote (:obj:`bool`, optional): call unquote on each element added to the returned list. Returns: list: A list containing all of the elements in the path """ try: paths = PATHSPLIT_LIST_DELIM.split(path.lstrip('/')) if unquote: paths = \ [URLBase.unquote(x) for x in filter(bool, paths)] except AttributeError: # path is not useable, we still want to gracefully return an # empty list paths = [] return paths @staticmethod def parse_list(content, allow_whitespace=True, unquote=True): """A wrapper to utils.parse_list() with unquoting support Parses a specified set of data and breaks it into a list. Args: content (str): The path to split up into a list. If a list is provided, then it's individual entries are processed. allow_whitespace (:obj:`bool`, optional): whitespace is to be treated as a delimiter unquote (:obj:`bool`, optional): call unquote on each element added to the returned list. Returns: list: A unique list containing all of the elements in the path """ content = parse_list(content, allow_whitespace=allow_whitespace) if unquote: content = \ [URLBase.unquote(x) for x in filter(bool, content)] return content @staticmethod def parse_phone_no(content, unquote=True, prefix=False): """A wrapper to utils.parse_phone_no() with unquoting support Parses a specified set of data and breaks it into a list. Args: content (str): The path to split up into a list. If a list is provided, then it's individual entries are processed. unquote (:obj:`bool`, optional): call unquote on each element added to the returned list. Returns: list: A unique list containing all of the elements in the path """ if unquote: try: content = URLBase.unquote(content) except TypeError: # Nothing further to do return [] content = parse_phone_no(content, prefix=prefix) return content @property def app_id(self): return self.asset.app_id if self.asset.app_id else '' @property def app_desc(self): return self.asset.app_desc if self.asset.app_desc else '' @property def app_url(self): return self.asset.app_url if self.asset.app_url else '' @property def request_timeout(self): """This is primarily used to fullfill the `timeout` keyword argument that is used by requests.get() and requests.put() calls. """ return (self.socket_connect_timeout, self.socket_read_timeout) @property def request_auth(self): """This is primarily used to fullfill the `auth` keyword argument that is used by requests.get() and requests.put() calls. """ return (self.user, self.password) if self.user else None @property def request_url(self): """ Assemble a simple URL that can be used by the requests library """ # Acquire our schema schema = 'https' if self.secure else 'http' # Prepare our URL url = '%s://%s' % (schema, self.host) # Apply Port information if present if isinstance(self.port, int): url += ':%d' % self.port # Append our full path return url + self.fullpath def url_parameters(self, *args, **kwargs): """ Provides a default set of args to work with. 
This can greatly simplify URL construction in the acommpanied url() function. The following property returns a dictionary (of strings) containing all of the parameters that can be set on a URL and managed through this class. """ # parameters are only provided on demand to keep the URL short params = {} # The socket read timeout if self.socket_read_timeout != URLBase.socket_read_timeout: params['rto'] = str(self.socket_read_timeout) # The request/socket connect timeout if self.socket_connect_timeout != URLBase.socket_connect_timeout: params['cto'] = str(self.socket_connect_timeout) # Certificate verification if self.verify_certificate != URLBase.verify_certificate: params['verify'] = 'yes' if self.verify_certificate else 'no' return params @staticmethod def post_process_parse_url_results(results): """ After parsing the URL, this function applies a bit of extra logic to support extra entries like `pass` becoming `password`, etc This function assumes that parse_url() was called previously setting up the basics to be checked """ # if our URL ends with an 's', then assume our secure flag is set. results['secure'] = (results['schema'][-1] == 's') # QSD Checking (over-rides all) qsd_exists = True if isinstance(results.get('qsd'), dict) else False if qsd_exists and 'verify' in results['qsd']: # Pulled from URL String results['verify'] = parse_bool( results['qsd'].get('verify', True)) elif 'verify' in results: # Pulled from YAML Configuratoin results['verify'] = parse_bool(results.get('verify', True)) else: # Support SSL Certificate 'verify' keyword. Default to being # enabled results['verify'] = True # Password overrides if 'pass' in results: results['password'] = results['pass'] del results['pass'] if qsd_exists: if 'password' in results['qsd']: results['password'] = results['qsd']['password'] if 'pass' in results['qsd']: results['password'] = results['qsd']['pass'] # User overrides if 'user' in results['qsd']: results['user'] = results['qsd']['user'] # parse_url() always creates a 'password' and 'user' entry in the # results returned. Entries are set to None if they weren't # specified if results['password'] is None and 'user' in results['qsd']: # Handle cases where the user= provided in 2 locations, we want # the original to fall back as a being a password (if one # wasn't otherwise defined) e.g. # mailtos://PASSWORD@hostname?user=admin@mail-domain.com # - in the above, the PASSWORD gets lost in the parse url() # since a user= over-ride is specified. presults = parse_url(results['url']) if presults: # Store our Password results['password'] = presults['user'] # Store our socket read timeout if specified if 'rto' in results['qsd']: results['rto'] = results['qsd']['rto'] # Store our socket connect timeout if specified if 'cto' in results['qsd']: results['cto'] = results['qsd']['cto'] if 'port' in results['qsd']: results['port'] = results['qsd']['port'] return results @staticmethod def parse_url(url, verify_host=True, plus_to_space=False, strict_port=False, sanitize=True): """Parses the URL and returns it broken apart into a dictionary. This is very specific and customized for Apprise. Args: url (str): The URL you want to fully parse. verify_host (:obj:`bool`, optional): a flag kept with the parsed URL which some child classes will later use to verify SSL keys (if SSL transactions take place). Unless under very specific circumstances, it is strongly recomended that you leave this default value set to True. 
Returns: A dictionary is returned containing the URL fully parsed if successful, otherwise None is returned. """ results = parse_url( url, default_schema='unknown', verify_host=verify_host, plus_to_space=plus_to_space, strict_port=strict_port, sanitize=sanitize) if not results: # We're done; we failed to parse our url return results return URLBase.post_process_parse_url_results(results) @staticmethod def http_response_code_lookup(code, response_mask=None): """Parses the interger response code returned by a remote call from a web request into it's human readable string version. You can over-ride codes or add new ones by providing your own response_mask that contains a dictionary of integer -> string mapped variables """ if isinstance(response_mask, dict): # Apply any/all header over-rides defined HTML_LOOKUP.update(response_mask) # Look up our response try: response = HTML_LOOKUP[code] except KeyError: response = '' return response def __len__(self): """ Should be over-ridden and allows the tracking of how many targets are associated with each URLBase object. Default is always 1 """ return 1 def schemas(self): """A simple function that returns a set of all schemas associated with this object based on the object.protocol and object.secure_protocol """ schemas = set([]) for key in ('protocol', 'secure_protocol'): schema = getattr(self, key, None) if isinstance(schema, str): schemas.add(schema) elif isinstance(schema, (set, list, tuple)): # Support iterables list types for s in schema: if isinstance(s, str): schemas.add(s) return schemas apprise-1.9.3/apprise/url.pyi000066400000000000000000000010101477231770000161570ustar00rootroot00000000000000from logging import logger from typing import Any, Iterable, Set, Optional class URLBase: service_name: Optional[str] protocol: Optional[str] secure_protocol: Optional[str] request_rate_per_sec: int socket_connect_timeout: float socket_read_timeout: float tags: Set[str] verify_certificate: bool logger: logger def url(self, privacy: bool = ..., *args: Any, **kwargs: Any) -> str: ... def __contains__(self, tags: Iterable[str]) -> bool: ... def __str__(self) -> str: ...apprise-1.9.3/apprise/utils/000077500000000000000000000000001477231770000160025ustar00rootroot00000000000000apprise-1.9.3/apprise/utils/__init__.py000066400000000000000000000026261477231770000201210ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. apprise-1.9.3/apprise/utils/base64.py000066400000000000000000000061371477231770000174470ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import copy import json import binascii import typing import base64 def decode_b64_dict(di: dict) -> dict: """ decodes base64 dictionary previously encoded string entries prefixed with `b64:` are targeted """ di = copy.deepcopy(di) for k, v in di.items(): if not isinstance(v, str) or not v.startswith("b64:"): continue try: parsed_v = base64.b64decode(v[4:]) parsed_v = json.loads(parsed_v) except (ValueError, TypeError, binascii.Error, json.decoder.JSONDecodeError): # ValueError: the length of altchars is not 2. # TypeError: invalid input # binascii.Error: not base64 (bad padding) # json.decoder.JSONDecodeError: Bad JSON object parsed_v = v di[k] = parsed_v return di def encode_b64_dict(di: dict, encoding='utf-8') -> typing.Tuple[dict, bool]: """ Encodes dictionary entries containing binary types (int, float) into base64 Final product is always string based values """ di = copy.deepcopy(di) needs_decoding = False for k, v in di.items(): if isinstance(v, str): continue try: encoded = base64.urlsafe_b64encode(json.dumps(v).encode(encoding)) encoded = "b64:{}".format(encoded.decode(encoding)) needs_decoding = True except (ValueError, TypeError): # ValueError: # - the length of altchars is not 2. 
# TypeError: # - json not searializable or # - bytes object not passed into urlsafe_b64encode() encoded = str(v) di[k] = encoded return di, needs_decoding apprise-1.9.3/apprise/utils/cwe312.py000066400000000000000000000163171477231770000173700ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import re from .parse import (parse_url, is_hostname) def cwe312_word(word, force=False, advanced=True, threshold=5): """ This function was written to help mask secure/private information that may or may not be found within Apprise. The idea is to provide a presentable word response that the user who prepared it would understand, yet not reveal any private information for any potential intruder For more detail see CWE-312 @ https://cwe.mitre.org/data/definitions/312.html The `force` is an optional argument used to keep the string formatting consistent and in one place. If set, the content passed in is presumed to be containing secret information and will be updated accordingly. If advanced is set to `True` then content is additionally checked for upper/lower/ascii/numerical variances. If an obscurity threshold is reached, then content is considered secret """ class Variance: """ A Simple List of Possible Character Variances """ # An Upper Case Character (ABCDEF... etc) ALPHA_UPPER = '+' # An Lower Case Character (abcdef... etc) ALPHA_LOWER = '-' # A Special Character ($%^;... etc) SPECIAL = 's' # A Numerical Character (1234... 
etc) NUMERIC = 'n' if not (isinstance(word, str) and word.strip()): # not a password if it's not something we even support return word # Formatting word = word.strip() if force: # We're forcing the representation to be a secret # We do this for consistency return '{}...{}'.format(word[0:1], word[-1:]) elif len(word) > 1 and \ not is_hostname(word, ipv4=True, ipv6=True, underscore=False): # Verify if it is a hostname or not return '{}...{}'.format(word[0:1], word[-1:]) elif len(word) >= 16: # an IP will be 15 characters so we don't want to use a smaller # value then 16 (e.g 101.102.103.104) # we can assume very long words are passwords otherwise return '{}...{}'.format(word[0:1], word[-1:]) if advanced: # # Mark word a secret based on it's obscurity # # Our variances will increase depending on these variables: last_variance = None obscurity = 0 for c in word: # Detect our variance if c.isdigit(): variance = Variance.NUMERIC elif c.isalpha() and c.isupper(): variance = Variance.ALPHA_UPPER elif c.isalpha() and c.islower(): variance = Variance.ALPHA_LOWER else: variance = Variance.SPECIAL if last_variance != variance or variance == Variance.SPECIAL: obscurity += 1 if obscurity >= threshold: return '{}...{}'.format(word[0:1], word[-1:]) last_variance = variance # Otherwise we're good; return our word return word def cwe312_url(url): """ This function was written to help mask secure/private information that may or may not be found on an Apprise URL. The idea is to not disrupt the structure of the previous URL too much, yet still protect the users private information from being logged directly to screen. For more detail see CWE-312 @ https://cwe.mitre.org/data/definitions/312.html For example, consider the URL: http://user:password@localhost/ When passed into this function, the return value would be: http://user:****@localhost/ Since apprise allows you to put private information everywhere in it's custom URLs, it uses this function to manipulate the content before returning to any kind of logger. The idea is that the URL can still be interpreted by the person who constructed them, but not to an intruder. 
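Query string values whose keys typically carry secrets (password, secret, pass, token, key, id, apikey, to) are force-masked in the same fashion.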
""" # Parse our URL results = parse_url(url) if not results: # Nothing was returned (invalid data was fed in); return our # information as it was fed to us (without changing it) return url # Update our URL with values results['password'] = cwe312_word(results['password'], force=True) if not results['schema'].startswith('http'): results['user'] = cwe312_word(results['user']) results['host'] = cwe312_word(results['host']) else: results['host'] = cwe312_word(results['host'], advanced=False) results['user'] = cwe312_word(results['user'], advanced=False) # Apply our full path scan in all cases results['fullpath'] = '/' + \ '/'.join([cwe312_word(x) for x in re.split( r'[\\/]+', results['fullpath'].lstrip('/'))]) \ if results['fullpath'] else '' # # Now re-assemble our URL for display purposes # # Determine Authentication auth = '' if results['user'] and results['password']: auth = '{user}:{password}@'.format( user=results['user'], password=results['password'], ) elif results['user']: auth = '{user}@'.format( user=results['user'], ) params = '' if results['qsd']: params = '?{}'.format( "&".join(["{}={}".format(k, cwe312_word(v, force=( k in ('password', 'secret', 'pass', 'token', 'key', 'id', 'apikey', 'to')))) for k, v in results['qsd'].items()])) return '{schema}://{auth}{hostname}{port}{fullpath}{params}'.format( schema=results['schema'], auth=auth, # never encode hostname since we're expecting it to be a valid one hostname=results['host'], port='' if not results['port'] else ':{}'.format(results['port']), fullpath=results['fullpath'] if results['fullpath'] else '', params=params, ) apprise-1.9.3/apprise/utils/disk.py000066400000000000000000000132511477231770000173100ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
import re import os import platform from os.path import expanduser from ..logger import logger # Pre-Escape content since we reference it so much ESCAPED_PATH_SEPARATOR = re.escape('\\/') ESCAPED_WIN_PATH_SEPARATOR = re.escape('\\') ESCAPED_NUX_PATH_SEPARATOR = re.escape('/') TIDY_WIN_PATH_RE = re.compile( r'(^[%s]{2}|[^%s\s][%s]|[\s][%s]{2}])([%s]+)' % ( ESCAPED_WIN_PATH_SEPARATOR, ESCAPED_WIN_PATH_SEPARATOR, ESCAPED_WIN_PATH_SEPARATOR, ESCAPED_WIN_PATH_SEPARATOR, ESCAPED_WIN_PATH_SEPARATOR, ), ) TIDY_WIN_TRIM_RE = re.compile( r'^(.+[^:][^%s])[\s%s]*$' % ( ESCAPED_WIN_PATH_SEPARATOR, ESCAPED_WIN_PATH_SEPARATOR, ), ) TIDY_NUX_PATH_RE = re.compile( r'([%s])([%s]+)' % ( ESCAPED_NUX_PATH_SEPARATOR, ESCAPED_NUX_PATH_SEPARATOR, ), ) # A simple path decoder we can re-use which looks after # ensuring our file info is expanded correctly when provided # a path. __PATH_DECODER = os.path.expandvars if \ platform.system() == 'Windows' else os.path.expanduser def path_decode(path): """ Returns the fully decoded path based on the operating system """ return os.path.abspath(__PATH_DECODER(path)) def tidy_path(path): """take a filename and or directory and attempts to tidy it up by removing trailing slashes and correcting any formatting issues. For example: ////absolute//path// becomes: /absolute/path """ # Windows path = TIDY_WIN_PATH_RE.sub('\\1', path.strip()) # Linux path = TIDY_NUX_PATH_RE.sub('\\1', path) # Windows Based (final) Trim path = expanduser(TIDY_WIN_TRIM_RE.sub('\\1', path)) return path def dir_size(path, max_depth=3, missing_okay=True, _depth=0, _errors=None): """ Scans a provided path an returns it's size (in bytes) of path provided """ if _errors is None: _errors = set() if _depth > max_depth: _errors.add(path) return (0, _errors) total = 0 try: with os.scandir(path) as it: for entry in it: try: if entry.is_file(follow_symlinks=False): total += entry.stat(follow_symlinks=False).st_size elif entry.is_dir(follow_symlinks=False): (totals, _) = dir_size( entry.path, max_depth=max_depth, _depth=_depth + 1, _errors=_errors) total += totals except FileNotFoundError: # no worries; Nothing to do continue except (OSError, IOError) as e: # Permission error of some kind or disk problem... # There is nothing we can do at this point _errors.add(entry.path) logger.warning( 'dir_size detetcted inaccessible path: %s', os.fsdecode(entry.path)) logger.debug('dir_size Exception: %s' % str(e)) continue except FileNotFoundError: if not missing_okay: # Conditional error situation _errors.add(path) except (OSError, IOError) as e: # Permission error of some kind or disk problem... # There is nothing we can do at this point _errors.add(path) logger.warning( 'dir_size detetcted inaccessible path: %s', os.fsdecode(path)) logger.debug('dir_size Exception: %s' % str(e)) return (total, _errors) def bytes_to_str(value): """ Covert an integer (in bytes) into it's string representation with acompanied unit value (such as B, KB, MB, GB, TB, etc) """ unit = 'B' try: value = float(value) except (ValueError, TypeError): return None if value >= 1024.0: value = value / 1024.0 unit = 'KB' if value >= 1024.0: value = value / 1024.0 unit = 'MB' if value >= 1024.0: value = value / 1024.0 unit = 'GB' if value >= 1024.0: value = value / 1024.0 unit = 'TB' return '%.2f%s' % (round(value, 2), unit) apprise-1.9.3/apprise/utils/logic.py000066400000000000000000000107351477231770000174570ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. 
# Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. from itertools import chain from .. import common from .parse import parse_list def is_exclusive_match(logic, data, match_all=common.MATCH_ALL_TAG, match_always=common.MATCH_ALWAYS_TAG): """ The data variable should always be a set of strings that the logic can be compared against. It should be a set. If it isn't already, then it will be converted as such. These identify the tags themselves. Our logic should be a list as well: - top level entries are treated as an 'or' - second level (or more) entries are treated as 'and' examples: logic="tagA, tagB" = tagA or tagB logic=['tagA', 'tagB'] = tagA or tagB logic=[('tagA', 'tagC'), 'tagB'] = (tagA and tagC) or tagB logic=[('tagB', 'tagC')] = tagB and tagC If `match_always` is not set to None, then its value is added as an 'or' to all specified logic searches. """ if isinstance(logic, str): # Update our logic to support our delimiters logic = set(parse_list(logic)) if not logic: # If there is no logic to apply then we're done early; we only match # if there is also no data to match against return not data if not isinstance(logic, (list, tuple, set)): # garbage input return False if match_always: # Add our match_always to our logic searching if secified logic = chain(logic, [match_always]) # Track what we match against; but by default we do not match # against anything matched = False # Every entry here will be or'ed with the next for entry in logic: if not isinstance(entry, (str, list, tuple, set)): # Garbage entry in our logic found return False # treat these entries as though all elements found # must exist in the notification service entries = set(parse_list(entry)) if not entries: # We got a bogus set of tags to parse # If there is no logic to apply then we're done early; we only # match if there is also no data to match against return not data if len(entries.intersection(data.union({match_all}))) == len(entries): # our set contains all of the entries found # in our notification data set matched = True break # else: keep looking # Return True if we matched against our logic (or simply none was # specified). 
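    # Worked example of the matching above (a sketch based on the docstring):
    #   logic=[('tagA', 'tagC'), 'tagB'] with data={'tagA', 'tagC'} -> True
    #     (the first AND-group is fully present in data)
    #   logic=[('tagA', 'tagC'), 'tagB'] with data={'tagA'} -> False
    #     (no AND-group or single tag is fully satisfied)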
return matched def dict_full_update(dict1, dict2): """ Takes 2 dictionaries (dict1 and dict2) that contain sub-dictionaries and gracefully merges them into dict1. This is similar to: dict1.update(dict2) except that internal dictionaries are also recursively applied. """ def _merge(dict1, dict2): for k in dict2: if k in dict1 and isinstance(dict1[k], dict) \ and isinstance(dict2[k], dict): _merge(dict1[k], dict2[k]) else: dict1[k] = dict2[k] _merge(dict1, dict2) return apprise-1.9.3/apprise/utils/module.py000066400000000000000000000041051477231770000176410ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import sys import importlib.util from ..logger import logger def import_module(path, name): """ Load our module based on path """ spec = importlib.util.spec_from_file_location(name, path) try: module = importlib.util.module_from_spec(spec) sys.modules[name] = module spec.loader.exec_module(module) except Exception as e: # module isn't loadable try: del sys.modules[name] except KeyError: # nothing to clean up pass module = None logger.debug( 'Module exception raised from %s (name=%s) %s', path, name, str(e)) return module apprise-1.9.3/apprise/utils/parse.py000066400000000000000000001152261477231770000174750ustar00rootroot00000000000000# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2025, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import re from functools import reduce from urllib.parse import unquote from urllib.parse import quote from urllib.parse import urlparse from urllib.parse import urlencode as _urlencode from .disk import tidy_path # URL Indexing Table for returns via parse_url() # The below accepts and scans for: # - schema:// # - schema://path # - schema://path?kwargs # VALID_URL_RE = re.compile( r'^[\s]*((?P[^:\s]+):[/\\]+)?((?P[^?]+)' r'(\?(?P.+))?)?[\s]*$', ) VALID_QUERY_RE = re.compile(r'^(?P.*[/\\])(?P[^/\\]+)?$') # delimiters used to separate values when content is passed in by string. # This is useful when turning a string into a list STRING_DELIMITERS = r'[\[\]\;,\s]+' # String Delimiters without the whitespace STRING_DELIMITERS_NO_WS = r'[\[\]\;,]+' # The handling of custom arguments passed in the URL; we treat any # argument (which would otherwise appear in the qsd area of our parse_url() # function differently if they start with a +, - or : value NOTIFY_CUSTOM_ADD_TOKENS = re.compile(r'^( |\+)(?P.*)\s*') NOTIFY_CUSTOM_DEL_TOKENS = re.compile(r'^-(?P.*)\s*') NOTIFY_CUSTOM_COLON_TOKENS = re.compile(r'^:(?P.*)\s*') # Used for attempting to acquire the schema if the URL can't be parsed. GET_SCHEMA_RE = re.compile(r'\s*(?P[a-z0-9]{1,12})://.*$', re.I) # Used for validating that a provided entry is indeed a schema # this is slightly different then the GET_SCHEMA_RE above which # insists the schema is only valid with a :// entry. this one # extrapolates the individual entries URL_DETAILS_RE = re.compile( r'\s*(?P[a-z0-9]{1,12})(://(?P.*))?$', re.I) # Regular expression based and expanded from: # http://www.regular-expressions.info/email.html # Extended to support colon (:) delimiter for parsing names from the URL # such as: # - 'Optional Name':user@example.com # - 'Optional Name' # # The expression also parses the general email as well such as: # - user@example.com # - label+user@example.com GET_EMAIL_RE = re.compile( r'(([\s"\']+)?(?P[^:<\'"]+)?[:<\s\'"]+)?' r'(?P((?P