Mirror of https://github.com/caronc/apprise.git (synced 2025-01-06 14:09:42 +01:00)
Merge pull request #79 from caronc/55-support-configuration-files
Add support for YAML and TEXT Configuration Files; refs #55
This commit is contained in:
commit edf6d36c7d
59
README.md
@ -48,7 +48,7 @@ The table below identifies the services this tool supports and some example serv
|
||||
| [PushBullet](https://github.com/caronc/apprise/wiki/Notify_pushbullet) | pbul:// | (TCP) 443 | pbul://accesstoken<br />pbul://accesstoken/#channel<br/>pbul://accesstoken/A_DEVICE_ID<br />pbul://accesstoken/email@address.com<br />pbul://accesstoken/#channel/#channel2/email@address.net/DEVICE
|
||||
| [Pushjet](https://github.com/caronc/apprise/wiki/Notify_pushjet) | pjet:// or pjets:// | (TCP) 80 or 443 | pjet://secret@hostname<br />pjet://secret@hostname:port<br />pjets://secret@hostname<br />pjets://secret@hostname:port
|
||||
| [Pushed](https://github.com/caronc/apprise/wiki/Notify_pushed) | pushed:// | (TCP) 443 | pushed://appkey/appsecret/<br/>pushed://appkey/appsecret/#ChannelAlias<br/>pushed://appkey/appsecret/#ChannelAlias1/#ChannelAlias2/#ChannelAliasN<br/>pushed://appkey/appsecret/@UserPushedID<br/>pushed://appkey/appsecret/@UserPushedID1/@UserPushedID2/@UserPushedIDN
|
||||
| [Pushover](https://github.com/caronc/apprise/wiki/Notify_pushover) | pover:// | (TCP) 443 | pover://user@token<br />pover://user@token/DEVICE<br />pover://user@token/DEVICE1/DEVICE2/DEVICEN<br />_Note: you must specify both your user_id and token_
|
||||
| [Pushover](https://github.com/caronc/apprise/wiki/Notify_pushover) | pover:// | (TCP) 443 | pover://user@token<br />pover://user@token/DEVICE<br />pover://user@token/DEVICE1/DEVICE2/DEVICEN<br />**Note**: you must specify both your user_id and token
|
||||
| [Rocket.Chat](https://github.com/caronc/apprise/wiki/Notify_rocketchat) | rocket:// or rockets:// | (TCP) 80 or 443 | rocket://user:password@hostname/RoomID/Channel<br />rockets://user:password@hostname:443/Channel1/Channel1/RoomID<br />rocket://user:password@hostname/Channel
|
||||
| [Ryver](https://github.com/caronc/apprise/wiki/Notify_ryver) | ryver:// | (TCP) 443 | ryver://Organization/Token<br />ryver://botname@Organization/Token
|
||||
| [Slack](https://github.com/caronc/apprise/wiki/Notify_slack) | slack:// | (TCP) 443 | slack://TokenA/TokenB/TokenC/Channel<br />slack://botname@TokenA/TokenB/TokenC/Channel<br />slack://user@TokenA/TokenB/TokenC/Channel1/Channel2/ChannelN
|
||||
@ -92,12 +92,36 @@ cat /proc/cpuinfo | apprise -t 'cpu info' \
|
||||
    'mailto://myemail:mypass@gmail.com'
```

### Configuration Files

No one wants to put their credentials out on the command line for everyone to see. No problem; *apprise* also supports configuration files. It can handle both a specific [YAML format](https://github.com/caronc/apprise/wiki/config_yaml) and a very simple [TEXT format](https://github.com/caronc/apprise/wiki/config_text). You can also pull these configuration files via an HTTP query too! More information concerning Apprise configuration can be found [here](https://github.com/caronc/apprise/wiki/config).

```bash
# By default, if no URL or configuration is specified, apprise will
# look for this data in:
#   ~/.apprise
#   ~/.apprise.yml
#   ~/.config/apprise
#   ~/.config/apprise.yml

# If you loaded one of those files, your command line gets really easy:
apprise -t 'my title' -b 'my notification body'

# Know the location of the configuration source? No problem, just
# specify it.
apprise -t 'my title' -b 'my notification body' \
    --config=/path/to/my/config.yml

# Got lots of configuration locations? No problem, specify them all:
apprise -t 'my title' -b 'my notification body' \
    --config=/path/to/my/config.yml \
    --config=https://localhost/my/apprise/config
```
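
If you're wondering what one of these files actually looks like, the simple [TEXT format](https://github.com/caronc/apprise/wiki/config_text) is just a list of your notification URLs, one per line, where lines starting with a pound/hashtag (#) or semi-colon (;) are treated as comments. The sketch below is only an illustration; the URLs in it are placeholders:

```
# ~/.config/apprise : a minimal TEXT configuration sketch
# One Apprise URL per line; '#' and ';' lines are comments.
mailto://myemail:mypass@gmail.com
slack://TokenA/TokenB/TokenC/Channel
```
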
## Developers
|
||||
To send a notification from within your python application, just do the following:
|
||||
```python
|
||||
import apprise
|
||||
|
||||
# create an Apprise instance
|
||||
# Create an Apprise instance
|
||||
apobj = apprise.Apprise()
|
||||
|
||||
# Add all of the notification services by their server url.
|
||||
@ -115,4 +139,35 @@ apobj.notify(
|
||||
)
|
||||
```
|
||||
|
||||
### Configuration Files

Developers need access to configuration files too. The good news is that their use just involves declaring another object that Apprise can ingest as easily as the notification urls. You can mix and match config and notification entries too!

```python
import apprise

# Create an Apprise instance
apobj = apprise.Apprise()

# Create an AppriseConfig instance
config = apprise.AppriseConfig()

# Add a configuration source:
config.add('/path/to/my/config.yml')

# Add another...
config.add('https://myserver:8080/path/to/config')

# Make sure to add our config into our apprise object
apobj.add(config)

# You can mix and match; add an entry directly if you want to
apobj.add('mailto://myemail:mypass@gmail.com')

# Then notify these services any time you desire. The below would
# notify all of the services loaded into our Apprise object.
apobj.notify(
    body='what a great notification service!',
    title='my notification title',
)
```
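
The Apprise object's `len()` and iteration include the services found inside any loaded configuration (the configuration entries themselves are never counted, only what they contain), which makes a quick sanity check easy. The snippet below is only a sketch continuing the example above; it assumes your configuration sources resolve to at least one notification URL:

```python
# Continuing from the example above: len() and iteration report the
# notification services found inside our configuration entries, not the
# configuration objects themselves.
print('%d notification service(s) loaded' % len(apobj))

for service in apobj:
    # Each entry is an instantiated notification service
    print(service)
```
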
If you're interested in reading more about this and methods on how to customize your own notifications, please check out the wiki at https://github.com/caronc/apprise/wiki/Development_API
|
||||
|
@ -24,71 +24,27 @@
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import six
|
||||
import logging
|
||||
from markdown import markdown
|
||||
from itertools import chain
|
||||
|
||||
from .common import NotifyType
|
||||
from .common import NotifyFormat
|
||||
from .utils import is_exclusive_match
|
||||
from .utils import parse_list
|
||||
from .utils import compat_is_basestring
|
||||
from .utils import GET_SCHEMA_RE
|
||||
|
||||
from .AppriseAsset import AppriseAsset
|
||||
from .AppriseConfig import AppriseConfig
|
||||
from .config.ConfigBase import ConfigBase
|
||||
from .plugins.NotifyBase import NotifyBase
|
||||
|
||||
from . import NotifyBase
|
||||
from . import plugins
|
||||
from . import __version__
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Build a list of supported plugins
|
||||
SCHEMA_MAP = {}
|
||||
|
||||
|
||||
# Load our Lookup Matrix
|
||||
def __load_matrix():
|
||||
"""
|
||||
Dynamically load our schema map; this allows us to gracefully
|
||||
skip over plugins we simply don't have the dependencies for.
|
||||
|
||||
"""
|
||||
# to add its mapping to our hash table
|
||||
for entry in dir(plugins):
|
||||
|
||||
# Get our plugin
|
||||
plugin = getattr(plugins, entry)
|
||||
if not hasattr(plugin, 'app_id'): # pragma: no branch
|
||||
# Filter out non-notification modules
|
||||
continue
|
||||
|
||||
# Load protocol(s) if defined
|
||||
proto = getattr(plugin, 'protocol', None)
|
||||
if compat_is_basestring(proto):
|
||||
if proto not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[proto] = plugin
|
||||
|
||||
elif isinstance(proto, (set, list, tuple)):
|
||||
# Support iterables list types
|
||||
for p in proto:
|
||||
if p not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[p] = plugin
|
||||
|
||||
# Load secure protocol(s) if defined
|
||||
protos = getattr(plugin, 'secure_protocol', None)
|
||||
if compat_is_basestring(protos):
|
||||
if protos not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[protos] = plugin
|
||||
|
||||
if isinstance(protos, (set, list, tuple)):
|
||||
# Support iterables list types
|
||||
for p in protos:
|
||||
if p not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[p] = plugin
|
||||
|
||||
|
||||
# Dynamically build our module
|
||||
__load_matrix()
|
||||
|
||||
|
||||
class Apprise(object):
|
||||
"""
|
||||
@ -112,10 +68,8 @@ class Apprise(object):
|
||||
# directory images can be found in. It can also identify remote
|
||||
# URL paths that contain the images you want to present to the end
|
||||
# user. If no asset is specified, then the default one is used.
|
||||
self.asset = asset
|
||||
if asset is None:
|
||||
# Load our default configuration
|
||||
self.asset = AppriseAsset()
|
||||
self.asset = \
|
||||
asset if isinstance(asset, AppriseAsset) else AppriseAsset()
|
||||
|
||||
if servers:
|
||||
self.add(servers)
|
||||
@ -128,48 +82,45 @@ class Apprise(object):
|
||||
|
||||
"""
|
||||
# swap hash (#) tag values with their html version
|
||||
# This is useful for accepting channels (as arguments to pushbullet)
|
||||
_url = url.replace('/#', '/%23')
|
||||
|
||||
# Attempt to acquire the schema at the very least to allow our plugins
|
||||
# to determine if they can make a better interpretation of a URL
|
||||
# geared for them anyway.
|
||||
# geared for them
|
||||
schema = GET_SCHEMA_RE.match(_url)
|
||||
if schema is None:
|
||||
logger.error('%s is an unparseable server url.' % url)
|
||||
logger.error('Unparseable schema:// found in URL {}.'.format(url))
|
||||
return None
|
||||
|
||||
# Update the schema
|
||||
# Ensure our schema is always in lower case
|
||||
schema = schema.group('schema').lower()
|
||||
|
||||
# Some basic validation
|
||||
if schema not in SCHEMA_MAP:
|
||||
logger.error(
|
||||
'{0} is not a supported server type (url={1}).'.format(
|
||||
schema,
|
||||
_url,
|
||||
)
|
||||
)
|
||||
if schema not in plugins.SCHEMA_MAP:
|
||||
logger.error('Unsupported schema {}.'.format(schema))
|
||||
return None
|
||||
|
||||
# Parse our url details
|
||||
# the server object is a dictionary containing all of the information
|
||||
# parsed from our URL
|
||||
results = SCHEMA_MAP[schema].parse_url(_url)
|
||||
# Parse our url details of the server object as a dictionary containing
|
||||
# all of the information parsed from our URL
|
||||
results = plugins.SCHEMA_MAP[schema].parse_url(_url)
|
||||
|
||||
if not results:
|
||||
if results is None:
|
||||
# Failed to parse the server URL
|
||||
logger.error('Could not parse URL: %s' % url)
|
||||
logger.error('Unparseable URL {}.'.format(url))
|
||||
return None
|
||||
|
||||
# Build a list of tags to associate with the newly added notifications
|
||||
results['tag'] = set(parse_list(tag))
|
||||
|
||||
# Prepare our Asset Object
|
||||
results['asset'] = \
|
||||
asset if isinstance(asset, AppriseAsset) else AppriseAsset()
|
||||
|
||||
if suppress_exceptions:
|
||||
try:
|
||||
# Attempt to create an instance of our plugin using the parsed
|
||||
# URL information
|
||||
plugin = SCHEMA_MAP[results['schema']](**results)
|
||||
plugin = plugins.SCHEMA_MAP[results['schema']](**results)
|
||||
|
||||
except Exception:
|
||||
# the arguments are invalid or can not be used.
|
||||
@ -179,11 +130,7 @@ class Apprise(object):
|
||||
else:
|
||||
# Attempt to create an instance of our plugin using the parsed
|
||||
# URL information but don't wrap it in a try catch
|
||||
plugin = SCHEMA_MAP[results['schema']](**results)
|
||||
|
||||
# Save our asset
|
||||
if asset:
|
||||
plugin.asset = asset
|
||||
plugin = plugins.SCHEMA_MAP[results['schema']](**results)
|
||||
|
||||
return plugin
|
||||
|
||||
@ -202,23 +149,43 @@ class Apprise(object):
|
||||
# Initialize our return status
|
||||
return_status = True
|
||||
|
||||
if asset is None:
|
||||
if isinstance(asset, AppriseAsset):
|
||||
# prepare default asset
|
||||
asset = self.asset
|
||||
|
||||
if isinstance(servers, NotifyBase):
|
||||
if isinstance(servers, six.string_types):
|
||||
# build our server list
|
||||
servers = parse_list(servers)
|
||||
|
||||
elif isinstance(servers, (ConfigBase, NotifyBase, AppriseConfig)):
|
||||
# Go ahead and just add our plugin into our list
|
||||
self.servers.append(servers)
|
||||
return True
|
||||
|
||||
# build our server listings
|
||||
servers = parse_list(servers)
|
||||
elif not isinstance(servers, (tuple, set, list)):
|
||||
logging.error(
|
||||
"An invalid notification (type={}) was specified.".format(
|
||||
type(servers)))
|
||||
return False
|
||||
|
||||
for _server in servers:
|
||||
|
||||
if isinstance(_server, (ConfigBase, NotifyBase, AppriseConfig)):
|
||||
# Go ahead and just add our plugin into our list
|
||||
self.servers.append(_server)
|
||||
continue
|
||||
|
||||
elif not isinstance(_server, six.string_types):
|
||||
logging.error(
|
||||
"An invalid notification (type={}) was specified.".format(
|
||||
type(_server)))
|
||||
return_status = False
|
||||
continue
|
||||
|
||||
# Instantiate ourselves an object, this function throws or
|
||||
# returns None if it fails
|
||||
instance = Apprise.instantiate(_server, asset=asset, tag=tag)
|
||||
if not instance:
|
||||
if not isinstance(instance, NotifyBase):
|
||||
return_status = False
|
||||
logging.error(
|
||||
"Failed to load notification url: {}".format(_server),
|
||||
@ -254,7 +221,7 @@ class Apprise(object):
|
||||
"""
|
||||
|
||||
# Initialize our return result
|
||||
status = len(self.servers) > 0
|
||||
status = len(self) > 0
|
||||
|
||||
if not (title or body):
|
||||
return False
|
||||
@ -273,115 +240,89 @@ class Apprise(object):
|
||||
# tag=[('tagB', 'tagC')] = tagB and tagC
|
||||
|
||||
# Iterate over our loaded plugins
|
||||
for server in self.servers:
|
||||
for entry in self.servers:
|
||||
|
||||
if tag is not None:
|
||||
if isinstance(entry, (ConfigBase, AppriseConfig)):
|
||||
# load our servers
|
||||
servers = entry.servers()
|
||||
|
||||
if isinstance(tag, (list, tuple, set)):
|
||||
# using the tags detected; determine if we'll allow the
|
||||
# notification to be sent or not
|
||||
matched = False
|
||||
else:
|
||||
servers = [entry, ]
|
||||
|
||||
# Every entry here will be or'ed with the next
|
||||
for entry in tag:
|
||||
if isinstance(entry, (list, tuple, set)):
|
||||
|
||||
# treat these entries as though all elements found
|
||||
# must exist in the notification service
|
||||
tags = set(parse_list(entry))
|
||||
|
||||
if len(tags.intersection(
|
||||
server.tags)) == len(tags):
|
||||
# our set contains all of the entries found
|
||||
# in our notification server object
|
||||
matched = True
|
||||
break
|
||||
|
||||
elif entry in server:
|
||||
# our entr(ies) match what was found in our server
|
||||
# object.
|
||||
matched = True
|
||||
break
|
||||
|
||||
# else: keep looking
|
||||
|
||||
if not matched:
|
||||
# We did not meet any of our and'ed criteria
|
||||
continue
|
||||
|
||||
elif tag not in server:
|
||||
# one or more tags were defined and they didn't match the
|
||||
# entry in the current service; move along...
|
||||
for server in servers:
|
||||
# Apply our tag matching based on our defined logic
|
||||
if tag is not None and not is_exclusive_match(
|
||||
logic=tag, data=server.tags):
|
||||
continue
|
||||
|
||||
# else: our content was found inside the server, so we're good
|
||||
# If our code reaches here, we either did not define a tag (it
|
||||
# was set to None), or we did define a tag and the logic above
|
||||
# determined we need to notify the service it's associated with
|
||||
if server.notify_format not in conversion_map:
|
||||
if body_format == NotifyFormat.MARKDOWN and \
|
||||
server.notify_format == NotifyFormat.HTML:
|
||||
|
||||
# If our code reaches here, we either did not define a tag (it was
|
||||
# set to None), or we did define a tag and the logic above
|
||||
# determined we need to notify the service it's associated with
|
||||
if server.notify_format not in conversion_map:
|
||||
if body_format == NotifyFormat.MARKDOWN and \
|
||||
server.notify_format == NotifyFormat.HTML:
|
||||
# Apply Markdown
|
||||
conversion_map[server.notify_format] = markdown(body)
|
||||
|
||||
# Apply Markdown
|
||||
conversion_map[server.notify_format] = markdown(body)
|
||||
elif body_format == NotifyFormat.TEXT and \
|
||||
server.notify_format == NotifyFormat.HTML:
|
||||
|
||||
elif body_format == NotifyFormat.TEXT and \
|
||||
server.notify_format == NotifyFormat.HTML:
|
||||
# Basic TEXT to HTML format map; supports keys only
|
||||
re_map = {
|
||||
# Support Ampersand
|
||||
r'&': '&amp;',
|
||||
|
||||
# Basic TEXT to HTML format map; supports keys only
|
||||
re_map = {
|
||||
# Support Ampersand
|
||||
r'&': '&amp;',
|
||||
# Spaces to &nbsp; for formatting purposes since
|
||||
# multiple spaces are treated as one and this may
|
||||
# not be the callers intention
|
||||
r' ': '&nbsp;',
|
||||
|
||||
# Spaces to &nbsp; for formatting purposes since
|
||||
# multiple spaces are treated as one and this may not
|
||||
# be the callers intention
|
||||
r' ': '&nbsp;',
|
||||
# Tab support
|
||||
r'\t': ' ',
|
||||
|
||||
# Tab support
|
||||
r'\t': ' ',
|
||||
# Greater than and Less than Characters
|
||||
r'>': '&gt;',
|
||||
r'<': '&lt;',
|
||||
}
|
||||
|
||||
# Greater than and Less than Characters
|
||||
r'>': '&gt;',
|
||||
r'<': '&lt;',
|
||||
}
|
||||
# Compile our map
|
||||
re_table = re.compile(
|
||||
r'(' + '|'.join(
|
||||
map(re.escape, re_map.keys())) + r')',
|
||||
re.IGNORECASE,
|
||||
)
|
||||
|
||||
# Compile our map
|
||||
re_table = re.compile(
|
||||
r'(' + '|'.join(map(re.escape, re_map.keys())) + r')',
|
||||
re.IGNORECASE,
|
||||
)
|
||||
# Execute our map against our body in addition to
|
||||
# swapping out new lines and replacing them with <br/>
|
||||
conversion_map[server.notify_format] = \
|
||||
re.sub(r'\r*\n', '<br/>\r\n',
|
||||
re_table.sub(
|
||||
lambda x: re_map[x.group()], body))
|
||||
|
||||
# Execute our map against our body in addition to swapping
|
||||
# out new lines and replacing them with <br/>
|
||||
conversion_map[server.notify_format] = \
|
||||
re.sub(r'\r*\n', '<br/>\r\n',
|
||||
re_table.sub(lambda x: re_map[x.group()], body))
|
||||
else:
|
||||
# Store entry directly
|
||||
conversion_map[server.notify_format] = body
|
||||
|
||||
else:
|
||||
# Store entry directly
|
||||
conversion_map[server.notify_format] = body
|
||||
try:
|
||||
# Send notification
|
||||
if not server.notify(
|
||||
body=conversion_map[server.notify_format],
|
||||
title=title,
|
||||
notify_type=notify_type):
|
||||
|
||||
try:
|
||||
# Send notification
|
||||
if not server.notify(
|
||||
body=conversion_map[server.notify_format],
|
||||
title=title,
|
||||
notify_type=notify_type):
|
||||
# Toggle our return status flag
|
||||
status = False
|
||||
|
||||
# Toggle our return status flag
|
||||
except TypeError:
|
||||
# These are our internally thrown notifications
|
||||
status = False
|
||||
|
||||
except TypeError:
|
||||
# These are our internally thrown notifications
|
||||
status = False
|
||||
|
||||
except Exception:
|
||||
# A catch all so we don't have to abort early
|
||||
# just because one of our plugins has a bug in it.
|
||||
logging.exception("Notification Exception")
|
||||
status = False
|
||||
except Exception:
|
||||
# A catch all so we don't have to abort early
|
||||
# just because one of our plugins has a bug in it.
|
||||
logging.exception("Notification Exception")
|
||||
status = False
|
||||
|
||||
return status
|
||||
|
||||
@ -412,12 +353,12 @@ class Apprise(object):
|
||||
|
||||
# Standard protocol(s) should be None or a tuple
|
||||
protocols = getattr(plugin, 'protocol', None)
|
||||
if compat_is_basestring(protocols):
|
||||
if isinstance(protocols, six.string_types):
|
||||
protocols = (protocols, )
|
||||
|
||||
# Secure protocol(s) should be None or a tuple
|
||||
secure_protocols = getattr(plugin, 'secure_protocol', None)
|
||||
if compat_is_basestring(secure_protocols):
|
||||
if isinstance(secure_protocols, six.string_types):
|
||||
secure_protocols = (secure_protocols, )
|
||||
|
||||
# Build our response object
|
||||
@ -439,27 +380,87 @@ class Apprise(object):
|
||||
|
||||
def pop(self, index):
|
||||
"""
|
||||
Removes an indexed Notification Service from the stack and
|
||||
returns it.
|
||||
Removes an indexed Notification Service from the stack and returns it.
|
||||
|
||||
The thing is we can never pop AppriseConfig() entries, only what was
|
||||
loaded within them. So pop needs to carefully iterate over our list
|
||||
and only track actual entries.
|
||||
"""
|
||||
|
||||
# Remove our entry
|
||||
return self.servers.pop(index)
|
||||
# Tracking variables
|
||||
prev_offset = -1
|
||||
offset = prev_offset
|
||||
|
||||
for idx, s in enumerate(self.servers):
|
||||
if isinstance(s, (ConfigBase, AppriseConfig)):
|
||||
servers = s.servers()
|
||||
if len(servers) > 0:
|
||||
# Acquire a new maximum offset to work with
|
||||
offset = prev_offset + len(servers)
|
||||
|
||||
if offset >= index:
|
||||
# we can pop an element from our config stack
|
||||
fn = s.pop if isinstance(s, ConfigBase) \
|
||||
else s.server_pop
|
||||
|
||||
return fn(index if prev_offset == -1
|
||||
else (index - prev_offset - 1))
|
||||
|
||||
else:
|
||||
offset = prev_offset + 1
|
||||
if offset == index:
|
||||
return self.servers.pop(idx)
|
||||
|
||||
# Update our old offset
|
||||
prev_offset = offset
|
||||
|
||||
# If we reach here, then we indexed out of range
|
||||
raise IndexError('list index out of range')
|
||||
|
||||
def __getitem__(self, index):
|
||||
"""
|
||||
Returns the indexed server entry of a loaded notification server
|
||||
"""
|
||||
return self.servers[index]
|
||||
# Tracking variables
|
||||
prev_offset = -1
|
||||
offset = prev_offset
|
||||
|
||||
for idx, s in enumerate(self.servers):
|
||||
if isinstance(s, (ConfigBase, AppriseConfig)):
|
||||
# Get our list of servers associated with our config object
|
||||
servers = s.servers()
|
||||
if len(servers) > 0:
|
||||
# Acquire a new maximum offset to work with
|
||||
offset = prev_offset + len(servers)
|
||||
|
||||
if offset >= index:
|
||||
return servers[index if prev_offset == -1
|
||||
else (index - prev_offset - 1)]
|
||||
|
||||
else:
|
||||
offset = prev_offset + 1
|
||||
if offset == index:
|
||||
return self.servers[idx]
|
||||
|
||||
# Update our old offset
|
||||
prev_offset = offset
|
||||
|
||||
# If we reach here, then we indexed out of range
|
||||
raise IndexError('list index out of range')
|
||||
|
||||
def __iter__(self):
|
||||
"""
|
||||
Returns an iterator to our server list
|
||||
Returns an iterator to each of our servers loaded. This includes those
|
||||
found inside configuration.
|
||||
"""
|
||||
return iter(self.servers)
|
||||
return chain(*[[s] if not isinstance(s, (ConfigBase, AppriseConfig))
|
||||
else iter(s.servers()) for s in self.servers])
|
||||
|
||||
def __len__(self):
|
||||
"""
|
||||
Returns the number of servers loaded
|
||||
Returns the number of servers loaded; this includes those found within
|
||||
loaded configuration. This function never actually counts the
|
||||
Config entries themselves (if they exist), only what they contain.
|
||||
"""
|
||||
return len(self.servers)
|
||||
return sum([1 if not isinstance(s, (ConfigBase, AppriseConfig))
|
||||
else len(s.servers()) for s in self.servers])
|
||||
|
292
apprise/AppriseConfig.py
Normal file
@ -0,0 +1,292 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import six
|
||||
import logging
|
||||
|
||||
from . import config
|
||||
from . import ConfigBase
|
||||
from . import URLBase
|
||||
from .AppriseAsset import AppriseAsset
|
||||
|
||||
from .utils import GET_SCHEMA_RE
|
||||
from .utils import parse_list
|
||||
from .utils import is_exclusive_match
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AppriseConfig(object):
|
||||
"""
|
||||
Our Apprise Configuration File Manager
|
||||
|
||||
- Supports a list of URLs defined one after another (text format)
|
||||
- Supports a distinct YAML configuration format
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, paths=None, asset=None, cache=True, **kwargs):
|
||||
"""
|
||||
Loads all of the paths specified (if any).
|
||||
|
||||
The path can either be a single string identifying one explicit
|
||||
location, otherwise you can pass in a series of locations to scan
|
||||
via a list.
|
||||
|
||||
If no path is specified then a default list is used.
|
||||
|
||||
If cache is set to True, then after the data is loaded, it's cached
|
||||
within this object so it isn't retrieved again later.
|
||||
"""
|
||||
|
||||
# Initialize a server list of URLs
|
||||
self.configs = list()
|
||||
|
||||
# Prepare our Asset Object
|
||||
self.asset = \
|
||||
asset if isinstance(asset, AppriseAsset) else AppriseAsset()
|
||||
|
||||
if paths is not None:
|
||||
# Store our path(s)
|
||||
self.add(paths)
|
||||
|
||||
return
|
||||
|
||||
def add(self, configs, asset=None, tag=None):
|
||||
"""
|
||||
Adds one or more config URLs into our list.
|
||||
|
||||
You can override the global asset if you wish by including it with the
|
||||
config(s) that you add.
|
||||
|
||||
"""
|
||||
|
||||
# Initialize our return status
|
||||
return_status = True
|
||||
|
||||
if isinstance(asset, AppriseAsset):
|
||||
# prepare default asset
|
||||
asset = self.asset
|
||||
|
||||
if isinstance(configs, ConfigBase):
|
||||
# Go ahead and just add our configuration into our list
|
||||
self.configs.append(configs)
|
||||
return True
|
||||
|
||||
elif isinstance(configs, six.string_types):
|
||||
# Save our path
|
||||
configs = (configs, )
|
||||
|
||||
elif not isinstance(configs, (tuple, set, list)):
|
||||
logging.error(
|
||||
'An invalid configuration path (type={}) was '
|
||||
'specified.'.format(type(configs)))
|
||||
return False
|
||||
|
||||
# Iterate over our provided configuration entries
|
||||
for _config in configs:
|
||||
|
||||
if isinstance(_config, ConfigBase):
|
||||
# Go ahead and just add our configuration into our list
|
||||
self.configs.append(_config)
|
||||
continue
|
||||
|
||||
elif not isinstance(_config, six.string_types):
|
||||
logging.error(
|
||||
"An invalid configuration (type={}) was specified.".format(
|
||||
type(_config)))
|
||||
return_status = False
|
||||
continue
|
||||
|
||||
# Instantiate ourselves an object, this function throws or
|
||||
# returns None if it fails
|
||||
instance = AppriseConfig.instantiate(_config, asset=asset, tag=tag)
|
||||
if not isinstance(instance, ConfigBase):
|
||||
return_status = False
|
||||
logging.error(
|
||||
"Failed to load configuration url: {}".format(_config),
|
||||
)
|
||||
continue
|
||||
|
||||
# Add our initialized plugin to our server listings
|
||||
self.configs.append(instance)
|
||||
|
||||
# Return our status
|
||||
return return_status
|
||||
|
||||
def servers(self, tag=None, cache=True):
|
||||
"""
|
||||
Returns all of our servers dynamically built based on parsed
|
||||
configuration.
|
||||
|
||||
If a tag is specified, it applies to the configuration sources
|
||||
themselves and not the notification services inside them.
|
||||
|
||||
This is for filtering the configuration files polled for
|
||||
results.
|
||||
|
||||
"""
|
||||
# Build our tag setup
|
||||
# - top level entries are treated as an 'or'
|
||||
# - second level (or more) entries are treated as 'and'
|
||||
#
|
||||
# examples:
|
||||
# tag="tagA, tagB" = tagA or tagB
|
||||
# tag=['tagA', 'tagB'] = tagA or tagB
|
||||
# tag=[('tagA', 'tagC'), 'tagB'] = (tagA and tagC) or tagB
|
||||
# tag=[('tagB', 'tagC')] = tagB and tagC
|
||||
|
||||
response = list()
|
||||
|
||||
for entry in self.configs:
|
||||
|
||||
# Apply our tag matching based on our defined logic
|
||||
if tag is not None and not is_exclusive_match(
|
||||
logic=tag, data=entry.tags):
|
||||
continue
|
||||
|
||||
# Build ourselves a list of services dynamically and return them
|
||||
# as a list
|
||||
response.extend(entry.servers(cache=cache))
|
||||
|
||||
return response
|
||||
|
||||
@staticmethod
|
||||
def instantiate(url, asset=None, tag=None, suppress_exceptions=True):
|
||||
"""
|
||||
Returns the instance of an instantiated configuration plugin based on
|
||||
the provided Server URL. If the url fails to be parsed, then None
|
||||
is returned.
|
||||
|
||||
"""
|
||||
# Attempt to acquire the schema at the very least to allow our
|
||||
# configuration based urls.
|
||||
schema = GET_SCHEMA_RE.match(url)
|
||||
if schema is None:
|
||||
# Plan B is to assume we're dealing with a file
|
||||
schema = config.ConfigFile.protocol
|
||||
url = '{}://{}'.format(schema, URLBase.quote(url))
|
||||
|
||||
else:
|
||||
# Ensure our schema is always in lower case
|
||||
schema = schema.group('schema').lower()
|
||||
|
||||
# Some basic validation
|
||||
if schema not in config.SCHEMA_MAP:
|
||||
logger.error('Unsupported schema {}.'.format(schema))
|
||||
return None
|
||||
|
||||
# Parse our url details of the server object as a dictionary containing
|
||||
# all of the information parsed from our URL
|
||||
results = config.SCHEMA_MAP[schema].parse_url(url)
|
||||
|
||||
if not results:
|
||||
# Failed to parse the server URL
|
||||
logger.error('Unparseable URL {}.'.format(url))
|
||||
return None
|
||||
|
||||
# Build a list of tags to associate with the newly added notifications
|
||||
results['tag'] = set(parse_list(tag))
|
||||
|
||||
# Prepare our Asset Object
|
||||
results['asset'] = \
|
||||
asset if isinstance(asset, AppriseAsset) else AppriseAsset()
|
||||
|
||||
if suppress_exceptions:
|
||||
try:
|
||||
# Attempt to create an instance of our plugin using the parsed
|
||||
# URL information
|
||||
cfg_plugin = config.SCHEMA_MAP[results['schema']](**results)
|
||||
|
||||
except Exception:
|
||||
# the arguments are invalid or can not be used.
|
||||
logger.error('Could not load URL: %s' % url)
|
||||
return None
|
||||
|
||||
else:
|
||||
# Attempt to create an instance of our plugin using the parsed
|
||||
# URL information but don't wrap it in a try catch
|
||||
cfg_plugin = config.SCHEMA_MAP[results['schema']](**results)
|
||||
|
||||
return cfg_plugin
|
||||
|
||||
def clear(self):
|
||||
"""
|
||||
Empties our configuration list
|
||||
|
||||
"""
|
||||
self.configs[:] = []
|
||||
|
||||
def server_pop(self, index):
|
||||
"""
|
||||
Removes an indexed Apprise Notification from the servers
|
||||
"""
|
||||
|
||||
# Tracking variables
|
||||
prev_offset = -1
|
||||
offset = prev_offset
|
||||
|
||||
for entry in self.configs:
|
||||
servers = entry.servers(cache=True)
|
||||
if len(servers) > 0:
|
||||
# Acquire a new maximum offset to work with
|
||||
offset = prev_offset + len(servers)
|
||||
|
||||
if offset >= index:
|
||||
# we can pop a notification from our config stack
|
||||
return entry.pop(index if prev_offset == -1
|
||||
else (index - prev_offset - 1))
|
||||
|
||||
# Update our old offset
|
||||
prev_offset = offset
|
||||
|
||||
# If we reach here, then we indexed out of range
|
||||
raise IndexError('list index out of range')
|
||||
|
||||
def pop(self, index):
|
||||
"""
|
||||
Removes an indexed Apprise Configuration from the stack and
|
||||
returns it.
|
||||
"""
|
||||
# Remove our entry
|
||||
return self.configs.pop(index)
|
||||
|
||||
def __getitem__(self, index):
|
||||
"""
|
||||
Returns the indexed config entry of a loaded apprise configuration
|
||||
"""
|
||||
return self.configs[index]
|
||||
|
||||
def __iter__(self):
|
||||
"""
|
||||
Returns an iterator to our config list
|
||||
"""
|
||||
return iter(self.configs)
|
||||
|
||||
def __len__(self):
|
||||
"""
|
||||
Returns the number of config entries loaded
|
||||
"""
|
||||
return len(self.configs)
|
427
apprise/URLBase.py
Normal file
@ -0,0 +1,427 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import logging
|
||||
from time import sleep
|
||||
from datetime import datetime
|
||||
from xml.sax.saxutils import escape as sax_escape
|
||||
|
||||
try:
|
||||
# Python 2.7
|
||||
from urllib import unquote as _unquote
|
||||
from urllib import quote as _quote
|
||||
from urllib import urlencode as _urlencode
|
||||
|
||||
except ImportError:
|
||||
# Python 3.x
|
||||
from urllib.parse import unquote as _unquote
|
||||
from urllib.parse import quote as _quote
|
||||
from urllib.parse import urlencode as _urlencode
|
||||
|
||||
from .AppriseAsset import AppriseAsset
|
||||
from .utils import parse_url
|
||||
from .utils import parse_bool
|
||||
from .utils import parse_list
|
||||
|
||||
# Used to break a path list into parts
|
||||
PATHSPLIT_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
|
||||
|
||||
# Define the HTML Lookup Table
|
||||
HTML_LOOKUP = {
|
||||
400: 'Bad Request - Unsupported Parameters.',
|
||||
401: 'Verification Failed.',
|
||||
404: 'Page not found.',
|
||||
405: 'Method not allowed.',
|
||||
500: 'Internal server error.',
|
||||
503: 'Servers are overloaded.',
|
||||
}
|
||||
|
||||
|
||||
class URLBase(object):
|
||||
"""
|
||||
This is the base class for all URL Manipulation
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the URL
|
||||
service_name = None
|
||||
|
||||
# The default simple (insecure) protocol
|
||||
# all inheriting entries must provide their protocol lookup
|
||||
# protocol:// (in this example they would specify 'protocol')
|
||||
protocol = None
|
||||
|
||||
# The default secure protocol
|
||||
# all inheriting entries must provide their protocol lookup
|
||||
# protocols:// (in this example they would specify 'protocols')
|
||||
# This value can be the same as the defined protocol.
|
||||
secure_protocol = None
|
||||
|
||||
# Throttle
|
||||
request_rate_per_sec = 0
|
||||
|
||||
# Maintain a set of tags to associate with this specific notification
|
||||
tags = set()
|
||||
|
||||
# Logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def __init__(self, asset=None, **kwargs):
|
||||
"""
|
||||
Initialize some general logging and common server arguments that will
|
||||
keep things consistent when working with the children that
|
||||
inherit this class.
|
||||
|
||||
"""
|
||||
# Prepare our Asset Object
|
||||
self.asset = \
|
||||
asset if isinstance(asset, AppriseAsset) else AppriseAsset()
|
||||
|
||||
# Certificate Verification (for SSL calls); default to being enabled
|
||||
self.verify_certificate = kwargs.get('verify', True)
|
||||
|
||||
# Secure Mode
|
||||
self.secure = kwargs.get('secure', False)
|
||||
|
||||
self.host = kwargs.get('host', '')
|
||||
self.port = kwargs.get('port')
|
||||
if self.port:
|
||||
try:
|
||||
self.port = int(self.port)
|
||||
|
||||
except (TypeError, ValueError):
|
||||
self.port = None
|
||||
|
||||
self.user = kwargs.get('user')
|
||||
self.password = kwargs.get('password')
|
||||
|
||||
if 'tag' in kwargs:
|
||||
# We want to associate some tags with our notification service.
|
||||
# the code below gets the 'tag' argument if defined, otherwise
|
||||
# it just falls back to whatever was already defined globally
|
||||
self.tags = set(parse_list(kwargs.get('tag', self.tags)))
|
||||
|
||||
# Tracks the time any i/o was made to the remote server. This value
|
||||
# is automatically set and controlled through the throttle() call.
|
||||
self._last_io_datetime = None
|
||||
|
||||
def throttle(self, last_io=None):
|
||||
"""
|
||||
A common throttle control
|
||||
"""
|
||||
|
||||
if last_io is not None:
|
||||
# Assume specified last_io
|
||||
self._last_io_datetime = last_io
|
||||
|
||||
# Get ourselves a reference time of 'now'
|
||||
reference = datetime.now()
|
||||
|
||||
if self._last_io_datetime is None:
|
||||
# Set time to 'now' and no need to throttle
|
||||
self._last_io_datetime = reference
|
||||
return
|
||||
|
||||
if self.request_rate_per_sec <= 0.0:
|
||||
# We're done if there is no throttle limit set
|
||||
return
|
||||
|
||||
# If we reach here, we need to do additional logic.
|
||||
# If the difference between the reference time and 'now' is less than
|
||||
# the defined request_rate_per_sec then we need to throttle for the
|
||||
# remaining balance of this time.
|
||||
|
||||
elapsed = (reference - self._last_io_datetime).total_seconds()
|
||||
|
||||
if elapsed < self.request_rate_per_sec:
|
||||
self.logger.debug('Throttling for {}s...'.format(
|
||||
self.request_rate_per_sec - elapsed))
|
||||
sleep(self.request_rate_per_sec - elapsed)
|
||||
|
||||
# Update our timestamp before we leave
|
||||
self._last_io_datetime = reference
|
||||
return
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Assembles the URL associated with the notification based on the
|
||||
arguments provided.
|
||||
|
||||
"""
|
||||
raise NotImplementedError("url() is implemented by the child class.")
|
||||
|
||||
def __contains__(self, tags):
|
||||
"""
|
||||
Returns true if the tag specified is associated with this notification.
|
||||
|
||||
tag can also be a tuple, set, and/or list
|
||||
|
||||
"""
|
||||
if isinstance(tags, (tuple, set, list)):
|
||||
return bool(set(tags) & self.tags)
|
||||
|
||||
# return any match
|
||||
return tags in self.tags
|
||||
|
||||
@staticmethod
|
||||
def escape_html(html, convert_new_lines=False, whitespace=True):
|
||||
"""
|
||||
Takes html text as input and escapes it so that it won't
|
||||
conflict with any xml/html wrapping characters.
|
||||
|
||||
Args:
|
||||
html (str): The HTML code to escape
|
||||
convert_new_lines (:obj:`bool`, optional): escape new lines (\n)
|
||||
whitespace (:obj:`bool`, optional): escape whitespace
|
||||
|
||||
Returns:
|
||||
str: The escaped html
|
||||
"""
|
||||
if not html:
|
||||
# nothing more to do; return object as is
|
||||
return html
|
||||
|
||||
# Escape HTML
|
||||
escaped = sax_escape(html, {"'": "&apos;", "\"": "&quot;"})
|
||||
|
||||
if whitespace:
|
||||
# Tidy up whitespace too
|
||||
escaped = escaped\
|
||||
.replace(u'\t', u' ')\
|
||||
.replace(u' ', u'&nbsp;')
|
||||
|
||||
if convert_new_lines:
|
||||
return escaped.replace(u'\n', u'<br/>')
|
||||
|
||||
return escaped
|
||||
|
||||
@staticmethod
|
||||
def unquote(content, encoding='utf-8', errors='replace'):
|
||||
"""
|
||||
Replace %xx escapes by their single-character equivalent. The optional
|
||||
encoding and errors parameters specify how to decode percent-encoded
|
||||
sequences.
|
||||
|
||||
Wrapper to Python's unquote while remaining compatible with both
|
||||
Python 2 & 3 since the reference to this function changed between
|
||||
versions.
|
||||
|
||||
Note: errors set to 'replace' means that invalid sequences are
|
||||
replaced by a placeholder character.
|
||||
|
||||
Args:
|
||||
content (str): The quoted URI string you wish to unquote
|
||||
encoding (:obj:`str`, optional): encoding type
|
||||
errors (:obj:`str`, errors): how to handle invalid character found
|
||||
in encoded string (defined by encoding)
|
||||
|
||||
Returns:
|
||||
str: The unquoted URI string
|
||||
"""
|
||||
if not content:
|
||||
return ''
|
||||
|
||||
try:
|
||||
# Python v3.x
|
||||
return _unquote(content, encoding=encoding, errors=errors)
|
||||
|
||||
except TypeError:
|
||||
# Python v2.7
|
||||
return _unquote(content)
|
||||
|
||||
@staticmethod
|
||||
def quote(content, safe='/', encoding=None, errors=None):
|
||||
""" Replaces single character non-ascii characters and URI specific
|
||||
ones by their %xx code.
|
||||
|
||||
Wrapper to Python's quote while remaining compatible with both
|
||||
Python 2 & 3 since the reference to this function changed between
|
||||
versions.
|
||||
|
||||
Args:
|
||||
content (str): The URI string you wish to quote
|
||||
safe (str): non-ascii characters and URI specific ones that you
|
||||
do not wish to escape (if detected). Setting this
|
||||
string to an empty one causes everything to be
|
||||
escaped.
|
||||
encoding (:obj:`str`, optional): encoding type
|
||||
errors (:obj:`str`, errors): how to handle invalid character found
|
||||
in encoded string (defined by encoding)
|
||||
|
||||
Returns:
|
||||
str: The quoted URI string
|
||||
"""
|
||||
if not content:
|
||||
return ''
|
||||
|
||||
try:
|
||||
# Python v3.x
|
||||
return _quote(content, safe=safe, encoding=encoding, errors=errors)
|
||||
|
||||
except TypeError:
|
||||
# Python v2.7
|
||||
return _quote(content, safe=safe)
|
||||
|
||||
@staticmethod
|
||||
def urlencode(query, doseq=False, safe='', encoding=None, errors=None):
|
||||
"""Convert a mapping object or a sequence of two-element tuples
|
||||
|
||||
Wrapper to Python's urlencode while remaining compatible with both
|
||||
Python 2 & 3 since the reference to this function changed between
|
||||
versions.
|
||||
|
||||
The resulting string is a series of key=value pairs separated by '&'
|
||||
characters, where both key and value are quoted using the quote()
|
||||
function.
|
||||
|
||||
Note: If the dictionary entry contains an entry that is set to None
|
||||
it is not included in the final result set. If you want to
|
||||
pass in an empty variable, set it to an empty string.
|
||||
|
||||
Args:
|
||||
query (str): The dictionary to encode
|
||||
doseq (:obj:`bool`, optional): Handle sequences
|
||||
safe (:obj:`str`): non-ascii characters and URI specific ones that
|
||||
you do not wish to escape (if detected). Setting this string
|
||||
to an empty one causes everything to be escaped.
|
||||
encoding (:obj:`str`, optional): encoding type
|
||||
errors (:obj:`str`, errors): how to handle invalid character found
|
||||
in encoded string (defined by encoding)
|
||||
|
||||
Returns:
|
||||
str: The escaped parameters returned as a string
|
||||
"""
|
||||
# Tidy query by eliminating any records set to None
|
||||
_query = {k: v for (k, v) in query.items() if v is not None}
|
||||
try:
|
||||
# Python v3.x
|
||||
return _urlencode(
|
||||
_query, doseq=doseq, safe=safe, encoding=encoding,
|
||||
errors=errors)
|
||||
|
||||
except TypeError:
|
||||
# Python v2.7
|
||||
return _urlencode(_query)
|
||||
|
||||
@staticmethod
|
||||
def split_path(path, unquote=True):
|
||||
"""Splits a URL up into a list object.
|
||||
|
||||
Parses a specified URL and breaks it into a list.
|
||||
|
||||
Args:
|
||||
path (str): The path to split up into a list.
|
||||
unquote (:obj:`bool`, optional): call unquote on each element
|
||||
added to the returned list.
|
||||
|
||||
Returns:
|
||||
list: A list containing all of the elements in the path
|
||||
"""
|
||||
|
||||
if unquote:
|
||||
return PATHSPLIT_LIST_DELIM.split(
|
||||
URLBase.unquote(path).lstrip('/'))
|
||||
return PATHSPLIT_LIST_DELIM.split(path.lstrip('/'))
|
||||
|
||||
@property
|
||||
def app_id(self):
|
||||
return self.asset.app_id
|
||||
|
||||
@property
|
||||
def app_desc(self):
|
||||
return self.asset.app_desc
|
||||
|
||||
@property
|
||||
def app_url(self):
|
||||
return self.asset.app_url
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url, verify_host=True):
|
||||
"""Parses the URL and returns it broken apart into a dictionary.
|
||||
|
||||
This is very specific and customized for Apprise.
|
||||
|
||||
|
||||
Args:
|
||||
url (str): The URL you want to fully parse.
|
||||
verify_host (:obj:`bool`, optional): a flag kept with the parsed
|
||||
URL which some child classes will later use to verify SSL
|
||||
keys (if SSL transactions take place). Unless under very
|
||||
specific circumstances, it is strongly recommended that
|
||||
you leave this default value set to True.
|
||||
|
||||
Returns:
|
||||
A dictionary is returned containing the URL fully parsed if
|
||||
successful, otherwise None is returned.
|
||||
"""
|
||||
|
||||
results = parse_url(
|
||||
url, default_schema='unknown', verify_host=verify_host)
|
||||
|
||||
if not results:
|
||||
# We're done; we failed to parse our url
|
||||
return results
|
||||
|
||||
# if our URL ends with an 's', then assume our secure flag is set.
|
||||
results['secure'] = (results['schema'][-1] == 's')
|
||||
|
||||
# Support SSL Certificate 'verify' keyword. Default to being enabled
|
||||
results['verify'] = verify_host
|
||||
|
||||
if 'verify' in results['qsd']:
|
||||
results['verify'] = parse_bool(
|
||||
results['qsd'].get('verify', True))
|
||||
|
||||
# Password overrides
|
||||
if 'pass' in results['qsd']:
|
||||
results['password'] = results['qsd']['pass']
|
||||
|
||||
# User overrides
|
||||
if 'user' in results['qsd']:
|
||||
results['user'] = results['qsd']['user']
|
||||
|
||||
return results
|
||||
|
||||
@staticmethod
|
||||
def http_response_code_lookup(code, response_mask=None):
|
||||
"""Parses the interger response code returned by a remote call from
|
||||
a web request into its human-readable string version.
|
||||
|
||||
You can over-ride codes or add new ones by providing your own
|
||||
response_mask that contains a dictionary of integer -> string mapped
|
||||
variables
|
||||
"""
|
||||
if isinstance(response_mask, dict):
|
||||
# Apply any/all header over-rides defined
|
||||
HTML_LOOKUP.update(response_mask)
|
||||
|
||||
# Look up our response
|
||||
try:
|
||||
response = HTML_LOOKUP[code]
|
||||
|
||||
except KeyError:
|
||||
response = ''
|
||||
|
||||
return response
|
@ -27,7 +27,7 @@ __title__ = 'apprise'
|
||||
__version__ = '0.7.3'
|
||||
__author__ = 'Chris Caron'
|
||||
__license__ = 'MIT'
|
||||
__copywrite__ = 'Copyright 2019 Chris Caron <lead2gold@gmail.com>'
|
||||
__copywrite__ = 'Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>'
|
||||
__email__ = 'lead2gold@gmail.com'
|
||||
__status__ = 'Production'
|
||||
|
||||
@ -39,10 +39,16 @@ from .common import NotifyFormat
|
||||
from .common import NOTIFY_FORMATS
|
||||
from .common import OverflowMode
|
||||
from .common import OVERFLOW_MODES
|
||||
from .common import ConfigFormat
|
||||
from .common import CONFIG_FORMATS
|
||||
|
||||
from .URLBase import URLBase
|
||||
from .plugins.NotifyBase import NotifyBase
|
||||
from .config.ConfigBase import ConfigBase
|
||||
|
||||
from .Apprise import Apprise
|
||||
from .AppriseAsset import AppriseAsset
|
||||
from .AppriseConfig import AppriseConfig
|
||||
|
||||
# Set default logging handler to avoid "No handler found" warnings.
|
||||
import logging
|
||||
@ -51,9 +57,11 @@ logging.getLogger(__name__).addHandler(NullHandler())
|
||||
|
||||
__all__ = [
|
||||
# Core
|
||||
'Apprise', 'AppriseAsset', 'NotifyBase',
|
||||
'Apprise', 'AppriseAsset', 'AppriseConfig', 'URLBase', 'NotifyBase',
|
||||
'ConfigBase',
|
||||
|
||||
# Reference
|
||||
'NotifyType', 'NotifyImageSize', 'NotifyFormat', 'OverflowMode',
|
||||
'NOTIFY_TYPES', 'NOTIFY_IMAGE_SIZES', 'NOTIFY_FORMATS', 'OVERFLOW_MODES',
|
||||
'ConfigFormat', 'CONFIG_FORMATS',
|
||||
]
|
||||
|
@ -31,6 +31,12 @@ import sys
|
||||
from . import NotifyType
|
||||
from . import Apprise
|
||||
from . import AppriseAsset
|
||||
from . import AppriseConfig
|
||||
from .utils import parse_list
|
||||
from . import __title__
|
||||
from . import __version__
|
||||
from . import __license__
|
||||
from . import __copywrite__
|
||||
|
||||
# Logging
|
||||
logger = logging.getLogger('apprise.plugins.NotifyBase')
|
||||
@ -39,6 +45,14 @@ logger = logging.getLogger('apprise.plugins.NotifyBase')
|
||||
# can be specified to get the help menu to come up
|
||||
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
|
||||
|
||||
# Define our default configuration we use if nothing is otherwise specified
|
||||
DEFAULT_SEARCH_PATHS = (
|
||||
'file://~/.apprise',
|
||||
'file://~/.apprise.yml',
|
||||
'file://~/.config/apprise',
|
||||
'file://~/.config/apprise.yml',
|
||||
)
|
||||
|
||||
|
||||
def print_help_msg(command):
|
||||
"""
|
||||
@ -49,19 +63,39 @@ def print_help_msg(command):
|
||||
click.echo(command.get_help(ctx))
|
||||
|
||||
|
||||
def print_version_msg():
|
||||
"""
|
||||
Prints version message when -V or --version is specified.
|
||||
|
||||
"""
|
||||
result = list()
|
||||
result.append('{} v{}'.format(__title__, __version__))
|
||||
result.append(__copywrite__)
|
||||
result.append(
|
||||
'This code is licensed under the {} License.'.format(__license__))
|
||||
click.echo('\n'.join(result))
|
||||
|
||||
|
||||
@click.command(context_settings=CONTEXT_SETTINGS)
|
||||
@click.option('--title', '-t', default=None, type=str,
|
||||
help='Specify the message title.')
|
||||
@click.option('--body', '-b', default=None, type=str,
|
||||
help='Specify the message body.')
|
||||
@click.option('--config', '-c', default=None, type=str, multiple=True,
|
||||
help='Specify one or more configuration locations.')
|
||||
@click.option('--notification-type', '-n', default=NotifyType.INFO, type=str,
|
||||
metavar='TYPE', help='Specify the message type (default=info).')
|
||||
@click.option('--theme', '-T', default='default', type=str,
|
||||
help='Specify the default theme.')
|
||||
@click.option('--tag', '-g', default=None, type=str, multiple=True,
|
||||
help='Specify one or more tags to reference.')
|
||||
@click.option('-v', '--verbose', count=True)
|
||||
@click.option('-V', '--version', is_flag=True,
|
||||
help='Display the apprise version and exit.')
|
||||
@click.argument('urls', nargs=-1,
|
||||
metavar='SERVER_URL [SERVER_URL2 [SERVER_URL3]]',)
|
||||
def main(title, body, urls, notification_type, theme, verbose):
|
||||
def main(title, body, config, urls, notification_type, theme, tag, verbose,
|
||||
version):
|
||||
"""
|
||||
Send a notification to all of the specified servers identified by their
|
||||
URLs the content provided within the title, body and notification-type.
|
||||
@ -82,30 +116,47 @@ def main(title, body, urls, notification_type, theme, verbose):
|
||||
else:
|
||||
logger.setLevel(logging.ERROR)
|
||||
|
||||
if version:
|
||||
print_version_msg()
|
||||
sys.exit(0)
|
||||
|
||||
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
|
||||
ch.setFormatter(formatter)
|
||||
logger.addHandler(ch)
|
||||
|
||||
if not urls:
|
||||
logger.error('You must specify at least one server URL.')
|
||||
print_help_msg(main)
|
||||
sys.exit(1)
|
||||
|
||||
# Prepare our asset
|
||||
asset = AppriseAsset(theme=theme)
|
||||
|
||||
# Create our object
|
||||
a = Apprise(asset=asset)
|
||||
|
||||
# Load our default configuration paths if no URLs or configuration were
|
||||
# specified on the command line; otherwise load what was provided
|
||||
a.add(AppriseConfig(
|
||||
paths=DEFAULT_SEARCH_PATHS
|
||||
if not (config or urls) else config), asset=asset)
|
||||
|
||||
# Load our inventory up
|
||||
for url in urls:
|
||||
a.add(url)
|
||||
|
||||
if len(a) == 0:
|
||||
logger.error(
|
||||
'You must specify at least one server URL or populated '
|
||||
'configuration file.')
|
||||
print_help_msg(main)
|
||||
sys.exit(1)
|
||||
|
||||
if body is None:
|
||||
# if no body was specified, then read from STDIN
|
||||
body = click.get_text_stream('stdin').read()
|
||||
|
||||
# each --tag entry comprises a comma-separated 'and' list
|
||||
# we 'or' each of the --tag sets specified.
|
||||
tags = None if not tag else [parse_list(t) for t in tag]
|
||||
|
||||
# now print it out
|
||||
if a.notify(title=title, body=body, notify_type=notification_type):
|
||||
if a.notify(
|
||||
body=body, title=title, notify_type=notification_type, tag=tags):
|
||||
sys.exit(0)
|
||||
sys.exit(1)
|
||||
|
@ -105,3 +105,26 @@ OVERFLOW_MODES = (
|
||||
OverflowMode.TRUNCATE,
|
||||
OverflowMode.SPLIT,
|
||||
)
|
||||
|
||||
|
||||
class ConfigFormat(object):
|
||||
"""
|
||||
A list of pre-defined config formats that can be passed via the
|
||||
apprise library.
|
||||
"""
|
||||
|
||||
# A text based configuration. This consists of a list of URLs delimited by
|
||||
# a new line. pound/hashtag (#) or semi-colon (;) can be used as comment
|
||||
# characters.
|
||||
TEXT = 'text'
|
||||
|
||||
# YAML files allow a richer experience when setting up your
|
||||
# apprise configuration files.
|
||||
YAML = 'yaml'
|
||||
|
||||
|
||||
# Define our configuration formats mostly used for verification
|
||||
CONFIG_FORMATS = (
|
||||
ConfigFormat.TEXT,
|
||||
ConfigFormat.YAML,
|
||||
)
|
||||
|
584
apprise/config/ConfigBase.py
Normal file
@ -0,0 +1,584 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import os
|
||||
import re
|
||||
import six
|
||||
import logging
|
||||
import yaml
|
||||
|
||||
from .. import plugins
|
||||
from ..AppriseAsset import AppriseAsset
|
||||
from ..URLBase import URLBase
|
||||
from ..common import ConfigFormat
|
||||
from ..common import CONFIG_FORMATS
|
||||
from ..utils import GET_SCHEMA_RE
|
||||
from ..utils import parse_list
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ConfigBase(URLBase):
|
||||
"""
|
||||
This is the base class for all supported configuration sources
|
||||
"""
|
||||
|
||||
# The Default Encoding to use if not otherwise detected
|
||||
encoding = 'utf-8'
|
||||
|
||||
# The default expected configuration format unless otherwise
|
||||
# detected by the sub-modules
|
||||
default_config_format = ConfigFormat.TEXT
|
||||
|
||||
# This is only set if the user overrides the config format on the URL
|
||||
# this should always initialize itself as None
|
||||
config_format = None
|
||||
|
||||
# Don't read any more of this amount of data into memory as there is no
|
||||
# reason we should be reading in more. This is more of a safeguard than
|
||||
# anything else. 128KB (131072B)
|
||||
max_buffer_size = 131072
|
||||
|
||||
# Logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
"""
|
||||
Initialize some general logging and common server arguments that will
|
||||
keep things consistent when working with the configurations that
|
||||
inherit this class.
|
||||
|
||||
"""
|
||||
|
||||
super(ConfigBase, self).__init__(**kwargs)
|
||||
|
||||
# Tracks previously loaded content for speed
|
||||
self._cached_servers = None
|
||||
|
||||
if 'encoding' in kwargs:
|
||||
# Store the encoding
|
||||
self.encoding = kwargs.get('encoding')
|
||||
|
||||
if 'format' in kwargs:
|
||||
# Store the enforced config format
|
||||
self.config_format = kwargs.get('format').lower()
|
||||
|
||||
if self.config_format not in CONFIG_FORMATS:
|
||||
# Simple error checking
|
||||
err = 'An invalid config format ({}) was specified.'.format(
|
||||
self.config_format)
|
||||
self.logger.warning(err)
|
||||
raise TypeError(err)
|
||||
|
||||
return
|
||||
|
||||
def servers(self, asset=None, cache=True, **kwargs):
|
||||
"""
|
||||
Reads the loaded configuration and returns all of the services
|
||||
that could be parsed and loaded.
|
||||
|
||||
"""
|
||||
|
||||
if cache is True and isinstance(self._cached_servers, list):
|
||||
# We already have cached results to return; use them
|
||||
return self._cached_servers
|
||||
|
||||
# Our response object
|
||||
self._cached_servers = list()
|
||||
|
||||
# read() causes the child class to do whatever it takes for the
|
||||
# config plugin to load the data source and return unparsed content
|
||||
# None is returned if there was an error or simply no data
|
||||
content = self.read(**kwargs)
|
||||
if not isinstance(content, six.string_types):
|
||||
# Nothing more to do
|
||||
return list()
|
||||
|
||||
# Our configuration format uses a default if one wasn't detected
|
||||
# or enforced.
|
||||
config_format = \
|
||||
self.default_config_format \
|
||||
if self.config_format is None else self.config_format
|
||||
|
||||
# Dynamically load our parse_ function based on our config format
|
||||
fn = getattr(ConfigBase, 'config_parse_{}'.format(config_format))
|
||||
|
||||
# Execute our config parse function which always returns a list
|
||||
self._cached_servers.extend(fn(content=content, asset=asset))
|
||||
|
||||
return self._cached_servers
|
||||
|
||||
def read(self):
|
||||
"""
|
||||
This method should be implemented by the child classes
|
||||
|
||||
"""
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url, verify_host=True):
|
||||
"""Parses the URL and returns it broken apart into a dictionary.
|
||||
|
||||
This is very specific and customized for Apprise.
|
||||
|
||||
|
||||
Args:
|
||||
url (str): The URL you want to fully parse.
|
||||
verify_host (:obj:`bool`, optional): a flag kept with the parsed
|
||||
URL which some child classes will later use to verify SSL
|
||||
keys (if SSL transactions take place). Unless under very
|
||||
specific circumstances, it is strongly recommended that
|
||||
you leave this default value set to True.
|
||||
|
||||
Returns:
|
||||
A dictionary is returned containing the URL fully parsed if
|
||||
successful, otherwise None is returned.
|
||||
"""
|
||||
|
||||
results = URLBase.parse_url(url, verify_host=verify_host)
|
||||
|
||||
if not results:
|
||||
# We're done; we failed to parse our url
|
||||
return results
|
||||
|
||||
# Allow overriding the default config format
|
||||
if 'format' in results['qsd']:
|
||||
results['format'] = results['qsd'].get('format')
|
||||
if results['format'] not in CONFIG_FORMATS:
|
||||
URLBase.logger.warning(
|
||||
'Unsupported format specified {}'.format(
|
||||
results['format']))
|
||||
del results['format']
|
||||
|
||||
# Defines the encoding of the payload
|
||||
if 'encoding' in results['qsd']:
|
||||
results['encoding'] = results['qsd'].get('encoding')
|
||||
|
||||
return results
|
||||
|
||||
@staticmethod
|
||||
def config_parse_text(content, asset=None):
|
||||
"""
|
||||
Parse the specified content as though it were a simple text file only
|
||||
containing a list of URLs. Return a list of loaded notification plugins
|
||||
|
||||
Optionally associate an asset with the notification.
|
||||
|
||||
The file syntax is:
|
||||
|
||||
#
|
||||
# pound/hashtag allow for line comments
|
||||
#
|
||||
# One or more tags can be identified using commas (,) to separate
|
||||
# them.
|
||||
<Tag(s)>=<URL>
|
||||
|
||||
# Or you can use this format (no tags associated)
|
||||
<URL>
|
||||
|
||||
"""
|
||||
# For logging, track the line number
|
||||
line = 0
|
||||
|
||||
response = list()
|
||||
|
||||
# Define what a valid line should look like
|
||||
valid_line_re = re.compile(
|
||||
r'^\s*(?P<line>([;#]+(?P<comment>.*))|'
|
||||
r'(\s*(?P<tags>[^=]+)=|=)?\s*'
|
||||
r'(?P<url>[a-z0-9]{2,9}://.*))?$', re.I)
|
||||
|
||||
try:
|
||||
# split our content up to read line by line
|
||||
content = re.split(r'\r*\n', content)
|
||||
|
||||
except TypeError:
|
||||
# content was not expected string type
|
||||
logger.error('Invalid apprise text data specified')
|
||||
return list()
|
||||
|
||||
for entry in content:
|
||||
# Increment our line count
|
||||
line += 1
|
||||
|
||||
result = valid_line_re.match(entry)
|
||||
if not result:
|
||||
# Invalid syntax
|
||||
logger.error(
|
||||
'Invalid apprise text format found '
|
||||
'{} on line {}.'.format(entry, line))
|
||||
|
||||
# Assume this is a file we shouldn't be parsing. Its owner
|
||||
# can read the error printed to screen and take action
|
||||
# otherwise.
|
||||
return list()
|
||||
|
||||
if result.group('comment') or not result.group('line'):
|
||||
# Comment/empty line; do nothing
|
||||
continue
|
||||
|
||||
# Store our url read in
|
||||
url = result.group('url')
|
||||
|
||||
# swap hash (#) tag values with their URL-encoded equivalent (%23)
|
||||
_url = url.replace('/#', '/%23')
|
||||
|
||||
# Attempt to acquire the schema at the very least to allow our
|
||||
# plugins to determine if they can make a better
|
||||
# interpretation of a URL geared for them
|
||||
schema = GET_SCHEMA_RE.match(_url)
|
||||
|
||||
# Ensure our schema is always in lower case
|
||||
schema = schema.group('schema').lower()
|
||||
|
||||
# Some basic validation
|
||||
if schema not in plugins.SCHEMA_MAP:
|
||||
logger.warning(
|
||||
'Unsupported schema {} on line {}.'.format(
|
||||
schema, line))
|
||||
continue
|
||||
|
||||
# Parse our url details of the server object as dictionary
|
||||
# containing all of the information parsed from our URL
|
||||
results = plugins.SCHEMA_MAP[schema].parse_url(_url)
|
||||
|
||||
if results is None:
|
||||
# Failed to parse the server URL
|
||||
logger.warning(
|
||||
'Unparseable URL {} on line {}.'.format(url, line))
|
||||
continue
|
||||
|
||||
# Build a list of tags to associate with the newly added
|
||||
# notifications if any were set
|
||||
results['tag'] = set(parse_list(result.group('tags')))
|
||||
|
||||
# Prepare our Asset Object
|
||||
results['asset'] = \
|
||||
asset if isinstance(asset, AppriseAsset) else AppriseAsset()
|
||||
|
||||
try:
|
||||
# Attempt to create an instance of our plugin using the
|
||||
# parsed URL information
|
||||
plugin = plugins.SCHEMA_MAP[results['schema']](**results)
|
||||
|
||||
except Exception:
|
||||
# the arguments are invalid or can not be used.
|
||||
logger.warning(
|
||||
'Could not load URL {} on line {}.'.format(
|
||||
url, line))
|
||||
continue
|
||||
|
||||
# if we reach here, we successfully loaded our data
|
||||
response.append(plugin)
|
||||
|
||||
# Return what was loaded
|
||||
return response
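For illustration, a minimal sketch of driving this parser directly with a small TEXT configuration; the URLs below are placeholders, not recommendations:

```python
from apprise.config.ConfigBase import ConfigBase

content = """
# Lines starting with # (or ;) are comments
devops,admin=json://localhost:8080/notify
mailto://user:password@gmail.com
"""

servers = ConfigBase.config_parse_text(content)
for server in servers:
    # Each entry is an instantiated notification plugin with its tags applied
    print(type(server).__name__, server.tags)
```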
|
||||
|
||||
@staticmethod
|
||||
def config_parse_yaml(content, asset=None):
|
||||
"""
|
||||
Parse the specified content as though it were a yaml file
|
||||
specifically formatted for apprise. Return a list of loaded
|
||||
notification plugins.
|
||||
|
||||
Optionally associate an asset with the notification.
|
||||
|
||||
"""
|
||||
response = list()
|
||||
|
||||
try:
|
||||
# Load our data
|
||||
result = yaml.load(content)
|
||||
|
||||
except (AttributeError, yaml.error.MarkedYAMLError) as e:
|
||||
# Invalid content
|
||||
logger.error('Invalid apprise yaml data specified.')
|
||||
logger.debug('YAML Exception:{}{}'.format(os.linesep, e))
|
||||
return list()
|
||||
|
||||
if not isinstance(result, dict):
|
||||
# Invalid content
|
||||
logger.error('Invalid apprise yaml structure specified')
|
||||
return list()
|
||||
|
||||
# YAML Version
|
||||
version = result.get('version', 1)
|
||||
if version != 1:
|
||||
# Invalid syntax
|
||||
logger.error(
|
||||
'Invalid apprise yaml version specified {}.'.format(version))
|
||||
return list()
|
||||
|
||||
#
|
||||
# global asset object
|
||||
#
|
||||
asset = asset if isinstance(asset, AppriseAsset) else AppriseAsset()
|
||||
tokens = result.get('asset', None)
|
||||
if tokens and isinstance(tokens, dict):
|
||||
for k, v in tokens.items():
|
||||
|
||||
if k.startswith('_') or k.endswith('_'):
|
||||
# Entries are considered reserved if they start or end
|
||||
# with an underscore
|
||||
logger.warning('Ignored asset key "{}".'.format(k))
|
||||
continue
|
||||
|
||||
if not (hasattr(asset, k) and
|
||||
isinstance(getattr(asset, k), six.string_types)):
|
||||
# We can't set a function or a non-string value
|
||||
logger.warning('Invalid asset key "{}".'.format(k))
|
||||
continue
|
||||
|
||||
if v is None:
|
||||
# Convert to an empty string
|
||||
v = ''
|
||||
|
||||
if not isinstance(v, six.string_types):
|
||||
# we must set strings with a string
|
||||
logger.warning('Invalid asset value to "{}".'.format(k))
|
||||
continue
|
||||
|
||||
# Set our asset object with the new value
|
||||
setattr(asset, k, v.strip())
|
||||
|
||||
#
|
||||
# global tag root directive
|
||||
#
|
||||
global_tags = set()
|
||||
|
||||
tags = result.get('tag', None)
|
||||
if tags and isinstance(tags, (list, tuple, six.string_types)):
|
||||
# Store any preset tags
|
||||
global_tags = set(parse_list(tags))
|
||||
|
||||
#
|
||||
# urls root directive
|
||||
#
|
||||
urls = result.get('urls', None)
|
||||
if not isinstance(urls, (list, tuple)):
|
||||
# Unsupported
|
||||
logger.error('Missing "urls" directive in apprise yaml.')
|
||||
return list()
|
||||
|
||||
# Iterate over each URL
|
||||
for no, url in enumerate(urls):
|
||||
|
||||
# Our results object is what we use to instantiate our object if
|
||||
# we can. Reset it to an empty list on each iteration
|
||||
results = list()
|
||||
|
||||
if isinstance(url, six.string_types):
|
||||
# We're just a simple URL string
|
||||
|
||||
# swap hash (#) tag values with their URL-encoded equivalent (%23)
|
||||
_url = url.replace('/#', '/%23')
|
||||
|
||||
# Attempt to acquire the schema at the very least to allow our
|
||||
# plugins to determine if they can make a better
|
||||
# interpretation of a URL geared for them
|
||||
schema = GET_SCHEMA_RE.match(_url)
|
||||
if schema is None:
|
||||
logger.warning(
|
||||
'Unsupported schema in urls entry #{}'.format(no))
|
||||
continue
|
||||
|
||||
# Ensure our schema is always in lower case
|
||||
schema = schema.group('schema').lower()
|
||||
|
||||
# Some basic validation
|
||||
if schema not in plugins.SCHEMA_MAP:
|
||||
logger.warning(
|
||||
'Unsupported schema {} in urls entry #{}'.format(
|
||||
schema, no))
|
||||
continue
|
||||
|
||||
# Parse our url details of the server object as dictionary
|
||||
# containing all of the information parsed from our URL
|
||||
_results = plugins.SCHEMA_MAP[schema].parse_url(_url)
|
||||
if _results is None:
|
||||
logger.warning(
|
||||
'Unparseable {} based url; entry #{}'.format(
|
||||
schema, no))
|
||||
continue
|
||||
|
||||
# add our results to our global set
|
||||
results.append(_results)
|
||||
|
||||
elif isinstance(url, dict):
|
||||
# We are a url string with additional unescaped options
|
||||
if six.PY2:
|
||||
_url, tokens = next(url.iteritems())
|
||||
else: # six.PY3
|
||||
_url, tokens = next(iter(url.items()))
|
||||
|
||||
# swap hash (#) tag values with their URL-encoded equivalent (%23)
|
||||
_url = _url.replace('/#', '/%23')
|
||||
|
||||
# Get our schema
|
||||
schema = GET_SCHEMA_RE.match(_url)
|
||||
if schema is None:
|
||||
logger.warning(
|
||||
'Unsupported schema in urls entry #{}'.format(no))
|
||||
continue
|
||||
|
||||
# Ensure our schema is always in lower case
|
||||
schema = schema.group('schema').lower()
|
||||
|
||||
# Some basic validation
|
||||
if schema not in plugins.SCHEMA_MAP:
|
||||
logger.warning(
|
||||
'Unsupported schema {} in urls entry #{}'.format(
|
||||
schema, no))
|
||||
continue
|
||||
|
||||
# Parse our url details of the server object as dictionary
|
||||
# containing all of the information parsed from our URL
|
||||
_results = plugins.SCHEMA_MAP[schema].parse_url(_url)
|
||||
if _results is None:
|
||||
# Setup dictionary
|
||||
_results = {
|
||||
# Minimum requirements
|
||||
'schema': schema,
|
||||
}
|
||||
|
||||
if tokens is not None:
|
||||
# populate and/or override any results populated by
|
||||
# parse_url()
|
||||
for entries in tokens:
|
||||
# Copy ourselves a template of our parsed URL as a base
|
||||
# to work with
|
||||
r = _results.copy()
|
||||
|
||||
# We are a url string with additional unescaped options
|
||||
if isinstance(entries, dict):
|
||||
if six.PY2:
|
||||
_url, tokens = next(url.iteritems())
|
||||
else: # six.PY3
|
||||
_url, tokens = next(iter(url.items()))
|
||||
|
||||
# The schema is a key you just can't over-ride
|
||||
if 'schema' in entries:
|
||||
del entries['schema']
|
||||
|
||||
# Extend our dictionary with our new entries
|
||||
r.update(entries)
|
||||
|
||||
# add our results to our global set
|
||||
results.append(r)
|
||||
|
||||
else:
|
||||
# add our results to our global set
|
||||
results.append(_results)
|
||||
|
||||
else:
|
||||
# Unsupported
|
||||
logger.warning(
|
||||
'Unsupported apprise yaml entry #{}'.format(no))
|
||||
continue
|
||||
|
||||
# Track our entries
|
||||
entry = 0
|
||||
|
||||
while len(results):
|
||||
# Increment our entry count
|
||||
entry += 1
|
||||
|
||||
# Grab our first item
|
||||
_results = results.pop(0)
|
||||
|
||||
# tag is a special keyword that is managed by the Apprise object.
|
||||
# The below ensures our tags are set correctly
|
||||
if 'tag' in _results:
|
||||
# Tidy our list up
|
||||
_results['tag'] = \
|
||||
set(parse_list(_results['tag'])) | global_tags
|
||||
|
||||
else:
|
||||
# Just use the global settings
|
||||
_results['tag'] = global_tags
|
||||
|
||||
# Prepare our Asset Object
|
||||
_results['asset'] = asset
|
||||
|
||||
try:
|
||||
# Attempt to create an instance of our plugin using the
|
||||
# parsed URL information
|
||||
plugin = plugins.SCHEMA_MAP[_results['schema']](**_results)
|
||||
|
||||
except Exception:
|
||||
# the arguments are invalid or can not be used.
|
||||
logger.warning(
|
||||
'Could not load apprise yaml entry #{}, item #{}'
|
||||
.format(no, entry))
|
||||
continue
|
||||
|
||||
# if we reach here, we successfully loaded our data
|
||||
response.append(plugin)
|
||||
|
||||
return response
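A hedged sketch of a YAML document this parser accepts; the version, asset, tag and urls keys mirror the handling above, and the URLs themselves are placeholders:

```python
from apprise.config.ConfigBase import ConfigBase

content = """
version: 1
asset:
  app_id: MyApp
tag: admin
urls:
  - mailto://user:password@gmail.com
  - json://localhost:
      - tag: devops
"""

servers = ConfigBase.config_parse_yaml(content)
for server in servers:
    # The global 'admin' tag is merged with any per-entry tags (e.g. 'devops')
    print(type(server).__name__, server.tags)
```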
|
||||
|
||||
def pop(self, index):
|
||||
"""
|
||||
Removes an indexed Notification Service from the stack and
|
||||
returns it.
|
||||
"""
|
||||
|
||||
if not isinstance(self._cached_servers, list):
|
||||
# Generate ourselves a list of content we can pull from
|
||||
self.servers(cache=True)
|
||||
|
||||
# Pop the element off of the stack
|
||||
return self._cached_servers.pop(index)
|
||||
|
||||
def __getitem__(self, index):
|
||||
"""
|
||||
Returns the indexed server entry associated with the loaded
|
||||
notification servers
|
||||
"""
|
||||
if not isinstance(self._cached_servers, list):
|
||||
# Generate ourselves a list of content we can pull from
|
||||
self.servers(cache=True)
|
||||
|
||||
return self._cached_servers[index]
|
||||
|
||||
def __iter__(self):
|
||||
"""
|
||||
Returns an iterator to our server list
|
||||
"""
|
||||
if not isinstance(self._cached_servers, list):
|
||||
# Generate ourselves a list of content we can pull from
|
||||
self.servers(cache=True)
|
||||
|
||||
return iter(self._cached_servers)
|
||||
|
||||
def __len__(self):
|
||||
"""
|
||||
Returns the total number of servers loaded
|
||||
"""
|
||||
if not isinstance(self._cached_servers, list):
|
||||
# Generate ourselves a list of content we can pull from
|
||||
self.servers(cache=True)
|
||||
|
||||
return len(self._cached_servers)
|
164 apprise/config/ConfigFile.py (Normal file)
@ -0,0 +1,164 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import io
|
||||
import os
|
||||
from .ConfigBase import ConfigBase
|
||||
from ..common import ConfigFormat
|
||||
|
||||
|
||||
class ConfigFile(ConfigBase):
|
||||
"""
|
||||
A wrapper for File based configuration sources
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'Local File'
|
||||
|
||||
# The default protocol
|
||||
protocol = 'file'
|
||||
|
||||
def __init__(self, path, **kwargs):
|
||||
"""
|
||||
Initialize File Object
|
||||
|
||||
path specifies the configuration file to load and parse when
read() is called.
|
||||
|
||||
"""
|
||||
super(ConfigFile, self).__init__(**kwargs)
|
||||
|
||||
# Store our file path as it was set
|
||||
self.path = path
|
||||
|
||||
return
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'encoding': self.encoding,
|
||||
}
|
||||
|
||||
if self.config_format:
|
||||
# A format was enforced; make sure it's passed back with the url
|
||||
args['format'] = self.config_format
|
||||
|
||||
return 'file://{path}?{args}'.format(
|
||||
path=self.quote(self.path),
|
||||
args=self.urlencode(args),
|
||||
)
|
||||
|
||||
def read(self, **kwargs):
|
||||
"""
|
||||
Perform retrieval of the configuration based on the specified request
|
||||
"""
|
||||
|
||||
response = None
|
||||
|
||||
path = os.path.expanduser(self.path)
|
||||
try:
|
||||
if self.max_buffer_size > 0 and \
|
||||
os.path.getsize(path) > self.max_buffer_size:
|
||||
|
||||
# Content exceeds maximum buffer size
|
||||
self.logger.error(
|
||||
'File size exceeds maximum allowable buffer length'
|
||||
' ({}KB).'.format(int(self.max_buffer_size / 1024)))
|
||||
return None
|
||||
|
||||
except OSError:
|
||||
# getsize() can throw this exception if the file is missing
|
||||
# and/or simply isn't accessible
|
||||
self.logger.debug(
|
||||
'File is not accessible: {}'.format(path))
|
||||
return None
|
||||
|
||||
# Always call throttle before any server i/o is made
|
||||
self.throttle()
|
||||
|
||||
try:
|
||||
# Python 3 just supports open(), however to remain compatible with
|
||||
# Python 2, we use the io module
|
||||
with io.open(path, "rt", encoding=self.encoding) as f:
|
||||
# Store our content for parsing
|
||||
response = f.read()
|
||||
|
||||
except (ValueError, UnicodeDecodeError):
|
||||
# A result of our strict encoding check; if we receive this
|
||||
# then the file we're opening is not something we can
|
||||
# understand the encoding of..
|
||||
|
||||
self.logger.error(
|
||||
'File not using expected encoding ({}) : {}'.format(
|
||||
self.encoding, path))
|
||||
return None
|
||||
|
||||
except (IOError, OSError):
|
||||
# IOError is present for backwards compatibility with Python
|
||||
# versions older than 3.3; >= 3.3 throws OSError now.
|
||||
|
||||
# Could not open and/or read the file; this is not a problem since
|
||||
# we scan a lot of default paths.
|
||||
self.logger.debug(
|
||||
'File can not be opened for read: {}'.format(path))
|
||||
return None
|
||||
|
||||
# Detect config format based on file extension if it isn't already
|
||||
# enforced
|
||||
if self.config_format is None and \
|
||||
re.match(r'^.*\.ya?ml\s*$', path, re.I) is not None:
|
||||
|
||||
# YAML Filename Detected
|
||||
self.default_config_format = ConfigFormat.YAML
|
||||
|
||||
self.logger.debug('Read Config File: %s' % (path))
|
||||
|
||||
# Return our response object
|
||||
return response
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL so that we can handle all different file paths
|
||||
and return it as our path object
|
||||
|
||||
"""
|
||||
|
||||
results = ConfigBase.parse_url(url)
|
||||
if not results:
|
||||
# We're done early; it's not a good URL
|
||||
return results
|
||||
|
||||
match = re.match(r'file://(?P<path>[^?]+)(\?.*)?', url, re.I)
|
||||
if not match:
|
||||
return None
|
||||
|
||||
results['path'] = match.group('path')
|
||||
return results
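A hedged usage sketch of the class above; the path and format are examples and the file is assumed to already exist:

```python
from apprise.config.ConfigFile import ConfigFile

# Point the object at a local configuration file
config = ConfigFile(path='~/.apprise', format='text')

# servers() reads and parses the file, returning notification plugin instances;
# iterating the object itself (or calling len() on it) works off the same cache
for server in config.servers():
    print(type(server).__name__, server.tags)
```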
|
283 apprise/config/ConfigHTTP.py (Normal file)
@ -0,0 +1,283 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import six
|
||||
import requests
|
||||
from .ConfigBase import ConfigBase
|
||||
from ..common import ConfigFormat
|
||||
|
||||
# Support YAML formats
|
||||
# text/yaml
|
||||
# text/x-yaml
|
||||
# application/yaml
|
||||
# application/x-yaml
|
||||
MIME_IS_YAML = re.compile('(text|application)/(x-)?yaml', re.I)
|
||||
|
||||
# Support TEXT formats
|
||||
# text/plain
|
||||
# text/html
|
||||
MIME_IS_TEXT = re.compile('text/(plain|html)', re.I)
|
||||
|
||||
|
||||
class ConfigHTTP(ConfigBase):
|
||||
"""
|
||||
A wrapper for HTTP based configuration sources
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'HTTP'
|
||||
|
||||
# The default protocol
|
||||
protocol = 'http'
|
||||
|
||||
# The default secure protocol
|
||||
secure_protocol = 'https'
|
||||
|
||||
# The maximum number of seconds to wait for a connection to be established
|
||||
# before out-right just giving up
|
||||
connection_timeout_sec = 5.0
|
||||
|
||||
# If an HTTP error occurs, define the number of characters you still want
|
||||
# to read back. This is useful for debugging purposes, but nothing else.
|
||||
# The idea behind enforcing this kind of restriction is to prevent abuse
|
||||
# from queries to services that may be untrusted.
|
||||
max_error_buffer_size = 2048
|
||||
|
||||
def __init__(self, headers=None, **kwargs):
|
||||
"""
|
||||
Initialize HTTP Object
|
||||
|
||||
headers can be a dictionary of key/value pairs that you want to
|
||||
additionally include as part of the server headers to post with
|
||||
|
||||
"""
|
||||
super(ConfigHTTP, self).__init__(**kwargs)
|
||||
|
||||
if self.secure:
|
||||
self.schema = 'https'
|
||||
|
||||
else:
|
||||
self.schema = 'http'
|
||||
|
||||
self.fullpath = kwargs.get('fullpath')
|
||||
if not isinstance(self.fullpath, six.string_types):
|
||||
self.fullpath = '/'
|
||||
|
||||
self.headers = {}
|
||||
if headers:
|
||||
# Store our extra headers
|
||||
self.headers.update(headers)
|
||||
|
||||
return
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Returns the URL built dynamically based on specified arguments.
|
||||
"""
|
||||
|
||||
# Define any arguments set
|
||||
args = {
|
||||
'encoding': self.encoding,
|
||||
}
|
||||
|
||||
if self.config_format:
|
||||
# A format was enforced; make sure it's passed back with the url
|
||||
args['format'] = self.config_format
|
||||
|
||||
# Append our headers into our args
|
||||
args.update({'+{}'.format(k): v for k, v in self.headers.items()})
|
||||
|
||||
# Determine Authentication
|
||||
auth = ''
|
||||
if self.user and self.password:
|
||||
auth = '{user}:{password}@'.format(
|
||||
user=self.quote(self.user, safe=''),
|
||||
password=self.quote(self.password, safe=''),
|
||||
)
|
||||
elif self.user:
|
||||
auth = '{user}@'.format(
|
||||
user=self.quote(self.user, safe=''),
|
||||
)
|
||||
|
||||
default_port = 443 if self.secure else 80
|
||||
|
||||
return '{schema}://{auth}{hostname}{port}/?{args}'.format(
|
||||
schema=self.secure_protocol if self.secure else self.protocol,
|
||||
auth=auth,
|
||||
hostname=self.host,
|
||||
port='' if self.port is None or self.port == default_port
|
||||
else ':{}'.format(self.port),
|
||||
args=self.urlencode(args),
|
||||
)
|
||||
|
||||
def read(self, **kwargs):
|
||||
"""
|
||||
Perform retrieval of the configuration based on the specified request
|
||||
"""
|
||||
|
||||
# Prepare our request headers
|
||||
headers = {
|
||||
'User-Agent': self.app_id,
|
||||
}
|
||||
|
||||
# Apply any/all header over-rides defined
|
||||
headers.update(self.headers)
|
||||
|
||||
auth = None
|
||||
if self.user:
|
||||
auth = (self.user, self.password)
|
||||
|
||||
url = '%s://%s' % (self.schema, self.host)
|
||||
if isinstance(self.port, int):
|
||||
url += ':%d' % self.port
|
||||
|
||||
url += self.fullpath
|
||||
|
||||
self.logger.debug('HTTP POST URL: %s (cert_verify=%r)' % (
|
||||
url, self.verify_certificate,
|
||||
))
|
||||
|
||||
# Prepare our response object
|
||||
response = None
|
||||
|
||||
# Where our request object will temporarily live.
|
||||
r = None
|
||||
|
||||
# Always call throttle before any remote server i/o is made
|
||||
self.throttle()
|
||||
|
||||
try:
|
||||
# Make our request
|
||||
r = requests.post(
|
||||
url,
|
||||
headers=headers,
|
||||
auth=auth,
|
||||
verify=self.verify_certificate,
|
||||
timeout=self.connection_timeout_sec,
|
||||
stream=True,
|
||||
)
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
status_str = \
|
||||
ConfigBase.http_response_code_lookup(r.status_code)
|
||||
self.logger.error(
|
||||
'Failed to get HTTP configuration: '
|
||||
'{}{} error={}.'.format(
|
||||
status_str,
|
||||
',' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
# Display payload for debug information only; Don't read any
|
||||
# more than the first X bytes since we're potentially accessing
|
||||
# content from untrusted servers.
|
||||
if self.max_error_buffer_size > 0:
|
||||
self.logger.debug(
|
||||
'Response Details:\r\n{}'.format(
|
||||
r.content[0:self.max_error_buffer_size]))
|
||||
|
||||
# Close out our connection if it exists to eliminate any
|
||||
# potential inefficiencies with the Request connection pool as
|
||||
# documented on their site when using the stream=True option.
|
||||
r.close()
|
||||
|
||||
# Return None (signifying a failure)
|
||||
return None
|
||||
|
||||
# Store our response
|
||||
if self.max_buffer_size > 0 and \
|
||||
r.headers['Content-Length'] > self.max_buffer_size:
|
||||
|
||||
# Provide warning of data truncation
|
||||
self.logger.error(
|
||||
'HTTP config response exceeds maximum buffer length '
|
||||
'({}KB);'.format(int(self.max_buffer_size / 1024)))
|
||||
|
||||
# Close out our connection if it exists to eliminate any
|
||||
# potential inefficiencies with the Request connection pool as
|
||||
# documented on their site when using the stream=True option.
|
||||
r.close()
|
||||
|
||||
# Return None - buffer exceeded
|
||||
return None
|
||||
|
||||
else:
|
||||
# Store our result
|
||||
response = r.content
|
||||
|
||||
# Detect config format based on mime if the format isn't
|
||||
# already enforced
|
||||
content_type = r.headers.get(
|
||||
'Content-Type', 'application/octet-stream')
|
||||
if self.config_format is None and content_type:
|
||||
if MIME_IS_YAML.match(content_type) is not None:
|
||||
|
||||
# YAML data detected based on header content
|
||||
self.default_config_format = ConfigFormat.YAML
|
||||
|
||||
elif MIME_IS_TEXT.match(content_type) is not None:
|
||||
|
||||
# TEXT data detected based on header content
|
||||
self.default_config_format = ConfigFormat.TEXT
|
||||
|
||||
# else do nothing; fall back to whatever default is
|
||||
# already set.
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occurred retrieving HTTP '
|
||||
'configuration from %s.' % self.host)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Return None (signifying a failure)
|
||||
return None
|
||||
|
||||
# Close out our connection if it exists to eliminate any potential
|
||||
# inefficiencies with the Request connection pool as documented on
|
||||
# their site when using the stream=True option.
|
||||
r.close()
|
||||
|
||||
# Return our response object
|
||||
return response
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url):
|
||||
"""
|
||||
Parses the URL and returns enough arguments that can allow
|
||||
us to substantiate this object.
|
||||
|
||||
"""
|
||||
results = ConfigBase.parse_url(url)
|
||||
|
||||
if not results:
|
||||
# We're done early as we couldn't load the results
|
||||
return results
|
||||
|
||||
# Add our headers (which the user can potentially over-ride if they
|
||||
# wish) to our returned result set
|
||||
results['headers'] = results['qsd-']
|
||||
results['headers'].update(results['qsd+'])
|
||||
|
||||
return results
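And a hedged sketch of pulling a remote configuration through this class; the hostname, path and query arguments are placeholders:

```python
from apprise.config.ConfigHTTP import ConfigHTTP

# parse_url() yields the keyword arguments needed to build the object
results = ConfigHTTP.parse_url('http://localhost:8080/config?format=yaml')
if results:
    config = ConfigHTTP(**results)

    # read() performs the HTTP request; servers() parses what came back
    for server in config.servers():
        print(type(server).__name__, server.tags)
```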
|
87 apprise/config/__init__.py (Normal file)
@ -0,0 +1,87 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import sys
|
||||
import six
|
||||
|
||||
from .ConfigHTTP import ConfigHTTP
|
||||
from .ConfigFile import ConfigFile
|
||||
|
||||
# Maintains a mapping of all of the configuration services
|
||||
SCHEMA_MAP = {}
|
||||
|
||||
|
||||
__all__ = [
|
||||
# Configuration Services
|
||||
'ConfigFile', 'ConfigHTTP',
|
||||
]
|
||||
|
||||
|
||||
# Load our Lookup Matrix
|
||||
def __load_matrix():
|
||||
"""
|
||||
Dynamically load our schema map; this allows us to gracefully
|
||||
skip over modules we simply don't have the dependencies for.
|
||||
|
||||
"""
|
||||
|
||||
thismodule = sys.modules[__name__]
|
||||
|
||||
# Iterate over this module's entries to add each plugin's mapping to our hash table
|
||||
for entry in dir(thismodule):
|
||||
|
||||
# Get our plugin
|
||||
plugin = getattr(thismodule, entry)
|
||||
if not hasattr(plugin, 'app_id'): # pragma: no branch
|
||||
# Filter out non-notification modules
|
||||
continue
|
||||
|
||||
# Load protocol(s) if defined
|
||||
proto = getattr(plugin, 'protocol', None)
|
||||
if isinstance(proto, six.string_types):
|
||||
if proto not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[proto] = plugin
|
||||
|
||||
elif isinstance(proto, (set, list, tuple)):
|
||||
# Support iterables list types
|
||||
for p in proto:
|
||||
if p not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[p] = plugin
|
||||
|
||||
# Load secure protocol(s) if defined
|
||||
protos = getattr(plugin, 'secure_protocol', None)
|
||||
if isinstance(protos, six.string_types):
|
||||
if protos not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[protos] = plugin
|
||||
|
||||
if isinstance(protos, (set, list, tuple)):
|
||||
# Support iterables list types
|
||||
for p in protos:
|
||||
if p not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[p] = plugin
|
||||
|
||||
|
||||
# Dynamically build our module
|
||||
__load_matrix()
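For illustration, a small hedged sketch of how the resulting SCHEMA_MAP might be consulted to turn a configuration URL's schema into the class that should handle it:

```python
from apprise.config import SCHEMA_MAP

# 'file' is registered by ConfigFile; 'http' and 'https' by ConfigHTTP
local_cls = SCHEMA_MAP.get('file')    # -> ConfigFile
remote_cls = SCHEMA_MAP.get('https')  # -> ConfigHTTP
```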
|
@ -24,26 +24,8 @@
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import logging
|
||||
from time import sleep
|
||||
from datetime import datetime
|
||||
|
||||
try:
|
||||
# Python 2.7
|
||||
from urllib import unquote as _unquote
|
||||
from urllib import quote as _quote
|
||||
from urllib import urlencode as _urlencode
|
||||
|
||||
except ImportError:
|
||||
# Python 3.x
|
||||
from urllib.parse import unquote as _unquote
|
||||
from urllib.parse import quote as _quote
|
||||
from urllib.parse import urlencode as _urlencode
|
||||
|
||||
from ..utils import parse_url
|
||||
from ..utils import parse_bool
|
||||
from ..utils import parse_list
|
||||
from ..utils import is_hostname
|
||||
from ..URLBase import URLBase
|
||||
from ..common import NotifyType
|
||||
from ..common import NOTIFY_TYPES
|
||||
from ..common import NotifyFormat
|
||||
@ -51,67 +33,23 @@ from ..common import NOTIFY_FORMATS
|
||||
from ..common import OverflowMode
|
||||
from ..common import OVERFLOW_MODES
|
||||
|
||||
from ..AppriseAsset import AppriseAsset
|
||||
|
||||
# use sax first because it's faster
|
||||
from xml.sax.saxutils import escape as sax_escape
|
||||
|
||||
|
||||
HTTP_ERROR_MAP = {
|
||||
400: 'Bad Request - Unsupported Parameters.',
|
||||
401: 'Verification Failed.',
|
||||
404: 'Page not found.',
|
||||
405: 'Method not allowed.',
|
||||
500: 'Internal server error.',
|
||||
503: 'Servers are overloaded.',
|
||||
}
|
||||
|
||||
# HTML New Line Delimiter
|
||||
NOTIFY_NEWLINE = '\r\n'
|
||||
|
||||
# Used to break a path list into parts
|
||||
PATHSPLIT_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
|
||||
|
||||
# Regular expression retrieved from:
|
||||
# http://www.regular-expressions.info/email.html
|
||||
IS_EMAIL_RE = re.compile(
|
||||
r"((?P<label>[^+]+)\+)?"
|
||||
r"(?P<userid>[a-z0-9$%=_~-]+"
|
||||
r"(?:\.[a-z0-9$%+=_~-]+)"
|
||||
r"*)@(?P<domain>(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+"
|
||||
r"[a-z0-9](?:[a-z0-9-]*"
|
||||
r"[a-z0-9]))?",
|
||||
re.IGNORECASE,
|
||||
)
|
||||
|
||||
|
||||
class NotifyBase(object):
|
||||
class NotifyBase(URLBase):
|
||||
"""
|
||||
This is the base class for all notification services
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = None
|
||||
|
||||
# The services URL
|
||||
service_url = None
|
||||
|
||||
# The default simple (insecure) protocol
|
||||
# all inheriting entries must provide their protocol lookup
|
||||
# protocol:// (in this example they would specify 'protocol')
|
||||
protocol = None
|
||||
|
||||
# The default secure protocol
|
||||
# all inheriting entries must provide their protocol lookup
|
||||
# protocols:// (in this example they would specify 'protocols')
|
||||
# This value can be the same as the defined protocol.
|
||||
secure_protocol = None
|
||||
|
||||
# A URL that takes you to the setup/help of the specific protocol
|
||||
setup_url = None
|
||||
|
||||
# Most servers do not like more than 1 request per 5 seconds, so 5.5 gives
|
||||
# us a safe play range.
|
||||
# us a safe play range. Override the one defined already in the URLBase
|
||||
request_rate_per_sec = 5.5
|
||||
|
||||
# Allows the user to specify the NotifyImageSize object
|
||||
@ -136,40 +74,14 @@ class NotifyBase(object):
|
||||
# Default Overflow Mode
|
||||
overflow_mode = OverflowMode.UPSTREAM
|
||||
|
||||
# Maintain a set of tags to associate with this specific notification
|
||||
tags = set()
|
||||
|
||||
# Logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
"""
|
||||
Initialize some general logging and common server arguments that will
|
||||
keep things consistent when working with the notifiers that will
|
||||
inherit this class.
|
||||
Initialize some general configuration that will keep things consistent
|
||||
when working with the notifiers that will inherit this class.
|
||||
|
||||
"""
|
||||
|
||||
# Prepare our Assets
|
||||
self.asset = AppriseAsset()
|
||||
|
||||
# Certificate Verification (for SSL calls); default to being enabled
|
||||
self.verify_certificate = kwargs.get('verify', True)
|
||||
|
||||
# Secure Mode
|
||||
self.secure = kwargs.get('secure', False)
|
||||
|
||||
self.host = kwargs.get('host', '')
|
||||
self.port = kwargs.get('port')
|
||||
if self.port:
|
||||
try:
|
||||
self.port = int(self.port)
|
||||
|
||||
except (TypeError, ValueError):
|
||||
self.port = None
|
||||
|
||||
self.user = kwargs.get('user')
|
||||
self.password = kwargs.get('password')
|
||||
super(NotifyBase, self).__init__(**kwargs)
|
||||
|
||||
if 'format' in kwargs:
|
||||
# Store the specified format if specified
|
||||
@ -197,53 +109,6 @@ class NotifyBase(object):
|
||||
# Provide override
|
||||
self.overflow_mode = overflow
|
||||
|
||||
if 'tag' in kwargs:
|
||||
# We want to associate some tags with our notification service.
|
||||
# the code below gets the 'tag' argument if defined, otherwise
|
||||
# it just falls back to whatever was already defined globally
|
||||
self.tags = set(parse_list(kwargs.get('tag', self.tags)))
|
||||
|
||||
# Tracks the time any i/o was made to the remote server. This value
|
||||
# is automatically set and controlled through the throttle() call.
|
||||
self._last_io_datetime = None
|
||||
|
||||
def throttle(self, last_io=None):
|
||||
"""
|
||||
A common throttle control
|
||||
"""
|
||||
|
||||
if last_io is not None:
|
||||
# Assume specified last_io
|
||||
self._last_io_datetime = last_io
|
||||
|
||||
# Get ourselves a reference time of 'now'
|
||||
reference = datetime.now()
|
||||
|
||||
if self._last_io_datetime is None:
|
||||
# Set time to 'now' and no need to throttle
|
||||
self._last_io_datetime = reference
|
||||
return
|
||||
|
||||
if self.request_rate_per_sec <= 0.0:
|
||||
# We're done if there is no throttle limit set
|
||||
return
|
||||
|
||||
# If we reach here, we need to do additional logic.
|
||||
# If the time elapsed since the last recorded i/o is less than
|
||||
# the defined request_rate_per_sec then we need to throttle for the
|
||||
# remaining balance of this time.
|
||||
|
||||
elapsed = (reference - self._last_io_datetime).total_seconds()
|
||||
|
||||
if elapsed < self.request_rate_per_sec:
|
||||
self.logger.debug('Throttling for {}s...'.format(
|
||||
self.request_rate_per_sec - elapsed))
|
||||
sleep(self.request_rate_per_sec - elapsed)
|
||||
|
||||
# Update our timestamp before we leave
|
||||
self._last_io_datetime = reference
|
||||
return
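A small hedged sketch of the intended behaviour; the ability to instantiate a bare subclass this way, and the import path used, are assumptions made for illustration, with request_rate_per_sec left at the 5.5 second default defined above:

```python
from apprise.plugins.NotifyBase import NotifyBase

class DemoNotify(NotifyBase):
    """A do-nothing notifier used purely to observe throttle() timing."""
    def notify(self, *args, **kwargs):
        return True

n = DemoNotify()
n.throttle()   # first call: records the reference time, returns immediately
n.throttle()   # immediate second call: sleeps roughly the remaining 5.5s window
```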
|
||||
|
||||
def image_url(self, notify_type, logo=False, extension=None):
|
||||
"""
|
||||
Returns Image URL if possible
|
||||
@ -407,227 +272,8 @@ class NotifyBase(object):
|
||||
Should perform the actual notification itself.
|
||||
|
||||
"""
|
||||
raise NotImplementedError("send() is implimented by the child class.")
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
Assembles the URL associated with the notification based on the
|
||||
arguments provided.
|
||||
|
||||
"""
|
||||
raise NotImplementedError("url() is implimented by the child class.")
|
||||
|
||||
def __contains__(self, tags):
|
||||
"""
|
||||
Returns true if the tag specified is associated with this notification.
|
||||
|
||||
tag can also be a tuple, set, and/or list
|
||||
|
||||
"""
|
||||
if isinstance(tags, (tuple, set, list)):
|
||||
return bool(set(tags) & self.tags)
|
||||
|
||||
# return any match
|
||||
return tags in self.tags
|
||||
|
||||
@property
|
||||
def app_id(self):
|
||||
return self.asset.app_id
|
||||
|
||||
@property
|
||||
def app_desc(self):
|
||||
return self.asset.app_desc
|
||||
|
||||
@property
|
||||
def app_url(self):
|
||||
return self.asset.app_url
|
||||
|
||||
@staticmethod
|
||||
def escape_html(html, convert_new_lines=False, whitespace=True):
|
||||
"""
|
||||
Takes html text as input and escapes it so that it won't
|
||||
conflict with any xml/html wrapping characters.
|
||||
|
||||
Args:
|
||||
html (str): The HTML code to escape
|
||||
convert_new_lines (:obj:`bool`, optional): escape new lines (\n)
|
||||
whitespace (:obj:`bool`, optional): escape whitespace
|
||||
|
||||
Returns:
|
||||
str: The escaped html
|
||||
"""
|
||||
if not html:
|
||||
# nothing more to do; return object as is
|
||||
return html
|
||||
|
||||
# Escape HTML
|
||||
escaped = sax_escape(html, {"'": "&apos;", "\"": "&quot;"})
|
||||
|
||||
if whitespace:
|
||||
# Tidy up whitespace too
|
||||
escaped = escaped\
|
||||
.replace(u'\t', u'&emsp;')\
|
||||
.replace(u' ', u'&nbsp;')
|
||||
|
||||
if convert_new_lines:
|
||||
return escaped.replace(u'\n', u'&lt;br/&gt;')
|
||||
|
||||
return escaped
|
||||
|
||||
@staticmethod
|
||||
def unquote(content, encoding='utf-8', errors='replace'):
|
||||
"""
|
||||
Replace %xx escapes by their single-character equivalent. The optional
|
||||
encoding and errors parameters specify how to decode percent-encoded
|
||||
sequences.
|
||||
|
||||
Wrapper to Python's unquote while remaining compatible with both
|
||||
Python 2 & 3 since the reference to this function changed between
|
||||
versions.
|
||||
|
||||
Note: errors set to 'replace' means that invalid sequences are
|
||||
replaced by a placeholder character.
|
||||
|
||||
Args:
|
||||
content (str): The quoted URI string you wish to unquote
|
||||
encoding (:obj:`str`, optional): encoding type
|
||||
errors (:obj:`str`, errors): how to handle invalid character found
|
||||
in encoded string (defined by encoding)
|
||||
|
||||
Returns:
|
||||
str: The unquoted URI string
|
||||
"""
|
||||
if not content:
|
||||
return ''
|
||||
|
||||
try:
|
||||
# Python v3.x
|
||||
return _unquote(content, encoding=encoding, errors=errors)
|
||||
|
||||
except TypeError:
|
||||
# Python v2.7
|
||||
return _unquote(content)
|
||||
|
||||
@staticmethod
|
||||
def quote(content, safe='/', encoding=None, errors=None):
|
||||
""" Replaces single character non-ascii characters and URI specific
|
||||
ones by their %xx code.
|
||||
|
||||
Wrapper to Python's quote while remaining compatible with both
|
||||
Python 2 & 3 since the reference to this function changed between
|
||||
versions.
|
||||
|
||||
Args:
|
||||
content (str): The URI string you wish to quote
|
||||
safe (str): non-ascii characters and URI specific ones that you
|
||||
do not wish to escape (if detected). Setting this
|
||||
string to an empty one causes everything to be
|
||||
escaped.
|
||||
encoding (:obj:`str`, optional): encoding type
|
||||
errors (:obj:`str`, errors): how to handle invalid character found
|
||||
in encoded string (defined by encoding)
|
||||
|
||||
Returns:
|
||||
str: The quoted URI string
|
||||
"""
|
||||
if not content:
|
||||
return ''
|
||||
|
||||
try:
|
||||
# Python v3.x
|
||||
return _quote(content, safe=safe, encoding=encoding, errors=errors)
|
||||
|
||||
except TypeError:
|
||||
# Python v2.7
|
||||
return _quote(content, safe=safe)
|
||||
|
||||
@staticmethod
|
||||
def urlencode(query, doseq=False, safe='', encoding=None, errors=None):
|
||||
"""Convert a mapping object or a sequence of two-element tuples
|
||||
|
||||
Wrapper to Python's urlencode while remaining compatible with both
|
||||
Python 2 & 3 since the reference to this function changed between
|
||||
versions.
|
||||
|
||||
The resulting string is a series of key=value pairs separated by '&'
|
||||
characters, where both key and value are quoted using the quote()
|
||||
function.
|
||||
|
||||
Note: If the dictionary entry contains an entry that is set to None
|
||||
it is not included in the final result set. If you want to
|
||||
pass in an empty variable, set it to an empty string.
|
||||
|
||||
Args:
|
||||
query (str): The dictionary to encode
|
||||
doseq (:obj:`bool`, optional): Handle sequences
|
||||
safe (:obj:`str`): non-ascii characters and URI specific ones that
|
||||
you do not wish to escape (if detected). Setting this string
|
||||
to an empty one causes everything to be escaped.
|
||||
encoding (:obj:`str`, optional): encoding type
|
||||
errors (:obj:`str`, errors): how to handle invalid character found
|
||||
in encoded string (defined by encoding)
|
||||
|
||||
Returns:
|
||||
str: The escaped parameters returned as a string
|
||||
"""
|
||||
# Tidy query by eliminating any records set to None
|
||||
_query = {k: v for (k, v) in query.items() if v is not None}
|
||||
try:
|
||||
# Python v3.x
|
||||
return _urlencode(
|
||||
_query, doseq=doseq, safe=safe, encoding=encoding,
|
||||
errors=errors)
|
||||
|
||||
except TypeError:
|
||||
# Python v2.7
|
||||
return _urlencode(_query)
|
||||
|
||||
@staticmethod
|
||||
def split_path(path, unquote=True):
|
||||
"""Splits a URL up into a list object.
|
||||
|
||||
Parses a specified URL and breaks it into a list.
|
||||
|
||||
Args:
|
||||
path (str): The path to split up into a list.
|
||||
unquote (:obj:`bool`, optional): call unquote on each element
|
||||
added to the returned list.
|
||||
|
||||
Returns:
|
||||
list: A list containing all of the elements in the path
|
||||
"""
|
||||
|
||||
if unquote:
|
||||
return PATHSPLIT_LIST_DELIM.split(
|
||||
NotifyBase.unquote(path).lstrip('/'))
|
||||
return PATHSPLIT_LIST_DELIM.split(path.lstrip('/'))
|
||||
|
||||
@staticmethod
|
||||
def is_email(address):
|
||||
"""Determine if the specified entry is an email address
|
||||
|
||||
Args:
|
||||
address (str): The string you want to check.
|
||||
|
||||
Returns:
|
||||
bool: Returns True if the address specified is an email address
|
||||
and False if it isn't.
|
||||
"""
|
||||
|
||||
return IS_EMAIL_RE.match(address) is not None
|
||||
|
||||
@staticmethod
|
||||
def is_hostname(hostname):
|
||||
"""Determine if the specified entry is a hostname
|
||||
|
||||
Args:
|
||||
hostname (str): The string you want to check.
|
||||
|
||||
Returns:
|
||||
bool: Returns True if the hostname specified is in fact a hostname
|
||||
and False if it isn't.
|
||||
"""
|
||||
return is_hostname(hostname)
|
||||
raise NotImplementedError(
|
||||
"send() is not implimented by the child class.")
|
||||
|
||||
@staticmethod
|
||||
def parse_url(url, verify_host=True):
|
||||
@ -648,29 +294,17 @@ class NotifyBase(object):
|
||||
A dictionary is returned containing the URL fully parsed if
|
||||
successful, otherwise None is returned.
|
||||
"""
|
||||
|
||||
results = parse_url(
|
||||
url, default_schema='unknown', verify_host=verify_host)
|
||||
results = URLBase.parse_url(url, verify_host=verify_host)
|
||||
|
||||
if not results:
|
||||
# We're done; we failed to parse our url
|
||||
return results
|
||||
|
||||
# if our URL ends with an 's', then assume our secure flag is set.
|
||||
results['secure'] = (results['schema'][-1] == 's')
|
||||
|
||||
# Support SSL Certificate 'verify' keyword. Default to being enabled
|
||||
results['verify'] = verify_host
|
||||
|
||||
if 'verify' in results['qsd']:
|
||||
results['verify'] = parse_bool(
|
||||
results['qsd'].get('verify', True))
|
||||
|
||||
# Allow overriding the default format
|
||||
if 'format' in results['qsd']:
|
||||
results['format'] = results['qsd'].get('format')
|
||||
if results['format'] not in NOTIFY_FORMATS:
|
||||
NotifyBase.logger.warning(
|
||||
URLBase.logger.warning(
|
||||
'Unsupported format specified {}'.format(
|
||||
results['format']))
|
||||
del results['format']
|
||||
@ -679,17 +313,9 @@ class NotifyBase(object):
|
||||
if 'overflow' in results['qsd']:
|
||||
results['overflow'] = results['qsd'].get('overflow')
|
||||
if results['overflow'] not in OVERFLOW_MODES:
|
||||
NotifyBase.logger.warning(
|
||||
URLBase.logger.warning(
|
||||
'Unsupported overflow specified {}'.format(
|
||||
results['overflow']))
|
||||
del results['overflow']
|
||||
|
||||
# Password overrides
|
||||
if 'pass' in results['qsd']:
|
||||
results['password'] = results['qsd']['pass']
|
||||
|
||||
# User overrides
|
||||
if 'user' in results['qsd']:
|
||||
results['user'] = results['qsd']['user']
|
||||
|
||||
return results
|
||||
|
@ -24,6 +24,7 @@
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import six
|
||||
import requests
|
||||
import hmac
|
||||
from json import dumps
|
||||
@ -37,10 +38,8 @@ except ImportError:
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyType
|
||||
from ..common import NotifyImageSize
|
||||
from ..utils import compat_is_basestring
|
||||
|
||||
# Default to sending to all devices if nothing is specified
|
||||
DEFAULT_TAG = '@all'
|
||||
@ -148,7 +147,7 @@ class NotifyBoxcar(NotifyBase):
|
||||
self.tags.append(DEFAULT_TAG)
|
||||
recipients = []
|
||||
|
||||
elif compat_is_basestring(recipients):
|
||||
elif isinstance(recipients, six.string_types):
|
||||
recipients = [x for x in filter(bool, TAGS_LIST_DELIM.split(
|
||||
recipients,
|
||||
))]
|
||||
@ -243,20 +242,18 @@ class NotifyBoxcar(NotifyBase):
|
||||
|
||||
# Boxcar returns 201 (Created) when successful
|
||||
if r.status_code != requests.codes.created:
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Boxcar notification: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(r.status_code)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Boxcar notification '
|
||||
'(error=%s).' % (
|
||||
r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to send Boxcar notification: '
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
@ -45,7 +45,6 @@ import requests
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
from ..common import NotifyFormat
|
||||
from ..common import NotifyType
|
||||
@ -215,20 +214,19 @@ class NotifyDiscord(NotifyBase):
|
||||
)
|
||||
if r.status_code not in (
|
||||
requests.codes.ok, requests.codes.no_content):
|
||||
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Discord notification: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(r.status_code)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Discord notification '
|
||||
'(error=%s).' % r.status_code)
|
||||
self.logger.warning(
|
||||
'Failed to send Discord notification: '
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
@ -32,6 +32,7 @@ from datetime import datetime
|
||||
from .NotifyBase import NotifyBase
|
||||
from ..common import NotifyFormat
|
||||
from ..common import NotifyType
|
||||
from ..utils import is_email
|
||||
|
||||
|
||||
class WebBaseLogin(object):
|
||||
@ -268,11 +269,11 @@ class NotifyEmail(NotifyBase):
|
||||
self.from_addr = kwargs.get('from', None)
|
||||
self.to_addr = kwargs.get('to', self.from_addr)
|
||||
|
||||
if not NotifyBase.is_email(self.from_addr):
|
||||
if not is_email(self.from_addr):
|
||||
# Parse Source domain based on from_addr
|
||||
raise TypeError('Invalid ~From~ email format: %s' % self.from_addr)
|
||||
|
||||
if not NotifyBase.is_email(self.to_addr):
|
||||
if not is_email(self.to_addr):
|
||||
raise TypeError('Invalid ~To~ email format: %s' % self.to_addr)
|
||||
|
||||
# Now detect the SMTP Server
|
||||
@ -330,7 +331,7 @@ class NotifyEmail(NotifyBase):
|
||||
login_type = WEBBASE_LOOKUP_TABLE[i][2]\
|
||||
.get('login_type', [])
|
||||
|
||||
if NotifyBase.is_email(self.user) and \
|
||||
if is_email(self.user) and \
|
||||
WebBaseLogin.EMAIL not in login_type:
|
||||
# Email specified but login type
|
||||
# not supported; switch it to user id
|
||||
|
@ -35,7 +35,6 @@ from json import dumps
|
||||
from json import loads
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..utils import parse_bool
|
||||
from ..common import NotifyType
|
||||
from .. import __version__ as VERSION
|
||||
@ -168,20 +167,19 @@ class NotifyEmby(NotifyBase):
|
||||
)
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to authenticate user %s details: '
|
||||
'%s (error=%s).' % (
|
||||
self.user,
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(r.status_code)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to authenticate user %s details: '
|
||||
'(error=%s).' % (self.user, r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to authenticate Emby user {} details: '
|
||||
'{}{}error={}.'.format(
|
||||
self.user,
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Emby Response:\r\n%s' % r.text)
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
@ -329,20 +327,19 @@ class NotifyEmby(NotifyBase):
|
||||
)
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to acquire session for user %s details: '
|
||||
'%s (error=%s).' % (
|
||||
self.user,
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(r.status_code)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to acquire session for user %s details: '
|
||||
'(error=%s).' % (self.user, r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to acquire Emby session for user {}: '
|
||||
'{}{}error={}.'.format(
|
||||
self.user,
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Emby Response:\r\n%s' % r.text)
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return sessions
|
||||
@ -412,20 +409,20 @@ class NotifyEmby(NotifyBase):
|
||||
# The below show up if we were 'just' logged out
|
||||
requests.codes.ok,
|
||||
requests.codes.no_content):
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to logoff user %s details: '
|
||||
'%s (error=%s).' % (
|
||||
self.user,
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to logoff user %s details: '
|
||||
'(error=%s).' % (self.user, r.status_code))
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(r.status_code)
|
||||
|
||||
self.logger.debug('Emby Response:\r\n%s' % r.text)
|
||||
self.logger.warning(
|
||||
'Failed to logoff Emby user {}: '
|
||||
'{}{}error={}.'.format(
|
||||
self.user,
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
@ -509,17 +506,19 @@ class NotifyEmby(NotifyBase):
|
||||
if r.status_code not in (
|
||||
requests.codes.ok,
|
||||
requests.codes.no_content):
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Emby notification: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(r.status_code)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Emby notification '
|
||||
'(error=%s).' % (r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to send Emby notification: '
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug(
|
||||
'Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
|
@ -25,7 +25,6 @@
|
||||
import requests
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
from ..common import NotifyType
|
||||
|
||||
@ -99,18 +98,17 @@ class NotifyFaast(NotifyBase):
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Faast notification: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(r.status_code)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Faast notification '
|
||||
'(error=%s).' % (
|
||||
r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to send Faast notification:'
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
@ -43,7 +43,6 @@ import requests
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyType
|
||||
from ..utils import parse_list
|
||||
|
||||
@ -163,8 +162,8 @@ class NotifyIFTTT(NotifyBase):
|
||||
payload = {x.lower(): y for x, y in payload.items()
|
||||
if x not in self.del_tokens}
|
||||
|
||||
# Track our failures
|
||||
error_count = 0
|
||||
# error tracking (used for function return)
|
||||
has_error = False
|
||||
|
||||
# Create a copy of our event list
|
||||
events = list(self.events)
|
||||
@ -202,26 +201,27 @@ class NotifyIFTTT(NotifyBase):
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send IFTTT:%s '
|
||||
'notification: %s (error=%s).' % (
|
||||
event,
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(r.status_code)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send IFTTT:%s '
|
||||
'notification (error=%s).' % (
|
||||
event, r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to send IFTTT notification to {}: '
|
||||
'{}{}error={}.'.format(
|
||||
event,
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.content)
|
||||
error_count += 1
|
||||
self.logger.debug(
|
||||
'Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
else:
|
||||
self.logger.info(
|
||||
'Sent IFTTT notification to Event %s.' % event)
|
||||
'Sent IFTTT notification to %s.' % event)
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
@ -229,9 +229,12 @@ class NotifyIFTTT(NotifyBase):
|
||||
event) + 'notification.'
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
error_count += 1
|
||||
|
||||
return (error_count == 0)
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
return not has_error
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
|
@ -23,14 +23,13 @@
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import six
|
||||
import requests
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
from ..common import NotifyType
|
||||
from ..utils import compat_is_basestring
|
||||
|
||||
|
||||
class NotifyJSON(NotifyBase):
|
||||
@ -74,7 +73,7 @@ class NotifyJSON(NotifyBase):
|
||||
self.schema = 'http'
|
||||
|
||||
self.fullpath = kwargs.get('fullpath')
|
||||
if not compat_is_basestring(self.fullpath):
|
||||
if not isinstance(self.fullpath, six.string_types):
|
||||
self.fullpath = '/'
|
||||
|
||||
self.headers = {}
|
||||
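Another recurring substitution in this commit, first visible here in NotifyJSON, is dropping the home-grown compat_is_basestring() helper in favour of isinstance(..., six.string_types). The snippet below only illustrates why the two checks are interchangeable; the function name is made up.

```python
import six


# six.string_types is (basestring,) on Python 2 and (str,) on Python 3, so this
# isinstance() test accepts the same inputs the old compat_is_basestring() did.
def looks_like_text(value):
    return isinstance(value, six.string_types)


print(looks_like_text('/notify/path'))  # True
print(looks_like_text(None))            # False
```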
@ -172,17 +171,18 @@ class NotifyJSON(NotifyBase):
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send JSON notification: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(r.status_code)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send JSON notification '
|
||||
'(error=%s).' % (r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to send JSON notification: '
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
@ -34,22 +34,20 @@
|
||||
# https://play.google.com/store/apps/details?id=com.joaomgcd.join
|
||||
|
||||
import re
|
||||
import six
|
||||
import requests
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
from ..common import NotifyType
|
||||
from ..utils import compat_is_basestring
|
||||
|
||||
# Token required as part of the API request
|
||||
VALIDATE_APIKEY = re.compile(r'[A-Za-z0-9]{32}')
|
||||
|
||||
# Extend HTTP Error Messages
|
||||
JOIN_HTTP_ERROR_MAP = HTTP_ERROR_MAP.copy()
|
||||
JOIN_HTTP_ERROR_MAP.update({
|
||||
JOIN_HTTP_ERROR_MAP = {
|
||||
401: 'Unauthorized - Invalid Token.',
|
||||
})
|
||||
}
|
||||
|
||||
# Used to break path apart into list of devices
|
||||
DEVICE_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
|
||||
@ -98,6 +96,9 @@ class NotifyJoin(NotifyBase):
|
||||
# The maximum allowable characters allowed in the body per message
|
||||
body_maxlen = 1000
|
||||
|
||||
# The default group to use if none is specified
|
||||
default_join_group = 'group.all'
|
||||
|
||||
def __init__(self, apikey, devices, **kwargs):
|
||||
"""
|
||||
Initialize Join Object
|
||||
@ -116,7 +117,7 @@ class NotifyJoin(NotifyBase):
|
||||
# The token associated with the account
|
||||
self.apikey = apikey.strip()
|
||||
|
||||
if compat_is_basestring(devices):
|
||||
if isinstance(devices, six.string_types):
|
||||
self.devices = [x for x in filter(bool, DEVICE_LIST_DELIM.split(
|
||||
devices,
|
||||
))]
|
||||
@ -129,7 +130,7 @@ class NotifyJoin(NotifyBase):
|
||||
|
||||
if len(self.devices) == 0:
|
||||
# Default to everyone
|
||||
self.devices.append('group.all')
|
||||
self.devices.append(self.default_join_group)
|
||||
|
||||
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
|
||||
"""
|
||||
@ -142,7 +143,7 @@ class NotifyJoin(NotifyBase):
|
||||
}
|
||||
|
||||
# error tracking (used for function return)
|
||||
return_status = True
|
||||
has_error = False
|
||||
|
||||
# Create a copy of the devices list
|
||||
devices = list(self.devices)
|
||||
@ -158,6 +159,8 @@ class NotifyJoin(NotifyBase):
|
||||
device,
|
||||
)
|
||||
)
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
url_args = {
|
||||
@ -192,26 +195,27 @@ class NotifyJoin(NotifyBase):
|
||||
headers=headers,
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Join:%s '
|
||||
'notification: %s (error=%s).' % (
|
||||
device,
|
||||
JOIN_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(
|
||||
r.status_code, JOIN_HTTP_ERROR_MAP)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Join:%s '
|
||||
'notification (error=%s).' % (
|
||||
device,
|
||||
r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to send Join notification to {}: '
|
||||
'{}{}error={}.'.format(
|
||||
device,
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
self.logger.debug(
|
||||
'Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
return_status = False
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
else:
|
||||
self.logger.info('Sent Join notification to %s.' % device)
|
||||
@ -222,9 +226,12 @@ class NotifyJoin(NotifyBase):
|
||||
'notification.' % device
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
return_status = False
|
||||
|
||||
return return_status
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
return not has_error
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
|
@ -29,20 +29,15 @@ from json import dumps
|
||||
from time import time
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyType
|
||||
|
||||
# Token required as part of the API request
|
||||
VALIDATE_TOKEN = re.compile(r'[A-Za-z0-9]{64}')
|
||||
|
||||
# Default User
|
||||
MATRIX_DEFAULT_USER = 'apprise'
|
||||
|
||||
# Extend HTTP Error Messages
|
||||
MATRIX_HTTP_ERROR_MAP = HTTP_ERROR_MAP.copy()
|
||||
MATRIX_HTTP_ERROR_MAP.update({
|
||||
MATRIX_HTTP_ERROR_MAP = {
|
||||
403: 'Unauthorized - Invalid Token.',
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
class MatrixNotificationMode(object):
|
||||
@ -79,6 +74,9 @@ class NotifyMatrix(NotifyBase):
|
||||
# The maximum allowable characters allowed in the body per message
|
||||
body_maxlen = 1000
|
||||
|
||||
# Default User
|
||||
matrix_default_user = 'apprise'
|
||||
|
||||
def __init__(self, token, mode=MatrixNotificationMode.MATRIX, **kwargs):
|
||||
"""
|
||||
Initialize Matrix Object
|
||||
@ -112,7 +110,7 @@ class NotifyMatrix(NotifyBase):
|
||||
|
||||
if not self.user:
|
||||
self.logger.warning(
|
||||
'No user was specified; using %s.' % MATRIX_DEFAULT_USER)
|
||||
'No user was specified; using %s.' % self.matrix_default_user)
|
||||
|
||||
if mode not in MATRIX_NOTIFICATION_MODES:
|
||||
self.logger.warning('The mode specified (%s) is invalid.' % mode)
|
||||
@ -145,9 +143,6 @@ class NotifyMatrix(NotifyBase):
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
|
||||
# error tracking (used for function return)
|
||||
notify_okay = True
|
||||
|
||||
# Perform Formatting
|
||||
title = self._re_formatting_rules.sub( # pragma: no branch
|
||||
lambda x: self._re_formatting_map[x.group()], title,
|
||||
@ -183,20 +178,21 @@ class NotifyMatrix(NotifyBase):
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Matrix '
|
||||
'notification: %s (error=%s).' % (
|
||||
MATRIX_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(
|
||||
r.status_code, MATRIX_HTTP_ERROR_MAP)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Matrix '
|
||||
'notification (error=%s).' % r.status_code)
|
||||
self.logger.warning(
|
||||
'Failed to send Matrix notification: '
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
notify_okay = False
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.info('Sent Matrix notification.')
|
||||
@ -206,14 +202,15 @@ class NotifyMatrix(NotifyBase):
|
||||
'A Connection error occurred sending Matrix notification.'
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
notify_okay = False
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
return notify_okay
|
||||
return True
|
||||
|
||||
def __slack_mode_payload(self, title, body, notify_type):
|
||||
# prepare JSON Object
|
||||
payload = {
|
||||
'username': self.user if self.user else MATRIX_DEFAULT_USER,
|
||||
'username': self.user if self.user else self.matrix_default_user,
|
||||
# Use Markdown language
|
||||
'mrkdwn': True,
|
||||
'attachments': [{
|
||||
@ -234,7 +231,8 @@ class NotifyMatrix(NotifyBase):
|
||||
msg = '<h4>%s</h4>%s<br/>' % (title, body)
|
||||
|
||||
payload = {
|
||||
'displayName': self.user if self.user else MATRIX_DEFAULT_USER,
|
||||
'displayName':
|
||||
self.user if self.user else self.matrix_default_user,
|
||||
'format': 'html',
|
||||
'text': msg,
|
||||
}
|
||||
|
@ -28,7 +28,6 @@ import requests
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
from ..common import NotifyType
|
||||
|
||||
@ -157,21 +156,21 @@ class NotifyMatterMost(NotifyBase):
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send MatterMost notification:'
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(r.status_code)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send MatterMost notification '
|
||||
'(error=%s).' % (
|
||||
r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to send MatterMost notification: '
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.info('Sent MatterMost notification.')
|
||||
|
||||
|
@ -27,7 +27,6 @@ import re
|
||||
import requests
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyType
|
||||
|
||||
# Used to validate API Key
|
||||
@ -54,12 +53,11 @@ PROWL_PRIORITIES = (
|
||||
ProwlPriority.EMERGENCY,
|
||||
)
|
||||
|
||||
# Extend HTTP Error Messages
|
||||
PROWL_HTTP_ERROR_MAP = HTTP_ERROR_MAP.copy()
|
||||
HTTP_ERROR_MAP.update({
|
||||
# Provide some known codes Prowl uses and what they translate to:
|
||||
PROWL_HTTP_ERROR_MAP = {
|
||||
406: 'IP address has exceeded API limit',
|
||||
409: 'Request not approved.',
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
class NotifyProwl(NotifyBase):
|
||||
@ -168,20 +166,18 @@ class NotifyProwl(NotifyBase):
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Prowl notification: '
|
||||
'%s (error=%s).' % (
|
||||
PROWL_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(
|
||||
r.status_code, PROWL_HTTP_ERROR_MAP)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Prowl notification '
|
||||
'(error=%s).' % (
|
||||
r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to send Prowl notification:'
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
@ -24,14 +24,13 @@
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import six
|
||||
import requests
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from .NotifyBase import IS_EMAIL_RE
|
||||
from ..utils import GET_EMAIL_RE
|
||||
from ..common import NotifyType
|
||||
from ..utils import compat_is_basestring
|
||||
|
||||
# Flag used as a placeholder to sending to all devices
|
||||
PUSHBULLET_SEND_TO_ALL = 'ALL_DEVICES'
|
||||
@ -40,11 +39,10 @@ PUSHBULLET_SEND_TO_ALL = 'ALL_DEVICES'
|
||||
# into a usable list.
|
||||
RECIPIENTS_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
|
||||
|
||||
# Extend HTTP Error Messages
|
||||
PUSHBULLET_HTTP_ERROR_MAP = HTTP_ERROR_MAP.copy()
|
||||
PUSHBULLET_HTTP_ERROR_MAP.update({
|
||||
# Provide some known codes Pushbullet uses and what they translate to:
|
||||
PUSHBULLET_HTTP_ERROR_MAP = {
|
||||
401: 'Unauthorized - Invalid Token.',
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
class NotifyPushBullet(NotifyBase):
|
||||
@ -74,7 +72,7 @@ class NotifyPushBullet(NotifyBase):
|
||||
super(NotifyPushBullet, self).__init__(**kwargs)
|
||||
|
||||
self.accesstoken = accesstoken
|
||||
if compat_is_basestring(recipients):
|
||||
if isinstance(recipients, six.string_types):
|
||||
self.recipients = [x for x in filter(
|
||||
bool, RECIPIENTS_LIST_DELIM.split(recipients))]
|
||||
|
||||
@ -117,7 +115,7 @@ class NotifyPushBullet(NotifyBase):
|
||||
# Send to all
|
||||
pass
|
||||
|
||||
elif IS_EMAIL_RE.match(recipient):
|
||||
elif GET_EMAIL_RE.match(recipient):
|
||||
payload['email'] = recipient
|
||||
self.logger.debug(
|
||||
"Recipient '%s' is an email address" % recipient)
|
||||
@ -150,23 +148,24 @@ class NotifyPushBullet(NotifyBase):
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send PushBullet notification to '
|
||||
'"%s": %s (error=%s).' % (
|
||||
recipient,
|
||||
PUSHBULLET_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(
|
||||
r.status_code, PUSHBULLET_HTTP_ERROR_MAP)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send PushBullet notification to '
|
||||
'"%s" (error=%s).' % (recipient, r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to send PushBullet notification to {}:'
|
||||
'{}{}error={}.'.format(
|
||||
recipient,
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
self.logger.debug(
|
||||
'Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
else:
|
||||
self.logger.info(
|
||||
@ -178,7 +177,10 @@ class NotifyPushBullet(NotifyBase):
|
||||
'notification to "%s".' % (recipient),
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
return not has_error
|
||||
|
||||
|
@ -24,14 +24,13 @@
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import six
|
||||
import requests
|
||||
from json import dumps
|
||||
from itertools import chain
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyType
|
||||
from ..utils import compat_is_basestring
|
||||
|
||||
# Used to detect and parse channels
|
||||
IS_CHANNEL = re.compile(r'^#(?P<name>[A-Za-z0-9]+)$')
|
||||
@ -94,7 +93,7 @@ class NotifyPushed(NotifyBase):
|
||||
if recipients is None:
|
||||
recipients = []
|
||||
|
||||
elif compat_is_basestring(recipients):
|
||||
elif isinstance(recipients, six.string_types):
|
||||
recipients = [x for x in filter(bool, LIST_DELIM.split(
|
||||
recipients,
|
||||
))]
|
||||
@ -225,19 +224,17 @@ class NotifyPushed(NotifyBase):
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Pushed notification: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(r.status_code)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Pushed notification '
|
||||
'(error=%s).' % r.status_code)
|
||||
self.logger.warning(
|
||||
'Failed to send Pushed notification:'
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
@ -24,12 +24,11 @@
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import six
|
||||
import requests
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyType
|
||||
from ..utils import compat_is_basestring
|
||||
|
||||
# Flag used as a placeholder to sending to all devices
|
||||
PUSHOVER_SEND_TO_ALL = 'ALL_DEVICES'
|
||||
@ -65,10 +64,9 @@ PUSHOVER_PRIORITIES = (
|
||||
DEVICE_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
|
||||
|
||||
# Extend HTTP Error Messages
|
||||
PUSHOVER_HTTP_ERROR_MAP = HTTP_ERROR_MAP.copy()
|
||||
PUSHOVER_HTTP_ERROR_MAP.update({
|
||||
PUSHOVER_HTTP_ERROR_MAP = {
|
||||
401: 'Unauthorized - Invalid Token.',
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
class NotifyPushover(NotifyBase):
|
||||
@ -117,7 +115,7 @@ class NotifyPushover(NotifyBase):
|
||||
'The API Token specified (%s) is invalid.' % token,
|
||||
)
|
||||
|
||||
if compat_is_basestring(devices):
|
||||
if isinstance(devices, six.string_types):
|
||||
self.devices = [x for x in filter(bool, DEVICE_LIST_DELIM.split(
|
||||
devices,
|
||||
))]
|
||||
@ -173,6 +171,8 @@ class NotifyPushover(NotifyBase):
|
||||
self.logger.warning(
|
||||
'The device specified (%s) is invalid.' % device,
|
||||
)
|
||||
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
@ -204,25 +204,24 @@ class NotifyPushover(NotifyBase):
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Pushover:%s '
|
||||
'notification: %s (error=%s).' % (
|
||||
device,
|
||||
PUSHOVER_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(
|
||||
r.status_code, PUSHOVER_HTTP_ERROR_MAP)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Pushover:%s '
|
||||
'notification (error=%s).' % (
|
||||
device,
|
||||
r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to send Pushover notification to {}: '
|
||||
'{}{}error={}.'.format(
|
||||
device,
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
self.logger.debug(
|
||||
'Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
else:
|
||||
self.logger.info(
|
||||
@ -234,7 +233,10 @@ class NotifyPushover(NotifyBase):
|
||||
device) + 'notification.'
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
return not has_error
|
||||
|
||||
|
@ -24,24 +24,22 @@
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import six
|
||||
import requests
|
||||
from json import loads
|
||||
from itertools import chain
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyType
|
||||
from ..utils import compat_is_basestring
|
||||
|
||||
IS_CHANNEL = re.compile(r'^#(?P<name>[A-Za-z0-9]+)$')
|
||||
IS_ROOM_ID = re.compile(r'^(?P<name>[A-Za-z0-9]+)$')
|
||||
|
||||
# Extend HTTP Error Messages
|
||||
RC_HTTP_ERROR_MAP = HTTP_ERROR_MAP.copy()
|
||||
RC_HTTP_ERROR_MAP.update({
|
||||
RC_HTTP_ERROR_MAP = {
|
||||
400: 'Channel/RoomId is wrong format, or missing from server.',
|
||||
401: 'Authentication token provided is invalid or missing.',
|
||||
})
|
||||
}
|
||||
|
||||
# Used to break apart list of potential tags by their delimiter
|
||||
# into a usable list.
|
||||
@ -103,7 +101,7 @@ class NotifyRocketChat(NotifyBase):
|
||||
if recipients is None:
|
||||
recipients = []
|
||||
|
||||
elif compat_is_basestring(recipients):
|
||||
elif isinstance(recipients, six.string_types):
|
||||
recipients = [x for x in filter(bool, LIST_DELIM.split(
|
||||
recipients,
|
||||
))]
|
||||
@ -253,24 +251,23 @@ class NotifyRocketChat(NotifyBase):
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Rocket.Chat notification: '
|
||||
'%s (error=%s).' % (
|
||||
RC_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(
|
||||
r.status_code, RC_HTTP_ERROR_MAP)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Rocket.Chat notification '
|
||||
'(error=%s).' % (
|
||||
r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to send Rocket.Chat notification: '
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.debug('Rocket.Chat Server Response: %s.' % r.text)
|
||||
self.logger.info('Sent Rocket.Chat notification.')
|
||||
|
||||
except requests.RequestException as e:
|
||||
@ -302,28 +299,30 @@ class NotifyRocketChat(NotifyBase):
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to authenticate with Rocket.Chat server: '
|
||||
'%s (error=%s).' % (
|
||||
RC_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(
|
||||
r.status_code, RC_HTTP_ERROR_MAP)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to authenticate with Rocket.Chat server '
|
||||
'(error=%s).' % (
|
||||
r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to authenticate {} with Rocket.Chat: '
|
||||
'{}{}error={}.'.format(
|
||||
self.user,
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
||||
else:
|
||||
self.logger.debug('Rocket.Chat authentication successful')
|
||||
response = loads(r.text)
|
||||
response = loads(r.content)
|
||||
if response.get('status') != "success":
|
||||
self.logger.warning(
|
||||
'Could not authenticate with Rocket.Chat server.')
|
||||
'Could not authenticate {} with Rocket.Chat.'.format(
|
||||
self.user))
|
||||
return False
|
||||
|
||||
# Set our headers for further communication
|
||||
@ -334,8 +333,8 @@ class NotifyRocketChat(NotifyBase):
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A Connection error occured authenticating to the '
|
||||
'Rocket.Chat server.')
|
||||
'A Connection error occurred authenticating {} on '
|
||||
'Rocket.Chat.'.format(self.user))
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
return False
|
||||
|
||||
@ -353,18 +352,19 @@ class NotifyRocketChat(NotifyBase):
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to log off Rocket.Chat server: '
|
||||
'%s (error=%s).' % (
|
||||
RC_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(
|
||||
r.status_code, RC_HTTP_ERROR_MAP)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to log off Rocket.Chat server '
|
||||
'(error=%s).' % (
|
||||
r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to logoff {} from Rocket.Chat: '
|
||||
'{}{}error={}.'.format(
|
||||
self.user,
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
@ -372,7 +372,7 @@ class NotifyRocketChat(NotifyBase):
|
||||
else:
|
||||
self.logger.debug(
|
||||
'Rocket.Chat log off successful; response %s.' % (
|
||||
r.text))
|
||||
r.content))
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
|
@ -36,7 +36,6 @@ import requests
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
from ..common import NotifyType
|
||||
|
||||
@ -193,22 +192,17 @@ class NotifyRyver(NotifyBase):
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Ryver:%s '
|
||||
'notification: %s (error=%s).' % (
|
||||
self.organization,
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(r.status_code)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Ryver:%s '
|
||||
'notification (error=%s).' % (
|
||||
self.organization,
|
||||
r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to send Ryver notification: '
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
@ -24,6 +24,7 @@
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import six
|
||||
import hmac
|
||||
import requests
|
||||
from hashlib import sha256
|
||||
@ -33,9 +34,7 @@ from xml.etree import ElementTree
|
||||
from itertools import chain
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyType
|
||||
from ..utils import compat_is_basestring
|
||||
|
||||
# Some Phone Number Detection
|
||||
IS_PHONE_NO = re.compile(r'^\+?(?P<phone>[0-9\s)(+-]+)\s*$')
|
||||
@ -63,10 +62,9 @@ IS_REGION = re.compile(
|
||||
r'^\s*(?P<country>[a-z]{2})-(?P<area>[a-z]+)-(?P<no>[0-9]+)\s*$', re.I)
|
||||
|
||||
# Extend HTTP Error Messages
|
||||
AWS_HTTP_ERROR_MAP = HTTP_ERROR_MAP.copy()
|
||||
AWS_HTTP_ERROR_MAP.update({
|
||||
AWS_HTTP_ERROR_MAP = {
|
||||
403: 'Unauthorized - Invalid Access/Secret Key Combination.',
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
class NotifySNS(NotifyBase):
|
||||
@ -152,7 +150,7 @@ class NotifySNS(NotifyBase):
|
||||
if recipients is None:
|
||||
recipients = []
|
||||
|
||||
elif compat_is_basestring(recipients):
|
||||
elif isinstance(recipients, six.string_types):
|
||||
recipients = [x for x in filter(bool, LIST_DELIM.split(
|
||||
recipients,
|
||||
))]
|
||||
@ -301,22 +299,21 @@ class NotifySNS(NotifyBase):
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send AWS notification to '
|
||||
'"%s": %s (error=%s).' % (
|
||||
to,
|
||||
AWS_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(
|
||||
r.status_code, AWS_HTTP_ERROR_MAP)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send AWS notification to '
|
||||
'"%s" (error=%s).' % (to, r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to send AWS notification to {}: '
|
||||
'{}{}error={}.'.format(
|
||||
to,
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details: %s' % r.text)
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
return (False, NotifySNS.aws_response_to_dict(r.text))
|
||||
return (False, NotifySNS.aws_response_to_dict(r.content))
|
||||
|
||||
else:
|
||||
self.logger.info(
|
||||
@ -330,7 +327,7 @@ class NotifySNS(NotifyBase):
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
return (False, NotifySNS.aws_response_to_dict(None))
|
||||
|
||||
return (True, NotifySNS.aws_response_to_dict(r.text))
|
||||
return (True, NotifySNS.aws_response_to_dict(r.content))
|
||||
|
||||
def aws_prepare_request(self, payload, reference=None):
|
||||
"""
|
||||
|
@ -36,15 +36,14 @@
|
||||
#
|
||||
#
|
||||
import re
|
||||
import six
|
||||
import requests
|
||||
from json import dumps
|
||||
from time import time
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
from ..common import NotifyType
|
||||
from ..utils import compat_is_basestring
|
||||
|
||||
# Token required as part of the API request
|
||||
# /AAAAAAAAA/........./........................
|
||||
@ -62,10 +61,9 @@ VALIDATE_TOKEN_C = re.compile(r'[A-Za-z0-9]{24}')
|
||||
SLACK_DEFAULT_USER = 'apprise'
|
||||
|
||||
# Extend HTTP Error Messages
|
||||
SLACK_HTTP_ERROR_MAP = HTTP_ERROR_MAP.copy()
|
||||
SLACK_HTTP_ERROR_MAP.update({
|
||||
SLACK_HTTP_ERROR_MAP = {
|
||||
401: 'Unauthorized - Invalid Token.',
|
||||
})
|
||||
}
|
||||
|
||||
# Used to break path apart into list of channels
|
||||
CHANNEL_LIST_DELIM = re.compile(r'[ \t\r\n,#\\/]+')
|
||||
@ -143,7 +141,7 @@ class NotifySlack(NotifyBase):
|
||||
self.logger.warning(
|
||||
'No user was specified; using %s.' % SLACK_DEFAULT_USER)
|
||||
|
||||
if compat_is_basestring(channels):
|
||||
if isinstance(channels, six.string_types):
|
||||
self.channels = [x for x in filter(bool, CHANNEL_LIST_DELIM.split(
|
||||
channels,
|
||||
))]
|
||||
@ -186,7 +184,7 @@ class NotifySlack(NotifyBase):
|
||||
}
|
||||
|
||||
# error tracking (used for function return)
|
||||
notify_okay = True
|
||||
has_error = False
|
||||
|
||||
# Perform Formatting
|
||||
title = self._re_formatting_rules.sub( # pragma: no branch
|
||||
@ -214,6 +212,8 @@ class NotifySlack(NotifyBase):
|
||||
channel,
|
||||
)
|
||||
)
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
if len(channel) > 1 and channel[0] == '+':
|
||||
@ -263,28 +263,28 @@ class NotifySlack(NotifyBase):
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send Slack:%s '
|
||||
'notification: %s (error=%s).' % (
|
||||
channel,
|
||||
SLACK_HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(
|
||||
r.status_code, SLACK_HTTP_ERROR_MAP)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Slack:%s '
|
||||
'notification (error=%s).' % (
|
||||
channel,
|
||||
r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to send Slack notification to {}: '
|
||||
'{}{}error={}.'.format(
|
||||
channel,
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.content)
|
||||
self.logger.debug(
|
||||
'Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
notify_okay = False
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
else:
|
||||
self.logger.info('Sent Slack notification.')
|
||||
self.logger.info(
|
||||
'Sent Slack notification to {}.'.format(channel))
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
@ -292,9 +292,12 @@ class NotifySlack(NotifyBase):
|
||||
channel) + 'notification.'
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
notify_okay = False
|
||||
|
||||
return notify_okay
|
||||
# Mark our failure
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
return not has_error
|
||||
|
||||
def url(self):
|
||||
"""
|
||||
|
@ -58,7 +58,6 @@ from json import loads
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyType
|
||||
from ..common import NotifyImageSize
|
||||
from ..common import NotifyFormat
|
||||
@ -120,14 +119,15 @@ class NotifyTelegram(NotifyBase):
|
||||
|
||||
except AttributeError:
|
||||
# Token was None
|
||||
self.logger.warning('No Bot Token was specified.')
|
||||
raise TypeError('No Bot Token was specified.')
|
||||
err = 'No Bot Token was specified.'
|
||||
self.logger.warning(err)
|
||||
raise TypeError(err)
|
||||
|
||||
result = VALIDATE_BOT_TOKEN.match(self.bot_token)
|
||||
if not result:
|
||||
raise TypeError(
|
||||
'The Bot Token specified (%s) is invalid.' % bot_token,
|
||||
)
|
||||
err = 'The Bot Token specified (%s) is invalid.' % bot_token
|
||||
self.logger.warning(err)
|
||||
raise TypeError(err)
|
||||
|
||||
# Store our Bot Token
|
||||
self.bot_token = result.group('key')
|
||||
@ -146,8 +146,9 @@ class NotifyTelegram(NotifyBase):
|
||||
self.chat_ids.append(str(_id))
|
||||
|
||||
if len(self.chat_ids) == 0:
|
||||
self.logger.warning('No chat_id(s) were specified.')
|
||||
raise TypeError('No chat_id(s) were specified.')
|
||||
err = 'No chat_id(s) were specified.'
|
||||
self.logger.warning(err)
|
||||
raise TypeError(err)
|
||||
|
||||
# Track whether or not we want to send an image with our notification
|
||||
# or not.
|
||||
@ -171,8 +172,7 @@ class NotifyTelegram(NotifyBase):
|
||||
if not path:
|
||||
# No image to send
|
||||
self.logger.debug(
|
||||
'Telegram Image does not exist for %s' % (
|
||||
notify_type))
|
||||
'Telegram Image does not exist for %s' % (notify_type))
|
||||
return None
|
||||
|
||||
files = {'photo': (basename(path), open(path), 'rb')}
|
||||
@ -195,19 +195,18 @@ class NotifyTelegram(NotifyBase):
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to post Telegram Image: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(r.status_code)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to detect Telegram Image. (error=%s).' % (
|
||||
r.status_code))
|
||||
self.logger.warning(
|
||||
'Failed to send Telegram Image: '
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
return False
|
||||
|
||||
except requests.RequestException as e:
|
||||
@ -248,6 +247,8 @@ class NotifyTelegram(NotifyBase):
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(r.status_code)
|
||||
|
||||
try:
|
||||
# Try to get the error message if we can:
|
||||
@ -256,30 +257,26 @@ class NotifyTelegram(NotifyBase):
|
||||
except Exception:
|
||||
error_msg = None
|
||||
|
||||
try:
|
||||
if error_msg:
|
||||
self.logger.warning(
|
||||
'Failed to detect Telegram user: (%s) %s.' % (
|
||||
r.status_code, error_msg))
|
||||
|
||||
else:
|
||||
self.logger.warning(
|
||||
'Failed to detect Telegram user: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
if error_msg:
|
||||
self.logger.warning(
|
||||
'Failed to detect Telegram user. (error=%s).' % (
|
||||
'Failed to detect the Telegram user: (%s) %s.' % (
|
||||
r.status_code, error_msg))
|
||||
|
||||
else:
|
||||
self.logger.warning(
|
||||
'Failed to detect the Telegram user: '
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
return 0
|
||||
|
||||
except requests.RequestException as e:
|
||||
self.logger.warning(
|
||||
'A connection error occured detecting Telegram User.')
|
||||
'A connection error occurred detecting the Telegram User.')
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
return 0
|
||||
|
||||
@ -403,6 +400,8 @@ class NotifyTelegram(NotifyBase):
|
||||
chat_id,
|
||||
)
|
||||
)
|
||||
|
||||
# Flag our error
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
@ -438,6 +437,8 @@ class NotifyTelegram(NotifyBase):
|
||||
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(r.status_code)
|
||||
|
||||
try:
|
||||
# Try to get the error message if we can:
|
||||
@ -446,32 +447,19 @@ class NotifyTelegram(NotifyBase):
|
||||
except Exception:
|
||||
error_msg = None
|
||||
|
||||
try:
|
||||
if error_msg:
|
||||
self.logger.warning(
|
||||
'Failed to send Telegram:%s '
|
||||
'notification: (%s) %s.' % (
|
||||
payload['chat_id'],
|
||||
r.status_code, error_msg))
|
||||
self.logger.warning(
|
||||
'Failed to send Telegram notification to {}: '
|
||||
'{}, error={}.'.format(
|
||||
payload['chat_id'],
|
||||
error_msg if error_msg else status_str,
|
||||
r.status_code))
|
||||
|
||||
else:
|
||||
self.logger.warning(
|
||||
'Failed to send Telegram:%s '
|
||||
'notification: %s (error=%s).' % (
|
||||
payload['chat_id'],
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send Telegram:%s '
|
||||
'notification (error=%s).' % (
|
||||
payload['chat_id'], r.status_code))
|
||||
|
||||
# self.logger.debug('Response Details: %s' % r.raw.read())
|
||||
self.logger.debug(
|
||||
'Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Flag our error
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
else:
|
||||
self.logger.info('Sent Telegram notification.')
|
||||
@ -482,7 +470,10 @@ class NotifyTelegram(NotifyBase):
|
||||
payload['chat_id']) + 'notification.'
|
||||
)
|
||||
self.logger.debug('Socket Exception: %s' % str(e))
|
||||
|
||||
# Flag our error
|
||||
has_error = True
|
||||
continue
|
||||
|
||||
return not has_error
|
||||
|
||||
|
@ -27,7 +27,6 @@ import requests
|
||||
from json import dumps
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyType
|
||||
from ..common import NotifyImageSize
|
||||
|
||||
@ -204,17 +203,17 @@ class NotifyXBMC(NotifyBase):
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
# We had a problem
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send XBMC/KODI notification:'
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(r.status_code)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send XBMC/KODI notification '
|
||||
'(error=%s).' % r.status_code)
|
||||
self.logger.warning(
|
||||
'Failed to send XBMC/KODI notification: '
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
@ -24,13 +24,12 @@
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import six
|
||||
import requests
|
||||
|
||||
from .NotifyBase import NotifyBase
|
||||
from .NotifyBase import HTTP_ERROR_MAP
|
||||
from ..common import NotifyImageSize
|
||||
from ..common import NotifyType
|
||||
from ..utils import compat_is_basestring
|
||||
|
||||
|
||||
class NotifyXML(NotifyBase):
|
||||
@ -89,7 +88,7 @@ class NotifyXML(NotifyBase):
|
||||
self.schema = 'http'
|
||||
|
||||
self.fullpath = kwargs.get('fullpath')
|
||||
if not compat_is_basestring(self.fullpath):
|
||||
if not isinstance(self.fullpath, six.string_types):
|
||||
self.fullpath = '/'
|
||||
|
||||
self.headers = {}
|
||||
@ -190,17 +189,18 @@ class NotifyXML(NotifyBase):
|
||||
verify=self.verify_certificate,
|
||||
)
|
||||
if r.status_code != requests.codes.ok:
|
||||
try:
|
||||
self.logger.warning(
|
||||
'Failed to send XML notification: '
|
||||
'%s (error=%s).' % (
|
||||
HTTP_ERROR_MAP[r.status_code],
|
||||
r.status_code))
|
||||
# We had a problem
|
||||
status_str = \
|
||||
NotifyBase.http_response_code_lookup(r.status_code)
|
||||
|
||||
except KeyError:
|
||||
self.logger.warning(
|
||||
'Failed to send XML notification '
|
||||
'(error=%s).' % r.status_code)
|
||||
self.logger.warning(
|
||||
'Failed to send XML notification: '
|
||||
'{}{}error={}.'.format(
|
||||
status_str,
|
||||
', ' if status_str else '',
|
||||
r.status_code))
|
||||
|
||||
self.logger.debug('Response Details:\r\n{}'.format(r.content))
|
||||
|
||||
# Return; we're done
|
||||
return False
|
||||
|
@ -23,8 +23,9 @@
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
# Used for Testing; specifically test_email_plugin.py needs access
|
||||
# to the module's WEBBASE_LOOKUP_TABLE and WebBaseLogin objects
|
||||
import sys
|
||||
import six
|
||||
|
||||
from . import NotifyEmail as NotifyEmailBase
|
||||
|
||||
from .NotifyBoxcar import NotifyBoxcar
|
||||
@ -64,6 +65,10 @@ from ..common import NOTIFY_IMAGE_SIZES
|
||||
from ..common import NotifyType
|
||||
from ..common import NOTIFY_TYPES
|
||||
|
||||
# Maintains a mapping of all of the Notification services
|
||||
SCHEMA_MAP = {}
|
||||
|
||||
|
||||
__all__ = [
|
||||
# Notification Services
|
||||
'NotifyBoxcar', 'NotifyDBus', 'NotifyEmail', 'NotifyEmby', 'NotifyDiscord',
|
||||
@ -89,3 +94,51 @@ __all__ = [
|
||||
# tweepy (used for NotifyTwitter Testing)
|
||||
'tweepy',
|
||||
]
|
||||
|
||||
|
||||
# Load our Lookup Matrix
|
||||
def __load_matrix():
|
||||
"""
|
||||
Dynamically load our schema map; this allows us to gracefully
|
||||
skip over modules we simply don't have the dependencies for.
|
||||
|
||||
"""
|
||||
|
||||
thismodule = sys.modules[__name__]
|
||||
|
||||
# to add its mapping to our hash table
|
||||
for entry in dir(thismodule):
|
||||
|
||||
# Get our plugin
|
||||
plugin = getattr(thismodule, entry)
|
||||
if not hasattr(plugin, 'app_id'): # pragma: no branch
|
||||
# Filter out non-notification modules
|
||||
continue
|
||||
|
||||
# Load protocol(s) if defined
|
||||
proto = getattr(plugin, 'protocol', None)
|
||||
if isinstance(proto, six.string_types):
|
||||
if proto not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[proto] = plugin
|
||||
|
||||
elif isinstance(proto, (set, list, tuple)):
|
||||
# Support iterables list types
|
||||
for p in proto:
|
||||
if p not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[p] = plugin
|
||||
|
||||
# Load secure protocol(s) if defined
|
||||
protos = getattr(plugin, 'secure_protocol', None)
|
||||
if isinstance(protos, six.string_types):
|
||||
if protos not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[protos] = plugin
|
||||
|
||||
if isinstance(protos, (set, list, tuple)):
|
||||
# Support iterables list types
|
||||
for p in protos:
|
||||
if p not in SCHEMA_MAP:
|
||||
SCHEMA_MAP[p] = plugin
|
||||
|
||||
|
||||
# Dynamically build our module
|
||||
__load_matrix()
|
||||
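The __load_matrix() routine above walks every attribute of apprise.plugins, keeps anything that defines an app_id, and registers each declared protocol and secure_protocol in SCHEMA_MAP. A hypothetical lookup built on that map is sketched below; plugin_for() is an illustrative helper and not part of the library, though SCHEMA_MAP and GET_SCHEMA_RE are both visible elsewhere in this diff.

```python
# Illustrative only: route a notification URL to its plugin class using the
# dynamically built SCHEMA_MAP. plugin_for() is not an Apprise API.
from apprise.plugins import SCHEMA_MAP
from apprise.utils import GET_SCHEMA_RE


def plugin_for(url):
    match = GET_SCHEMA_RE.match(url)
    if not match:
        return None
    # e.g. 'slack://...' resolves to NotifySlack, 'json://...' to NotifyJSON
    return SCHEMA_MAP.get(match.group('schema').lower())
```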
|
107 apprise/utils.py
@ -24,7 +24,7 @@
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
|
||||
import six
|
||||
from os.path import expanduser
|
||||
|
||||
try:
|
||||
@ -98,6 +98,18 @@ NOTIFY_CUSTOM_DEL_TOKENS = re.compile(r'^-(?P<key>.*)\s*')
|
||||
# Used for attempting to acquire the schema if the URL can't be parsed.
|
||||
GET_SCHEMA_RE = re.compile(r'\s*(?P<schema>[a-z0-9]{2,9})://.*$', re.I)
|
||||
|
||||
# Regular expression based and expanded from:
|
||||
# http://www.regular-expressions.info/email.html
|
||||
GET_EMAIL_RE = re.compile(
|
||||
r"((?P<label>[^+]+)\+)?"
|
||||
r"(?P<userid>[a-z0-9$%=_~-]+"
|
||||
r"(?:\.[a-z0-9$%+=_~-]+)"
|
||||
r"*)@(?P<domain>(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+"
|
||||
r"[a-z0-9](?:[a-z0-9-]*"
|
||||
r"[a-z0-9]))?",
|
||||
re.IGNORECASE,
|
||||
)
|
||||
|
||||
|
||||
def is_hostname(hostname):
|
||||
"""
|
||||
@ -113,18 +125,22 @@ def is_hostname(hostname):
|
||||
return all(allowed.match(x) for x in hostname.split("."))
|
||||
|
||||
|
||||
def compat_is_basestring(content):
|
||||
"""
|
||||
Python 3 support for checking if content is unicode and/or
|
||||
of a string type
|
||||
"""
|
||||
try:
|
||||
# Python v2.x
|
||||
return isinstance(content, basestring)
|
||||
def is_email(address):
|
||||
"""Determine if the specified entry is an email address
|
||||
|
||||
except NameError:
|
||||
# Python v3.x
|
||||
return isinstance(content, str)
|
||||
Args:
|
||||
address (str): The string you want to check.
|
||||
|
||||
Returns:
|
||||
bool: Returns True if the address specified is an email address
|
||||
and False if it isn't.
|
||||
"""
|
||||
|
||||
try:
|
||||
return GET_EMAIL_RE.match(address) is not None
|
||||
except TypeError:
|
||||
# invalid syntax
|
||||
return False
|
||||
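The new is_email() utility shown above replaces the per-plugin checks (NotifyBase.is_email, IS_EMAIL_RE) removed in the earlier hunks. Its expected behaviour, using made-up sample addresses:

```python
from apprise.utils import is_email

print(is_email('user@example.com'))  # True
print(is_email('not-an-email'))      # False
# Non-string input trips the TypeError handler and is reported as False
print(is_email(None))                # False
```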
|
||||
|
||||
def tidy_path(path):
|
||||
@ -245,7 +261,7 @@ def parse_url(url, default_schema='http', verify_host=True):
|
||||
content could not be extracted.
|
||||
"""
|
||||
|
||||
if not compat_is_basestring(url):
|
||||
if not isinstance(url, six.string_types):
|
||||
# Simple error checking
|
||||
return None
|
||||
|
||||
@ -391,10 +407,10 @@ def parse_url(url, default_schema='http', verify_host=True):
|
||||
|
||||
# Re-assemble cleaned up version of the url
|
||||
result['url'] = '%s://' % result['schema']
|
||||
if compat_is_basestring(result['user']):
|
||||
if isinstance(result['user'], six.string_types):
|
||||
result['url'] += result['user']
|
||||
|
||||
if compat_is_basestring(result['password']):
|
||||
if isinstance(result['password'], six.string_types):
|
||||
result['url'] += ':%s@' % result['password']
|
||||
|
||||
else:
|
||||
@ -420,7 +436,7 @@ def parse_bool(arg, default=False):
|
||||
If the content could not be parsed, then the default is returned.
|
||||
"""
|
||||
|
||||
if compat_is_basestring(arg):
|
||||
if isinstance(arg, six.string_types):
|
||||
# no = no - False
|
||||
# of = short for off - False
|
||||
# 0 = int for False
|
||||
@ -473,7 +489,7 @@ def parse_list(*args):
|
||||
|
||||
result = []
|
||||
for arg in args:
|
||||
if compat_is_basestring(arg):
|
||||
if isinstance(arg, six.string_types):
|
||||
result += re.split(STRING_DELIMITERS, arg)
|
||||
|
||||
elif isinstance(arg, (set, list, tuple)):
|
||||
@ -494,3 +510,60 @@ def parse_list(*args):
|
||||
# a list, we need to change it into a list object to remain compatible with
|
||||
# both distribution types.
|
||||
return sorted([x for x in filter(bool, list(set(result)))])
|
||||
|
||||
|
||||
def is_exclusive_match(logic, data):
|
||||
"""
|
||||
|
||||
The data variable should always be a set of strings that the logic can be
|
||||
compared against. It should be a set. If it isn't already, then it will
|
||||
be converted as such. These identify the tags themselves.
|
||||
|
||||
Our logic should be a list as well:
|
||||
- top level entries are treated as an 'or'
|
||||
- second level (or more) entries are treated as 'and'
|
||||
|
||||
examples:
|
||||
logic="tagA, tagB" = tagA or tagB
|
||||
logic=['tagA', 'tagB'] = tagA or tagB
|
||||
logic=[('tagA', 'tagC'), 'tagB'] = (tagA and tagC) or tagB
|
||||
logic=[('tagB', 'tagC')] = tagB and tagC
|
||||
"""
|
||||
|
||||
if logic is None:
|
||||
# If there is no logic to apply then we're done early
|
||||
return True
|
||||
|
||||
elif isinstance(logic, six.string_types):
|
||||
# Update our logic to support our delimiters
|
||||
logic = set(parse_list(logic))
|
||||
|
||||
if not isinstance(logic, (list, tuple, set)):
|
||||
# garbage input
|
||||
return False
|
||||
|
||||
# using the data detected; determine if we'll allow the
|
||||
# notification to be sent or not
|
||||
matched = (len(logic) == 0)
|
||||
|
||||
# Every entry here will be or'ed with the next
|
||||
for entry in logic:
|
||||
if not isinstance(entry, (six.string_types, list, tuple, set)):
|
||||
# Garbage entry in our logic found
|
||||
return False
|
||||
|
||||
# treat these entries as though all elements found
|
||||
# must exist in the notification service
|
||||
entries = set(parse_list(entry))
|
||||
|
||||
if len(entries.intersection(data)) == len(entries):
|
||||
# our set contains all of the entries found
|
||||
# in our notification data set
|
||||
matched = True
|
||||
break
|
||||
|
||||
# else: keep looking
|
||||
|
||||
# Return True if we matched against our logic (or simply none was
|
||||
# specified).
|
||||
return matched
|
||||
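is_exclusive_match() above implements the tag filtering used by the new configuration support: top-level entries are OR'ed together, while entries nested in a tuple, list, or set must all be present (AND). The checks below simply restate the docstring's examples against a sample tag set; the values are illustrative.

```python
from apprise.utils import is_exclusive_match

tags = {'tagA', 'tagC'}

print(is_exclusive_match(None, tags))                        # True  (no logic; always match)
print(is_exclusive_match('tagA, tagB', tags))                # True  (tagA or tagB)
print(is_exclusive_match([('tagA', 'tagC'), 'tagB'], tags))  # True  ((tagA and tagC) or tagB)
print(is_exclusive_match([('tagB', 'tagC')], tags))          # False (tagB and tagC)
```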
|
@ -6,3 +6,4 @@ urllib3
|
||||
six
|
||||
click >= 5.0
|
||||
markdown
|
||||
PyYAML
|
||||
|
@ -24,22 +24,28 @@
|
||||
# THE SOFTWARE.
|
||||
|
||||
from __future__ import print_function
|
||||
import six
|
||||
import pytest
|
||||
import requests
|
||||
import mock
|
||||
from os import chmod
|
||||
from os import getuid
|
||||
from os.path import dirname
|
||||
|
||||
from apprise import Apprise
|
||||
from apprise import AppriseAsset
|
||||
from apprise.utils import compat_is_basestring
|
||||
from apprise.Apprise import SCHEMA_MAP
|
||||
from apprise import NotifyBase
|
||||
from apprise import NotifyType
|
||||
from apprise import NotifyFormat
|
||||
from apprise import NotifyImageSize
|
||||
from apprise import __version__
|
||||
from apprise.Apprise import __load_matrix
|
||||
import pytest
|
||||
import requests
|
||||
import mock
|
||||
|
||||
from apprise.plugins import SCHEMA_MAP
|
||||
from apprise.plugins import __load_matrix
|
||||
|
||||
# Disable logging for a cleaner testing output
|
||||
import logging
|
||||
logging.disable(logging.CRITICAL)
|
||||
|
||||
|
||||
def test_apprise():
|
||||
@ -89,12 +95,12 @@ def test_apprise():
|
||||
assert(len(a) == 2)
|
||||
|
||||
# We can retrieve elements from our list too by reference:
|
||||
assert(compat_is_basestring(a[0].url()) is True)
|
||||
assert(isinstance(a[0].url(), six.string_types) is True)
|
||||
|
||||
# We can iterate over our list too:
|
||||
count = 0
|
||||
for o in a:
|
||||
assert(compat_is_basestring(o.url()) is True)
|
||||
assert(isinstance(o.url(), six.string_types) is True)
|
||||
count += 1
|
||||
# verify that we did indeed iterate over each element
|
||||
assert(len(a) == count)
|
||||
@ -242,6 +248,10 @@ def test_apprise():
|
||||
a.add(plugin)
|
||||
assert(len(a) == 1)
|
||||
|
||||
# We can add entries as a list too (to add more than one)
|
||||
a.add([plugin, plugin, plugin])
|
||||
assert(len(a) == 4)
|
||||
|
||||
# Reset our object again
|
||||
a.clear()
|
||||
try:
|
||||
@ -659,4 +669,4 @@ def test_apprise_details():
|
||||
# All plugins must have a name defined; the below generates
|
||||
# a list of entries that do not have a string defined.
|
||||
assert(not len([x['service_name'] for x in details['schemas']
|
||||
if not compat_is_basestring(x['service_name'])]))
|
||||
if not isinstance(x['service_name'], six.string_types)]))
|
||||
|
626
test/test_apprise_config.py
Normal file
@ -0,0 +1,626 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import six
|
||||
import io
|
||||
import mock
|
||||
from apprise import NotifyFormat
|
||||
from apprise.Apprise import Apprise
|
||||
from apprise.AppriseConfig import AppriseConfig
|
||||
from apprise.AppriseAsset import AppriseAsset
|
||||
from apprise.config.ConfigBase import ConfigBase
|
||||
from apprise.plugins.NotifyBase import NotifyBase
|
||||
|
||||
from apprise.config import SCHEMA_MAP as CONFIG_SCHEMA_MAP
|
||||
from apprise.plugins import SCHEMA_MAP as NOTIFY_SCHEMA_MAP
|
||||
from apprise.config import __load_matrix
|
||||
from apprise.config.ConfigFile import ConfigFile
|
||||
|
||||
# Disable logging for a cleaner testing output
|
||||
import logging
|
||||
logging.disable(logging.CRITICAL)
|
||||
|
||||
|
||||
def test_apprise_config(tmpdir):
|
||||
"""
|
||||
API: AppriseConfig basic testing
|
||||
|
||||
"""
|
||||
|
||||
# Create ourselves a config object
|
||||
ac = AppriseConfig()
|
||||
|
||||
# There are no servers loaded
|
||||
assert len(ac) == 0
|
||||
|
||||
# lets try anyway
|
||||
assert len(ac.servers()) == 0
|
||||
|
||||
t = tmpdir.mkdir("simple-formatting").join("apprise")
|
||||
t.write("""
|
||||
# A comment line over top of a URL
|
||||
mailto://usera:pass@gmail.com
|
||||
|
||||
# A line with multiple tag assignments to it
|
||||
taga,tagb=gnome://
|
||||
|
||||
# Even if there are accidental leading spaces, this configuration
|
||||
# is accepting of that and will not exclude them
|
||||
tagc=kde://
|
||||
|
||||
# A very poorly structured url
|
||||
sns://:@/
|
||||
|
||||
# Just 1 token provided causes exception
|
||||
sns://T1JJ3T3L2/
|
||||
""")
|
||||
|
||||
# Create ourselves a config object
|
||||
ac = AppriseConfig(paths=str(t))
|
||||
|
||||
# One configuration file should have been found
|
||||
assert len(ac) == 1
|
||||
|
||||
# We should be able to read our 3 servers from that
|
||||
assert len(ac.servers()) == 3
|
||||
|
||||
# Get our URL back
|
||||
assert isinstance(ac[0].url(), six.string_types)
|
||||
|
||||
# Test cases where our URL is invalid
|
||||
t = tmpdir.mkdir("strange-lines").join("apprise")
|
||||
t.write("""
|
||||
# basically this consists of defined tags and no url
|
||||
tag=
|
||||
""")
|
||||
|
||||
# Create ourselves a config object
|
||||
ac = AppriseConfig(paths=str(t), asset=AppriseAsset())
|
||||
|
||||
# One configuration file should have been found
|
||||
assert len(ac) == 1
|
||||
|
||||
# No urls were set
|
||||
assert len(ac.servers()) == 0
|
||||
|
||||
# Create a ConfigBase object
|
||||
cb = ConfigBase()
|
||||
|
||||
# Test adding of all entries
|
||||
assert ac.add(configs=cb, asset=AppriseAsset(), tag='test') is True
|
||||
|
||||
# Test adding of all entries
|
||||
assert ac.add(
|
||||
configs=['file://?', ], asset=AppriseAsset(), tag='test') is False
|
||||
|
||||
# Test the adding of garbage
|
||||
assert ac.add(configs=object()) is False
|
||||
|
||||
# Try again but enforce our format
|
||||
ac = AppriseConfig(paths='file://{}?format=text'.format(str(t)))
|
||||
|
||||
# One configuration file should have been found
|
||||
assert len(ac) == 1
|
||||
|
||||
# No urls were set
|
||||
assert len(ac.servers()) == 0
|
||||
|
||||
#
|
||||
# Test Internationalization and the handling of unicode characters
|
||||
#
|
||||
istr = """
|
||||
# Iñtërnâtiônàlization Testing
|
||||
windows://"""
|
||||
|
||||
if six.PY2:
|
||||
# decode string into unicode
|
||||
istr = istr.decode('utf-8')
|
||||
|
||||
# Write our content to our file
|
||||
t = tmpdir.mkdir("internationalization").join("apprise")
|
||||
with io.open(str(t), 'wb') as f:
|
||||
f.write(istr.encode('latin-1'))
|
||||
|
||||
# Create ourselves a config object
|
||||
ac = AppriseConfig(paths=str(t))
|
||||
|
||||
# One configuration file should have been found
|
||||
assert len(ac) == 1
|
||||
|
||||
# This will fail because our default encoding is utf-8; however the file
|
||||
# we opened was not; it was latin-1 and could not be parsed.
|
||||
assert len(ac.servers()) == 0
|
||||
|
||||
# Test iterator
|
||||
count = 0
|
||||
for entry in ac:
|
||||
count += 1
|
||||
assert len(ac) == count
|
||||
|
||||
# We can fix this though; set our encoding to latin-1
|
||||
ac = AppriseConfig(paths='file://{}?encoding=latin-1'.format(str(t)))
|
||||
|
||||
# One configuration file should have been found
|
||||
assert len(ac) == 1
|
||||
|
||||
# Our URL should be found
|
||||
assert len(ac.servers()) == 1
|
||||
|
||||
# Get our URL back
|
||||
assert isinstance(ac[0].url(), six.string_types)
|
||||
|
||||
# pop an entry from our list
|
||||
assert isinstance(ac.pop(0), ConfigBase) is True
|
||||
|
||||
# Determine we have no more configuration entries loaded
|
||||
assert len(ac) == 0
|
||||
|
||||
#
|
||||
# Test buffer handling (and overflow)
|
||||
t = tmpdir.mkdir("buffer-handling").join("apprise")
|
||||
buf = "gnome://"
|
||||
t.write(buf)
|
||||
|
||||
# Reset our config object
|
||||
ac.clear()
|
||||
|
||||
# Create ourselves a config object
|
||||
ac = AppriseConfig(paths=str(t))
|
||||
|
||||
# update our length to be the size of our actual file
|
||||
ac[0].max_buffer_size = len(buf)
|
||||
|
||||
# One configuration file should have been found
|
||||
assert len(ac) == 1
|
||||
|
||||
assert len(ac.servers()) == 1
|
||||
|
||||
# update our buffer size to be slightly smaller than what we allow
|
||||
ac[0].max_buffer_size = len(buf) - 1
|
||||
|
||||
# Content is automatically cached; so even though we adjusted the buffer
|
||||
# above, our results have been cached so we still get 1 response.
|
||||
assert len(ac.servers()) == 1
|
||||
|
||||
# Now do the same check but force a flushed cache
|
||||
assert len(ac.servers(cache=False)) == 0
|
||||
|
||||
|
||||
def test_apprise_multi_config_entries(tmpdir):
|
||||
"""
|
||||
API: AppriseConfig basic multi-adding functionality
|
||||
|
||||
"""
|
||||
# temporary file to work with
|
||||
t = tmpdir.mkdir("apprise-multi-add").join("apprise")
|
||||
buf = """
|
||||
good://hostname
|
||||
"""
|
||||
t.write(buf)
|
||||
|
||||
# temporary empty file to work with
|
||||
te = tmpdir.join("apprise-multi-add", "apprise-empty")
|
||||
te.write("")
|
||||
|
||||
# Define our good:// url
|
||||
class GoodNotification(NotifyBase):
|
||||
def __init__(self, **kwargs):
|
||||
super(GoodNotification, self).__init__(
|
||||
notify_format=NotifyFormat.HTML, **kwargs)
|
||||
|
||||
def notify(self, **kwargs):
|
||||
# Pretend everything is okay
|
||||
return True
|
||||
|
||||
# Store our good notification in our schema map
|
||||
NOTIFY_SCHEMA_MAP['good'] = GoodNotification
|
||||
|
||||
# Create ourselves a config object
|
||||
ac = AppriseConfig()
|
||||
|
||||
# There are no servers loaded
|
||||
assert len(ac) == 0
|
||||
|
||||
# Support adding of multiple strings and objects:
|
||||
assert ac.add(configs=(str(t), str(t))) is True
|
||||
assert ac.add(configs=(
|
||||
ConfigFile(path=str(te)), ConfigFile(path=str(t)))) is True
|
||||
|
||||
# don't support the adding of invalid content
|
||||
assert ac.add(configs=(object(), object())) is False
|
||||
assert ac.add(configs=object()) is False
|
||||
|
||||
# Try to pop an element out of range
|
||||
try:
|
||||
ac.server_pop(len(ac.servers()))
|
||||
# We should have thrown an exception here
|
||||
assert False
|
||||
|
||||
except IndexError:
|
||||
# We expect to be here
|
||||
assert True
|
||||
|
||||
# Pop our elements
|
||||
while len(ac.servers()) > 0:
|
||||
assert isinstance(
|
||||
ac.server_pop(len(ac.servers()) - 1), NotifyBase) is True
|
||||
|
||||
|
||||
def test_apprise_config_tagging(tmpdir):
|
||||
"""
|
||||
API: AppriseConfig tagging
|
||||
|
||||
"""
|
||||
|
||||
# temporary file to work with
|
||||
t = tmpdir.mkdir("tagging").join("apprise")
|
||||
buf = "gnome://"
|
||||
t.write(buf)
|
||||
|
||||
# Create ourselves a config object
|
||||
ac = AppriseConfig()
|
||||
|
||||
# Add an item associated with tag a
|
||||
assert ac.add(configs=str(t), asset=AppriseAsset(), tag='a') is True
|
||||
# Add an item associated with tag b
|
||||
assert ac.add(configs=str(t), asset=AppriseAsset(), tag='b') is True
|
||||
# Add an item associated with tag a or b
|
||||
assert ac.add(configs=str(t), asset=AppriseAsset(), tag='a,b') is True
|
||||
|
||||
# Now filter: a:
|
||||
assert len(ac.servers(tag='a')) == 2
|
||||
# Now filter: a or b:
|
||||
assert len(ac.servers(tag='a,b')) == 3
|
||||
# Now filter: a and b
|
||||
assert len(ac.servers(tag=[('a', 'b')])) == 1
|
||||
|
||||
|
||||
def test_apprise_instantiate():
|
||||
"""
|
||||
API: AppriseConfig.instantiate()
|
||||
|
||||
"""
|
||||
assert AppriseConfig.instantiate(
|
||||
'file://?', suppress_exceptions=True) is None
|
||||
|
||||
assert AppriseConfig.instantiate(
|
||||
'invalid://?', suppress_exceptions=True) is None
|
||||
|
||||
class BadConfig(ConfigBase):
|
||||
def __init__(self, **kwargs):
|
||||
super(BadConfig, self).__init__(**kwargs)
|
||||
|
||||
# We fail whenever we're initialized
|
||||
raise TypeError()
|
||||
|
||||
# Store our bad configuration in our schema map
|
||||
CONFIG_SCHEMA_MAP['bad'] = BadConfig
|
||||
|
||||
try:
|
||||
AppriseConfig.instantiate(
|
||||
'bad://path', suppress_exceptions=False)
|
||||
# We should never make it to this line
|
||||
assert False
|
||||
|
||||
except TypeError:
|
||||
# Exception caught as expected
|
||||
assert True
|
||||
|
||||
# Same call but exceptions suppressed
|
||||
assert AppriseConfig.instantiate(
|
||||
'bad://path', suppress_exceptions=True) is None
|
||||
|
||||
|
||||
def test_apprise_config_with_apprise_obj(tmpdir):
|
||||
"""
|
||||
API: ConfigBase.parse_inaccessible_text_file
|
||||
|
||||
"""
|
||||
|
||||
# temporary file to work with
|
||||
t = tmpdir.mkdir("apprise-obj").join("apprise")
|
||||
buf = """
|
||||
good://hostname
|
||||
localhost=good://localhost
|
||||
"""
|
||||
t.write(buf)
|
||||
|
||||
# Define our good:// url
|
||||
class GoodNotification(NotifyBase):
|
||||
def __init__(self, **kwargs):
|
||||
super(GoodNotification, self).__init__(
|
||||
notify_format=NotifyFormat.HTML, **kwargs)
|
||||
|
||||
def notify(self, **kwargs):
|
||||
# Pretend everything is okay
|
||||
return True
|
||||
|
||||
# Store our good notification in our schema map
|
||||
NOTIFY_SCHEMA_MAP['good'] = GoodNotification
|
||||
|
||||
# Create ourselves a config object
|
||||
ac = AppriseConfig(cache=False)
|
||||
|
||||
# Nothing loaded yet
|
||||
assert len(ac) == 0
|
||||
|
||||
# Add an item associated with tag a
|
||||
assert ac.add(configs=str(t), asset=AppriseAsset(), tag='a') is True
|
||||
|
||||
# One configuration file
|
||||
assert len(ac) == 1
|
||||
|
||||
# 2 services found in it
|
||||
assert len(ac.servers()) == 2
|
||||
|
||||
# Pop one of them (at index 0)
|
||||
ac.server_pop(0)
|
||||
|
||||
# Verify that it is no longer listed
|
||||
assert len(ac.servers()) == 1
|
||||
|
||||
# Test our ability to add Config objects to our apprise object
|
||||
a = Apprise()
|
||||
|
||||
# Add our configuration object
|
||||
assert a.add(servers=ac) is True
|
||||
|
||||
# Detect our 1 entry (originally there were 2 but we deleted one)
|
||||
assert len(a) == 1
|
||||
|
||||
# Notify our service
|
||||
assert a.notify(body='apprise configuration power!') is True
|
||||
|
||||
# Add our configuration object
|
||||
assert a.add(
|
||||
servers=[AppriseConfig(str(t)), AppriseConfig(str(t))]) is True
|
||||
|
||||
# Detect our 5 loaded entries now; 1 from first config, and another
|
||||
# 2x2 based on adding our list above
|
||||
assert len(a) == 5
|
||||
|
||||
# We can't add garbage
|
||||
assert a.add(servers=object()) is False
|
||||
assert a.add(servers=[object(), object()]) is False
|
||||
|
||||
# Our length is unchanged
|
||||
assert len(a) == 5
|
||||
|
||||
# reference index 0 of our list
|
||||
ref = a[0]
|
||||
assert isinstance(ref, NotifyBase) is True
|
||||
|
||||
# Our length is unchanged
|
||||
assert len(a) == 5
|
||||
|
||||
# pop the index
|
||||
ref_popped = a.pop(0)
|
||||
|
||||
# Verify our response
|
||||
assert isinstance(ref_popped, NotifyBase) is True
|
||||
|
||||
# Our length drops by 1
|
||||
assert len(a) == 4
|
||||
|
||||
# Content popped is the same as one referenced by index
|
||||
# earlier
|
||||
assert ref == ref_popped
|
||||
|
||||
# pop an index out of range
|
||||
try:
|
||||
a.pop(len(a))
|
||||
# We'll throw an IndexError and not make it this far
|
||||
assert False
|
||||
|
||||
except IndexError:
|
||||
# As expected
|
||||
assert True
|
||||
|
||||
# Our length remains unchanged
|
||||
assert len(a) == 4
|
||||
|
||||
# Reference content out of range
|
||||
try:
|
||||
a[len(a)]
|
||||
|
||||
# We'll throw an IndexError and not make it this far
|
||||
assert False
|
||||
|
||||
except IndexError:
|
||||
# As expected
|
||||
assert True
|
||||
|
||||
# reference index at the end of our list
|
||||
ref = a[len(a) - 1]
|
||||
|
||||
# Verify our response
|
||||
assert isinstance(ref, NotifyBase) is True
|
||||
|
||||
# Our length stays the same
|
||||
assert len(a) == 4
|
||||
|
||||
# We can pop from the back of the list without a problem too
|
||||
ref_popped = a.pop(len(a) - 1)
|
||||
|
||||
# Verify our response
|
||||
assert isinstance(ref_popped, NotifyBase) is True
|
||||
|
||||
# Content popped is the same as one referenced by index
|
||||
# earlier
|
||||
assert ref == ref_popped
|
||||
|
||||
# Our length drops by 1
|
||||
assert len(a) == 3
|
||||
|
||||
# Now we'll test adding another element to the list so that it mixes up
|
||||
# our response object.
|
||||
# Below we add 3 different types, a ConfigBase, NotifyBase, and URL
|
||||
assert a.add(
|
||||
servers=[
|
||||
ConfigFile(path=(str(t))),
|
||||
'good://another.host',
|
||||
GoodNotification(**{'host': 'nuxref.com'})]) is True
|
||||
|
||||
# Our length increases by 4 (2 entries in the config file, + 2 others)
|
||||
assert len(a) == 7
|
||||
|
||||
# reference index at the end of our list
|
||||
ref = a[len(a) - 1]
|
||||
|
||||
# Verify our response
|
||||
assert isinstance(ref, NotifyBase) is True
|
||||
|
||||
# We can pop from the back of the list without a problem too
|
||||
ref_popped = a.pop(len(a) - 1)
|
||||
|
||||
# Verify our response
|
||||
assert isinstance(ref_popped, NotifyBase) is True
|
||||
|
||||
# Content popped is the same as one referenced by index
|
||||
# earlier
|
||||
assert ref == ref_popped
|
||||
|
||||
# Our length drops by 1
|
||||
assert len(a) == 6
|
||||
|
||||
# pop our list
|
||||
while len(a) > 0:
|
||||
assert isinstance(a.pop(len(a) - 1), NotifyBase) is True
|
||||
|
||||
|
||||
def test_apprise_config_matrix_load():
|
||||
"""
|
||||
API: AppriseConfig() matrix initialization
|
||||
|
||||
"""
|
||||
|
||||
import apprise
|
||||
|
||||
class ConfigDummy(ConfigBase):
|
||||
"""
|
||||
A dummy wrapper for testing the different options in the load_matrix
|
||||
function
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'dummy'
|
||||
|
||||
# protocol as tuple
|
||||
protocol = ('uh', 'oh')
|
||||
|
||||
# secure protocol as tuple
|
||||
secure_protocol = ('no', 'yes')
|
||||
|
||||
class ConfigDummy2(ConfigBase):
|
||||
"""
|
||||
A dummy wrapper for testing the different options in the load_matrix
|
||||
function
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'dummy2'
|
||||
|
||||
# secure protocol as tuple
|
||||
secure_protocol = ('true', 'false')
|
||||
|
||||
class ConfigDummy3(ConfigBase):
|
||||
"""
|
||||
A dummy wrapper for testing the different options in the load_matrix
|
||||
function
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'dummy3'
|
||||
|
||||
# secure protocol as string
|
||||
secure_protocol = 'true'
|
||||
|
||||
class ConfigDummy4(ConfigBase):
|
||||
"""
|
||||
A dummy wrapper for testing the different options in the load_matrix
|
||||
function
|
||||
"""
|
||||
|
||||
# The default descriptive name associated with the Notification
|
||||
service_name = 'dummy4'
|
||||
|
||||
# protocol as string
|
||||
protocol = 'true'
|
||||
|
||||
# Generate ourselves a fake entry
|
||||
apprise.config.ConfigDummy = ConfigDummy
|
||||
apprise.config.ConfigDummy2 = ConfigDummy2
|
||||
apprise.config.ConfigDummy3 = ConfigDummy3
|
||||
apprise.config.ConfigDummy4 = ConfigDummy4
|
||||
|
||||
__load_matrix()
|
||||
|
||||
# Call it again so we detect our entries already loaded
|
||||
__load_matrix()
|
||||
|
||||
|
||||
@mock.patch('os.path.getsize')
|
||||
def test_config_base_parse_inaccessible_text_file(mock_getsize, tmpdir):
|
||||
"""
|
||||
API: ConfigBase.parse_inaccessible_text_file
|
||||
|
||||
"""
|
||||
|
||||
# temporary file to work with
|
||||
t = tmpdir.mkdir("inaccessible").join("apprise")
|
||||
buf = "gnome://"
|
||||
t.write(buf)
|
||||
|
||||
# Set getsize return value
|
||||
mock_getsize.return_value = None
|
||||
mock_getsize.side_effect = OSError
|
||||
|
||||
# Create ourselves a config object
|
||||
ac = AppriseConfig(paths=str(t))
|
||||
|
||||
# The following internally throws an exception but still counts
|
||||
# as a loaded configuration file
|
||||
assert len(ac) == 1
|
||||
|
||||
# Thus no notifications are loaded
|
||||
assert len(ac.servers()) == 0
|
||||
|
||||
|
||||
def test_config_base_parse_yaml_file(tmpdir):
|
||||
"""
|
||||
API: ConfigBase.parse_yaml_file
|
||||
|
||||
"""
|
||||
t = tmpdir.mkdir("empty-file").join("apprise.yml")
|
||||
t.write("")
|
||||
|
||||
# Create ourselves a config object
|
||||
ac = AppriseConfig(paths=str(t))
|
||||
|
||||
# The number of configuration files that exist
|
||||
assert len(ac) == 1
|
||||
|
||||
# no notifications are loaded
|
||||
assert len(ac.servers()) == 0
|
@ -27,26 +27,30 @@ from __future__ import print_function
|
||||
from apprise import cli
|
||||
from apprise import NotifyBase
|
||||
from click.testing import CliRunner
|
||||
from apprise.Apprise import SCHEMA_MAP
|
||||
from apprise.plugins import SCHEMA_MAP
|
||||
|
||||
# Disable logging for a cleaner testing output
|
||||
import logging
|
||||
logging.disable(logging.CRITICAL)
|
||||
|
||||
|
||||
def test_apprise_cli():
|
||||
def test_apprise_cli(tmpdir):
|
||||
"""
|
||||
API: Apprise() CLI
|
||||
|
||||
"""
|
||||
|
||||
class GoodNotification(NotifyBase):
|
||||
def __init__(self, **kwargs):
|
||||
super(GoodNotification, self).__init__()
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(GoodNotification, self).__init__(*args, **kwargs)
|
||||
|
||||
def notify(self, **kwargs):
|
||||
# Pretend everything is okay
|
||||
return True
|
||||
|
||||
class BadNotification(NotifyBase):
|
||||
def __init__(self, **kwargs):
|
||||
super(BadNotification, self).__init__()
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(BadNotification, self).__init__(*args, **kwargs)
|
||||
|
||||
def notify(self, **kwargs):
|
||||
# Pretend everything is okay
|
||||
@ -70,6 +74,10 @@ def test_apprise_cli():
|
||||
result = runner.invoke(cli.main, ['-vvv'])
|
||||
assert result.exit_code == 1
|
||||
|
||||
# Display version information and exit
|
||||
result = runner.invoke(cli.main, ['-V'])
|
||||
assert result.exit_code == 0
|
||||
|
||||
result = runner.invoke(cli.main, [
|
||||
'-t', 'test title',
|
||||
'-b', 'test body',
|
||||
@ -89,3 +97,52 @@ def test_apprise_cli():
|
||||
'bad://localhost',
|
||||
])
|
||||
assert result.exit_code == 1
|
||||
|
||||
# Write a simple text based configuration file
|
||||
t = tmpdir.mkdir("apprise-obj").join("apprise")
|
||||
buf = """
|
||||
taga,tagb=good://localhost
|
||||
tagc=good://nuxref.com
|
||||
"""
|
||||
t.write(buf)
|
||||
|
||||
# This will read our configuration and send 2 notices to
|
||||
# each of the above defined good:// entries
|
||||
result = runner.invoke(cli.main, [
|
||||
'-b', 'test config',
|
||||
'--config', str(t),
|
||||
])
|
||||
assert result.exit_code == 0
|
||||
|
||||
# This will send out 1 notification because our tag matches
|
||||
# one of the entries above
|
||||
# translation: has taga
|
||||
result = runner.invoke(cli.main, [
|
||||
'-b', 'has taga',
|
||||
'--config', str(t),
|
||||
'--tag', 'taga',
|
||||
])
|
||||
assert result.exit_code == 0
|
||||
|
||||
# This will send out 0 notification because our tag requests that we meet
|
||||
# at least 2 tags associated with the same notification service (which
|
||||
# isn't the case above)
|
||||
# translation: has taga AND tagd
|
||||
result = runner.invoke(cli.main, [
|
||||
'-b', 'has taga AND tagd',
|
||||
'--config', str(t),
|
||||
'--tag', 'taga,tagd',
|
||||
])
|
||||
assert result.exit_code == 0
|
||||
|
||||
# This will send out 2 notifications because by specifying multiple tag
|
||||
# entries, we 'or' them together:
|
||||
# translation: has taga or tagc or tagd
|
||||
result = runner.invoke(cli.main, [
|
||||
'-b', 'has taga OR tagc OR tagd',
|
||||
'--config', str(t),
|
||||
'--tag', 'taga',
|
||||
'--tag', 'tagc',
|
||||
'--tag', 'tagd',
|
||||
])
|
||||
assert result.exit_code == 0
|
||||
|
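The CLI test above also documents the tag grammar on the command line; the sketch below mirrors it programmatically through Click's test runner (the configuration path is a placeholder):

```python
# Sketch of the CLI tag logic exercised above; the path is a placeholder.
from click.testing import CliRunner
from apprise import cli

runner = CliRunner()

# Comma-joined tags inside a single --tag are and'ed together ...
runner.invoke(cli.main, [
    '-b', 'needs both tags',
    '--config', '/path/to/apprise.cfg',
    '--tag', 'taga,tagc',
])

# ... while repeating --tag or's the groups together
runner.invoke(cli.main, [
    '-b', 'needs either tag',
    '--config', '/path/to/apprise.cfg',
    '--tag', 'taga',
    '--tag', 'tagc',
])
```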
593
test/test_config_base.py
Normal file
@ -0,0 +1,593 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import six
|
||||
from apprise.AppriseAsset import AppriseAsset
|
||||
from apprise.config.ConfigBase import ConfigBase
|
||||
|
||||
# Disable logging for a cleaner testing output
|
||||
import logging
|
||||
logging.disable(logging.CRITICAL)
|
||||
|
||||
|
||||
def test_config_base():
|
||||
"""
|
||||
API: ConfigBase() object
|
||||
|
||||
"""
|
||||
|
||||
# invalid types throw exceptions
|
||||
try:
|
||||
ConfigBase(**{'format': 'invalid'})
|
||||
# We should never reach here as an exception should be thrown
|
||||
assert(False)
|
||||
|
||||
except TypeError:
|
||||
assert(True)
|
||||
|
||||
# Notify format types are not the same as ConfigBase ones
|
||||
try:
|
||||
ConfigBase(**{'format': 'markdown'})
|
||||
# We should never reach here as an exception should be thrown
|
||||
assert(False)
|
||||
|
||||
except TypeError:
|
||||
assert(True)
|
||||
|
||||
cb = ConfigBase(**{'format': 'yaml'})
|
||||
assert isinstance(cb, ConfigBase)
|
||||
|
||||
cb = ConfigBase(**{'format': 'text'})
|
||||
assert isinstance(cb, ConfigBase)
|
||||
|
||||
# Set encoding
|
||||
cb = ConfigBase(encoding='utf-8', format='text')
|
||||
assert isinstance(cb, ConfigBase)
|
||||
|
||||
# read is not supported in the base object; only the children
|
||||
assert cb.read() is None
|
||||
|
||||
# There are no servers loaded on a freshly created object
|
||||
assert len(cb.servers()) == 0
|
||||
|
||||
# Unsupported URLs are not parsed
|
||||
assert ConfigBase.parse_url(url='invalid://') is None
|
||||
|
||||
# Valid URL & Valid Format
|
||||
results = ConfigBase.parse_url(
|
||||
url='file://relative/path?format=yaml&encoding=latin-1')
|
||||
assert isinstance(results, dict)
|
||||
# These are moved into the root
|
||||
assert results.get('format') == 'yaml'
|
||||
assert results.get('encoding') == 'latin-1'
|
||||
|
||||
# But they also exist in the qsd location
|
||||
assert isinstance(results.get('qsd'), dict)
|
||||
assert results['qsd'].get('encoding') == 'latin-1'
|
||||
assert results['qsd'].get('format') == 'yaml'
|
||||
|
||||
# Valid URL & Invalid Format
|
||||
results = ConfigBase.parse_url(
|
||||
url='file://relative/path?format=invalid&encoding=latin-1')
|
||||
assert isinstance(results, dict)
|
||||
# Only encoding is moved into the root
|
||||
assert 'format' not in results
|
||||
assert results.get('encoding') == 'latin-1'
|
||||
|
||||
# But they will always exist in the qsd location
|
||||
assert isinstance(results.get('qsd'), dict)
|
||||
assert results['qsd'].get('encoding') == 'latin-1'
|
||||
assert results['qsd'].get('format') == 'invalid'
|
||||
|
||||
|
||||
def test_config_base_config_parse_text():
|
||||
"""
|
||||
API: ConfigBase.config_parse_text object
|
||||
|
||||
"""
|
||||
|
||||
# Garbage Handling
|
||||
assert isinstance(ConfigBase.config_parse_text(object()), list)
|
||||
assert isinstance(ConfigBase.config_parse_text(None), list)
|
||||
assert isinstance(ConfigBase.config_parse_text(''), list)
|
||||
|
||||
# Valid Configuration
|
||||
result = ConfigBase.config_parse_text("""
|
||||
# A comment line over top of a URL
|
||||
mailto://userb:pass@gmail.com
|
||||
|
||||
# A line with multiple tag assignments to it
|
||||
taga,tagb=kde://
|
||||
""", asset=AppriseAsset())
|
||||
|
||||
# We expect to parse 2 entries from the above
|
||||
assert isinstance(result, list)
|
||||
assert len(result) == 2
|
||||
assert len(result[0].tags) == 0
|
||||
|
||||
# Our second element will have tags associated with it
|
||||
assert len(result[1].tags) == 2
|
||||
assert 'taga' in result[1].tags
|
||||
assert 'tagb' in result[1].tags
|
||||
|
||||
# Here is a similar result set however this one has an invalid line
|
||||
# in it which invalidates the entire file
|
||||
result = ConfigBase.config_parse_text("""
|
||||
# A comment line over top of a URL
|
||||
mailto://userc:pass@gmail.com
|
||||
|
||||
# A line with multiple tag assignments to it
|
||||
taga,tagb=windows://
|
||||
|
||||
I am an invalid line that does not follow any of the Apprise file rules!
|
||||
""")
|
||||
|
||||
# We expect to parse 0 entries from the above
|
||||
assert isinstance(result, list)
|
||||
assert len(result) == 0
|
||||
|
||||
# More invalid data
|
||||
result = ConfigBase.config_parse_text("""
|
||||
# An invalid URL
|
||||
invalid://user:pass@gmail.com
|
||||
|
||||
# A tag without a url
|
||||
taga=
|
||||
|
||||
# A very poorly structured url
|
||||
sns://:@/
|
||||
|
||||
# Just 1 token provided
|
||||
sns://T1JJ3T3L2/
|
||||
""")
|
||||
|
||||
# We expect to parse 0 entries from the above
|
||||
assert isinstance(result, list)
|
||||
assert len(result) == 0
|
||||
|
||||
# Here is an empty file
|
||||
result = ConfigBase.config_parse_text('')
|
||||
|
||||
# We expect to parse 0 entries from the above
|
||||
assert isinstance(result, list)
|
||||
assert len(result) == 0
|
||||
|
||||
|
||||
def test_config_base_config_parse_yaml():
|
||||
"""
|
||||
API: ConfigBase.config_parse_yaml object
|
||||
|
||||
"""
|
||||
|
||||
# general reference used below
|
||||
asset = AppriseAsset()
|
||||
|
||||
# Garbage Handling
|
||||
assert isinstance(ConfigBase.config_parse_yaml(object()), list)
|
||||
assert isinstance(ConfigBase.config_parse_yaml(None), list)
|
||||
assert isinstance(ConfigBase.config_parse_yaml(''), list)
|
||||
|
||||
# Invalid Version
|
||||
result = ConfigBase.config_parse_yaml("version: 2a", asset=asset)
|
||||
|
||||
# Invalid data gets us an empty result set
|
||||
assert isinstance(result, list)
|
||||
assert len(result) == 0
|
||||
|
||||
# Invalid Syntax (throws a ScannerError)
|
||||
result = ConfigBase.config_parse_yaml("""
|
||||
# if no version is specified then version 1 is presumed
|
||||
version: 1
|
||||
|
||||
urls
|
||||
""", asset=asset)
|
||||
|
||||
# Invalid data gets us an empty result set
|
||||
assert isinstance(result, list)
|
||||
assert len(result) == 0
|
||||
|
||||
# Missing url token
|
||||
result = ConfigBase.config_parse_yaml("""
|
||||
# if no version is specified then version 1 is presumed
|
||||
version: 1
|
||||
|
||||
""", asset=asset)
|
||||
|
||||
# Invalid data gets us an empty result set
|
||||
assert isinstance(result, list)
|
||||
assert len(result) == 0
|
||||
|
||||
# No urls defined
|
||||
result = ConfigBase.config_parse_yaml("""
|
||||
# if no version is specified then version 1 is presumed
|
||||
version: 1
|
||||
|
||||
urls:
|
||||
""", asset=asset)
|
||||
|
||||
# Invalid data gets us an empty result set
|
||||
assert isinstance(result, list)
|
||||
assert len(result) == 0
|
||||
|
||||
# Invalid url defined
|
||||
result = ConfigBase.config_parse_yaml("""
|
||||
# if no version is specified then version 1 is presumed
|
||||
version: 1
|
||||
|
||||
# Invalid URL definition; yet the answer to life at the same time
|
||||
urls: 43
|
||||
""", asset=asset)
|
||||
|
||||
# Invalid data gets us an empty result set
|
||||
assert isinstance(result, list)
|
||||
assert len(result) == 0
|
||||
|
||||
# Invalid url/schema
|
||||
result = ConfigBase.config_parse_yaml("""
|
||||
# if no version is specified then version 1 is presumed
|
||||
version: 1
|
||||
|
||||
urls:
|
||||
- invalid://
|
||||
|
||||
""", asset=asset)
|
||||
|
||||
# Invalid data gets us an empty result set
|
||||
assert isinstance(result, list)
|
||||
assert len(result) == 0
|
||||
|
||||
# Invalid url/schema
|
||||
result = ConfigBase.config_parse_yaml("""
|
||||
# if no version is specified then version 1 is presumed
|
||||
version: 1
|
||||
|
||||
urls:
|
||||
- invalid://:
|
||||
- a: b
|
||||
|
||||
""", asset=asset)
|
||||
|
||||
# Invalid data gets us an empty result set
|
||||
assert isinstance(result, list)
|
||||
assert len(result) == 0
|
||||
|
||||
# Invalid url/schema
|
||||
result = ConfigBase.config_parse_yaml("""
|
||||
urls:
|
||||
- just some free text that isn't valid:
|
||||
- a garbage entry to go with it
|
||||
|
||||
""", asset=asset)
|
||||
|
||||
# Invalid data gets us an empty result set
|
||||
assert isinstance(result, list)
|
||||
assert len(result) == 0
|
||||
|
||||
# Invalid url/schema
|
||||
result = ConfigBase.config_parse_yaml("""
|
||||
# if no version is specified then version 1 is presumed
|
||||
version: 1
|
||||
|
||||
urls:
|
||||
- not even a proper url
|
||||
|
||||
""", asset=asset)
|
||||
|
||||
# Invalid data gets us an empty result set
|
||||
assert isinstance(result, list)
|
||||
assert len(result) == 0
|
||||
|
||||
# Invalid url/schema
|
||||
result = ConfigBase.config_parse_yaml("""
|
||||
# no lists... just no
|
||||
urls: [milk, pumpkin pie, eggs, juice]
|
||||
|
||||
""", asset=asset)
|
||||
|
||||
# Invalid data gets us an empty result set
|
||||
assert isinstance(result, list)
|
||||
assert len(result) == 0
|
||||
|
||||
# Invalid url/schema
|
||||
result = ConfigBase.config_parse_yaml("""
|
||||
urls:
|
||||
# a very invalid sns entry
|
||||
- sns://T1JJ3T3L2/
|
||||
- sns://:@/:
|
||||
- invalid: test
|
||||
- sns://T1JJ3T3L2/:
|
||||
- invalid: test
|
||||
|
||||
# some strangeness
|
||||
-
|
||||
-
|
||||
- test
|
||||
|
||||
""", asset=asset)
|
||||
|
||||
# Invalid data gets us an empty result set
|
||||
assert isinstance(result, list)
|
||||
assert len(result) == 0
|
||||
|
||||
# Valid Configuration
|
||||
result = ConfigBase.config_parse_yaml("""
|
||||
# if no version is specified then version 1 is presumed
|
||||
version: 1
|
||||
|
||||
#
|
||||
# Define your notification urls:
|
||||
#
|
||||
urls:
|
||||
- pbul://o.gn5kj6nfhv736I7jC3cj3QLRiyhgl98b
|
||||
- mailto://test:password@gmail.com
|
||||
""", asset=asset)
|
||||
|
||||
# We expect to parse 2 entries from the above
|
||||
assert isinstance(result, list)
|
||||
assert len(result) == 2
|
||||
assert len(result[0].tags) == 0
|
||||
|
||||
# Valid Configuration
|
||||
result = ConfigBase.config_parse_yaml("""
|
||||
urls:
|
||||
- json://localhost:
|
||||
- tag: my-custom-tag, my-other-tag
|
||||
|
||||
# How to stack multiple entries:
|
||||
- mailto://:
|
||||
- user: jeff
|
||||
pass: 123abc
|
||||
from: jeff@yahoo.ca
|
||||
|
||||
- user: jack
|
||||
pass: pass123
|
||||
from: jack@hotmail.com
|
||||
|
||||
# This is an illegal entry; the schema can not be changed
|
||||
schema: json
|
||||
|
||||
# accidentally left a colon at the end of the url; no problem
|
||||
# we'll accept it
|
||||
- mailto://oscar:pass@gmail.com:
|
||||
|
||||
# A telegram entry (returns a None in parse_url())
|
||||
- tgram://invalid
|
||||
|
||||
""", asset=asset)
|
||||
|
||||
# We expect to parse 4 entries from the above because the tgram:// entry
|
||||
# would have failed to be loaded
|
||||
assert isinstance(result, list)
|
||||
assert len(result) == 4
|
||||
assert len(result[0].tags) == 2
|
||||
|
||||
# Global Tags
|
||||
result = ConfigBase.config_parse_yaml("""
|
||||
# Global Tags stacked as a list
|
||||
tag:
|
||||
- admin
|
||||
- devops
|
||||
|
||||
urls:
|
||||
- json://localhost
|
||||
- dbus://
|
||||
""", asset=asset)
|
||||
|
||||
# We expect to parse 2 entries from the above
|
||||
assert isinstance(result, list)
|
||||
assert len(result) == 2
|
||||
|
||||
# all entries will have our global tags defined in them
|
||||
for entry in result:
|
||||
assert 'admin' in entry.tags
|
||||
assert 'devops' in entry.tags
|
||||
|
||||
# Global Tags
|
||||
result = ConfigBase.config_parse_yaml("""
|
||||
# Global Tags
|
||||
tag: admin, devops
|
||||
|
||||
urls:
|
||||
# The following tags will get added to the global set
|
||||
- json://localhost:
|
||||
- tag: string-tag, my-other-tag, text
|
||||
|
||||
# Tags can be presented in this list format too:
|
||||
- dbus://:
|
||||
- tag:
|
||||
- list-tag
|
||||
- dbus
|
||||
""", asset=asset)
|
||||
|
||||
# all entries will have our global tags defined in them
|
||||
for entry in result:
|
||||
assert 'admin' in entry.tags
|
||||
assert 'devops' in entry.tags
|
||||
|
||||
# We expect to parse 2 entries from the above
|
||||
assert isinstance(result, list)
|
||||
assert len(result) == 2
|
||||
|
||||
# json:// has 2 globals + 3 defined
|
||||
assert len(result[0].tags) == 5
|
||||
assert 'text' in result[0].tags
|
||||
|
||||
# json:// has 2 globals + 2 defined
|
||||
assert len(result[1].tags) == 4
|
||||
assert 'list-tag' in result[1].tags
|
||||
|
||||
# An invalid set of entries
|
||||
result = ConfigBase.config_parse_yaml("""
|
||||
urls:
|
||||
# The following tags will get added to the global set
|
||||
- json://localhost:
|
||||
-
|
||||
-
|
||||
- entry
|
||||
""", asset=asset)
|
||||
|
||||
# We expect to parse 0 entries from the above
|
||||
assert isinstance(result, list)
|
||||
assert len(result) == 0
|
||||
|
||||
# An asset we'll manipulate
|
||||
asset = AppriseAsset()
|
||||
|
||||
# Global Tags
|
||||
result = ConfigBase.config_parse_yaml("""
|
||||
# Test the creation of our apprise asset object
|
||||
asset:
|
||||
app_id: AppriseTest
|
||||
app_desc: Apprise Test Notifications
|
||||
app_url: http://nuxref.com
|
||||
|
||||
# Support setting empty values
|
||||
image_url_mask:
|
||||
image_url_logo:
|
||||
|
||||
image_path_mask: tmp/path
|
||||
|
||||
# invalid entry
|
||||
theme:
|
||||
-
|
||||
-
|
||||
- entry
|
||||
|
||||
# Now for some invalid entries
|
||||
invalid: entry
|
||||
__init__: can't be over-ridden
|
||||
nolists:
|
||||
- we don't support these entries
|
||||
- in the apprise object
|
||||
|
||||
urls:
|
||||
- json://localhost:
|
||||
""", asset=asset)
|
||||
|
||||
# We expect to parse 1 entry from the above
|
||||
assert isinstance(result, list)
|
||||
assert len(result) == 1
|
||||
assert asset.app_id == "AppriseTest"
|
||||
assert asset.app_desc == "Apprise Test Notifications"
|
||||
assert asset.app_url == "http://nuxref.com"
|
||||
|
||||
# the theme was not updated and remains the same as it was
|
||||
assert asset.theme == AppriseAsset().theme
|
||||
|
||||
# Empty string assignment
|
||||
assert isinstance(asset.image_url_mask, six.string_types) is True
|
||||
assert asset.image_url_mask == ""
|
||||
assert isinstance(asset.image_url_logo, six.string_types) is True
|
||||
assert asset.image_url_logo == ""
|
||||
|
||||
# For on-lookers looking through this file; here is a perfectly formatted
|
||||
# YAML configuration file for your reference so you can see it without
|
||||
# all of the errors like the ones identified above
|
||||
result = ConfigBase.config_parse_yaml("""
|
||||
# if no version is specified then version 1 is presumed. Thus this is a
|
||||
# completely optional field. It's a good idea to just add this line because it
|
||||
# will help with future ambiguity (if it ever occurs).
|
||||
version: 1
|
||||
|
||||
# Define an Asset object if you wish (Optional)
|
||||
asset:
|
||||
app_id: AppriseTest
|
||||
app_desc: Apprise Test Notifications
|
||||
app_url: http://nuxref.com
|
||||
|
||||
# Optionally define some global tags to associate with ALL of your
|
||||
# urls below.
|
||||
tag: admin, devops
|
||||
|
||||
# Define your URLs (Mandatory!)
|
||||
urls:
|
||||
# Either on-line each entry like this:
|
||||
- json://localhost
|
||||
|
||||
# Or add a colon to the end of the URL where you can optionally provide
|
||||
# over-ride entries. One of the most likely entries to be used here
|
||||
# is the tag entry. This gets extended to the global tag (if defined)
|
||||
# above
|
||||
- xml://localhost:
|
||||
- tag: customer
|
||||
|
||||
# The more elements you specify under a URL the more times the URL will
|
||||
# get replicated and used. Hence this entry actually could be considered
|
||||
# 2 URLs being called with just the destination email address changed:
|
||||
- mailto://george:password@gmail.com:
|
||||
- to: jason@hotmail.com
|
||||
- to: fred@live.com
|
||||
|
||||
# Again... to re-iterate, the above mailto:// would actually fire two (2)
|
||||
# separate emails each with a different destination address specified.
|
||||
# Be careful when defining your arguments and differentiating between
|
||||
# when to use the dash (-) and when not to. Each time you do, you will
|
||||
# cause another instance to be created.
|
||||
|
||||
# Defining more than 1 element to a multi-set is easy; it looks like this:
|
||||
- mailto://jackson:abc123@hotmail.com:
|
||||
- to: jeff@gmail.com
|
||||
tag: jeff, customer
|
||||
|
||||
- to: chris@yahoo.com
|
||||
tag: chris, customer
|
||||
""", asset=asset)
|
||||
|
||||
# okay, here is how we get our total based on the above (read top-down)
|
||||
# +1 json:// entry
|
||||
# +1 xml:// entry
|
||||
# +2 mailto:// entry to jason@hotmail.com and fred@live.com
|
||||
# +2 mailto:// entry to jeff@gmail.com and chris@yahoo.com
|
||||
# = 6
|
||||
assert len(result) == 6
|
||||
|
||||
# all six entries will have our global tags defined in them
|
||||
for entry in result:
|
||||
assert 'admin' in entry.tags
|
||||
assert 'devops' in entry.tags
|
||||
|
||||
# Entries can be directly accessed as they were added
|
||||
|
||||
# our json:// had no additional tags added; so just the global ones
|
||||
# So just 2; admin and devops (these were already validated above in the
|
||||
# for loop)
|
||||
assert len(result[0].tags) == 2
|
||||
|
||||
# our xml:// object has 1 tag added (customer)
|
||||
assert len(result[1].tags) == 3
|
||||
assert 'customer' in result[1].tags
|
||||
|
||||
# You get the idea, here is just a direct mapping to the remaining entries
|
||||
# in the same order they appear above
|
||||
assert len(result[2].tags) == 2
|
||||
assert len(result[3].tags) == 2
|
||||
|
||||
assert len(result[4].tags) == 4
|
||||
assert 'customer' in result[4].tags
|
||||
assert 'jeff' in result[4].tags
|
||||
|
||||
assert len(result[5].tags) == 4
|
||||
assert 'customer' in result[5].tags
|
||||
assert 'chris' in result[5].tags
|
104
test/test_config_file.py
Normal file
@ -0,0 +1,104 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import six
|
||||
import mock
|
||||
from apprise.config.ConfigFile import ConfigFile
|
||||
from apprise.plugins.NotifyBase import NotifyBase
|
||||
|
||||
# Disable logging for a cleaner testing output
|
||||
import logging
|
||||
logging.disable(logging.CRITICAL)
|
||||
|
||||
|
||||
def test_config_file(tmpdir):
|
||||
"""
|
||||
API: ConfigFile() object
|
||||
|
||||
"""
|
||||
|
||||
assert ConfigFile.parse_url('garbage://') is None
|
||||
|
||||
# Test cases where our URL is invalid
|
||||
t = tmpdir.mkdir("testing").join("apprise")
|
||||
t.write("gnome://")
|
||||
|
||||
assert ConfigFile.parse_url('file://?'.format(str(t))) is None
|
||||
|
||||
# Initialize our object
|
||||
cf = ConfigFile(path=str(t), format='text')
|
||||
|
||||
# one entry added
|
||||
assert len(cf) == 1
|
||||
|
||||
assert isinstance(cf.url(), six.string_types) is True
|
||||
|
||||
# Testing of pop
|
||||
cf = ConfigFile(path=str(t), format='text')
|
||||
|
||||
ref = cf[0]
|
||||
assert isinstance(ref, NotifyBase) is True
|
||||
|
||||
ref_popped = cf.pop(0)
|
||||
assert isinstance(ref_popped, NotifyBase) is True
|
||||
|
||||
assert ref == ref_popped
|
||||
|
||||
assert len(cf) == 0
|
||||
|
||||
# reference to calls on initial reference
|
||||
cf = ConfigFile(path=str(t), format='text')
|
||||
assert isinstance(cf.pop(0), NotifyBase) is True
|
||||
|
||||
cf = ConfigFile(path=str(t), format='text')
|
||||
assert isinstance(cf[0], NotifyBase) is True
|
||||
# Second reference actually uses cache
|
||||
assert isinstance(cf[0], NotifyBase) is True
|
||||
|
||||
cf = ConfigFile(path=str(t), format='text')
|
||||
# Iterator creation (nothing needed to assert here)
|
||||
iter(cf)
|
||||
# Second reference actually uses cache
|
||||
iter(cf)
|
||||
|
||||
|
||||
@mock.patch('io.open')
|
||||
def test_config_file_exceptions(mock_open, tmpdir):
|
||||
"""
|
||||
API: ConfigFile() i/o exception handling
|
||||
|
||||
"""
|
||||
|
||||
# Test cases where our URL is invalid
|
||||
t = tmpdir.mkdir("testing").join("apprise")
|
||||
t.write("gnome://")
|
||||
|
||||
mock_open.side_effect = OSError
|
||||
|
||||
# Initialize our object
|
||||
cf = ConfigFile(path=str(t), format='text')
|
||||
|
||||
# Internal Exception would have been thrown and this would fail
|
||||
assert cf.read() is None
|
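For reference, a small sketch of the ConfigFile object exercised above, with a placeholder path; `format='text'` forces the TEXT parser the same way the tests do:

```python
# Sketch only: the path below is a placeholder.
from apprise.config.ConfigFile import ConfigFile

cf = ConfigFile(path='/path/to/apprise.cfg', format='text')

# read() returns the raw file content, or None on an I/O error
content = cf.read()

# Indexing/iterating yields the instantiated notification services
for service in cf:
    print(service.url())
```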
213
test/test_config_http.py
Normal file
@ -0,0 +1,213 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This code is licensed under the MIT License.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files(the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions :
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import six
|
||||
import mock
|
||||
import requests
|
||||
from apprise.common import ConfigFormat
|
||||
from apprise.config.ConfigHTTP import ConfigHTTP
|
||||
from apprise.plugins.NotifyBase import NotifyBase
|
||||
from apprise.plugins import SCHEMA_MAP
|
||||
|
||||
# Disable logging for a cleaner testing output
|
||||
import logging
|
||||
logging.disable(logging.CRITICAL)
|
||||
|
||||
|
||||
# Some exception handling we'll use
|
||||
REQUEST_EXCEPTIONS = (
|
||||
requests.ConnectionError(
|
||||
0, 'requests.ConnectionError() not handled'),
|
||||
requests.RequestException(
|
||||
0, 'requests.RequestException() not handled'),
|
||||
requests.HTTPError(
|
||||
0, 'requests.HTTPError() not handled'),
|
||||
requests.ReadTimeout(
|
||||
0, 'requests.ReadTimeout() not handled'),
|
||||
requests.TooManyRedirects(
|
||||
0, 'requests.TooManyRedirects() not handled'),
|
||||
)
|
||||
|
||||
|
||||
@mock.patch('requests.get')
|
||||
@mock.patch('requests.post')
|
||||
def test_config_http(mock_post, mock_get):
|
||||
"""
|
||||
API: ConfigHTTP() object
|
||||
|
||||
"""
|
||||
|
||||
# Define our good:// url
|
||||
class GoodNotification(NotifyBase):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(GoodNotification, self).__init__(*args, **kwargs)
|
||||
|
||||
def notify(self, *args, **kwargs):
|
||||
# Pretend everything is okay
|
||||
return True
|
||||
|
||||
# Store our good notification in our schema map
|
||||
SCHEMA_MAP['good'] = GoodNotification
|
||||
|
||||
# Prepare Mock
|
||||
dummy_request = mock.Mock()
|
||||
dummy_request.close.return_value = True
|
||||
dummy_request.status_code = requests.codes.ok
|
||||
dummy_request.content = """
|
||||
taga,tagb=good://server01
|
||||
"""
|
||||
dummy_request.headers = {
|
||||
'Content-Length': len(dummy_request.content),
|
||||
'Content-Type': 'text/plain',
|
||||
}
|
||||
|
||||
mock_post.return_value = dummy_request
|
||||
mock_get.return_value = dummy_request
|
||||
|
||||
assert ConfigHTTP.parse_url('garbage://') is None
|
||||
|
||||
results = ConfigHTTP.parse_url('http://user:pass@localhost?+key=value')
|
||||
assert isinstance(results, dict)
|
||||
ch = ConfigHTTP(**results)
|
||||
assert isinstance(ch.url(), six.string_types) is True
|
||||
assert isinstance(ch.read(), six.string_types) is True
|
||||
|
||||
# one entry added
|
||||
assert len(ch) == 1
|
||||
|
||||
results = ConfigHTTP.parse_url('http://localhost:8080/path/')
|
||||
assert isinstance(results, dict)
|
||||
ch = ConfigHTTP(**results)
|
||||
assert isinstance(ch.url(), six.string_types) is True
|
||||
assert isinstance(ch.read(), six.string_types) is True
|
||||
|
||||
# one entry added
|
||||
assert len(ch) == 1
|
||||
|
||||
results = ConfigHTTP.parse_url('http://user@localhost?format=text')
|
||||
assert isinstance(results, dict)
|
||||
ch = ConfigHTTP(**results)
|
||||
assert isinstance(ch.url(), six.string_types) is True
|
||||
assert isinstance(ch.read(), six.string_types) is True
|
||||
|
||||
# one entry added
|
||||
assert len(ch) == 1
|
||||
|
||||
results = ConfigHTTP.parse_url('https://localhost')
|
||||
assert isinstance(results, dict)
|
||||
ch = ConfigHTTP(**results)
|
||||
assert isinstance(ch.url(), six.string_types) is True
|
||||
assert isinstance(ch.read(), six.string_types) is True
|
||||
|
||||
# one entry added
|
||||
assert len(ch) == 1
|
||||
|
||||
# Testing of pop
|
||||
ch = ConfigHTTP(**results)
|
||||
|
||||
ref = ch[0]
|
||||
assert isinstance(ref, NotifyBase) is True
|
||||
|
||||
ref_popped = ch.pop(0)
|
||||
assert isinstance(ref_popped, NotifyBase) is True
|
||||
|
||||
assert ref == ref_popped
|
||||
|
||||
assert len(ch) == 0
|
||||
|
||||
# reference to calls on initial reference
|
||||
ch = ConfigHTTP(**results)
|
||||
assert isinstance(ch.pop(0), NotifyBase) is True
|
||||
|
||||
ch = ConfigHTTP(**results)
|
||||
assert isinstance(ch[0], NotifyBase) is True
|
||||
# Second reference actually uses cache
|
||||
assert isinstance(ch[0], NotifyBase) is True
|
||||
|
||||
ch = ConfigHTTP(**results)
|
||||
# Iterator creation (nothing needed to assert here)
|
||||
iter(ch)
|
||||
# Second reference actually uses cache
|
||||
iter(ch)
|
||||
|
||||
# Test a buffer size limit reach
|
||||
ch.max_buffer_size = len(dummy_request.content)
|
||||
assert isinstance(ch.read(), six.string_types) is True
|
||||
|
||||
# Test YAML detection
|
||||
yaml_supported_types = (
|
||||
'text/yaml', 'text/x-yaml', 'application/yaml', 'application/x-yaml')
|
||||
|
||||
for st in yaml_supported_types:
|
||||
dummy_request.headers['Content-Type'] = st
|
||||
ch.default_config_format = None
|
||||
assert isinstance(ch.read(), six.string_types) is True
|
||||
# Set to YAML
|
||||
assert ch.default_config_format == ConfigFormat.YAML
|
||||
|
||||
# Test TEXT detection
|
||||
text_supported_types = ('text/plain', 'text/html')
|
||||
|
||||
for st in text_supported_types:
|
||||
dummy_request.headers['Content-Type'] = st
|
||||
ch.default_config_format = None
|
||||
assert isinstance(ch.read(), six.string_types) is True
|
||||
# Set to TEXT
|
||||
assert ch.default_config_format == ConfigFormat.TEXT
|
||||
|
||||
# The type is never adjusted to mime types we don't understand
|
||||
ukwn_supported_types = ('text/css', 'application/zip')
|
||||
|
||||
for st in ukwn_supported_types:
|
||||
dummy_request.headers['Content-Type'] = st
|
||||
ch.default_config_format = None
|
||||
assert isinstance(ch.read(), six.string_types) is True
|
||||
# Remains unchanged
|
||||
assert ch.default_config_format is None
|
||||
|
||||
# When the entry is missing; we handle this too
|
||||
del dummy_request.headers['Content-Type']
|
||||
ch.default_config_format = None
|
||||
assert isinstance(ch.read(), six.string_types) is True
|
||||
# Remains unchanged
|
||||
assert ch.default_config_format is None
|
||||
|
||||
# Restore our content type object for lower tests
|
||||
dummy_request.headers['Content-Type'] = 'text/plain'
|
||||
|
||||
ch.max_buffer_size = len(dummy_request.content) - 1
|
||||
assert ch.read() is None
|
||||
|
||||
# Test an invalid return code
|
||||
dummy_request.status_code = 400
|
||||
assert ch.read() is None
|
||||
ch.max_error_buffer_size = 0
|
||||
assert ch.read() is None
|
||||
|
||||
# Exception handling
|
||||
for _exception in REQUEST_EXCEPTIONS:
|
||||
mock_post.side_effect = _exception
|
||||
mock_get.side_effect = _exception
|
||||
assert ch.read() is None
|
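The HTTP case works the same way from the public API; a hedged sketch with a placeholder host follows. When the server's Content-Type header isn't reliable, the `format=` override tested above can force a specific parser:

```python
# Sketch: pull configuration over HTTP; the host below is a placeholder.
from apprise import Apprise
from apprise.AppriseConfig import AppriseConfig

config = AppriseConfig()

# ?format=yaml (or text) forces a parser if the Content-Type can't be trusted
config.add('http://config.example.com/apprise.yml?format=yaml')

apobj = Apprise()
apobj.add(config)
apobj.notify(body='urls fetched over http')
```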
@ -23,15 +23,19 @@
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import re
|
||||
import six
|
||||
import mock
|
||||
import smtplib
|
||||
|
||||
from apprise import plugins
|
||||
from apprise import NotifyType
|
||||
from apprise import Apprise
|
||||
from apprise.utils import compat_is_basestring
|
||||
from apprise.plugins import NotifyEmailBase
|
||||
|
||||
import smtplib
|
||||
import mock
|
||||
import re
|
||||
# Disable logging for a cleaner testing output
|
||||
import logging
|
||||
logging.disable(logging.CRITICAL)
|
||||
|
||||
|
||||
TEST_URLS = (
|
||||
@ -232,7 +236,7 @@ def test_email_plugin(mock_smtp, mock_smtpssl):
|
||||
|
||||
if isinstance(obj, plugins.NotifyBase.NotifyBase):
|
||||
# We loaded okay; now lets make sure we can reverse this url
|
||||
assert(compat_is_basestring(obj.url()) is True)
|
||||
assert(isinstance(obj.url(), six.string_types) is True)
|
||||
|
||||
# Instantiate the exact same object again using the URL from
|
||||
# the one that was already created properly
|
||||
|
@@ -23,12 +23,12 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

import six
import pytest
import mock
import sys
import types
import apprise
from apprise.utils import compat_is_basestring

try:
# Python v3.4+
@@ -41,6 +41,10 @@ except ImportError:
# Python v2.7
pass

# Disable logging for a cleaner testing output
import logging
logging.disable(logging.CRITICAL)

if 'dbus' not in sys.modules:
# Environment doesn't allow for dbus
pytest.skip("Skipping dbus-python based tests", allow_module_level=True)
@@ -225,7 +229,7 @@ def test_dbus_plugin(mock_mainloop, mock_byte, mock_bytearray,
obj.duration = 0

# Test url() call
assert(compat_is_basestring(obj.url()) is True)
assert(isinstance(obj.url(), six.string_types) is True)

# Our notification succeeds even though the gi library was not loaded
assert(obj.notify(title='title', body='body',
@@ -255,7 +259,7 @@ def test_dbus_plugin(mock_mainloop, mock_byte, mock_bytearray,
obj.duration = 0

# Test url() call
assert(compat_is_basestring(obj.url()) is True)
assert(isinstance(obj.url(), six.string_types) is True)

# Our notification succeeds even though the gi library was not loaded
assert(obj.notify(title='title', body='body',
@@ -277,7 +281,7 @@ def test_dbus_plugin(mock_mainloop, mock_byte, mock_bytearray,
obj.duration = 0

# Test url() call
assert(compat_is_basestring(obj.url()) is True)
assert(isinstance(obj.url(), six.string_types) is True)

# Our notification fail because the dbus library wasn't present
assert(obj.notify(title='title', body='body',

@@ -23,12 +23,12 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

import six
import mock
import sys
import types

import apprise
from apprise.utils import compat_is_basestring

try:
# Python v3.4+
@@ -41,6 +41,10 @@ except ImportError:
# Python v2.7
pass

# Disable logging for a cleaner testing output
import logging
logging.disable(logging.CRITICAL)


def test_gnome_plugin():
"""
@@ -115,7 +119,7 @@ def test_gnome_plugin():
assert(obj._enabled is True)

# Test url() call
assert(compat_is_basestring(obj.url()) is True)
assert(isinstance(obj.url(), six.string_types) is True)

# test notifications
assert(obj.notify(title='title', body='body',
@@ -23,12 +23,15 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

import mock
import six
from apprise import plugins
from apprise import NotifyType
from apprise import Apprise
from apprise.utils import compat_is_basestring

import mock
# Disable logging for a cleaner testing output
import logging
logging.disable(logging.CRITICAL)


TEST_URLS = (
@@ -222,7 +225,7 @@ def test_growl_plugin(mock_gntp):

if isinstance(obj, plugins.NotifyBase.NotifyBase):
# We loaded okay; now lets make sure we can reverse this url
assert(compat_is_basestring(obj.url()) is True)
assert(isinstance(obj.url(), six.string_types) is True)

# Instantiate the exact same object again using the URL from
# the one that was already created properly

@@ -22,7 +22,7 @@
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

import six
from datetime import datetime
from datetime import timedelta

@@ -30,7 +30,10 @@ from apprise.plugins.NotifyBase import NotifyBase
from apprise import NotifyType
from apprise import NotifyImageSize
from timeit import default_timer
from apprise.utils import compat_is_basestring

# Disable logging for a cleaner testing output
import logging
logging.disable(logging.CRITICAL)


def test_notify_base():
@@ -41,7 +44,7 @@ def test_notify_base():

# invalid types throw exceptions
try:
nb = NotifyBase(**{'format': 'invalid'})
NotifyBase(**{'format': 'invalid'})
# We should never reach here as an exception should be thrown
assert(False)

@@ -50,7 +53,7 @@ def test_notify_base():

# invalid types throw exceptions
try:
nb = NotifyBase(**{'overflow': 'invalid'})
NotifyBase(**{'overflow': 'invalid'})
# We should never reach here as an exception should be thrown
assert(False)

@@ -154,8 +157,9 @@ def test_notify_base():

# Color handling
assert nb.color(notify_type='invalid') is None
assert compat_is_basestring(
nb.color(notify_type=NotifyType.INFO, color_type=None))
assert isinstance(
nb.color(notify_type=NotifyType.INFO, color_type=None),
six.string_types)
assert isinstance(
nb.color(notify_type=NotifyType.INFO, color_type=int), int)
assert isinstance(
@@ -192,13 +196,6 @@ def test_notify_base():
'/path/?name=Dr%20Disrespect', unquote=True) == \
['path', '?name=Dr', 'Disrespect']

# Test is_email
assert NotifyBase.is_email('test@gmail.com') is True
assert NotifyBase.is_email('invalid.com') is False

# Test is_hostname
assert NotifyBase.is_hostname('example.com') is True

# Test quote
assert NotifyBase.unquote('%20') == ' '
assert NotifyBase.quote(' ') == '%20'
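
The helpers exercised above are plain static methods, so they can be sanity-checked in isolation. A small sketch of the quoting and path-splitting round trip; the values are only examples and the import path follows the one used earlier in this hunk:

```python
from apprise.plugins.NotifyBase import NotifyBase

# quote()/unquote() wrap the usual URL percent-encoding helpers
assert NotifyBase.quote(' ') == '%20'
assert NotifyBase.unquote('%20') == ' '

# split_path() breaks a URL path into its non-empty components
assert NotifyBase.split_path('/a/b/c') == ['a', 'b', 'c']
```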
@@ -23,13 +23,18 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

import six
from apprise import plugins
from apprise import NotifyType
from apprise import Apprise
from apprise.utils import compat_is_basestring

import mock

# Disable logging for a cleaner testing output
import logging
logging.disable(logging.CRITICAL)


TEST_URLS = (
##################################
# NotifyPushjet
@@ -121,7 +126,7 @@ def test_plugin(mock_refresh, mock_send):

if isinstance(obj, plugins.NotifyBase.NotifyBase):
# We loaded okay; now lets make sure we can reverse this url
assert(compat_is_basestring(obj.url()) is True)
assert(isinstance(obj.url(), six.string_types) is True)

# Instantiate the exact same object again using the URL from
# the one that was already created properly
@@ -23,22 +23,25 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

from apprise import plugins
from apprise import NotifyType
from apprise import NotifyBase
from apprise import Apprise
from apprise import AppriseAsset
from apprise.utils import compat_is_basestring
from apprise.common import NotifyFormat
from apprise.common import OverflowMode

import six
import requests
import mock
from json import dumps
from random import choice
from string import ascii_uppercase as str_alpha
from string import digits as str_num

import requests
import mock
from apprise import plugins
from apprise import NotifyType
from apprise import NotifyBase
from apprise import Apprise
from apprise import AppriseAsset
from apprise.common import NotifyFormat
from apprise.common import OverflowMode

# Disable logging for a cleaner testing output
import logging
logging.disable(logging.CRITICAL)

# Some exception handling we'll use
REQUEST_EXCEPTIONS = (
@@ -142,6 +145,13 @@ TEST_URLS = (
'instance': plugins.NotifyDiscord,
'requests_response_code': requests.codes.no_content,
}),
('discord://%s/%s?format=markdown&footer=Yes&thumbnail=Yes' % (
'i' * 24, 't' * 64), {
'instance': plugins.NotifyDiscord,
'requests_response_code': requests.codes.no_content,
# don't include an image by default
'include_image': False,
}),
('discord://%s/%s?format=markdown&avatar=No&footer=No' % (
'i' * 24, 't' * 64), {
'instance': plugins.NotifyDiscord,
@@ -338,6 +348,7 @@ TEST_URLS = (
# APIKey + bad device
('join://%s/%s' % ('a' * 32, 'd' * 10), {
'instance': plugins.NotifyJoin,
'response': False,
}),
# APIKey + bad url
('join://:@/', {
@@ -1129,6 +1140,9 @@ TEST_URLS = (
# No username specified; this is still okay as we sub in
# default; The one invalid channel is skipped when sending a message
'instance': plugins.NotifySlack,
# There is an invalid channel that we will fail to deliver to
# as a result the response type will be false
'response': False,
}),
('slack://T1JJ3T3L2/A1BRTD4JD/TIiajkdnlazkcOXrIdevi7FQ/#channel', {
# No username specified; this is still okay as we sub in
@@ -1524,7 +1538,7 @@ def test_rest_plugins(mock_post, mock_get):
# Allow us to force the server response text to be something other then
# the defaults
requests_response_text = meta.get('requests_response_text')
if not compat_is_basestring(requests_response_text):
if not isinstance(requests_response_text, six.string_types):
# Convert to string
requests_response_text = dumps(requests_response_text)

@@ -1541,17 +1555,14 @@ def test_rest_plugins(mock_post, mock_get):
else:
# Disable images
asset = AppriseAsset(image_path_mask=False, image_url_mask=False)
asset.image_url_logo = None

test_requests_exceptions = meta.get(
'test_requests_exceptions', False)

# A request
robj = mock.Mock()
setattr(robj, 'raw', mock.Mock())
# Allow raw.read() calls
robj.raw.read.return_value = ''
robj.text = ''
robj.content = ''
robj.content = u''
mock_get.return_value = robj
mock_post.return_value = robj

@@ -1561,8 +1572,8 @@ def test_rest_plugins(mock_post, mock_get):
mock_get.return_value.status_code = requests_response_code

# Handle our default text response
mock_get.return_value.text = requests_response_text
mock_post.return_value.text = requests_response_text
mock_get.return_value.content = requests_response_text
mock_post.return_value.content = requests_response_text

# Ensure there is no side effect set
mock_post.side_effect = None
@@ -1592,7 +1603,7 @@ def test_rest_plugins(mock_post, mock_get):

if isinstance(obj, plugins.NotifyBase.NotifyBase):
# We loaded okay; now lets make sure we can reverse this url
assert(compat_is_basestring(obj.url()) is True)
assert(isinstance(obj.url(), six.string_types) is True)

# Instantiate the exact same object again using the URL from
# the one that was already created properly
@@ -1666,12 +1677,16 @@ def test_rest_plugins(mock_post, mock_get):

except AssertionError:
# Don't mess with these entries
print('%s AssertionError' % url)
raise

except Exception as e:
# Check that we were expecting this exception to happen
if not isinstance(e, response):
try:
if not isinstance(e, response):
raise

except TypeError:
print('%s Unhandled response %s' % (url, type(e)))
raise

#
@@ -1901,9 +1916,7 @@ def test_notify_emby_plugin_login(mock_post, mock_get):

# Our login flat out fails if we don't have proper parseable content
mock_post.return_value.content = u''
mock_post.return_value.text = ''
mock_get.return_value.content = mock_post.return_value.content
mock_get.return_value.text = mock_post.return_value.text

# KeyError handling
mock_post.return_value.status_code = 999
@@ -1944,9 +1957,7 @@ def test_notify_emby_plugin_login(mock_post, mock_get):
mock_post.return_value.content = dumps({
u'AccessToken': u'0000-0000-0000-0000',
})
mock_post.return_value.text = str(mock_post.return_value.content)
mock_get.return_value.content = mock_post.return_value.content
mock_get.return_value.text = mock_post.return_value.text

obj = Apprise.instantiate('emby://l2g:l2gpass@localhost')
assert isinstance(obj, plugins.NotifyEmby)
@@ -1964,9 +1975,7 @@ def test_notify_emby_plugin_login(mock_post, mock_get):
u'Id': u'123abc',
u'AccessToken': u'0000-0000-0000-0000',
})
mock_post.return_value.text = str(mock_post.return_value.content)
mock_get.return_value.content = mock_post.return_value.content
mock_get.return_value.text = mock_post.return_value.text

obj = Apprise.instantiate('emby://l2g:l2gpass@localhost')
assert isinstance(obj, plugins.NotifyEmby)
@@ -1985,9 +1994,7 @@ def test_notify_emby_plugin_login(mock_post, mock_get):
},
u'AccessToken': u'0000-0000-0000-0000',
})
mock_post.return_value.text = str(mock_post.return_value.content)
mock_get.return_value.content = mock_post.return_value.content
mock_get.return_value.text = mock_post.return_value.text

# Login
assert obj.login() is True
@@ -2036,9 +2043,7 @@ def test_notify_emby_plugin_sessions(mock_post, mock_get, mock_logout,

# Our login flat out fails if we don't have proper parseable content
mock_post.return_value.content = u''
mock_post.return_value.text = ''
mock_get.return_value.content = mock_post.return_value.content
mock_get.return_value.text = mock_post.return_value.text

# KeyError handling
mock_post.return_value.status_code = 999
@@ -2056,9 +2061,7 @@ def test_notify_emby_plugin_sessions(mock_post, mock_get, mock_logout,

mock_post.return_value.status_code = requests.codes.ok
mock_get.return_value.status_code = requests.codes.ok
mock_post.return_value.text = str(mock_post.return_value.content)
mock_get.return_value.content = mock_post.return_value.content
mock_get.return_value.text = mock_post.return_value.text

# Disable the port completely
obj.port = None
@@ -2079,9 +2082,7 @@ def test_notify_emby_plugin_sessions(mock_post, mock_get, mock_logout,
u'InvalidEntry': None,
},
])
mock_post.return_value.text = str(mock_post.return_value.content)
mock_get.return_value.content = mock_post.return_value.content
mock_get.return_value.text = mock_post.return_value.text

sessions = obj.sessions(user_controlled=True)
assert isinstance(sessions, dict) is True
@@ -2138,9 +2139,7 @@ def test_notify_emby_plugin_logout(mock_post, mock_get, mock_login):

# Our login flat out fails if we don't have proper parseable content
mock_post.return_value.content = u''
mock_post.return_value.text = ''
mock_get.return_value.content = mock_post.return_value.content
mock_get.return_value.text = mock_post.return_value.text

# KeyError handling
mock_post.return_value.status_code = 999
@@ -2158,9 +2157,7 @@ def test_notify_emby_plugin_logout(mock_post, mock_get, mock_login):

mock_post.return_value.status_code = requests.codes.ok
mock_get.return_value.status_code = requests.codes.ok
mock_post.return_value.text = str(mock_post.return_value.content)
mock_get.return_value.content = mock_post.return_value.content
mock_get.return_value.text = mock_post.return_value.text

# Disable the port completely
obj.port = None
@@ -2181,11 +2178,11 @@ def test_notify_emby_plugin_notify(mock_post, mock_get, mock_logout,
# Disable Throttling to speed testing
plugins.NotifyBase.NotifyBase.request_rate_per_sec = 0

# Prepare Mock
mock_get.return_value = requests.Request()
mock_post.return_value = requests.Request()
mock_post.return_value.status_code = requests.codes.ok
mock_get.return_value.status_code = requests.codes.ok
req = requests.Request()
req.status_code = requests.codes.ok
req.content = ''
mock_get.return_value = req
mock_post.return_value = req

# This is done so we don't obstruct our access_token and user_id values
mock_login.return_value = True
@@ -2218,9 +2215,7 @@ def test_notify_emby_plugin_notify(mock_post, mock_get, mock_logout,

# Our login flat out fails if we don't have proper parseable content
mock_post.return_value.content = u''
mock_post.return_value.text = ''
mock_get.return_value.content = mock_post.return_value.content
mock_get.return_value.text = mock_post.return_value.text

# KeyError handling
mock_post.return_value.status_code = 999
@@ -2234,9 +2229,7 @@ def test_notify_emby_plugin_notify(mock_post, mock_get, mock_logout,

mock_post.return_value.status_code = requests.codes.ok
mock_get.return_value.status_code = requests.codes.ok
mock_post.return_value.text = str(mock_post.return_value.content)
mock_get.return_value.content = mock_post.return_value.content
mock_get.return_value.text = mock_post.return_value.text

# Disable the port completely
obj.port = None
@@ -2356,10 +2349,11 @@ def test_notify_join_plugin(mock_post, mock_get):
p = plugins.NotifyJoin(apikey=apikey, devices=[group, device])

# Prepare our mock responses
mock_get.return_value = requests.Request()
mock_post.return_value = requests.Request()
mock_post.return_value.status_code = requests.codes.created
mock_get.return_value.status_code = requests.codes.created
req = requests.Request()
req.status_code = requests.codes.created
req.content = ''
mock_get.return_value = req
mock_post.return_value = req

# Test notifications without a body or a title; nothing to send
# so we return False
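
A recurring change in this diff is how HTTP responses are stubbed: instead of assigning separate `requests.Request()` objects to `mock_get` and `mock_post`, a single prepared response object is shared by both. A rough sketch of that pattern outside any particular test (the test name and plugin step are illustrative):

```python
import mock
import requests


@mock.patch('requests.post')
@mock.patch('requests.get')
def test_some_plugin(mock_get, mock_post):
    # One stubbed response drives both HTTP verbs; no real traffic is sent
    req = requests.Request()
    req.status_code = requests.codes.ok
    req.content = ''
    mock_get.return_value = req
    mock_post.return_value = req
    # ... exercise the plugin under test here
```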
@@ -2482,8 +2476,6 @@ def test_notify_pushed_plugin(mock_post, mock_get):
mock_post.return_value = requests.Request()
mock_post.return_value.status_code = requests.codes.ok
mock_get.return_value.status_code = requests.codes.ok
mock_post.return_value.text = ''
mock_get.return_value.text = ''

try:
obj = plugins.NotifyPushed(
@@ -2556,8 +2548,6 @@ def test_notify_pushed_plugin(mock_post, mock_get):
# Prepare Mock to fail
mock_post.return_value.status_code = requests.codes.internal_server_error
mock_get.return_value.status_code = requests.codes.internal_server_error
mock_post.return_value.text = ''
mock_get.return_value.text = ''


@mock.patch('requests.get')
@@ -2646,8 +2636,8 @@ def test_notify_rocketchat_plugin(mock_post, mock_get):
mock_post.return_value = requests.Request()
mock_post.return_value.status_code = requests.codes.ok
mock_get.return_value.status_code = requests.codes.ok
mock_post.return_value.text = ''
mock_get.return_value.text = ''
mock_post.return_value.content = ''
mock_get.return_value.content = ''

try:
obj = plugins.NotifyRocketChat(
@@ -2701,8 +2691,6 @@ def test_notify_rocketchat_plugin(mock_post, mock_get):
# Prepare Mock to fail
mock_post.return_value.status_code = requests.codes.internal_server_error
mock_get.return_value.status_code = requests.codes.internal_server_error
mock_post.return_value.text = ''
mock_get.return_value.text = ''

#
# Send Notification
@@ -2732,13 +2720,10 @@ def test_notify_rocketchat_plugin(mock_post, mock_get):
#
assert obj.logout() is False

mock_post.return_value.text = ''
# Generate exceptions
mock_get.side_effect = requests.ConnectionError(
0, 'requests.ConnectionError() not handled')
mock_post.side_effect = mock_get.side_effect
mock_get.return_value.text = ''
mock_post.return_value.text = ''

#
# Send Notification
@@ -2824,7 +2809,7 @@ def test_notify_telegram_plugin(mock_post, mock_get):
assert(len(obj.chat_ids) == 2)

# test url call
assert(compat_is_basestring(obj.url()))
assert(isinstance(obj.url(), six.string_types))
# Test that we can load the string we generate back:
obj = plugins.NotifyTelegram(**plugins.NotifyTelegram.parse_url(obj.url()))
assert(isinstance(obj, plugins.NotifyTelegram))
@@ -2839,7 +2824,7 @@ def test_notify_telegram_plugin(mock_post, mock_get):
response.status_code = requests.codes.internal_server_error

# a error response
response.text = dumps({
response.content = dumps({
'description': 'test',
})
mock_get.return_value = response
@@ -3054,7 +3039,7 @@ def test_notify_overflow_truncate():
chunks = obj._apply_overflow(
body=body, title=title, overflow=OverflowMode.SPLIT)
assert len(chunks) == 1
assert body == chunks[0].get('body')
assert body.rstrip() == chunks[0].get('body')
assert title[0:TestNotification.title_maxlen] == chunks[0].get('title')

#
@@ -28,6 +28,11 @@ import requests
from apprise import plugins
from apprise import Apprise

# Disable logging for a cleaner testing output
import logging
logging.disable(logging.CRITICAL)


TEST_ACCESS_KEY_ID = 'AHIAJGNT76XIMXDBIJYA'
TEST_ACCESS_KEY_SECRET = 'bu1dHSdO22pfaaVy/wmNsdljF4C07D3bndi9PQJ9'
TEST_REGION = 'us-east-2'
@@ -326,7 +331,7 @@ def test_aws_topic_handling(mock_post):

# A request
robj = mock.Mock()
robj.text = ''
robj.content = ''
robj.status_code = requests.codes.ok

if data.find('=CreateTopic') >= 0:
@@ -361,11 +366,11 @@ def test_aws_topic_handling(mock_post):

# A request
robj = mock.Mock()
robj.text = ''
robj.content = ''
robj.status_code = requests.codes.ok

if data.find('=CreateTopic') >= 0:
robj.text = arn_response
robj.content = arn_response

# Manipulate Topic Publishing only (not phone)
elif data.find('=Publish') >= 0 and data.find('TopicArn=') >= 0:
@@ -385,7 +390,7 @@ def test_aws_topic_handling(mock_post):

# Handle case where TopicArn is missing:
robj = mock.Mock()
robj.text = "<CreateTopicResponse></CreateTopicResponse>"
robj.content = "<CreateTopicResponse></CreateTopicResponse>"
robj.status_code = requests.codes.ok

# Assign ourselves a new function
@@ -394,14 +399,14 @@ def test_aws_topic_handling(mock_post):

# Handle case where we fails get a bad response
robj = mock.Mock()
robj.text = ''
robj.content = ''
robj.status_code = requests.codes.bad_request
mock_post.return_value = robj
assert(a.notify(title='', body='test') is False)

# Handle case where we get a valid response and TopicARN
robj = mock.Mock()
robj.text = arn_response
robj.content = arn_response
robj.status_code = requests.codes.ok
mock_post.return_value = robj
# We would have failed to make Post
@@ -28,6 +28,10 @@ from apprise import NotifyType
from apprise import Apprise
import mock

# Disable logging for a cleaner testing output
import logging
logging.disable(logging.CRITICAL)


TEST_URLS = (
##################################
@@ -34,6 +34,10 @@ except ImportError:

from apprise import utils

# Disable logging for a cleaner testing output
import logging
logging.disable(logging.CRITICAL)


def test_parse_qsd():
"utils: parse_qsd() testing """
@@ -387,6 +391,21 @@ def test_is_hostname():
assert utils.is_hostname('') is False


def test_is_email():
"""
API: is_email() function

"""
# Valid Emails
assert utils.is_email('test@gmail.com') is True
assert utils.is_email('tag+test@gmail.com') is True

# Invalid Emails
assert utils.is_email('invalid.com') is False
assert utils.is_email(object()) is False
assert utils.is_email(None) is False


def test_parse_list():
"utils: parse_list() testing """

@@ -424,3 +443,79 @@ def test_parse_list():
'.divx', '.wmv', '.iso', '.mkv', '.mov', '.mpg', '.avi', '.vob',
'.xvid', '.mpeg', '.mp4',
]))


def test_exclusive_match():
"""utils: is_exclusive_match() testing
"""

# No Logic always returns True
assert utils.is_exclusive_match(data=None, logic=None) is True
assert utils.is_exclusive_match(data=None, logic=set()) is True
assert utils.is_exclusive_match(data='', logic=set()) is True
assert utils.is_exclusive_match(data=u'', logic=set()) is True
assert utils.is_exclusive_match(data=u'check', logic=set()) is True
assert utils.is_exclusive_match(
data=['check', 'checkb'], logic=set()) is True

# String delimters are stripped out so that a list can be formed
# the below is just an empty token list
assert utils.is_exclusive_match(data=set(), logic=',; ,') is True

# garbage logic is never an exclusive match
assert utils.is_exclusive_match(data=set(), logic=object()) is False
assert utils.is_exclusive_match(data=set(), logic=[object(), ]) is False

#
# Test with logic:
#
data = set(['abc'])

# def in data
assert utils.is_exclusive_match(
logic='def', data=data) is False
# def in data
assert utils.is_exclusive_match(
logic=['def', ], data=data) is False
# def in data
assert utils.is_exclusive_match(
logic=('def', ), data=data) is False
# def in data
assert utils.is_exclusive_match(
logic=set(['def', ]), data=data) is False
# abc in data
assert utils.is_exclusive_match(
logic=['abc', ], data=data) is True
# abc in data
assert utils.is_exclusive_match(
logic=('abc', ), data=data) is True
# abc in data
assert utils.is_exclusive_match(
logic=set(['abc', ]), data=data) is True
# abc or def in data
assert utils.is_exclusive_match(
logic='abc, def', data=data) is True

#
# Update our data set so we can do more advance checks
#
data = set(['abc', 'def', 'efg', 'xyz'])

# def and abc in data
assert utils.is_exclusive_match(
logic=[('abc', 'def')], data=data) is True

# cba and abc in data
assert utils.is_exclusive_match(
logic=[('cba', 'abc')], data=data) is False

# www or zzz or abc and xyz
assert utils.is_exclusive_match(
logic=['www', 'zzz', ('abc', 'xyz')], data=data) is True
# www or zzz or abc and xyz (strings are valid too)
assert utils.is_exclusive_match(
logic=['www', 'zzz', ('abc, xyz')], data=data) is True

# www or zzz or abc and jjj
assert utils.is_exclusive_match(
logic=['www', 'zzz', ('abc', 'jjj')], data=data) is False
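
`is_exclusive_match()` is the helper behind tag filtering: entries in a list are treated as OR alternatives, while entries grouped in a tuple must all be present (AND). A hedged sketch of how that surfaces at the notification layer, assuming the `tag` keyword described in the Apprise configuration wiki; the URL and tag names are made up:

```python
# Sketch only: the 'tag' keyword and its OR/AND semantics follow the Apprise
# documentation; nothing here is part of this diff.
import apprise

a = apprise.Apprise()
a.add('mailto://user:pass@gmail.com', tag=['admin', 'urgent'])

# Matches anything tagged 'admin', OR anything tagged both 'urgent' AND 'email'
a.notify(body='disk almost full', tag=['admin', ('urgent', 'email')])
```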
@@ -25,11 +25,11 @@

import mock
import sys
import six
import types

# Rebuild our Apprise environment
import apprise
from apprise.utils import compat_is_basestring

try:
# Python v3.4+
@@ -42,6 +42,10 @@ except ImportError:
# Python v2.7
pass

# Disable logging for a cleaner testing output
import logging
logging.disable(logging.CRITICAL)


def test_windows_plugin():
"""
@@ -109,7 +113,7 @@ def test_windows_plugin():
obj.duration = 0

# Test URL functionality
assert(compat_is_basestring(obj.url()) is True)
assert(isinstance(obj.url(), six.string_types) is True)

# Check that it found our mocked environments
assert(obj._enabled is True)