diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000..3be90e9 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,23 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: '' +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**To Reproduce** +Steps to reproduce the behavior: +1. Calling the function '...' +2. With arguments '....' +3. See error + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Additional context** +Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000..bbcbbe7 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,20 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: '' +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. 
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000..4adfcf4 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,10 @@ + diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..3c1d8fa --- /dev/null +++ b/.gitignore @@ -0,0 +1,118 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ + +# Pipenv +.local/ + +# Pylint +.pylint.d + +# Pytest +cov.xml +/test-*.xml + +# Vim +*.swp +*.swo diff --git a/.pyspelling.yml b/.pyspelling.yml new file mode 100644 index 0000000..a54a4d4 --- /dev/null +++ b/.pyspelling.yml @@ -0,0 +1,89 @@ +spellchecker: aspell + +matrix: +- name: restructedtext + sources: + - ./**/*.rst + hunspell: + d: 
docs/spelling_wordlist.txt + aspell: + lang: en + dictionary: + wordlists: + - docs/spelling_wordlist.txt + output: build/dictionary/restructured.dic + pipeline: + - resplendent.filters.restructuredtext: + - pyspelling.filters.html: + comments: false + attributes: + - title + - alt + ignores: + - code + - pre + - pyspelling.filters.url: + +- name: markdown + sources: + - ./**/*.md + hunspell: + d: docs/spelling_wordlist.txt + aspell: + lang: en + dictionary: + wordlists: + - docs/spelling_wordlist.txt + output: build/dictionary/mkdocs.dic + pipeline: + - pyspelling.filters.markdown: + - pyspelling.filters.html: + comments: false + attributes: + - title + - alt + ignores: + - code + - pre + - pyspelling.filters.url: + +- name: python + sources: + - ./**/*.py + hunspell: + d: docs/src/dictionary/hunspell/en_AU + aspell: + lang: en + dictionary: + wordlists: + - docs/spelling_wordlist.txt + output: build/dictionary/python.dic + pipeline: + - pyspelling.filters.python: + group_comments: true + - pyspelling.flow_control.wildcard: + allow: + - py-comment + - pyspelling.filters.context: + context_visible_first: true + delimiters: + # Ignore lint (noqa) and coverage (pragma) as well as shebang (#!) 
+ - open: '^(?: *(?:noqa\b|pragma: no cover)|!)' + close: '$' + # Ignore Python encoding string -*- encoding stuff -*- + - open: '^ *-\*-' + close: '-\*-$' + - pyspelling.filters.context: + context_visible_first: true + escapes: '\\[\\`]' + delimiters: + # Ignore multiline content between fences (fences can have 3 or more back ticks) + # ``` + # content + # ``` + - open: '(?s)^(?P *`{3,})$' + close: '^(?P=open)$' + # Ignore text between inline back ticks + - open: '(?P`+)' + close: '(?P=open)' + - pyspelling.filters.url: diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..218f147 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,76 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, sex characteristics, gender identity and expression, +level of experience, education, socio-economic status, nationality, personal +appearance, race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at tim.gates@iress.com. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see +https://www.contributor-covenant.org/faq diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..0f2ba0a --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,31 @@ +# Contributing + +## Code of Conduct + +Please note we have a [Code of Conduct](CODE_OF_CONDUCT.md), please follow it +in all your interactions with the project. + +## Member Responsibilities + +First things first, the prtg-dev community welcomes all contributors with +good intentions and we will as much as possible respect your efforts to +contribute. Specifically we will try and request changes where required and as +last resort make changes on top of pull requests leaving your work with due +acknowledgement. We will specifically avoid finding some fault with your code +and then submit a similar pull request that contains little acknowledgement of +the original work. 
+ +## Contributor Responsibilities + +When contributing to this repository, please first discuss the change you wish to make via issue, +email, or any other method with the owners of this repository before making a change. Contributing +to this repository is greatly welcomed and the process is to save everyone time to make contributions +efficient rather than be a challenge to overcome. + + +## Pull Request Process + +1. Update the README.md with details of significant changes. +2. You may merge the Pull Request in once you have the sign-off of one other developer, or if you + do not have permission to do that, you may request the reviewer to merge it for you. + diff --git a/COOKIECUTTER_UPDATES.md b/COOKIECUTTER_UPDATES.md new file mode 100644 index 0000000..ba5eb16 --- /dev/null +++ b/COOKIECUTTER_UPDATES.md @@ -0,0 +1,30 @@ +# Source +This project was generated from the +[3Amigos CookieCutter Template](https://github.com/3amigos-dev/cookiecutter-3amigos-py) +this file records the updates received from the upstream cookiecutter template +so that it is clear what upstream updates have already been applied to +consider application of new updates. + +# Updates from the Cookiecutter Template +* Use an [Azure Pipelines](https://azure.microsoft.com/en-au/services/devops/pipelines/) CI. +* Follow [3Amigos](https://3amigos-dev.github.io/) to use CI container pattern. +* Use [pyspelling](https://github.com/facelessuser/pyspelling) to check spelling in CI. +* Use [flake8](https://github.com/PyCQA/flake8) for static code analysis. +* Use [bandit](https://github.com/PyCQA/bandit) for security static analysis. +* Use [pylint](https://github.com/PyCQA/pylint) for static code analysis. +* Use [isort](https://github.com/timothycrosley/isort) for sorting imports. +* Use [towncrier](https://github.com/hawkowl/towncrier) for news updates. +* Use [dlint](https://github.com/duo-labs/dlint) for security static analysis. 
+ +* Use [safety](https://github.com/pyupio/safety) to check package safety. +* Use [pytest](https://github.com/pytest-dev/pytest/) for unit testing. +* Use [pytest-cov](https://github.com/pytest-dev/pytest-cov) to check 100% unit test coverage. +* Use [pytest-xdist](https://github.com/pytest-dev/pytest-xdist) for test parallelization. +* Use [pytest-azurepipelines](https://github.com/tonybaloney/pytest-azurepipelines) to publish test results and coverage to Azure Pipelines Artifacts. +* Use [resplendent](https://github.com/resplendent-dev/resplendent) to spell check reStructuredText in pyspelling. +* Use [shellcheck](https://github.com/koalaman/shellcheck) for shell script static analysis. +* Use [sphinx](https://github.com/sphinx-doc/sphinx) for documentation generation. +* Use [twine](https://github.com/pypa/twine) for publishing to PyPi. +* Badges for [PyPi](https://pypi.org/), [ReadTheDocs](https://readthedocs.org/) and [Azure Pipelines](https://azure.microsoft.com/en-au/services/devops/pipelines/). + +# Minor Updates +* Configurable code coverage minimum. diff --git a/NEWS.rst b/NEWS.rst new file mode 100644 index 0000000..c4326fd --- /dev/null +++ b/NEWS.rst @@ -0,0 +1,7 @@ +prtg 0.1.0dev0 (2019-07-15) +====================================================== + +Features +-------- + +- Use towncrier for news updates. 
(#1) diff --git a/README.md b/README.md index b753e99..cf158e0 100644 --- a/README.md +++ b/README.md @@ -1,41 +1,87 @@ # prtg + Python module to manage PRTG servers -Prerequisites: +[![Build Status](https://dev.azure.com/timgates/timgates/_apis/build/status/timgates42.prtg?branchName=master)](https://dev.azure.com/timgates/timgates/_build/latest?definitionId=14&branchName=master) +[![PyPi version](https://img.shields.io/pypi/v/prtg.svg)](https://pypi.org/project/prtg) +[![Python Versions](https://img.shields.io/pypi/pyversions/prtg.svg)](https://pypi.org/project/prtg) +[![PyPi downloads per month](https://img.shields.io/pypi/dm/prtg.svg)](https://pypi.org/project/prtg) +[![Documentation Status](https://readthedocs.org/projects/prtg/badge/?version=latest)](https://prtg.readthedocs.io/en/latest/?badge=latest) + +# Additional Documentation: +* [Online Documentation](https://prtg.readthedocs.io/en/latest/) +* [News](NEWS.rst). +* [Template Updates](COOKIECUTTER_UPDATES.md). +* [Code of Conduct](CODE_OF_CONDUCT.md). +* [Contribution Guidelines](CONTRIBUTING.md). + +## Prerequisites: + - bs4 (BeautifulSoup) -- requests +- click +- future - lxml +- requests + +## Installation + +Can be installed via pip: + +``` +pip install prtg +``` + +## Warnings + +Tested only on Python 3.5.2 so far. Does work with python 2.7 but not +extensively tested. -Tested only on Python 3.5.2 so far. Does work with python 2.7 but not extensively tested. +## Description -This is a Python module to facilitate in managing PRTG servers from CLI or for automating changes. It is really useful for scripting changes to prtg objects. +This is a Python module to facilitate in managing PRTG servers from CLI or for +automating changes. It is really useful for scripting changes to prtg objects. -The prtg_api no longer uses a config file. Instead you need to enter your PRTG parameters when initiating the prtg_api class. 
This change was to allow this to be used in a more flexible way, or to manage multiple PRTG instances, you can still set up a local config file for your parameters if you wish. The parameters for initiating the prtg_api class are: +The prtg\_api no longer uses a config file. Instead you need to enter your +PRTG parameters when initiating the prtg\_api class. This change was to allow +this to be used in a more flexible way, or to manage multiple PRTG instances, +you can still set up a local config file for your parameters if you wish. The +parameters for initiating the prtg\_api class are: ``` -prtg_api(host,user,passhash,protocol='https',port='443',rootid=0) +prtg.PRTGApi(host,user,passhash,protocol='https',port='443',rootid=0) ``` -Upon initialisation the entire device tree is downloaded and each probe, group, device, sensor and channel is provided as a modifiable object. From the main object (called prtg in example) you can access all objects in the tree using the prtg.allprobes, prtg.allgroups, prtg.alldevices and prtg.allsensors attributes. The channels are not available by default, you must run sensor.get_channels() to the get the child channels of that sensor. +Upon initialisation the entire device tree is downloaded and each probe, +group, device, sensor and channel is provided as a modifiable object. From the +main object (called prtg in example) you can access all objects in the tree +using the prtg.allprobes, prtg.allgroups, prtg.alldevices and prtg.allsensors +attributes. The channels are not available by default, you must run +sensor.get\_channels() to the get the child channels of that sensor. -You can also set the root of your sensor tree as a group that is not the root of PRTG. This was added to allow a partial sensortree to be downloaded where your PRTG server may have many objects or to provide access to a user with restricted permissions. +You can also set the root of your sensor tree as a group that is not the root +of PRTG. 
This was added to allow a partial sensortree to be downloaded where +your PRTG server may have many objects or to provide access to a user with +restricted permissions. -When you are accessing an object further down the tree you only have access to the direct children of that object. This for example will show the devices that are in the 4th group of the allgroups array: +When you are accessing an object further down the tree you only have access to +the direct children of that object. This for example will show the devices +that are in the 4th group of the allgroups array: ``` -from prtg import prtg_api +from prtg import PRTGApi -prtg = prtg_api('192.168.1.1','prtgadmin','0000000000') +prtg = PRTGApi('192.168.1.1','prtgadmin','0000000000') prtg.allgroups[3].devices ``` -Probe and group objects can have groups and devices as children, device objects have sensors as children and sensors can have channels as children. +Probe and group objects can have groups and devices as children, device +objects have sensors as children and sensors can have channels as children. 
``` -from prtg import prtg_api +from prtg import PRTGApi -prtg = prtg_api('192.168.1.1','prtgadmin','0000000000') +prtg = PRTGApi('192.168.1.1','prtgadmin','0000000000') probeobject = prtg.allprobes[0] groups = probeobject.groups @@ -51,47 +97,56 @@ channel = sensorobject.channels[0] ``` -Current methods and parameters (* = required) on all objects include: +Current methods and parameters (\* = required) on all objects include: - rename() - pause(duration=0,message='') (pause and resume on a channel will change the parent sensor) - resume() -- clone(newname=''*,newplaceid=''*) +- clone(newname=''\*,newplaceid=''\*) - delete(confirm=True) (you can't delete the root object or channels) - refresh() -- set_property(name*,value*) -- get_property(name*) -- set_additional_param(param*) (for custom script sensors) -- set_interval(interval*) -- set_host(host*) (ip address or hostname) -- search_byid(id) -- add_tags(['tag1','tag2']*,clear_old=False) +- set\_property(name\*,value\*) +- get\_property(name\*) +- set\_additional\_param(param\*) (for custom script sensors) +- set\_interval(interval\*) +- set\_host(host\*) (ip address or hostname) +- search\_byid(id) +- add\_tags(['tag1','tag2']\*,clear\_old=False) To come: - move -If you are making small changes such as pause, resume, rename; the local data will update as you go. If you are doing larger changes you should refresh the data after each change. If you refresh the main prtg object it will refresh everything otherwise you can just refresh an object further down the tree to only refresh part of the local data. To refresh an object call the .refresh() method. +If you are making small changes such as pause, resume, rename; the local data +will update as you go. If you are doing larger changes you should refresh the +data after each change. If you refresh the main prtg object it will refresh +everything otherwise you can just refresh an object further down the tree to +only refresh part of the local data. 
To refresh an object call the .refresh() +method. -The set_property method is very powerful and flexible. You can change anything for an object that you can change in the objects settings tab in the web ui. I will add the more commonly used settings as seperate methods. You can use the get_property method to test the name of the property: +The set\_property method is very powerful and flexible. You can change anything +for an object that you can change in the objects settings tab in the web ui. I +will add the more commonly used settings as separate methods. You can use the +get\_property method to test the name of the property: ``` -from prtg import prtg_api +from prtg import PRTGApi -prtg = prtg_api('192.168.1.1','prtgadmin','0000000000') +prtg = PRTGApi('192.168.1.1','prtgadmin','0000000000') prtg.get_property(name='location') #returns the location and sets prtg.location to the result. prtg.set_property(name='location',value='Canada') ``` -There are delays with some actions such as resuming so you should add time delays where appropriate. +There are delays with some actions such as resuming so you should add time +delays where appropriate. example usage: ``` import time -from prtg import prtg_api +from prtg import PRTGApi -prtg = prtg_api('192.168.1.1','prtgadmin','0000000000') +prtg = PRTGApi('192.168.1.1','prtgadmin','0000000000') for device in prtg.alldevices: if device.id == "1234": @@ -110,7 +165,10 @@ for device in prtg.alldevices: ``` -The prtg_api class can be used with the root id set as the root group, a probe, or a group. If you wanted to manage a device or sensor and don't want to download the entire sensortree to loop through the results; you can use the prtg_device and prtg_sensor classes. For example: +The PRTGApi class can be used with the root id set as the root group, a probe, +or a group. 
If you wanted to manage a device or sensor and don't want to +download the entire sensortree to loop through the results; you can use the +PRTGDevice and PRTGSensor classes. For example: ``` host = '192.168.1.1' @@ -120,9 +178,9 @@ passhash = '0000000' protocol = 'http' deviceid = '2025' -device = prtg_device(host,port,user,passhash,protocol,deviceid) +device = PRTGDevice(host,port,user,passhash,protocol,deviceid) sensorid = '2123' -sensor = prtg_sensor(host,port,user,passhash,protocol,sensorid) +sensor = PRTGSensor(host,port,user,passhash,protocol,sensorid) ``` diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000..b13e7a1 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,14 @@ +# Security Policy + +## Supported Versions + +| Version | Supported | +| ------- | ------------------ | +| > 0.1.x | :white_check_mark: | + +## Reporting a Vulnerability + +Please report vulnerabilities to the project email tim.gates@iress.com. +Security vulnerabilities will be gratefully received, taken seriously and +promptly addressed. + diff --git a/app/.coveragerc b/app/.coveragerc new file mode 100644 index 0000000..f4a5869 --- /dev/null +++ b/app/.coveragerc @@ -0,0 +1,4 @@ +[report] + +exclude_lines = + if __name__ == .__main__.: diff --git a/app/.gitignore b/app/.gitignore new file mode 100644 index 0000000..5d80183 --- /dev/null +++ b/app/.gitignore @@ -0,0 +1,3 @@ +test-output.xml +LICENSE +README.md diff --git a/app/.pylintrc b/app/.pylintrc new file mode 100644 index 0000000..f56e9ce --- /dev/null +++ b/app/.pylintrc @@ -0,0 +1,470 @@ +[MASTER] + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code +extension-pkg-whitelist= + +# Add files or directories to the blacklist. They should be base names, not +# paths. +ignore=CVS + +# Add files or directories matching the regex patterns to the blacklist. 
The +# regex matches against base names, not paths. +ignore-patterns= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Use multiple processes to speed up Pylint. +jobs=1 + +# List of plugins (as comma separated values of python modules names) to load, +# usually to register additional checkers. +load-plugins= + +# Pickle collected data for later comparisons. +persistent=yes + +# Specify a configuration file. +#rcfile= + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages +suggestion-mode=yes + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED +confidence= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once).You can also use "--disable=all" to +# disable everything first and then reenable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use"--disable=all --enable=classes +# --disable=W" +disable=useless-object-inheritance + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. 
+enable=c-extension-no-member + + +[REPORTS] + +# Python expression which should return a note less than 10 (10 is the highest +# note). You have access to the variables errors warning, statement which +# respectively contain the number of errors / warnings messages and the total +# number of statements analyzed. This is used by the global evaluation report +# (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details +#msg-template= + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio).You can also give a reporter class, eg +# mypackage.mymodule.MyReporterClass. +output-format=text + +# Tells whether to display a full report or only the messages +reports=no + +# Activate the evaluation score. +score=yes + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=8 + +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. +never-returning-functions=optparse.Values,sys.exit + + +[LOGGING] + +# Logging modules to check that the string format arguments are in logging +# function parameter format +logging-modules=logging + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid to define new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables=yes + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. 
+callbacks=cb_, + _cb + +# A regular expression matching the name of dummy variables (i.e. expectedly +# not used). +dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ + +# Argument names that match this expression will be ignored. Default to name +# with leading underscore +ignored-argument-names=_.*|^ignored_|^unused_ + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,past.builtins,future.builtins,io,builtins + + +[FORMAT] + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format= + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Maximum number of characters on a single line. +max-line-length=80 + +# Maximum number of lines in a module +max-module-lines=1000 + +# List of optional constructs for which whitespace checking is disabled. `dict- +# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. +# `trailing-comma` allows a space between comma and closing bracket: (a, ). +# `empty-line` allows space-only lines. +no-space-check=trailing-comma, + dict-separator + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +single-line-class-stmt=no + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + + +[SPELLING] + +# Limits count of emitted suggestions for spelling mistakes +max-spelling-suggestions=4 + +# Spelling dictionary name. Available dictionaries: none. To make it working +# install python-enchant package. 
+spelling-dict= + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to indicated private dictionary in +# --spelling-private-dict-file option instead of raising a message. +spelling-store-unknown-words=no + + +[BASIC] + +# Naming style matching correct argument names +argument-naming-style=snake_case + +# Regular expression matching correct argument names. Overrides argument- +# naming-style +#argument-rgx= + +# Naming style matching correct attribute names +attr-naming-style=snake_case + +# Regular expression matching correct attribute names. Overrides attr-naming- +# style +#attr-rgx= + +# Bad variable names which should always be refused, separated by a comma +bad-names=foo, + bar, + baz, + toto, + tutu, + tata + +# Naming style matching correct class attribute names +class-attribute-naming-style=any + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style +#class-attribute-rgx= + +# Naming style matching correct class names +class-naming-style=PascalCase + +# Regular expression matching correct class names. Overrides class-naming-style +#class-rgx= + +# Naming style matching correct constant names +const-naming-style=UPPER_CASE + +# Regular expression matching correct constant names. Overrides const-naming- +# style +#const-rgx= + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + +# Naming style matching correct function names +function-naming-style=snake_case + +# Regular expression matching correct function names. 
Overrides function- +# naming-style +#function-rgx= + +# Good variable names which should always be accepted, separated by a comma +good-names=i, + j, + k, + ex, + Run, + _ + +# Include a hint for the correct naming format with invalid-name +include-naming-hint=no + +# Naming style matching correct inline iteration names +inlinevar-naming-style=any + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style +#inlinevar-rgx= + +# Naming style matching correct method names +method-naming-style=snake_case + +# Regular expression matching correct method names. Overrides method-naming- +# style +#method-rgx= + +# Naming style matching correct module names +module-naming-style=snake_case + +# Regular expression matching correct module names. Overrides module-naming- +# style +#module-rgx= + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^_ + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. +property-classes=abc.abstractproperty + +# Naming style matching correct variable names +variable-naming-style=snake_case + +# Regular expression matching correct variable names. Overrides variable- +# naming-style +#variable-rgx= + + +[SIMILARITIES] + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + +# Ignore imports when computing similarities. +ignore-imports=no + +# Minimum lines number of a similarity. +min-similarity-lines=4 + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. 
+contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis. It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. 
+missing-member-max-choices=1 + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME, + XXX, + TODO + + +[CLASSES] + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__, + __new__, + setUp + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict, + _fields, + _replace, + _source, + _make + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + + +[IMPORTS] + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Deprecated modules which should not be used, separated by a comma +deprecated-modules=regsub, + TERMIOS, + Bastion, + rexec + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled) +ext-import-graph= + +# Create a graph of every (i.e. internal and external) dependencies in the +# given file (report RP0402 must not be disabled) +import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled) +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + + +[DESIGN] + +# Maximum number of arguments for function / method +max-args=8 + +# Maximum number of attributes for a class (see R0902). 
+max-attributes=12 + +# Maximum number of boolean expressions in a if statement +max-bool-expr=5 + +# Maximum number of branch for function / method body +max-branches=40 + +# Maximum number of locals for function / method body +max-locals=20 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + +# Maximum number of return / yield for function / method body +max-returns=6 + +# Maximum number of statements in function / method body +max-statements=80 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=0 + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. Defaults to +# "Exception" +overgeneral-exceptions=Exception diff --git a/app/Dockerfile b/app/Dockerfile new file mode 100644 index 0000000..38d57aa --- /dev/null +++ b/app/Dockerfile @@ -0,0 +1,7 @@ +FROM 3amigos/pipenv-all:latest +ENV PYTHONVERS "2.7 3.4 3.5 3.6 3.7" +COPY install.sh /build/install.sh +COPY pip /build/pip +RUN /build/install.sh +WORKDIR /workspace/app +ENV HOME /workspace/app diff --git a/app/MANIFEST.in b/app/MANIFEST.in new file mode 100644 index 0000000..f3155af --- /dev/null +++ b/app/MANIFEST.in @@ -0,0 +1,5 @@ +include LICENSE +include README.md + +recursive-exclude * __pycache__ +recursive-exclude * *.py[co] diff --git a/app/install.sh b/app/install.sh new file mode 100755 index 0000000..5541f86 --- /dev/null +++ b/app/install.sh @@ -0,0 +1,25 @@ +#!/bin/bash + +set -euxo pipefail + +BASEDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + +cd "${BASEDIR}" + +apt-get update +apt-get install -qq -y aspell aspell-en +apt-get install -qq -y hunspell hunspell-en-au +apt-get install -qq -y shellcheck +apt-get install -qq -y git + +for PYVER in ${PYTHONVERS} ; do + cd "${BASEDIR}/pip/${PYVER}" + for reqfile in */requirements.txt ; do + if [ "$(wc -l < "${reqfile}")" -gt 0 ] ; then + "python${PYVER}" -m pip install -r "${reqfile}" + 
fi + done + # Display installation + "python${PYVER}" -m pip freeze + "python${PYVER}" -m safety check +done diff --git a/app/pip/2.7/app/requirements.txt b/app/pip/2.7/app/requirements.txt new file mode 100644 index 0000000..de45286 --- /dev/null +++ b/app/pip/2.7/app/requirements.txt @@ -0,0 +1,5 @@ +bs4 +click +future +lxml +requests diff --git a/app/pip/2.7/test/requirements.txt b/app/pip/2.7/test/requirements.txt new file mode 100644 index 0000000..c0bbe31 --- /dev/null +++ b/app/pip/2.7/test/requirements.txt @@ -0,0 +1,13 @@ +bandit +dlint +flake8 +isort +mock +pylint +pytest +pytest-azurepipelines42 +pytest-cov +pytest-xdist +safety +twine +wheel diff --git a/app/pip/3.4/app/requirements.txt b/app/pip/3.4/app/requirements.txt new file mode 100644 index 0000000..de45286 --- /dev/null +++ b/app/pip/3.4/app/requirements.txt @@ -0,0 +1,5 @@ +bs4 +click +future +lxml +requests diff --git a/app/pip/3.4/test/requirements.txt b/app/pip/3.4/test/requirements.txt new file mode 100644 index 0000000..c0bbe31 --- /dev/null +++ b/app/pip/3.4/test/requirements.txt @@ -0,0 +1,13 @@ +bandit +dlint +flake8 +isort +mock +pylint +pytest +pytest-azurepipelines42 +pytest-cov +pytest-xdist +safety +twine +wheel diff --git a/app/pip/3.5/app/requirements.txt b/app/pip/3.5/app/requirements.txt new file mode 100644 index 0000000..de45286 --- /dev/null +++ b/app/pip/3.5/app/requirements.txt @@ -0,0 +1,5 @@ +bs4 +click +future +lxml +requests diff --git a/app/pip/3.5/test/requirements.txt b/app/pip/3.5/test/requirements.txt new file mode 100644 index 0000000..c0bbe31 --- /dev/null +++ b/app/pip/3.5/test/requirements.txt @@ -0,0 +1,13 @@ +bandit +dlint +flake8 +isort +mock +pylint +pytest +pytest-azurepipelines42 +pytest-cov +pytest-xdist +safety +twine +wheel diff --git a/app/pip/3.6/app/requirements.txt b/app/pip/3.6/app/requirements.txt new file mode 100644 index 0000000..de45286 --- /dev/null +++ b/app/pip/3.6/app/requirements.txt @@ -0,0 +1,5 @@ +bs4 +click +future +lxml +requests 
diff --git a/app/pip/3.6/test/requirements.txt b/app/pip/3.6/test/requirements.txt new file mode 100644 index 0000000..c0bbe31 --- /dev/null +++ b/app/pip/3.6/test/requirements.txt @@ -0,0 +1,13 @@ +bandit +dlint +flake8 +isort +mock +pylint +pytest +pytest-azurepipelines42 +pytest-cov +pytest-xdist +safety +twine +wheel diff --git a/app/pip/3.7/app/requirements.txt b/app/pip/3.7/app/requirements.txt new file mode 100644 index 0000000..de45286 --- /dev/null +++ b/app/pip/3.7/app/requirements.txt @@ -0,0 +1,5 @@ +bs4 +click +future +lxml +requests diff --git a/app/pip/3.7/doco/requirements.txt b/app/pip/3.7/doco/requirements.txt new file mode 100644 index 0000000..8213302 --- /dev/null +++ b/app/pip/3.7/doco/requirements.txt @@ -0,0 +1,2 @@ +sphinx +sphinx_rtd_theme diff --git a/app/pip/3.7/news/requirements.txt b/app/pip/3.7/news/requirements.txt new file mode 100644 index 0000000..0d2d2d7 --- /dev/null +++ b/app/pip/3.7/news/requirements.txt @@ -0,0 +1 @@ +towncrier diff --git a/app/pip/3.7/spelling/requirements.txt b/app/pip/3.7/spelling/requirements.txt new file mode 100644 index 0000000..610a2fe --- /dev/null +++ b/app/pip/3.7/spelling/requirements.txt @@ -0,0 +1,2 @@ +pyspelling +resplendent diff --git a/app/pip/3.7/test/requirements.txt b/app/pip/3.7/test/requirements.txt new file mode 100644 index 0000000..c0bbe31 --- /dev/null +++ b/app/pip/3.7/test/requirements.txt @@ -0,0 +1,13 @@ +bandit +dlint +flake8 +isort +mock +pylint +pytest +pytest-azurepipelines42 +pytest-cov +pytest-xdist +safety +twine +wheel diff --git a/app/prtg/__init__.py b/app/prtg/__init__.py new file mode 100644 index 0000000..d992b17 --- /dev/null +++ b/app/prtg/__init__.py @@ -0,0 +1,12 @@ +""" +Module load point +""" + +from .client import (AuthenticationError, PRTGApi, PRTGDevice, + PRTGHistoricData, PRTGSensor, ResourceNotFound) +from .version import __version__ + +__all__ = [ + '__version__', 'PRTGApi', 'PRTGDevice', 'PRTGSensor', + 'AuthenticationError', 'ResourceNotFound', 
'PRTGHistoricData', +] diff --git a/app/prtg/__main__.py b/app/prtg/__main__.py new file mode 100644 index 0000000..b85dc38 --- /dev/null +++ b/app/prtg/__main__.py @@ -0,0 +1,83 @@ +""" +Module load handler for execution via: + +python -m prtg +""" +from __future__ import absolute_import, division, print_function + +import getpass + +import click +from six.moves.urllib.parse import urlparse + +from .client import PRTGApi +from .version import __version__ as app_version + +CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help']) + + +def get_api(**kwargs): + """ + Handle command line arguments to construct API object + """ + url = kwargs['url'] + urlp = urlparse(url) + passhash = getpass.getpass('PassHash: ') + api = PRTGApi( + host=urlp.hostname, + user=kwargs['user'], + passhash=passhash, + rootid=kwargs['rootid'], + protocol=urlp.scheme, + port=urlp.port if urlp.port else { + 'https': 443, + 'http': 80, + }[urlp.scheme], + ) + return api + + +def show_probe_or_group(obj): + """ + Display and descend a nested element + """ + print(repr(obj)) + for probe in obj.probes: + show_probe_or_group(probe) + for group in obj.groups: + show_probe_or_group(group) + for device in obj.devices: + print(repr(device)) + for sensor in device.sensors: + print(repr(sensor)) + + +def run_show(**kwargs): + """ + Display Groups and Devices under the specified rootid + """ + api = get_api(**kwargs) + show_probe_or_group(api) + + +@click.group(context_settings=CONTEXT_SETTINGS) +@click.version_option(version=app_version) +def main(): + """ + Click Main Entry Point + """ + + +@main.command() +@click.argument('url') +@click.option('--user', default='admin', help='Authentication Username') +@click.option('--rootid', default=0, help='PRTG ID of Root Node to Display') +def show(**kwargs): + """ + Runs the display command + """ + run_show(**kwargs) + + +if __name__ == '__main__': + main() diff --git a/app/prtg/client.py b/app/prtg/client.py new file mode 100644 index 0000000..bbcd8c7 --- 
/dev/null +++ b/app/prtg/client.py @@ -0,0 +1,1147 @@ +# pylint: disable=too-many-lines +""" +Python API client module to manage PRTG servers +""" +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import csv +from builtins import input as safe_input +from datetime import datetime + +import requests +from bs4 import BeautifulSoup + + +class AuthenticationError(Exception): + """ + Raised when failing to login to the API + """ + + +class UnhandledStatusCode(Exception): + """ + Raised when a non-successful status code is observed calling the API and + no expected handling takes place. + """ + + +class ResourceNotFound(Exception): + """ + Raised if requesting a node and PRTG says it is not found or it can not be + located via an ID search + """ + + +class GlobalArrays(object): + """ + class used by PRTGApi and children to manage global arrays of all objects + """ + allprobes = [] + allgroups = [] + alldevices = [] + allsensors = [] + + +class ConnectionMethods(object): + """ + class used by all prtg_* objects to build urls and query prtg using + requests + """ + def __init__(self): + self.host = None + self.port = None + self.user = None + self.passhash = None + self.protocol = None + self.verify = None + self.confdata = None + self.base_url = None + self.base_url_no_api = None + self.url_auth = None + + def unpack_config(self, confdata): + """ + Load the connection config from the config pack + """ + self.host = confdata[0] + self.port = confdata[1] + self.user = confdata[2] + self.passhash = confdata[3] + self.protocol = confdata[4] + self.verify = confdata[5] + self.confdata = confdata + self.base_url = "{protocol}://{host}:{port}/api/".format( + protocol=self.protocol, host=self.host, port=self.port + ) + self.base_url_no_api = "{protocol}://{host}:{port}/".format( + protocol=self.protocol, host=self.host, port=self.port + ) + self.url_auth = "username={username}&passhash={passhash}".format( + username=self.user, 
passhash=self.passhash + ) + + def get_request(self, url_string, api=True): + """ + global method for api calls. Provides errors for the 401 and 404 + responses + """ + if api: + url = "{base}{content}&{auth}".format( + base=self.base_url, content=url_string, auth=self.url_auth + ) + else: + url = "{base}{content}&{auth}".format( + base=self.base_url_no_api, content=url_string, + auth=self.url_auth, + ) + req = requests.get(url, verify=self.verify) + if 200 <= req.status_code <= 299: + return req + if req.status_code == 401: + raise ( + AuthenticationError( + "PRTG authentication failed." + " Check credentials in config file" + ) + ) + if req.status_code == 404: + raise ( + ResourceNotFound( + "No resource at URL used: {0}".format(url) + ) + ) + raise UnhandledStatusCode( + 'Response code was {0}: {1}'.format( + req.status_code, req.text, + ) + ) + + +class BaseConfig(ConnectionMethods): + """ + Base class used for common PRTG functionality + """ + def __init__(self): + super(BaseConfig, self).__init__() + self.id = None # pylint: disable=invalid-name + self.objid = None + self.sensorid = None + self.type = None + self.name = None + self.status = None + self.status_raw = None + self.active = None + self.allprobes = GlobalArrays.allprobes + self.allgroups = GlobalArrays.allgroups + self.alldevices = GlobalArrays.alldevices + self.allsensors = GlobalArrays.allsensors + + def __str__(self): + return "".format( + name=self.name, id=self.id, active=self.active + ) + + def __repr__(self): + return "".format( + name=self.name, id=self.id, active=self.active + ) + + def clear_arrays(self): + """ + Remove cached data + """ + del self.allprobes[:] + del self.allgroups[:] + del self.alldevices[:] + del self.allsensors[:] + + def delete(self, confirm=True): + """ + Called to remove this node in the PRTG tree. + """ + if self.type == "Root": + return "You cannot delete the root object." 
+ delete_url = "deleteobject.htm?id={objid}&approve=1".format( + objid=self.id + ) + if confirm: + response = "" + while response.upper() not in ["Y", "N"]: + response = safe_input( + "Would you like to continue?(Y/[N]) " + ) + if response == "": + response = "N" + if response.upper() == "Y": + _ = self.get_request(url_string=delete_url) + else: + _ = self.get_request( + url_string=delete_url + ) + return "" + + def set_property(self, name, value): + """ + Used to call the PRTG API to update a property on the node + """ + if self.type != "Channel": + setprop_url = ( + "setobjectproperty.htm?id={objid}" + "&name={propname}&value={propval}".format( + objid=self.id, propname=name, propval=value + ) + ) + else: + setprop_url = ( + "setobjectproperty.htm?id={objid}&subid={subid}" + "&name={propname}&value={propval}".format( + objid=self.sensorid, subid=self.objid, propname=name, + propval=value, + ) + ) + _ = self.get_request(url_string=setprop_url) + self.name = value + + def get_property(self, name): + """ + Used to call the PRTG API to retrieve a property on the node + """ + if self.type != "Channel": + getprop_url = ( + "getobjectproperty.htm?id={objid}" + "&name={propname}&show=text".format( + objid=self.id, propname=name + ) + ) + else: + getprop_url = ( + "getobjectproperty.htm?id={objid}" + "&subid={subid}&name={propname}".format( + objid=self.sensorid, subid=self.objid, propname=name + ) + ) + req = self.get_request(url_string=getprop_url) + soup = BeautifulSoup(req.text, "lxml") + if soup.result.text != "(Property not found)": + setattr(self, name, soup.result.text) + return soup.result.text + raise ( + ResourceNotFound( + "No object property of name: {name}".format(name=name) + ) + ) + + def set_interval(self, interval): + """ + note: you will still need to disable inheritance manually. 
+ Valid intervals are (seconds): 30, 60, 300, 600, 900, 1800, 3600, + 14400, 21600, 43200, 86400 + """ + self.set_property(name="interval", value=interval) + + def get_tree(self, root=""): + """ + Gets `sensortree` from prtg. If no `rootid` is provided returns entire + tree + """ + tree_url = ( + "table.xml?content=sensortree" + "&output=xml&id={rootid}".format( + rootid=root + ) + ) + req = self.get_request(url_string=tree_url) + raw_data = req.text + treesoup = BeautifulSoup(raw_data, "lxml") + # returns the xml as a beautifulsoup object + if treesoup.sensortree.nodes: + return treesoup + raise ResourceNotFound( + "No objects at ID: {id}".format(id=root) + ) + + def rename(self, newname): + """ + Used to call the API to rename an element. + """ + rename_url = "rename.htm?id={objid}&value={name}".format( + objid=self.id, name=newname + ) + _ = self.get_request(url_string=rename_url) + self.name = newname + + def pause(self, duration=0, message=""): + """ + Used to pause a check to avoid alerts on this element. + """ + if duration > 0: + pause_url = "pauseobjectfor.htm?id={objid}&duration={time}".format( + objid=self.id, time=str(duration) + ) + else: + pause_url = ( + "pause.htm?id={objid}&action=0".format( + objid=self.id + ) + ) + if message: + pause_url += "&pausemsg={string}".format(string=message) + _ = self.get_request(url_string=pause_url) + self.status = "Paused" + self.active = "false" + self.status_raw = "7" + + def resume(self): + """ + Resume a paused node to receive any further alerts. + """ + resume_url = "pause.htm?id={objid}&action=1".format(objid=self.id) + _ = self.get_request(url_string=resume_url) + # these are question marks because we don't know what status is after + # resume + self.status = "?" + self.active = "true" + self.status_raw = "?" + + def get_status(self, name="status"): + """ + Retrieve the status of this element. 
+ """ + status_url = ( + "getobjectstatus.htm?id={objid}" + "&name={name}&show=text".format( + objid=self.id, name=name + ) + ) + req = self.get_request(url_string=status_url) + soup = BeautifulSoup(req.text, "lxml") + status = soup.result.text.strip() + self.status = status + return status + + def clone(self, newname, newplaceid): + """ + Creating elements is only possible via cloning them and + setting their properties. + """ + clone_url = ( + "duplicateobject.htm?id={objid}" + "&name={name}&targetid={newparent}".format( + objid=self.id, name=newname, newparent=newplaceid + ) + ) + _ = self.get_request(url_string=clone_url) + + def add_tags(self, tags, clear_old=False): + """ + Convenience method to append to the existing tags property. + """ + if not isinstance(tags, list): + raise Exception("Needs tags as type: list") + if clear_old: + old_tags = [] + else: + old_tags = self.get_property("tags").split(" ") + new_tags = " ".join(old_tags + tags) + self.set_property(name="tags", value=new_tags) + + +class PRTGApi(GlobalArrays, BaseConfig): + """ + Parameters: + - host: Enter the ip address or `hostname` where PRTG is running + - port: Enter the tcp port used to connect to prtg. (usually 80 or 443) + - user: Enter your PRTG username + - passhash: Enter your PRTG passhash. Can be found in PRTG + webgui > settings > account settings + - protocol: Enter the protocol used to connect to PRTG server (http or + https) + - rootid: Enter the id of the group/probe that contains all the objects + you want to manage. 
Defaults to 0 (gets entire `sensortree`) + + Example: + host = '192.168.1.1' + port = '443' + user = 'prtgadmin' + passhash = '0000000' + protocol = 'https' + rootid = '53' + prtg = PRTGApi(host,user,passhash,rootid,protocol,port) + """ + + def __init__(self, host, user, passhash, rootid=0, protocol="https", + port="443", verify=True): + super(PRTGApi, self).__init__() + self.confdata = (host, port, user, passhash, protocol, verify) + self.unpack_config(self.confdata) + self.probes = [] + self.groups = [] + self.devices = [] + self.treesoup = None + self.id = rootid + self.initialize() + + def initialize(self): + """ + Called to load the local cache + """ + self.clear_arrays() + # get `sensortree` from root id downwards + self.treesoup = self.get_tree(root=self.id) + # Finds all the direct child nodes in `sensortree` and creates python + # objects, passes each object its xml data + for child in self.treesoup.sensortree.nodes.children: + if child.name is not None: + for childr in child.children: + if childr.name == "probenode": + probeobj = Probe(childr, self.confdata) + self.allprobes.append(probeobj) + self.probes.append(probeobj) + elif childr.name == "device": + deviceobj = Device(childr, self.confdata) + self.devices.append(deviceobj) + self.alldevices.append(deviceobj) + elif childr.name == "group": + groupobj = Group(childr, self.confdata) + self.groups.append(groupobj) + self.allgroups.append(groupobj) + elif childr.name is not None: + if childr.string is None: + childr.string = "" + setattr(self, childr.name, childr.string) + + def refresh(self, refreshsoup=None): + """ + Used to supply or obtain and update local cache + """ + if refreshsoup is None: + # download fresh `sensortree` + refreshsoup = self.get_tree(root=self.id) + self.treesoup = refreshsoup + probeids = [] + newprobeids = [] + groupids = [] + newgroupids = [] + deviceids = [] + newdeviceids = [] + # get ids of existing probes + for aprobe in self.probes: + probeids.append(aprobe.id) + for 
agroup in self.groups: + groupids.append(agroup.id) + for adevice in self.devices: + deviceids.append(adevice.id) + # for all the child objects in `sensortree`, if it already exists + # refresh the object, otherwise create a new one + for child in self.treesoup.sensortree.nodes.children: + if child.name is not None: + for childr in child.children: + if childr.name == "probenode": + if childr.find("id").string in probeids: + for aprobe in self.probes: + if aprobe.id == childr.find("id").string: + aprobe.refresh(childr) + else: + probeobj = Probe(childr, self.confdata) + self.probes.append(probeobj) + self.allprobes.append(probeobj) + # add all probe ids from the `sensortree` to this list + newprobeids.append(childr.find("id").string) + elif childr.name == "group": + if childr.find("id").string in groupids: + for agroup in self.groups: + if agroup.id == childr.find("id").string: + agroup.refresh(childr) + else: + groupobj = Group(childr, self.confdata) + self.allgroups.append(groupobj) + self.groups.append(groupobj) + # add all probe ids from the `sensortree` to this list + newgroupids.append(childr.find("id").string) + elif childr.name == "device": + if childr.find("id").string in deviceids: + for adevice in self.devices: + if adevice.id == childr.find("id").string: + adevice.refresh(childr) + else: + deviceobj = Device(childr, self.confdata) + self.alldevices.append(deviceobj) + self.devices.append(deviceobj) + # add all probe ids from the `sensortree` to this list + newdeviceids.append(childr.find("id").string) + elif childr.name is not None: + if childr.string is None: + childr.string = "" + setattr(self, childr.name, childr.string) + # if existing probes were not in the new `sensortree`, remove from + # `allprobes` + for idval in probeids: + if idval not in newprobeids: + for aprobe in self.probes: + if aprobe.id == idval: + self.allprobes.remove(aprobe) + self.probes.remove(aprobe) + for idval in groupids: + if idval not in newgroupids: + for agroup in 
self.groups: + if agroup.id == idval: + self.allgroups.remove(agroup) + self.groups.remove(agroup) + for idval in deviceids: + if idval not in newdeviceids: + for adevice in self.devices: + if adevice.id == idval: + self.devices.remove(adevice) + self.devices.remove(adevice) + + def search_byid(self, idval): + """ + Find an element with the specified ID looking in all the cached kinds + of data. + """ + idval = str(idval) + for obj in (self.allprobes + self.allgroups + self.alldevices + + self.allsensors): + if obj.id == idval: + return obj + raise ResourceNotFound( + 'Object with ID {0} not found'.format(idval) + ) + + +class Channel(PRTGApi): + """ + A channel is a PRTG concept, sensors have a series of channels. + """ + def __init__(self, channelsoup, sensorid, confdata): + self.unpack_config(confdata) + self.sensorid = sensorid + self.lastvalue = None + self.id = None + self.channelsoup = channelsoup + super(Channel, self).__init__( + host=self.host, user=self.user, passhash=self.passhash, + protocol=self.protocol, port=self.port, verify=self.verify, + ) + + def initialize(self): + """ + Called to load the local cache + """ + for child in self.channelsoup.children: + if child.string is None: + child.string = "" + if child.name is not None: + setattr(self, child.name, child.string) + self.id = self.objid + if self.lastvalue is not None: + if self.lastvalue.replace(".", "").isdigit(): + try: + self.lastvalue_int = int( + self.lastvalue.split(" ")[0].replace(",", "") + ) + self.lastvalue_float = float(self.lastvalue_int) + except ValueError: + self.lastvalue_float = float( + self.lastvalue.split(" ")[0].replace(",", "") + ) + self.lastvalue_int = int(self.lastvalue_float) + self.unit = self.lastvalue.split(" ")[1] + self.type = "Channel" + + def __str__(self): + return "".format(name=self.name, id=self.id) + + def __repr__(self): + return "".format(name=self.name, id=self.id) + + def rename(self, newname): + self.set_property(name="name", value=newname) + 
self.name = newname + + def pause(self, duration=0, message=""): + print("Channels cannot be paused, pausing parent sensor.") + if duration > 0: + pause_url = "pauseobjectfor.htm?id={objid}&duration={time}".format( + objid=self.sensorid, time=duration + ) + else: + pause_url = ( + "pause.htm?id={objid}&action=0&".format( + objid=self.sensorid + ) + ) + if message: + pause_url += "&pausemsg={string}".format(string=message) + _ = self.get_request(url_string=pause_url) + + def resume(self): + print("Channels cannot be resumed, resuming parent sensor.") + resume_url = ( + "pause.htm?id={objid}&action=1".format(objid=self.sensorid) + ) + _ = self.get_request(url_string=resume_url) + + def refresh(self, refreshsoup=None): + """ + Used to supply or obtain and update local cache + """ + channelsoup = refreshsoup + for child in channelsoup.children: + if child.string is None: + child.string = "" + if child.name is not None: + setattr(self, child.name, child.string) + self.id = self.objid + + def delete(self, confirm=True): + return "You cannot delete a channel" + + +class Sensor(PRTGApi): + """ + Used to monitor a target in PRTG. + """ + def __init__(self, sensorsoup, deviceid, confdata): + self.unpack_config(confdata) + self.channels = [] + self.type = "Sensor" + self.deviceid = deviceid + self.filepath = None + self.sensorsoup = sensorsoup + super(Sensor, self).__init__( + host=self.host, user=self.user, passhash=self.passhash, + protocol=self.protocol, port=self.port, verify=self.verify, + ) + + def initialize(self): + """ + Called to load the local cache + """ + for child in self.sensorsoup.children: + if child.string is None: + child.string = "" + if child.name is not None: + setattr(self, child.name, child.string) + setattr(self, "attributes", self.sensorsoup.attrs) + + def get_channels(self): + """ + Get the channels for this sensor. 
+ """ + channel_url = ( + "table.xml?content=channels&output=xml" + "&columns=name,lastvalue_,objid&id={sensorid}".format( + sensorid=self.id + ) + ) + req = self.get_request(url_string=channel_url) + channelsoup = BeautifulSoup(req.text, "lxml") + if not self.channels: + for child in channelsoup.find_all("item"): + self.channels.append(Channel(child, self.id, self.confdata)) + else: + for child in channelsoup.find_all("item"): + for achannel in self.channels: + if achannel.objid == child.find("objid").string: + achannel.refresh(child) + return self.channels + + def refresh(self, refreshsoup=None): + """ + Used to supply or obtain and update local cache + """ + sensorsoup = refreshsoup + if sensorsoup is None: + soup = self.get_tree(root=self.id) + sensorsoup = soup.sensortree.nodes.sensor + for child in sensorsoup.children: + if child.string is None: + child.string = "" + if child.name is not None: + setattr(self, child.name, child.string) + setattr(self, "attributes", sensorsoup.attrs) + if self.channels: + self.get_channels() + + def set_additional_param(self, parameterstring): + """ + Set the params property + """ + self.set_property(name="params", value=parameterstring) + + def acknowledge(self, message=""): + """ + Used indicate a response to an alarm + """ + acknowledge_url = ( + "acknowledgealarm.htm?id={objid}" + "&ackmsg={string}".format( + objid=self.id, string=message + ) + ) + _ = self.get_request(url_string=acknowledge_url) + self.get_status() + + def save_graph(self, graphid, filepath, size, hidden_channels="", + filetype="svg"): + """ + Size options: S,M,L + """ + if size.upper() == "L": + width = "1500" + height = "500" + font = "13" + elif size.upper() == "S": + width = "400" + height = "300" + font = "9" + else: + width = "800" + height = "350" + font = "13" + if hidden_channels: + hidden_channels = "&hide={hc}".format(hc=hidden_channels) + chart_url = ( + "chart.{ft}?type=graph&graphid={gid}&id={sid}&width={w}" + 
"&height={h}{hc}&plotcolor=%23ffffff&gridcolor=%23ffffff" + "&graphstyling=showLegend%3D%271%27" + "+baseFontSize%3D%27{f}%27".format( + ft=filetype, + gid=graphid, + sid=self.id, + w=width, + h=height, + hc=hidden_channels, + f=font, + ) + ) + req = self.get_request(url_string=chart_url, api=False) + with open(filepath, "wb") as imgfile: + for chunk in req: + imgfile.write(chunk) + self.filepath = filepath + + +class Device(PRTGApi): + """ + A physical device that can be monitored by a sensor + """ + def __init__(self, devicesoup, confdata): + self.unpack_config(confdata) + self.sensors = [] + self.devicesoup = devicesoup + super(Device, self).__init__( + host=self.host, user=self.user, passhash=self.passhash, + protocol=self.protocol, port=self.port, verify=self.verify, + ) + + def initialize(self): + """ + Called to load the local cache + """ + for child in self.devicesoup.children: + if child.name == "sensor": + sensorobj = Sensor(child, self.id, self.confdata) + self.sensors.append(sensorobj) + self.allsensors.append(sensorobj) + elif child.name is not None: + if child.string is None: + child.string = "" + setattr(self, child.name, child.string) + # Adds sensors to a dictionary based on their status + self.sensors_by_status = { + "Up": [], "Down": [], "Warning": [], "Paused": [] + } + for asensor in self.sensors: + if asensor.status in self.sensors_by_status.keys(): + self.sensors_by_status[asensor.status].append(asensor) + else: + self.sensors_by_status[asensor.status] = [asensor] + setattr(self, "attributes", self.devicesoup.attrs) + self.type = "Device" + + def refresh(self, refreshsoup=None): + """ + Used to supply or obtain and update local cache + """ + devicesoup = refreshsoup + if devicesoup is None: + soup = self.get_tree(root=self.id) + devicesoup = soup.sensortree.nodes.device + sensorids = [] + newsensorids = [] + for asensor in self.sensors: + sensorids.append(asensor.id) + for child in devicesoup.children: + if child.name == "sensor": + if 
child.find("id").string in sensorids: + for asensor in self.sensors: + if asensor.id == child.find("id").string: + asensor.refresh(child) + else: + sensorobj = Sensor(child, self.id, self.confdata) + self.sensors.append(sensorobj) + self.allsensors.append(sensorobj) + newsensorids.append(child.find("id").string) + elif child.name is not None: + if child.string is None: + child.string = "" + setattr(self, child.name, child.string) + for idval in sensorids: + if idval not in newsensorids: + for asensor in self.sensors: + if asensor.id == idval: + sensortoremove = asensor + self.sensors.remove(sensortoremove) + self.allsensors.remove(sensortoremove) + setattr(self, "attributes", devicesoup.attrs) + + def set_host(self, host): + """ + Set the host property and update the local cache + """ + self.set_property(name="host", value=host) + self.host = host + + +class Group(PRTGApi): + """ + A Tree Nesting Feature - Groups can contain other Groups and Devices + """ + def __init__(self, groupsoup, confdata): + self.unpack_config(confdata) + self.groups = [] + self.devices = [] + self.groupsoup = groupsoup + super(Group, self).__init__( + host=self.host, user=self.user, passhash=self.passhash, + protocol=self.protocol, port=self.port, verify=self.verify, + ) + + def initialize(self): + """ + Called to load the local cache + """ + # `groupsoup` is passed into `__init__` method + # The children objects are either added to this object as an attribute + # or a device/group object is created + for child in self.groupsoup.children: + if child.name == "device": + deviceobj = Device(child, self.confdata) + self.devices.append(deviceobj) + self.alldevices.append(deviceobj) + elif child.name == "group": + groupobj = Group(child, self.confdata) + self.groups.append(groupobj) + self.allgroups.append(groupobj) + elif child.name is not None: + if child.string is None: + child.string = "" + setattr(self, child.name, child.string) + setattr(self, "attributes", self.groupsoup.attrs) + 
self.type = "Group" + + def refresh(self, refreshsoup=None): + """ + Used to supply or obtain and update local cache + """ + groupsoup = refreshsoup + if groupsoup is None: + if self.type == "Group": + soup = self.get_tree(root=self.id) + groupsoup = soup.sensortree.nodes.group + elif self.type == "Probe": + soup = self.get_tree(root=self.id) + groupsoup = soup.sensortree.nodes.probenode + deviceids = [] + newdeviceids = [] + for adevice in self.devices: + deviceids.append(adevice.id) + groupids = [] + newgroupids = [] + for agroup in self.groups: + groupids.append(agroup.id) + for child in groupsoup.children: + if child.name == "device": + if child.find("id").string in deviceids: + for adevice in self.devices: + if adevice.id == child.find("id").string: + adevice.refresh(child) + else: + deviceobj = Device(child, self.confdata) + self.devices.append(deviceobj) + self.alldevices.append(deviceobj) + newdeviceids.append(child.find("id").string) + elif child.name == "group": + if child.find("id").string in groupids: + for agroup in self.groups: + if agroup.id == child.find("id").string: + agroup.refresh(child) + else: + groupobj = Group(child, self.confdata) + self.groups.append(groupobj) + self.allgroups.append(groupobj) + newgroupids.append(child.find("id").string) + elif child.name is not None: + if child.string is None: + child.string = "" + setattr(self, child.name, child.string) + for idval in deviceids: + if idval not in newdeviceids: + for adevice in self.devices: + if adevice.id == idval: + devicetoremove = adevice + self.devices.remove(devicetoremove) + self.alldevices.remove(devicetoremove) + for idval in groupids: + if idval not in newgroupids: + for agroup in self.groups: + if agroup.id == idval: + grouptoremove = agroup + self.groups.remove(grouptoremove) + self.allgroups.remove(grouptoremove) + setattr(self, "attributes", groupsoup.attrs) + + +class Probe(Group): + """ + Probe is the same as group so it inherits all methods and attributes except + type 
+ """ + type = "Probe" + + +class PRTGDevice(BaseConfig): + """ + Separate top level object to manage just a device and its sensors instead + of downloading details for an entire group + """ + + def __init__(self, host, user, passhash, deviceid, protocol="https", + port="443", verify=True): + self.confdata = (host, port, user, passhash, protocol, verify) + self.unpack_config(self.confdata) + self.sensors = [] + self.sensors_by_status = { + "Up": [], "Down": [], "Warning": [], "Paused": [] + } + self.deviceid = deviceid + super(PRTGDevice, self).__init__( + host=self.host, user=self.user, passhash=self.passhash, + protocol=self.protocol, port=self.port, verify=self.verify, + ) + + def initialize(self): + """ + Called to load the local cache + """ + soup = self.get_tree(root=self.deviceid) + for child in soup.sensortree.nodes.device: + if child.name == "sensor": + sensorobj = Sensor(child, self.id, self.confdata) + self.sensors.append(sensorobj) + elif child.name is not None: + if child.string is None: + child.string = "" + setattr(self, child.name, child.string) + for asensor in self.sensors: + if asensor.status in self.sensors_by_status.keys(): + self.sensors_by_status[asensor.status].append(asensor) + else: + self.sensors_by_status[asensor.status] = [asensor] + + def refresh(self, refreshsoup=None): + """ + Used to supply or obtain and update local cache + """ + soup = refreshsoup + if soup is None: + soup = self.get_tree(root=self.deviceid) + sensorids = [] + for asensor in self.sensors: + sensorids.append(asensor.id) + for child in soup.sensortree.nodes.device: + if child.name == "sensor": + if child.find("id").string in sensorids: + for asensor in self.sensors: + if asensor.id == child.find("id").string: + asensor.refresh(child) + else: + sensorobj = Sensor(child, self.id, self.confdata) + self.sensors.append(sensorobj) + elif child.name is not None: + if child.string is None: + child.string = "" + setattr(self, child.name, child.string) + + +class 
PRTGSensor(BaseConfig): + """Separate top level object to manage just a sensor and its channels + instead of downloading details for an entire group""" + + def __init__(self, host, user, passhash, sensorid, protocol="https", + port="443", verify=True): + self.confdata = (host, port, user, passhash, protocol, verify) + self.unpack_config(self.confdata) + self.channels = [] + self.sensorid = sensorid + self.filepath = None + super(PRTGSensor, self).__init__() + + def initialize(self): + """ + Called to load the local cache + """ + soup = self.get_tree(root=self.sensorid) + for child in soup.sensortree.nodes.sensor: + if child.name is not None: + if child.string is None: + child.string = "" + setattr(self, child.name, child.string) + self.get_channels() + + def refresh(self, refreshsoup=None): + """ + Used to supply or obtain and update local cache + """ + soup = refreshsoup + if soup is None: + soup = self.get_tree(root=self.id) + sensorsoup = soup.sensortree.nodes.sensor + for child in sensorsoup.children: + if child.string is None: + child.string = "" + if child.name is not None: + setattr(self, child.name, child.string) + setattr(self, "attributes", sensorsoup.attrs) + self.get_channels() + + def get_channels(self): + """ + Lookup the channels the sensor has. + """ + channel_url = ( + "table.xml?content=channels&output=xml" + "&columns=name,lastvalue_,objid&id={sensorid}".format( + sensorid=self.id + ) + ) + req = self.get_request(url_string=channel_url) + channelsoup = BeautifulSoup(req.text, "lxml") + if not self.channels: + for child in channelsoup.find_all("item"): + self.channels.append(Channel(child, self.id, self.confdata)) + else: + for child in channelsoup.find_all("item"): + for achannel in self.channels: + if achannel.objid == child.find("objid").string: + achannel.refresh(child) + + def acknowledge(self, message=""): + """ + Used to indicate a response to a sensor being investigated. 
+ """ + acknowledge_url = ( + "acknowledgealarm.htm?id={objid}" + "&ackmsg={string}".format( + objid=self.id, string=message + ) + ) + _ = self.get_request(url_string=acknowledge_url) + + def save_graph(self, graphid, filepath, size, hidden_channels="", + filetype="svg"): + """ + Size options: S,M,L + """ + if size.upper() == "L": + width = "1500" + height = "500" + font = "13" + elif size.upper() == "S": + width = "400" + height = "300" + font = "9" + else: + width = "800" + height = "350" + font = "13" + if hidden_channels: + hidden_channels = "&hide={hc}".format(hc=hidden_channels) + chart_url = ( + "chart.{ft}?type=graph&graphid={gid}&id={sid}" + "&width={w}&height={h}{hc}&plotcolor=%23ffffff" + "&gridcolor=%23ffffff&graphstyling=showLegend" + "%3D%271%27+baseFontSize%3D%27{f}%27".format( + ft=filetype, + gid=graphid, + sid=self.id, + w=width, + h=height, + hc=hidden_channels, + f=font, + ) + ) + req = self.get_request(url_string=chart_url, api=False) + with open(filepath, "wb") as imgfile: + for chunk in req: + imgfile.write(chunk) + self.filepath = filepath + + +class PRTGHistoricData(ConnectionMethods): + """class used for calls to the historic data api. + Call the class first using connection params then use + methods to get/process data. 
yyyy-mm-dd-hh-mm-ss""" + + def __init__(self, host, port, user, passhash, protocol, verify=True): + self.confdata = (host, port, user, passhash, protocol, verify) + self.unpack_config(self.confdata) + super(PRTGHistoricData, self).__init__( + host=self.host, user=self.user, passhash=self.passhash, + protocol=self.protocol, port=self.port, verify=self.verify, + ) + + @staticmethod + def format_date(dateobj): + """Pass a datetime object and this will format appropriately + for use with the historic data api""" + return dateobj.strftime("%Y-%m-%d-%H-%M-%S") + + def get_historic_data(self, objid, startdate, enddate, timeaverage): + """ + Call PRTG API to load historic data + """ + if isinstance(startdate, datetime): + startdate = self.format_date(startdate) + if isinstance(enddate, datetime): + enddate = self.format_date(enddate) + historic_url = ( + "historicdata.csv?id={id}&avg={avg}" + "&sdate={sdate}&edate={edate}".format( + id=objid, avg=timeaverage, sdate=startdate, edate=enddate + ) + ) + req = self.get_request(url_string=historic_url) + csv_raw = req.text + csv_lines = (csv_raw.split("\n"))[:-2] + csv_reader = csv.reader(csv_lines) + data = {} + for ind, row in enumerate(csv_reader): + if ind == 0: + headers = row + for header in headers: + data[header] = [] + else: + for inde, cell in enumerate(row): + if headers[inde] == "Date Time": + if "-" in cell: + cell = cell[: cell.index(" -")] + data[headers[inde]].append( + datetime.strptime(cell, "%m/%d/%Y %I:%M:%S %p") + ) + else: + data[headers[inde]].append(cell) + return data diff --git a/app/prtg/newsfragments/.gitignore b/app/prtg/newsfragments/.gitignore new file mode 100644 index 0000000..c8a9d02 --- /dev/null +++ b/app/prtg/newsfragments/.gitignore @@ -0,0 +1,2 @@ + +!.gitignore diff --git a/app/prtg/newsfragments/1.bugfix b/app/prtg/newsfragments/1.bugfix new file mode 100644 index 0000000..46dcf80 --- /dev/null +++ b/app/prtg/newsfragments/1.bugfix @@ -0,0 +1,2 @@ +Ran flake8, bandit, pyspelling, pylint 
and isort to find a number of bugs +(undefined variables mainly). diff --git a/app/prtg/version.py b/app/prtg/version.py new file mode 100644 index 0000000..51ff65b --- /dev/null +++ b/app/prtg/version.py @@ -0,0 +1,5 @@ +""" +Version information for package +""" + +__version__ = '0.1.0dev0' diff --git a/app/setup.py b/app/setup.py new file mode 100644 index 0000000..79329a4 --- /dev/null +++ b/app/setup.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +""" +`setuptools` Distribution for prtg +""" + +# System Imports +import codecs +import os +import re + +# External Imports +from setuptools import find_packages, setup + +PACKAGE_NAME = 'prtg' + + +def load_readme(fname): + """ + Read the contents of relative `README` file. + """ + file_path = os.path.join(os.path.dirname(__file__), fname) + with codecs.open(file_path, encoding='utf-8') as fobj: + sub = ( + '(https://github.com/' + 'timgates42/prtg' + '/blob/master/\\g<1>)' + ) + markdown_fixed = re.sub( + '[(]([^)]*[.](?:md|rst))[)]', + sub, + fobj.read(), + ) + rst_fixed = re.sub( + '^[.][.] [_][`][^`]*[`][:] ([^)]*[.](?:md|rst))', + sub, + markdown_fixed + ) + return rst_fixed + + +def read_version(): + """ + Read the contents of relative file. 
+ """ + file_path = os.path.join( + os.path.dirname(__file__), PACKAGE_NAME, 'version.py' + ) + regex = re.compile('__version__ = [\'\"]([^\'\"]*)[\'\"]') + with codecs.open(file_path, encoding='utf-8') as fobj: + for line in fobj: + mobj = regex.match(line) + if mobj: + return mobj.group(1) + raise Exception('Failed to read version') + + +setup( + name=PACKAGE_NAME, + version=read_version(), + author='Chris Hutchings', + author_email='hutchris@gmail.com', + maintainer='Tim Gates', + maintainer_email='tim.gates@iress.com', + packages=find_packages(exclude=['tests']), + license='MIT', + description=( + 'Python module to manage PRTG servers' + ), + long_description=load_readme('README.md'), + long_description_content_type='text/markdown', + python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*', + install_requires=[ + elem for elem in + 'bs4\nclick\nfuture\nlxml\nrequests'.split('\n') + if elem + ], + url='https://github.com/timgates42/prtg', + classifiers=[elem for elem in [ + 'Development Status :: 4 - Beta', + 'Programming Language :: Python', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: Implementation :: CPython', + 'Programming Language :: Python :: Implementation :: PyPy', + 'Operating System :: OS Independent', + 'License :: OSI Approved :: MIT License', + ] if elem], +) diff --git a/app/tests/__init__.py b/app/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/tests/test_client.py b/app/tests/test_client.py new file mode 100644 index 0000000..290c1f5 --- /dev/null +++ b/app/tests/test_client.py @@ -0,0 +1,10 @@ +""" +Test Cases for the PRTG API Client +""" + + +def test_noop(): + """ + Initial passing test case + """ + assert 1 + 1 == 2 # 
nosec diff --git a/app/tests/test_main.py b/app/tests/test_main.py new file mode 100644 index 0000000..cb36722 --- /dev/null +++ b/app/tests/test_main.py @@ -0,0 +1,12 @@ +""" +Test modules for prtg.__main__ +""" + + +def test_main(): + """ + Starting point for test + """ + # Setup + # Exercise + # Verify diff --git a/azure-pipelines.yml b/azure-pipelines.yml new file mode 100644 index 0000000..d5a5426 --- /dev/null +++ b/azure-pipelines.yml @@ -0,0 +1,13 @@ +# Docker based build via ./ci.sh + +trigger: +- master + +jobs: + +- job: 'Docker' + pool: + vmImage: 'ubuntu-16.04' + + steps: + - script: ./ci.sh diff --git a/ci.sh b/ci.sh new file mode 100755 index 0000000..e7481d0 --- /dev/null +++ b/ci.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +set -euxo pipefail + +BASEDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + +CMD="${1:-test}" +if ! which docker ; then + echo 'Docker is missing!' >&2 + exit 1 +fi +if ! which docker-compose ; then + echo 'Docker-Compose is missing!' >&2 + exit 1 +fi +if [[ "$CMD" =~ [^a-zA-Z0-9_] ]]; then + echo "Invalid Command: ${CMD}" >&2 + exit 1 +fi +cd "${BASEDIR}" +"${BASEDIR}/ci/${CMD}.sh" "${@:2}" + diff --git a/ci/doco.sh b/ci/doco.sh new file mode 100755 index 0000000..1c44017 --- /dev/null +++ b/ci/doco.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +set -euxo pipefail + +THISDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +BASEDIR="$( dirname "${THISDIR}" )" + +# shellcheck source=/dev/null +source "${BASEDIR}/ci/shared/_docker_helper.sh" + +docker_compose_run "app" "/workspace/ci/in_docker/doco.sh" "$@" diff --git a/ci/in_docker/doco.sh b/ci/in_docker/doco.sh new file mode 100755 index 0000000..6cd256b --- /dev/null +++ b/ci/in_docker/doco.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +set -euxo pipefail + +THISDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +BASEDIR="$( dirname "$( dirname "${THISDIR}" )" )" + +# shellcheck source=/dev/null +source "${BASEDIR}/ci/in_docker/prepare.sh" + +cd "${BASEDIR}/docs" +make html diff --git 
a/ci/in_docker/news.sh b/ci/in_docker/news.sh new file mode 100755 index 0000000..e433a52 --- /dev/null +++ b/ci/in_docker/news.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +set -euxo pipefail + +THISDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +BASEDIR="$( dirname "$( dirname "${THISDIR}" )" )" + +# shellcheck source=/dev/null +source "${BASEDIR}/ci/in_docker/prepare.sh" + +PYVER=3.7 +cd "${BASEDIR}" +"python${PYVER}" -m towncrier "$@" diff --git a/ci/in_docker/pipenv.sh b/ci/in_docker/pipenv.sh new file mode 100755 index 0000000..414c648 --- /dev/null +++ b/ci/in_docker/pipenv.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +set -euxo pipefail + +THISDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +BASEDIR="$( dirname "$( dirname "${THISDIR}" )" )" + +# shellcheck source=/dev/null +source "${BASEDIR}/ci/in_docker/prepare.sh" + +cd "${BASEDIR}" +for PYVER in ${PYTHONVERS} ; do + cd "${BASEDIR}/app/pipenv/${PYVER}" + rm -rf "${BASEDIR}/.local" + "python${PYVER}" -m pipenv "$@" + rm -rf "${BASEDIR}/.local" +done diff --git a/ci/in_docker/prepare.sh b/ci/in_docker/prepare.sh new file mode 100755 index 0000000..8172294 --- /dev/null +++ b/ci/in_docker/prepare.sh @@ -0,0 +1,14 @@ +#!/bin/bash + +set -euxo pipefail + +THISDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +BASEDIR="$( dirname "$( dirname "${THISDIR}" )" )" + +cp "${BASEDIR}/README.md" "${BASEDIR}/app/README.md" +cp "${BASEDIR}/LICENSE" "${BASEDIR}/app/LICENSE" + +MAIN_MODULE="prtg" +MODULES=( "${MAIN_MODULE}" "tests" ) +export MODULES +export MAIN_MODULE diff --git a/ci/in_docker/publish.sh b/ci/in_docker/publish.sh new file mode 100755 index 0000000..4bdb06d --- /dev/null +++ b/ci/in_docker/publish.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +set -euxo pipefail + +THISDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +BASEDIR="$( dirname "$( dirname "${THISDIR}" )" )" + +# shellcheck source=/dev/null +source "${BASEDIR}/ci/in_docker/prepare.sh" + +cd "${BASEDIR}/app" +rm -rf dist build +for PYVER in 
${PYTHONVERS} ; do + "python${PYVER}" setup.py sdist bdist_wheel +done +python3.7 -m twine upload --repository-url https://test.pypi.org/legacy/ dist/* +python3.7 -m twine upload dist/* diff --git a/ci/in_docker/pylint.sh b/ci/in_docker/pylint.sh new file mode 100755 index 0000000..e2c66bc --- /dev/null +++ b/ci/in_docker/pylint.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +set -euxo pipefail + +THISDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +BASEDIR="$( dirname "$( dirname "${THISDIR}" )" )" + +PYTHON="${1}" +TARGET="${2}" +if ! "${PYTHON}" -m pylint --rcfile "${BASEDIR}/app/.pylintrc" "${TARGET}" ; then + echo "Pylint failed on ${TARGET}" >&2 + exit 255 +fi diff --git a/ci/in_docker/test.sh b/ci/in_docker/test.sh new file mode 100755 index 0000000..a66ae68 --- /dev/null +++ b/ci/in_docker/test.sh @@ -0,0 +1,28 @@ +#!/bin/bash + +set -euxo pipefail + +THISDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +BASEDIR="$( dirname "$( dirname "${THISDIR}" )" )" + +# shellcheck source=/dev/null +source "${BASEDIR}/ci/in_docker/prepare.sh" + +cd "${BASEDIR}" +find . 
-iname \*.sh -print0 | xargs -0 shellcheck +# Version independant checks +PYVER=3.7 +# Run pyspelling in root to check docs +"python${PYVER}" -m pyspelling +cd "${BASEDIR}/app" +# Version dependant checks +for PYVER in ${PYTHONVERS} ; do + "python${PYVER}" -m flake8 "${MODULES[@]}" + "python${PYVER}" -m isort -rc -c --diff "${MODULES[@]}" + "python${PYVER}" -m bandit -r "${MODULES[@]}" + find "${MODULES[@]}" -iname \*.py -print0 | xargs -0 -n 1 "${BASEDIR}/ci/in_docker/pylint.sh" "python${PYVER}" + "python${PYVER}" -m pytest -n auto --cov-config=.coveragerc --cov-fail-under=0 "--cov=${MAIN_MODULE}" --cov-report=xml:test-cov.xml --cov-report=html +done +# validate doco +"${BASEDIR}/ci/in_docker/doco.sh" +echo 'Testing Complete' diff --git a/ci/news.sh b/ci/news.sh new file mode 100755 index 0000000..b5bb2fc --- /dev/null +++ b/ci/news.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +set -euxo pipefail + +THISDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +BASEDIR="$( dirname "${THISDIR}" )" + +# shellcheck source=/dev/null +source "${BASEDIR}/ci/shared/_docker_helper.sh" + +docker_compose_run "app" "/workspace/ci/in_docker/news.sh" "$@" diff --git a/ci/pipenv.sh b/ci/pipenv.sh new file mode 100755 index 0000000..16079e5 --- /dev/null +++ b/ci/pipenv.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +set -euxo pipefail + +THISDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +BASEDIR="$( dirname "${THISDIR}" )" + +# shellcheck source=/dev/null +source "${BASEDIR}/ci/shared/_docker_helper.sh" + +docker_compose_run "app" "/workspace/ci/in_docker/pipenv.sh" "$@" diff --git a/ci/publish.sh b/ci/publish.sh new file mode 100755 index 0000000..cf3dfd3 --- /dev/null +++ b/ci/publish.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +set -euxo pipefail + +THISDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +BASEDIR="$( dirname "${THISDIR}" )" + +# shellcheck source=/dev/null +source "${BASEDIR}/ci/shared/_docker_helper.sh" + +docker_compose_run "app" "/workspace/ci/in_docker/publish.sh" "$@" 
diff --git a/ci/shared/_docker_helper.sh b/ci/shared/_docker_helper.sh new file mode 100755 index 0000000..ee2e4bc --- /dev/null +++ b/ci/shared/_docker_helper.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +function docker_compose_run() { + USEROPT="$(id -u):$(id -g)" + docker-compose build + docker-compose up -d + docker-compose run --rm -u "${USEROPT}" "$@" + docker-compose down +} diff --git a/ci/test.sh b/ci/test.sh new file mode 100755 index 0000000..c48d57f --- /dev/null +++ b/ci/test.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +set -euxo pipefail + +THISDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +BASEDIR="$( dirname "${THISDIR}" )" + +# shellcheck source=/dev/null +source "${BASEDIR}/ci/shared/_docker_helper.sh" + +docker_compose_run "app" "/workspace/ci/in_docker/test.sh" "$@" diff --git a/cookiecutter.json b/cookiecutter.json new file mode 100644 index 0000000..66aae12 --- /dev/null +++ b/cookiecutter.json @@ -0,0 +1,34 @@ +{ + "project_name": "prtg", + "project_long_name": "{{ cookiecutter.project_name }}", + "project_description": "Python module to manage PRTG servers", + "project_long_description": "{{ cookiecutter.project_description }}", + "project_slug": "{{ cookiecutter.project_name.lower()|replace(' ', '_')|replace('-', '_')|replace('.', '_')|trim() }}", + "github_org": "timgates42", + "github_repo": "prtg", + "project_url": "https://github.com/{{ cookiecutter.github_org }}/{{ cookiecutter.github_repo }}", + "py_modulename": "{{ cookiecutter.project_slug }}", + "community_name": "prtg-dev", + "docker_application_tagname": "app", + "docker_workspace_dirname": "workspace", + "docker_application_dirname": "{{ cookiecutter.docker_application_tagname }}", + "author_email": "hutchris@gmail.com", + "author_name": "Chris Hutchings", + "maintainer_name": "Tim Gates", + "maintainer_email": "tim.gates@iress.com", + "project_email": "{{ cookiecutter.maintainer_email }}", + "azure_buildid": "14", + "azure_user": "timgates", + "azure_project": "{{ 
cookiecutter.azure_user }}", + "build_status_url": "https://dev.azure.com/{{ cookiecutter.azure_user }}/{{ cookiecutter.azure_project }}/_apis/build/status/{{ cookiecutter.github_org }}.{{ cookiecutter.github_repo }}?branchName=master", + "build_url": "https://dev.azure.com/{{ cookiecutter.azure_user }}/{{ cookiecutter.azure_project }}/_build/latest?definitionId={{ cookiecutter.azure_buildid }}&branchName=master", + "pypi_name": "{{ cookiecutter.project_name }}", + "readthedocs_name": "{{ cookiecutter.project_name }}", + "app_requirements": "bs4\nclick\nfuture\nlxml\nrequests", + "cov_fail_under": "0", + "dictionary": "en_AU", + "python_requires": ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*", + "python_versions": "2.7 3.4 3.5 3.6 3.7", + "classifier_pytwo": "Programming Language :: Python :: 2", + "classifier_pytwoseven": "Programming Language :: Python :: 2.7" +} diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..4e24c69 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,11 @@ +--- +version: '3.7' +services: + # Service image + app: + image: prtg:latest + build: app + volumes: + - type: bind + source: . + target: /workspace diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..298ea9e --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,19 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
+%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) \ No newline at end of file diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000..2a714b9 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,84 @@ +""" +Configuration file for the Sphinx documentation builder. +""" +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# http://www.sphinx-doc.org/en/master/config +# +# pylint: disable=invalid-name,redefined-builtin + +# -- Path setup -------------------------------------------------------------- + +import os +import sys +sys.path.insert(0, os.path.abspath(os.path.join( + '..', + 'app', +))) + + +# -- Project information ----------------------------------------------------- + +project = 'prtg' +copyright = '2019, Chris Hutchings' +author = 'Chris Hutchings' + + +def read_version(): + """ + Read the contents of relative file. + """ + import re + import codecs + file_path = os.path.join( + '..', + 'app', + 'prtg', + 'version.py', + ) + regex = re.compile('__version__ = [\'\"]([^\'\"]*)[\'\"]') + with codecs.open(file_path, encoding='utf-8') as fobj: + for line in fobj: + mobj = regex.match(line) + if mobj: + return mobj.group(1) + raise Exception('Failed to read version') + + +# The full version, including alpha/beta/rc tags +release = read_version() + + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. 
+exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'sphinx_rtd_theme' + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Sphinx 2.0 changes from index to contents +master_doc = 'index' diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..c7b796b --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,20 @@ +.. prtg documentation master file, created by + sphinx-quickstart on Tue Jul 23 07:42:50 2019. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Documentation for prtg! +================================================================ + +.. toctree:: + :maxdepth: 2 + :caption: Contents: + + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/modules.rst b/docs/modules.rst new file mode 100644 index 0000000..37edf53 --- /dev/null +++ b/docs/modules.rst @@ -0,0 +1,7 @@ +prtg +========= + +.. toctree:: + :maxdepth: 4 + + prtg diff --git a/docs/prtg.rst b/docs/prtg.rst new file mode 100644 index 0000000..dc19915 --- /dev/null +++ b/docs/prtg.rst @@ -0,0 +1,22 @@ +prtg package +================= + +Submodules +---------- + +prtg.version module +------------------------ + +.. automodule:: prtg.version + :members: + :undoc-members: + :show-inheritance: + + +Module contents +--------------- + +.. 
automodule:: prtg + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 0000000..de45286 --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,5 @@ +bs4 +click +future +lxml +requests diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt new file mode 100644 index 0000000..fecac32 --- /dev/null +++ b/docs/spelling_wordlist.txt @@ -0,0 +1,102 @@ +alldevices +allgroups +allprobes +allsensors +api +args +automodule +azurepipelines +backlink +beautifulsoup +BeautifulSoup +bs +Bugfixes +byid +charset +CLI +cobertura +config +cookiecutter +CookieCutter +cov +css +datetime +dev +deviceid +deviceobj +deviceobject +dlint +DOCTYPE +exename +genindex +hh +hostname +href +html +http +https +ip +isort +junit +JUnit +li +lt +lxml +maxdepth +md +modindex +newname +newplaceid +noqa +nosec +parallelization +param +params +passhash +pipefish +pre +probeobject +prtg +prtgadmin +PRTGApi +PRTGDevice +PRTGSensor +py +pylint +PyPi +pyspelling +pytest +quickstart +rc +README +ReadTheDocs +resplendent +reStructuredText +rootid +rst +sensorid +sensorobject +sensortree +setuptools +sexualized +shellcheck +socio +ss +submodules +Submodules +tcp +th +toctree +towncrier +txt +ui +ul +unanimous +undoc +unicode +urls +utf +webgui +xdist +xml +yyyy diff --git a/hooks/post_gen_project.sh b/hooks/post_gen_project.sh new file mode 100755 index 0000000..9204d09 --- /dev/null +++ b/hooks/post_gen_project.sh @@ -0,0 +1,14 @@ +#!/bin/bash + +set -euxo pipefail + +BASEDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +TOP="$(dirname "${BASEDIR}")" + +cd "${TOP}" +git checkout -- \ + app/prtg \ + app/tests \ + app/.pylintrc \ + README.md \ + LICENSE diff --git a/prtg.py b/prtg.py deleted file mode 100644 index 5e30bf8..0000000 --- a/prtg.py +++ /dev/null @@ -1,680 +0,0 @@ -import os -import csv -import requests -from datetime import datetime,timedelta -from bs4 import BeautifulSoup -from 
# NOTE: part of the module import section in the original file; urllib3
# warnings are silenced because every request is made with verify=False
# (self-signed PRTG certificates are common). Guarded so the classes below
# can still be defined when requests is absent (it is imported at module top
# in the real file).
try:
    import requests
    from requests.packages.urllib3.exceptions import InsecureRequestWarning
    requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
except ImportError:  # pragma: no cover - requests is a hard runtime dependency
    pass


class global_arrays(object):
    """Class-level registries shared by prtg_api and its children.

    These are intentionally class attributes: every probe/group/device/sensor
    object created anywhere in the tree is appended to the same four lists, so
    any object can search the whole tree.
    """
    allprobes = []
    allgroups = []
    alldevices = []
    allsensors = []


class connection_methods(object):
    """Mixin that builds PRTG API URLs and performs authenticated GETs."""

    # Exceptions live here (not on baseconfig, as in the original) so that
    # classes inheriting only connection_methods - e.g. the historic data
    # client - can also raise them via self.*. baseconfig subclasses still
    # see them through inheritance, so existing callers are unaffected.
    class AuthenticationError(Exception):
        """PRTG rejected the supplied username/passhash (HTTP 401)."""

    class ResourceNotFound(Exception):
        """The requested PRTG object/property/URL does not exist (HTTP 404)."""

    def unpack_config(self, confdata):
        """Store connection settings and pre-build the base URL strings.

        confdata is the tuple (host, port, user, passhash, protocol).
        """
        self.host, self.port, self.user, self.passhash, self.protocol = confdata
        self.confdata = confdata
        self.base_url = "{protocol}://{host}:{port}/api/".format(
            protocol=self.protocol, host=self.host, port=self.port)
        self.base_url_no_api = "{protocol}://{host}:{port}/".format(
            protocol=self.protocol, host=self.host, port=self.port)
        self.url_auth = "username={username}&passhash={passhash}".format(
            username=self.user, passhash=self.passhash)

    def get_request(self, url_string, api=True):
        """Perform an authenticated GET against the PRTG server.

        url_string is the path+query below the base URL; api=False targets
        the web root instead of /api/ (used for chart downloads). Returns the
        requests response. Raises AuthenticationError on 401 and
        ResourceNotFound on 404.
        """
        base = self.base_url if api else self.base_url_no_api
        url = "{base}{content}&{auth}".format(
            base=base, content=url_string, auth=self.url_auth)
        req = requests.get(url, verify=False)  # nosec - self-signed certs are common
        if req.status_code == 200:
            return req
        if req.status_code == 401:
            raise self.AuthenticationError(
                "PRTG authentication failed. Check credentials in config file")
        if req.status_code == 404:
            # BUG FIX: the original formatted the undefined name "tree_url"
            # here, so a 404 raised NameError instead of ResourceNotFound.
            raise self.ResourceNotFound("No resource at URL used: {0}".format(url))
        # BUG FIX: other error codes previously fell through and returned
        # None, causing opaque AttributeErrors in callers; surface them.
        req.raise_for_status()
        return req


class baseconfig(connection_methods):
    """Shared behaviour for every PRTG tree object (probe/group/device/sensor)."""

    def __str__(self):
        # BUG FIX: the original returned "".format(...), i.e. always the
        # empty string with unused keyword arguments.
        return "{name} (id: {id}, active: {active})".format(
            name=self.name, id=self.id, active=self.active)

    def __repr__(self):
        return self.__str__()

    def clear_arrays(self):
        """Empty the shared registries in place (they are class attributes)."""
        del self.allprobes[:]
        del self.allgroups[:]
        del self.alldevices[:]
        del self.allsensors[:]

    def delete(self, confirm=True):
        """Delete this object in PRTG.

        With confirm=True (default) the user is prompted on stdin and the
        object is only deleted on an explicit "Y". The Root object can never
        be deleted.
        """
        if self.type == "Root":
            return "You cannot delete the root object."
        delete_url = "deleteobject.htm?id={objid}&approve=1".format(objid=self.id)
        if confirm:
            response = ""
            while response.upper() not in ["Y", "N"]:
                response = str(input("Would you like to continue?(Y/[N]) "))
                if response == "":
                    response = "N"
            if response.upper() == "Y":
                self.get_request(url_string=delete_url)
        else:
            self.get_request(url_string=delete_url)

    def set_property(self, name, value):
        """Set an object property via the PRTG API.

        Channels address properties with the parent sensor id plus their own
        sub-id; every other object type uses its own id.
        """
        if self.type != "Channel":
            setprop_url = ("setobjectproperty.htm?id={objid}&name={propname}"
                           "&value={propval}").format(
                               objid=self.id, propname=name, propval=value)
        else:
            setprop_url = ("setobjectproperty.htm?id={objid}&subid={subid}"
                           "&name={propname}&value={propval}").format(
                               objid=self.sensorid, subid=self.objid,
                               propname=name, propval=value)
        self.get_request(url_string=setprop_url)
        # BUG FIX: the original always did "self.name = value", clobbering the
        # object's name whenever ANY property was set; mirror the property
        # that was actually changed instead.
        setattr(self, name, value)

    def get_property(self, name):
        """Fetch an object property, cache it as an attribute and return it.

        Raises ResourceNotFound when PRTG reports the property missing.
        """
        if self.type != "Channel":
            getprop_url = ("getobjectproperty.htm?id={objid}&name={propname}"
                           "&show=text").format(objid=self.id, propname=name)
        else:
            getprop_url = ("getobjectproperty.htm?id={objid}&subid={subid}"
                           "&name={propname}").format(
                               objid=self.sensorid, subid=self.objid, propname=name)
        req = self.get_request(url_string=getprop_url)
        soup = BeautifulSoup(req.text, 'lxml')
        if soup.result.text != "(Property not found)":
            setattr(self, name, soup.result.text)
            return soup.result.text
        raise self.ResourceNotFound(
            "No object property of name: {name}".format(name=name))

    def set_interval(self, interval):
        '''note: you will still need to disable inheritance manually.
        Valid intervals are (seconds): 30, 60, 300, 600, 900, 1800, 3600,
        14400, 21600, 43200, 86400'''
        self.set_property(name="interval", value=interval)

    def get_tree(self, root=''):
        """Download the sensortree from PRTG as a BeautifulSoup object.

        If no root id is provided the entire tree is returned. Raises
        ResourceNotFound when the id has no objects under it.
        """
        tree_url = "table.xml?content=sensortree&output=xml&id={rootid}".format(
            rootid=root)
        req = self.get_request(url_string=tree_url)
        treesoup = BeautifulSoup(req.text, "lxml")
        if len(treesoup.sensortree.nodes) > 0:
            return treesoup
        raise self.ResourceNotFound("No objects at ID: {id}".format(id=root))

    def rename(self, newname):
        """Rename this object in PRTG and locally."""
        rename_url = "rename.htm?id={objid}&value={name}".format(
            objid=self.id, name=newname)
        self.get_request(url_string=rename_url)
        self.name = newname

    def pause(self, duration=0, message=""):
        """Pause this object, indefinitely or for `duration` minutes."""
        if duration > 0:
            pause_url = "pauseobjectfor.htm?id={objid}&duration={time}".format(
                objid=self.id, time=str(duration))
        else:
            pause_url = "pause.htm?id={objid}&action=0".format(objid=self.id)
        if message:
            pause_url += "&pausemsg={string}".format(string=message)
        self.get_request(url_string=pause_url)
        self.status = "Paused"
        self.active = "false"
        self.status_raw = "7"

    def resume(self):
        """Resume this object."""
        resume_url = "pause.htm?id={objid}&action=1".format(objid=self.id)
        self.get_request(url_string=resume_url)
        # Question marks because we cannot know the post-resume status
        # without polling the server again.
        self.status = "?"
        self.active = "true"
        self.status_raw = "?"

    def get_status(self, name="status"):
        """Fetch a live status value from PRTG, cache and return it."""
        status_url = "getobjectstatus.htm?id={objid}&name={name}&show=text".format(
            objid=self.id, name=name)
        req = self.get_request(url_string=status_url)
        soup = BeautifulSoup(req.text, 'lxml')
        status = soup.result.text.strip()
        self.status = status
        return status

    def clone(self, newname, newplaceid):
        """Duplicate this object under the parent object `newplaceid`."""
        clone_url = ("duplicateobject.htm?id={objid}&name={name}"
                     "&targetid={newparent}").format(
                         objid=self.id, name=newname, newparent=newplaceid)
        self.get_request(url_string=clone_url)

    def add_tags(self, tags, clear_old=False):
        """Append tags (a list of strings) to this object.

        With clear_old=True the existing tags are replaced instead.
        """
        if not isinstance(tags, list):
            # TypeError is the idiomatic signal here; callers that caught the
            # original bare Exception still catch this.
            raise TypeError("Needs tags as type: list")
        if clear_old:
            old_tags = []
        else:
            old_tags = self.get_property('tags').split(' ')
        new_tags = " ".join(old_tags + tags)
        self.set_property(name='tags', value=new_tags)
class prtg_api(global_arrays, baseconfig):
    '''
    Top level object representing a PRTG server and (a subtree of) its
    sensortree.

    Parameters:
    - host: Enter the ip address or hostname where PRTG is running
    - port: Enter the tcp port used to connect to prtg. (usually 80 or 443)
    - user: Enter your PRTG username
    - passhash: Enter your PRTG passhash. Can be found in PRTG webgui >
      settings > account settings
    - protocol: Enter the protocol used to connect to PRTG server (http or https)
    - rootid: Enter the id of the group/probe that contains all the objects
      you want to manage. Defaults to 0 (gets entire sensortree)

    Example:
      host = '192.168.1.1'
      port = '443'
      user = 'prtgadmin'
      passhash = '0000000'
      protocol = 'https'
      rootid = '53'
      prtg = prtg_api(host,user,passhash,rootid,protocol,port)
    '''

    def __init__(self, host, user, passhash, rootid=0, protocol='https', port='443'):
        self.confdata = (host, port, user, passhash, protocol)
        self.unpack_config(self.confdata)
        self.clear_arrays()
        self.probes = []
        self.groups = []
        self.devices = []
        # Download the sensortree from the root id downwards; every direct
        # child node becomes a probe/group/device object, every other child
        # element becomes a plain attribute on this object.
        self.treesoup = self.get_tree(root=rootid)
        for child in self.treesoup.sensortree.nodes.children:
            if child.name is not None:
                for childr in child.children:
                    if childr.name == "probenode":
                        probeobj = probe(childr, self.confdata)
                        self.allprobes.append(probeobj)
                        self.probes.append(probeobj)
                    elif childr.name == "device":
                        deviceobj = device(childr, self.confdata)
                        self.devices.append(deviceobj)
                        self.alldevices.append(deviceobj)
                    elif childr.name == "group":
                        groupobj = group(childr, self.confdata)
                        self.groups.append(groupobj)
                        self.allgroups.append(groupobj)
                    elif childr.name is not None:
                        if childr.string is None:
                            childr.string = ""
                        setattr(self, childr.name, childr.string)

    def refresh(self):
        """Re-download the sensortree and sync the child objects.

        Existing children are refreshed in place, new ones are created, and
        children that disappeared from the tree are removed from both the
        local lists and the shared registries.
        """
        self.treesoup = self.get_tree(root=self.id)
        probeids = [aprobe.id for aprobe in self.probes]
        groupids = [agroup.id for agroup in self.groups]
        deviceids = [adevice.id for adevice in self.devices]
        newprobeids = []
        newgroupids = []
        newdeviceids = []
        for child in self.treesoup.sensortree.nodes.children:
            if child.name is not None:
                for childr in child.children:
                    if childr.name == "probenode":
                        if childr.find("id").string in probeids:
                            for aprobe in self.probes:
                                if aprobe.id == childr.find("id").string:
                                    aprobe.refresh(childr)
                        else:
                            probeobj = probe(childr, self.confdata)
                            self.probes.append(probeobj)
                            self.allprobes.append(probeobj)
                        newprobeids.append(childr.find("id").string)
                    elif childr.name == "group":
                        if childr.find("id").string in groupids:
                            for agroup in self.groups:
                                if agroup.id == childr.find("id").string:
                                    agroup.refresh(childr)
                        else:
                            groupobj = group(childr, self.confdata)
                            self.allgroups.append(groupobj)
                            self.groups.append(groupobj)
                        newgroupids.append(childr.find("id").string)
                    elif childr.name == "device":
                        if childr.find("id").string in deviceids:
                            for adevice in self.devices:
                                if adevice.id == childr.find("id").string:
                                    adevice.refresh(childr)
                        else:
                            deviceobj = device(childr, self.confdata)
                            # BUG FIX: the original appended the undefined
                            # name "devicebj" and used the non-existent
                            # attributes self.device / self.alldevice.
                            self.alldevices.append(deviceobj)
                            self.devices.append(deviceobj)
                        newdeviceids.append(childr.find("id").string)
                    elif childr.name is not None:
                        if childr.string is None:
                            childr.string = ""
                        setattr(self, childr.name, childr.string)
        # Drop objects that vanished from the new sensortree. Iterate over
        # copies (list(...)) because we mutate the lists while looping.
        for probeid in probeids:
            if probeid not in newprobeids:
                for aprobe in list(self.probes):
                    if aprobe.id == probeid:
                        self.allprobes.remove(aprobe)
                        self.probes.remove(aprobe)
        for groupid in groupids:
            if groupid not in newgroupids:
                for agroup in list(self.groups):
                    if agroup.id == groupid:
                        self.allgroups.remove(agroup)
                        self.groups.remove(agroup)
        for deviceid in deviceids:
            if deviceid not in newdeviceids:
                for adevice in list(self.devices):
                    if adevice.id == deviceid:
                        # BUG FIX: original used self.alldevice (NameError).
                        self.alldevices.remove(adevice)
                        self.devices.remove(adevice)

    def search_byid(self, id):
        """Return the probe/group/device/sensor with the given id, or None."""
        id = str(id)
        for obj in self.allprobes + self.allgroups + self.alldevices + self.allsensors:
            if obj.id == id:
                return obj


class channel(prtg_api):
    """A single channel of a sensor; attributes mirror its XML <item> node."""

    def __init__(self, channelsoup, sensorid, confdata):
        self.unpack_config(confdata)
        self.sensorid = sensorid
        for child in channelsoup.children:
            if child.string is None:
                child.string = ""
            if child.name is not None:
                setattr(self, child.name, child.string)
        self.id = self.objid
        if hasattr(self, 'lastvalue'):
            # Parse a numeric lastvalue into int/float plus unit where
            # possible, e.g. "1,024 KByte" -> 1024 / 1024.0 / "KByte".
            parts = self.lastvalue.split(" ")
            token = parts[0].replace(",", "")
            if token.replace(".", "").isdigit():
                try:
                    self.lastvalue_int = int(token)
                    self.lastvalue_float = float(self.lastvalue_int)
                except ValueError:
                    self.lastvalue_float = float(token)
                    self.lastvalue_int = int(self.lastvalue_float)
                # BUG FIX: the original indexed split(" ")[1] unconditionally,
                # which raised IndexError for unit-less values - the only kind
                # its whole-string isdigit() guard could admit.
                self.unit = parts[1] if len(parts) > 1 else ""
        self.type = "Channel"

    def __str__(self):
        # BUG FIX: the original returned "".format(...), i.e. always "".
        return "{name} (id: {id})".format(name=self.name, id=self.id)

    def __repr__(self):
        return self.__str__()

    def rename(self, newname):
        """Rename the channel (channels use the property API, not rename.htm)."""
        self.set_property(name="name", value=newname)
        self.name = newname

    def pause(self, duration=0, message=""):
        """Pause the parent sensor (channels cannot be paused individually)."""
        print("Channels cannot be paused, pausing parent sensor.")
        if duration > 0:
            pause_url = "pauseobjectfor.htm?id={objid}&duration={time}".format(
                objid=self.sensorid, time=duration)
        else:
            pause_url = "pause.htm?id={objid}&action=0&".format(objid=self.sensorid)
        if message:
            pause_url += "&pausemsg={string}".format(string=message)
        self.get_request(url_string=pause_url)

    def resume(self):
        """Resume the parent sensor (channels cannot be resumed individually)."""
        print("Channels cannot be resumed, resuming parent sensor.")
        resume_url = "pause.htm?id={objid}&action=1".format(objid=self.sensorid)
        self.get_request(url_string=resume_url)

    def refresh(self, channelsoup):
        """Update attributes from a freshly fetched XML <item> node."""
        for child in channelsoup.children:
            if child.string is None:
                child.string = ""
            if child.name is not None:
                setattr(self, child.name, child.string)
        self.id = self.objid

    def delete(self):
        return "You cannot delete a channel"
class sensor(prtg_api):
    """A PRTG sensor; attributes mirror its XML node, channels load on demand."""

    def __init__(self, sensorsoup, deviceid, confdata):
        self.unpack_config(confdata)
        for child in sensorsoup.children:
            if child.string is None:
                child.string = ""
            if child.name is not None:
                setattr(self, child.name, child.string)
        setattr(self, "attributes", sensorsoup.attrs)
        self.channels = []
        self.type = "Sensor"
        self.deviceid = deviceid

    def get_channels(self):
        """Create channel objects on first call, refresh them afterwards."""
        channel_url = ("table.xml?content=channels&output=xml"
                       "&columns=name,lastvalue_,objid&id={sensorid}").format(
                           sensorid=self.id)
        req = self.get_request(url_string=channel_url)
        channelsoup = BeautifulSoup(req.text, "lxml")
        if len(self.channels) == 0:
            for child in channelsoup.find_all("item"):
                self.channels.append(channel(child, self.id, self.confdata))
        else:
            for child in channelsoup.find_all("item"):
                for achannel in self.channels:
                    if achannel.objid == child.find("objid").string:
                        achannel.refresh(child)

    def refresh(self, sensorsoup=None):
        """Refresh from a supplied XML node, or re-download our own data."""
        if sensorsoup is None:
            soup = self.get_tree(root=self.id)
            sensorsoup = soup.sensortree.nodes.sensor
        for child in sensorsoup.children:
            if child.string is None:
                child.string = ""
            if child.name is not None:
                setattr(self, child.name, child.string)
        setattr(self, "attributes", sensorsoup.attrs)
        # Only refresh channels if they were fetched before.
        if len(self.channels) > 0:
            self.get_channels()

    def set_additional_param(self, parameterstring):
        """Set the sensor's additional-parameters field."""
        self.set_property(name="params", value=parameterstring)

    def acknowledge(self, message=""):
        """Acknowledge the alarm on this sensor, with an optional message."""
        acknowledge_url = "acknowledgealarm.htm?id={objid}&ackmsg={string}".format(
            objid=self.id, string=message)
        self.get_request(url_string=acknowledge_url)
        self.get_status()

    def save_graph(self, graphid, filepath, size, hidden_channels='', filetype='svg'):
        '''Download this sensor's graph image to filepath.

        Size options: S,M,L. hidden_channels is passed to the chart API's
        "hide" parameter; filetype is the image format/extension.
        '''
        if size.upper() == "L":
            width, height, font = "1500", "500", "13"
        elif size.upper() == "S":
            width, height, font = "400", "300", "9"
        else:
            width, height, font = "800", "350", "13"
        if hidden_channels:
            hidden_channels = "&hide={hc}".format(hc=hidden_channels)
        chart_url = ("chart.{ft}?type=graph&graphid={gid}&id={sid}"
                     "&width={w}&height={h}{hc}"
                     "&plotcolor=%23ffffff&gridcolor=%23ffffff"
                     "&graphstyling=showLegend%3D%271%27+baseFontSize%3D%27{f}%27").format(
                         ft=filetype, gid=graphid, sid=self.id,
                         w=width, h=height, hc=hidden_channels, f=font)
        req = self.get_request(url_string=chart_url, api=False)
        with open(filepath, "wb") as imgfile:
            for chunk in req:
                imgfile.write(chunk)
        self.filepath = filepath


class device(prtg_api):
    """A PRTG device; builds sensor objects from its XML node."""

    def __init__(self, devicesoup, confdata):
        self.unpack_config(confdata)
        self.sensors = []
        # Robustness: resolve our own id before the child loop - the sensor
        # children reference self.id and the original crashed if a <sensor>
        # element preceded the <id> element in the sensortree XML.
        idnode = devicesoup.find("id")
        if idnode is not None:
            self.id = idnode.string
        for child in devicesoup.children:
            if child.name == "sensor":
                sensorobj = sensor(child, self.id, self.confdata)
                self.sensors.append(sensorobj)
                self.allsensors.append(sensorobj)
            elif child.name is not None:
                if child.string is None:
                    child.string = ""
                setattr(self, child.name, child.string)
        # Index sensors by their status string ("Up", "Down", ...).
        self.sensors_by_status = {"Up": [], "Down": [], "Warning": [], "Paused": []}
        for asensor in self.sensors:
            self.sensors_by_status.setdefault(asensor.status, []).append(asensor)
        setattr(self, "attributes", devicesoup.attrs)
        self.type = "Device"

    def refresh(self, devicesoup=None):
        """Refresh from a supplied XML node, or re-download our subtree."""
        if devicesoup is None:
            soup = self.get_tree(root=self.id)
            devicesoup = soup.sensortree.nodes.device
        sensorids = [asensor.id for asensor in self.sensors]
        newsensorids = []
        for child in devicesoup.children:
            if child.name == "sensor":
                if child.find("id").string in sensorids:
                    for asensor in self.sensors:
                        if asensor.id == child.find("id").string:
                            asensor.refresh(child)
                else:
                    sensorobj = sensor(child, self.id, self.confdata)
                    self.sensors.append(sensorobj)
                    self.allsensors.append(sensorobj)
                newsensorids.append(child.find("id").string)
            elif child.name is not None:
                if child.string is None:
                    child.string = ""
                setattr(self, child.name, child.string)
        # Drop sensors that vanished from the tree; iterate a copy because
        # we mutate self.sensors while looping.
        for sensorid in sensorids:
            if sensorid not in newsensorids:
                for asensor in list(self.sensors):
                    if asensor.id == sensorid:
                        self.sensors.remove(asensor)
                        self.allsensors.remove(asensor)
        setattr(self, "attributes", devicesoup.attrs)

    def set_host(self, host):
        """Change the address PRTG uses to reach this device."""
        self.set_property(name="host", value=host)
        self.host = host
class group(prtg_api):
    """A PRTG group; builds child device/group objects from its XML node."""

    def __init__(self, groupsoup, confdata):
        self.unpack_config(confdata)
        self.groups = []
        self.devices = []
        # Child nodes either become device/group objects or plain attributes
        # on this object.
        for child in groupsoup.children:
            if child.name == "device":
                deviceobj = device(child, self.confdata)
                self.devices.append(deviceobj)
                self.alldevices.append(deviceobj)
            elif child.name == "group":
                groupobj = group(child, self.confdata)
                self.groups.append(groupobj)
                self.allgroups.append(groupobj)
            elif child.name is not None:
                if child.string is None:
                    child.string = ""
                setattr(self, child.name, child.string)
        setattr(self, "attributes", groupsoup.attrs)
        self.type = "Group"

    def refresh(self, groupsoup=None):
        """Refresh from a supplied XML node, or re-download our subtree.

        Also used by probe objects, whose XML node is <probenode>.
        """
        if groupsoup is None:
            soup = self.get_tree(root=self.id)
            if self.type == "Group":
                groupsoup = soup.sensortree.nodes.group
            elif self.type == "Probe":
                groupsoup = soup.sensortree.nodes.probenode
        deviceids = [adevice.id for adevice in self.devices]
        groupids = [agroup.id for agroup in self.groups]
        newdeviceids = []
        newgroupids = []
        for child in groupsoup.children:
            if child.name == "device":
                if child.find("id").string in deviceids:
                    for adevice in self.devices:
                        if adevice.id == child.find("id").string:
                            adevice.refresh(child)
                else:
                    deviceobj = device(child, self.confdata)
                    self.devices.append(deviceobj)
                    self.alldevices.append(deviceobj)
                newdeviceids.append(child.find("id").string)
            elif child.name == "group":
                if child.find("id").string in groupids:
                    for agroup in self.groups:
                        if agroup.id == child.find("id").string:
                            agroup.refresh(child)
                else:
                    groupobj = group(child, self.confdata)
                    self.groups.append(groupobj)
                    self.allgroups.append(groupobj)
                newgroupids.append(child.find("id").string)
            elif child.name is not None:
                if child.string is None:
                    child.string = ""
                setattr(self, child.name, child.string)
        # Drop children that vanished; iterate copies because we mutate the
        # lists while looping.
        for deviceid in deviceids:
            if deviceid not in newdeviceids:
                for adevice in list(self.devices):
                    if adevice.id == deviceid:
                        self.devices.remove(adevice)
                        self.alldevices.remove(adevice)
        for groupid in groupids:
            if groupid not in newgroupids:
                for agroup in list(self.groups):
                    if agroup.id == groupid:
                        self.groups.remove(agroup)
                        self.allgroups.remove(agroup)
        setattr(self, "attributes", groupsoup.attrs)


class probe(group):
    # A probe behaves exactly like a group; only the type label differs.
    type = "Probe"


class prtg_device(baseconfig):
    '''Separate top level object to manage just a device and its sensors
    instead of downloading details for an entire group.'''

    def __init__(self, host, user, passhash, deviceid, protocol='https', port='443'):
        self.confdata = (host, port, user, passhash, protocol)
        self.unpack_config(self.confdata)
        self.sensors = []
        soup = self.get_tree(root=deviceid)
        devicenode = soup.sensortree.nodes.device
        # Robustness: resolve our own id up front - sensor children reference
        # self.id and may appear before the <id> element in the XML.
        idnode = devicenode.find("id")
        if idnode is not None:
            self.id = idnode.string
        for child in devicenode:
            if child.name == "sensor":
                sensorobj = sensor(child, self.id, self.confdata)
                self.sensors.append(sensorobj)
            elif child.name is not None:
                if child.string is None:
                    child.string = ""
                setattr(self, child.name, child.string)
        # Index sensors by their status string ("Up", "Down", ...).
        self.sensors_by_status = {"Up": [], "Down": [], "Warning": [], "Paused": []}
        for asensor in self.sensors:
            self.sensors_by_status.setdefault(asensor.status, []).append(asensor)

    def refresh(self):
        """Re-download this device's subtree and sync its sensor objects."""
        # BUG FIX: the original called get_tree(root=deviceid) with the
        # undefined name "deviceid", raising NameError on every refresh.
        soup = self.get_tree(root=self.id)
        sensorids = [asensor.id for asensor in self.sensors]
        for child in soup.sensortree.nodes.device:
            if child.name == "sensor":
                if child.find("id").string in sensorids:
                    for asensor in self.sensors:
                        if asensor.id == child.find("id").string:
                            asensor.refresh(child)
                else:
                    sensorobj = sensor(child, self.id, self.confdata)
                    self.sensors.append(sensorobj)
            elif child.name is not None:
                if child.string is None:
                    child.string = ""
                setattr(self, child.name, child.string)
class prtg_sensor(baseconfig):
    '''Separate top level object to manage just a sensor and its channels
    instead of downloading details for an entire group.'''

    def __init__(self, host, user, passhash, sensorid, protocol='https', port='443'):
        self.confdata = (host, port, user, passhash, protocol)
        self.unpack_config(self.confdata)
        self.channels = []
        soup = self.get_tree(root=sensorid)
        for child in soup.sensortree.nodes.sensor:
            if child.name is not None:
                if child.string is None:
                    child.string = ""
                setattr(self, child.name, child.string)
        self.get_channels()

    def refresh(self):
        """Re-download this sensor's data and refresh attributes/channels."""
        soup = self.get_tree(root=self.id)
        sensornode = soup.sensortree.nodes.sensor
        for child in sensornode.children:
            if child.string is None:
                child.string = ""
            if child.name is not None:
                setattr(self, child.name, child.string)
        # BUG FIX: the original read attrs from the undefined name
        # "sensorsoup" here, raising NameError on every refresh.
        setattr(self, "attributes", sensornode.attrs)
        self.get_channels()

    def get_channels(self):
        """Create channel objects on first call, refresh them afterwards."""
        channel_url = ("table.xml?content=channels&output=xml"
                       "&columns=name,lastvalue_,objid&id={sensorid}").format(
                           sensorid=self.id)
        req = self.get_request(url_string=channel_url)
        channelsoup = BeautifulSoup(req.text, "lxml")
        if len(self.channels) == 0:
            for child in channelsoup.find_all("item"):
                self.channels.append(channel(child, self.id, self.confdata))
        else:
            for child in channelsoup.find_all("item"):
                for achannel in self.channels:
                    if achannel.objid == child.find("objid").string:
                        achannel.refresh(child)

    def acknowledge(self, message=""):
        """Acknowledge the alarm on this sensor, with an optional message."""
        acknowledge_url = "acknowledgealarm.htm?id={objid}&ackmsg={string}".format(
            objid=self.id, string=message)
        self.get_request(url_string=acknowledge_url)

    def save_graph(self, graphid, filepath, size, hidden_channels='', filetype='svg'):
        '''Download this sensor's graph image to filepath.

        Size options: S,M,L. hidden_channels is passed to the chart API's
        "hide" parameter; filetype is the image format/extension.
        '''
        if size.upper() == "L":
            width, height, font = "1500", "500", "13"
        elif size.upper() == "S":
            width, height, font = "400", "300", "9"
        else:
            width, height, font = "800", "350", "13"
        if hidden_channels:
            hidden_channels = "&hide={hc}".format(hc=hidden_channels)
        chart_url = ("chart.{ft}?type=graph&graphid={gid}&id={sid}"
                     "&width={w}&height={h}{hc}"
                     "&plotcolor=%23ffffff&gridcolor=%23ffffff"
                     "&graphstyling=showLegend%3D%271%27+baseFontSize%3D%27{f}%27").format(
                         ft=filetype, gid=graphid, sid=self.id,
                         w=width, h=height, hc=hidden_channels, f=font)
        req = self.get_request(url_string=chart_url, api=False)
        with open(filepath, "wb") as imgfile:
            for chunk in req:
                imgfile.write(chunk)
        self.filepath = filepath


class prtg_historic_data(connection_methods):
    '''Client for the historic data api.

    Construct with connection params, then use the methods to fetch and
    process data. Dates are formatted as yyyy-mm-dd-hh-mm-ss.
    '''

    def __init__(self, host, port, user, passhash, protocol):
        self.confdata = (host, port, user, passhash, protocol)
        self.unpack_config(self.confdata)

    def format_date(self, dateobj):
        '''Format a datetime object appropriately for the historic data api.'''
        return dateobj.strftime("%Y-%m-%d-%H-%M-%S")

    def get_historic_data(self, objid, startdate, enddate, timeaverage):
        """Fetch historic CSV data for an object and return it as a dict.

        startdate/enddate may be datetime objects or pre-formatted strings;
        timeaverage is the averaging interval. Returns a mapping of column
        header -> list of values; the "Date Time" column is parsed into
        datetime objects.
        """
        if isinstance(startdate, datetime):
            startdate = self.format_date(startdate)
        if isinstance(enddate, datetime):
            enddate = self.format_date(enddate)
        historic_url = ("historicdata.csv?id={id}&avg={avg}"
                        "&sdate={sdate}&edate={edate}").format(
                            id=objid, avg=timeaverage,
                            sdate=startdate, edate=enddate)
        req = self.get_request(url_string=historic_url)
        # Drop the last two lines of the response (trailing summary/blank
        # rows - presumably the "Sums" footer PRTG appends; TODO confirm).
        csv_lines = req.text.split("\n")[:-2]
        data = {}
        headers = []
        for rownum, row in enumerate(csv.reader(csv_lines)):
            if rownum == 0:
                headers = row
                for header in headers:
                    data[header] = []
            else:
                for colnum, cell in enumerate(row):
                    if headers[colnum] == "Date Time":
                        # Keep only the start of a "start - end" range before
                        # parsing the timestamp. (Original tested "-" but
                        # sliced at " -", which raised ValueError when only
                        # the former was present.)
                        if " -" in cell:
                            cell = cell[:cell.index(" -")]
                        data[headers[colnum]].append(
                            datetime.strptime(cell, "%m/%d/%Y %I:%M:%S %p"))
                    else:
                        data[headers[colnum]].append(cell)
        return data